{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef aws_client(self, client_id=None):\n if client_id is None:\n return self._aws_clients\n elif self._aws_clients is not None and self._aws_clients.has_key(client_id):\n return self._aws_clients[client_id]\n else:\n return None", "response": "Returns the AWS client if it exists or None if not."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_aws_clients(self, clients):\n if type(clients) is not dict:\n raise TypeError(\"clients must be a dict\")\n self._aws_clients = clients", "response": "Sets the AWS clients in the context object"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nhandle command line args and sets the EFVersionContext object.", "response": "def handle_args_and_set_context(args):\n \"\"\"\n Args:\n args: the command line args, probably passed from main() as sys.argv[1:]\n Returns:\n a populated EFVersionContext object\n \"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument(\"service_name\", help=\"name of the service\")\n parser.add_argument(\"key\", help=\"version key to look up for such as 'ami-id' (list in EF_Config)\")\n parser.add_argument(\"env\", help=\", \".join(EFConfig.ENV_LIST))\n group = parser.add_mutually_exclusive_group(required=True)\n group.add_argument(\"--get\", help=\"get current version\", action=\"store_true\")\n group.add_argument(\"--set\", help=\"set current version of to for \")\n group.add_argument(\"--rollback\", help=\"set current version to most recent 'stable' version in history\",\n action=\"store_true\")\n group.add_argument(\"--rollback-to\", help=\"rollback current version to in history\",\n action=\"store\", metavar='')\n group.add_argument(\"--history\", help=\"Show version history for env/service/key\", choices=['json', 'text'])\n group.add_argument(\"--show\", help=\"Show keys and 
values. '*' allowed for and \",\n action=\"store_true\", default=False)\n parser.add_argument(\"--build\",\n help=\"On --set, also set the externally defined build number associated with the version entity\",\n default=\"\")\n parser.add_argument(\"--commit_hash\", help=\"On --set, also set the commit hash associated with the version entity\",\n default=\"\")\n parser.add_argument(\"--commit\", help=\"Actually --set or --rollback (dry run if omitted)\",\n action=\"store_true\", default=False)\n parser.add_argument(\"--devel\", help=\"Allow running from branch; don't refresh from origin\", action=\"store_true\",\n default=False)\n parser.add_argument(\"--force_env_full\", help=\"Override env with env_full for account-scoped environments\",\n action=\"store_true\", default=False)\n parser.add_argument(\"--limit\", help=\"Limit 'history', 'rollback', 'show' to first N records (default 100, max 1000)\",\n type=int, default=100)\n parser.add_argument(\"--location\", help=\"On --set, also mark the url location of the static build's version file to\"\n \"support dist-hash precheck\", default=\"\")\n if EFConfig.ALLOW_EF_VERSION_SKIP_PRECHECK:\n parser.add_argument(\"--noprecheck\", help=\"--set or --rollback without precheck\", action=\"store_true\", default=False)\n parser.add_argument(\"--sr\", help=\"optional /path/to/service_registry_file.json\", default=None)\n parser.add_argument(\"--stable\", help=\"On --set, also mark the version 'stable'\", action=\"store_true\")\n parser.add_argument(\"--verbose\", help=\"Print additional info\", action=\"store_true\", default=False)\n # parse\n parsed_args = vars(parser.parse_args(args))\n context = EFVersionContext()\n # marshall the inherited context values\n context._build_number = parsed_args[\"build\"]\n context._commit_hash = parsed_args[\"commit_hash\"]\n context.commit = parsed_args[\"commit\"]\n context.devel = parsed_args[\"devel\"]\n context._force_env_full = parsed_args[\"force_env_full\"]\n try:\n context.env = 
parsed_args[\"env\"]\n except ValueError as e:\n fail(\"Error in env: {}\".format(e.message))\n # marshall this module's additional context values\n context._get = parsed_args[\"get\"]\n context._history = parsed_args[\"history\"]\n context._key = parsed_args[\"key\"]\n if EFConfig.ALLOW_EF_VERSION_SKIP_PRECHECK:\n context._noprecheck = parsed_args[\"noprecheck\"]\n if not 1 <= parsed_args[\"limit\"] <= 1000:\n fail(\"Error in --limit. Valid range: 1..1000\")\n context._limit = parsed_args[\"limit\"]\n context._location = parsed_args[\"location\"]\n context._rollback = parsed_args[\"rollback\"]\n context._rollback_to = parsed_args[\"rollback_to\"]\n context._service_name = parsed_args[\"service_name\"]\n context._show = parsed_args[\"show\"]\n context._stable = parsed_args[\"stable\"]\n context._value = parsed_args[\"set\"]\n # Set up service registry and policy template path which depends on it\n context.service_registry = EFServiceRegistry(parsed_args[\"sr\"])\n\n # VERBOSE is global\n global VERBOSE\n VERBOSE = parsed_args[\"verbose\"]\n\n validate_context(context)\n return context"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nvalidate that the current context is valid.", "response": "def validate_context(context):\n \"\"\"\n Set the key for the current context.\n Args:\n context: a populated EFVersionContext object\n \"\"\"\n # Service must exist in service registry\n if not context.service_registry.service_record(context.service_name):\n fail(\"service: {} not found in service registry: {}\".format(\n context.service_name, context.service_registry.filespec))\n service_type = context.service_registry.service_record(context.service_name)[\"type\"]\n\n # Key must be valid\n if context.key not in EFConfig.VERSION_KEYS:\n fail(\"invalid key: {}; see VERSION_KEYS in ef_config for supported keys\".format(context.key))\n\n # Lookup allowed key for service type\n if \"allowed_types\" in EFConfig.VERSION_KEYS[context.key] 
and \\\n service_type not in EFConfig.VERSION_KEYS[context.key][\"allowed_types\"]:\n fail(\"service_type: {} is not allowed for key {}; see VERSION_KEYS[KEY]['allowed_types']\"\n \"in ef_config and validate service registry entry\".format(service_type, context.key))\n\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef precheck_ami_id(context):\n # get the current AMI\n key = \"{}/{}\".format(context.env, context.service_name)\n print_if_verbose(\"precheck_ami_id with key: {}\".format(key))\n current_ami = context.versionresolver.lookup(\"ami-id,{}\".format(key))\n print_if_verbose(\"ami found: {}\".format(current_ami))\n\n # If bootstrapping (this will be the first entry in the version history)\n # then we can't check it vs. running version\n if current_ami is None:\n print_if_verbose(\"precheck passed without check because current AMI is None\")\n return True\n\n # Otherwise perform a consistency check\n # 1. get IDs of instances running the AMI - will find instances in all environments\n instances_running_ami = context.aws_client(\"ec2\").describe_instances(\n Filters=[{\n 'Name': 'image-id',\n 'Values': [current_ami]\n }]\n )[\"Reservations\"]\n if instances_running_ami:\n instances_running_ami = [resv[\"Instances\"][0][\"InstanceId\"] for resv in instances_running_ami]\n print_if_verbose(\"instances running ami {}:\\n{}\".format(current_ami, repr(instances_running_ami)))\n\n # 2. 
Get IDs of instances running as -\n env_service = \"{}-{}\".format(context.env, context.service_name)\n instances_running_as_env_service = context.aws_client(\"ec2\").describe_instances(\n Filters=[{\n 'Name': 'iam-instance-profile.arn',\n 'Values': [\"arn:aws:iam::*:instance-profile/{}-{}\".format(context.env, context.service_name)]\n }]\n )[\"Reservations\"]\n if instances_running_as_env_service:\n instances_running_as_env_service = \\\n [resv[\"Instances\"][0][\"InstanceId\"] for resv in instances_running_as_env_service]\n print_if_verbose(\"instances running as {}\".format(env_service))\n print_if_verbose(repr(instances_running_as_env_service))\n\n # 3. Instances running as env-service should be a subset of instances running the AMI\n for instance_id in instances_running_as_env_service:\n if instance_id not in instances_running_ami:\n raise RuntimeError(\"Instance: {} not running expected ami: {}\".format(instance_id, current_ami))\n\n # Check passed - all is well\n return True", "response": "Check if AMI is the same as the current AMI in the version records."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck if the dist in service is the same as the current dist version in the version records.", "response": "def precheck_dist_hash(context):\n \"\"\"\n Is the dist in service the same as the dist marked current in the version records?\n This tool won't update records unless the world state is coherent.\n Args:\n context: a populated EFVersionContext object\n Returns:\n True if ok to proceed\n Raises:\n RuntimeError if not ok to proceed\n \"\"\"\n # get the current dist-hash\n key = \"{}/{}/dist-hash\".format(context.service_name, context.env)\n print_if_verbose(\"precheck_dist_hash with key: {}\".format(key))\n try:\n current_dist_hash = Version(context.aws_client(\"s3\").get_object(\n Bucket=EFConfig.S3_VERSION_BUCKET,\n Key=key\n ))\n print_if_verbose(\"dist-hash found: {}\".format(current_dist_hash.value))\n except 
ClientError as error:\n if error.response[\"Error\"][\"Code\"] == \"NoSuchKey\":\n # If bootstrapping (this will be the first entry in the version history)\n # then we can't check it vs. current version, thus we cannot get the key\n print_if_verbose(\"precheck passed without check because current dist-hash is None\")\n return True\n else:\n fail(\"Exception while prechecking dist_hash for {} {}: {}\".format(context.service_name, context.env, error))\n\n # Otherwise perform a consistency check\n # 1. get dist version in service for environment\n try:\n response = urllib2.urlopen(current_dist_hash.location, None, 5)\n if response.getcode() != 200:\n raise IOError(\"Non-200 response \" + str(response.getcode()) + \" reading \" + current_dist_hash.location)\n dist_hash_in_service = response.read().strip()\n except urllib2.URLError as error:\n raise IOError(\"URLError in http_get_dist_version: \" + repr(error))\n\n # 2. dist version in service should be the same as \"current\" dist version\n if dist_hash_in_service != current_dist_hash.value:\n raise RuntimeError(\"{} dist-hash in service: {} but expected dist-hash: {}\"\n .format(key, dist_hash_in_service, current_dist_hash.value))\n\n # Check passed - all is well\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nchecks if precheck function for a key is set in EFVersionContext object.", "response": "def precheck(context):\n \"\"\"\n calls a function named \"precheck_\" where is context_key with '-' changed to '_'\n (e.g. 
\"precheck_ami_id\")\n Checking function should return True if OK, or raise RuntimeError w/ message if not\n Args:\n context: a populated EFVersionContext object\n Returns:\n True if the precheck passed, or if there was no precheck function for context.key\n Raises:\n RuntimeError if precheck failed, with explanatory message\n \"\"\"\n if context.noprecheck:\n return True\n func_name = \"precheck_\" + context.key.replace(\"-\", \"_\")\n if func_name in globals() and isfunction(globals()[func_name]):\n return globals()[func_name](context)\n else:\n return True"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting all versions of a key", "response": "def get_versions(context, return_stable=False):\n \"\"\"\n Get all versions of a key\n Args:\n context: a populated EFVersionContext object\n return_stable: (default:False) If True, stop fetching if 'stable' version is found; return only that version\n Returns:\n json list of object data sorted in reverse by last_modified (newest version is first). 
Each item is a dict:\n {\n 'value': ,\n 'last_modified\": , (ISO8601 date time string)\n 'modified_by': '',\n 'version_id': '',\n 'status': See EF_Config.S3_VERSION_STATUS_* for possible values\n }\n \"\"\"\n s3_key = \"{}/{}/{}\".format(context.service_name, context.env, context.key)\n object_version_list = context.aws_client(\"s3\").list_object_versions(\n Bucket=EFConfig.S3_VERSION_BUCKET,\n Delimiter='/',\n MaxKeys=context.limit,\n Prefix=s3_key\n )\n if \"Versions\" not in object_version_list:\n return []\n object_versions = []\n for version in object_version_list[\"Versions\"]:\n object_version = Version(context.aws_client(\"s3\").get_object(\n Bucket=EFConfig.S3_VERSION_BUCKET,\n Key=s3_key,\n VersionId=version[\"VersionId\"]\n ))\n # Stop if a stable version was found and return_stable was set\n if return_stable and object_version.status == EFConfig.S3_VERSION_STATUS_STABLE:\n return [object_version]\n object_versions.append(object_version)\n\n # If caller is looking for a 'stable' version and we made it to here, a stable version was not found\n if return_stable:\n return []\n else:\n return sorted(object_versions, key=lambda v: v.last_modified, reverse=True)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_version_by_value(context, value):\n versions = get_versions(context)\n for version in versions:\n if version.value == value:\n return version\n fail(\"Didn't find a matching version for: \"\n \"{}:{} in env/service: {}/{}\".format(\n context.key, value,\n context.env, context.service_name))", "response": "Get the latest version that matches the provided value"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef cmd_rollback(context):\n last_stable = get_versions(context, return_stable=True)\n if len(last_stable) != 1:\n fail(\"Didn't find a version marked stable for key: {} in env/service: {}/{}\".format(\n context.key, context.env, context.service_name))\n 
context.value = last_stable[0].value\n context.commit_hash = last_stable[0].commit_hash\n context.build_number = last_stable[0].build_number\n context.location = last_stable[0].location\n context.stable = True\n cmd_set(context)", "response": "Roll back by finding the most recent stable tagged version and putting it again."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nroll back to a specific version.", "response": "def cmd_rollback_to(context):\n \"\"\"\n Roll back by finding a specific version in the history of the service and\n putting it as the new current version.\n Args:\n context: a populated EFVersionContext object\n \"\"\"\n version = get_version_by_value(context, context.rollback_to)\n context.value = version.value\n context.commit_hash = version.commit_hash\n context.build_number = version.build_number\n context.location = version.location\n context.stable = True\n cmd_set(context)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets the current value for a key.", "response": "def cmd_set(context):\n \"\"\"\n Set the new \"current\" value for a key.\n If the existing current version and the new version have identical /value/ and /status,\n then nothing is written, to avoid stacking up redundant entreis in the version table.\n Args:\n context: a populated EFVersionContext object\n \"\"\"\n # If key value is a special symbol, see if this env allows it\n if context.value in EFConfig.SPECIAL_VERSIONS and context.env_short not in EFConfig.SPECIAL_VERSION_ENVS:\n fail(\"special version: {} not allowed in env: {}\".format(context.value, context.env_short))\n # If key value is a special symbol, the record cannot be marked \"stable\"\n if context.value in EFConfig.SPECIAL_VERSIONS and context.stable:\n fail(\"special versions such as: {} cannot be marked 'stable'\".format(context.value))\n\n # Resolve any references\n if context.value == \"=prod\":\n context.value = 
context.versionresolver.lookup(\"{},{}/{}\".format(context.key, \"prod\", context.service_name))\n elif context.value == \"=staging\":\n context.value = context.versionresolver.lookup(\"{},{}/{}\".format(context.key, \"staging\", context.service_name))\n elif context.value == \"=latest\":\n if not EFConfig.VERSION_KEYS[context.key][\"allow_latest\"]:\n fail(\"=latest cannot be used with key: {}\".format(context.key))\n func_name = \"_getlatest_\" + context.key.replace(\"-\", \"_\")\n if func_name in globals() and isfunction(globals()[func_name]):\n context.value = globals()[func_name](context)\n else:\n raise RuntimeError(\"{} version for {}/{} is '=latest' but can't look up because method not found: {}\".format(\n context.key, context.env, context.service_name, func_name))\n\n # precheck to confirm coherent world state before attempting set - whatever that means for the current key type\n try:\n precheck(context)\n except Exception as e:\n fail(\"Precheck failed: {}\".format(e.message))\n\n s3_key = \"{}/{}/{}\".format(context.service_name, context.env, context.key)\n s3_version_status = EFConfig.S3_VERSION_STATUS_STABLE if context.stable else EFConfig.S3_VERSION_STATUS_UNDEFINED\n\n # If the set would put a value and status that are the same as the existing 'current' value/status, don't do it\n context.limit = 1\n current_version = get_versions(context)\n # If there is no 'current version' it's ok, just means the set will write the first entry\n if len(current_version) == 1 and current_version[0].status == s3_version_status and \\\n current_version[0].value == context.value:\n print(\"Version not written because current version and new version have identical value and status: {} {}\"\n .format(current_version[0].value, current_version[0].status))\n return\n\n if not context.commit:\n print(\"=== DRY RUN ===\\nUse --commit to set value\\n=== DRY RUN ===\")\n print(\"would set key: {} with value: {} {} {} {} {}\".format(\n s3_key, context.value, 
context.build_number, context.commit_hash, context.location, s3_version_status))\n else:\n context.aws_client(\"s3\").put_object(\n ACL='bucket-owner-full-control',\n Body=context.value,\n Bucket=EFConfig.S3_VERSION_BUCKET,\n ContentEncoding=EFConfig.S3_VERSION_CONTENT_ENCODING,\n Key=s3_key,\n Metadata={\n EFConfig.S3_VERSION_BUILDNUMBER_KEY: context.build_number,\n EFConfig.S3_VERSION_COMMITHASH_KEY: context.commit_hash,\n EFConfig.S3_VERSION_LOCATION_KEY: context.location,\n EFConfig.S3_VERSION_MODIFIEDBY_KEY: context.aws_client(\"sts\").get_caller_identity()[\"Arn\"],\n EFConfig.S3_VERSION_STATUS_KEY: s3_version_status\n },\n StorageClass='STANDARD'\n )\n print(\"set key: {} with value: {} {} {} {} {}\".format(\n s3_key, context.value, context.build_number, context.commit_hash, context.location, s3_version_status))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a json representation of this object.", "response": "def to_json(self):\n \"\"\"\n called by VersionEncoder.default() when doing json.dumps() on the object\n the json materializes in reverse order from the order used here\n \"\"\"\n return {\n \"build_number\": self._build_number,\n \"commit_hash\": self._commit_hash,\n \"last_modified\": self._last_modified,\n \"location\": self._location,\n \"modified_by\": self._modified_by,\n \"status\": self._status,\n \"value\": self._value,\n \"version_id\": self._version_id\n }"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ntrain the model using the environment and agent.", "response": "def learn(self, steps=1, **kwargs):\n \"\"\"\n Train the model using the environment and the agent.\n\n Note that the model might be shared between multiple agents (which most probably are of the same type)\n at the same time.\n :param steps: The number of steps to train for.\n \"\"\"\n # TODO add some housekeeping\n for i in range(steps):\n self.step(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": 
"Implement a Python 3 function for\ngetting the value for the given symbol in the environment set.", "response": "def get_value(self, symbol):\n \"\"\"\n Hierarchically searches for 'symbol' in the parameters blob if there is one (would have\n been retrieved by 'load()'). Order is: default, , \n Args:\n symbol: the key to resolve\n Returns:\n Hierarchically resolved value for 'symbol' in the environment set by the constructor,\n or None if a match is not found or there are no parameters\n \"\"\"\n default = \"default\"\n if not self.parameters:\n return None\n # Hierarchically lookup the value\n result = None\n if default in self.parameters and symbol in self.parameters[default]:\n result = self.parameters[default][symbol]\n if self.env_short in self.parameters and symbol in self.parameters[self.env_short]:\n result = self.parameters[self.env_short][symbol]\n # This lookup is redundant when env_short == env, but it's also cheap\n if self.env in self.parameters and symbol in self.parameters[self.env]:\n result = self.parameters[self.env][symbol]\n # Finally, convert any list of items into a single \\n-delimited string\n if isinstance(result, list):\n result = \"\\n\".join(result)\n\n return result"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef overlay_classification_on_image(classification, rgb_image, scale=1):\n with tf.variable_scope(\"debug_overlay\"):\n if not classification.get_shape()[3] in [1, 2, 3]:\n raise RuntimeError(\"The classification can either be of 1, 2 or 3 dimensions as last dimension, but shape is {}\".format(classification.get_shape().as_list()))\n\n size = rgb_image.get_shape()[1:3]\n if classification.get_shape()[3] == 1:\n classification = tf.pad(classification, [[0, 0], [0, 0], [0, 0], [0, 2]], \"CONSTANT\")\n elif classification.get_shape()[3] == 2:\n classification = tf.pad(classification, [[0, 0], [0, 0], [0, 0], [0, 1]], \"CONSTANT\")\n casted_classification = 
tf.cast(classification, dtype=tf.float32)\n target_size = (int(classification.get_shape()[1] * scale), int(classification.get_shape()[2] * scale))\n scaled_image = tf.image.resize_images(casted_classification, size=target_size, method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)\n cropped_img = tf.image.crop_to_bounding_box(scaled_image, 0, 0, size[0], size[1])\n return 0.5 * rgb_image + 0.5 * 255 * cropped_img", "response": "Overlays a classification on an image."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef inflate_to_one_hot(tensor, classes):\n one_hot = tf.one_hot(tensor, classes)\n shape = one_hot.get_shape().as_list()\n return tf.reshape(one_hot, shape=[-1, shape[1], shape[2], shape[4]])", "response": "Converts a tensor with index form to a one hot tensor."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef accountaliasofenv(self, lookup, default=None):\n if lookup in EFConfig.ENV_ACCOUNT_MAP:\n return EFConfig.ENV_ACCOUNT_MAP[lookup]\n else:\n return None", "response": "Returns the account alias of an env"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef customdata(self, lookup, default=None):\n try:\n if lookup in EFConfig.CUSTOM_DATA:\n return EFConfig.CUSTOM_DATA[lookup]\n else:\n return default\n except AttributeError:\n return default", "response": "Returns the custom data for the specified file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nprinting a failure message and exit nonzero .", "response": "def fail(message, exception_data=None):\n \"\"\"\n Print a failure message and exit nonzero\n \"\"\"\n print(message, file=sys.stderr)\n if exception_data:\n print(repr(exception_data))\n sys.exit(1)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nfetching AWS metadata from http://169. 254. 168. 
254. 254.", "response": "def http_get_metadata(metadata_path, timeout=__HTTP_DEFAULT_TIMEOUT_SEC):\n \"\"\"\n Fetch AWS metadata from http://169.254.169.254/latest/meta-data/\n ARGS:\n metadata_path - the optional path and required key to the EC2 metadata (e.g. \"instance-id\")\n RETURN:\n response content on success\n RAISE:\n URLError if there was a problem reading metadata\n \"\"\"\n metadata_path = __METADATA_PREFIX + metadata_path\n try:\n response = urllib2.urlopen(metadata_path, None, timeout)\n if response.getcode() != 200:\n raise IOError(\"Non-200 response \" + str(response.getcode()) + \" reading \" + metadata_path)\n return response.read()\n except urllib2.URLError as error:\n raise IOError(\"URLError in http_get_metadata: \" + repr(error))"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a boolean indicating if the current environment is in virtualbox.", "response": "def is_in_virtualbox():\n \"\"\"\n Is the current environment a virtualbox instance?\n Returns a boolean\n Raises IOError if the necessary tooling isn't available\n \"\"\"\n if not isfile(__VIRT_WHAT) or not access(__VIRT_WHAT, X_OK):\n raise IOError(\"virt-what not available\")\n try:\n return subprocess.check_output([\"sudo\", \"-n\", __VIRT_WHAT]).split('\\n')[0:2] == __VIRT_WHAT_VIRTUALBOX_WITH_KVM\n except subprocess.CalledProcessError as e:\n raise IOError(\"virt-what failed execution with {}\".format(e))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ndetermine if this is an EC2 instance or running locally", "response": "def whereami():\n \"\"\"\n Determine if this is an ec2 instance or \"running locally\"\n Returns:\n \"ec2\" - this is an ec2 instance\n \"virtualbox-kvm\" - kernel VM (virtualbox with vagrant)\n \"local\" - running locally and not in a known VM\n \"unknown\" - I have no idea where I am\n \"\"\"\n # If the metadata endpoint responds, this is an EC2 instance\n # If it doesn't, we can safely say this 
isn't EC2 and try the other options\n try:\n response = http_get_metadata(\"instance-id\", 1)\n if response[:2] == \"i-\":\n return \"ec2\"\n except:\n pass\n\n # Virtualbox?\n try:\n if is_in_virtualbox():\n return \"virtualbox-kvm\"\n except:\n pass\n\n # Outside virtualbox/vagrant but not in aws; hostname is \".local\"\n hostname = gethostname()\n if re.findall(r\"\\.local$\", hostname):\n return \"local\"\n\n # we have no idea where we are\n return \"unknown\""} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef http_get_instance_env():\n try:\n info = json.loads(http_get_metadata('iam/info'))\n except Exception as error:\n raise IOError(\"Error looking up metadata:iam/info: \" + repr(error))\n return info[\"InstanceProfileArn\"].split(\":\")[5].split(\"/\")[1].split(\"-\",1)[0]", "response": "Returns just the env this ec2 instance is in. Doesn t require API access like get_instance_aws_context does\n"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_instance_aws_context(ec2_client):\n result = {}\n try:\n result[\"region\"] = http_get_metadata(\"placement/availability-zone/\")\n result[\"region\"] = result[\"region\"][:-1]\n result[\"instance_id\"] = http_get_metadata('instance-id')\n except IOError as error:\n raise IOError(\"Error looking up metadata:availability-zone or instance-id: \" + repr(error))\n try:\n instance_desc = ec2_client.describe_instances(InstanceIds=[result[\"instance_id\"]])\n except Exception as error:\n raise IOError(\"Error calling describe_instances: \" + repr(error))\n result[\"account\"] = instance_desc[\"Reservations\"][0][\"OwnerId\"]\n arn = instance_desc[\"Reservations\"][0][\"Instances\"][0][\"IamInstanceProfile\"][\"Arn\"]\n result[\"role\"] = arn.split(\":\")[5].split(\"/\")[1]\n env = re.search(\"^(\" + EFConfig.VALID_ENV_REGEX + \")-\", result[\"role\"])\n if not env:\n raise IOError(\"Did not find 
environment in role name: \" + result[\"role\"])\n result[\"env\"] = env.group(1)\n result[\"env_short\"] = result[\"env\"].strip(\".0123456789\")\n result[\"service\"] = \"-\".join(result[\"role\"].split(\"-\")[1:])\n return result", "response": "Get the AWS context for the instance."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef pull_repo():\n try:\n current_repo = subprocess.check_output([\"git\", \"remote\", \"-v\", \"show\"])\n except subprocess.CalledProcessError as error:\n raise RuntimeError(\"Exception checking current repo\", error)\n current_repo = re.findall(\"(https://|@)(.*?)(.git|[ ])\", current_repo)[0][1].replace(\":\", \"/\")\n if current_repo != EFConfig.EF_REPO:\n raise RuntimeError(\"Must be in \" + EFConfig.EF_REPO + \" repo. Current repo is: \" + current_repo)\n try:\n current_branch = subprocess.check_output([\"git\", \"rev-parse\", \"--abbrev-ref\", \"HEAD\"]).rstrip()\n except subprocess.CalledProcessError as error:\n raise RuntimeError(\"Exception checking current branch: \" + repr(error))\n if current_branch != EFConfig.EF_REPO_BRANCH:\n raise RuntimeError(\"Must be on branch: \" + EFConfig.EF_REPO_BRANCH + \". 
Current branch is: \" + current_branch)\n try:\n subprocess.check_call([\"git\", \"pull\", \"-q\", \"origin\", EFConfig.EF_REPO_BRANCH])\n except subprocess.CalledProcessError as error:\n raise RuntimeError(\"Exception running 'git pull': \" + repr(error))", "response": "Pulls the latest version of EF_REPO_BRANCH from the current repo"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create_aws_clients(region, profile, *clients):\n if not profile:\n profile = None\n\n client_key = (region, profile)\n\n aws_clients = client_cache.get(client_key, {})\n requested_clients = set(clients)\n new_clients = requested_clients.difference(aws_clients)\n\n if not new_clients:\n return aws_clients\n\n session = aws_clients.get(\"SESSION\")\n try:\n if not session:\n session = boto3.Session(region_name=region, profile_name=profile)\n aws_clients[\"SESSION\"] = session\n # build clients\n client_dict = {c: session.client(c) for c in new_clients}\n # append the session itself in case it's needed by the client code - can't get it from the clients themselves\n aws_clients.update(client_dict)\n\n # add the created clients to the cache\n client_cache[client_key] = aws_clients\n return aws_clients\n except ClientError as error:\n raise RuntimeError(\"Exception logging in with Session() and creating clients\", error)", "response": "Create boto3 clients for one or more AWS services."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngiving an environment return the alias of the AWS account that holds the env", "response": "def get_account_alias(env):\n \"\"\"\n Given an env, return if env is valid\n Args:\n env: an environment, such as \"prod\", \"staging\", \"proto\", \"mgmt.\"\n Returns:\n the alias of the AWS account that holds the env\n Raises:\n ValueError if env is misformatted or doesn't name a known environment\n \"\"\"\n env_valid(env)\n # Env is a global env of the form \"env.\" (e.g. 
\"mgmt.\")\n if env.find(\".\") > -1:\n base, ext = env.split(\".\")\n return ext\n # Ordinary env, possibly a proto env ending with a digit that is stripped to look up the alias\n else:\n env_short = env.strip(\".0123456789\")\n if env_short not in EFConfig.ENV_ACCOUNT_MAP:\n raise ValueError(\"generic env: {} has no entry in ENV_ACCOUNT_MAP of ef_site_config.py\".format(env_short))\n return EFConfig.ENV_ACCOUNT_MAP[env_short]"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the short name of a env if it is valid otherwise raises a ValueError", "response": "def get_env_short(env):\n \"\"\"\n Given an env, return if env is valid\n Args:\n env: an environment, such as \"prod\", \"staging\", \"proto\", \"mgmt.\"\n Returns:\n the shortname of the env, such as \"prod\", \"staging\", \"proto\", \"mgmt\"\n Raises:\n ValueError if env is misformatted or doesn't name a known environment\n \"\"\"\n env_valid(env)\n if env.find(\".\") > -1:\n env_short, ext = env.split(\".\")\n else:\n env_short = env.strip(\".0123456789\")\n return env_short"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nchecking if a env is valid", "response": "def env_valid(env):\n \"\"\"\n Given an env, determine if it's valid\n Args:\n env: the env to check\n Returns:\n True if the env is valid\n Raises:\n ValueError with message if the env is not valid\n \"\"\"\n if env not in EFConfig.ENV_LIST:\n raise ValueError(\"unknown env: {}; env must be one of: \".format(env) + \", \".join(EFConfig.ENV_LIST))\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef global_env_valid(env):\n if env not in EFConfig.ACCOUNT_SCOPED_ENVS:\n raise ValueError(\"Invalid global env: {}; global envs are: {}\".format(env, EFConfig.ACCOUNT_SCOPED_ENVS))\n return True", "response": "Checks if a given env is a valid global or mgmt env"} {"SOURCE": "codesearchnet", 
"instruction": "Make a summary of the following Python 3 code\ndef kms_encrypt(kms_client, service, env, secret):\n # Converting all periods to underscores because they are invalid in KMS alias names\n key_alias = '{}-{}'.format(env, service.replace('.', '_'))\n\n try:\n response = kms_client.encrypt(\n KeyId='alias/{}'.format(key_alias),\n Plaintext=secret.encode()\n )\n except ClientError as error:\n if error.response['Error']['Code'] == \"NotFoundException\":\n fail(\"Key '{}' not found. You may need to run ef-generate for this environment.\".format(key_alias), error)\n else:\n fail(\"boto3 exception occurred while performing kms encrypt operation.\", error)\n encrypted_secret = base64.b64encode(response['CiphertextBlob'])\n return encrypted_secret", "response": "Encrypt a secret for a given service and environment."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef kms_decrypt(kms_client, secret):\n try:\n decrypted_secret = kms_client.decrypt(CiphertextBlob=base64.b64decode(secret))['Plaintext']\n except TypeError:\n fail(\"Malformed base64 string data\")\n except ClientError as error:\n if error.response[\"Error\"][\"Code\"] == \"InvalidCiphertextException\":\n fail(\"The decrypt request was rejected because the specified ciphertext \\\n has been corrupted or is otherwise invalid.\", error)\n elif error.response[\"Error\"][\"Code\"] == \"NotFoundException\":\n fail(\"The decrypt request was rejected because the specified entity or resource could not be found.\", error)\n else:\n fail(\"boto3 exception occurred while performing kms decrypt operation.\", error)\n return decrypted_secret", "response": "Decrypt a base64 encoded string of ciphertext into a EFPWContext object."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef kms_key_arn(kms_client, alias):\n try:\n response = kms_client.describe_key(KeyId=alias)\n key_arn = 
response[\"KeyMetadata\"][\"Arn\"]\n except ClientError as error:\n raise RuntimeError(\"Failed to obtain key arn for alias {}, error: {}\".format(alias, error.response[\"Error\"][\"Message\"]))\n\n return key_arn", "response": "Obtain the full key arn based on the alias provided"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncheck for existance of parameters file against supported suffixes and returns parameters file path if it exists", "response": "def get_template_parameters_file(template_full_path):\n \"\"\"\n Checks for existance of parameters file against supported suffixes and returns parameters file path if found\n Args:\n template_full_path: full filepath for template file\n Returns:\n filename of parameters file if it exists\n \"\"\"\n for suffix in EFConfig.PARAMETER_FILE_SUFFIXES:\n parameters_file = template_full_path.replace(\"/templates\", \"/parameters\") + suffix\n if exists(parameters_file):\n return parameters_file\n else:\n continue\n return None"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck for existance of parameters object in S3 against supported suffixes and returns the key of the parameters object if it exists", "response": "def get_template_parameters_s3(template_key, s3_resource):\n \"\"\"\n Checks for existance of parameters object in S3 against supported suffixes and returns parameters file key if found\n Args:\n template_key: S3 key for template file. 
omit bucket.\n s3_resource: a boto3 s3 resource\n Returns:\n filename of parameters file if it exists\n \"\"\"\n for suffix in EFConfig.PARAMETER_FILE_SUFFIXES:\n parameters_key = template_key.replace(\"/templates\", \"/parameters\") + suffix\n try:\n obj = s3_resource.Object(EFConfig.S3_CONFIG_BUCKET, parameters_key)\n obj.get()\n return parameters_key\n except ClientError:\n continue\n return None"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting the autoscaling group properties based on the service name that is provided. This function will attempt the find the autoscaling group base on the following logic: 1. If the service name provided matches the autoscaling group name 2. If the service name provided matches the Name tag of the autoscaling group 3. If the service name provided does not match the above, return None Args: clients: Instantiated boto3 autoscaling client env: Name of the environment to search for the autoscaling group service: Name of the service Returns: JSON object of the autoscaling group properties if it exists", "response": "def get_autoscaling_group_properties(asg_client, env, service):\n \"\"\"\n Gets the autoscaling group properties based on the service name that is provided. This function will attempt the find\n the autoscaling group base on the following logic:\n 1. If the service name provided matches the autoscaling group name\n 2. If the service name provided matches the Name tag of the autoscaling group\n 3. 
If the service name provided does not match the above, return None\n Args:\n clients: Instantiated boto3 autoscaling client\n env: Name of the environment to search for the autoscaling group\n service: Name of the service\n Returns:\n JSON object of the autoscaling group properties if it exists\n \"\"\"\n try:\n # See if {{ENV}}-{{SERVICE}} matches ASG name\n response = asg_client.describe_auto_scaling_groups(AutoScalingGroupNames=[\"{}-{}\".format(env, service)])\n if len(response[\"AutoScalingGroups\"]) == 0:\n # See if {{ENV}}-{{SERVICE}} matches ASG tag name\n response = asg_client.describe_tags(Filters=[{ \"Name\": \"Key\", \"Values\": [\"Name\"] }, { \"Name\": \"Value\", \"Values\": [\"{}-{}\".format(env, service)]}])\n if len(response[\"Tags\"]) == 0:\n # Query does not match either of the above, return None\n return None\n else:\n asg_name = response[\"Tags\"][0][\"ResourceId\"]\n response = asg_client.describe_auto_scaling_groups(AutoScalingGroupNames=[asg_name])\n return response[\"AutoScalingGroups\"]\n else:\n return response[\"AutoScalingGroups\"]\n except ClientError as error:\n raise RuntimeError(\"Error in finding autoscaling group {} {}\".format(env, service), error)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef export_graph(checkpoint_path, output_nodes):\n if not tf.gfile.Exists(checkpoint_path):\n raise AssertionError(\n \"Export directory doesn't exists. 
Please specify an export \"\n \"directory: %s\" % checkpoint_path)\n\n if not output_nodes:\n print(\"You need to supply the name of a node to --output_node_names.\")\n return -1\n\n # We retrieve our checkpoint fullpath\n checkpoint = tf.train.get_checkpoint_state(checkpoint_path)\n input_checkpoint = checkpoint.model_checkpoint_path\n\n # We precise the file fullname of our freezed graph\n output_graph = checkpoint_path + \"/frozen_model.pb\"\n\n # We clear devices to allow TensorFlow to control on which device it will load operations\n clear_devices = True\n\n # We start a session using a temporary fresh Graph\n with tf.Session(graph=tf.Graph()) as sess:\n # We import the meta graph in the current default Graph\n saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices)\n\n # We restore the weights\n saver.restore(sess, input_checkpoint)\n\n # We use a built-in TF helper to export variables to constants\n output_graph_def = tf.graph_util.convert_variables_to_constants(\n sess, # The session is used to retrieve the weights\n tf.get_default_graph().as_graph_def(), # The graph_def is used to retrieve the nodes\n output_nodes # The output node names are used to select the useful nodes\n )\n\n # Finally we serialize and dump the output graph to the filesystem\n with tf.gfile.GFile(output_graph, \"wb\") as f:\n f.write(output_graph_def.SerializeToString())\n print(\"%d ops in the final graph.\" % len(output_graph_def.node))\n\n return output_graph_def", "response": "Export a graph stored in a checkpoint as a. 
pb file."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef load_graph(frozen_graph_filename, namespace_prefix=\"\", placeholders=None):\n # Load graph def from protobuff and import the definition\n with tf.gfile.GFile(frozen_graph_filename, \"rb\") as f:\n graph_def = tf.GraphDef()\n graph_def.ParseFromString(f.read())\n\n if placeholders is None:\n with tf.Graph().as_default() as graph:\n tf.import_graph_def(graph_def, name=namespace_prefix)\n else:\n with tf.Graph().as_default() as graph:\n tf.import_graph_def(graph_def, input_map=placeholders, name=namespace_prefix)\n return graph", "response": "Loads a frozen graph from a. pb file."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _elbv2_load_balancer(self, lookup):\n client = EFAwsResolver.__CLIENTS['elbv2']\n elbs = client.describe_load_balancers(Names=[lookup])\n # getting the first one, since we requested only one lb\n elb = elbs['LoadBalancers'][0]\n return elb", "response": "Returns the load - balancer description for the specified load - balancer"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef acm_certificate_arn(self, lookup, default=None):\n # @todo: Only searches the first 100 certificates in the account\n\n try:\n # This a region-specific client, so we'll make a new client in the right place using existing SESSION\n region_name, domain_name = lookup.split(\"/\")\n acm_client = EFAwsResolver.__CLIENTS[\"SESSION\"].client(service_name=\"acm\", region_name=region_name)\n response = acm_client.list_certificates(\n CertificateStatuses=['ISSUED'],\n MaxItems=100\n )\n except Exception:\n return default\n # No certificates\n if len(response[\"CertificateSummaryList\"]) < 1:\n return default\n # One or more certificates - find cert with latest IssuedAt date or an arbitrary cert if none are dated\n best_match_cert = None\n for cert_handle in 
response[\"CertificateSummaryList\"]:\n if cert_handle[\"DomainName\"] == domain_name:\n cert = acm_client.describe_certificate(CertificateArn=cert_handle[\"CertificateArn\"])[\"Certificate\"]\n # Patch up cert if there is no IssuedAt (i.e. cert was not issued by Amazon)\n if not cert.has_key(\"IssuedAt\"):\n cert[u\"IssuedAt\"] = datetime.datetime(1970, 1, 1, 0, 0)\n if best_match_cert is None:\n best_match_cert = cert\n elif cert[\"IssuedAt\"] > best_match_cert[\"IssuedAt\"]:\n best_match_cert = cert\n if best_match_cert is not None:\n return best_match_cert[\"CertificateArn\"]\n return default", "response": "Returns the ARN of the certificate that is issued by the given region and domain."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the ID of the first Elastic IP with a description matching lookup", "response": "def ec2_elasticip_elasticip_id(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: the CloudFormation resource name of the Elastic IP ID to look up\n default: the optional value to return if lookup failed; returns None if not set\n Returns:\n The ID of the first Elastic IP found with a description matching 'lookup' or default/None if no match found\n \"\"\"\n\n public_ip = self.ec2_elasticip_elasticip_ipaddress(lookup)\n if public_ip is None:\n return default\n try:\n eips = EFAwsResolver.__CLIENTS[\"ec2\"].describe_addresses(\n PublicIps=[public_ip]\n )\n # Public IP not found\n except ClientError:\n return default\n eip_id = eips[\"Addresses\"][0][\"AllocationId\"]\n return eip_id"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef ec2_elasticip_elasticip_ipaddress(self, lookup, default=None):\n # Extract environment from resource ID to build stack name\n m = re.search('ElasticIp([A-Z]?[a-z]+[0-9]?)\\w+', lookup)\n # The lookup string was not a valid ElasticIp resource label\n if m is None:\n return default\n env = m.group(1)\n stackname = 
\"{}-elasticip\".format(env.lower())\n # Convert env substring to title in case {{ENV}} substitution is being used\n lookup = lookup.replace(env, env.title())\n # Look up the EIP resource in the stack to get the IP address assigned to the EIP\n try:\n eip_stack = EFAwsResolver.__CLIENTS[\"cloudformation\"].describe_stack_resources(\n StackName=stackname,\n LogicalResourceId=lookup\n )\n except ClientError:\n return default\n stack_resources = eip_stack[\"StackResources\"]\n # Resource does not exist in stack\n if len(stack_resources) < 1:\n return default\n eip_publicip = stack_resources[0][\"PhysicalResourceId\"]\n return eip_publicip", "response": "Returns the IP address assigned to the Elastic IP"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef ec2_eni_eni_id(self, lookup, default=None):\n enis = EFAwsResolver.__CLIENTS[\"ec2\"].describe_network_interfaces(Filters=[{\n 'Name': 'description',\n 'Values': [lookup]\n }])\n if len(enis.get(\"NetworkInterfaces\")) > 0:\n return enis[\"NetworkInterfaces\"][0][\"NetworkInterfaceId\"]\n else:\n return default", "response": "Returns the ID of the first Elastic Network Interface with the given description matching lookup or default if lookup failed."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef ec2_network_network_acl_id(self, lookup, default=None):\n network_acl_id = EFAwsResolver.__CLIENTS[\"ec2\"].describe_network_acls(Filters=[{\n 'Name': 'tag:Name',\n 'Values': [lookup]\n }])\n if len(network_acl_id[\"NetworkAcls\"]) > 0:\n return network_acl_id[\"NetworkAcls\"][0][\"NetworkAclId\"]\n else:\n return default", "response": "Returns the ID of the network ACL with the given name."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef ec2_security_group_security_group_id(self, lookup, default=None):\n try:\n response = 
EFAwsResolver.__CLIENTS[\"ec2\"].describe_security_groups(Filters=[{\n 'Name':'group-name', 'Values':[lookup]\n }])\n except:\n return default\n if len(response[\"SecurityGroups\"]) > 0:\n return response[\"SecurityGroups\"][0][\"GroupId\"]\n else:\n return default", "response": "Returns the security group ID for the specified security group name"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the ID of a single subnet in the EC2 container", "response": "def ec2_subnet_subnet_id(self, lookup, default=None):\n \"\"\"\n Return:\n the ID of a single subnet or default/None if no match\n Args:\n lookup: the friendly name of the subnet to look up (subnet--a or subnet--b)\n default: the optional value to return if lookup failed; returns None if not set\n \"\"\"\n subnets = EFAwsResolver.__CLIENTS[\"ec2\"].describe_subnets(Filters=[{\n 'Name': 'tag:Name',\n 'Values': [lookup]\n }])\n if len(subnets[\"Subnets\"]) > 0:\n return subnets[\"Subnets\"][0][\"SubnetId\"]\n else:\n return default"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef ec2_vpc_availabilityzones(self, lookup, default=None):\n vpc_id = self.ec2_vpc_vpc_id(lookup)\n if vpc_id is None:\n return default\n subnets = EFAwsResolver.__CLIENTS[\"ec2\"].describe_subnets(Filters=[{\n 'Name': 'vpc-id',\n 'Values': [vpc_id]\n }])\n if len(subnets[\"Subnets\"]) > 0:\n # Strip the metadata section (subnets[\"Subnets\"])\n az_list = [s[\"AvailabilityZone\"] for s in subnets[\"Subnets\"]]\n # Add internal \", \" only. 
This is called literally from: \"{{aws...}}\" - CF template needs the outer quotes\n return \"\\\", \\\"\".join(az_list)\n else:\n return default", "response": "Returns a list of availability zones in use in the named VPC."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a comma - separated list of all subnets in use in the named VPC", "response": "def ec2_vpc_subnets(self, lookup, default=None):\n \"\"\"\n Args:\n lookup - the friendly name of the VPC whose subnets we want\n Returns:\n A comma-separated list of all subnets in use in the named VPC or default/None if no match found\n \"\"\"\n vpc_id = self.ec2_vpc_vpc_id(lookup)\n if vpc_id is None:\n return default\n subnets = EFAwsResolver.__CLIENTS[\"ec2\"].describe_subnets(Filters=[{\n 'Name': 'vpc-id',\n 'Values': [vpc_id]\n }])\n if len(subnets[\"Subnets\"]) > 0:\n # Strip the metadata section (subnets[\"Subnets\"])\n subnet_list = [s[\"SubnetId\"] for s in subnets[\"Subnets\"]]\n # Add internal \", \" only. 
This is called literally from: \"{{aws...}}\" - reuses the outer quotes\n return \"\\\", \\\"\".join(subnet_list)\n else:\n return default"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef ec2_vpc_cidrblock(self, lookup, default=None):\n vpcs = EFAwsResolver.__CLIENTS[\"ec2\"].describe_vpcs(Filters=[{\n 'Name': 'tag:Name',\n 'Values': [lookup]\n }])\n if len(vpcs.get(\"Vpcs\")) > 0:\n return vpcs[\"Vpcs\"][0][\"CidrBlock\"]\n else:\n return default", "response": "Returns the CIDR block of the named VPC or default if no match is found"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the hosted zone ID of the ELB found with a friendly name matching lookup.", "response": "def elbv2_load_balancer_hosted_zone(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: the friendly name of the V2 elb to look up\n default: value to return in case of no match\n Returns:\n The hosted zone ID of the ELB found with a name matching 'lookup'.\n \"\"\"\n try:\n elb = self._elbv2_load_balancer(lookup)\n return elb['CanonicalHostedZoneId']\n except ClientError:\n return default"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the hosted zone ID of the ELB with a friendly name matching lookup.", "response": "def elbv2_load_balancer_dns_name(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: the friendly name of the V2 elb to look up\n default: value to return in case of no match\n Returns:\n The hosted zone ID of the ELB found with a name matching 'lookup'.\n \"\"\"\n try:\n elb = self._elbv2_load_balancer(lookup)\n return elb['DNSName']\n except ClientError:\n return default"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef elbv2_load_balancer_arn_suffix(self, lookup, default=None):\n try:\n elb = self._elbv2_load_balancer(lookup)\n m = re.search(r'.+?(app\\/[^\\/]+\\/[^\\/]+)$', 
elb['LoadBalancerArn'])\n return m.group(1)\n except ClientError:\n return default", "response": "Returns the ARN of the v2 load balancer that has the suffix of app."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the ARN of the target group in the v2 target group", "response": "def elbv2_target_group_arn_suffix(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: the friendly name of the v2 elb target group\n default: value to return in case of no match\n Returns:\n The shorthand fragment of the target group's ARN, of the form\n `targetgroup/*/*`\n \"\"\"\n try:\n client = EFAwsResolver.__CLIENTS['elbv2']\n elbs = client.describe_target_groups(Names=[lookup])\n elb = elbs['TargetGroups'][0]\n m = re.search(r'.+?(targetgroup\\/[^\\/]+\\/[^\\/]+)$', elb['TargetGroupArn'])\n return m.group(1)\n except ClientError:\n return default"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef waf_rule_id(self, lookup, default=None):\n # list_rules returns at most 100 rules per request\n list_limit = 100\n rules = EFAwsResolver.__CLIENTS[\"waf\"].list_rules(Limit=list_limit)\n while True:\n for rule in rules[\"Rules\"]:\n if rule[\"Name\"] == lookup:\n return rule[\"RuleId\"]\n if rules.has_key(\"NextMarker\"):\n rules = EFAwsResolver.__CLIENTS[\"waf\"].list_rules(Limit=list_limit, NextMarker=rules[\"NextMarker\"])\n else:\n return default", "response": "Returns the ID of the WAF rule whose name matches lookup."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the ID of the WAF Web ACL whose name matches the given name. 
If no match is found returns default.", "response": "def waf_web_acl_id(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: the friendly name of a Web ACL\n default: the optional value to return if lookup failed; returns None if not set\n Returns:\n the ID of the WAF Web ACL whose name matches rule_name or default/None if no match found\n \"\"\"\n # list_rules returns at most 100 rules per request\n list_limit = 100\n acls = EFAwsResolver.__CLIENTS[\"waf\"].list_web_acls(Limit=list_limit)\n while True:\n for acl in acls[\"WebACLs\"]:\n if acl[\"Name\"] == lookup:\n return acl[\"WebACLId\"]\n if acls.has_key(\"NextMarker\"):\n acls = EFAwsResolver.__CLIENTS[\"waf\"].list_web_acls(Limit=list_limit, NextMarker=acls[\"NextMarker\"])\n else:\n return default"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the public hosted zone ID for the given domain name.", "response": "def route53_public_hosted_zone_id(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: The zone name to look up. Must end with \".\"\n default: the optional value to return if lookup failed; returns None if not set\n Returns:\n the ID of the public hosted zone for the 'lookup' domain, or default/None if no match found\n \"\"\"\n list_limit = \"100\"\n # enforce terminal '.' 
in name, otherwise we could get a partial match of the incorrect zones\n if lookup[-1] != '.':\n return default\n hosted_zones = EFAwsResolver.__CLIENTS[\"route53\"].list_hosted_zones_by_name(DNSName=lookup, MaxItems=list_limit)\n # Return if the account has no HostedZones\n if not hosted_zones.has_key(\"HostedZones\"):\n return default\n while True:\n for hosted_zone in hosted_zones[\"HostedZones\"]:\n if lookup == hosted_zone[\"Name\"] and not hosted_zone[\"Config\"][\"PrivateZone\"]:\n return hosted_zone[\"Id\"].split(\"/\")[2]\n if hosted_zones[\"IsTruncated\"]:\n hosted_zones = EFAwsResolver.__CLIENTS[\"route53\"].list_hosted_zones_by_name(\n DNSName=hosted_zones[\"NextDNSName\"], HostedZoneId=hosted_zones[\"NextHostedZoneId\"], MaxItems=list_limit)\n else:\n return default"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the ID of the main route table of the named VPC or None if lookup failed", "response": "def ec2_route_table_main_route_table_id(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: the friendly name of the VPC whose main route table we are looking up\n default: the optional value to return if lookup failed; returns None if not set\n Returns:\n the ID of the main route table of the named VPC, or default if no match/multiple matches found\n \"\"\"\n vpc_id = self.ec2_vpc_vpc_id(lookup)\n if vpc_id is None:\n return default\n route_table = EFAwsResolver.__CLIENTS[\"ec2\"].describe_route_tables(Filters=[\n {'Name': 'vpc-id', 'Values': [vpc_id]},\n {'Name': 'association.main', 'Values': ['true']}\n ])\n if len(route_table[\"RouteTables\"]) is not 1:\n return default\n return route_table[\"RouteTables\"][0][\"RouteTableId\"]"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the ID of the route table with the given name. 
If no match or multiple matches returns default.", "response": "def ec2_route_table_tagged_route_table_id(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: the tagged route table name, should be unique\n default: the optional value to return if lookup failed; returns None if not set\n Returns:\n the ID of the route table, or default if no match/multiple matches found\n \"\"\"\n route_table = EFAwsResolver.__CLIENTS[\"ec2\"].describe_route_tables(Filters=[\n {'Name': 'tag-key', 'Values': ['Name']},\n {'Name': 'tag-value', 'Values': [lookup]}\n ])\n if len(route_table[\"RouteTables\"]) is not 1:\n return default\n return route_table[\"RouteTables\"][0][\"RouteTableId\"]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the domain name of the Cloudfront distrinbution.", "response": "def cloudfront_domain_name(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: any CNAME on the Cloudfront distribution\n default: the optional value to return if lookup failed; returns None if not set\n Returns:\n The domain name (FQDN) of the Cloudfront distrinbution, or default/None if no match\n \"\"\"\n # list_distributions returns at most 100 distributions per request\n list_limit = \"100\"\n distributions = EFAwsResolver.__CLIENTS[\"cloudfront\"].list_distributions(MaxItems=list_limit)[\"DistributionList\"]\n # Return if the account has no Distributions\n if not distributions.has_key(\"Items\"):\n return default\n while True:\n for distribution in distributions[\"Items\"]:\n if lookup in distribution[\"Aliases\"][\"Items\"]:\n return distribution[\"DomainName\"]\n if distributions[\"IsTruncated\"]:\n distributions = EFAwsResolver.__CLIENTS[\"cloudfront\"].list_distributions(\n MaxItems=list_limit, Marker=distributions[\"NextMarker\"])[\"DistributionList\"]\n else:\n return default"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the S3 Canonical User ID of the Origin Access Identity associated with 
the FQDN in lookup.", "response": "def cloudfront_origin_access_identity_oai_canonical_user_id(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: the FQDN of the Origin Access Identity (from its comments)\n default: the optional value to return if lookup failed; returns None if not set\n Returns:\n the S3 Canonical User ID of the OAI associated with the named FQDN in 'lookup', or default/None if no match\n \"\"\"\n # list_cloud_front_origin_access_identities returns at most 100 oai's per request\n list_limit = \"100\"\n oais = EFAwsResolver.__CLIENTS[\"cloudfront\"].list_cloud_front_origin_access_identities(\n MaxItems=list_limit)[\"CloudFrontOriginAccessIdentityList\"]\n # Return if the account has no OriginAccessIdentities\n if not oais.has_key(\"Items\"):\n return default\n while True:\n for oai in oais[\"Items\"]:\n if oai[\"Comment\"] == lookup:\n return oai[\"S3CanonicalUserId\"]\n if oais[\"IsTruncated\"]:\n oais = EFAwsResolver.__CLIENTS[\"cloudfront\"].list_cloud_front_origin_access_identities(\n MaxItems=list_limit, Marker=oais[\"NextMarker\"])[\"CloudFrontOriginAccessIdentityList\"]\n else:\n return default"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the full ARN for a cognito identity pool", "response": "def cognito_identity_identity_pool_arn(self, lookup, default=None):\n \"\"\"\n Args:\n lookup: Cognito Federated Identity name, proto0-cms-identity-pool\n default: the optional value to return if lookup failed; returns None if not set\n\n Returns:\n the constructed ARN for the cognito identity pool, else default/None\n \"\"\"\n identity_pool_id = self.cognito_identity_identity_pool_id(lookup, default)\n\n if identity_pool_id == default:\n return default\n\n # The ARN has to be constructed because there is no boto3 call that returns the full ARN for a cognito identity pool\n return \"arn:aws:cognito-identity:{{{{REGION}}}}:{{{{ACCOUNT}}}}:identitypool/{}\".format(identity_pool_id)"} {"SOURCE": 
"codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef cognito_identity_identity_pool_id(self, lookup, default=None):\n # List size cannot be greater than 60\n list_limit = 60\n client = EFAwsResolver.__CLIENTS[\"cognito-identity\"]\n response = client.list_identity_pools(MaxResults=list_limit)\n\n while \"IdentityPools\" in response:\n # Loop through all the identity pools\n for pool in response[\"IdentityPools\"]:\n if pool[\"IdentityPoolName\"] == lookup:\n return pool[\"IdentityPoolId\"]\n\n # No match found on this page, but there are more pages\n if response.has_key(\"NextToken\"):\n response = client.list_identity_pools(MaxResults=list_limit, NextToken=response[\"NextToken\"])\n else:\n break\n\n return default", "response": "Returns the Cognito Identity Pool ID corresponding to the given lookup."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef cognito_idp_user_pool_arn(self, lookup, default=None):\n client = EFAwsResolver.__CLIENTS[\"cognito-idp\"]\n user_pool_id = self.cognito_idp_user_pool_id(lookup, default)\n if user_pool_id == default:\n return default\n\n response = client.describe_user_pool(UserPoolId=user_pool_id)\n\n if not response.has_key(\"UserPool\"):\n return default\n\n return response[\"UserPool\"][\"Arn\"]", "response": "Returns the User Pool ARN corresponding to the given lookup."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef kms_key_arn(self, lookup):\n key_arn = ef_utils.kms_key_arn(EFAwsResolver.__CLIENTS[\"kms\"], lookup)\n return key_arn", "response": "Returns the full key arn for a given lookup"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef handle_args_and_set_context(args):\n parser = argparse.ArgumentParser()\n parser.add_argument(\"env\", help=\", \".join(EFConfig.ENV_LIST))\n parser.add_argument(\"--sr\", help=\"optional 
/path/to/service_registry_file.json\", default=None)\n parser.add_argument(\"--commit\", help=\"Make changes in AWS (dry run if omitted)\", action=\"store_true\", default=False)\n parser.add_argument(\"--verbose\", help=\"Print additional info\", action=\"store_true\", default=False)\n parser.add_argument(\"--devel\", help=\"Allow running from branch; don't refresh from origin\", action=\"store_true\",\n default=False)\n parsed_args = vars(parser.parse_args(args))\n context = EFContext()\n context.commit = parsed_args[\"commit\"]\n context.devel = parsed_args[\"devel\"]\n try:\n context.env = parsed_args[\"env\"]\n except ValueError as e:\n fail(\"Error in env: {}\".format(e.message))\n # Set up service registry and policy template path which depends on it\n context.service_registry = EFServiceRegistry(parsed_args[\"sr\"])\n context.policy_template_path = normpath(dirname(context.service_registry.filespec)) + EFConfig.POLICY_TEMPLATE_PATH_SUFFIX\n context.verbose = parsed_args[\"verbose\"]\n return context", "response": "Handles command line args and sets up EFContext object"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef conditionally_create_security_groups(env, service_name, service_type):\n if service_type not in SG_SERVICE_TYPES:\n print_if_verbose(\"not eligible for security group(s); service type: {}\".format(service_type))\n return\n\n target_name = \"{}-{}\".format(env, service_name)\n if service_type == \"aws_ec2\":\n sg_names = [\"{}-ec2\".format(target_name)]\n elif service_type == \"aws_lambda\":\n sg_names = [\"{}-lambda\".format(target_name)]\n elif service_type == \"http_service\":\n sg_names = [\n \"{}-ec2\".format(target_name),\n \"{}-elb\".format(target_name)\n ]\n elif service_type == \"aws_security_group\":\n sg_names = [target_name]\n else:\n fail(\"Unexpected service_type: {} when creating security group for: {}\".format(service_type, target_name))\n\n for sg_name in sg_names:\n if not 
AWS_RESOLVER.ec2_security_group_security_group_id(sg_name):\n vpc_name = \"vpc-{}\".format(env)\n print(\"Create security group: {} in vpc: {}\".format(sg_name, vpc_name))\n vpc = AWS_RESOLVER.ec2_vpc_vpc_id(vpc_name)\n if not vpc:\n fail(\"Error: could not get VPC by name: {}\".format(vpc_name))\n # create security group\n if CONTEXT.commit:\n try:\n new_sg = CLIENTS[\"ec2\"].create_security_group(GroupName=sg_name, VpcId=vpc, Description=sg_name)\n except:\n fail(\"Exception creating security group named: {} in VpcId: {}\".format(sg_name, vpc_name), sys.exc_info())\n print(new_sg[\"GroupId\"])\n else:\n print_if_verbose(\"security group already exists: {}\".format(sg_name))", "response": "Create security groups as needed ; name and number created depend on service_type\n Args env service name and type of the service in service registry Returns a new registry with the new security groups created"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a role by name if a role by that name does not already exist ; attach a custom list of Principals athorically defined AssumeRolePolicy athorically defined AssumeRole policy document comprised of two IAM entities and a service.", "response": "def conditionally_create_role(role_name, sr_entry):\n \"\"\"\n Create role_name if a role by that name does not already exist; attach a custom list of Principals\n to its AssumeRolePolicy\n Args:\n role_name: the name for the role to create\n sr_entry: service registry entry\n\n Example of a (complex) AssumeRole policy document comprised of two IAM entities and a service:\n {\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"Service\": \"ec2.amazonaws.com\",\n \"AWS\": [\n \"arn:aws:iam::978969509086:root\",\n \"arn:aws:iam::978969509086:role/mgmt-jenkins\"\n ]\n },\n \"Action\": \"sts:AssumeRole\"\n }\n ]\n }\n \"\"\"\n service_type = sr_entry['type']\n if service_type not in SERVICE_TYPE_ROLE:\n 
print_if_verbose(\"not eligible for role (and possibly instance profile); service type: {}\".format(service_type))\n return\n\n if sr_entry.has_key(\"assume_role_policy\"):\n # Explicitly defined AssumeRole policy\n assume_role_policy_document = resolve_policy_document(sr_entry[\"assume_role_policy\"])\n else:\n # Create Service:AssumeRole policy using the service type in the SERVICE_TYPE_ROLE dict\n # which must list a service type to use this capacity (most do)\n if SERVICE_TYPE_ROLE[service_type] is None:\n fail(\"service_type: {} does not have a default service-type AssumeRole policy\".format(service_type))\n formatted_principals = '\"Service\": \"{}\"'.format(SERVICE_TYPE_ROLE[service_type])\n assume_role_policy_document = '''{\n \"Version\" : \"2012-10-17\",\n \"Statement\": [{\n \"Effect\": \"Allow\",\n \"Principal\": { ''' + formatted_principals + ''' },\n \"Action\": [ \"sts:AssumeRole\" ]\n }]\n }'''\n if not get_role_id(role_name):\n print(\"Create role: {}\".format(role_name))\n print_if_verbose(\"AssumeRole policy document:\\n{}\".format(assume_role_policy_document))\n if CONTEXT.commit:\n try:\n new_role = CLIENTS[\"iam\"].create_role(\n RoleName=role_name, AssumeRolePolicyDocument=assume_role_policy_document\n )\n except ClientError as error:\n fail(\"Exception creating new role named: {} {}\".format(role_name, sys.exc_info(), error))\n print(new_role[\"Role\"][\"RoleId\"])\n else:\n print_if_verbose(\"role already exists: {}\".format(role_name))"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates an instance profile if needed and attach to the role if needed.", "response": "def conditionally_create_profile(role_name, service_type):\n \"\"\"\n Check that there is a 1:1 correspondence with an InstanceProfile having the same name\n as the role, and that the role is contained in it. 
Create InstanceProfile and attach to role if needed.\n \"\"\"\n # make instance profile if this service_type gets an instance profile\n if service_type not in INSTANCE_PROFILE_SERVICE_TYPES:\n print_if_verbose(\"service type: {} not eligible for instance profile\".format(service_type))\n return\n\n instance_profile = get_instance_profile(role_name)\n if not instance_profile:\n print(\"Create instance profile: {}\".format(role_name))\n if CONTEXT.commit:\n try:\n instance_profile = CLIENTS[\"iam\"].create_instance_profile(InstanceProfileName=role_name)\n except ClientError as error:\n fail(\"Exception creating instance profile named: {} {}\".format(role_name, sys.exc_info(), error))\n else:\n print_if_verbose(\"instance profile already exists: {}\".format(role_name))\n # attach instance profile to role; test 'if instance_profile' because we drop through to here in a dry run\n if instance_profile and not instance_profile_contains_role(instance_profile, role_name):\n print(\"Add role: {} to instance profile: {}\".format(role_name, role_name))\n if CONTEXT.commit:\n try:\n CLIENTS[\"iam\"].add_role_to_instance_profile(InstanceProfileName=role_name, RoleName=role_name)\n except ClientError as error:\n fail(\"Exception adding role to instance profile: {} {}\".format(role_name, sys.exc_info(), error))\n else:\n print_if_verbose(\"instance profile already contains role: {}\".format(role_name))"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef conditionally_attach_managed_policies(role_name, sr_entry):\n service_type = sr_entry['type']\n if not (service_type in SERVICE_TYPE_ROLE and \"aws_managed_policies\" in sr_entry):\n print_if_verbose(\"not eligible for policies; service_type: {} is not valid for policies \"\n \"or no 'aws_managed_policies' key in service registry for this role\".format(service_type))\n return\n\n for policy_name in sr_entry['aws_managed_policies']:\n print_if_verbose(\"loading policy: {} 
for role: {}\".format(policy_name, role_name))\n\n if CONTEXT.commit:\n try:\n CLIENTS[\"iam\"].attach_role_policy(RoleName=role_name, PolicyArn='arn:aws:iam::aws:policy/' + policy_name)\n except:\n fail(\"Exception putting policy: {} onto role: {}\".format(policy_name, role_name), sys.exc_info())", "response": "Conditionally attach the policies to a role"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef conditionally_create_kms_key(role_name, service_type):\n if service_type not in KMS_SERVICE_TYPES:\n print_if_verbose(\"not eligible for kms; service_type: {} is not valid for kms\".format(service_type))\n return\n\n # Converting all periods to underscores because they are invalid in KMS alias names\n key_alias = role_name.replace('.', '_')\n\n try:\n kms_key = CLIENTS[\"kms\"].describe_key(KeyId='alias/{}'.format(key_alias))\n except ClientError as error:\n if error.response['Error']['Code'] == 'NotFoundException':\n kms_key = None\n else:\n fail(\"Exception describing KMS key: {} {}\".format(role_name, error))\n\n if service_type == \"aws_fixture\":\n kms_key_policy = '''{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"Enable IAM User Permissions\",\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"AWS\": \"arn:aws:iam::''' + CONTEXT.account_id + ''':root\"\n },\n \"Action\": \"kms:*\",\n \"Resource\": \"*\"\n }\n ]\n }'''\n else:\n formatted_principal = '\"AWS\": \"arn:aws:iam::{}:role/{}\"'.format(CONTEXT.account_id, role_name)\n kms_key_policy = '''{\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"Enable IAM User Permissions\",\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"AWS\": \"arn:aws:iam::''' + CONTEXT.account_id + ''':root\"\n },\n \"Action\": \"kms:*\",\n \"Resource\": \"*\"\n },\n {\n \"Sid\": \"Allow Service Role Decrypt Privileges\",\n \"Effect\": \"Allow\",\n \"Principal\": { ''' + formatted_principal + ''' },\n \"Action\": 
\"kms:Decrypt\",\n \"Resource\": \"*\"\n },\n {\n \"Sid\": \"Allow use of the key for default autoscaling group service role\",\n \"Effect\": \"Allow\",\n \"Principal\": { \"AWS\": \"arn:aws:iam::''' + CONTEXT.account_id + ''':role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling\" },\n \"Action\": [\n \"kms:Encrypt\",\n \"kms:Decrypt\",\n \"kms:ReEncrypt*\",\n \"kms:GenerateDataKey*\",\n \"kms:DescribeKey\"\n ],\n \"Resource\": \"*\"\n },\n {\n \"Sid\": \"Allow attachment of persistent resourcesfor default autoscaling group service role\",\n \"Effect\": \"Allow\",\n \"Principal\": { \"AWS\": \"arn:aws:iam::''' + CONTEXT.account_id + ''':role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling\" },\n \"Action\": [\n \"kms:CreateGrant\"\n ],\n \"Resource\": \"*\",\n \"Condition\": {\n \"Bool\": {\n \"kms:GrantIsForAWSResource\": true\n }\n }\n }\n ]\n }'''\n\n if not kms_key:\n print(\"Create KMS key: {}\".format(key_alias))\n if CONTEXT.commit:\n # Create KMS Master Key. Due to AWS eventual consistency a newly created IAM role may not be\n # immediately visible to KMS. Retrying up to 5 times (25 seconds) to account for this behavior.\n create_key_failures = 0\n while create_key_failures <= 5:\n try:\n new_kms_key = CLIENTS[\"kms\"].create_key(\n Policy=kms_key_policy,\n Description='Master Key for {}'.format(role_name)\n )\n break\n except ClientError as error:\n if error.response['Error']['Code'] == 'MalformedPolicyDocumentException':\n if create_key_failures == 5:\n fail(\"Exception creating kms key: {} {}\".format(role_name, error))\n else:\n create_key_failures += 1\n time.sleep(5)\n else:\n fail(\"Exception creating kms key: {} {}\".format(role_name, error))\n\n # Assign key an alias. 
This is used for all future references to it (rather than the key ARN)\n try:\n CLIENTS[\"kms\"].create_alias(\n AliasName='alias/{}'.format(key_alias),\n TargetKeyId=new_kms_key['KeyMetadata']['KeyId']\n )\n except ClientError as error:\n fail(\"Exception creating alias for kms key: {} {}\".format(role_name, error))\n else:\n print_if_verbose(\"KMS key already exists: {}\".format(key_alias))", "response": "Create a KMS Master Key for encryption and decryption of sensitive values in cf templates and latebind configs."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncomputes the sum of abs distances.", "response": "def sum_abs_distance(labels, preds):\n \"\"\"\n Compute the sum of abs distances.\n\n :param labels: A float tensor of shape [batch_size, ..., X] representing the labels.\n :param preds: A float tensor of shape [batch_size, ..., X] representing the predictions.\n :return: A float tensor of shape [batch_size, ...] representing the summed absolute distance.\n \"\"\"\n with tf.variable_scope(\"sum_abs_distance\"):\n return tf.reduce_sum(tf.abs(preds - labels), axis=-1)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncomputing the l1_distance. :param labels: A float tensor of shape [batch_size, ..., X] representing the labels. :param preds: A float tensor of shape [batch_size, ..., X] representing the predictions. :return: A float tensor of shape [batch_size, ...] representing the l1 distance.", "response": "def l1_distance(labels, preds):\n \"\"\"\n Compute the l1_distance.\n\n :param labels: A float tensor of shape [batch_size, ..., X] representing the labels.\n :param preds: A float tensor of shape [batch_size, ..., X] representing the predictions.\n :return: A float tensor of shape [batch_size, ...] 
representing the l1 distance.\n \"\"\"\n with tf.variable_scope(\"l1_distance\"):\n return tf.norm(preds - labels, ord=1)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef smooth_l1_distance(labels, preds, delta=1.0):\n with tf.variable_scope(\"smooth_l1\"):\n return tf.reduce_sum(tf.losses.huber_loss(\n labels=labels,\n predictions=preds,\n delta=delta,\n loss_collection=None,\n reduction=tf.losses.Reduction.NONE\n ), axis=-1)", "response": "Compute the smooth l1 distance."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef l2_distance(labels, preds):\n with tf.variable_scope(\"l2_distance\"):\n return tf.norm(preds - labels, ord=2)", "response": "Compute the l2_distance.\n\n :param labels: A float tensor of shape [batch_size, ..., X] representing the labels.\n :param preds: A float tensor of shape [batch_size, ..., X] representing the predictions.\n :return: A float tensor of shape [batch_size, ...] 
representing the l2 distance."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef cross_entropy(labels, logits):\n with tf.variable_scope(\"cross_entropy\"):\n return tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits)", "response": "Calculate the cross entropy."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef handle_args_and_set_context(args):\n parser = argparse.ArgumentParser()\n parser.add_argument(\"template_file\", help=\"/path/to/template_file.json\")\n parser.add_argument(\"env\", help=\", \".join(EFConfig.ENV_LIST))\n parser.add_argument(\"--sr\", help=\"optional /path/to/service_registry_file.json\", default=None)\n parser.add_argument(\"--verbose\", help=\"Print additional info + resolved template\", action=\"store_true\", default=False)\n parser.add_argument(\"--devel\", help=\"Allow running from branch; don't refresh from origin\", action=\"store_true\",\n default=False)\n group = parser.add_mutually_exclusive_group()\n group.add_argument(\"--changeset\", help=\"create a changeset; cannot be combined with --commit\",\n action=\"store_true\", default=False)\n group.add_argument(\"--commit\", help=\"Make changes in AWS (dry run if omitted); cannot be combined with --changeset\",\n action=\"store_true\", default=False)\n group.add_argument(\"--lint\", help=\"Execute cfn-lint on the rendered template\", action=\"store_true\",\n default=False)\n parser.add_argument(\"--percent\", help=\"Specifies an override to the percentage of instances in an Auto Scaling rolling update (e.g. 
10 for 10%%)\",\n type=int, default=False)\n parser.add_argument(\"--poll\", help=\"Poll Cloudformation to check status of stack creation/updates\",\n action=\"store_true\", default=False)\n parsed_args = vars(parser.parse_args(args))\n context = EFCFContext()\n try:\n context.env = parsed_args[\"env\"]\n context.template_file = parsed_args[\"template_file\"]\n except ValueError as e:\n fail(\"Error in argument: {}\".format(e.message))\n context.changeset = parsed_args[\"changeset\"]\n context.commit = parsed_args[\"commit\"]\n context.devel = parsed_args[\"devel\"]\n context.lint = parsed_args[\"lint\"]\n context.percent = parsed_args[\"percent\"]\n context.poll_status = parsed_args[\"poll\"]\n context.verbose = parsed_args[\"verbose\"]\n # Set up service registry and policy template path which depends on it\n context.service_registry = EFServiceRegistry(parsed_args[\"sr\"])\n return context", "response": "Handles command line args and sets the context of the current environment."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef merge_files(service, skip_on_user_group_error=False):\n if WHERE == \"ec2\":\n config_reader = EFInstanceinitConfigReader(\"s3\", service, log_info, RESOURCES[\"s3\"])\n resolver = EFTemplateResolver()\n elif WHERE == \"virtualbox-kvm\":\n config_path = \"{}/{}\".format(VIRTUALBOX_CONFIG_ROOT, service)\n config_reader = EFInstanceinitConfigReader(\"file\", config_path, log_info)\n environment = EFConfig.VAGRANT_ENV\n resolver = EFTemplateResolver(env=environment, profile=get_account_alias(environment),\n region=EFConfig.DEFAULT_REGION, service=service)\n\n while config_reader.next():\n log_info(\"checking: {}\".format(config_reader.current_key))\n\n # if 'dest' for the current object contains an 'environments' list, check it\n dest = config_reader.dest\n if \"environments\" in dest:\n if not resolver.resolved[\"ENV_SHORT\"] in dest[\"environments\"]:\n 
log_info(\"Environment: {} not enabled for {}\".format(\n resolver.resolved[\"ENV_SHORT\"], config_reader.current_key)\n )\n continue\n\n # If 'dest' for the current object contains a user_group that hasn't been created in the environment yet and the\n # flag is set to True to skip, log the error and move onto the next config file without blowing up.\n if skip_on_user_group_error:\n user, group = get_user_group(dest)\n try:\n getpwnam(user).pw_uid\n except KeyError:\n log_info(\"File specifies user {} that doesn't exist in environment. Skipping config file.\".format(user))\n continue\n try:\n getgrnam(group).gr_gid\n except KeyError:\n log_info(\"File specifies group {} that doesn't exist in environment. Skipping config file.\".format(group))\n continue\n\n # Process the template_body - apply context + parameters\n log_info(\"Resolving template\")\n resolver.load(config_reader.template, config_reader.parameters)\n rendered_body = resolver.render()\n if not resolver.resolved_ok():\n critical(\"Couldn't resolve all symbols; template has leftover {{ or }}: {}\".format(resolver.unresolved_symbols()))\n\n # Write the rendered file\n dir_path = normpath(dirname(dest[\"path\"]))\n # Resolved OK. 
try to write the template\n log_info(\"make directories: {} {}\".format(dir_path, dest[\"dir_perm\"]))\n try:\n makedirs(dir_path, int(dest[\"dir_perm\"], 8))\n except OSError as error:\n if error.errno != 17:\n critical(\"Error making directories {}\".format(repr(error)))\n log_info(\"open: \" + dest[\"path\"] + \",w+\")\n try:\n outfile = open(dest[\"path\"], 'w+')\n log_info(\"write\")\n outfile.write(rendered_body)\n log_info(\"close\")\n outfile.close()\n log_info(\"chmod file to: \" + dest[\"file_perm\"])\n chmod(dest[\"path\"], int(dest[\"file_perm\"], 8))\n user, group = get_user_group(dest)\n uid = getpwnam(user).pw_uid\n gid = getgrnam(group).gr_gid\n log_info(\"chown last directory in path to: \" + dest[\"user_group\"])\n chown(dir_path, uid, gid)\n log_info(\"chown file to: \" + dest[\"user_group\"])\n chown(dest[\"path\"], uid, gid)\n except Exception as error:\n critical(\"Error writing file: \" + dest[\"path\"] + \": \" + repr(error))", "response": "Given a service name find all templates below ; write to dest"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncall get_metadata ; halt with fail", "response": "def get_metadata_or_fail(metadata_key):\n \"\"\"\n Call get_metadata; halt with fail() if it raises an exception\n \"\"\"\n try:\n return http_get_metadata(metadata_key)\n except IOError as error:\n fail(\"Exception in http_get_metadata {} {}\".format(metadata_key, repr(error)))"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nload a template from a string or file.", "response": "def load(self, template, parameters=None):\n \"\"\"\n 'template'\n Loads template text from a 'string' or 'file' type\n Template text contains {{TOKEN}} symbols to be replaced\n\n 'parameters'\n parameters contains environment-specific sections as discussed in the class documentation.\n the 'parameters' arg can be None, a 'string', 'file', or 'dictionary'\n\n Whether from a string or file, or already in a 
dictionary, parameters must follow the\n logical format documented in the class docstring.\n if 'parameters' is omitted, template resolution will proceed with AWS, credential, and\n version lookups.\n \"\"\"\n # load template\n if isinstance(template, str):\n self.template = template\n elif isinstance(template, file):\n try:\n self.template = template.read()\n template.close()\n except IOError as error:\n fail(\"Exception loading template from file: \", error)\n else:\n fail(\"Unknown type loading template; expected string or file: \" + type(template))\n\n # load parameters, if any\n if parameters:\n if isinstance(parameters, str):\n try:\n self.parameters = yaml.safe_load(parameters)\n except ValueError as error:\n fail(\"Exception loading parameters from string: \", error)\n elif isinstance(parameters, file):\n try:\n self.parameters = yaml.safe_load(parameters)\n parameters.close()\n except ValueError as error:\n fail(\"Exception loading parameters from file: {}\".format(error), sys.exc_info())\n elif isinstance(parameters, dict):\n self.parameters = parameters\n else:\n fail(\"Unknown type loading parameters; expected string, file, or dict: \" + type(parameters))\n # sanity check the loaded parameters\n if \"params\" not in self.parameters:\n fail(\"'params' field not found in parameters\")\n # just the params, please\n self.parameters = self.parameters[\"params\"]\n # are all the keys valid (must have legal characters)\n for k in set().union(*(self.parameters[d].keys() for d in self.parameters.keys())):\n invalid_char = re.search(ILLEGAL_PARAMETER_CHARS, k)\n if invalid_char:\n fail(\"illegal character: '\" + invalid_char.group(0) + \"' in parameter key: \" + k)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nsearch the parameters blob for a given symbol.", "response": "def search_parameters(self, symbol):\n \"\"\"\n Hierarchically searches for 'symbol' in the parameters blob if there is one (would have\n been 
retrieved by 'load()'). Order is: default, , \n Returns\n Hierarchically resolved value for 'symbol', or None if a match is not found or there are no parameters\n \"\"\"\n if not self.parameters:\n return None\n # Hierarchically lookup the key\n result = None\n if \"default\" in self.parameters and symbol in self.parameters[\"default\"]:\n result = self.parameters[\"default\"][symbol]\n if self.resolved[\"ENV_SHORT\"] in self.parameters and symbol in self.parameters[self.resolved[\"ENV_SHORT\"]]:\n result = self.parameters[self.resolved[\"ENV_SHORT\"]][symbol]\n # This lookup is redundant when env_short == env, but it's also cheap\n if self.resolved[\"ENV\"] in self.parameters and symbol in self.parameters[self.resolved[\"ENV\"]]:\n result = self.parameters[self.resolved[\"ENV\"]][symbol]\n return result"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nfinding all resolvable symbols in the template and resolve them as described elsewhere", "response": "def render(self):\n \"\"\"\n Find {{}} tokens; resolve then replace them as described elsewhere\n Resolution is multi-pass: tokens may be nested to form parts of other tokens.\n\n Token search steps when resolving symbols\n - 1. lookups of AWS resource identifiers, such as a security group ID\n - 2. secure credentials\n - 3. version registry for versioned content such as an AMI ID\n - 4. built-in context (such as \"ENV\")\n - 5. 
parameters from a parameter file / dictionary of params\n \"\"\"\n # Ensure that our symbols are clean and are not from a previous template that was rendered\n self.symbols = set()\n # Until all symbols are resolved or it is determined that some cannot be resolved, repeat:\n go_again = True\n while go_again:\n go_again = False # if at least one symbol isn't resolved in a pass, stop\n # Gather all resolvable symbols in the template\n template_symbols = set(re.findall(SYMBOL_PATTERN, self.template))\n self.symbols.update(template_symbols) # include this pass's symbols in full set\n # resolve and replace symbols\n for symbol in template_symbols:\n resolved_symbol = None\n # Lookups in AWS, only if we have an EFAwsResolver\n if symbol[:4] == \"aws:\" and EFTemplateResolver.__AWSR:\n resolved_symbol = EFTemplateResolver.__AWSR.lookup(symbol[4:])\n # Lookups in credentials\n elif symbol[:12] == \"credentials:\":\n pass #TODO\n elif symbol[:9] == \"efconfig:\":\n resolved_symbol = EFTemplateResolver.__EFCR.lookup(symbol[9:])\n elif symbol[:8] == \"version:\":\n resolved_symbol = EFTemplateResolver.__VR.lookup(symbol[8:])\n if not resolved_symbol:\n print(\"WARNING: Lookup failed for {{%s}} - placeholder value of 'NONE' used in rendered template\" % symbol)\n resolved_symbol = \"NONE\"\n else:\n # 1. context - these are already in the resolved table\n # self.resolved[symbol] may have value=None; use has_key tell \"resolved w/value=None\" from \"not resolved\"\n # these may be \"global\" symbols such like \"ENV\", \"ACCOUNT\", etc.\n if symbol in self.resolved:\n resolved_symbol = self.resolved[symbol]\n # 2. 
parameters\n if not resolved_symbol:\n resolved_symbol = self.search_parameters(symbol)\n # if symbol was resolved, replace it everywhere\n if resolved_symbol is not None:\n if isinstance(resolved_symbol, list):\n self.template = self.template.replace(\"{{\" + symbol + \"}}\", \"\\n\".join(resolved_symbol))\n else:\n self.template = self.template.replace(\"{{\" + symbol + \"}}\", resolved_symbol)\n go_again = True\n return self.template"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a count of left and right braces in the template as ( N_left_braces N_right_braces", "response": "def count_braces(self):\n \"\"\"\n returns a count of \"{{\" and \"}}\" in the template, as (N_left_braces, N_right_braces)\n Useful to check after resolve() has run, to infer that template has an error since no {{ or }}\n should be present in the template after resolve()\n \"\"\"\n n_left = len(re.findall(\"{{\", self.template))\n n_right = len(re.findall(\"}}\", self.template))\n return n_left, n_right"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn true if all unresolved symbols and count_braces are equal.", "response": "def resolved_ok(self):\n \"\"\"\n Shortcut to testing unresolved_symbols and count_braces separately.\n Returns false if there are unresolved symbols or {{ or }} braces remaining, true otherwise\n \"\"\"\n left_braces, right_braces = self.count_braces()\n return len(self.unresolved_symbols()) == left_braces == right_braces == 0"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef handle_args_and_set_context(args):\n parser = argparse.ArgumentParser()\n parser.add_argument(\"env\", help=\"environment\")\n parser.add_argument(\"path_to_template\", help=\"path to the config template to process\")\n parser.add_argument(\"--no_params\", help=\"disable loading values from params file\", action=\"store_true\", 
default=False)\n parser.add_argument(\"--verbose\", help=\"Output extra info\", action=\"store_true\", default=False)\n parser.add_argument(\"--lint\", help=\"Test configs for valid JSON/YAML syntax\", action=\"store_true\", default=False)\n parser.add_argument(\"--silent\", help=\"Suppress output of rendered template\", action=\"store_true\", default=False)\n parsed = vars(parser.parse_args(args))\n path_to_template = abspath(parsed[\"path_to_template\"])\n service = path_to_template.split('/')[-3]\n\n return Context(\n get_account_alias(parsed[\"env\"]),\n EFConfig.DEFAULT_REGION,\n parsed[\"env\"],\n service,\n path_to_template,\n parsed[\"no_params\"],\n parsed[\"verbose\"],\n parsed[\"lint\"],\n parsed[\"silent\"]\n )", "response": "Handles the command line args and sets the Context object based on the CLI args."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ngive a context containing path to template env and service merge config into a single object.", "response": "def merge_files(context):\n \"\"\"\n Given a context containing path to template, env, and service:\n merge config into template and output the result to stdout\n Args:\n context: a populated context object\n \"\"\"\n resolver = EFTemplateResolver(\n profile=context.profile,\n region=context.region,\n env=context.env,\n service=context.service\n )\n\n try:\n with open(context.template_path, 'r') as f:\n template_body = f.read()\n f.close()\n except IOError as error:\n raise IOError(\"Error loading template file: {} {}\".format(context.template_path, repr(error)))\n\n if context.no_params is False:\n try:\n with open(context.param_path, 'r') as f:\n param_body = f.read()\n f.close()\n except IOError as error:\n raise IOError(\"Error loading param file: {} {}\".format(context.param_path, repr(error)))\n\n dest = yaml.safe_load(param_body)[\"dest\"]\n\n # if 'dest' for the current object contains an 'environments' list, check it\n if \"environments\" in dest:\n if not 
resolver.resolved[\"ENV_SHORT\"] in dest[\"environments\"]:\n print(\"Environment: {} not enabled for {}\".format(resolver.resolved[\"ENV_SHORT\"], context.template_path))\n return\n\n # Process the template_body - apply context + parameters\n resolver.load(template_body, param_body)\n else:\n resolver.load(template_body)\n rendered_body = resolver.render()\n\n if not resolver.resolved_ok():\n raise RuntimeError(\"Couldn't resolve all symbols; template has leftover {{ or }}: {}\".format(resolver.unresolved_symbols()))\n\n if context.lint:\n if context.template_path.endswith(\".json\"):\n try:\n json.loads(rendered_body, strict=False)\n print(\"JSON passed linting process.\")\n except ValueError as e:\n fail(\"JSON failed linting process.\", e)\n elif context.template_path.endswith((\".yml\", \".yaml\")):\n conf = yamllint_config.YamlLintConfig(content='extends: relaxed')\n lint_output = yamllinter.run(rendered_body, conf)\n lint_level = 'error'\n lint_errors = [issue for issue in lint_output if issue.level == lint_level]\n if lint_errors:\n split_body = rendered_body.splitlines()\n for error in lint_errors:\n print(error)\n # printing line - 1 because lists start at 0, but files at 1\n print(\"\\t\", split_body[error.line - 1])\n fail(\"YAML failed linting process.\")\n\n if context.verbose:\n print(context)\n if context.no_params:\n print('no_params flag set to true!')\n print('Inline template resolution based on external symbol lookup only and no destination for file write.\\n')\n else:\n dir_path = normpath(dirname(dest[\"path\"]))\n print(\"make directories: {} {}\".format(dir_path, dest[\"dir_perm\"]))\n print(\"chmod file to: \" + dest[\"file_perm\"])\n user, group = dest[\"user_group\"].split(\":\")\n print(\"chown last directory in path to user: {}, group: {}\".format(user, group))\n print(\"chown file to user: {}, group: {}\\n\".format(user, group))\n\n print(\"template body:\\n{}\\nrendered body:\\n{}\\n\".format(template_body, rendered_body))\n elif 
context.silent:\n print(\"Config template rendered successfully.\")\n else:\n print(rendered_body)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nhandles command line arguments", "response": "def handle_args(args):\n \"\"\"\n Handle command line arguments\n Raises:\n Exception if the config path wasn't explicitly state and dead reckoning based on script location fails\n \"\"\"\n parser = argparse.ArgumentParser()\n parser.add_argument(\"configpath\", default=None, nargs=\"?\",\n help=\"/path/to/configs (always a directory; if omitted, all /configs are checked\")\n parser.add_argument(\"--verbose\", action=\"store_true\", default=False)\n parsed_args = vars(parser.parse_args(args))\n # If a config path wasn't given, calculate it based on location of the script\n if parsed_args[\"configpath\"] is None:\n script_dir = os.path.abspath(os.path.dirname(sys.argv[0]))\n parsed_args[\"configpath\"] = os.path.normpath(\"{}/{}\".format(script_dir, CONFIGS_RELATIVE_PATH_FROM_SCRIPT_DIR))\n else:\n parsed_args[\"configpath\"] = os.path.normpath(parsed_args[\"configpath\"])\n # If the path is a directory, all good. 
if it's a file, find the directory the file is in and check that instead\n if os.path.isdir(parsed_args[\"configpath\"]):\n parsed_args[\"configdir\"] = parsed_args[\"configpath\"]\n else:\n parsed_args[\"configdir\"] = os.path.dirname(parsed_args[\"configpath\"])\n return parsed_args"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef load_json(json_filespec):\n json_fh = open(json_filespec)\n config_dict = json.load(json_fh)\n json_fh.close()\n return config_dict", "response": "Loads a JSON file into a dict"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef lookup(self, token):\n # get search key and env/service from token\n try:\n key, envservice = token.split(\",\")\n except ValueError:\n return None\n # get env, service from value\n try:\n env, service = envservice.split(\"/\")\n except ValueError as e:\n raise RuntimeError(\"Request:{} can't resolve to env, service. {}\".format(envservice, e.message))\n\n return self._s3_get(env, service, key)", "response": "Lookup a key version from a token in a node s bucket."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef alert_policy_exists(self, policy_name):\n if next((policy for policy in self.all_alerts if policy['name'] == policy_name), False):\n return True", "response": "Check to see if an alert policy exists in NewRelic. 
Return True if so False if not."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create_alert_policy(self, policy_name):\n policy_data = { 'policy': { 'incident_preference': 'PER_POLICY', 'name': policy_name } }\n create_policy = requests.post(\n 'https://api.newrelic.com/v2/alerts_policies.json',\n headers=self.auth_header,\n data=json.dumps(policy_data))\n create_policy.raise_for_status()\n policy_id = create_policy.json()['policy']['id']\n self.refresh_all_alerts()\n return policy_id", "response": "Creates an alert policy in NewRelic"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef element_href(name):\n if name:\n element = fetch_meta_by_name(name)\n if element.href:\n return element.href", "response": "Get specified element href by element name"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget specified element json data by name.", "response": "def element_as_json(name):\n \"\"\" Get specified element json data by name\n\n :param name: name of element\n :return: json data representing element, else None\n \"\"\"\n if name:\n element = fetch_json_by_name(name)\n if element.json:\n return element.json"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef element_as_json_with_filter(name, _filter):\n if name:\n element_href = element_href_use_filter(name, _filter)\n if element_href:\n return element_by_href_as_json(element_href)", "response": "Get specified element json data by name with filter."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef element_info_as_json(name):\n if name:\n element = fetch_meta_by_name(name)\n if element.json:\n return element.json", "response": "Get specified element META data based on search query\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function 
for\ngetting the element metadata as json with the given name and filter", "response": "def element_info_as_json_with_filter(name, _filter):\n \"\"\"\n Top level json meta data (href, name, type) for element\n\n :param str name: name of element\n :param str _filter: filter of entry point\n :return: list dict with metadata, otherwise None\n \"\"\"\n if name and _filter:\n element = fetch_meta_by_name(name, filter_context=_filter)\n if element.json:\n return element.json"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets element href using a wildcard rather than matching only on the name field.", "response": "def element_href_use_wildcard(name):\n \"\"\" Get element href using a wildcard rather than matching only on the name\n field. This will likely return multiple results.\n\n :param name: name of element\n :return: list of matched elements\n \"\"\"\n if name:\n element = fetch_meta_by_name(name, exact_match=False)\n return element.json"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget element href using filter", "response": "def element_href_use_filter(name, _filter):\n \"\"\" Get element href using filter\n\n Filter should be a valid entry point value, ie host, router, network,\n single_fw, etc\n\n :param name: name of element\n :param _filter: filter type, unknown filter will result in no matches\n :return: element href (if found), else None\n \"\"\"\n if name and _filter:\n element = fetch_meta_by_name(name, filter_context=_filter)\n if element.json:\n return element.json.pop().get('href')"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting specified element by href as json data.", "response": "def element_by_href_as_json(href, params=None):\n \"\"\" Get specified element by href\n\n :param href: link to object\n :param params: optional search query parameters\n :return: json data representing element, else None\n \"\"\"\n if href:\n element = 
fetch_json_by_href(href, params=params)\n if element:\n return element.json"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget the name of an element from an analyzed element.", "response": "def element_name_by_href(href):\n \"\"\"\n The element href is known, possibly from a reference in an\n elements json. You want to retrieve the name of this element.\n\n :param str href: href of element\n :return: str name of element, or None\n \"\"\"\n if href:\n element = fetch_json_by_href(href)\n if element.json:\n return element.json.get('name')"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nretrieves the element name and type of element based on the href.", "response": "def element_name_and_type_by_href(href):\n \"\"\"\n Retrieve the element name and type of element based on the href.\n You may have a href that is within another element reference and\n want more information on that reference.\n\n :param str href: href of element\n :return: tuple (name, type)\n \"\"\"\n if href:\n element = fetch_json_by_href(href)\n if element.json:\n for entries in element.json.get('link'):\n if entries.get('rel') == 'self':\n typeof = entries.get('type')\n\n return (element.json.get('name'),\n typeof)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef element_attribute_by_href(href, attr_name):\n if href:\n element = fetch_json_by_href(href)\n if element.json:\n return element.json.get(attr_name)", "response": "Get an attribute from an element by href."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef element_by_href_as_smcresult(href, params=None):\n if href:\n element = fetch_json_by_href(href, params=params)\n if element:\n return element", "response": "Get specified element returned as an SMCResult object"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script 
for\nreturning SMCResult object and use search filter to find object", "response": "def element_as_smcresult_use_filter(name, _filter):\n \"\"\" Return SMCResult object and use search filter to\n find object\n\n :param name: name of element to find\n :param _filter: filter to use, i.e. tcp_service, host, etc\n :return: :py:class:`smc.api.web.SMCResult`\n \"\"\"\n if name:\n element = fetch_meta_by_name(name, filter_context=_filter)\n if element.msg:\n return element\n if element.json:\n return element_by_href_as_smcresult(element.json.pop().get('href'))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef element_href_by_batch(list_to_find, filter=None): # @ReservedAssignment\n try:\n if filter:\n return [{k: element_href_use_filter(k, filter)\n for k in list_to_find}]\n return [{k: element_href(k) for k in list_to_find}]\n except TypeError:\n logger.error(\"{} is not iterable\".format(list_to_find))", "response": "Find the href of the elements in a list of names."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget all elements of the specified type from the top level entry point verb from SMC API.", "response": "def all_elements_by_type(name):\n \"\"\" Get specified elements based on the entry point verb from SMC api\n To get the entry points available, you can get these from the session::\n\n session.cache.entry_points\n\n Execution will get the entry point for the element type, then get all\n elements that match.\n\n For example::\n\n search.all_elements_by_type('host')\n\n :param name: top level entry point name\n :raises: `smc.api.exceptions.UnsupportedEntryPoint`\n :return: list with json representation of name match, else None\n \"\"\"\n if name:\n entry = element_entry_point(name)\n if entry: # in case an invalid entry point is specified\n result = element_by_href_as_json(entry)\n return result"} {"SOURCE": "codesearchnet", "instruction": "Here you have a 
function in Python 3, explain what it does\ndef fetch_json_by_name(name):\n result = fetch_meta_by_name(name)\n if result.href:\n result = fetch_json_by_href(result.href)\n return result", "response": "Fetch json based on the element name"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef fetch_json_by_href(href, params=None):\n result = SMCRequest(href=href, params=params).read()\n if result:\n result.href = href\n return result", "response": "Fetch json for element by using href."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(cls, name, negotiation_expiration=200000,\n negotiation_retry_timer=500,\n negotiation_retry_max_number=32,\n negotiation_retry_timer_max=7000,\n certificate_cache_crl_validity=90000,\n mobike_after_sa_update=False,\n mobike_before_sa_update=False,\n mobike_no_rrc=True):\n \"\"\"\n Create a new gateway setting profile.\n\n :param str name: name of profile\n :param int negotiation_expiration: expire after (ms)\n :param int negotiation_retry_timer: retry time length (ms)\n :param int negotiation_retry_max_num: max number of retries allowed\n :param int negotiation_retry_timer_max: maximum length for retry (ms)\n :param int certificate_cache_crl_validity: cert cache validity (seconds)\n :param boolean mobike_after_sa_update: Whether the After SA flag is set\n for Mobike Policy\n :param boolean mobike_before_sa_update: Whether the Before SA flag is\n set for Mobike Policy\n :param boolean mobike_no_rrc: Whether the No RRC flag is set for \n Mobike Policy\n :raises CreateElementFailed: failed creating profile\n :return: instance with meta\n :rtype: GatewaySettings\n \"\"\"\n json = {'name': name,\n 'negotiation_expiration': negotiation_expiration,\n 'negotiation_retry_timer': negotiation_retry_timer,\n 'negotiation_retry_max_number': negotiation_retry_max_number,\n 'negotiation_retry_timer_max': 
negotiation_retry_timer_max,\n 'certificate_cache_crl_validity': certificate_cache_crl_validity,\n 'mobike_after_sa_update': mobike_after_sa_update,\n 'mobike_before_sa_update': mobike_before_sa_update,\n 'mobike_no_rrc': mobike_no_rrc}\n\n return ElementCreator(cls, json)", "response": "Creates a new GatewaySettings object."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(cls, name, trust_all_cas=True):\n json = {'name': name,\n 'trust_all_cas': trust_all_cas}\n\n return ElementCreator(cls, json)", "response": "Create new External Gateway\n\n :param str name: name of test_external gateway\n :param bool trust_all_cas: whether to trust all internal CA's\n (default: True)\n :return: instance with meta\n :rtype: ExternalGateway"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef update_or_create(cls, name, external_endpoint=None, vpn_site=None,\n trust_all_cas=True, with_status=False):\n \"\"\"\n Update or create an ExternalGateway. The ``external_endpoint`` and\n ``vpn_site`` parameters are expected to be a list of dicts with key/value\n pairs to satisfy the respective elements create constructor. VPN Sites will\n represent the final state of the VPN site list. 
ExternalEndpoint that are\n pre-existing will not be deleted if not provided in the ``external_endpoint``\n parameter, however existing elements will be updated as specified.\n \n :param str name: name of external gateway\n :param list(dict) external_endpoint: list of dict items with key/value\n to satisfy ExternalEndpoint.create constructor\n :param list(dict) vpn_site: list of dict items with key/value to satisfy\n VPNSite.create constructor\n :param bool with_status: If set to True, returns a 3-tuple of\n (ExternalGateway, modified, created), where modified and created\n is the boolean status for operations performed.\n :raises ValueError: missing required argument/s for constructor argument\n :rtype: ExternalGateway\n \"\"\"\n if external_endpoint:\n for endpoint in external_endpoint:\n if 'name' not in endpoint:\n raise ValueError('External endpoints are configured '\n 'but missing the name parameter.')\n \n if vpn_site:\n for site in vpn_site:\n if 'name' not in site:\n raise ValueError('VPN sites are configured but missing '\n 'the name parameter.')\n # Make sure VPN sites are resolvable before continuing\n sites = [element_resolver(element, do_raise=True)\n for element in site.get('site_element', [])]\n site.update(site_element=sites)\n \n updated = False\n created = False\n try:\n extgw = ExternalGateway.get(name)\n except ElementNotFound:\n extgw = ExternalGateway.create(name, trust_all_cas)\n created = True\n \n if external_endpoint:\n for endpoint in external_endpoint:\n _, modified, was_created = ExternalEndpoint.update_or_create(\n extgw, with_status=True, **endpoint)\n if was_created or modified:\n updated = True\n \n if vpn_site:\n for site in vpn_site:\n _, modified, was_created = VPNSite.update_or_create(extgw,\n name=site['name'], site_element=site.get('site_element'),\n with_status=True)\n if was_created or modified:\n updated = True\n\n if with_status:\n return extgw, updated, created\n return extgw", "response": "Update or create an 
ExternalGateway with the given name."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create(self, name, address=None, enabled=True, balancing_mode='active',\n ipsec_vpn=True, nat_t=False, force_nat_t=False, dynamic=False,\n ike_phase1_id_type=None, ike_phase1_id_value=None):\n \"\"\"\n Create an test_external endpoint. Define common settings for that\n specify the address, enabled, nat_t, name, etc. You can also omit\n the IP address if the endpoint is dynamic. In that case, you must\n also specify the ike_phase1 settings.\n\n :param str name: name of test_external endpoint\n :param str address: address of remote host\n :param bool enabled: True|False (default: True)\n :param str balancing_mode: active\n :param bool ipsec_vpn: True|False (default: True)\n :param bool nat_t: True|False (default: False)\n :param bool force_nat_t: True|False (default: False)\n :param bool dynamic: is a dynamic VPN (default: False)\n :param int ike_phase1_id_type: If using a dynamic endpoint, you must\n set this value. Valid options: 0=DNS name, 1=Email, 2=DN, 3=IP Address\n :param str ike_phase1_id_value: value of ike_phase1_id. 
Required if\n ike_phase1_id_type and dynamic set.\n :raises CreateElementFailed: create element with reason\n :return: newly created element\n :rtype: ExternalEndpoint\n \"\"\"\n json = {'name': name,\n 'address': address,\n 'balancing_mode': balancing_mode,\n 'dynamic': dynamic,\n 'enabled': enabled,\n 'nat_t': nat_t,\n 'force_nat_t': force_nat_t,\n 'ipsec_vpn': ipsec_vpn}\n \n if dynamic:\n json.pop('address')\n json.update(\n ike_phase1_id_type=ike_phase1_id_type,\n ike_phase1_id_value=ike_phase1_id_value)\n \n return ElementCreator(\n self.__class__,\n href=self.href,\n json=json)", "response": "Creates an external endpoint."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates or create an external endpoint for the specified external gateway.", "response": "def update_or_create(cls, external_gateway, name, with_status=False, **kw):\n \"\"\"\n Update or create external endpoints for the specified external gateway.\n An ExternalEndpoint is considered unique based on the IP address for the\n endpoint (you cannot add two external endpoints with the same IP). If the\n external endpoint is dynamic, then the name is the unique identifier.\n \n :param ExternalGateway external_gateway: external gateway reference\n :param str name: name of the ExternalEndpoint. This is only used as\n a direct match if the endpoint is dynamic. 
Otherwise the address\n field in the keyword arguments will be used as you cannot add\n multiple external endpoints with the same IP address.\n :param bool with_status: If set to True, returns a 3-tuple of\n (ExternalEndpoint, modified, created), where modified and created\n is the boolean status for operations performed.\n :param dict kw: keyword arguments to satisfy ExternalEndpoint.create\n constructor\n :raises CreateElementFailed: Failed to create external endpoint with reason\n :raises ElementNotFound: If specified ExternalGateway is not valid\n :return: if with_status=True, return tuple(ExternalEndpoint, created). Otherwise\n return only ExternalEndpoint.\n \"\"\"\n if 'address' in kw:\n external_endpoint = external_gateway.external_endpoint.get_contains(\n '({})'.format(kw['address']))\n else:\n external_endpoint = external_gateway.external_endpoint.get_contains(name)\n \n updated = False\n created = False\n if external_endpoint: # Check for changes\n for name, value in kw.items(): # Check for differences before updating\n if getattr(external_endpoint, name, None) != value:\n external_endpoint.data[name] = value\n updated = True\n if updated:\n external_endpoint.update()\n else:\n external_endpoint = external_gateway.external_endpoint.create(\n name, **kw)\n created = True\n \n if with_status:\n return external_endpoint, updated, created\n return external_endpoint"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nenable or disable this endpoint.", "response": "def enable_disable(self):\n \"\"\"\n Enable or disable this endpoint. If enabled, it will be disabled\n and vice versa.\n\n :return: None\n \"\"\"\n if self.enabled:\n self.data['enabled'] = False\n else:\n self.data['enabled'] = True\n self.update()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nenable or disable NAT - T on this endpoint.", "response": "def enable_disable_force_nat_t(self):\n \"\"\"\n Enable or disable NAT-T on this endpoint. 
If enabled, it will be\n disabled and vice versa.\n\n :return: None\n \"\"\"\n if self.force_nat_t:\n self.data['force_nat_t'] = False\n else:\n self.data['force_nat_t'] = True\n self.update()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a VPN site for an internal or external gateway.", "response": "def create(self, name, site_element):\n \"\"\"\n Create a VPN site for an internal or external gateway\n\n :param str name: name of site\n :param list site_element: list of protected networks/hosts\n :type site_element: list[str,Element]\n :raises CreateElementFailed: create element failed with reason\n :return: href of new element\n :rtype: str\n \"\"\"\n site_element = element_resolver(site_element)\n json = {\n 'name': name,\n 'site_element': site_element}\n \n return ElementCreator(\n self.__class__,\n href=self.href,\n json=json)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef update_or_create(cls, external_gateway, name, site_element=None,\n with_status=False):\n \"\"\"\n Update or create a VPN Site elements or modify an existing VPN\n site based on value of provided site_element list. 
The resultant\n VPN site end result will be what is provided in the site_element\n argument (can also be an empty list to clear existing).\n \n :param ExternalGateway external_gateway: The external gateway for\n this VPN site\n :param str name: name of the VPN site\n :param list(str,Element) site_element: list of resolved Elements to\n add to the VPN site\n :param bool with_status: If set to True, returns a 3-tuple of\n (VPNSite, modified, created), where modified and created\n is the boolean status for operations performed.\n :raises ElementNotFound: ExternalGateway or unresolved site_element\n \"\"\"\n site_element = [] if not site_element else site_element\n site_elements = [element_resolver(element) for element in site_element]\n vpn_site = external_gateway.vpn_site.get_exact(name)\n updated = False\n created = False\n if vpn_site: # If difference, reset\n if set(site_elements) != set(vpn_site.data.get('site_element', [])):\n vpn_site.data['site_element'] = site_elements\n vpn_site.update()\n updated = True\n \n else:\n vpn_site = external_gateway.vpn_site.create(\n name=name, site_element=site_elements)\n created = True\n \n if with_status:\n return vpn_site, updated, created\n return vpn_site", "response": "Update or create VPN Site elements based on value of provided site_element list."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds a site element or href s of vpn site elements to this VPN.", "response": "def add_site_element(self, element):\n \"\"\"\n Add a site element or list of elements to this VPN.\n\n :param list element: list of Elements or href's of vpn site\n elements\n :type element: list(str,Network)\n :raises UpdateElementFailed: fails due to reason\n :return: None\n \"\"\"\n element = element_resolver(element)\n self.data['site_element'].extend(element)\n self.update()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef location_helper(name, 
search_only=False):\n try:\n return name.href\n except AttributeError:\n if name and name.startswith('http'):\n return name\n except ElementNotFound:\n return Location.create(name=name.name).href if not \\\n search_only else None\n \n # Get all locations; tmp to support earlier 6.x versions.\n if name is not None:\n locations = [location for location in Search.objects.entry_point(\n 'location') if location.name == name]\n if not locations:\n return Location.create(name=name).href if not search_only \\\n else None\n return locations[0].href", "response": "Helper function to find a location by name."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef change_password(self, password):\n self.make_request(\n ModificationFailed,\n method='update',\n resource='change_password',\n params={'password': password})", "response": "Change user password. Change is committed immediately."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_permission(self, permission):\n if 'permissions' not in self.data:\n self.data['superuser'] = False\n self.data['permissions'] = {'permission':[]}\n \n for p in permission:\n self.data['permissions']['permission'].append(p.data)\n self.update()", "response": "Adds a permission to this Admin User."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef permissions(self):\n if 'permissions' in self.data:\n _permissions = self.data['permissions']['permission']\n return [Permission(**perm) for perm in _permissions]\n return []", "response": "Return each permission mapping for this Admin User."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(cls, name, local_admin=False, allow_sudo=False,\n superuser=False, enabled=True, engine_target=None,\n can_use_api=True, console_superuser=False,\n 
allowed_to_login_in_shared=True, comment=None):\n \"\"\"\n Create an admin user account.\n \n .. versionadded:: 0.6.2\n Added can_use_api, console_superuser, and allowed_to_login_in_shared.\n Requires SMC >= SMC 6.4\n\n :param str name: name of account\n :param bool local_admin: is a local admin only\n :param bool allow_sudo: allow sudo on engines\n :param bool can_use_api: can log in to SMC API\n :param bool console_superuser: can this user sudo via SSH/console\n :param bool allowed_to_login_in_shared: can this user log in to the\n shared domain\n :param bool superuser: is a super administrator\n :param bool enabled: is account enabled\n :param list engine_target: engine to allow remote access to\n :raises CreateElementFailed: failure creating element with reason\n :return: instance with meta\n :rtype: AdminUser\n \"\"\"\n engines = [] if engine_target is None else engine_target\n \n json = {'name': name,\n 'enabled': enabled,\n 'allow_sudo': allow_sudo,\n 'console_superuser': console_superuser,\n 'allowed_to_login_in_shared': allowed_to_login_in_shared,\n 'engine_target': engines,\n 'local_admin': local_admin,\n 'superuser': superuser,\n 'can_use_api': can_use_api,\n 'comment': comment}\n \n return ElementCreator(cls, json)", "response": "Create an admin user account."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef change_engine_password(self, password):\n self.make_request(\n ModificationFailed,\n method='update',\n resource='change_engine_password',\n params={'password': password})", "response": "Change Engine password for all engines on allowed\n list."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, name, enabled=True, superuser=True):\n json = {\n 'enabled': enabled,\n 'name': name,\n 'superuser': superuser}\n\n return ElementCreator(cls, json)", "response": "Create a new API Client."} {"SOURCE": "codesearchnet", "instruction": "Write a 
Python 3 script for\ncreating the engine configuration as a dict.", "response": "def _create(cls, name, node_type,\n physical_interfaces,\n nodes=1, loopback_ndi=None,\n log_server_ref=None,\n domain_server_address=None,\n enable_antivirus=False, enable_gti=False,\n sidewinder_proxy_enabled=False,\n default_nat=False, location_ref=None,\n enable_ospf=None, ospf_profile=None, snmp_agent=None,\n comment=None):\n \"\"\"\n Create will return the engine configuration as a dict that is a\n representation of the engine. The creating class will also add\n engine specific requirements before constructing the request\n and sending to SMC (which will serialize the dict to json).\n\n :param name: name of engine\n :param str node_type: comes from class attribute of engine type\n :param dict physical_interfaces: physical interface list of dict\n :param int nodes: number of nodes for engine\n :param str log_server_ref: href of log server\n :param list domain_server_address: dns addresses\n \"\"\"\n node_list = []\n for nodeid in range(1, nodes + 1): # start at nodeid=1\n node_list.append(Node._create(\n name,\n node_type,\n nodeid,\n loopback_ndi))\n\n domain_server_list = []\n if domain_server_address:\n for num, server in enumerate(domain_server_address):\n try:\n domain_server = {'rank': num, 'ne_ref' : server.href}\n except AttributeError:\n domain_server = {'rank': num, 'value': server}\n \n domain_server_list.append(domain_server)\n\n # Set log server reference, if not explicitly provided\n if not log_server_ref and node_type is not 'virtual_fw_node':\n log_server_ref = LogServer.objects.first().href\n\n base_cfg = {\n 'name': name,\n 'nodes': node_list,\n 'domain_server_address': domain_server_list,\n 'log_server_ref': log_server_ref,\n 'physicalInterfaces': physical_interfaces}\n\n if enable_antivirus:\n antivirus = {\n 'antivirus': {\n 'antivirus_enabled': True,\n 'antivirus_update': 'daily',\n 'virus_log_level': 'stored',\n 'virus_mirror': 
'update.nai.com/Products/CommonUpdater'}}\n base_cfg.update(antivirus)\n\n if enable_gti:\n gti = {'gti_settings': {\n 'file_reputation_context': 'gti_cloud_only'}}\n base_cfg.update(gti)\n\n if sidewinder_proxy_enabled:\n base_cfg.update(sidewinder_proxy_enabled=True)\n\n if default_nat:\n base_cfg.update(default_nat=True)\n\n if location_ref:\n base_cfg.update(location_ref=location_helper(location_ref) \\\n if location_ref else None)\n \n if snmp_agent:\n snmp_agent_ref = SNMPAgent(snmp_agent.pop('snmp_agent_ref')).href\n base_cfg.update(\n snmp_agent_ref=snmp_agent_ref,\n **snmp_agent)\n \n if enable_ospf:\n if not ospf_profile: # get default profile\n ospf_profile = OSPFProfile('Default OSPFv2 Profile').href\n ospf = {'dynamic_routing': {\n 'ospfv2': {\n 'enabled': True,\n 'ospfv2_profile_ref': ospf_profile}\n }}\n base_cfg.update(ospf)\n \n base_cfg.update(comment=comment)\n return base_cfg"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef rename(self, name):\n for node in self.nodes:\n node.rename(name)\n self.update(name=name)\n self.vpn.rename(name)", "response": "Rename the firewall engine nodes and internal gateway"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef location(self):\n location = Element.from_href(self.location_ref)\n if location and location.name == 'Default':\n return None\n return location", "response": "Returns the location for this engine. May be None if no specific location has been assigned."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a list of child nodes of this engine. This is a recursive method that returns a list of child nodes of this engine.", "response": "def nodes(self):\n \"\"\"\n Return a list of child nodes of this engine. 
This can be\n used to iterate to obtain access to node level operations\n ::\n \n >>> print(list(engine.nodes))\n [Node(name=myfirewall node 1)]\n >>> engine.nodes.get(0)\n Node(name=myfirewall node 1)\n \n \n :return: nodes for this engine\n :rtype: SubElementCollection(Node)\n \"\"\"\n resource = sub_collection(\n self.get_relation( \n 'nodes'), \n Node) \n resource._load_from_engine(self, 'nodes') \n return resource"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef permissions(self):\n acl_list = list(AccessControlList.objects.all())\n def acl_map(elem_href):\n for elem in acl_list:\n if elem.href == elem_href:\n return elem\n \n acls = self.make_request(\n UnsupportedEngineFeature,\n resource='permissions')\n for acl in acls['granted_access_control_list']:\n yield(acl_map(acl))", "response": "Retrieve the permissions for this engine instance."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a new PendingChanges object that represents the pending changes on an engine that are a pending approval or disapproval.", "response": "def pending_changes(self):\n \"\"\"\n Pending changes provides insight into changes on an engine that are\n pending approval or disapproval. 
Feature requires SMC >= v6.2.\n\n :raises UnsupportedEngineFeature: SMC version >= 6.2 is required to\n support pending changes\n :rtype: PendingChanges\n \"\"\"\n if 'pending_changes' in self.data.links:\n return PendingChanges(self)\n raise UnsupportedEngineFeature(\n 'Pending changes is an unsupported feature on this engine: {}'\n .format(self.type))"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nyields all aliases that are resolved on this engine.", "response": "def alias_resolving(self):\n \"\"\"\n Alias definitions with resolved values as defined on this engine.\n Aliases can be used in rules to simplify multiple object creation\n ::\n\n fw = Engine('myfirewall')\n for alias in fw.alias_resolving():\n print(alias, alias.resolved_value)\n ...\n (Alias(name=$$ Interface ID 0.ip), [u'10.10.0.1'])\n (Alias(name=$$ Interface ID 0.net), [u'10.10.0.0/24'])\n (Alias(name=$$ Interface ID 1.ip), [u'10.10.10.1'])\n\n :return: generator of aliases\n :rtype: Alias\n \"\"\"\n alias_list = list(Alias.objects.all())\n for alias in self.make_request(resource='alias_resolving'):\n yield Alias._from_engine(alias, alias_list)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef blacklist_bulk(self, blacklist):\n self.make_request(\n EngineCommandFailed,\n method='create',\n resource='blacklist',\n json=blacklist.entries)", "response": "Add blacklist entries to the engine node in bulk."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef blacklist_show(self, **kw):\n try:\n from smc_monitoring.monitors.blacklist import BlacklistQuery\n except ImportError:\n pass\n else:\n query = BlacklistQuery(self.name)\n for record in query.fetch_as_element(**kw):\n yield record", "response": "Returns a generator of the entries in the blacklist for this entry."} {"SOURCE": "codesearchnet", "instruction": "Given the following 
Python 3 function, write the documentation\ndef add_route(self, gateway, network):\n self.make_request(\n EngineCommandFailed,\n method='create',\n resource='add_route',\n params={'gateway': gateway,\n 'network': network})", "response": "Add a route to the specified network."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning a list of route elements for the routing monitoring system.", "response": "def routing_monitoring(self):\n \"\"\"\n Return route table for the engine, including\n gateway, networks and type of route (dynamic, static).\n Calling this can take a few seconds to retrieve routes\n from the engine.\n\n Find all routes for engine resource::\n\n >>> engine = Engine('sg_vm')\n >>> for route in engine.routing_monitoring:\n ... route\n ... \n Route(route_network=u'0.0.0.0', route_netmask=0, route_gateway=u'10.0.0.1', route_type=u'Static', dst_if=1, src_if=-1)\n ...\n\n :raises EngineCommandFailed: routes cannot be retrieved\n :return: list of route elements\n :rtype: SerializedIterable(Route)\n \"\"\"\n try:\n result = self.make_request(\n EngineCommandFailed,\n resource='routing_monitoring')\n \n return Route(result)\n except SMCConnectionError:\n raise EngineCommandFailed('Timed out waiting for routes')"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a new VirtualResource instance with all the virtual resources in the master engine.", "response": "def virtual_resource(self):\n \"\"\"\n Available on a Master Engine only.\n\n To get all virtual resources call::\n\n engine.virtual_resource.all()\n\n :raises UnsupportedEngineFeature: master engine only\n :rtype: CreateCollection(VirtualResource)\n \"\"\"\n resource = create_collection(\n self.get_relation(\n 'virtual_resources',\n UnsupportedEngineFeature),\n VirtualResource)\n resource._load_from_engine(self, 'virtualResources')\n return resource"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nadding an 
interface to the cache.", "response": "def add_interface(self, interface):\n \"\"\"\n Add interface is a lower level option to adding interfaces directly\n to the engine. The interface is expected to be an instance of\n Layer3PhysicalInterface, Layer2PhysicalInterface, TunnelInterface,\n or ClusterInterface. The engines instance cache is flushed after\n this call is made to provide an updated cache after modification.\n \n .. seealso:: :class:`smc.core.engine.interface.update_or_create`\n \n :param PhysicalInterface,TunnelInterface interface: instance of\n pre-created interface\n :return: None\n \"\"\"\n self.make_request(\n EngineCommandFailed,\n method='create', \n href=self.get_relation(interface.typeof),\n json=interface)\n self._del_cache()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef refresh(self, timeout=3, wait_for_finish=False, **kw):\n return Task.execute(self, 'refresh',\n timeout=timeout, wait_for_finish=wait_for_finish, **kw)", "response": "Refresh existing policy on specified device."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngenerating and retrieve a policy snapshot from the engine", "response": "def generate_snapshot(self, filename='snapshot.zip'):\n \"\"\"\n Generate and retrieve a policy snapshot from the engine\n This is blocking as file is downloaded\n\n :param str filename: name of file to save file to, including directory\n path\n :raises EngineCommandFailed: snapshot failed, possibly invalid filename\n specified\n :return: None\n \"\"\"\n try:\n self.make_request(\n EngineCommandFailed,\n resource='generate_snapshot',\n filename=filename)\n\n except IOError as e:\n raise EngineCommandFailed(\n 'Generate snapshot failed: {}'.format(e))"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nadds a VPN site with site elements to this engine.", "response": "def add_site(self, name, site_elements=None):\n \"\"\"\n 
Add a VPN site with site elements to this engine.\n VPN sites identify the sites with protected networks\n to be included in the VPN.\n Add a network and new VPN site::\n \n >>> net = Network.get_or_create(name='wireless', ipv4_network='192.168.5.0/24')\n >>> engine.vpn.add_site(name='wireless', site_elements=[net])\n VPNSite(name=wireless)\n >>> list(engine.vpn.sites)\n [VPNSite(name=dingo - Primary Site), VPNSite(name=wireless)]\n \n :param str name: name for VPN site\n :param list site_elements: network elements for VPN site\n :type site_elements: list(str,Element)\n :raises ElementNotFound: if site element is not found\n :raises UpdateElementFailed: failed to add vpn site\n :rtype: VPNSite\n \n .. note:: Update is immediate for this operation.\n \"\"\"\n site_elements = site_elements if site_elements else []\n return self.sites.create(\n name, site_elements)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngenerates an internal VPNCertificate object for the specified key.", "response": "def generate_certificate(self, common_name, public_key_algorithm='rsa',\n signature_algorithm='rsa_sha_512', key_length=2048,\n signing_ca=None):\n \"\"\"\n Generate an internal gateway certificate used for VPN on this engine.\n Certificate request should be an instance of VPNCertificate.\n\n :param: str common_name: common name for certificate\n :param str public_key_algorithm: public key type to use. Valid values\n rsa, dsa, ecdsa.\n :param str signature_algorithm: signature algorithm. Valid values\n dsa_sha_1, dsa_sha_224, dsa_sha_256, rsa_md5, rsa_sha_1, rsa_sha_256,\n rsa_sha_384, rsa_sha_512, ecdsa_sha_1, ecdsa_sha_256, ecdsa_sha_384,\n ecdsa_sha_512. (Default: rsa_sha_512)\n :param int key_length: length of key. Key length depends on the key\n type. For example, RSA keys can be 1024, 2048, 3072, 4096. 
See SMC\n documentation for more details.\n :param str,VPNCertificateCA signing_ca: by default will use the\n internal RSA CA\n :raises CertificateError: error generating certificate\n :return: GatewayCertificate\n \"\"\"\n return GatewayCertificate._create(self, common_name, public_key_algorithm,\n signature_algorithm, key_length, signing_ca)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create(self, name, vfw_id, domain='Shared Domain',\n show_master_nic=False, connection_limit=0, comment=None):\n \"\"\"\n Create a new virtual resource. Called through engine\n reference::\n\n engine.virtual_resource.create(....)\n\n :param str name: name of virtual resource\n :param int vfw_id: virtual fw identifier\n :param str domain: name of domain to install, (default Shared)\n :param bool show_master_nic: whether to show the master engine NIC ID's\n in the virtual instance\n :param int connection_limit: whether to limit number of connections for\n this instance\n :return: href of new virtual resource\n :rtype: str\n \"\"\"\n allocated_domain = domain_helper(domain)\n json = {'name': name,\n 'connection_limit': connection_limit,\n 'show_master_nic': show_master_nic,\n 'vfw_id': vfw_id,\n 'comment': comment,\n 'allocated_domain_ref': allocated_domain}\n \n return ElementCreator(self.__class__, json=json, href=self.href)", "response": "Create a new virtual resource. Called through engine\n engine. virtual_resource. 
create (..."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef enable(self, snmp_agent, snmp_location=None, snmp_interface=None):\n agent = element_resolver(snmp_agent)\n snmp_interface = [] if not snmp_interface else snmp_interface\n interfaces = self._iface_dict(snmp_interface)\n \n self.engine.data.update(\n snmp_agent_ref=agent,\n snmp_location=snmp_location if snmp_location else '',\n snmp_interface=interfaces)", "response": "Enable SNMP on the engine. Specify a list of interfaces that have NDI s to enable on those interfaces."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef update_configuration(self, **kwargs):\n updated = False\n if 'snmp_agent' in kwargs:\n kwargs.update(snmp_agent_ref=kwargs.pop('snmp_agent'))\n snmp_interface = kwargs.pop('snmp_interface', None)\n for name, value in kwargs.items():\n _value = element_resolver(value)\n if getattr(self.engine, name, None) != _value:\n self.engine.data[name] = _value\n updated = True\n \n if snmp_interface is not None:\n _snmp_interface = getattr(self.engine, 'snmp_interface', [])\n if not len(snmp_interface) and len(_snmp_interface):\n self.engine.data.update(snmp_interface=[])\n updated = True\n elif len(snmp_interface):\n if set(self._nicids) ^ set(map(str, snmp_interface)):\n self.engine.data.update(\n snmp_interface=self._iface_dict(snmp_interface))\n updated = True\n return updated", "response": "Update the SNMP configuration for the current node."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a list of physical interfaces that the SNMP is bound to.", "response": "def interface(self):\n \"\"\"\n Return a list of physical interfaces that the SNMP\n agent is bound to.\n \n :rtype: list(PhysicalInterface)\n \"\"\"\n nics = set([nic.get('nicid') for nic in \\\n getattr(self.engine, 'snmp_interface', [])])\n return 
[self.engine.interface.get(nic) for nic in nics]"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nenable the DNS Relay service on this engine.", "response": "def enable(self, interface_id, dns_relay_profile=None):\n \"\"\"\n Enable the DNS Relay service on this engine.\n\n :param int interface_id: interface id to enable relay\n :param str,DNSRelayProfile dns_relay_profile: DNSRelayProfile element\n or str href\n :raises EngineCommandFailed: interface not found\n :raises ElementNotFound: profile not found\n :return: None\n \"\"\"\n if not dns_relay_profile: # Use default\n href = DNSRelayProfile('Cache Only').href\n else:\n href = element_resolver(dns_relay_profile)\n\n intf = self.engine.interface.get(interface_id)\n \n self.engine.data.update(dns_relay_profile_ref=href)\n self.engine.data.update(dns_relay_interface=intf.ndi_interfaces)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef disable(self):\n self.engine.data.update(dns_relay_interface=[])\n self.engine.data.pop('dns_relay_profile_ref', None)", "response": "Disable DNS Relay on this engine\n \n "} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nremoves DNS entries from this ranked DNS list.", "response": "def remove(self, values):\n \"\"\"\n Remove DNS entries from this ranked DNS list. 
A DNS entry can be either\n a raw IP Address, or an element of type :class:`smc.elements.network.Host`\n or :class:`smc.elements.servers.DNSServer`.\n \n :param list values: list of IP addresses, Host and/or DNSServer elements.\n :return: None\n \"\"\"\n removables = []\n for value in values:\n if value in self:\n removables.append(value)\n \n if removables:\n self.entries[:] = [entry._asdict() for entry in self\n if entry.value not in removables and not entry.element in removables]\n # Rerank to maintain order\n for i, entry in enumerate(self.entries):\n entry.update(rank='{}'.format(i))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef enable(self, policy):\n if hasattr(policy, 'href'):\n if not isinstance(policy, InterfacePolicy):\n raise LoadPolicyFailed('Invalid policy type specified. The policy'\n 'type must be InterfacePolicy')\n \n self.update(l2_interface_policy_ref=element_resolver(policy))", "response": "Enable a layer 2 interface policy."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create(cls, name, template='High-Security IPS Template'):\n try:\n if cls.typeof == 'ips_template_policy' and template is None:\n fw_template = None\n else:\n fw_template = IPSTemplatePolicy(template).href\n except ElementNotFound:\n raise LoadPolicyFailed(\n 'Cannot find specified firewall template: {}'.format(template))\n json = {\n 'name': name,\n 'template': fw_template}\n try:\n return ElementCreator(cls, json)\n except CreateElementFailed as err:\n raise CreatePolicyFailed(err)", "response": "Create an IPS Policy with the specified name and template."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get(self, *args, **kwargs):\n if self:\n if args:\n index = args[0]\n if index <= len(self) -1:\n return self[args[0]]\n return None\n elif kwargs:\n key, value = kwargs.popitem()\n for item in 
self.items:\n if getattr(item, key, None) == value:\n return item\n else:\n raise ValueError('Missing argument. You must provide an '\n 'arg or kwarg to fetch an element from the collection.')", "response": "Get an element from the collection by an arg or kwarg."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nuploading an IPList as a zip file. Useful when IPLists are very large.", "response": "def upload_as_zip(name, filename):\n \"\"\"\n Upload an IPList as a zip file. Useful when IPList is very large.\n This is the default upload format for IPLists.\n\n :param str name: name of IPList\n :param str filename: name of zip file to upload, full path\n :return: None\n \"\"\"\n location = list(IPList.objects.filter(name))\n if location:\n iplist = location[0]\n return iplist.upload(filename=filename)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef upload_as_text(name, filename):\n location = list(IPList.objects.filter(name))\n if location:\n iplist = location[0]\n return iplist.upload(filename=filename, as_type='txt')", "response": "Uploads the IPList as text from a file."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef upload_as_json(name, mylist):\n location = list(IPList.objects.filter(name))\n if location:\n iplist = location[0]\n return iplist.upload(json=mylist, as_type='json')", "response": "Upload the IPList as json payload."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndownloads IPList as text to specified filename.", "response": "def download_as_text(name, filename):\n \"\"\"\n Download IPList as text to specified filename.\n\n :param str name: name of IPList\n :param str filename: name of file for IPList download\n \"\"\"\n location = list(IPList.objects.filter(name))\n if location:\n iplist = location[0]\n return iplist.download(filename=filename, as_type='txt')"} 
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ndownloads the specified IPList as json.", "response": "def download_as_json(name):\n \"\"\"\n Download IPList as json. This would allow for easily \n manipulation of the IPList, but generally recommended only for\n smaller lists\n\n :param str name: name of IPList\n :return: None\n \"\"\"\n location = list(IPList.objects.filter(name))\n if location:\n iplist = location[0]\n return iplist.download(as_type='json')"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create_iplist_with_data(name, iplist):\n iplist = IPList.create(name=name, iplist=iplist)\n return iplist", "response": "Create an IPList with initial list contents."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nenabling OSPF on this virtual firewall.", "response": "def enable(self, ospf_profile=None, router_id=None):\n \"\"\"\n Enable OSPF on this engine. 
For master engines, enable\n OSPF on the virtual firewall.\n\n Once enabled on the engine, add an OSPF area to an interface::\n\n engine.dynamic_routing.ospf.enable()\n interface = engine.routing.get(0)\n interface.add_ospf_area(OSPFArea('myarea'))\n\n :param str,OSPFProfile ospf_profile: OSPFProfile element or str\n href; if None, use default profile\n :param str router_id: single IP address router ID\n :raises ElementNotFound: OSPF profile not found\n :return: None\n \"\"\"\n ospf_profile = element_resolver(ospf_profile) if ospf_profile \\\n else OSPFProfile('Default OSPFv2 Profile').href\n\n self.data.update(\n enabled=True,\n ospfv2_profile_ref=ospf_profile,\n router_id=router_id)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_configuration(self, **kwargs):\n updated = False\n if 'ospf_profile' in kwargs:\n kwargs.update(ospfv2_profile_ref=kwargs.pop('ospf_profile'))\n for name, value in kwargs.items():\n _value = element_resolver(value)\n if self.data.get(name) != _value:\n self.data[name] = _value\n updated = True\n return updated", "response": "Update the OSPF configuration using kwargs that match the enable constructor."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(cls, name, interface_settings_ref=None, area_id=1,\n area_type='normal', outbound_filters=None,\n inbound_filters=None, shortcut_capable_area=False,\n ospfv2_virtual_links_endpoints_container=None,\n ospf_abr_substitute_container=None, comment=None, **kwargs):\n \"\"\"\n Create a new OSPF Area\n\n :param str name: name of OSPFArea configuration\n :param str,OSPFInterfaceSetting interface_settings_ref: an OSPFInterfaceSetting\n element or href. 
If None, uses the default system profile\n :param str name: area id\n :param str area_type: \\|normal\\|stub\\|not_so_stubby\\|totally_stubby\\|\n totally_not_so_stubby\n :param list outbound_filters: reference to an IPAccessList and or IPPrefixList.\n You can only have one outbound prefix or access list\n :param list inbound_filters: reference to an IPAccessList and or IPPrefixList.\n You can only have one outbound prefix or access list\n :param shortcut_capable_area: True|False\n :param list ospfv2_virtual_links_endpoints_container: virtual link endpoints\n :param list ospf_abr_substitute_container: substitute types: \n \\|aggregate\\|not_advertise\\|substitute_with\n :param str comment: optional comment\n :raises CreateElementFailed: failed to create with reason\n :rtype: OSPFArea\n \"\"\"\n interface_settings_ref = element_resolver(interface_settings_ref) or \\\n OSPFInterfaceSetting('Default OSPFv2 Interface Settings').href\n \n if 'inbound_filters_ref' in kwargs:\n inbound_filters = kwargs.get('inbound_filters_ref')\n \n if 'outbound_filters_ref' in kwargs:\n outbound_filters = kwargs.get('outbound_filters_ref')\n \n json = {'name': name,\n 'area_id': area_id,\n 'area_type': area_type,\n 'comment': comment,\n 'inbound_filters_ref': element_resolver(inbound_filters),\n 'interface_settings_ref': interface_settings_ref,\n 'ospf_abr_substitute_container': ospf_abr_substitute_container,\n 'ospfv2_virtual_links_endpoints_container':\n ospfv2_virtual_links_endpoints_container,\n 'outbound_filters_ref': element_resolver(outbound_filters),\n 'shortcut_capable_area': shortcut_capable_area}\n\n return ElementCreator(cls, json)", "response": "Creates a new OSPF Area."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a new custom OSPF interface settings.", "response": "def create(cls, name, dead_interval=40, hello_interval=10,\n hello_interval_type='normal', dead_multiplier=1,\n mtu_mismatch_detection=True, 
retransmit_interval=5,\n router_priority=1, transmit_delay=1,\n authentication_type=None, password=None,\n key_chain_ref=None):\n \"\"\"\n Create custom OSPF interface settings profile\n\n :param str name: name of interface settings\n :param int dead_interval: in seconds\n :param str hello_interval: in seconds\n :param str hello_interval_type: \\|normal\\|fast_hello\n :param int dead_multipler: fast hello packet multipler\n :param bool mtu_mismatch_detection: True|False\n :param int retransmit_interval: in seconds\n :param int router_priority: set priority\n :param int transmit_delay: in seconds\n :param str authentication_type: \\|password\\|message_digest\n :param str password: max 8 chars (required when \n authentication_type='password')\n :param str,Element key_chain_ref: OSPFKeyChain (required when \n authentication_type='message_digest')\n :raises CreateElementFailed: create failed with reason\n :return: instance with meta\n :rtype: OSPFInterfaceSetting\n \"\"\"\n json = {'name': name,\n 'authentication_type': authentication_type,\n 'password': password,\n 'key_chain_ref': element_resolver(key_chain_ref),\n 'dead_interval': dead_interval,\n 'dead_multiplier': dead_multiplier,\n 'hello_interval': hello_interval,\n 'hello_interval_type': hello_interval_type,\n 'mtu_mismatch_detection': mtu_mismatch_detection,\n 'retransmit_interval': retransmit_interval,\n 'router_priority': router_priority,\n 'transmit_delay': transmit_delay}\n\n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, name, key_chain_entry):\n key_chain_entry = key_chain_entry or []\n json = {'name': name,\n 'ospfv2_key_chain_entry': key_chain_entry}\n\n return ElementCreator(cls, json)", "response": "Create an OSPF Key Chain with list of key chain entries"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(cls, name, domain_settings_ref=None, 
external_distance=110,\n inter_distance=110, intra_distance=110, redistribution_entry=None,\n default_metric=None, comment=None):\n \"\"\"\n Create an OSPF Profile.\n \n If providing a list of redistribution entries, provide in the following\n dict format: \n \n {'enabled': boolean, 'metric_type': 'external_1' or 'external_2',\n 'metric': 2, 'type': 'kernel'}\n \n Valid types for redistribution entries are: kernel, static, connected, bgp,\n and default_originate.\n \n You can also provide a 'filter' key with either an IPAccessList or RouteMap\n element to use for further access control on the redistributed route type.\n If metric_type is not provided, external_1 (E1) will be used. \n \n An example of a redistribution_entry would be::\n \n {u'enabled': True,\n u'metric': 123,\n u'metric_type': u'external_2',\n u'filter': RouteMap('myroutemap'),\n u'type': u'static'}\n\n :param str name: name of profile\n :param str,OSPFDomainSetting domain_settings_ref: OSPFDomainSetting \n element or href\n :param int external_distance: route metric (E1-E2)\n :param int inter_distance: routes learned from different areas (O IA)\n :param int intra_distance: routes learned from same area (O)\n :param list redistribution_entry: how to redistribute the OSPF routes.\n :raises CreateElementFailed: create failed with reason\n :rtype: OSPFProfile\n \"\"\"\n json = {'name': name,\n 'external_distance': external_distance,\n 'inter_distance': inter_distance,\n 'intra_distance': intra_distance,\n 'default_metric': default_metric,\n 'comment': comment}\n \n if redistribution_entry:\n json.update(redistribution_entry=\n _format_redist_entry(redistribution_entry))\n\n domain_settings_ref = element_resolver(domain_settings_ref) or \\\n OSPFDomainSetting('Default OSPFv2 Domain Settings').href\n\n json.update(domain_settings_ref=domain_settings_ref)\n\n return ElementCreator(cls, json)", "response": "Creates an OSPF profile with the given name domain settings and optional redistribution entry."} 
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create(cls, name, abr_type='cisco', auto_cost_bandwidth=100,\n deprecated_algorithm=False, initial_delay=200,\n initial_hold_time=1000, max_hold_time=10000,\n shutdown_max_metric_lsa=0, startup_max_metric_lsa=0):\n \"\"\"\n Create custom Domain Settings\n\n Domain settings are referenced by an OSPFProfile\n\n :param str name: name of custom domain settings\n :param str abr_type: cisco|shortcut|standard\n :param int auto_cost_bandwidth: Mbits/s\n :param bool deprecated_algorithm: RFC 1518 compatibility\n :param int initial_delay: in milliseconds\n :param int initial_hold_type: in milliseconds\n :param int max_hold_time: in milliseconds\n :param int shutdown_max_metric_lsa: in seconds\n :param int startup_max_metric_lsa: in seconds\n :raises CreateElementFailed: create failed with reason\n :return: instance with meta\n :rtype: OSPFDomainSetting\n \"\"\"\n json = {'name': name,\n 'abr_type': abr_type,\n 'auto_cost_bandwidth': auto_cost_bandwidth,\n 'deprecated_algorithm': deprecated_algorithm,\n 'initial_delay': initial_delay,\n 'initial_hold_time': initial_hold_time,\n 'max_hold_time': max_hold_time,\n 'shutdown_max_metric_lsa': shutdown_max_metric_lsa,\n 'startup_max_metric_lsa': startup_max_metric_lsa}\n\n return ElementCreator(cls, json)", "response": "Create custom domain settings"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nprovide a pre-filter to the create function that provides the ability to modify the element json before submitting to the SMC. To register a create hook, set on the class or top level Element class to enable this on any descendent of Element:: Element._create_hook = classmethod(myhook) The hook should be a callable and take two arguments, cls, json and return the json after modification. 
For example:: def myhook(cls, json): print(\"Called with class: %s\" % cls) if 'address' in json: json['address'] = '2.2.2.2' return json", "response": "def create_hook(function):\n \"\"\"\n Provide a pre-filter to the create function that provides the ability\n to modify the element json before submitting to the SMC. To register\n a create hook, set on the class or top level Element class to enable\n this on any descendent of Element::\n \n Element._create_hook = classmethod(myhook)\n \n The hook should be a callable and take two arguments, cls, json and \n return the json after modification. For example::\n \n def myhook(cls, json):\n print(\"Called with class: %s\" % cls)\n if 'address' in json:\n json['address'] = '2.2.2.2'\n return json\n \"\"\"\n @functools.wraps(function)\n def run(cls, json, **kwargs):\n if hasattr(cls, '_create_hook'):\n json = cls._create_hook(json)\n return function(cls, json, **kwargs)\n return run"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef exception(function):\n @functools.wraps(function)\n def wrapper(*exception, **kwargs):\n result = function(**kwargs)\n if exception:\n result.exception = exception[0]\n return result\n return wrapper", "response": "Decorator for handling exception parameter."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nformat data for adding SNMP to an engine.", "response": "def add_snmp(data, interfaces):\n \"\"\"\n Format data for adding SNMP to an engine.\n \n :param list data: list of interfaces as provided by kw\n :param list interfaces: interfaces to enable SNMP by id\n \"\"\"\n snmp_interface = []\n if interfaces: # Not providing interfaces will enable SNMP on all NDIs\n interfaces = map(str, interfaces)\n for interface in data:\n interface_id = str(interface.get('interface_id'))\n for if_def in interface.get('interfaces', []):\n _interface_id = None\n if 'vlan_id' in if_def:\n _interface_id = 
'{}.{}'.format(\n interface_id, if_def['vlan_id'])\n else:\n _interface_id = interface_id\n if _interface_id in interfaces and 'type' not in interface:\n for node in if_def.get('nodes', []):\n snmp_interface.append(\n {'address': node.get('address'),\n 'nicid': _interface_id})\n return snmp_interface"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create_bulk(cls, name, interfaces=None,\n primary_mgt=None, backup_mgt=None,\n log_server_ref=None,\n domain_server_address=None,\n location_ref=None, default_nat=False,\n enable_antivirus=False, enable_gti=False,\n sidewinder_proxy_enabled=False, enable_ospf=False,\n ospf_profile=None, comment=None, snmp=None, **kw):\n \"\"\"\n Create a Layer 3 Firewall providing all of the interface configuration.\n This method provides a way to fully create the engine and all interfaces\n at once versus using :py:meth:`~create` and creating each individual\n interface after the engine exists.\n \n Example interfaces format::\n \n interfaces=[\n {'interface_id': 1},\n {'interface_id': 2, \n 'interfaces':[{'nodes': [{'address': '2.2.2.2', 'network_value': '2.2.2.0/24'}]}],\n 'zone_ref': 'myzone'},\n {'interface_id': 3,\n 'interfaces': [{'nodes': [{'address': '3.3.3.3', 'network_value': '3.3.3.0/24'}],\n 'vlan_id': 3,\n 'zone_ref': 'myzone'},\n {'nodes': [{'address': '4.4.4.4', 'network_value': '4.4.4.0/24'}],\n 'vlan_id': 4}]},\n {'interface_id': 4,\n 'interfaces': [{'vlan_id': 4,\n 'zone_ref': 'myzone'}]},\n {'interface_id': 5,\n 'interfaces': [{'vlan_id': 5}]},\n {'interface_id': 1000,\n 'interfaces': [{'nodes': [{'address': '10.10.10.1', 'network_value': '10.10.10.0/24'}]}],\n 'type': 'tunnel_interface'}]\n \n \"\"\"\n physical_interfaces = []\n for interface in interfaces:\n if 'interface_id' not in interface:\n raise CreateEngineFailed('Interface definitions must contain the interface_id '\n 'field. 
Failed to create engine: %s' % name)\n if interface.get('type', None) == 'tunnel_interface':\n tunnel_interface = TunnelInterface(**interface)\n physical_interfaces.append(\n {'tunnel_interface': tunnel_interface})\n else:\n interface.update(interface='single_node_interface')\n interface = Layer3PhysicalInterface(primary_mgt=primary_mgt,\n backup_mgt=backup_mgt, **interface)\n physical_interfaces.append(\n {'physical_interface': interface})\n\n if snmp:\n snmp_agent = dict(\n snmp_agent_ref=snmp.get('snmp_agent', ''),\n snmp_location=snmp.get('snmp_location', ''))\n \n snmp_agent.update(\n snmp_interface=add_snmp(\n interfaces,\n snmp.get('snmp_interface', [])))\n \n try:\n engine = super(Layer3Firewall, cls)._create(\n name=name,\n node_type='firewall_node',\n physical_interfaces=physical_interfaces,\n loopback_ndi=kw.get('loopback_ndi', []),\n domain_server_address=domain_server_address,\n log_server_ref=log_server_ref,\n nodes=1, enable_gti=enable_gti,\n enable_antivirus=enable_antivirus,\n sidewinder_proxy_enabled=sidewinder_proxy_enabled,\n default_nat=default_nat,\n location_ref=location_ref,\n enable_ospf=enable_ospf,\n ospf_profile=ospf_profile,\n snmp_agent=snmp_agent if snmp else None,\n comment=comment)\n \n return ElementCreator(cls, json=engine)\n \n except (ElementNotFound, CreateElementFailed) as e:\n raise CreateEngineFailed(e)", "response": "Create a Layer 3 Firewall."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate a new NGFW firewall with management interfaces and DNS.", "response": "def create(cls, name, mgmt_ip, mgmt_network,\n mgmt_interface=0,\n log_server_ref=None,\n default_nat=False,\n reverse_connection=False,\n domain_server_address=None, zone_ref=None,\n enable_antivirus=False, enable_gti=False,\n location_ref=None, enable_ospf=False,\n sidewinder_proxy_enabled=False,\n ospf_profile=None, snmp=None, comment=None, **kw):\n \"\"\" \n Create a single layer 3 firewall with management interface and 
DNS. \n Provide the `interfaces` keyword argument if adding multiple additional interfaces.\n Interfaces can be one of any valid interface for a layer 3 firewall. Unless the\n interface type is specified, physical_interface is assumed.\n \n Valid interface types:\n - physical_interface (default if not specified)\n - tunnel_interface\n \n If providing all engine interfaces in a single operation, see :py:meth:`~create_bulk`\n for the proper format.\n \n :param str name: name of firewall engine\n :param str mgmt_ip: ip address of management interface\n :param str mgmt_network: management network in cidr format\n :param str log_server_ref: (optional) href to log_server instance for fw\n :param int mgmt_interface: (optional) interface for management from SMC to fw\n :param list domain_server_address: (optional) DNS server addresses\n :param str zone_ref: zone name, str href or zone name for management interface\n (created if not found)\n :param bool reverse_connection: should the NGFW be the mgmt initiator (used when behind NAT)\n :param bool default_nat: (optional) Whether to enable default NAT for outbound\n :param bool enable_antivirus: (optional) Enable antivirus (required DNS)\n :param bool enable_gti: (optional) Enable GTI\n :param bool sidewinder_proxy_enabled: Enable Sidewinder proxy functionality\n :param str location_ref: location href or not for engine if needed to contact SMC\n behind NAT (created if not found)\n :param bool enable_ospf: whether to turn OSPF on within engine\n :param str ospf_profile: optional OSPF profile to use on engine, by ref\n :param kw: optional keyword arguments specifying additional interfaces \n :raises CreateEngineFailed: Failure to create with reason\n :return: :py:class:`smc.core.engine.Engine`\n \"\"\"\n interfaces = kw.pop('interfaces', [])\n # Add the primary interface to the interface list\n interface = {'interface_id': mgmt_interface,\n 'interface': 'single_node_interface',\n 'zone_ref': zone_ref,\n 'interfaces': [{\n 
'nodes': [{'address': mgmt_ip, 'network_value': mgmt_network, 'nodeid': 1,\n 'reverse_connection': reverse_connection}]\n }]\n }\n interfaces.append(interface)\n \n return Layer3Firewall.create_bulk(\n name=name,\n node_type='firewall_node',\n interfaces=interfaces,\n primary_mgt=mgmt_interface,\n domain_server_address=domain_server_address,\n log_server_ref=log_server_ref,\n nodes=1, enable_gti=enable_gti,\n enable_antivirus=enable_antivirus,\n sidewinder_proxy_enabled=sidewinder_proxy_enabled,\n default_nat=default_nat,\n location_ref=location_ref,\n enable_ospf=enable_ospf,\n ospf_profile=ospf_profile, snmp=snmp,\n comment=comment, **kw)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate a single firewall with only a single DHCP interface.", "response": "def create_dynamic(cls, name, interface_id,\n dynamic_index=1,\n reverse_connection=True,\n automatic_default_route=True,\n domain_server_address=None,\n loopback_ndi='127.0.0.1',\n location_ref=None,\n log_server_ref=None,\n zone_ref=None,\n enable_gti=False,\n enable_antivirus=False,\n sidewinder_proxy_enabled=False,\n default_nat=False, comment=None, **kw):\n \"\"\"\n Create a single layer 3 firewall with only a single DHCP interface. Useful\n when creating virtualized FW's such as in Microsoft Azure.\n \n :param str name: name of engine\n :param str,int interface_id: interface ID used for dynamic interface and management\n :param bool reverse_connection: specifies the dynamic interface should initiate connections\n to management (default: True)\n :param bool automatic_default_route: allow SMC to create a dynamic netlink for the default\n route (default: True)\n :param list domain_server_address: list of IP addresses for engine DNS\n :param str loopback_ndi: IP address for a loopback NDI. 
When creating a dynamic engine, the\n `auth_request` must be set to a different interface, so loopback is created\n :param str location_ref: location by name for the engine\n :param str log_server_ref: log server reference, will use the \n \n \"\"\"\n interfaces = kw.pop('interfaces', [])\n # Add the primary interface to the interface list\n interface = {'interface_id': interface_id,\n 'interface': 'single_node_interface',\n 'zone_ref': zone_ref,\n 'interfaces': [{\n 'nodes': [{'dynamic': True, 'dynamic_index': dynamic_index, 'nodeid': 1,\n 'reverse_connection': reverse_connection,\n 'automatic_default_route': automatic_default_route}]\n }]\n }\n interfaces.append(interface)\n \n loopback = LoopbackInterface.create(\n address=loopback_ndi, \n nodeid=1, \n auth_request=True, \n rank=1)\n \n return Layer3Firewall.create_bulk(\n name=name,\n node_type='firewall_node',\n primary_mgt=interface_id,\n interfaces=interfaces,\n loopback_ndi=[loopback.data],\n domain_server_address=domain_server_address,\n log_server_ref=log_server_ref,\n nodes=1, enable_gti=enable_gti,\n enable_antivirus=enable_antivirus,\n sidewinder_proxy_enabled=sidewinder_proxy_enabled,\n default_nat=default_nat,\n location_ref=location_ref,\n comment=comment, **kw)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create(cls, name, mgmt_ip, mgmt_network,\n mgmt_interface=0,\n inline_interface='1-2',\n logical_interface='default_eth',\n log_server_ref=None,\n domain_server_address=None, zone_ref=None,\n enable_antivirus=False, enable_gti=False, comment=None):\n \"\"\" \n Create a single layer 2 firewall with management interface and inline pair\n\n :param str name: name of firewall engine\n :param str mgmt_ip: ip address of management interface\n :param str mgmt_network: management network in cidr format\n :param int mgmt_interface: (optional) interface for management from SMC to fw\n :param str inline_interface: interfaces to use for 
first inline pair\n :param str logical_interface: name, str href or LogicalInterface (created if it\n doesn't exist)\n :param str log_server_ref: (optional) href to log_server instance \n :param list domain_server_address: (optional) DNS server addresses\n :param str zone_ref: zone name, str href or Zone for management interface\n (created if not found)\n :param bool enable_antivirus: (optional) Enable antivirus (required DNS)\n :param bool enable_gti: (optional) Enable GTI\n :raises CreateEngineFailed: Failure to create with reason\n :return: :py:class:`smc.core.engine.Engine`\n \"\"\"\n interfaces = []\n interface_id, second_interface_id = inline_interface.split('-')\n l2 = {'interface_id': interface_id,\n 'interface': 'inline_interface',\n 'second_interface_id': second_interface_id,\n 'logical_interface_ref': logical_interface}\n \n interfaces.append(\n {'physical_interface': Layer2PhysicalInterface(**l2)})\n \n layer3 = {'interface_id': mgmt_interface,\n 'zone_ref': zone_ref,\n 'interfaces': [{'nodes': [\n {'address': mgmt_ip, 'network_value': mgmt_network, 'nodeid': 1}]}]\n }\n \n interfaces.append(\n {'physical_interface': Layer3PhysicalInterface(primary_mgt=mgmt_interface, **layer3)})\n \n engine = super(Layer2Firewall, cls)._create(\n name=name,\n node_type='fwlayer2_node',\n physical_interfaces=interfaces,\n domain_server_address=domain_server_address,\n log_server_ref=log_server_ref,\n nodes=1, enable_gti=enable_gti,\n enable_antivirus=enable_antivirus,\n comment=comment)\n \n try:\n return ElementCreator(cls, json=engine)\n \n except CreateElementFailed as e:\n raise CreateEngineFailed(e)", "response": "Creates a new single Layer 2 firewall with management interface and inline interface."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a Layer3 Virtual Engine for a Master Engine.", "response": "def create(cls, name, master_engine, virtual_resource,\n interfaces, default_nat=False, outgoing_intf=0,\n 
domain_server_address=None, enable_ospf=False,\n ospf_profile=None, comment=None, **kw):\n \"\"\"\n Create a Layer3Virtual engine for a Master Engine. Provide interfaces\n as a list of dict items specifying the interface details in format::\n \n {'interface_id': 1, 'address': '1.1.1.1', 'network_value': '1.1.1.0/24',\n 'zone_ref': zone_by_name,href, 'comment': 'my interface comment'}\n \n :param str name: Name of this layer 3 virtual engine\n :param str master_engine: Name of existing master engine\n :param str virtual_resource: name of pre-created virtual resource\n :param list interfaces: dict of interface details\n :param bool default_nat: Whether to enable default NAT for outbound\n :param int outgoing_intf: outgoing interface for VE. Specifies interface number\n :param list interfaces: interfaces mappings passed in\n :param bool enable_ospf: whether to turn OSPF on within engine\n :param str ospf_profile: optional OSPF profile to use on engine, by ref \n :raises CreateEngineFailed: Failure to create with reason\n :raises LoadEngineFailed: master engine not found\n :return: :py:class:`smc.core.engine.Engine`\n \"\"\"\n virt_resource_href = None # need virtual resource reference\n master_engine = Engine(master_engine)\n\n for virt_resource in master_engine.virtual_resource.all():\n if virt_resource.name == virtual_resource:\n virt_resource_href = virt_resource.href\n break\n if not virt_resource_href:\n raise CreateEngineFailed('Cannot find associated virtual resource for '\n 'VE named: {}. You must first create a virtual resource for the '\n 'master engine before you can associate a virtual engine. 
Cannot '\n 'add VE'.format(name))\n \n virtual_interfaces = []\n for interface in interfaces:\n nodes = {'address': interface.get('address'),\n 'network_value': interface.get('network_value')}\n \n layer3 = {'interface_id': interface.get('interface_id'),\n 'interface': 'single_node_interface',\n 'comment': interface.get('comment', None),\n 'zone_ref': interface.get('zone_ref')}\n \n if interface.get('interface_id') == outgoing_intf:\n nodes.update(outgoing=True, auth_request=True)\n \n layer3['interfaces'] = [{'nodes': [nodes]}]\n \n virtual_interfaces.append(\n {'virtual_physical_interface': Layer3PhysicalInterface(**layer3).data.data})\n \n engine = super(Layer3VirtualEngine, cls)._create(\n name=name,\n node_type='virtual_fw_node',\n physical_interfaces=virtual_interfaces,\n domain_server_address=domain_server_address,\n log_server_ref=None, # Isn't used in VE\n nodes=1, default_nat=default_nat,\n enable_ospf=enable_ospf,\n ospf_profile=ospf_profile,\n comment=comment)\n\n engine.update(virtual_resource=virt_resource_href)\n # Master Engine provides this service\n engine.pop('log_server_ref', None)\n \n try:\n return ElementCreator(cls, json=engine)\n \n except CreateElementFailed as e:\n raise CreateEngineFailed(e)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create_bulk(cls, name, interfaces=None, nodes=2, cluster_mode='balancing',\n primary_mgt=None, backup_mgt=None, primary_heartbeat=None,\n log_server_ref=None, domain_server_address=None, location_ref=None,\n default_nat=False, enable_antivirus=False, enable_gti=False, comment=None,\n snmp=None, **kw):\n \"\"\"\n \n :param dict snmp: SNMP dict should have keys `snmp_agent` str defining name of SNMPAgent,\n `snmp_interface` which is a list of interface IDs, and optionally `snmp_location` which\n is a string with the SNMP location name.\n \n \"\"\"\n primary_heartbeat = primary_mgt if not primary_heartbeat else primary_heartbeat\n \n 
physical_interfaces = []\n for interface in interfaces:\n if 'interface_id' not in interface:\n raise CreateEngineFailed('Interface definitions must contain the interface_id '\n 'field. Failed to create engine: %s' % name)\n if interface.get('type', None) == 'tunnel_interface':\n tunnel_interface = TunnelInterface(**interface)\n physical_interfaces.append(\n {'tunnel_interface': tunnel_interface})\n else:\n cluster_interface = ClusterPhysicalInterface(\n primary_mgt=primary_mgt, backup_mgt=backup_mgt,\n primary_heartbeat=primary_heartbeat, **interface)\n physical_interfaces.append(\n {'physical_interface': cluster_interface})\n\n if snmp:\n snmp_agent = dict(\n snmp_agent_ref=snmp.get('snmp_agent', ''),\n snmp_location=snmp.get('snmp_location', ''))\n \n snmp_agent.update(\n snmp_interface=add_snmp(\n interfaces,\n snmp.get('snmp_interface', [])))\n \n try:\n engine = super(FirewallCluster, cls)._create(\n name=name,\n node_type='firewall_node',\n physical_interfaces=physical_interfaces,\n domain_server_address=domain_server_address,\n log_server_ref=log_server_ref,\n location_ref=location_ref,\n nodes=nodes, enable_gti=enable_gti,\n enable_antivirus=enable_antivirus,\n default_nat=default_nat,\n snmp_agent=snmp_agent if snmp else None,\n comment=comment)\n engine.update(cluster_mode=cluster_mode)\n \n return ElementCreator(cls, json=engine)\n \n except (ElementNotFound, CreateElementFailed) as e:\n raise CreateEngineFailed(e)", "response": "Creates a firewall cluster with the given interfaces."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(cls, name, cluster_virtual, network_value, macaddress,\n interface_id, nodes, vlan_id=None, cluster_mode='balancing',\n backup_mgt=None, primary_heartbeat=None, log_server_ref=None,\n domain_server_address=None, location_ref=None, zone_ref=None,\n default_nat=False, enable_antivirus=False, enable_gti=False,\n comment=None, snmp=None, **kw):\n \"\"\"\n Create a 
layer 3 firewall cluster with management interface and any number\n of nodes. If providing keyword arguments to create additional interfaces,\n use the same constructor arguments and pass an `interfaces` keyword argument.\n The constructor defined interface will be assigned as the primary\n management interface by default. Otherwise the engine will be created with a\n single interface and interfaces can be added after.\n \n .. versionchanged:: 0.6.1\n Chgnged `cluster_nic` to `interface_id`, and `cluster_mask` to `network_value`\n \n :param str name: name of firewall engine\n :param str cluster_virtual: ip of cluster CVI\n :param str network_value: ip netmask of cluster CVI\n :param str macaddress: macaddress for packet dispatch clustering\n :param str interface_id: nic id to use for primary interface\n :param list nodes: address/network_value/nodeid combination for cluster nodes\n :param str vlan_id: optional VLAN id for the management interface, i.e. '15'.\n :param str cluster_mode: 'balancing' or 'standby' mode (default: balancing)\n :param str,int primary_heartbeat: optionally set the primary_heartbeat. This is\n automatically set to the management interface but can be overridden to use\n another interface if defining additional interfaces using `interfaces`.\n :param str,int backup_mgt: optionally set the backup management interface. 
This\n is unset unless you define additional interfaces using `interfaces`.\n :param str log_server_ref: (optional) href to log_server instance \n :param list domain_server_address: (optional) DNS server addresses\n :param str location_ref: location href or not for engine if needed to contact SMC\n behind NAT (created if not found)\n :param str zone_ref: zone name, str href or Zone for management interface\n (created if not found)\n :param bool enable_antivirus: (optional) Enable antivirus (required DNS)\n :param bool enable_gti: (optional) Enable GTI\n :param list interfaces: optional keyword to supply additional interfaces\n :param dict snmp: SNMP dict should have keys `snmp_agent` str defining name of SNMPAgent,\n `snmp_interface` which is a list of interface IDs, and optionally `snmp_location` which\n is a string with the SNMP location name.\n :raises CreateEngineFailed: Failure to create with reason\n :return: :py:class:`smc.core.engine.Engine`\n\n Example nodes parameter input::\n\n [{'address':'5.5.5.2', 'network_value':'5.5.5.0/24', 'nodeid':1},\n {'address':'5.5.5.3', 'network_value':'5.5.5.0/24', 'nodeid':2},\n {'address':'5.5.5.4', 'network_value':'5.5.5.0/24', 'nodeid':3}]\n \n You can also create additional CVI+NDI, or NDI only interfaces by providing\n the keyword argument interfaces using the same keyword values from the\n constructor::\n \n interfaces=[\n {'interface_id': 1,\n 'macaddress': '02:02:02:02:02:03',\n 'interfaces': [{'cluster_virtual': '2.2.2.1',\n 'network_value': '2.2.2.0/24',\n 'nodes':[{'address': '2.2.2.2', 'network_value': '2.2.2.0/24', 'nodeid': 1},\n {'address': '2.2.2.3', 'network_value': '2.2.2.0/24', 'nodeid': 2}]\n }]\n },\n {'interface_id': 2,\n 'interfaces': [{'nodes':[{'address': '3.3.3.2', 'network_value': '3.3.3.0/24', 'nodeid': 1},\n {'address': '3.3.3.3', 'network_value': '3.3.3.0/24', 'nodeid': 2}]\n }]\n }]\n \n It is also possible to define VLAN interfaces by providing the `vlan_id` keyword.\n Example VLAN with NDI 
only interfaces. If nesting the zone_ref within the interfaces\n list, the zone will be applied to the VLAN versus the top level interface::\n \n interfaces=[\n {'interface_id': 2,\n 'interfaces': [{'nodes':[{'address': '3.3.3.2', 'network_value': '3.3.3.0/24', 'nodeid': 1},\n {'address': '3.3.3.3', 'network_value': '3.3.3.0/24', 'nodeid': 2}],\n 'vlan_id': 22,\n 'zone_ref': 'private-network'\n },\n {'nodes': [{'address': '4.4.4.1', 'network_value': '4.4.4.0/24', 'nodeid': 1},\n {'address': '4.4.4.2', 'network_value': '4.4.4.0/24', 'nodeid': 2}],\n 'vlan_id': 23,\n 'zone_ref': 'other_vlan'\n }]\n }]\n \n Tunnel interfaces can also be created. As all interfaces defined are assumed to be\n a physical interface type, you must specify the `type` parameter to indicate the\n interface is a tunnel interface. Tunnel interfaces do not have a macaddress or VLANs.\n They be configured with NDI interfaces by omitting the `cluster_virtual` and\n `network_value` top level attributes::\n \n interfaces=[\n {'interface_id': 1000,\n 'interfaces': [{'cluster_virtual': '100.100.100.1',\n 'network_value': '100.100.100.0/24',\n 'nodes':[{'address': '100.100.100.2', 'network_value': '100.100.100.0/24', 'nodeid': 1},\n {'address': '100.100.100.3', 'network_value': '100.100.100.0/24', 'nodeid': 2}]\n }],\n 'zone_ref': 'AWStunnel',\n 'type': 'tunnel_interface'\n }]\n \n If setting primary_heartbeat or backup_mgt to a specific interface (the primary\n interface configured in the constructor will have these roles by default), you\n must define the interfaces in the `interfaces` keyword argument list.\n \n .. 
note:: If creating additional interfaces, you must at minimum provide the\n `interface_id` and `nodes` to create an NDI only interface.\n \n \"\"\"\n interfaces = kw.pop('interfaces', [])\n # Add the primary interface to the interface list\n interface = {'cluster_virtual': cluster_virtual,\n 'network_value': network_value,\n 'nodes': nodes}\n if vlan_id:\n interface.update(vlan_id=vlan_id)\n \n interfaces.append(dict(\n interface_id=interface_id,\n macaddress=macaddress,\n zone_ref=zone_ref,\n interfaces=[interface]))\n \n primary_mgt = interface_id if not vlan_id else '{}.{}'.format(interface_id, vlan_id)\n \n return FirewallCluster.create_bulk(\n name, interfaces=interfaces, nodes=len(nodes),\n cluster_mode=cluster_mode, primary_mgt=primary_mgt,\n backup_mgt=backup_mgt, primary_heartbeat=primary_heartbeat, \n log_server_ref=log_server_ref,\n domain_server_address=domain_server_address,\n location_ref=location_ref, default_nat=default_nat,\n enable_antivirus=enable_antivirus, enable_gti=enable_gti,\n comment=comment, snmp=snmp, **kw)", "response": "Create a new firewall cluster with the given parameters."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, name, master_type, mgmt_ip, mgmt_network,\n mgmt_interface=0,\n log_server_ref=None, zone_ref=None,\n domain_server_address=None, enable_gti=False,\n enable_antivirus=False, comment=None):\n \"\"\"\n Create a Master Engine with management interface\n\n :param str name: name of master engine engine\n :param str master_type: firewall|\n :param str mgmt_ip: ip address for management interface\n :param str mgmt_network: full netmask for management\n :param str mgmt_interface: interface to use for mgmt (default: 0)\n :param str log_server_ref: (optional) href to log_server instance \n :param list domain_server_address: (optional) DNS server addresses\n :param bool enable_antivirus: (optional) Enable antivirus (required DNS)\n :param bool enable_gti: (optional) 
Enable GTI\n :raises CreateEngineFailed: Failure to create with reason\n :return: :py:class:`smc.core.engine.Engine`\n \"\"\"\n interface = {'interface_id': mgmt_interface,\n 'interfaces': [{'nodes': [{'address': mgmt_ip, 'network_value': mgmt_network}]}],\n 'zone_ref': zone_ref, 'comment': comment}\n \n interface = Layer3PhysicalInterface(primary_mgt=mgmt_interface,\n primary_heartbeat=mgmt_interface, **interface)\n \n engine = super(MasterEngine, cls)._create(\n name=name,\n node_type='master_node',\n physical_interfaces=[{'physical_interface':interface}],\n domain_server_address=domain_server_address,\n log_server_ref=log_server_ref,\n nodes=1, enable_gti=enable_gti,\n enable_antivirus=enable_antivirus,\n comment=comment)\n\n engine.update(master_type=master_type,\n cluster_mode='standby')\n\n try:\n return ElementCreator(cls, json=engine)\n \n except CreateElementFailed as e:\n raise CreateEngineFailed(e)", "response": "Create a Master Engine with management interface."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate Master Engine Cluster.", "response": "def create(cls, name, master_type, macaddress, nodes, mgmt_interface=0,\n log_server_ref=None, domain_server_address=None, enable_gti=False,\n enable_antivirus=False, comment=None, **kw):\n \"\"\"\n Create Master Engine Cluster\n\n :param str name: name of master engine engine\n :param str master_type: firewall|\n :param str mgmt_ip: ip address for management interface\n :param str mgmt_netmask: full netmask for management\n :param str mgmt_interface: interface to use for mgmt (default: 0)\n :param list nodes: address/network_value/nodeid combination for cluster nodes \n :param str log_server_ref: (optional) href to log_server instance \n :param list domain_server_address: (optional) DNS server addresses\n :param bool enable_antivirus: (optional) Enable antivirus (required DNS)\n :param bool enable_gti: (optional) Enable GTI\n :raises CreateEngineFailed: Failure to create 
with reason\n :return: :py:class:`smc.core.engine.Engine`\n\n Example nodes parameter input::\n\n [{'address':'5.5.5.2', \n 'network_value':'5.5.5.0/24', \n 'nodeid':1},\n {'address':'5.5.5.3', \n 'network_value':'5.5.5.0/24', \n 'nodeid':2},\n {'address':'5.5.5.4', \n 'network_value':'5.5.5.0/24', \n 'nodeid':3}]\n \"\"\"\n primary_mgt = primary_heartbeat = mgmt_interface\n \n interface = {'interface_id': mgmt_interface,\n 'interfaces': [{\n 'nodes': nodes\n }],\n 'macaddress': macaddress,\n }\n \n interface = Layer3PhysicalInterface(primary_mgt=primary_mgt,\n primary_heartbeat=primary_heartbeat, **interface)\n\n engine = super(MasterEngineCluster, cls)._create(\n name=name,\n node_type='master_node',\n physical_interfaces=[{'physical_interface': interface}],\n domain_server_address=domain_server_address,\n log_server_ref=log_server_ref,\n nodes=len(nodes), enable_gti=enable_gti,\n enable_antivirus=enable_antivirus,\n comment=comment)\n\n engine.update(master_type=master_type,\n cluster_mode='standby')\n\n try:\n return ElementCreator(cls, json=engine)\n \n except CreateElementFailed as e:\n raise CreateEngineFailed(e)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nstream logger convenience function to log to console :param int log_level: A log level as specified in the `logging` module :param str format_string: Optional format string as specified in the `logging` module", "response": "def set_stream_logger(log_level=logging.DEBUG, format_string=None, logger_name='smc'): \n \"\"\" \n Stream logger convenience function to log to console\n \n :param int log_level: A log level as specified in the `logging` module\n :param str format_string: Optional format string as specified in the \n `logging` module\n \"\"\" \n if format_string is None: \n format_string = LOG_FORMAT\n \n logger = logging.getLogger(logger_name) \n logger.setLevel(log_level)\n \n # create console handler and set level\n ch = logging.StreamHandler() \n 
ch.setLevel(log_level)\n # create formatter\n formatter = logging.Formatter(format_string)\n # add formatter to ch\n ch.setFormatter(formatter) \n logger.addHandler(ch)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef set_file_logger(path, log_level=logging.DEBUG, format_string=None, logger_name='smc'):\n if format_string is None: \n format_string = LOG_FORMAT\n \n log = logging.getLogger(logger_name)\n log.setLevel(log_level)\n\n # create file handler and set level\n ch = logging.FileHandler(path)\n ch.setLevel(log_level)\n # create formatter\n formatter = logging.Formatter(format_string)\n # add formatter to ch\n ch.setFormatter(formatter)\n # add ch to logger\n log.addHandler(ch)", "response": "This function will quickly configure any level of logging\n to a file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate an External LDAP user domain.", "response": "def create(cls, name, ldap_server, isdefault=False, auth_method=None, comment=None):\n \"\"\"\n Create an External LDAP user domain. These are used as containers for\n retrieving user and groups from the configured LDAP server/s. If you\n have multiple authentication methods supported for your LDAP server,\n or have none configured, you can set the `auth_method` to\n a supported AuthenticationMethod.\n \n :param str name: name of external LDAP domain\n :param list(str,ActiveDirectoryServer) ldap_server: list of existing\n authentication servers in href or element format\n :param bool isdefault: set this to 'Default LDAP domain'\n :param str,AuthenticationMethod auth_method: authentication method to\n use. 
Usually set when multiple are defined in LDAP service or\n none are defined.\n :param str comment: optional comment\n :raises CreateElementFailed: failed to create\n :rtype: ExternalLdapUserDomain\n \"\"\"\n return ElementCreator(cls,\n json={'name': name, 'ldap_server': element_resolver(ldap_server),\n 'auth_method': element_resolver(auth_method),\n 'isdefault': isdefault, 'comment': comment})"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(cls, name, user_group=None, activation_date=None,\n expiration_date=None, comment=None):\n \"\"\"\n Create an internal user. \n Add a user example::\n \n InternalUser.create(name='goog', comment='my comment')\n \n :param str name: name of user that is displayed in SMC \n :param list(str,InternalUserGroup) user_group: internal user groups\n which to add this user to.\n :param datetime activation_date: activation date as datetime object.\n Activation date only supports year and month/day\n :param datetime expiration_date: expiration date as datetime object.\n Expiration date only supports year and month/day\n :param str comment: optional comment\n :raises ElementNotFound: thrown if group specified does not exist\n :rtype: InternalUser\n \"\"\"\n json = {\n 'name': name,\n 'unique_id': 'cn={},{}'.format(name, InternalUserDomain.user_dn),\n 'comment': comment}\n \n limits = {'activation_date': activation_date, 'expiration_date': expiration_date}\n for attr, value in limits.items():\n json[attr] = datetime_to_ms(value) if value else None\n \n if user_group:\n json.update(user_group=element_resolver(user_group))\n\n return ElementCreator(cls, json)", "response": "Create an internal user."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create(cls, name, member=None, comment=None):\n json={'name': name,\n 'unique_id': 'cn={},{}'.format(name, InternalUserDomain.user_dn),\n 'comment': comment}\n if 
member:\n json.update(member=element_resolver(member))\n \n return ElementCreator(cls, json)", "response": "Create an internal group."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting all loopback interfaces for a given engine", "response": "def get_all_loopbacks(engine):\n \"\"\"\n Get all loopback interfaces for a given engine\n \"\"\"\n data = []\n if 'fw_cluster' in engine.type:\n for cvi in engine.data.get('loopback_cluster_virtual_interface', []): \n data.append(\n LoopbackClusterInterface(cvi, engine))\n for node in engine.nodes:\n for lb in node.data.get('loopback_node_dedicated_interface', []):\n data.append(LoopbackInterface(lb, engine))\n return data"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get(self, address):\n loopback = super(LoopbackCollection, self).get(address=address)\n if loopback:\n return loopback\n raise InterfaceNotFound('Loopback address specified was not found')", "response": "Get a loopback address by it s address."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add(self, interface_id, virtual_mapping=None,\n virtual_resource_name=None, zone_ref=None, comment=None):\n \"\"\"\n Add single physical interface with interface_id. 
Use other methods\n to fully add an interface configuration based on engine type.\n Virtual mapping and resource are only used in Virtual Engines.\n\n :param str,int interface_id: interface identifier\n :param int virtual_mapping: virtual firewall id mapping\n See :class:`smc.core.engine.VirtualResource.vfw_id`\n :param str virtual_resource_name: virtual resource name\n See :class:`smc.core.engine.VirtualResource.name`\n :raises EngineCommandFailed: failure creating interface\n :return: None\n \"\"\"\n interface = Layer3PhysicalInterface(engine=self._engine,\n interface_id=interface_id, zone_ref=zone_ref,\n comment=comment, virtual_resource_name=virtual_resource_name,\n virtual_mapping=virtual_mapping)\n \n return self._engine.add_interface(interface)", "response": "Add single physical interface with interface_id."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds a capture interface to the local cache.", "response": "def add_capture_interface(self, interface_id, logical_interface_ref,\n inspect_unspecified_vlans=True, zone_ref=None, comment=None):\n \"\"\"\n Add a capture interface. 
Capture interfaces are supported on\n Layer 2 FW and IPS engines.\n \n ..note::\n Capture interface are supported on Layer 3 FW/clusters for NGFW engines\n version >= 6.3 and SMC >= 6.3.\n \n :param str,int interface_id: interface identifier\n :param str logical_interface_ref: logical interface name, href or LogicalInterface.\n If None, 'default_eth' logical interface will be used.\n :param str zone_ref: zone reference, can be name, href or Zone\n :raises EngineCommandFailed: failure creating interface\n :return: None\n\n See :class:`smc.core.sub_interfaces.CaptureInterface` for more information\n \"\"\"\n capture = {'interface_id': interface_id, 'interface': 'capture_interface',\n 'logical_interface_ref': logical_interface_ref, 'inspect_unspecified_vlans':\n inspect_unspecified_vlans, 'zone_ref': zone_ref, 'comment': comment}\n \n interface = Layer2PhysicalInterface(engine=self._engine, **capture)\n return self._engine.add_interface(interface)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nadd a layer 3 interface on a non - clustered engine.", "response": "def add_layer3_interface(self, interface_id, address, network_value,\n zone_ref=None, comment=None, **kw):\n \"\"\"\n Add a layer 3 interface on a non-clustered engine.\n For Layer 2 FW and IPS engines, this interface type represents\n a layer 3 routed (node dedicated) interface. For clusters, use the\n cluster related methods such as :func:`add_cluster_virtual_interface`\n\n :param str,int interface_id: interface identifier\n :param str address: ip address\n :param str network_value: network/cidr (12.12.12.0/24)\n :param str zone_ref: zone reference, can be name, href or Zone\n :param kw: keyword arguments are passed to the sub-interface during\n create time. If the engine is a single FW, the sub-interface type\n is :class:`smc.core.sub_interfaces.SingleNodeInterface`. 
For all\n other engines, the type is :class:`smc.core.sub_interfaces.NodeInterface`\n For example, pass 'backup_mgt=True' to enable this interface as the\n management backup.\n :raises EngineCommandFailed: failure creating interface\n :return: None\n\n .. note::\n If an existing ip address exists on the interface and zone_ref is\n provided, this value will overwrite any previous zone definition.\n \"\"\"\n interfaces = {'interface_id': interface_id, 'interfaces':\n [{'nodes': [{'address': address, 'network_value': network_value}]}],\n 'zone_ref': zone_ref, 'comment': comment}\n interfaces.update(kw)\n \n if 'single_fw' in self._engine.type: # L2FW / IPS\n interfaces.update(interface='single_node_interface')\n \n try:\n interface = self._engine.interface.get(interface_id)\n interface._add_interface(**interfaces)\n return interface.update()\n \n except InterfaceNotFound:\n interface = Layer3PhysicalInterface(**interfaces)\n return self._engine.add_interface(interface)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_layer3_vlan_interface(self, interface_id, vlan_id, address=None,\n network_value=None, virtual_mapping=None, virtual_resource_name=None,\n zone_ref=None, comment=None, **kw):\n \"\"\"\n Add a Layer 3 VLAN interface. Optionally specify an address and network if\n assigning an IP to the VLAN. This method will also assign an IP address to\n an existing VLAN, or add an additional address to an existing VLAN. 
This\n method may commonly be used on a Master Engine to create VLANs for virtual\n firewall engines.\n \n Example of creating a VLAN and passing kwargs to define a DHCP server\n service on the VLAN interface::\n \n engine = Engine('engine1')\n engine.physical_interface.add_layer3_vlan_interface(interface_id=20, vlan_id=20,\n address='20.20.20.20', network_value='20.20.20.0/24', comment='foocomment',\n dhcp_server_on_interface={\n 'default_gateway': '20.20.20.1',\n 'default_lease_time': 7200,\n 'dhcp_address_range': '20.20.20.101-20.20.20.120',\n 'dhcp_range_per_node': [],\n 'primary_dns_server': '8.8.8.8'})\n \n :param str,int interface_id: interface identifier\n :param int vlan_id: vlan identifier\n :param str address: optional IP address to assign to VLAN\n :param str network_value: network cidr if address is specified. In\n format: 10.10.10.0/24.\n :param str zone_ref: zone to use, by name, href, or Zone\n :param str comment: optional comment for VLAN level of interface\n :param int virtual_mapping: virtual engine mapping id\n See :class:`smc.core.engine.VirtualResource.vfw_id`\n :param str virtual_resource_name: name of virtual resource\n See :class:`smc.core.engine.VirtualResource.name`\n :param dict kw: keyword arguments are passed to top level of VLAN interface,\n not the base level physical interface. 
This is useful if you want to\n pass in a configuration that enables the DHCP server on a VLAN for example.\n :raises EngineCommandFailed: failure creating interface\n :return: None\n \"\"\"\n interfaces = {'nodes': [{'address': address, 'network_value': network_value}] if address\n and network_value else [], 'zone_ref': zone_ref, 'virtual_mapping': virtual_mapping,\n 'virtual_resource_name': virtual_resource_name, 'comment': comment}\n interfaces.update(**kw)\n _interface = {'interface_id': interface_id, 'interfaces': [interfaces]}\n \n if 'single_fw' in self._engine.type: # L2FW / IPS\n _interface.update(interface='single_node_interface')\n \n try:\n interface = self._engine.interface.get(interface_id)\n vlan = interface.vlan_interface.get(vlan_id)\n # Interface exists, so we need to update but check if VLAN already exists\n if vlan is None:\n interfaces.update(vlan_id=vlan_id)\n interface._add_interface(**_interface)\n else:\n _interface.update(interface_id='{}.{}'.format(interface_id, vlan_id))\n vlan._add_interface(**_interface)\n return interface.update()\n \n except InterfaceNotFound:\n interfaces.update(vlan_id=vlan_id)\n interface = Layer3PhysicalInterface(**_interface)\n return self._engine.add_interface(interface)", "response": "This method adds a Layer 3 VLAN interface to the specified virtual resource."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nadd a cluster virtual interface to the NDI s or NDI s node.", "response": "def add_layer3_cluster_interface(self, interface_id, cluster_virtual=None,\n network_value=None, macaddress=None, nodes=None, cvi_mode='packetdispatch',\n zone_ref=None, comment=None, **kw):\n \"\"\"\n Add cluster virtual interface. A \"CVI\" interface is used as a VIP\n address for clustered engines. Providing 'nodes' will create the\n node specific interfaces. 
You can also add a cluster address with only\n a CVI, or only NDI's.\n \n Add CVI only:: \n \n engine.physical_interface.add_cluster_virtual_interface(\n interface_id=30,\n cluster_virtual='30.30.30.1',\n network_value='30.30.30.0/24', \n macaddress='02:02:02:02:02:06')\n \n Add NDI's only:: \n \n engine.physical_interface.add_cluster_virtual_interface( \n interface_id=30, \n nodes=nodes) \n \n Add CVI and NDI's::\n \n engine.physical_interface.add_cluster_virtual_interface(\n cluster_virtual='5.5.5.1',\n network_value='5.5.5.0/24',\n macaddress='02:03:03:03:03:03',\n nodes=[{'address':'5.5.5.2', 'network_value':'5.5.5.0/24', 'nodeid':1},\n {'address':'5.5.5.3', 'network_value':'5.5.5.0/24', 'nodeid':2}])\n\n .. versionchanged:: 0.6.1\n Renamed from add_cluster_virtual_interface\n \n :param str,int interface_id: physical interface identifier\n :param str cluster_virtual: CVI address (VIP) for this interface\n :param str network_value: network value for VIP; format: 10.10.10.0/24\n :param str macaddress: mandatory mac address if cluster_virtual and\n cluster_mask provided\n :param list nodes: list of dictionary items identifying cluster nodes\n :param str cvi_mode: packetdispatch is recommended setting\n :param str zone_ref: zone reference, can be name, href or Zone\n :param kw: key word arguments are valid NodeInterface sub-interface\n settings passed in during create time. 
For example, 'backup_mgt=True'\n to enable this interface as the management backup.\n :raises EngineCommandFailed: failure creating interface\n :return: None\n \"\"\"\n interfaces = [{'nodes': nodes if nodes else [],\n 'cluster_virtual': cluster_virtual, 'network_value': network_value}]\n try:\n interface = self._engine.interface.get(interface_id)\n interface._add_interface(interface_id, interfaces=interfaces)\n return interface.update()\n \n except InterfaceNotFound:\n \n interface = ClusterPhysicalInterface(\n engine=self._engine,\n interface_id=interface_id,\n interfaces=interfaces,\n cvi_mode=cvi_mode if macaddress else 'none',\n macaddress=macaddress,\n zone_ref=zone_ref, comment=comment, **kw)\n\n return self._engine.add_interface(interface)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_layer3_vlan_cluster_interface(self, interface_id, vlan_id,\n nodes=None, cluster_virtual=None, network_value=None, macaddress=None,\n cvi_mode='packetdispatch', zone_ref=None, comment=None, **kw):\n \"\"\"\n Add IP addresses to VLANs on a firewall cluster. The minimum params\n required are ``interface_id`` and ``vlan_id``.\n To create a VLAN interface with a CVI, specify ``cluster_virtual``,\n ``cluster_mask`` and ``macaddress``.\n\n To create a VLAN with only NDI, specify ``nodes`` parameter.\n\n Nodes data structure is expected to be in this format::\n\n nodes=[{'address':'5.5.5.2', 'network_value':'5.5.5.0/24', 'nodeid':1},\n {'address':'5.5.5.3', 'network_value':'5.5.5.0/24', 'nodeid':2}]\n\n :param str,int interface_id: interface id to assign VLAN.\n :param str,int vlan_id: vlan identifier\n :param list nodes: optional addresses for node interfaces (NDI's). For a cluster,\n each node will require an address specified using the nodes format.\n :param str cluster_virtual: cluster virtual ip address (optional). 
If specified, cluster_mask\n parameter is required\n :param str network_value: Specifies the network address, i.e. if cluster virtual is 1.1.1.1,\n cluster mask could be 1.1.1.0/24.\n :param str macaddress: (optional) if used will provide the mapping from node interfaces\n to participate in load balancing.\n :param str cvi_mode: cvi mode for cluster interface (default: packetdispatch)\n :param zone_ref: zone to assign, can be name, str href or Zone\n :param dict kw: keyword arguments are passed to top level of VLAN interface,\n not the base level physical interface. This is useful if you want to\n pass in a configuration that enables the DHCP server on a VLAN for example.\n :raises EngineCommandFailed: failure creating interface\n :return: None\n\n .. note::\n If the ``interface_id`` specified already exists, it is still possible\n to add additional VLANs and interface addresses.\n \"\"\"\n interfaces = {'nodes': nodes if nodes else [],\n 'cluster_virtual': cluster_virtual, 'network_value': network_value}\n interfaces.update(**kw)\n _interface = {'interface_id': interface_id, 'interfaces': [interfaces],\n 'macaddress': macaddress, 'cvi_mode': cvi_mode if macaddress else 'none',\n 'zone_ref': zone_ref, 'comment': comment}\n \n try:\n interface = self._engine.interface.get(interface_id)\n vlan = interface.vlan_interface.get(vlan_id)\n # Interface exists, so we need to update but check if VLAN already exists\n if vlan is None:\n interfaces.update(vlan_id=vlan_id)\n interface._add_interface(**_interface)\n else:\n for k in ('macaddress', 'cvi_mode'):\n _interface.pop(k)\n _interface.update(interface_id='{}.{}'.format(interface_id, vlan_id))\n vlan._add_interface(**_interface)\n \n return interface.update()\n \n except InterfaceNotFound:\n\n interfaces.update(vlan_id=vlan_id)\n interface = ClusterPhysicalInterface(**_interface)\n return self._engine.add_interface(interface)", "response": "Add a VLAN to a firewall cluster."} {"SOURCE": "codesearchnet", "instruction": 
"Can you generate a brief explanation for the following Python 3 code\ndef add_inline_interface(self, interface_id, second_interface_id,\n logical_interface_ref=None, vlan_id=None, second_vlan_id=None, zone_ref=None,\n second_zone_ref=None, failure_mode='normal', comment=None, **kw):\n \"\"\"\n Add an inline interface pair. This method is only for IPS or L2FW engine\n types.\n \n :param str interface_id: interface id of first interface\n :param str second_interface_id: second interface pair id\n :param str, href logical_interface_ref: logical interface by href or name\n :param str vlan_id: vlan ID for first interface in pair\n :param str second_vlan_id: vlan ID for second interface in pair\n :param str, href zone_ref: zone reference by name or href for first interface\n :param str, href second_zone_ref: zone reference by nae or href for second interface\n :param str failure_mode: normal or bypass\n :param str comment: optional comment\n :raises EngineCommandFailed: failure creating interface\n :return: None\n \"\"\"\n interface_spec = {'interface_id': interface_id, 'second_interface_id': second_interface_id,\n 'interface': kw.get('interface') if self._engine.type in ('single_fw', 'fw_cluster')\n else 'inline_interface'}\n \n _interface = {'logical_interface_ref': logical_interface_ref,\n 'failure_mode': failure_mode, 'zone_ref': zone_ref, 'second_zone_ref': second_zone_ref,\n 'comment': comment}\n \n vlan = {'vlan_id': vlan_id, 'second_vlan_id': second_vlan_id}\n \n try:\n inline_id = '{}-{}'.format(interface_id, second_interface_id)\n interface = self._engine.interface.get(inline_id)\n _interface.update(vlan)\n interface_spec.update(interfaces=[_interface])\n interface._add_interface(**interface_spec)\n return interface.update()\n \n except InterfaceNotFound:\n _interface.update(interfaces=[vlan])\n interface_spec.update(_interface)\n interface = Layer2PhysicalInterface(**interface_spec)\n return self._engine.add_interface(interface)", "response": "This method 
adds an inline interface pair. This method is only for IPS or L2FW engine."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef add_inline_ips_interface(self, interface_id, second_interface_id,\n logical_interface_ref=None, vlan_id=None, failure_mode='normal',\n zone_ref=None, second_zone_ref=None, comment=None):\n \"\"\"\n .. versionadded:: 0.5.6\n Using an inline interface on a layer 3 FW requires SMC and engine\n version >= 6.3.\n \n An inline IPS interface is a new interface type for Layer 3 NGFW\n engines version >=6.3. Traffic passing an Inline IPS interface will\n have a access rule default action of Allow. Inline IPS interfaces are\n bypass capable. When using bypass interfaces and NGFW is powered off,\n in an offline state or overloaded, traffic is allowed through without\n inspection regardless of the access rules.\n \n If the interface does not exist and a VLAN id is specified, the logical\n interface and zones will be applied to the top level physical interface.\n If adding VLANs to an existing inline ips pair, the logical and zones\n will be applied to the VLAN.\n \n :param str interface_id: first interface in the interface pair\n :param str second_interface_id: second interface in the interface pair\n :param str logical_interface_ref: logical interface name, href or LogicalInterface.\n If None, 'default_eth' logical interface will be used.\n :param str vlan_id: optional VLAN id for first interface pair\n :param str failure_mode: 'normal' or 'bypass' (default: normal).\n Bypass mode requires fail open interfaces.\n :param zone_ref: zone for first interface in pair, can be name,\n str href or Zone\n :param second_zone_ref: zone for second interface in pair, can be name,\n str href or Zone\n :param str comment: comment for this interface\n :raises EngineCommandFailed: failure creating interface\n :return: None\n \n .. 
note:: Only a single VLAN is supported on this inline pair type\n \"\"\"\n _interface = {'interface_id': interface_id, 'second_interface_id': second_interface_id,\n 'logical_interface_ref': logical_interface_ref, 'failure_mode': failure_mode,\n 'zone_ref': zone_ref, 'second_zone_ref': second_zone_ref, 'comment': comment,\n 'interface': 'inline_ips_interface', 'vlan_id': vlan_id}\n \n return self.add_inline_interface(**_interface)", "response": "Adds an inline IPS interface to the inline IPS cache."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_inline_l2fw_interface(self, interface_id, second_interface_id,\n logical_interface_ref=None, vlan_id=None, zone_ref=None,\n second_zone_ref=None, comment=None):\n \"\"\"\n .. versionadded:: 0.5.6\n Requires NGFW engine >=6.3 and layer 3 FW or cluster\n \n An inline L2 FW interface is a new interface type for Layer 3 NGFW\n engines version >=6.3. Traffic passing an Inline Layer 2 Firewall\n interface will have a default action in access rules of Discard.\n Layer 2 Firewall interfaces are not bypass capable, so when NGFW is\n powered off, in an offline state or overloaded, traffic is blocked on\n this interface.\n \n If the interface does not exist and a VLAN id is specified, the logical\n interface and zones will be applied to the top level physical interface.\n If adding VLANs to an existing inline ips pair, the logical and zones\n will be applied to the VLAN.\n \n :param str interface_id: interface id; '1-2', '3-4', etc\n :param str logical_interface_ref: logical interface name, href or LogicalInterface.\n If None, 'default_eth' logical interface will be used.\n :param str vlan_id: optional VLAN id for first interface pair\n :param str vlan_id2: optional VLAN id for second interface pair\n :param zone_ref_intf1: zone for first interface in pair, can be name,\n str href or Zone\n :param zone_ref_intf2: zone for second interface in pair, can be name,\n str href 
or Zone\n :raises EngineCommandFailed: failure creating interface\n :return: None\n \n .. note:: Only a single VLAN is supported on this inline pair type\n \"\"\"\n _interface = {'interface_id': interface_id, 'second_interface_id': second_interface_id,\n 'logical_interface_ref': logical_interface_ref, 'failure_mode': 'normal',\n 'zone_ref': zone_ref, 'second_zone_ref': second_zone_ref, 'comment': comment,\n 'interface': 'inline_l2fw_interface', 'vlan_id': vlan_id}\n \n return self.add_inline_interface(**_interface)", "response": "Adds an inline L2 FW interface to the local cache."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_dhcp_interface(self, interface_id, dynamic_index, zone_ref=None,\n vlan_id=None, comment=None):\n \"\"\"\n Add a DHCP interface on a single FW\n\n :param int interface_id: interface id\n :param int dynamic_index: index number for dhcp interface\n :param bool primary_mgt: whether to make this primary mgt\n :param str zone_ref: zone reference, can be name, href or Zone\n :raises EngineCommandFailed: failure creating interface\n :return: None\n\n See :class:`~DHCPInterface` for more information\n \"\"\"\n _interface = {'interface_id': interface_id, 'interfaces': [{'nodes': [\n {'dynamic': True, 'dynamic_index': dynamic_index}], 'vlan_id': vlan_id}],\n 'comment': comment, 'zone_ref': zone_ref}\n \n if 'single_fw' in self._engine.type:\n _interface.update(interface='single_node_interface')\n \n try:\n interface = self._engine.interface.get(interface_id)\n vlan = interface.vlan_interface.get(vlan_id)\n # Interface exists, so we need to update but check if VLAN already exists\n if vlan is None:\n interface._add_interface(**_interface)\n interface.update()\n \n except InterfaceNotFound:\n interface = Layer3PhysicalInterface(**_interface)\n return self._engine.add_interface(interface)", "response": "Adds a DHCP interface on a single FW."} {"SOURCE": "codesearchnet", 
"instruction": "Create a Python 3 function for\nadding a cluster interface to a master engine.", "response": "def add_cluster_interface_on_master_engine(self, interface_id, macaddress,\n nodes, zone_ref=None, vlan_id=None, comment=None):\n \"\"\"\n Add a cluster address specific to a master engine. Master engine\n clusters will not use \"CVI\" interfaces like normal layer 3 FW clusters,\n instead each node has a unique address and share a common macaddress.\n Adding multiple addresses to an interface is not supported with this\n method.\n\n :param str,int interface_id: interface id to use\n :param str macaddress: mac address to use on interface\n :param list nodes: interface node list\n :param bool is_mgmt: is this a management interface\n :param zone_ref: zone to use, by name, str href or Zone\n :param vlan_id: optional VLAN id if this should be a VLAN interface\n :raises EngineCommandFailed: failure creating interface\n :return: None\n \"\"\"\n _interface = {'interface_id': interface_id, 'macaddress': macaddress,\n 'interfaces': [{'nodes': nodes if nodes else [], 'vlan_id': vlan_id}],\n 'zone_ref': zone_ref, 'comment': comment}\n \n try:\n interface = self._engine.interface.get(interface_id)\n vlan = interface.vlan_interface.get(vlan_id)\n # Interface exists, so we need to update but check if VLAN already exists\n if vlan is None:\n interface._add_interface(**_interface)\n interface.update()\n \n except InterfaceNotFound:\n interface = Layer3PhysicalInterface(**_interface)\n return self._engine.add_interface(interface)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_tunnel_interface(self, interface_id, address, network_value,\n zone_ref=None, comment=None):\n \"\"\"\n Creates a tunnel interface for a virtual engine.\n\n :param str,int interface_id: the tunnel id for the interface, used as nicid also\n :param str address: ip address of interface\n :param str network_value: network cidr for 
interface; format: 1.1.1.0/24\n :param str zone_ref: zone reference for interface can be name, href or Zone\n :raises EngineCommandFailed: failure during creation\n :return: None\n \"\"\"\n interfaces = [{'nodes': [{'address': address, 'network_value': network_value}]}]\n interface = {'interface_id': interface_id, 'interfaces': interfaces,\n 'zone_ref': zone_ref, 'comment': comment}\n tunnel_interface = TunnelInterface(**interface)\n self._engine.add_interface(tunnel_interface)", "response": "Creates a tunnel interface for a virtual engine."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef load_from_environ():\n try:\n from urllib.parse import urlparse\n except ImportError:\n from urlparse import urlparse\n \n smc_address = os.environ.get('SMC_ADDRESS', '')\n smc_apikey = os.environ.get('SMC_API_KEY', '')\n smc_timeout = os.environ.get('SMC_TIMEOUT', None)\n api_version = os.environ.get('SMC_API_VERSION', None)\n domain = os.environ.get('SMC_DOMAIN', None)\n smc_extra_args = os.environ.get('SMC_EXTRA_ARGS', '')\n \n if not smc_apikey or not smc_address:\n raise ConfigLoadError(\n 'If loading from environment variables, you must provide values '\n 'SMC_ADDRESS and SMC_API_KEY.')\n \n config_dict = {}\n \n if smc_extra_args:\n try:\n args_as_dict = json.loads(smc_extra_args)\n config_dict.update(args_as_dict)\n except ValueError:\n pass\n \n config_dict.update(smc_apikey=smc_apikey)\n config_dict.update(timeout=smc_timeout)\n config_dict.update(api_version=api_version)\n config_dict.update(domain=domain)\n \n url = urlparse(smc_address)\n \n config_dict.update(smc_address=url.hostname)\n \n port = url.port\n if not port:\n port = '8082'\n \n config_dict.update(smc_port=port)\n \n if url.scheme == 'https':\n config_dict.update(smc_ssl=True)\n ssl_cert = os.environ.get('SMC_CLIENT_CERT', None)\n if ssl_cert: # Enable cert validation\n config_dict.update(verify_ssl=True)\n 
config_dict.update(ssl_cert_file=ssl_cert)\n # Else http\n \n return transform_login(config_dict)", "response": "Loads the SMC URL API KEY and SSL certificate from the environment variables."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef load_from_file(alt_filepath=None):\n required = ['smc_address', 'smc_apikey']\n bool_type = ['smc_ssl', 'verify_ssl', 'retry_on_busy'] # boolean option flag\n option_names = ['smc_port',\n 'api_version',\n 'smc_ssl',\n 'verify_ssl',\n 'ssl_cert_file',\n 'retry_on_busy',\n 'timeout',\n 'domain']\n\n parser = configparser.SafeConfigParser(defaults={\n 'smc_port': '8082',\n 'api_version': None,\n 'smc_ssl': 'false',\n 'verify_ssl': 'false',\n 'ssl_cert_file': None,\n 'timeout': None,\n 'domain': None},\n allow_no_value=True)\n\n path = '~/.smcrc'\n\n if alt_filepath is not None:\n full_path = alt_filepath\n else:\n ex_path = os.path.expandvars(path)\n full_path = os.path.expanduser(ex_path)\n\n section = 'smc'\n config_dict = {}\n try:\n with io.open(full_path, 'rt', encoding='UTF-8') as f:\n parser.readfp(f)\n\n # Get required settings, if not found it will raise err\n for name in required:\n config_dict[name] = parser.get(section, name)\n\n for name in option_names:\n if parser.has_option(section, name):\n if name in bool_type:\n config_dict[name] = parser.getboolean(section, name)\n else: # str\n config_dict[name] = parser.get(section, name)\n\n except configparser.NoOptionError as e:\n raise ConfigLoadError('Failed loading credentials from configuration '\n 'file: {}; {}'.format(path, e))\n except (configparser.NoSectionError, configparser.MissingSectionHeaderError) as e:\n raise ConfigLoadError('Failed loading credential file from: {}, check the '\n 'path and verify contents are correct.'.format(path, e))\n except IOError as e:\n raise ConfigLoadError(\n 'Failed loading configuration file: {}'.format(e))\n\n return transform_login(config_dict)", "response": "Load the SMC 
configuration file from the user home directory."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nparse the login data as dict. Called from load_from_file and can be used to collect information from other modules.", "response": "def transform_login(config):\n \"\"\"\n Parse login data as dict. Called from load_from_file and\n also can be used when collecting information from other\n sources as well.\n\n :param dict data: data representing the valid key/value pairs\n from smcrc\n :return: dict dict of settings that can be sent into session.login\n \"\"\"\n verify = True\n if config.pop('smc_ssl', None):\n scheme = 'https'\n\n verify = config.pop('ssl_cert_file', None)\n if config.pop('verify_ssl', None): \n # Get cert path to verify\n if not verify: # Setting omitted or already False\n verify = False\n else:\n verify = False\n else:\n scheme = 'http'\n config.pop('verify_ssl', None)\n config.pop('ssl_cert_file', None)\n verify = False\n\n transformed = {}\n url = '{}://{}:{}'.format(\n scheme,\n config.pop('smc_address', None),\n config.pop('smc_port', None))\n\n timeout = config.pop('timeout', None)\n if timeout:\n try:\n timeout = int(timeout)\n except ValueError:\n timeout = None\n\n api_version = config.pop('api_version', None)\n if api_version:\n try:\n float(api_version)\n except ValueError:\n api_version = None\n \n transformed.update(\n url=url,\n api_key=config.pop('smc_apikey', None),\n api_version=api_version,\n verify=verify,\n timeout=timeout,\n domain=config.pop('domain', None)) \n \n if config:\n transformed.update(kwargs=config) # Any remaining args\n \n return transformed"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef within_ipv4_network(self, field, values):\n v = ['ipv4_net(\"%s\")' % net for net in values]\n self.update_filter('{} IN union({})'.format(field, ','.join(v)))", "response": "This filter adds specified networks to a filter to check\n for 
inclusion."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef within_ipv4_range(self, field, values):\n v = ['ipv4(\"%s\")' % part\n for iprange in values\n for part in iprange.split('-')]\n \n self.update_filter('{} IN range({})'.format(field, ','.join(v)))", "response": "Add an IN range network filter for relevant address fields."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef exact_ipv4_match(self, field, values):\n if len(values) > 1:\n v = ['ipv4(\"%s\")' % ip for ip in values]\n value='{} IN union({})'.format(field, ','.join(v))\n else:\n value='{} == ipv4(\"{}\")'.format(field, values[0])\n \n self.update_filter(value)", "response": "An exact IPv4 match on relevant address fields."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ndownloading the current version of the package or engine.", "response": "def download(self, timeout=5, wait_for_finish=False):\n \"\"\"\n Download Package or Engine Update\n\n :param int timeout: timeout between queries\n :raises TaskRunFailed: failure during task status\n :rtype: TaskOperationPoller\n \"\"\"\n return Task.execute(self, 'download', timeout=timeout,\n wait_for_finish=wait_for_finish)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nactivates this package on the SMC OID.", "response": "def activate(self, resource=None, timeout=3, wait_for_finish=False):\n \"\"\"\n Activate this package on the SMC\n\n :param list resource: node href's to activate on. 
Resource is only\n required for software upgrades\n :param int timeout: timeout between queries\n :raises TaskRunFailed: failure during activation (downloading, etc)\n :rtype: TaskOperationPoller\n \"\"\"\n return Task.execute(self, 'activate', json={'resource': resource},\n timeout=timeout, wait_for_finish=wait_for_finish)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef all(self):\n attribute = self._attr[0]\n return self.profile.data.get(attribute, [])", "response": "Return all entries in the cache"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, name, sandbox_data_center, portal_username=None, comment=None):\n json = {\n 'name': name,\n 'sandbox_data_center': element_resolver(sandbox_data_center),\n 'portal_username': portal_username if portal_username else '',\n 'comment': comment}\n return ElementCreator(cls, json)", "response": "Create a Sandbox Service element"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget all available API versions for this SMC.", "response": "def available_api_versions(base_url, timeout=10, verify=True):\n \"\"\"\n Get all available API versions for this SMC\n\n :return version numbers\n :rtype: list\n \"\"\"\n try:\n r = requests.get('%s/api' % base_url, timeout=timeout,\n verify=verify) # no session required\n \n if r.status_code == 200:\n j = json.loads(r.text)\n versions = []\n for version in j['version']:\n versions.append(version['rel'])\n return versions\n \n raise SMCConnectionError(\n 'Invalid status received while getting entry points from SMC. '\n 'Status code received %s. 
Reason: %s' % (r.status_code, r.reason))\n\n except requests.exceptions.RequestException as e:\n raise SMCConnectionError(e)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_api_version(base_url, api_version=None, timeout=10, verify=True):\n versions = available_api_versions(base_url, timeout, verify)\n \n newest_version = max([float(i) for i in versions])\n if api_version is None: # Use latest\n api_version = newest_version\n else:\n if api_version not in versions:\n api_version = newest_version\n \n return api_version", "response": "Get the API version specified or resolve the latest version"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a new session manager for the specified list of Session objects.", "response": "def create(cls, sessions=None):\n \"\"\"\n A session manager will be mounted to the SMCRequest class through\n this classmethod. If there is already an existing SessionManager,\n that is returned instead.\n \n :param list sessions: a list of Session objects\n :rtype: SessionManager\n \"\"\"\n manager = getattr(SMCRequest, '_session_manager')\n if manager is not None:\n return manager\n manager = SessionManager(sessions)\n manager.mount()\n return manager"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_default_session(self):\n if self._sessions:\n return self.get_session(next(iter(self._sessions)))\n return self.get_session()", "response": "Returns the default session for the current node."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef manager(self):\n manager = SMCRequest._session_manager if not self._manager \\\n else self._manager\n if not manager:\n raise SessionManagerNotFound('A session manager was not found. '\n 'This is an initialization error binding the SessionManager. 
')\n return manager", "response": "Returns the session manager for this session\n \n "} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef session_id(self):\n return None if not self.session or 'JSESSIONID' not in \\\n self.session.cookies else 'JSESSIONID={}'.format(\n self.session.cookies['JSESSIONID'])", "response": "Returns the session ID in header type format."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the administrator name for this session. Can be None", "response": "def name(self):\n \"\"\"\n Return the administrator name for this session. Can be None if\n the session has not yet been established.\n \n .. note:: The administrator name was introduced in SMC version\n 6.4. Previous versions will show the unique session\n identifier for this session.\n \n :rtype: str\n \"\"\"\n if self.session: # protect cached property from being set before session\n try:\n return self.current_user.name\n except AttributeError: # TODO: Catch ConnectionError? No session\n pass\n return hash(self)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef current_user(self):\n if self.session:\n try:\n response = self.session.get(self.entry_points.get('current_user'))\n if response.status_code in (200, 201):\n admin_href=response.json().get('value')\n request = SMCRequest(href=admin_href)\n smcresult = send_request(self, 'get', request)\n return ElementFactory(admin_href, smcresult)\n except UnsupportedEntryPoint:\n pass", "response": "Returns the currently logged on API Client user element. Requires SMC version >= 6. 4\n Requires SMC version >= 6. 4\n Requires SMC version >= 6. 
4\n Returns None"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef login(self, url=None, api_key=None, login=None, pwd=None,\n api_version=None, timeout=None, verify=True, alt_filepath=None,\n domain=None, **kwargs):\n \"\"\"\n Login to SMC API and retrieve a valid session.\n Sessions use a pool connection manager to provide dynamic scalability\n during times of increased load. Each session is managed by a global\n session manager making it possible to have more than one session per\n interpreter.\n\n An example login and logout session::\n\n from smc import session\n session.login(url='http://1.1.1.1:8082', api_key='SomeSMCG3ener@t3dPwd')\n .....do stuff.....\n session.logout()\n\n :param str url: ip of SMC management server\n :param str api_key: API key created for api client in SMC\n :param str login: Administrator user in SMC that has privilege to SMC API.\n :param str pwd: Password for user login.\n :param api_version (optional): specify api version\n :param int timeout: (optional): specify a timeout for initial connect; (default 10)\n :param str|boolean verify: verify SSL connections using cert (default: verify=True)\n You can pass verify the path to a CA_BUNDLE file or directory with certificates\n of trusted CAs\n :param str alt_filepath: If using .smcrc, alternate path+filename\n :param str domain: domain to log in to. If domains are not configured, this\n field will be ignored and api client logged in to 'Shared Domain'.\n :param bool retry_on_busy: pass as kwarg with boolean if you want to add retries\n if the SMC returns HTTP 503 error during operation. 
You can also optionally customize\n this behavior and call :meth:`.set_retry_on_busy`\n :raises ConfigLoadError: loading cfg from ~.smcrc fails\n\n For SSL connections, you can disable validation of the SMC SSL certificate by setting\n verify=False, however this is not a recommended practice.\n\n If you want to use the SSL certificate generated and used by the SMC API server\n for validation, set verify='path_to_my_dot_pem'. It is also recommended that your\n certificate has subjectAltName defined per RFC 2818\n \n If SSL warnings are thrown in debug output, see:\n https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings\n\n Logout should be called to remove the session immediately from the\n SMC server.\n \n .. note:: As of SMC 6.4 it is possible to give a standard Administrative user\n access to the SMC API. It is still possible to use an API Client by\n providing the api_key in the login call.\n \"\"\"\n params = {}\n if not url or (not api_key and not (login and pwd)):\n try: # First try load from file\n params = load_from_file(alt_filepath) if alt_filepath\\\n is not None else load_from_file()\n logger.debug('Read config data from file: %s', params)\n except ConfigLoadError:\n # Last ditch effort, try to load from environment\n params = load_from_environ()\n logger.debug('Read config data from environ: %s', params)\n \n params = params or dict(\n url=url,\n api_key=api_key,\n login=login,\n pwd=pwd,\n api_version=api_version,\n verify=verify,\n timeout=timeout,\n domain=domain,\n kwargs=kwargs or {})\n \n # Check to see this session is already logged in. If so, return.\n # The session object represents a single connection. 
Log out to\n # re-use the same session object or get_session() from the\n # SessionManager to track multiple sessions.\n if self.manager and (self.session and self in self.manager):\n logger.info('An attempt to log in occurred when a session already '\n 'exists, bypassing login for session: %s' % self)\n return\n \n self._params = {k: v for k, v in params.items() if v is not None}\n \n verify_ssl = self._params.get('verify', True)\n \n # Determine and set the API version we will use.\n self._params.update(\n api_version=get_api_version(\n self.url, self.api_version, self.timeout, verify_ssl))\n \n extra_args = self._params.get('kwargs', {})\n \n # Retries configured\n retry_on_busy = extra_args.pop('retry_on_busy', False)\n \n request = self._build_auth_request(verify_ssl, **extra_args)\n \n # This will raise if session login fails...\n self._session = self._get_session(request)\n self.session.verify = verify_ssl\n\n if retry_on_busy:\n self.set_retry_on_busy()\n \n # Load entry points\n load_entry_points(self)\n \n # Put session in manager\n self.manager._register(self)\n \n logger.debug('Login succeeded for admin: %s in domain: %s, session: %s',\n self.name, self.domain, self.session_id)", "response": "Login to the SMC API and retrieve a valid session."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _build_auth_request(self, verify=False, **kwargs):\n json = {\n 'domain': self.domain\n }\n \n credential = self.credential\n params = {}\n \n if credential.provider_name.startswith('lms'):\n params = dict(\n login=credential._login,\n pwd=credential._pwd)\n else:\n json.update(authenticationkey=credential._api_key)\n \n if kwargs:\n json.update(**kwargs)\n self._extra_args.update(**kwargs) # Store in case we need to rebuild later\n \n request = dict(\n url=self.credential.get_provider_entry_point(self.url, self.api_version),\n json=json,\n params=params,\n headers={'content-type': 'application/json'},\n 
verify=verify)\n \n return request", "response": "Builds the authentication request to SMC\nAttributeNames"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nauthenticate the request dict and return the session object", "response": "def _get_session(self, request):\n \"\"\"\n Authenticate the request dict\n \n :param dict request: request dict built from user input\n :raises SMCConnectionError: failure to connect\n :return: python requests session\n :rtype: requests.Session\n \"\"\"\n _session = requests.session() # empty session\n \n response = _session.post(**request)\n logger.info('Using SMC API version: %s', self.api_version)\n \n if response.status_code != 200:\n raise SMCConnectionError(\n 'Login failed, HTTP status code: %s and reason: %s' % (\n response.status_code, response.reason))\n return _session"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef logout(self):\n if not self.session:\n self.manager._deregister(self)\n return\n try:\n r = self.session.put(self.entry_points.get('logout'))\n if r.status_code == 204:\n logger.info('Logged out admin: %s of domain: %s successfully',\n self.name, self.domain)\n else:\n logger.error('Logout status was unexpected. 
Received response '\n 'with status code: %s', (r.status_code))\n\n except requests.exceptions.SSLError as e:\n logger.error('SSL exception thrown during logout: %s', e)\n except requests.exceptions.ConnectionError as e:\n logger.error('Connection error on logout: %s', e)\n finally:\n self.entry_points.clear()\n self.manager._deregister(self)\n self._session = None\n try:\n delattr(self, 'current_user')\n except AttributeError:\n pass\n \n logger.debug('Call counters: %s' % counters)", "response": "Logout session from SMC\n "} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef refresh(self):\n if self.session and self.session_id: # Did session timeout?\n logger.info('Session timed out, will try obtaining a new session using '\n 'previously saved credential information.')\n self.logout() # Force log out session just in case\n return self.login(**self.copy())\n raise SMCConnectionError('Session expired and attempted refresh failed.')", "response": "Refresh session on 401."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef switch_domain(self, domain):\n if self.domain != domain:\n if self in self.manager: # Exit current domain\n self.logout()\n logger.info('Switching to domain: %r and creating new session', domain)\n params = self.copy()\n params.update(domain=domain)\n self.login(**params)", "response": "Switches the user to a specific domain."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nmount a custom retry object on the current session that allows service level retries when the SMC replys with a Service Unavailable message.", "response": "def set_retry_on_busy(self, total=5, backoff_factor=0.1, status_forcelist=None, **kwargs):\n \"\"\"\n Mount a custom retry object on the current session that allows service level\n retries when the SMC might reply with a Service Unavailable (503) message.\n This can be possible in 
larger environments with higher database activity.\n You can all this on the existing session, or provide as a dict to the login\n constructor.\n \n :param int total: total retries\n :param float backoff_factor: when to retry\n :param list status_forcelist: list of HTTP error codes to retry on\n :param list method_whitelist: list of methods to apply retries for, GET, POST and\n PUT by default\n :return: None\n \"\"\"\n if self.session:\n from requests.adapters import HTTPAdapter\n from requests.packages.urllib3.util.retry import Retry\n \n method_whitelist = kwargs.pop('method_whitelist', []) or ['GET', 'POST', 'PUT']\n status_forcelist = frozenset(status_forcelist) if status_forcelist else frozenset([503])\n retry = Retry(\n total=total,\n backoff_factor=backoff_factor,\n status_forcelist=status_forcelist,\n method_whitelist=method_whitelist)\n \n for proto_str in ('http://', 'https://'):\n self.session.mount(proto_str, HTTPAdapter(max_retries=retry))\n logger.debug('Mounting retry object to HTTP session: %s' % retry)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _get_log_schema(self):\n if self.session and self.session_id:\n schema = '{}/{}/monitoring/log/schemas'.format(self.url, self.api_version)\n \n response = self.session.get(\n url=schema,\n headers={'cookie': self.session_id,\n 'content-type': 'application/json'})\n\n if response.status_code in (200, 201):\n return response.json()", "response": "Get the log schema for this SMC version."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef has_credentials(self):\n return all([\n getattr(self, '_%s' % field, None) is not None\n for field in self.CredentialMap.get(self.provider_name)])", "response": "Returns True if this session has valid credentials."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nadd a single entry to the field.", 
"response": "def add(self, data):\n \"\"\"\n Add a single entry to field.\n\n Entries can be added to a rule using the href of the element\n or by loading the element directly. Element should be of type\n :py:mod:`smc.elements.network`.\n After modifying rule, call :py:meth:`~.save`.\n\n Example of adding entry by element::\n\n policy = FirewallPolicy('policy')\n for rule in policy.fw_ipv4_nat_rules.all():\n if rule.name == 'therule':\n rule.sources.add(Host('myhost'))\n rule.save()\n\n .. note:: If submitting type Element and the element cannot be\n found, it will be skipped.\n\n :param data: entry to add\n :type data: Element or str\n \"\"\"\n if self.is_none or self.is_any:\n self.clear()\n self.data[self.typeof] = []\n\n try:\n self.get(self.typeof).append(element_resolver(data))\n except ElementNotFound:\n pass"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding multiple entries to the field.", "response": "def add_many(self, data):\n \"\"\"\n Add multiple entries to field. Entries should be list format.\n Entries can be of types relavant to the field type. For example,\n for source and destination fields, elements may be of type \n :py:mod:`smc.elements.network` or be the elements direct href,\n or a combination of both.\n\n Add several entries to existing rule::\n\n policy = FirewallPolicy('policy')\n for rule in policy.fw_ipv4_nat_rules.all():\n if rule.name == 'therule':\n rule.sources.add_many([Host('myhost'), \n 'http://1.1.1.1/hosts/12345'])\n rule.save()\n\n :param list data: list of sources\n\n .. note:: If submitting type Element and the element cannot be\n found, it will be skipped.\n \"\"\"\n assert isinstance(data, list), \"Incorrect format. 
Expecting list.\"\n if self.is_none or self.is_any:\n self.clear()\n self.data[self.typeof] = []\n\n data = element_resolver(data, do_raise=False)\n self.data[self.typeof] = data"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef update_field(self, elements):\n changed = False\n if isinstance(elements, list):\n if self.is_any or self.is_none:\n self.add_many(elements)\n changed = True\n else:\n _elements = element_resolver(elements, do_raise=False)\n if set(self.all_as_href()) ^ set(_elements):\n self.data[self.typeof] = _elements\n changed = True\n \n if changed and self.rule and (isinstance(self, (Source, Destination)) and \\\n self.rule.typeof in ('fw_ipv4_nat_rule', 'fw_ipv6_nat_rule')):\n # Modify NAT cell if necessary\n self.rule._update_nat_field(self)\n \n return changed", "response": "Update the field with a list of provided elements but only if the values are different."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef all_as_href(self):\n if not self.is_any and not self.is_none:\n return [element for element in self.get(self.typeof)]\n return []", "response": "Return all elements without resolving to smc. elements. network or smc. elements. service. 
Just raw representation as href."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef all(self):\n if not self.is_any and not self.is_none:\n return [Element.from_href(href)\n for href in self.get(self.typeof)]\n return []", "response": "Return all elements of this rule."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a match expression with the specified name user network element domain name and zone.", "response": "def create(cls, name, user=None, network_element=None, domain_name=None,\n zone=None, executable=None):\n \"\"\"\n Create a match expression\n\n :param str name: name of match expression\n :param str user: name of user or user group\n :param Element network_element: valid network element type, i.e. host, network, etc\n :param DomainName domain_name: domain name network element\n :param Zone zone: zone to use\n :param str executable: name of executable or group\n :raises ElementNotFound: specified object does not exist\n :return: instance with meta\n :rtype: MatchExpression\n \"\"\"\n ref_list = []\n if user:\n pass\n if network_element:\n ref_list.append(network_element.href)\n if domain_name:\n ref_list.append(domain_name.href)\n if zone:\n ref_list.append(zone.href)\n if executable:\n pass\n\n json = {'name': name,\n 'ref': ref_list}\n\n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets http proxy settings for Antivirus updates.", "response": "def http_proxy(self, proxy, proxy_port, user=None, password=None):\n \"\"\"\n .. 
versionadded:: 0.5.7\n Requires SMC and engine version >= 6.4\n \n Set http proxy settings for Antivirus updates.\n \n :param str proxy: proxy IP address\n :param str,int proxy_port: proxy port\n :param str user: optional user for authentication\n \"\"\"\n self.update(\n antivirus_http_proxy=proxy,\n antivirus_proxy_port=proxy_port,\n antivirus_proxy_user=user if user else '',\n antivirus_proxy_password=password if password else '',\n antivirus_http_proxy_enabled=True)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nenable antivirus on the engine.", "response": "def enable(self):\n \"\"\"\n Enable antivirus on the engine\n \"\"\"\n self.update(antivirus_enabled=True,\n virus_mirror='update.nai.com/Products/CommonUpdater' if \\\n not self.get('virus_mirror') else self.virus_mirror,\n antivirus_update_time=self.antivirus_update_time if \\\n self.get('antivirus_update_time') else 21600000)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nenabling URL Filtering on the engine.", "response": "def enable(self, http_proxy=None):\n \"\"\"\n Enable URL Filtering on the engine. 
If proxy servers\n are needed, provide a list of HTTPProxy elements.\n \n :param http_proxy: list of proxies for GTI connections\n :type http_proxy: list(str,HttpProxy)\n \"\"\"\n self.update(ts_enabled=True, http_proxy=get_proxy(http_proxy))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn True if the user is in sandbox mode False otherwise.", "response": "def status(self):\n \"\"\"\n Status of sandbox on this engine\n \n :rtype: bool\n \"\"\"\n if 'sandbox_type' in self.engine.data:\n if self.engine.sandbox_type == 'none':\n return False\n return True\n return False"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef disable(self):\n self.engine.data.update(sandbox_type='none')\n self.pop('cloud_sandbox_settings', None) #pre-6.3\n self.pop('sandbox_settings', None)", "response": "Disable the sandbox on this engine."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nenabling sandbox on this engine.", "response": "def enable(self, license_key, license_token,\n sandbox_type='cloud_sandbox', service='Automatic',\n http_proxy=None, sandbox_data_center='Automatic'):\n \"\"\"\n Enable sandbox on this engine. Provide a valid license key\n and license token obtained from your engine licensing.\n Requires SMC version >= 6.3.\n \n .. note:: Cloud sandbox is a feature that requires an engine license.\n\n :param str license_key: license key for specific engine\n :param str license_token: license token for specific engine\n :param str sandbox_type: 'local_sandbox' or 'cloud_sandbox'\n :param str,SandboxService service: a sandbox service element from SMC. The service\n defines which location the engine is in and which data centers to use.\n The default is to use the 'US Data Centers' profile if undefined.\n :param str,SandboxDataCenter sandbox_data_center: sandbox data center to use\n if the service specified does not exist. 
Requires SMC >= 6.4.3\n :return: None\n \"\"\"\n service = element_resolver(SandboxService(service), do_raise=False) or \\\n element_resolver(SandboxService.create(name=service,\n sandbox_data_center=SandboxDataCenter(sandbox_data_center)))\n \n self.update(sandbox_license_key=license_key,\n sandbox_license_token=license_token,\n sandbox_service=service,\n http_proxy=get_proxy(http_proxy))\n \n self.engine.data.setdefault('sandbox_settings', {}).update(self.data)\n self.engine.data.update(sandbox_type=sandbox_type)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_tls_credential(self, credentials):\n for cred in credentials:\n href = element_resolver(cred)\n if href not in self.engine.server_credential:\n self.engine.server_credential.append(href)", "response": "Adds a list of TLSServerCredential to this engine."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nremoves a list of TLSServerCredentials from the engine.", "response": "def remove_tls_credential(self, credentials):\n \"\"\" \n Remove a list of TLSServerCredentials on this engine.\n \n :param credentials: list of credentials to remove from the\n engine\n :type credentials: list(str,TLSServerCredential)\n :return: None\n \"\"\"\n for cred in credentials:\n href = element_resolver(cred)\n if href in self.engine.server_credential:\n self.engine.server_credential.remove(href)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef policy_validation_settings(**kwargs):\n validation_settings = {\n 'configuration_validation_for_alert_chain': False,\n 'duplicate_rule_check_settings': False,\n 'empty_rule_check_settings': True,\n 'empty_rule_check_settings_for_alert': False,\n 'general_check_settings': True,\n 'nat_modification_check_settings': True,\n 'non_supported_feature': True,\n 'routing_modification_check': False,\n 
'unreachable_rule_check_settings': False,\n 'vpn_validation_check_settings': True}\n \n for key, value in kwargs.items():\n validation_settings[key] = value\n \n return {'validation_settings': validation_settings}", "response": "Sets the policy validation settings. This is used when policy based\n tasks are created and validate_policy is set to True."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef log_target_types(all_logs=False, **kwargs):\n log_types = {\n 'for_alert_event_log': False,\n 'for_alert_log': False,\n 'for_audit_log': False,\n 'for_fw_log': False,\n 'for_ips_log': False,\n 'for_ips_recording_log': False,\n 'for_l2fw_log': False,\n 'for_third_party_log': False}\n \n if all_logs:\n for key in log_types.keys():\n log_types[key] = True\n else:\n for key, value in kwargs.items():\n log_types[key] = value\n \n return log_types", "response": "Return a dictionary of log target types that can be used in the log tasks."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds a schedule to an existing task.", "response": "def add_schedule(self, name, activation_date, day_period='one_time',\n final_action='ALERT_FAILURE', activated=True,\n minute_period='one_time', day_mask=None,\n repeat_until_date=None, comment=None):\n \"\"\"\n Add a schedule to an existing task.\n \n :param str name: name for this schedule\n :param int activation_date: when to start this task. Activation date\n should be a UTC time represented in milliseconds.\n :param str day_period: when this task should be run. Valid options:\n 'one_time', 'daily', 'weekly', 'monthly', 'yearly'. 
If 'daily' is\n selected, you can also provide a value for 'minute_period'.\n (default: 'one_time')\n :param str minute_period: only required if day_period is set to 'daily'.\n Valid options: 'each_quarter' (15 min), 'each_half' (30 minutes), or\n 'hourly', 'one_time' (default: 'one_time')\n :param int day_mask: If the task day_period=weekly, then specify the day\n or days for repeating. Day masks are: sun=1, mon=2, tue=4, wed=8,\n thu=16, fri=32, sat=64. To repeat for instance every Monday, Wednesday\n and Friday, the value must be 2 + 8 + 32 = 42\n :param str final_action: what type of action to perform after the\n scheduled task runs. Options are: 'ALERT_FAILURE', 'ALERT', or\n 'NO_ACTION' (default: ALERT_FAILURE)\n :param bool activated: whether to activate the schedule (default: True)\n :param str repeat_until_date: if this is anything but a one time task run,\n you can specify the date when this task should end. The format is the\n same as the `activation_date` param.\n :param str comment: optional comment\n :raises ActionCommandFailed: failed adding schedule\n :return: None\n \"\"\"\n json = {\n 'name': name,\n 'activation_date': activation_date,\n 'day_period': day_period,\n 'day_mask': day_mask,\n 'activated': activated,\n 'final_action': final_action,\n 'minute_period': minute_period,\n 'repeat_until_date': repeat_until_date if repeat_until_date else None,\n 'comment': comment}\n \n if 'daily' in day_period:\n minute_period = minute_period if minute_period != 'one_time' else 'hourly'\n json['minute_period'] = minute_period\n \n return self.make_request(\n ActionCommandFailed,\n method='create',\n resource='task_schedule',\n json=json)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncreating a refresh policy task associated with specific engines.", "response": "def create(cls, name, engines, comment=None,\n validate_policy=True, **kwargs):\n \"\"\"\n Create a refresh policy task associated with specific\n engines. 
A policy refresh task does not require a policy\n be specified. The policy used in the refresh will be the\n policy already assigned to the engine.\n \n :param str name: name of this task\n :param engines: list of Engines for the task\n :type engines: list(Engine)\n :param str comment: optional comment\n :param bool validate_policy: validate the policy before upload.\n If set to true, validation kwargs can also be provided\n if customization is required, otherwise default validation settings\n are used.\n :param kwargs: see :func:`~policy_validation_settings` for keyword\n arguments and default values.\n :raises ElementNotFound: engine specified does not exist\n :raises CreateElementFailed: failure to create the task\n :return: the task\n :rtype: RefreshPolicyTask\n \"\"\"\n json = {\n 'resources': [engine.href for engine in engines],\n 'name': name,\n 'comment': comment}\n \n if validate_policy:\n json.update(policy_validation_settings(**kwargs))\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create(cls, name, engines, policy=None, comment=None, **kwargs):\n json = {\n 'name': name,\n 'resources': [eng.href for eng in engines],\n 'policy': policy.href if policy is not None else policy,\n 'comment': comment}\n \n if kwargs:\n json.update(policy_validation_settings(**kwargs))\n \n return ElementCreator(cls, json)", "response": "Create a new validate policy task."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a refresh master engine policy task.", "response": "def create(cls, name, master_engines, comment=None):\n \"\"\"\n Create a refresh task for master engines. 
\n \n :param str name: name of task\n :param master_engines: list of master engines for this task\n :type master_engines: list(MasterEngine)\n :param str comment: optional comment\n :raises CreateElementFailed: failed to create the task\n :return: the task\n :rtype: RefreshMasterEnginePolicyTask\n \"\"\"\n json = {\n 'name': name,\n 'comment': comment,\n 'resources': [eng.href for eng in master_engines\n if isinstance(eng, MasterEngine)]}\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a new delete log task.", "response": "def create(cls, name, servers=None, time_range='yesterday', all_logs=False,\n filter_for_delete=None, comment=None, **kwargs):\n \"\"\"\n Create a new delete log task. Provide True to all_logs to delete\n all log types. Otherwise provide kwargs to specify each log by\n type of interest.\n \n :param str name: name for this task\n :param servers: servers to back up. Servers must be instances of\n management servers or log servers. If no value is provided, all\n servers are backed up.\n :type servers: list(ManagementServer or LogServer)\n :param str time_range: specify a time range for the deletion. Valid\n options are 'yesterday', 'last_full_week_sun_sat',\n 'last_full_week_mon_sun', 'last_full_month' (default 'yesterday')\n :param FilterExpression filter_for_delete: optional filter for deleting.\n (default: FilterExpression('Match All')\n :param bool all_logs: if True, all log types will be deleted. 
If this\n is True, kwargs are ignored (default: False)\n :param kwargs: see :func:`~log_target_types` for keyword\n arguments and default values.\n :raises ElementNotFound: specified servers were not found\n :raises CreateElementFailed: failure to create the task\n :return: the task\n :rtype: DeleteLogTask\n \"\"\"\n if not servers:\n servers = [svr.href for svr in ManagementServer.objects.all()]\n servers.extend([svr.href for svr in LogServer.objects.all()])\n else:\n servers = [svr.href for svr in servers]\n \n filter_for_delete = filter_for_delete.href if filter_for_delete else \\\n FilterExpression('Match All').href\n \n json = {\n 'name': name,\n 'resources': servers,\n 'time_limit_type': time_range,\n 'start_time': 0,\n 'end_time': 0,\n 'file_format': 'unknown',\n 'filter_for_delete': filter_for_delete,\n 'comment': comment} \n \n json.update(**log_target_types(all_logs, **kwargs))\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a new ServerBackupTask.", "response": "def create(cls, name, servers, backup_log_data=False,\n encrypt_password=None, comment=None):\n \"\"\"\n Create a new server backup task. This task provides the ability\n to backup individual or all management and log servers under\n SMC management.\n \n :param str name: name of task\n :param servers: servers to back up. Servers must be instances of\n management servers or log servers. If no value is provided all\n servers are backed up.\n :type servers: list(ManagementServer or LogServer)\n :param bool backup_log_data: Should the log files be backed up. 
This\n field is only relevant if a Log Server is backed up.\n :param str encrypt_password: Provide an encrypt password if you want\n this backup to be encrypted.\n :param str comment: optional comment\n :raises ElementNotFound: specified servers were not found\n :raises CreateElementFailed: failure to create the task\n :return: the task\n :rtype: ServerBackupTask\n \"\"\"\n if not servers:\n servers = [svr.href for svr in ManagementServer.objects.all()]\n servers.extend([svr.href for svr in LogServer.objects.all()])\n else:\n servers = [svr.href for svr in servers]\n \n json = {\n 'resources': servers,\n 'name': name,\n 'password': encrypt_password if encrypt_password else None,\n 'log_data_must_be_saved': backup_log_data,\n 'comment': comment}\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(cls, name, engines, include_core_files=False,\n include_slapcat_output=False, comment=None):\n \"\"\"\n Create an sginfo task. 
\n \n :param str name: name of task\n :param engines: list of engines to apply the sginfo task\n :type engines: list(Engine)\n :param bool include_core_files: include core files in the\n sginfo backup (default: False)\n :param bool include_slapcat_output: include output from a\n slapcat command in output (default: False)\n :raises ElementNotFound: engine not found\n :raises CreateElementFailed: create the task failed\n :return: the task\n :rtype: SGInfoTask\n \"\"\"\n json = {\n 'name': name,\n 'comment': comment,\n 'resources': [engine.href for engine in engines],\n 'include_core_files': include_core_files,\n 'include_slapcat_output': include_slapcat_output}\n \n return ElementCreator(cls, json)", "response": "Create an SGInfoTask object"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsending request to SMC", "response": "def send_request(user_session, method, request):\n \"\"\"\n Send request to SMC\n \n :param Session user_session: session object\n :param str method: method for request\n :param SMCRequest request: request object\n :raises SMCOperationFailure: failure with reason\n :rtype: SMCResult\n \"\"\"\n if user_session.session:\n session = user_session.session # requests session\n try:\n method = method.upper() if method else ''\n \n if method == GET:\n if request.filename: # File download request\n return file_download(user_session, request)\n \n response = session.get(\n request.href,\n params=request.params,\n headers=request.headers,\n timeout=user_session.timeout)\n \n response.encoding = 'utf-8'\n \n counters.update(read=1)\n\n if logger.isEnabledFor(logging.DEBUG):\n debug(response)\n \n if response.status_code not in (200, 204, 304):\n raise SMCOperationFailure(response)\n\n elif method == POST:\n if request.files: # File upload request\n return file_upload(user_session, method, request)\n \n response = session.post(\n request.href,\n data=json.dumps(request.json, cls=CacheEncoder),\n headers=request.headers,\n 
params=request.params)\n \n response.encoding = 'utf-8'\n\n counters.update(create=1)\n if logger.isEnabledFor(logging.DEBUG):\n debug(response)\n \n if response.status_code not in (200, 201, 202):\n # 202 is asynchronous response with follower link\n raise SMCOperationFailure(response)\n\n elif method == PUT:\n if request.files: # File upload request\n return file_upload(user_session, method, request)\n \n # Etag should be set in request object\n request.headers.update(Etag=request.etag)\n \n response = session.put(\n request.href,\n data=json.dumps(request.json, cls=CacheEncoder),\n params=request.params,\n headers=request.headers)\n\n counters.update(update=1)\n \n if logger.isEnabledFor(logging.DEBUG):\n debug(response)\n\n if response.status_code != 200:\n raise SMCOperationFailure(response)\n\n elif method == DELETE:\n response = session.delete(\n request.href,\n headers=request.headers)\n\n counters.update(delete=1)\n\n # Conflict (409) if ETag is not current\n if response.status_code in (409,):\n req = session.get(request.href)\n etag = req.headers.get('ETag')\n response = session.delete(\n request.href,\n headers={'if-match': etag})\n\n response.encoding = 'utf-8'\n\n if logger.isEnabledFor(logging.DEBUG):\n debug(response)\n \n if response.status_code not in (200, 204):\n raise SMCOperationFailure(response)\n\n else: # Unsupported method\n return SMCResult(msg='Unsupported method: %s' % method,\n user_session=user_session)\n\n except SMCOperationFailure as error:\n if error.code in (401,):\n user_session.refresh()\n return send_request(user_session, method, request)\n raise error\n except requests.exceptions.RequestException as e:\n raise SMCConnectionError('Connection problem to SMC, ensure the API '\n 'service is running and host is correct: %s, exiting.' % e)\n else:\n return SMCResult(response, user_session=user_session)\n else:\n raise SMCConnectionError('No session found. 
Please login to continue')"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a SMCResult object with the contents of the object that is stored in the file specified by request. filename.", "response": "def file_download(user_session, request):\n \"\"\"\n Called when GET request specifies a filename to retrieve.\n \n :param Session user_session: session object\n :param SMCRequest request: request object\n :raises SMCOperationFailure: failure with reason\n :rtype: SMCResult\n \"\"\"\n logger.debug('Download file: %s', vars(request))\n response = user_session.session.get(\n request.href,\n params=request.params,\n headers=request.headers,\n stream=True)\n\n if response.status_code == 200:\n logger.debug('Streaming to file... Content length: %s', len(response.content))\n try:\n path = os.path.abspath(request.filename)\n logger.debug('Operation: %s, saving to file: %s', request.href, path)\n\n with open(path, \"wb\") as handle:\n for chunk in response.iter_content(chunk_size=1024):\n if chunk:\n handle.write(chunk)\n handle.flush()\n except IOError as e:\n raise IOError('Error attempting to save to file: {}'.format(e))\n\n result = SMCResult(response, user_session=user_session)\n result.content = path\n return result\n else:\n raise SMCOperationFailure(response)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef file_upload(user_session, method, request):\n logger.debug('Upload: %s', vars(request))\n http_command = getattr(user_session.session, method.lower())\n \n try:\n response = http_command(\n request.href,\n params=request.params,\n files=request.files)\n except AttributeError:\n raise TypeError('File specified in request was not readable: %s' % request.files)\n else:\n if response.status_code in (200, 201, 202, 204):\n logger.debug('Success sending file in elapsed time: %s', response.elapsed)\n return SMCResult(response, user_session=user_session)\n \n raise 
SMCOperationFailure(response)", "response": "Perform a file upload PUT or POST to SMC."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nrename this node :param str name: new name for node", "response": "def rename(self, name):\n \"\"\"\n Rename this node\n \n :param str name: new name for node\n \"\"\"\n self.update(name='{} node {}'.format(name, self.nodeid))"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates the node and its related data structures.", "response": "def _create(cls, name, node_type, nodeid=1,\n loopback_ndi=None):\n \"\"\"\n Create the node/s for the engine. This isn't called directly,\n instead it is used when engine.create() is called\n\n :param str name: name of node\n :param str node_type: based on engine type specified\n :param int nodeid: used to identify which node\n :param list LoopbackInterface loopback_ndi: optional loopback\n interface for node.\n \"\"\"\n loopback = loopback_ndi if loopback_ndi else []\n node = {node_type: {\n 'activate_test': True,\n 'disabled': False,\n 'loopback_node_dedicated_interface': loopback,\n 'name': name + ' node ' + str(nodeid),\n 'nodeid': nodeid}\n }\n return node"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef loopback_interface(self):\n for lb in self.data.get('loopback_node_dedicated_interface', []):\n yield LoopbackInterface(lb, self._engine)", "response": "Return a list of LoopbackInterface objects for all loopback interfaces for this node."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef bind_license(self, license_item_id=None):\n params = {'license_item_id': license_item_id}\n self.make_request(\n LicenseError,\n method='create',\n resource='bind',\n params=params)", "response": "Bind license to a new node."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 
to\nsave the initial contact for the specified node.", "response": "def initial_contact(self, enable_ssh=True, time_zone=None,\n keyboard=None,\n install_on_server=None,\n filename=None,\n as_base64=False):\n \"\"\"\n Allows to save the initial contact for for the specified node\n\n :param bool enable_ssh: flag to know if we allow the ssh daemon on the\n specified node\n :param str time_zone: optional time zone to set on the specified node\n :param str keyboard: optional keyboard to set on the specified node\n :param bool install_on_server: optional flag to know if the generated\n configuration needs to be installed on SMC Install server\n (POS is needed)\n :param str filename: filename to save initial_contact to\n :param bool as_base64: return the initial config in base 64 format. Useful\n for cloud based engine deployments as userdata\n :raises NodeCommandFailed: IOError handling initial configuration data\n :return: initial contact text information\n :rtype: str\n \"\"\"\n result = self.make_request(\n NodeCommandFailed,\n method='create',\n raw_result=True,\n resource='initial_contact',\n params={'enable_ssh': enable_ssh})\n \n if result.content:\n if as_base64:\n result.content = b64encode(result.content)\n \n if filename:\n try:\n save_to_file(filename, result.content)\n except IOError as e:\n raise NodeCommandFailed(\n 'Error occurred when attempting to save initial '\n 'contact to file: {}'.format(e))\n return result.content"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef interface_status(self):\n result = self.make_request(\n NodeCommandFailed,\n resource='appliance_status')\n return InterfaceStatus(result.get('interface_statuses', []))", "response": "Returns an iterable that provides information about the interfaces that are currently in use."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nretrieve the hardware statistics for various areas of this node.", 
"response": "def hardware_status(self):\n \"\"\"\n Obtain hardware statistics for various areas of this node.\n \n See :class:`~HardwareStatus` for usage.\n \n :raises NodeCommandFailed: failure to retrieve current status\n :rtype: HardwareStatus\n \"\"\"\n result = self.make_request(\n NodeCommandFailed,\n resource='appliance_status')\n return HardwareStatus(result.get('hardware_statuses', []))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nexecute a Go - Online operation on the specified node.", "response": "def go_online(self, comment=None):\n \"\"\"\n Executes a Go-Online operation on the specified node\n typically done when the node has already been forced offline\n via :func:`go_offline`\n\n :param str comment: (optional) comment to audit\n :raises NodeCommandFailed: online not available\n :return: None\n \"\"\"\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='go_online',\n params={'comment': comment})"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef go_offline(self, comment=None):\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='go_offline',\n params={'comment': comment})", "response": "Executes a Go - Offline operation on the specified node."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nexecutes a Go - Standby operation on the specified node.", "response": "def go_standby(self, comment=None):\n \"\"\"\n Executes a Go-Standby operation on the specified node.\n To get the status of the current node/s, run :func:`status`\n\n :param str comment: optional comment to audit\n :raises NodeCommandFailed: engine cannot go standby\n :return: None\n \"\"\"\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='go_standby',\n params={'comment': comment})"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following 
Python 3 code\ndef lock_online(self, comment=None):\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='lock_online',\n params={'comment': comment})", "response": "Executes a Lock - Online operation on the specified node."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nexecutes a Lock - Offline operation on the specified node.", "response": "def lock_offline(self, comment=None):\n \"\"\"\n Executes a Lock-Offline operation on the specified node\n Bring back online by running :func:`go_online`.\n\n :param str comment: comment for audit\n :raises NodeCommandFailed: lock offline failed\n :return: None\n \"\"\"\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='lock_offline',\n params={'comment': comment})"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef reset_user_db(self, comment=None):\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='reset_user_db',\n params={'comment': comment})", "response": "Executes a Send Reset LDAP User DB Request on this node."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nviewing all debug settings for this node. This will return a debug object. View the debug object repr to identify settings to enable or disable and submit the object to :meth:`set_debug` to enable settings. Add filter_enabled=True argument to see only enabled settings :param bool filter_enabled: returns all enabled diagnostics :raises NodeCommandFailed: failure getting diagnostics :rtype: Debug .. seealso:: :class:`~Debug` for example usage", "response": "def debug(self, filter_enabled=False):\n \"\"\"\n View all debug settings for this node. This will return a\n debug object. 
View the debug object repr to identify settings\n to enable or disable and submit the object to :meth:`set_debug`\n to enable settings.\n \n Add filter_enabled=True argument to see only enabled settings\n\n :param bool filter_enabled: returns all enabled diagnostics\n :raises NodeCommandFailed: failure getting diagnostics\n :rtype: Debug\n \n .. seealso:: :class:`~Debug` for example usage\n \"\"\"\n params = {'filter_enabled': filter_enabled}\n result = self.make_request(\n NodeCommandFailed,\n resource='diagnostic',\n params=params)\n return Debug(result.get('diagnostics'))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_debug(self, debug):\n self.make_request(\n NodeCommandFailed,\n method='create',\n resource='send_diagnostic',\n json=debug.serialize())", "response": "Set the debug settings for this node."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef reboot(self, comment=None):\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='reboot',\n params={'comment': comment})", "response": "Send reboot command to this node."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef sginfo(self, include_core_files=False,\n include_slapcat_output=False,\n filename='sginfo.gz'):\n \"\"\"\n Get the SG Info of the specified node. Optionally provide\n a filename, otherwise default to 'sginfo.gz'. 
Once you run\n gzip -d , the inner contents will be in .tar format.\n\n :param include_core_files: flag to include or not core files\n :param include_slapcat_output: flag to include or not slapcat output\n :raises NodeCommandFailed: failed getting sginfo with reason\n :return: string path of download location\n :rtype: str\n \"\"\"\n params = {\n 'include_core_files': include_core_files,\n 'include_slapcat_output': include_slapcat_output}\n \n result = self.make_request(\n NodeCommandFailed,\n raw_result=True,\n resource='sginfo',\n filename=filename,\n params=params)\n \n return result.content", "response": "Get the SG Info of the specified node. Optionally provide a filename otherwise default to sginfo. gz."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef ssh(self, enable=True, comment=None):\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='ssh',\n params={'enable': enable, 'comment': comment})", "response": "Enable or disable SSH daemonization"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef change_ssh_pwd(self, pwd=None, comment=None):\n self.make_request(\n NodeCommandFailed,\n method='update',\n resource='change_ssh_pwd',\n params={'comment': comment},\n json={'value': pwd})", "response": "Executes a change SSH password operation on the specified node."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nremove a given location from the set of known elements.", "response": "def delete(self, location_name):\n \"\"\"\n Remove a given location by location name. 
This operation is\n performed only if the given location is valid, and if so,\n `update` is called automatically.\n \n :param str location: location name or location ref\n :raises UpdateElementFailed: failed to update element with reason\n :rtype: bool\n \"\"\"\n updated = False\n location_ref = location_helper(location_name, search_only=True)\n if location_ref in self:\n self._cas[:] = [loc for loc in self\n if loc.location_ref != location_ref]\n self.update()\n updated = True\n return updated"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates an existing contact address or create if it doesn t exist.", "response": "def update_or_create(self, location, contact_address, with_status=False, **kw):\n \"\"\"\n Update an existing contact address or create if the location does\n not exist.\n \n :param str location: name of the location, the location will be added\n if it doesn't exist\n :param str contact_address: contact address IP. Can be the string 'dynamic'\n if this should be a dynamic contact address (i.e. 
on DHCP interface)\n :param bool with_status: if set to True, a 3-tuple is returned with \n (Element, modified, created), where the second and third tuple\n items are booleans indicating the status\n :raises UpdateElementFailed: failed to update element with reason\n :rtype: ContactAddressNode\n \"\"\"\n updated, created = False, False\n location_ref = location_helper(location)\n if location_ref in self:\n for ca in self:\n if ca.location_ref == location_ref:\n ca.update(\n address=contact_address if 'dynamic' not in contact_address\\\n else 'First DHCP Interface ip',\n dynamic='true' if 'dynamic' in contact_address else 'false')\n updated = True\n else:\n self.data.setdefault('contact_addresses', []).append(\n dict(address=contact_address if 'dynamic' not in contact_address\\\n else 'First DHCP Interface ip',\n dynamic='true' if 'dynamic' in contact_address else 'false',\n location_ref=location_ref))\n created = True\n \n if updated or created:\n self.update()\n if with_status:\n return self, updated, created\n return self"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get(self, interface_id, interface_ip=None):\n interfaces = []\n for interface in iter(self):\n if interface.interface_id == str(interface_id):\n if interface_ip:\n if interface.interface_ip == interface_ip:\n return interface\n else:\n interfaces.append(interface)\n return interfaces", "response": "Get will return a list of interface references based on the given interface id and IP address."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef change_vlan_id(self, vlan_id):\n first, _ = self.nicid.split('.')\n self.update(nicid='{}.{}'.format(first, str(vlan_id)))", "response": "Change a VLAN id in a NIC."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the VLAN ID for this interface", "response": "def vlan_id(self):\n \"\"\"\n VLAN ID for 
this interface, if any\n \n :return: VLAN identifier\n :rtype: str\n \"\"\"\n nicid = self.nicid\n if nicid:\n v = nicid.split('.')\n if len(v) > 1:\n return nicid.split('.')[1]"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a new pair of neccesary related resources.", "response": "def create(cls, interface_id, logical_interface_ref,\n second_interface_id=None, zone_ref=None, **kwargs):\n \"\"\"\n :param str interface_id: two interfaces, i.e. '1-2', '4-5', '7-10', etc\n :param str logical_ref: logical interface reference\n :param str zone_ref: reference to zone, set on second inline pair\n :rtype: dict\n \"\"\"\n data = {'inspect_unspecified_vlans': True,\n 'nicid': '{}-{}'.format(str(interface_id), str(second_interface_id)) if \n second_interface_id else str(interface_id),\n 'logical_interface_ref': logical_interface_ref,\n 'zone_ref': zone_ref}\n\n for k, v in kwargs.items():\n data.update({k: v})\n\n return cls(data)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef change_vlan_id(self, vlan_id):\n first, second = self.nicid.split('-')\n firstintf = first.split('.')[0]\n secondintf = second.split('.')[0]\n newvlan = str(vlan_id).split('-')\n self.update(nicid='{}.{}-{}.{}'.format(\n firstintf, newvlan[0], secondintf, newvlan[-1]))", "response": "Change a VLAN id for an inline interface."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef change_interface_id(self, newid):\n try:\n newleft, newright = newid.split('-')\n except ValueError:\n raise EngineCommandFailed('You must provide two parts when changing '\n 'the interface ID on an inline interface, i.e. 1-2.')\n first, second = self.nicid.split('-')\n if '.' in first and '.' 
in second:\n firstvlan = first.split('.')[-1]\n secondvlan = second.split('.')[-1]\n self.update(nicid='{}.{}-{}.{}'.format(\n newleft, firstvlan, newright, secondvlan))\n else:\n # Top level interface or no VLANs\n self.update(nicid=newid)", "response": "Change the inline interface ID."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the VLAN ID for this interface.", "response": "def vlan_id(self):\n \"\"\"\n VLAN ID for this interface, if any\n \n :return: VLAN identifier\n :rtype: str\n \"\"\"\n nicids = self.nicid.split('-')\n if nicids:\n u = []\n for vlan in nicids:\n if vlan.split('.')[-1] not in u:\n u.append(vlan.split('.')[-1])\n return '-'.join(u)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(cls, interface_id, logical_interface_ref, **kw):\n data = {'inspect_unspecified_vlans': True,\n 'logical_interface_ref': logical_interface_ref,\n 'nicid': str(interface_id)}\n \n if 'reset_interface_nicid' in kw:\n data.update(reset_interface_nicid=kw.get('reset_interface_nicid'))\n\n return cls(data)", "response": "Create a new entry in the nationale cache."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a new instance of the class based on the parameters passed in.", "response": "def create(cls, interface_id, address, network_value,\n nodeid=1, **kwargs):\n \"\"\"\n :param int interface_id: interface id\n :param str address: ip address of the interface\n :param str network_value: network/netmask, i.e. 
x.x.x.x/24\n :param int nodeid: for clusters, used to identify the node number\n :rtype: dict\n \"\"\"\n data = {'address': address,\n 'network_value': network_value,\n 'nicid': str(interface_id),\n 'auth_request': False,\n 'backup_heartbeat': False,\n 'nodeid': nodeid,\n 'outgoing': False,\n 'primary_mgt': False,\n 'primary_heartbeat': False}\n\n for k, v in kwargs.items():\n data.update({k: v})\n\n return cls(data)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef create(cls, interface_id, address=None, network_value=None,\n nodeid=1, **kw):\n \"\"\"\n :param int interface_id: interface id\n :param str address: address of this interface\n :param str network_value: network of this interface in cidr x.x.x.x/24\n :param int nodeid: if a cluster, identifies which node this is for\n :rtype: dict\n \"\"\"\n data = {'address': address,\n 'auth_request': False,\n 'auth_request_source': False,\n 'primary_heartbeat': False,\n 'backup_heartbeat': False,\n 'backup_mgt': False,\n 'dynamic': False,\n 'network_value': network_value,\n 'nicid': str(interface_id),\n 'nodeid': nodeid,\n 'outgoing': False,\n 'primary_mgt': False}\n \n for k, v in kw.items():\n data.update({k: v})\n \n if 'dynamic' in kw and kw['dynamic'] is not None:\n for key in ('address', 'network_value'):\n data.pop(key, None)\n if data['primary_mgt']: # Have to set auth_request to a different interface for DHCP\n data['auth_request'] = False\n \n if data.get('dynamic_index', None) is None:\n data['dynamic_index'] = 1\n elif data.get('automatic_default_route') is None:\n data.update(automatic_default_route=True)\n \n return cls(data)", "response": "Creates a new DHCP\n object."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a loopback interface. Uses parent constructor.", "response": "def create(cls, address, ospf_area=None, **kwargs):\n \"\"\"\n Create a loopback interface. 
Uses parent constructor\n \n :rtype: LoopbackClusterInterface\n \"\"\"\n return super(LoopbackClusterInterface, cls).create(\n address=address,\n network_value='{}/32'.format(address),\n interface_id='Loopback Interface',\n ospfv2_area_ref=ospf_area,\n **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef delete(self):\n self._engine.data[self.typeof] = \\\n [loopback for loopback in self._engine.data.get(self.typeof, [])\n if loopback.get('address') != self.address]\n \n self._engine.update()", "response": "Delete a loopback cluster virtual interface from this engine."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_cvi_loopback(self, address, ospf_area=None, **kw):\n lb = self.create(address, ospf_area, **kw)\n \n if self.typeof in self._engine.data:\n self._engine.data[self.typeof].append(lb.data)\n else:\n self._engine.data[self.typeof] = [lb.data]\n \n self._engine.update()", "response": "Adds a new entry to the cluster virtual loopback. This enables the loopback to float between cluster members."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nadd a single loopback interface to this engine. This is used to create a single loopback interface for virtual FW engines.", "response": "def add_single(self, address, rank=1, nodeid=1, ospf_area=None, **kwargs):\n \"\"\"\n Add a single loopback interface to this engine. This is used\n for single or virtual FW engines.\n \n :param str address: ip address for loopback\n :param int nodeid: nodeid to apply. 
Default to 1 for single FW\n :param str, Element ospf_area: ospf area href or element\n :raises UpdateElementFailed: failure to create loopback address\n :return: None\n \"\"\"\n lb = self.create(address, rank, nodeid, ospf_area, **kwargs)\n self._engine.nodes[0].data[self.typeof].append(lb.data)\n self._engine.update()"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef delete(self):\n nodes = []\n for node in self._engine.nodes:\n node.data[self.typeof] = \\\n [lb for lb in node.loopback_node_dedicated_interface\n if lb.get('rank') != self.rank]\n nodes.append({node.type: node.data})\n \n self._engine.data['nodes'] = nodes\n self._engine.update()", "response": "Delete a loopback interface from this engine."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate an instance of the object containing the name of the server element and address.", "response": "def create(cls, name, address, base_dn, bind_user_id=None, bind_password=None,\n port=389, protocol='ldap', tls_profile=None, tls_identity=None,\n domain_controller=None, supported_method=None, timeout=10, max_search_result=0,\n page_size=0, internet_auth_service_enabled=False, **kwargs):\n \"\"\"\n Create an AD server element using basic settings. 
You can also provide additional\n kwargs documented in the class description::\n \n ActiveDirectoryServer.create(name='somedirectory',\n address='10.10.10.10',\n base_dn='dc=domain,dc=net',\n bind_user_id='cn=admin,cn=users,dc=domain,dc=net',\n bind_password='somecrazypassword')\n \n Configure NPS along with Active Directory::\n \n ActiveDirectoryServer.create(name='somedirectory5',\n address='10.10.10.10',\n base_dn='dc=lepages,dc=net',\n internet_auth_service_enabled=True,\n retries=3,\n auth_ipaddress='10.10.10.15',\n auth_port=1900,\n shared_secret='123456')\n \n :param str name: name of AD element for display\n :param str address: address of AD server\n :param str base_dn: base DN for which to retrieve users, format is 'dc=domain,dc=com'\n :param str bind_user_id: bind user ID credentials, fully qualified. Format is\n 'cn=admin,cn=users,dc=domain,dc=com'. If not provided, anonymous bind is used\n :param str bind_password: bind password, required if bind_user_id set\n :param int port: LDAP bind port, (default: 389)\n :param str protocol: Which LDAP protocol to use, options 'ldap/ldaps/ldap_tls'. If\n ldaps or ldap_tls is used, you must provide a tls_profile element (default: ldap)\n :param str,TLSProfile tls_profile by element of str href. Used when protocol is set\n to ldaps or ldap_tls\n :param str,TLSIdentity tls_identity: check server identity when establishing TLS connection\n :param list(DomainController) domain_controller: list of domain controller objects to\n add an additional domain controllers for AD communication\n :param list(AuthenticationMethod) supported_method: authentication services allowed\n for this resource\n :param int timeout: The time (in seconds) that components wait for the server to reply\n :param int max_search_result: The maximum number of LDAP entries that are returned in\n an LDAP response (default: 0 for no limit)\n :param int page_size: The maximum number of LDAP entries that are returned on each page\n of the LDAP response. 
(default: 0 for no limit)\n :param bool internet_auth_service_enabled: whether to attach an NPS service to this\n AD controller (default: False). If setting to true, provide kwargs values for\n auth_ipaddress, auth_port and shared_secret\n :raises CreateElementFailed: failed creating element\n :rtype: ActiveDirectoryServer\n \"\"\"\n json={'name': name, 'address': address, 'base_dn': base_dn,\n 'bind_user_id': bind_user_id, 'bind_password': bind_password,\n 'port': port, 'protocol': protocol, 'timeout': timeout,\n 'domain_controller': domain_controller or [],\n 'max_search_result': max_search_result, 'page_size': page_size,\n 'internet_auth_service_enabled': internet_auth_service_enabled,\n 'supported_method': element_resolver(supported_method) or []}\n \n for obj_class in ('group_object_class', 'user_object_class'):\n json[obj_class] = kwargs.pop(obj_class, [])\n \n if protocol in ('ldaps', 'ldap_tls'):\n if not tls_profile:\n raise CreateElementFailed('You must provide a TLS Profile when TLS '\n 'connections are configured to the AD controller.')\n json.update(tls_profile_ref=element_resolver(tls_profile),\n tls_identity=tls_identity)\n \n if internet_auth_service_enabled:\n ias = {'auth_port': kwargs.pop('auth_port', 1812),\n 'auth_ipaddress': kwargs.pop('auth_ipaddress', ''),\n 'shared_secret': kwargs.pop('shared_secret'),\n 'retries': kwargs.pop('retries', 2)}\n json.update(ias)\n \n json.update(kwargs)\n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nupdates or create an active directory server element.", "response": "def update_or_create(cls, with_status=False, **kwargs):\n \"\"\"\n Update or create active directory configuration.\n \n :param dict kwargs: kwargs to satisfy the `create` constructor arguments\n if the element doesn't exist or attributes to change\n :raises CreateElementFailed: failed creating element\n :return: element instance by type or 3-tuple if 
with_status set\n \"\"\"\n element, updated, created = super(ActiveDirectoryServer, cls).update_or_create(\n defer_update=True, **kwargs)\n \n if not created:\n domain_controller = kwargs.pop('domain_controller', [])\n if domain_controller:\n current_dc_list = element.domain_controller\n for dc in domain_controller:\n if dc not in current_dc_list:\n element.data.setdefault('domain_controller', []).append(dc.data)\n updated = True\n \n if updated:\n element.update()\n if with_status:\n return element, updated, created\n return element"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef create(cls, name, entries=None, comment=None, **kw):\n access_list_entry = []\n if entries:\n for entry in entries:\n access_list_entry.append(\n {'{}_entry'.format(cls.typeof): entry})\n\n json = {'name': name,\n 'entries': access_list_entry,\n 'comment': comment}\n json.update(kw)\n \n return ElementCreator(cls, json)", "response": "Create an Access List Entry."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_entry(self, **kw):\n self.data.setdefault('entries', []).append(\n {'{}_entry'.format(self.typeof): kw})", "response": "Add an entry to an AccessList."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nremoving an AccessList entry by field specified.", "response": "def remove_entry(self, **field_value):\n \"\"\"\n Remove an AccessList entry by field specified. 
Use the supported\n arguments for the inheriting class for keyword arguments.\n\n :raises UpdateElementFailed: failed to modify with reason\n :return: None\n \"\"\"\n field, value = next(iter(field_value.items()))\n self.data['entries'][:] = [entry\n for entry in self.data.get('entries')\n if entry.get('{}_entry'.format(self.typeof))\n .get(field) != str(value)]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef update_or_create(cls, with_status=False, overwrite_existing=False, **kw):\n created = False\n modified = False\n try:\n element = cls.get(kw.get('name'))\n except ElementNotFound:\n element = cls.create(**kw)\n created = True\n \n if not created:\n if overwrite_existing:\n element.data['entries'] = [\n {'{}_entry'.format(element.typeof): entry}\n for entry in kw.get('entries')]\n modified = True\n else:\n if 'comment' in kw and kw['comment'] != element.comment:\n element.comment = kw['comment']\n modified = True\n for entry in kw.get('entries', []):\n if cls._view(**entry) not in element:\n element.add_entry(**entry)\n modified = True\n\n if modified:\n element.update()\n \n if with_status:\n return element, modified, created\n return element", "response": "Update or create the Access List."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef update_status(self):\n task = self.make_request(\n TaskRunFailed,\n href=self.href)\n\n return Task(task)", "response": "Updates the status of this task and returns a new Task object."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nexecuting the task and return a TaskOperationPoller.", "response": "def execute(self, resource, **kw):\n \"\"\"\n Execute the task and return a TaskOperationPoller.\n \n :rtype: TaskOperationPoller\n \"\"\"\n params = kw.pop('params', {})\n json = kw.pop('json', None)\n task = self.make_request(\n TaskRunFailed,\n 
method='create',\n params=params,\n json=json,\n resource=resource)\n\n timeout = kw.pop('timeout', 5)\n wait_for_finish = kw.pop('wait_for_finish', True)\n \n return TaskOperationPoller(\n task=task, timeout=timeout,\n wait_for_finish=wait_for_finish,\n **kw)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nstart and return a Download Task", "response": "def download(self, resource, filename, **kw):\n \"\"\"\n Start and return a Download Task\n \n :rtype: DownloadTask(TaskOperationPoller)\n \"\"\"\n params = kw.pop('params', {})\n task = self.make_request(\n TaskRunFailed,\n method='create',\n resource=resource,\n params=params)\n\n return DownloadTask(\n filename=filename, task=task)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_done_callback(self, callback):\n if self._done is None or self._done.is_set():\n raise ValueError('Task has already finished')\n if callable(callback):\n self.callbacks.append(callback)", "response": "Add a callback to run after the task completes."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef wait(self, timeout=None):\n if self._thread is None:\n return\n self._thread.join(timeout=timeout)", "response": "Blocking wait for task status."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef last_message(self, timeout=5):\n if self._thread is not None:\n self._thread.join(timeout=timeout)\n return self._task.last_message", "response": "Wait a specified amount of time and return the last message from the task"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nstopping the running task.", "response": "def stop(self):\n \"\"\"\n Stop the running task\n \"\"\"\n if self._thread is not None and self._thread.isAlive():\n self._done.set()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 
function for\ncreating a new ACL object.", "response": "def create(cls, name, granted_element=None):\n \"\"\"\n Create a new ACL\n \n :param str name: Name of ACL\n :param list granted_elements: Elements to grant access to. Can be\n engines, policies or other acl's.\n :type granted_elements: list(str,Element)\n :raises CreateElementFailed: failed creating ACL\n :return: instance with meta\n :rtype: AccessControlList\n \"\"\"\n granted_element = element_resolver(granted_element)\n json = {'name': name,\n 'granted_element': granted_element}\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_permission(self, elements):\n elements = element_resolver(elements)\n self.data['granted_element'].extend(elements)\n self.update()", "response": "Adds a list of elements to the set of permissions that this ACL can grant access to."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef remove_permission(self, elements):\n elements = element_resolver(elements)\n for element in elements:\n if element in self.granted_element:\n self.data['granted_element'].remove(element)\n self.update()", "response": "Removes the specified elements from the set of granted elements to this ACL."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, elements, role, domain=None):\n if not domain:\n domain = AdminDomain('Shared Domain')\n \n return Permission(\n granted_elements=elements, role_ref=role, granted_domain_ref=domain)", "response": "Create a permission.\n \n :param list granted_elements: Elements for this permission. 
Can\n be engines, policies or ACLs\n :type granted_elements: list(str,Element)\n :param str,Role role: role for this permission\n :param str,Element domain: domain to apply (default: Shared Domain)\n :rtype: Permission"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef save_to_file(filename, content):\n import os.path\n path = os.path.abspath(filename)\n with open(path, \"w\") as text_file:\n text_file.write(\"{}\".format(content))", "response": "Save content to file. Used by node initial contact but\n can be used anywhere."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef element_resolver(elements, do_raise=True):\n if isinstance(elements, list):\n e = []\n for element in elements:\n try:\n e.append(element.href)\n except AttributeError:\n e.append(element)\n except smc.api.exceptions.ElementNotFound:\n if do_raise:\n raise\n return e\n try:\n return elements.href\n except AttributeError:\n return elements\n except smc.api.exceptions.ElementNotFound:\n if do_raise:\n raise", "response": "Element resolver takes either a single class instance or a list of elements to resolve the href. 
If do_raise is True an exception is raised."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nmerging the second dict into the first dict.", "response": "def merge_dicts(dict1, dict2, append_lists=False):\n \"\"\"\n Merge the second dict into the first\n Not intended to merge list of dicts.\n\n :param append_lists: If true, instead of clobbering a list with the\n new value, append all of the new values onto the original list.\n \"\"\"\n for key in dict2:\n if isinstance(dict2[key], dict):\n if key in dict1 and key in dict2:\n merge_dicts(dict1[key], dict2[key], append_lists)\n else:\n dict1[key] = dict2[key]\n # If the value is a list and the ``append_lists`` flag is set,\n # append the new values onto the original list\n elif isinstance(dict2[key], list) and append_lists:\n # The value in dict1 must be a list in order to append new\n # values onto it. Don't add duplicates.\n if key in dict1 and isinstance(dict1[key], list):\n dict1[key].extend(\n [k for k in dict2[key] if k not in dict1[key]])\n else:\n dict1[key] = dict2[key]\n else:\n dict1[key] = dict2[key]"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef unicode_to_bytes(s, encoding='utf-8', errors='replace'):\n return s if isinstance(s, str) else s.encode(encoding, errors)", "response": "Helper function to convert unicode strings to bytes for the\n output stream"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncreating a new base class containing a sub - element collection.", "response": "def create_collection(href, cls):\n \"\"\"\n This collection type inserts a ``create`` method into the collection.\n This will proxy to the sub elements create method while restricting\n access to other attributes that wouldn't be initialized yet.\n \n .. 
py:method:: create(...)\n \n Create method is inserted dynamically for the collection class type.\n See the class types documentation, or use help().\n\n :rtype: SubElementCollection \n \"\"\"\n instance = cls(href=href)\n meth = getattr(instance, 'create')\n return type(\n cls.__name__, (SubElementCollection,), {'create': meth})(href, cls)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef rule_collection(href, cls):\n instance = cls(href=href)\n meth = getattr(instance, 'create')\n return type( \n cls.__name__, (SubElementCollection,), {\n 'create': meth,\n 'create_rule_section': instance.create_rule_section})(href, cls)", "response": "A utility method to create a new rule collection."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _strip_metachars(val):\n ignore_metachar = r'(.+)([/-].+)'\n match = re.search(ignore_metachar, str(val))\n if match:\n left_half = match.group(1)\n return left_half\n return val", "response": "Strip the metachars from the given value."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the element by index in the internal list.", "response": "def get(self, index):\n \"\"\"\n Get the element by index. If index is out of bounds for\n the internal list, None is returned. 
Indexes cannot be\n negative.\n \n :param int index: retrieve element by positive index in list\n :rtype: SubElement or None\n \"\"\"\n if self and (index <= len(self) -1):\n return self._result_cache[index]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_contains(self, value, case_sensitive=True):\n for element in self:\n if not case_sensitive:\n if value.lower() in element.name.lower():\n return element\n elif value in element.name:\n return element", "response": "Returns the first element that contains the given value."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns all elements that contain the specified value.", "response": "def get_all_contains(self, value, case_sensitive=True):\n \"\"\"\n A partial match on the name field. Does an `in` comparsion to\n elements by the meta `name` field.\n Returns all elements that match the specified value.\n \n .. seealso:: :meth:`get_contains` for returning only a single item.\n \n :param str value: searchable string for contains match\n :param bool case_sensitive: whether the match should consider case\n (default: True)\n :return: element or empty list\n :rtype: list(SubElement)\n \"\"\"\n elements = []\n for element in self:\n if not case_sensitive:\n if value.lower() in element.name.lower():\n elements.append(element)\n elif value in element.name:\n elements.append(element)\n return elements"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating a clone of this collection.", "response": "def _clone(self, **kwargs):\n \"\"\"\n Create a clone of this collection. The only param in the\n initial collection is the filter context. 
Each chainable\n filter is added to the clone and returned to preserve\n previous iterators and their returned elements.\n \n :return: :class:`.ElementCollection`\n \"\"\"\n params = copy.deepcopy(self._params)\n if self._iexact:\n params.update(iexact=self._iexact)\n params.update(**kwargs)\n clone = self.__class__(**params)\n return clone"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nfilter the list of elements for specific element type.", "response": "def filter(self, *filter, **kw): # @ReservedAssignment\n \"\"\"\n Filter results for specific element type.\n \n keyword arguments can be used to specify a match against the\n elements attribute directly. It's important to note that if the\n search filter contains a / or -, the SMC will only search the\n name and comment fields. Otherwise other key fields of an element\n are searched. In addition, SMC searches are a 'contains' search\n meaning you may return more results than wanted. Use a key word\n argument to specify the elements attribute and value expected.\n ::\n \n >>> list(Router.objects.filter('10.10.10.1'))\n [Router(name=Router-110.10.10.10), Router(name=Router-10.10.10.10), Router(name=Router-10.10.10.1)]\n >>> list(Router.objects.filter(address='10.10.10.1'))\n [Router(name=Router-10.10.10.1)]\n \n :param str filter: any parameter to attempt to match on.\n For example, if this is a service, you could match on service name\n 'http' or ports of interest, '80'.\n :param bool exact_match: Can be passed as a keyword arg. Specifies whether\n the match needs to be exact or not (default: False)\n :param bool case_sensitive: Can be passed as a keyword arg. Specifies\n whether the match is case sensitive or not. 
(default: True) \n :param kw: keyword args can specify an attribute=value to use as an\n exact match against the elements attribute.\n :return: :class:`.ElementCollection`\n \"\"\"\n iexact = None\n if filter:\n _filter = filter[0]\n \n exact_match = kw.pop('exact_match', False)\n case_sensitive = kw.pop('case_sensitive', True)\n \n if kw:\n _, value = next(iter(kw.items()))\n _filter = value\n iexact = kw\n \n # Only strip metachars from network and address range\n if not exact_match and self._params.get('filter_context', {})\\\n in ('network', 'address_range', 'network_elements'):\n _filter = _strip_metachars(_filter)\n \n return self._clone(\n filter=_filter,\n iexact=iexact,\n exact_match=exact_match,\n case_sensitive=case_sensitive)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn an iterator that returns num results per iteration.", "response": "def batch(self, num):\n \"\"\"\n Iterator returning results in batches. When making more general queries\n that might have larger results, specify a batch result that should be\n returned with each iteration.\n \n :param int num: number of results per iteration\n :return: iterator holding list of results\n \"\"\"\n self._params.pop('limit', None) # Limit and batch are mutually exclusive\n it = iter(self)\n while True:\n chunk = list(islice(it, num))\n if not chunk:\n return\n yield chunk"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the first object in the collection.", "response": "def first(self):\n \"\"\"\n Returns the first object matched or None if there is no\n matching object.\n ::\n \n >>> iterator = Host.objects.iterator()\n >>> c = iterator.filter('kali')\n >>> if c.exists():\n >>> print(c.count())\n >>> print(c.first())\n 7\n Host(name=kali67)\n \n If results are not needed and you only 1 result, this can be called\n from the CollectionManager::\n \n >>> Host.objects.first()\n Host(name=SMC)\n \n :return: element or 
None\n \"\"\"\n if len(self):\n self._params.update(limit=1)\n if 'filter' not in self._params:\n return list(self)[0]\n else: # Filter may not return results\n result = list(self)\n if result:\n return result[0]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef last(self):\n if len(self):\n self._params.update(limit=1)\n if 'filter' not in self._params:\n return list(self)[-1]\n else: # Filter may not return results\n result = list(self)\n if result:\n return result[-1]", "response": "Returns the last object in the list or None if there is no matching object."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef iterator(self, **kwargs):\n cls_name = '{0}Collection'.format(self._cls.__name__)\n collection_cls = type(str(cls_name), (ElementCollection,), {})\n\n params = {'filter_context': self._cls.typeof}\n params.update(kwargs)\n return collection_cls(**params)", "response": "Return an iterator from the collection manager."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef entry_point(self, entry_point):\n if len(entry_point.split(',')) == 1:\n self._params.update(\n href=self._resource.get(entry_point))\n return self\n else:\n self._params.update(\n filter_context=entry_point)\n return self", "response": "This method provides an entry point for element types to search."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef context_filter(self, context):\n if context in CONTEXTS:\n self._params.update(filter_context=context)\n return self\n raise InvalidSearchFilter(\n 'Context filter %r was invalid. 
Available filters: %s' %\n (context, CONTEXTS))", "response": "Provide a context filter to search."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef unused(self):\n self._params.update(\n href=self._resource.get('search_unused'))\n return self", "response": "Return unused user - created elements."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef duplicates(self):\n self._params.update(\n href=self._resource.get('search_duplicate'))\n return self", "response": "Return duplicate user - created elements."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nshows all available entry points available for searching.", "response": "def object_types():\n \"\"\"\n Show all available 'entry points' available for searching. An entry\n point defines a uri that provides unfiltered access to all elements\n of the entry point type.\n\n :return: list of entry points by name\n :rtype: list(str)\n \"\"\"\n # Return all elements from the root of the API nested under elements URI\n #element_uri = str(\n types = [element.rel\n for element in entry_point()]\n types.extend(list(CONTEXTS))\n return types"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef fetch_as_element(self, **kw):\n clone = self.copy()\n clone.format.field_format('id')\n for custom_field in ['field_ids', 'field_names']:\n clone.format.data.pop(custom_field, None)\n\n for list_of_results in clone.fetch_raw(**kw):\n for entry in list_of_results:\n yield Connection(**entry)", "response": "Fetch the results and return as a Connection element."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate an OSPFArea with message - digest authentication for your OSPFArea.", "response": "def create_ospf_area_with_message_digest_auth():\n \"\"\"\n If you require message-digest 
authentication for your OSPFArea, you must\n create an OSPF key chain configuration. \n \"\"\"\n OSPFKeyChain.create(name='secure-keychain',\n key_chain_entry=[{'key': 'fookey',\n 'key_id': 10,\n 'send_key': True}])\n\n \"\"\"\n An OSPF interface is applied to a physical interface at the engines routing\n node level. This configuration is done by an OSPFInterfaceSetting element. To\n apply the key-chain to this configuration, add the authentication_type of \n message-digest and reference to the key-chain\n \"\"\"\n key_chain = OSPFKeyChain('secure-keychain') # obtain resource\n OSPFInterfaceSetting.create(name='authenicated-ospf',\n authentication_type='message_digest',\n key_chain_ref=key_chain.href)\n\n \"\"\"\n Create the OSPFArea and assign the above created OSPFInterfaceSetting.\n In this example, use the default system OSPFInterfaceSetting called\n 'Default OSPFv2 Interface Settings'\n \"\"\"\n for profile in describe_ospfv2_interface_settings():\n if profile.name.startswith('Default OSPF'): # Use the system default\n interface_profile = profile.href\n\n OSPFArea.create(name='area0',\n interface_settings_ref=interface_profile,\n area_id=0)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates an OSPF Profile and OSPF Domain Setting.", "response": "def create_ospf_profile():\n \"\"\"\n An OSPF Profile contains administrative distance and redistribution settings. An\n OSPF Profile is applied at the engine level. \n When creating an OSPF Profile, you must reference a OSPFDomainSetting. 
\n\n An OSPFDomainSetting holds the settings of the area border router (ABR) type, \n throttle timer settings, and the max metric router link-state advertisement \n (LSA) settings.\n \"\"\"\n OSPFDomainSetting.create(name='custom',\n abr_type='cisco')\n\n ospf_domain = OSPFDomainSetting('custom') # obtain resource\n\n ospf_profile = OSPFProfile.create(name='myospfprofile',\n domain_settings_ref=ospf_domain.href)\n print(ospf_profile)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef timezone(self, tz):\n self.data['resolving'].update(\n timezone=tz,\n time_show_zone=True)", "response": "Set the timezone on the audit records."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets resolving log fields.", "response": "def set_resolving(self, **kw):\n \"\"\"\n Certain log fields can be individually resolved. Use this\n method to set these fields. Valid keyword arguments:\n \n :param str timezone: string value to set timezone for audits\n :param bool time_show_zone: show the time zone in the audit.\n :param bool time_show_millis: show timezone in milliseconds\n :param bool keys: resolve log field keys\n :param bool ip_elements: resolve IP's to SMC elements\n :param bool ip_dns: resolve IP addresses using DNS\n :param bool ip_locations: resolve locations \n \"\"\"\n if 'timezone' in kw and 'time_show_zone' not in kw:\n kw.update(time_show_zone=True)\n self.data['resolving'].update(**kw)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef fetch_raw(self):\n for results in super(LogQuery, self).execute():\n if 'records' in results and results['records']:\n yield results['records']", "response": "Execute the query and return by batches."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef fetch_batch(self, formatter=TableFormat):\n clone = 
self.copy()\n clone.update_query(type='stored')\n if not clone.fetch_size or clone.fetch_size <= 0:\n clone.request['fetch'].update(quantity=200)\n \n fmt = formatter(clone)\n for result in clone.fetch_raw():\n yield fmt.formatted(result)", "response": "Fetch a batch of logs and return using the specified formatter."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a generator of formatted results for all live log entries in real - time.", "response": "def fetch_live(self, formatter=TableFormat):\n \"\"\"\n View logs in real-time. If previous filters were already set on\n this query, they will be preserved on the original instance (this\n method forces ``fetch_type='current'``).\n \n :param formatter: Formatter type for data representation. Any type\n in :py:mod:`smc_monitoring.models.formatters`.\n :return: generator of formatted results\n \"\"\"\n clone = self.copy()\n clone.update_query(type='current')\n fmt = formatter(clone)\n for result in clone.fetch_raw():\n yield fmt.formatted(result)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a new VSS Container.", "response": "def create(cls, name, vss_def=None):\n \"\"\"\n Create a VSS Container. This maps to the Service Manager\n within NSX.\n\n vss_def is optional and has the following definition:\n\n {\"isc_ovf_appliance_model\": 'virtual',\n \"isc_ovf_appliance_version\": '',\n \"isc_ip_address\": '192.168.4.84', # IP of manager, i.e. 
OSC\n \"isc_vss_id\": '',\n \"isc_virtual_connector_name\": 'smc-python'}\n\n :param str name: name of container\n :param dict vss_def: dict of optional settings\n :raises CreateElementFailed: failed creating with reason\n :rtype: VSSContainer\n \"\"\"\n vss_def = {} if vss_def is None else vss_def\n json = {\n 'master_type': 'dcl2fw',\n 'name': name,\n 'vss_isc': vss_def\n }\n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the nodes for this VSS Container", "response": "def nodes(self):\n \"\"\"\n Return the nodes for this VSS Container\n \n :rtype: SubElementCollection(VSSContainerNode)\n \"\"\"\n resource = sub_collection(\n self.get_relation('vss_container_node'), \n VSSContainerNode) \n resource._load_from_engine(self, 'nodes') \n return resource"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef vss_contexts(self):\n result = self.make_request(\n href=fetch_entry_point('visible_virtual_engine_mapping'),\n params={'filter': self.name})\n if result.get('mapping', []):\n return [Element.from_href(ve)\n for ve in result['mapping'][0].get('virtual_engine', [])]\n return []", "response": "Return all virtual contexts for this VSS Container."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nremoves a security group from the container", "response": "def remove_security_group(self, name):\n \"\"\"\n Remove a security group from container\n \"\"\"\n for group in self.security_groups:\n if group.isc_name == name:\n group.delete()"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef add_security_group(self, name, isc_id, comment=None):\n return SecurityGroup.create(\n name=name,\n isc_id=isc_id,\n vss_container=self,\n comment=comment)", "response": "Creates a new security group."} {"SOURCE": "codesearchnet", "instruction": "Can you write a 
function in Python 3 where it\ncreates the VSS Context within the VSSContainer", "response": "def add_context(self, isc_name, isc_policy_id, isc_traffic_tag):\n \"\"\"\n Create the VSS Context within the VSSContainer\n\n :param str isc_name: ISC name, possibly append policy name??\n :param str isc_policy_id: Policy ID in SMC (the 'key' attribute)\n :param str isc_traffic_tag: NSX groupId (serviceprofile-145)\n :raises CreateElementFailed: failed to create\n :return: VSSContext\n \"\"\"\n if 'add_context' in self.data.links: # SMC >=6.5\n element = ElementCreator(\n VSSContext,\n href=self.get_relation('add_context'),\n json = {\n 'name': isc_name,\n 'vc_isc': {\n 'isc_name': isc_name,\n 'isc_policy_id': isc_policy_id,\n 'isc_traffic_tag': isc_traffic_tag\n }\n })\n else: # SMC < 6.5\n element = VSSContext.create(\n isc_name=isc_name,\n isc_policy_id=isc_policy_id,\n isc_traffic_tag=isc_traffic_tag,\n vss_container=self)\n \n # Delete cache since the virtualResources node is attached to\n # the engine json\n self._del_cache()\n return element"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef create(cls, name, vss_container, vss_node_def,\n comment=None):\n \"\"\"\n Create VSS node. This is the engines dvFilter management\n interface within the NSX agent.\n \n .. 
seealso:: `~.isc_settings` for documentation on the\n vss_node_def dict\n\n :param str name: name of node\n :param VSSContainer vss_container: container to nest this node\n :param dict vss_node_def: node definition settings\n :raises CreateElementFailed: created failed with reason\n :rtype: VSSContainerNode\n \"\"\"\n element = ElementCreator(cls,\n href=vss_container.get_relation('vss_container_node'),\n json={\n 'name': name,\n 'vss_node_isc': vss_node_def,\n 'comment': comment\n })\n vss_container._del_cache() # Removes references to linked container\n return element", "response": "Create a new VSS node."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, isc_name, isc_policy_id, isc_traffic_tag, vss_container):\n container = element_resolver(vss_container)\n return ElementCreator(cls,\n href=container + '/vss_context',\n json = {\n 'vc_isc': {\n 'isc_name': isc_name,\n 'isc_policy_id': isc_policy_id,\n 'isc_traffic_tag': isc_traffic_tag\n }\n })", "response": "Create the VSS Context within the VSS Container."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nremove this VSS Context from the master node.", "response": "def remove_from_master_node(self, wait_for_finish=False, timeout=20, **kw):\n \"\"\"\n Remove this VSS Context from it's parent VSS Container.\n This is required before calling VSSContext.delete(). 
It preps\n the engine for removal.\n \n :param bool wait_for_finish: wait for the task to finish\n :param int timeout: how long to wait if delay\n :type: TaskOperationPoller\n \"\"\"\n return Task.execute(self, 'remove_from_master_node',\n timeout=timeout, wait_for_finish=wait_for_finish, **kw)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a new security group.", "response": "def create(cls, name, isc_id, vss_container, comment=None):\n \"\"\"\n Create a new security group.\n \n Find a security group::\n \n SecurityGroup.objects.filter(name)\n \n .. note:: The isc_id (securitygroup-10, etc) represents the\n internal NSX reference for the Security Composer group.\n This is placed in the comment field which makes it\n a searchable field using search collections\n\n :param str name: name of group\n :param str isc_id: NSX Security Group objectId\n :param str isc_name: NSX Security Group name\n :param VSSContainer vss_container: VSS Container to add the\n security group to.\n :param str comment: comment, making this searchable\n :raises: CreateElementFailed\n :return SecurityGroup\n \"\"\"\n return ElementCreator(cls,\n href=vss_container.get_relation('security_groups'),\n json = {\n 'name': name,\n 'isc_name': name,\n 'isc_id': isc_id,\n 'comment': comment or isc_id\n })"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a new StaticNetlink with the given parameters.", "response": "def create(cls, name, gateway, network, input_speed=None,\n output_speed=None, domain_server_address=None,\n provider_name=None, probe_address=None,\n standby_mode_period=3600, standby_mode_timeout=30,\n active_mode_period=5, active_mode_timeout=1, comment=None):\n \"\"\"\n Create a new StaticNetlink to be used as a traffic handler.\n\n :param str name: name of netlink Element\n :param gateway_ref: gateway to map this netlink to. 
This can be an element\n or str href.\n :type gateway_ref: Router,Engine\n :param list ref: network/s associated with this netlink.\n :type ref: list(str,Element)\n :param int input_speed: input speed in Kbps, used for ratio-based\n load-balancing\n :param int output_speed: output speed in Kbps, used for ratio-based\n load-balancing\n :param list domain_server_address: dns addresses for netlink. Engine\n DNS can override this field\n :type dns_addresses: list(str,Element)\n :param str provider_name: optional name to identify provider for this\n netlink\n :param list probe_address: list of IP addresses to use as probing\n addresses to validate connectivity\n :type probe_ip_address: list(str)\n :param int standby_mode_period: Specifies the probe period when\n standby mode is used (in seconds)\n :param int standby_mode_timeout: probe timeout in seconds\n :param int active_mode_period: Specifies the probe period when active\n mode is used (in seconds)\n :param int active_mode_timeout: probe timeout in seconds\n :raises ElementNotFound: if using type Element parameters that are\n not found.\n :raises CreateElementFailed: failure to create netlink with reason\n :rtype: StaticNetlink\n\n .. 
note:: To monitor the status of the network links, you must define\n at least one probe IP address.\n \"\"\"\n json = {'name': name,\n 'gateway_ref': element_resolver(gateway),\n 'ref': element_resolver(network),\n 'input_speed': input_speed,\n 'output_speed': output_speed,\n 'probe_address': probe_address,\n 'nsp_name': provider_name,\n 'comment': comment,\n 'standby_mode_period': standby_mode_period,\n 'standby_mode_timeout': standby_mode_timeout,\n 'active_mode_period': active_mode_period,\n 'active_mode_timeout': active_mode_timeout}\n\n if domain_server_address:\n r = RankedDNSAddress([])\n r.add(domain_server_address)\n json.update(domain_server_address=r.entries)\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates or create a static netlink.", "response": "def update_or_create(cls, with_status=False, **kwargs):\n \"\"\"\n Update or create static netlink. DNS entry differences are not\n resolved, instead any entries provided will be the final state\n for this netlink. 
If the intent is to add/remove DNS entries\n you can use the :meth:`~domain_server_address` method to add\n or remove.\n \n :raises CreateElementFailed: failed creating element\n :return: element instance by type or 3-tuple if with_status set\n \"\"\"\n dns_address = kwargs.pop('domain_server_address', [])\n element, updated, created = super(StaticNetlink, cls).update_or_create(\n with_status=True, defer_update=True, **kwargs)\n if not created:\n if dns_address:\n new_entries = RankedDNSAddress([])\n new_entries.add(dns_address)\n element.data.update(domain_server_address=new_entries.entries)\n updated = True\n if updated:\n element.update()\n if with_status:\n return element, updated, created\n return element"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(cls, name, input_speed=None, learn_dns_automatically=True,\n output_speed=None, provider_name=None, probe_address=None,\n standby_mode_period=3600, standby_mode_timeout=30,\n active_mode_period=5, active_mode_timeout=1, comment=None):\n \"\"\"\n Create a Dynamic Netlink.\n\n :param str name: name of netlink Element\n :param int input_speed: input speed in Kbps, used for ratio-based\n load-balancing\n :param int output_speed: output speed in Kbps, used for ratio-based\n load-balancing\n :param bool learn_dns_automatically: whether to obtain DNS automatically\n from the DHCP interface\n :param str provider_name: optional name to identify provider for this\n netlink\n :param list probe_address: list of IP addresses to use as probing\n addresses to validate connectivity\n :type probe_ip_address: list(str)\n :param int standby_mode_period: Specifies the probe period when\n standby mode is used (in seconds)\n :param int standby_mode_timeout: probe timeout in seconds\n :param int active_mode_period: Specifies the probe period when active\n mode is used (in seconds)\n :param int active_mode_timeout: probe timeout in seconds\n :raises 
CreateElementFailed: failure to create netlink with reason\n :rtype: DynamicNetlink\n\n .. note:: To monitor the status of the network links, you must define\n at least one probe IP address.\n \"\"\"\n json = {'name': name,\n 'input_speed': input_speed,\n 'output_speed': output_speed,\n 'probe_address': probe_address,\n 'nsp_name': provider_name,\n 'comment': comment,\n 'standby_mode_period': standby_mode_period,\n 'standby_mode_timeout': standby_mode_timeout,\n 'active_mode_period': active_mode_period,\n 'active_mode_timeout': active_mode_timeout,\n 'learn_dns_server_automatically': learn_dns_automatically}\n\n return ElementCreator(cls, json)", "response": "Creates a new Dynamic Netlink element."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, name, multilink_members, multilink_method='rtt', retries=2,\n timeout=3600, comment=None):\n \"\"\"\n Create a new multilink configuration. Multilink requires at least\n one netlink for operation, although 2 or more are recommeneded.\n \n :param str name: name of multilink\n :param list multilink_members: the output of calling\n :func:`.multilink_member` to retrieve the proper formatting for\n this sub element.\n :param str multilink_method: 'rtt' or 'ratio'. 
If ratio is used, each\n netlink must have a probe IP address configured and also have\n input and output speed configured (default: 'rtt')\n :param int retries: number of keep alive retries before a destination\n link is considered unavailable (default: 2)\n :param int timeout: timeout between retries (default: 3600 seconds)\n :param str comment: comment for multilink (optional)\n :raises CreateElementFailed: failure to create multilink\n :rtype: Multilink\n \"\"\"\n json = {'name': name,\n 'comment': comment,\n 'retries': retries,\n 'timeout': timeout,\n 'multilink_member': multilink_members,\n 'multilink_method': multilink_method}\n \n return ElementCreator(cls, json)", "response": "Creates a new multilink configuration."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create_with_netlinks(cls, name, netlinks, **kwargs):\n multilink_members = []\n for member in netlinks:\n m = {'ip_range': member.get('ip_range', '0.0.0.0'),\n 'netlink_role': member.get('netlink_role', 'active')}\n netlink = member.get('netlink')\n m.update(netlink_ref=netlink.href)\n if netlink.typeof == 'netlink':\n m.update(network_ref=netlink.data.get('ref')[0])\n multilink_members.append(m)\n \n return cls.create(name, multilink_members, **kwargs)", "response": "Create a new multilink with a list of netlinks."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, netlink, ip_range=None, netlink_role='active'):\n member_def = dict(\n netlink_ref=netlink.href,\n netlink_role=netlink_role,\n ip_range=ip_range if netlink.typeof == 'netlink' else '0.0.0.0')\n if netlink.typeof == 'netlink': # static netlink vs dynamic netlink\n member_def.update(network_ref=netlink.network[0].href)\n \n return cls(member_def)", "response": "Create a new multilink member in the local cache."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a new 
NIST profile with the specified name and TLS version.", "response": "def create(cls, name, tls_version, use_only_subject_alt_name=False,\n accept_wildcard=False, check_revocation=True, tls_cryptography_suites=None,\n crl_delay=0, ocsp_delay=0, ignore_network_issues=False,\n tls_trusted_ca_ref=None, comment=None):\n \"\"\"\n Create a TLS Profile. By default the SMC will have a default NIST\n TLS Profile but it is also possible to create a custom profile to\n provide special TLS handling.\n \n :param str name: name of TLS Profile\n :param str tls_verison: supported tls verison, valid options are\n TLSv1.1, TLSv1.2, TLSv1.3\n :param bool use_only_subject_alt_name: Use Only Subject Alt Name\n when the TLS identity is a DNS name\n :param bool accept_wildcard: Does server identity check accept wildcards\n :param bool check_revocation: Is certificate revocation checked\n :param str,TLSCryptographySuite tls_cryptography_suites: allowed cryptography\n suites for this profile. Uses NIST profile if not specified\n :param int crl_delay: Delay time (hours) for fetching CRL\n :param int ocsp_delay: Ignore OCSP failure for (hours)\n :param bool ignore_network_issues: Ignore revocation check failures\n due to network issues\n :param list tls_trusted_ca_ref: Trusted Certificate Authorities, empty\n list means trust any\n :param str comment: optional comment\n :raises CreateElementFailed: failed to create element with reason\n :raises ElementNotFound: specified element reference was not found\n :rtype: TLSProfile\n \"\"\"\n json = {\n 'name': name,\n 'tls_version': tls_version,\n 'use_only_subject_alt_name': use_only_subject_alt_name,\n 'accept_wildcard': accept_wildcard,\n 'check_revocation': check_revocation,\n 'tls_cryptography_suites': element_resolver(tls_cryptography_suites) or \\\n element_resolver(TLSCryptographySuite.objects.filter('NIST').first()),\n 'crl_delay': crl_delay,\n 'ocsp_delay': ocsp_delay,\n 'ignore_network_issues': ignore_network_issues,\n 
'tls_trusted_ca_ref': element_resolver(tls_trusted_ca_ref) or [],\n 'comment': comment}\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create(cls, name, certificate):\n json = {'name': name,\n 'certificate': certificate if pem_as_string(certificate) else \\\n load_cert_chain(certificate)[0][1].decode('utf-8')}\n \n return ElementCreator(cls, json)", "response": "Create a TLS CA."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_csr(cls, name, common_name, public_key_algorithm='rsa',\n signature_algorithm='rsa_sha_512', key_length=4096):\n \"\"\"\n Create a certificate signing request. \n \n :param str name: name of TLS Server Credential\n :param str rcommon_name: common name for certificate. An example\n would be: \"CN=CommonName,O=Organization,OU=Unit,C=FR,ST=PACA,L=Nice\".\n At minimum, a \"CN\" is required.\n :param str public_key_algorithm: public key type to use. Valid values\n rsa, dsa, ecdsa.\n :param str signature_algorithm: signature algorithm. Valid values\n dsa_sha_1, dsa_sha_224, dsa_sha_256, rsa_md5, rsa_sha_1, rsa_sha_256,\n rsa_sha_384, rsa_sha_512, ecdsa_sha_1, ecdsa_sha_256, ecdsa_sha_384,\n ecdsa_sha_512. (Default: rsa_sha_512)\n :param int key_length: length of key. Key length depends on the key\n type. For example, RSA keys can be 1024, 2048, 3072, 4096. 
See SMC\n documentation for more details.\n :raises CreateElementFailed: failed to create CSR\n :rtype: TLSServerCredential\n \"\"\"\n json = {\n 'name': name,\n 'info': common_name,\n 'public_key_algorithm': public_key_algorithm,\n 'signature_algorithm': signature_algorithm,\n 'key_length': key_length,\n 'certificate_state': 'initial'\n }\n return ElementCreator(cls, json)", "response": "Creates a new CSR for the specified key."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a self signed certificate.", "response": "def create_self_signed(cls, name, common_name, public_key_algorithm='rsa',\n signature_algorithm='rsa_sha_512', key_length=4096):\n \"\"\"\n Create a self signed certificate. This is a convenience method that\n first calls :meth:`~create_csr`, then calls :meth:`~self_sign` on the\n returned TLSServerCredential object.\n \n :param str name: name of TLS Server Credential\n :param str rcommon_name: common name for certificate. An example\n would be: \"CN=CommonName,O=Organization,OU=Unit,C=FR,ST=PACA,L=Nice\".\n At minimum, a \"CN\" is required.\n :param str public_key_algorithm: public key type to use. Valid values\n rsa, dsa, ecdsa.\n :param str signature_algorithm: signature algorithm. Valid values\n dsa_sha_1, dsa_sha_224, dsa_sha_256, rsa_md5, rsa_sha_1, rsa_sha_256,\n rsa_sha_384, rsa_sha_512, ecdsa_sha_1, ecdsa_sha_256, ecdsa_sha_384,\n ecdsa_sha_512. (Default: rsa_sha_512)\n :param int key_length: length of key. Key length depends on the key\n type. For example, RSA keys can be 1024, 2048, 3072, 4096. 
See SMC\n documentation for more details.\n :raises CreateElementFailed: failed to create CSR\n :raises ActionCommandFailed: Failure to self sign the certificate\n :rtype: TLSServerCredential\n \"\"\"\n tls = TLSServerCredential.create_csr(name=name, common_name=common_name,\n public_key_algorithm=public_key_algorithm, signature_algorithm=signature_algorithm,\n key_length=key_length)\n try:\n tls.self_sign()\n except ActionCommandFailed:\n tls.delete()\n raise\n return tls"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef import_signed(cls, name, certificate, private_key, intermediate=None):\n tls = TLSServerCredential.create(name)\n try:\n tls.import_certificate(certificate)\n tls.import_private_key(private_key)\n if intermediate is not None:\n tls.import_intermediate_certificate(intermediate)\n except CertificateImportError:\n tls.delete()\n raise\n return tls", "response": "Imports a signed certificate and private key to SMC and optionally a intermediate certificate."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef import_from_chain(cls, name, certificate_file, private_key=None):\n contents = load_cert_chain(certificate_file)\n for pem in list(contents):\n if b'PRIVATE KEY' in pem[0]:\n private_key = pem[1]\n contents.remove(pem)\n \n if not private_key:\n raise ValueError('Private key was not found in chain file and '\n 'was not provided. The private key is required to create a '\n 'TLS Server Credential.')\n\n if contents:\n if len(contents) == 1:\n certificate = contents[0][1]\n intermediate = None\n else:\n certificate = contents[0][1]\n intermediate = contents[1][1]\n else:\n raise ValueError('No certificates found in certificate chain file. 
Did you '\n 'provide only a private key?')\n \n tls = TLSServerCredential.create(name)\n try:\n tls.import_certificate(certificate)\n tls.import_private_key(private_key)\n if intermediate is not None:\n tls.import_intermediate_certificate(intermediate)\n except CertificateImportError:\n tls.delete()\n raise\n return tls", "response": "Imports the server certificate and intermediate and optionally private key from a certificate chain file."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nimports a signed certificate and private key as a client protection CA.", "response": "def import_signed(cls, name, certificate, private_key):\n \"\"\"\n Import a signed certificate and private key as a client protection CA.\n \n This is a shortcut method to the 3 step process:\n \n * Create CA with name\n * Import certificate\n * Import private key\n \n Create the CA::\n \n ClientProtectionCA.import_signed(\n name='myclientca',\n certificate_file='/pathto/server.crt'\n private_key_file='/pathto/server.key')\n \n :param str name: name of client protection CA \n :param str certificate_file: fully qualified path or string of certificate\n :param str private_key_file: fully qualified path or string of private key\n :raises CertificateImportError: failure during import\n :raises IOError: failure to find certificate files specified\n :rtype: ClientProtectionCA\n \"\"\"\n ca = ClientProtectionCA.create(name=name)\n try:\n ca.import_certificate(certificate)\n ca.import_private_key(private_key)\n except CertificateImportError:\n ca.delete()\n raise\n return ca"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a new self signed client protection CA.", "response": "def create_self_signed(cls, name, prefix, password, public_key_algorithm='rsa',\n life_time=365, key_length=2048):\n \"\"\"\n Create a self signed client protection CA. 
To prevent browser warnings during\n decryption, you must trust the signing certificate in the client browsers.\n \n :param str name: Name of this ex: \"SG Root CA\" Used as Key.\n Real common name will be derivated at creation time with a uniqueId.\n :param str prefix: prefix used for derivating file names\n :param str password: password for private key\n :param public_key_algorithm: public key algorithm, either rsa, dsa or ecdsa\n :param str,int life_time: lifetime in days for CA\n :param int key_length: length in bits, either 1024 or 2048\n :raises CreateElementFailed: creating element failed\n :raises ActionCommandFailed: failed to self sign the certificate\n :rtype: ClientProtectionCA\n \"\"\"\n json = {'key_name': name,\n 'prefix': prefix,\n 'password': password,\n 'life_time': life_time,\n 'key_size': key_length,\n 'algorithm': public_key_algorithm}\n \n tls = ClientProtectionCA.create(name)\n try:\n tls.make_request(\n method='create',\n json=json,\n resource='generate_self_signed_cert')\n except ActionCommandFailed:\n tls.delete()\n raise\n return tls"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef update_format(self, format):\n self.format = format\n self.request.update(format=self.format.data)", "response": "Updates the format for this query. 
\n "} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_in_filter(self, *values):\n filt = InFilter(*values)\n self.update_filter(filt)\n return filt", "response": "Adds an IN filter to the list of filters that will be used to find a match for the given values."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_and_filter(self, *values):\n filt = AndFilter(*values)\n self.update_filter(filt)\n return filt", "response": "Adds an AND filter to the filter list."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_or_filter(self, *values):\n filt = OrFilter(*values)\n self.update_filter(filt)\n return filt", "response": "Adds an OR filter to the list of values."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_not_filter(self, *value):\n filt = NotFilter(*value)\n self.update_filter(filt)\n return filt", "response": "Adds a NOT filter to the list of filters. 
Typically this filter is used in the case of a list of values that are not present in the list of entries."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_defined_filter(self, *value):\n filt = DefinedFilter(*value)\n self.update_filter(filt)\n return filt", "response": "Adds a DefinedFilter expression to the query."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef resolve_field_ids(ids, **kw):\n request = {\n 'fetch': {'quantity': 0},\n 'format': {\n 'type': 'detailed',\n 'field_ids': ids},\n 'query': {}\n }\n query = Query(**kw)\n query.location = '/monitoring/log/socket'\n query.request = request\n for fields in query.execute():\n if 'fields' in fields:\n return fields['fields']\n return []", "response": "This function returns the log field details based on the LogField constant IDs."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _get_field_schema(self):\n self.update_format(DetailedFormat())\n for fields in self.execute():\n if 'fields' in fields:\n return fields['fields']", "response": "Get a list of all of the default fields for this query type."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nexecuting the query with optional timeout.", "response": "def execute(self):\n \"\"\"\n Execute the query with optional timeout. The response to the execute\n query is the raw payload received from the websocket and will contain\n multiple dict keys and values. It is more common to call query.fetch_XXX\n which will filter the return result based on the method. Each result set\n will have a max batch size of 200 records. This method will also\n continuously return results until terminated. 
To make a single bounded\n fetch, call :meth:`.fetch_batch` or :meth:`.fetch_raw`.\n \n :param int sock_timeout: event loop interval\n :return: raw dict returned from query\n :rtype: dict(list)\n \"\"\"\n with SMCSocketProtocol(self, **self.sockopt) as protocol:\n for result in protocol.receive():\n yield result"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nfetching the records for this query. This method will return the records for the current query in raw dict format.", "response": "def fetch_raw(self, **kw):\n \"\"\"\n Fetch the records for this query. This fetch type will return the\n results in raw dict format. It is possible to limit the number of\n receives on the socket that return results before exiting by providing\n max_recv.\n \n This fetch should be used if you want to return only the result records\n returned from the query in raw dict format. Any other dict key/values\n from the raw query are ignored.\n \n :param int max_recv: max number of socket receive calls before\n returning from this query. If you want to wait longer for\n results before returning, increase max_iterations (default: 0)\n :return: list of query results\n :rtype: list(dict)\n \"\"\"\n self.sockopt.update(kw)\n with SMCSocketProtocol(self, **self.sockopt) as protocol:\n for result in protocol.receive():\n if 'records' in result and result['records'].get('added'):\n yield result['records']['added']"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfetch and return in the specified format.", "response": "def fetch_batch(self, formatter=TableFormat, **kw):\n \"\"\"\n Fetch and return in the specified format. Output format is a formatter\n class in :py:mod:`smc_monitoring.models.formatters`. This fetch type will\n be a single shot fetch unless providing max_recv keyword with a value\n greater than the default of 1. Keyword arguments available are kw in\n :meth:`.fetch_raw`. 
\n \n :param formatter: Formatter type for data representation. Any type\n in :py:mod:`smc_monitoring.models.formatters`.\n :return: generator returning data in specified format\n \n .. note:: You can provide your own formatter class,\n see :py:mod:`smc_monitoring.models.formatters` for more info.\n \"\"\"\n fmt = formatter(self)\n if 'max_recv' not in kw:\n kw.update(max_recv=1)\n for result in self.fetch_raw(**kw):\n yield fmt.formatted(result)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef fetch_live(self, formatter=TableFormat):\n fmt = formatter(self)\n for results in self.execute():\n if 'records' in results and results['records'].get('added'):\n yield fmt.formatted(results['records']['added'])", "response": "Fetch a live stream query. This is the equivalent of selecting\n the Play option for monitoring fields within the SMC UI."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef fetch_meta_by_name(name, filter_context=None, exact_match=True):\n result = SMCRequest(\n params={'filter': name,\n 'filter_context': filter_context,\n 'exact_match': exact_match}).read()\n \n if not result.json:\n result.json = []\n return result", "response": "This function is used to fetch the metadata of a specific element by name."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds a blacklist entry to all engines that support the source and destination entries.", "response": "def blacklist(self, src, dst, duration=3600, **kw):\n \"\"\"\n Add blacklist to all defined engines.\n Use the cidr netmask at the end of src and dst, such as:\n 1.1.1.1/32, etc.\n\n :param src: source of the entry\n :param dst: destination of blacklist entry\n :raises ActionCommandFailed: blacklist apply failed with reason\n :return: None\n\n .. seealso:: :class:`smc.core.engine.Engine.blacklist`. 
Applying\n a blacklist at the system level will be a global blacklist entry\n versus an engine specific entry.\n \n .. note:: If more advanced blacklist is required using source/destination\n ports and protocols (udp/tcp), use kw to provide these arguments. See\n :py:func:`smc.elements.other.prepare_blacklist` for more details.\n \"\"\"\n self.make_request(\n method='create',\n resource='blacklist',\n json=prepare_blacklist(src, dst, duration, **kw))"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ninstall a new license.", "response": "def license_install(self, license_file):\n \"\"\"\n Install a new license.\n \n :param str license_file: fully qualified path to the\n license jar file.\n :raises: ActionCommandFailed\n :return: None\n \"\"\"\n self.make_request(\n method='update',\n resource='license_install',\n files={\n 'license_file': open(license_file, 'rb')\n })"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef visible_security_group_mapping(self, filter=None): # @ReservedAssignment\n if 'visible_security_group_mapping' not in self.data.links:\n raise ResourceNotFound('This entry point is only supported on SMC >= 6.5')\n \n return self.make_request(resource='visible_security_group_mapping',\n params={'filter': filter})", "response": "Returns a dictionary containing the visible security groups assigned to VSS container types."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning all references to element specified.", "response": "def references_by_element(self, element_href):\n \"\"\"\n Return all references to element specified.\n\n :param str element_href: element reference\n :return: list of references where element is used\n :rtype: list(dict)\n \"\"\"\n result = self.make_request(\n method='create',\n resource='references_by_element',\n json={\n 'value': element_href})\n return result"} {"SOURCE": "codesearchnet", 
"instruction": "How would you explain what the following Python 3 function does\ndef export_elements(self, filename='export_elements.zip', typeof='all'):\n valid_types = ['all', 'nw', 'ips', 'sv', 'rb', 'al', 'vpn']\n if typeof not in valid_types:\n typeof = 'all'\n \n return Task.download(self, 'export_elements', filename,\n params={'recursive': True, 'type': typeof})", "response": "Export elements from SMC."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef import_elements(self, import_file):\n self.make_request(\n method='create',\n resource='import_elements',\n files={\n 'import_file': open(import_file, 'rb')\n })", "response": "Import elements into SMC."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates this service to use the specified protocol agent.", "response": "def update_protocol_agent(self, protocol_agent):\n \"\"\"\n Update this service to use the specified protocol agent.\n After adding the protocol agent to the service you must call\n `update` on the element to commit.\n \n :param str,ProtocolAgent protocol_agent: protocol agent element or href\n :return: None\n \"\"\"\n if not protocol_agent:\n for pa in ('paValues', 'protocol_agent_ref'):\n self.data.pop(pa, None)\n else:\n self.data.update(protocol_agent_ref=element_resolver(protocol_agent))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _get_param_values(self, name):\n for param in self.data.get('paParameters', []):\n for _pa_parameter, values in param.items():\n if values.get('name') == name:\n return values", "response": "Return the values of a parameter by name as stored on the protocol\n agent payload."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef parameters(self):\n return [type('ProtocolParameter', (SubElement,), {\n 'data': 
ElementCache(data=self._get_param_values(param.get('name')))}\n )(**param)\n for param in self.make_request(resource='pa_parameters')]", "response": "Returns a list of ProtocolParameter s for each protocol agent in the service."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the protocol parameter that is defined from the base protocol agent.", "response": "def protocol_parameter(self):\n \"\"\"\n The protocol parameter defined from the base protocol agent. This is a\n read-only element that provides some additional context to the parameter\n setting.\n \n :rtype: ProtocolParameter\n \"\"\"\n for parameter in self.protocol_agent.parameters:\n if parameter.href in self.protocol_agent_values.get('parameter_ref', []):\n return parameter"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _update(self, **kwargs):\n if 'value' in kwargs and self.protocol_agent_values.get('value') != \\\n kwargs.get('value'):\n self.protocol_agent_values.update(value=kwargs['value'])\n return True\n return False", "response": "Update the mutable field value."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the ProxyServer element referenced in this protocol parameter or None if no proxy server element is assigned.", "response": "def proxy_server(self):\n \"\"\"\n The ProxyServer element referenced in this protocol parameter, if any.\n \n :return: The proxy server element or None if one is not assigned\n :rtype: ProxyServer\n \"\"\"\n # TODO: Workaround for SMC 6.4.3 which only provides the inspected service\n # reference. 
We need to find the Proxy Server from this ref.\n if self._inspected_service_ref:\n for proxy in ProxyServer.objects.all():\n if self._inspected_service_ref.startswith(proxy.href):\n return proxy"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef update(self, name, **kwargs):\n for value in self.items:\n if value.name == name:\n return value._update(**kwargs)\n return False", "response": "Update the protocol agent parameters based on the parameter name."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds an access list to the match condition.", "response": "def add_access_list(self, accesslist, rank=None):\n \"\"\"\n Add an access list to the match condition. Valid\n access list types are IPAccessList (v4 and v6),\n IPPrefixList (v4 and v6), AS Path, CommunityAccessList,\n ExtendedCommunityAccessList.\n \"\"\"\n self.conditions.append(\n dict(access_list_ref=accesslist.href,\n type='element',\n rank=rank))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_metric(self, value, rank=None):\n self.conditions.append(\n dict(metric=value, type='metric', rank=rank))", "response": "Adds a metric to this match condition\n \n "} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_next_hop(self, access_or_prefix_list, rank=None):\n self.conditions.append(dict(\n next_hop_ref=access_or_prefix_list.href,\n type='next_hop',\n rank=rank))", "response": "Adds a next hop condition."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding a peer address to the set of conditions.", "response": "def add_peer_address(self, ext_bgp_peer_or_fw, rank=None):\n \"\"\"\n Add a peer address. 
Peer address types are ExternalBGPPeer\n or Engine.\n \n :raises ElementNotFound: If element specified does not exist\n \"\"\"\n if ext_bgp_peer_or_fw.typeof == 'external_bgp_peer':\n ref = 'external_bgp_peer_address_ref'\n else: # engine\n ref = 'fwcluster_peer_address_ref'\n self.conditions.append(\n {ref: ext_bgp_peer_or_fw.href,\n 'rank': rank,\n 'type': 'peer_address'})"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef remove_condition(self, rank):\n self.conditions[:] = [r for r in self.conditions\n if r.get('rank') != rank]", "response": "Removes a condition element using rank."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(self, name, action='permit', goto=None, finish=False,\n call=None, comment=None, add_pos=None, after=None,\n before=None, **match_condition):\n \"\"\"\n Create a route map rule. You can provide match conditions\n by using keyword arguments specifying the required types.\n You can also create the route map rule and add match conditions\n after.\n \n :param str name: name for this rule\n :param str action: permit or deny\n :param str goto: specify a rule section to goto after if there\n is a match condition. This will override the finish parameter\n :param bool finish: finish stops the processing after a match condition.\n If finish is False, processing will continue to the next rule.\n :param RouteMap call: call another route map after matching. \n :param str comment: optional comment for the rule\n :param int add_pos: position to insert the rule, starting with position 1. If\n the position value is greater than the number of rules, the rule is inserted at\n the bottom. If add_pos is not provided, rule is inserted in position 1. Mutually\n exclusive with ``after`` and ``before`` params.\n :param str after: Rule tag to add this rule after. 
Mutually exclusive with ``add_pos``\n and ``before`` params.\n :param str before: Rule tag to add this rule before. Mutually exclusive with ``add_pos``\n and ``after`` params.\n :param match_condition: keyword values identifying initial\n values for the match condition. Valid keyword arguments\n are 'access_list', 'next_hop', 'metric' and 'peer_address'.\n You can also optionally pass the keyword 'match_condition'\n with an instance of MatchCondition.\n :raises CreateRuleFailed: failure to insert rule with reason\n :raises ElementNotFound: if references elements in a match condition\n this can be raised when the element specified is not found.\n \n .. seealso:: :class:`~MatchCondition` for valid elements and\n expected values for each type.\n \"\"\"\n json = {'name': name,\n 'action': action,\n 'finish': finish,\n 'goto': goto.href if goto else None,\n 'call_route_map_ref': None if not call else call.href,\n 'comment': comment}\n \n if not match_condition:\n json.update(match_condition=[])\n else:\n if 'match_condition' in match_condition:\n conditions = match_condition.pop('match_condition')\n else:\n conditions = MatchCondition()\n if 'peer_address' in match_condition:\n conditions.add_peer_address(\n match_condition.pop('peer_address'))\n if 'next_hop' in match_condition:\n conditions.add_next_hop(\n match_condition.pop('next_hop'))\n if 'metric' in match_condition:\n conditions.add_metric(\n match_condition.pop('metric'))\n if 'access_list' in match_condition:\n conditions.add_access_list(\n match_condition.pop('access_list'))\n \n json.update(match_condition=conditions.conditions)\n \n params = None\n href = self.href \n if add_pos is not None: \n href = self.add_at_position(add_pos) \n elif before or after: \n params = self.add_before_after(before, after)\n \n return ElementCreator(\n self.__class__,\n exception=CreateRuleFailed, \n href=href,\n params=params, \n json=json)", "response": "Creates a new route map rule."} {"SOURCE": "codesearchnet", 
"instruction": "Can you generate the documentation for the following Python 3 function\ndef search_rule(self, search):\n return [RouteMapRule(**rule) for rule in self.make_request( \n resource='search_rule', params={'filter': search})]", "response": "Search the RouteMap policy using a search string"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates a VPNCertificateCA object for signing external gateway VPN certificates.", "response": "def create(cls, name, certificate):\n \"\"\"\n Create a new external VPN CA for signing internal gateway\n certificates.\n \n :param str name: Name of VPN CA\n :param str certificate: file name, path or certificate string.\n :raises CreateElementFailed: Failed creating cert with reason\n :rtype: VPNCertificateCA\n \"\"\"\n json = {'name': name,\n 'certificate': certificate}\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating the TCP service object with the given properties.", "response": "def create(cls, name, min_dst_port, max_dst_port=None, min_src_port=None,\n max_src_port=None, protocol_agent=None, comment=None):\n \"\"\"\n Create the TCP service\n\n :param str name: name of tcp service\n :param int min_dst_port: minimum destination port value\n :param int max_dst_port: maximum destination port value\n :param int min_src_port: minimum source port value\n :param int max_src_port: maximum source port value\n :param str,ProtocolAgent protocol_agent: optional protocol agent for\n this service\n :param str comment: optional comment for service\n :raises CreateElementFailed: failure creating element with reason\n :return: instance with meta\n :rtype: TCPService\n \"\"\"\n max_dst_port = max_dst_port if max_dst_port is not None else ''\n json = {'name': name,\n 'min_dst_port': min_dst_port,\n 'max_dst_port': max_dst_port,\n 'min_src_port': min_src_port,\n 'max_src_port': max_src_port,\n 'protocol_agent_ref': 
element_resolver(protocol_agent) or None,\n 'comment': comment}\n\n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create(cls, name, protocol_number, protocol_agent=None, comment=None):\n json = {'name': name,\n 'protocol_number': protocol_number,\n 'protocol_agent_ref': element_resolver(protocol_agent) or None,\n 'comment': comment}\n\n return ElementCreator(cls, json)", "response": "Create the IP Service object with the given name protocol_number and comment."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate an ethernet service with the specified name", "response": "def create(cls, name, frame_type='eth2', value1=None, comment=None):\n \"\"\"\n Create an ethernet service\n\n :param str name: name of service\n :param str frame_type: ethernet frame type, eth2\n :param str value1: hex code representing ethertype field\n :param str comment: optional comment\n :raises CreateElementFailed: failure creating element with reason\n :return: instance with meta\n :rtype: EthernetService\n \"\"\"\n json = {'frame_type': frame_type,\n 'name': name,\n 'value1': int(value1, 16),\n 'comment': comment}\n\n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nprovide a custom range for the search query. Start time and end time are expected to be naive ``datetime`` objects converted to milliseconds. When submitting the query, it is strongly recommended to set the timezone matching the local client making the query. 
Example of finding all records on 9/2/2017 from 06:25:30 to 06:26:30 in the local time zone CST:: dt_start = datetime(2017, 9, 2, 6, 25, 30, 0) dt_end = datetime(2017, 9, 2, 6, 26, 30, 0) query = LogQuery() query.format.timezone('CST') query.time_range.custom_range( datetime_to_ms(dt_start), datetime_to_ms(dt_end)) for record in query.fetch_batch(): print(record) Last two minutes from current (py2):: now = datetime.now() start_time = int((now - timedelta(minutes=2)).strftime('%s'))*1000 Specific start time (py2):: p2time = datetime.strptime(\"1.8.2017 08:26:42,76\", \"%d.%m.%Y %H:%M:%S,%f\").strftime('%s') p2time = int(s)*1000 Specific start time (py3):: p3time = datetime.strptime(\"1.8.2017 08:40:42,76\", \"%d.%m.%Y %H:%M:%S,%f\") p3time.timestamp() * 1000 :param int start_time: search start time in milliseconds. Start time represents the oldest timestamp. :param int end_time: search end time in milliseconds. End time represents the newest timestamp.", "response": "def custom_range(self, start_time, end_time=None):\n \"\"\"\n Provide a custom range for the search query. Start time and end\n time are expected to be naive ``datetime`` objects converted to \n milliseconds. 
When submitting the query, it is strongly recommended\n to set the timezone matching the local client making the query.\n \n Example of finding all records on 9/2/2017 from 06:25:30 to 06:26:30\n in the local time zone CST::\n \n dt_start = datetime(2017, 9, 2, 6, 25, 30, 0)\n dt_end = datetime(2017, 9, 2, 6, 26, 30, 0)\n \n query = LogQuery()\n query.format.timezone('CST')\n query.time_range.custom_range(\n datetime_to_ms(dt_start),\n datetime_to_ms(dt_end))\n \n for record in query.fetch_batch():\n print(record)\n \n Last two minutes from current (py2)::\n \n now = datetime.now()\n start_time = int((now - timedelta(minutes=2)).strftime('%s'))*1000\n \n Specific start time (py2)::\n \n p2time = datetime.strptime(\"1.8.2017 08:26:42,76\", \"%d.%m.%Y %H:%M:%S,%f\").strftime('%s')\n p2time = int(s)*1000\n \n Specific start time (py3)::\n \n p3time = datetime.strptime(\"1.8.2017 08:40:42,76\", \"%d.%m.%Y %H:%M:%S,%f\")\n p3time.timestamp() * 1000\n \n :param int start_time: search start time in milliseconds. Start time\n represents the oldest timestamp.\n :param int end_time: search end time in milliseconds. End time\n represents the newest timestamp.\n \"\"\"\n if end_time is None:\n end_time = current_millis()\n \n self.data.update(\n start_ms=start_time,\n end_ms=end_time)\n return self"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns True if the certificate is a PEM - encoded string.", "response": "def pem_as_string(cert):\n \"\"\"\n Only return False if the certificate is a file path. 
Otherwise it\n is a file object or raw string and will need to be fed to the\n file open context.\n \"\"\"\n if hasattr(cert, 'read'): # File object - return as is\n return cert\n cert = cert.encode('utf-8') if isinstance(cert, unicode) else cert\n if re.match(_PEM_RE, cert):\n return True\n return False"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nload the certificates from the chain file.", "response": "def load_cert_chain(chain_file):\n \"\"\" \n Load the certificates from the chain file.\n \n :raises IOError: Failure to read specified file\n :raises ValueError: Format issues with chain file or missing entries\n :return: list of cert type matches\n \"\"\"\n if hasattr(chain_file, 'read'):\n cert_chain = chain_file.read() \n else:\n with open(chain_file, 'rb') as f:\n cert_chain = f.read() \n \n if not cert_chain:\n raise ValueError('Certificate chain file is empty!')\n\n cert_type_matches = []\n for match in _PEM_RE.finditer(cert_chain):\n cert_type_matches.append((match.group(1), match.group(0)))\n \n if not cert_type_matches:\n raise ValueError('No certificate types were found. Valid types '\n 'are: {}'.format(CERT_TYPES))\n\n return cert_type_matches"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nimport a valid certificate.", "response": "def import_certificate(self, certificate):\n \"\"\"\n Import a valid certificate. Certificate can be either a file path\n or a string of the certificate. 
If string certificate, it must include\n the -----BEGIN CERTIFICATE----- string.\n \n :param str certificate_file: fully qualified path to certificate file\n :raises CertificateImportError: failure to import cert with reason\n :raises IOError: file not found, permissions, etc.\n :return: None\n \"\"\"\n multi_part = 'signed_certificate' if self.typeof == 'tls_server_credentials'\\\n else 'certificate'\n self.make_request(\n CertificateImportError,\n method='create',\n resource='certificate_import',\n headers = {'content-type': 'multipart/form-data'}, \n files={ \n multi_part: open(certificate, 'rb') if not \\\n pem_as_string(certificate) else certificate\n })"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nimporting a valid certificate.", "response": "def import_intermediate_certificate(self, certificate):\n \"\"\"\n Import a valid certificate. Certificate can be either a file path\n or a string of the certificate. If string certificate, it must include\n the -----BEGIN CERTIFICATE----- string.\n \n :param str certificate: fully qualified path or string \n :raises CertificateImportError: failure to import cert with reason\n :raises IOError: file not found, permissions, etc.\n :return: None\n \"\"\"\n self.make_request(\n CertificateImportError,\n method='create',\n resource='intermediate_certificate_import',\n headers = {'content-type': 'multipart/form-data'}, \n files={ \n 'signed_certificate': open(certificate, 'rb') if not \\\n pem_as_string(certificate) else certificate\n })"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef export_intermediate_certificate(self, filename=None):\n result = self.make_request(\n CertificateExportError,\n raw_result=True,\n resource='intermediate_certificate_export')\n \n if filename is not None:\n save_to_file(filename, result.content)\n return\n \n return result.content", "response": "Exports the intermediate certificate."} {"SOURCE": "codesearchnet", 
"instruction": "How would you code a function in Python 3 to\nimport a private key.", "response": "def import_private_key(self, private_key):\n \"\"\"\n Import a private key. The private key can be a path to a file\n or the key in string format. If in string format, the key must\n start with -----BEGIN. Key types supported are PRIVATE RSA KEY\n and PRIVATE KEY.\n \n :param str private_key: fully qualified path to private key file\n :raises CertificateImportError: failure to import cert with reason\n :raises IOError: file not found, permissions, etc.\n :return: None\n \"\"\"\n self.make_request(\n CertificateImportError,\n method='create',\n resource='private_key_import',\n headers = {'content-type': 'multipart/form-data'}, \n files={ \n 'private_key': open(private_key, 'rb') if not \\\n pem_as_string(private_key) else private_key\n })"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget the resource by name of rel name", "response": "def get(self, rel):\n \"\"\"\n Get the resource by rel name\n \n :param str rel_name: name of rel\n :raises UnsupportedEntryPoint: entry point not found in this version\n of the API\n \"\"\"\n for link in self._entry_points:\n if link.get('rel') == rel:\n return link.get('href')\n raise UnsupportedEntryPoint(\n \"The specified entry point '{}' was not found in this \"\n \"version of the SMC API. Check the element documentation \"\n \"to determine the correct version and specify the api_version \"\n \"parameter during session.login() if necessary.\".format(rel))"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncalls when the connection is made.", "response": "def on_open(self):\n \"\"\"\n Once the connection is made, start the query off and\n start an event loop to wait for a signal to\n stop. 
Results are yielded within receive().\n \"\"\"\n def event_loop():\n logger.debug(pformat(self.query.request))\n self.send(json.dumps(self.query.request))\n while not self.event.is_set():\n #print('Waiting around on the socket: %s' % self.gettimeout())\n self.event.wait(self.gettimeout())\n \n logger.debug('Event loop terminating.')\n \n self.thread = threading.Thread(\n target=event_loop)\n self.thread.setDaemon(True)\n self.thread.start()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef send_message(self, message):\n if self.connected:\n self.send(\n json.dumps(message.request))", "response": "Send a message down the socket."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef LoadElement(href, only_etag=False):\n request = SMCRequest(href=href)\n request.exception = FetchElementFailed\n result = request.read()\n if only_etag:\n return result.etag\n return ElementCache(\n result.json, etag=result.etag)", "response": "Load an element from a resource."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate an element of type cls with meta data.", "response": "def ElementCreator(cls, json, **kwargs):\n \"\"\"\n Helper method for creating elements. If the created element type is\n a SubElement class type, provide href value as kwargs since that class\n type does not have a direct entry point. This is a lazy load that will\n provide only the meta for the element. 
Additional attribute access\n will load the full data.\n\n :param Element,SubElement cls: class for creating\n :param dict json: json payload\n :param SMCException exception: exception class to override\n :return: instance of type Element with meta\n :rtype: Element\n \"\"\"\n if 'exception' not in kwargs:\n kwargs.update(exception=CreateElementFailed)\n href = kwargs.pop('href') if 'href' in kwargs else cls.href\n \n result = SMCRequest(\n href=href,\n json=json,\n **kwargs).create()\n \n element = cls(\n name=json.get('name'),\n type=cls.typeof,\n href=result.href)\n \n if result.user_session.in_atomic_block:\n result.user_session.transactions.append(element)\n return element"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef ElementFactory(href, smcresult=None, raise_exc=None):\n if smcresult is None:\n smcresult = SMCRequest(href=href).read()\n if smcresult.json:\n cache = ElementCache(smcresult.json, etag=smcresult.etag)\n typeof = lookup_class(cache.type)\n instance = typeof(\n name=cache.get('name'),\n href=href,\n type=cache.type)\n instance.data = cache\n return instance\n if raise_exc and smcresult.msg:\n raise raise_exc(smcresult.msg)", "response": "Factory function for creating an object of type Element when only\n is provided."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the ETag of the resource.", "response": "def etag(self, href):\n \"\"\"\n ETag can be None if a subset of element json is using\n this container, such as the case with Routing.\n \"\"\"\n if self and self._etag is None:\n self._etag = LoadElement(href, only_etag=True)\n return self._etag"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_link(self, rel):\n if rel in self.links:\n return self.links[rel]\n raise ResourceNotFound('Resource requested: %r is not available '\n 'on this element.' 
% rel)", "response": "Return the link for the specified resource."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get(cls, name, raise_exc=True):\n element = cls.objects.filter(name, exact_match=True).first() if \\\n name is not None else None\n if not element and raise_exc:\n raise ElementNotFound('Cannot find specified element: %s, type: '\n '%s' % (name, cls.__name__))\n return element", "response": "Get the element by name. Does an exact match by element type."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update_or_create(cls, filter_key=None, with_status=False, **kwargs):\n updated = False\n # Providing this flag will return before updating and require the calling\n # class to call update if changes were made.\n defer_update = kwargs.pop('defer_update', False)\n if defer_update:\n with_status = True\n \n element, created = cls.get_or_create(filter_key=filter_key, with_status=True, **kwargs)\n if not created:\n for key, value in kwargs.items():\n # Callable, Element or string\n if callable(value):\n value = value()\n elif isinstance(value, Element):\n value = value.href\n # Retrieve the 'type' of instance attribute. This is used to\n # serialize attributes that resolve href's to elements. 
It\n # provides a common structure but also prevents the fetching\n # of the href to element when doing an equality comparison\n attr_type = getattr(type(element), key, None)\n if isinstance(attr_type, ElementRef):\n attr_name = getattr(attr_type, 'attr', None)\n if element.data.get(attr_name) != value:\n element.data[attr_name] = value\n updated = True\n continue\n elif isinstance(attr_type, ElementList):\n value_hrefs = element_resolver(value) # Resolve the elements to href\n attr_name = getattr(attr_type, 'attr', None)\n if set(element.data.get(attr_name, [])) != set(value_hrefs):\n element.data[attr_name] = value_hrefs\n updated = True\n continue\n \n # Type is not 'special', therefore we are expecting only strings,\n # integer types or list of strings. Complex data structures\n # will be handled later through encapsulation and __eq__, __hash__\n # for comparison. The keys value type here is going to assume the\n # provided value is of the right type as the key may not necessarily\n # exist in the cached json.\n if isinstance(value, (string_types, int)): # also covers bool\n val = getattr(element, key, None)\n if val != value:\n element.data[key] = value\n updated = True\n elif isinstance(value, list) and all(isinstance(s, string_types) for s in value):\n # List of simple strings (assuming the attribute is also!)\n if set(value) ^ set(element.data.get(key, [])):\n element.data[key] = value\n updated = True\n # Complex lists, objects, etc will fall down here and be skipped.\n # To process these, provide defer_update=True, override update_or_create,\n # process the complex object deltas and call update()\n \n if updated and not defer_update:\n element.update()\n \n if with_status:\n return element, updated, created\n return element", "response": "Update or create an existing element in the specified class."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_category(self, category):\n assert 
isinstance(category, list), 'Category input was expecting list.'\n from smc.elements.other import Category\n for tag in category:\n category = Category(tag)\n try:\n category.add_element(self.href)\n except ElementNotFound:\n Category.create(name=tag)\n category.add_element(self.href)", "response": "Add a category to the element."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nexport this element. Usage:: engine = Engine('myfirewall') extask = engine.export(filename='fooexport.zip') while not extask.done(): extask.wait(3) print(\"Finished download task: %s\" % extask.message()) print(\"File downloaded to: %s\" % extask.filename) :param str filename: filename to store exported element :raises TaskRunFailed: invalid permissions, invalid directory, or this element is a system element and cannot be exported. :return: DownloadTask .. note:: It is not possible to export system elements", "response": "def export(self, filename='element.zip'):\n \"\"\"\n Export this element.\n\n Usage::\n\n engine = Engine('myfirewall')\n extask = engine.export(filename='fooexport.zip')\n while not extask.done():\n extask.wait(3)\n print(\"Finished download task: %s\" % extask.message())\n print(\"File downloaded to: %s\" % extask.filename)\n\n :param str filename: filename to store exported element\n :raises TaskRunFailed: invalid permissions, invalid directory, or this\n element is a system element and cannot be exported.\n :return: DownloadTask\n \n .. note:: It is not possible to export system elements\n \"\"\"\n from smc.administration.tasks import Task\n return Task.download(self, 'export', filename)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nshow all references for this element.", "response": "def referenced_by(self):\n \"\"\"\n Show all references for this element. 
A reference means that this\n element is being used, for example, in a policy rule, as a member of\n a group, etc.\n\n :return: list referenced elements\n :rtype: list(Element)\n \"\"\"\n href = fetch_entry_point('references_by_element')\n return [Element.from_meta(**ref)\n for ref in self.make_request(\n method='create',\n href=href,\n json={'value': self.href})]"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a new History object with the current state of this element.", "response": "def history(self):\n \"\"\"\n .. versionadded:: 0.5.7\n Requires SMC version >= 6.3.2\n \n Obtain the history of this element. This will not chronicle every\n modification made over time, but instead a current snapshot with\n historical information such as when the element was created, by\n whom, when it was last modified and it's current state.\n \n :raises ResourceNotFound: If not running SMC version >= 6.3.2\n :rtype: History\n \"\"\"\n from smc.core.resource import History\n return History(**self.make_request(resource='history'))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef duplicate(self, name):\n dup = self.make_request(\n method='update',\n raw_result=True,\n resource='duplicate',\n params={'name': name})\n return type(self)(name=name, href=dup.href, type=type(self).typeof)", "response": "This method is used to create a duplicate of an element."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets a contact address by location name", "response": "def get(self, location_name):\n \"\"\"\n Get a contact address by location name\n \n :param str location_name: name of location\n :return: return contact address element or None\n :rtype: ContactAddress\n \"\"\"\n location_ref = location_helper(location_name, search_only=True)\n if location_ref:\n for location in self:\n if location.location_ref == location_ref:\n return location"} {"SOURCE": 
"codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update_or_create(self, location, contact_addresses, with_status=False,\n overwrite_existing=False, **kw):\n \"\"\"\n Update or create a contact address and location pair. If the\n location does not exist it will be automatically created. If the\n server already has a location assigned with the same name, the\n contact address specified will be added if it doesn't already\n exist (Management and Log Server can have multiple address for a\n single location).\n \n :param list(str) contact_addresses: list of contact addresses for\n the specified location\n :param str location: location to place the contact address in\n :param bool overwrite_existing: if you want to replace existing\n location to address mappings set this to True. Otherwise if\n the location exists, only new addresses are appended\n :param bool with_status: if set to True, a 3-tuple is returned with \n (Element, modified, created), where the second and third tuple\n items are booleans indicating the status\n :raises UpdateElementFailed: failed to update element with reason\n :rtype: MultiContactAddress\n \"\"\"\n updated, created = False, False\n location_ref = location_helper(location)\n if location_ref in self:\n for loc in self:\n if loc.location_ref == location_ref:\n if overwrite_existing:\n loc['addresses'][:] = contact_addresses\n updated = True\n else:\n for ca in contact_addresses:\n if ca not in loc.addresses:\n loc['addresses'].append(ca)\n updated = True\n else:\n self.data.setdefault('multi_contact_addresses', []).append(\n dict(addresses=contact_addresses, location_ref=location_ref))\n created = True\n \n if updated or created:\n self.update()\n if with_status:\n return self, updated, created\n return self", "response": "Update or create a contact address pair."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nprovides a reference to contact addresses used by this 
server.", "response": "def contact_addresses(self):\n \"\"\"\n Provides a reference to contact addresses used by this server.\n \n Obtain a reference to manipulate or iterate existing contact\n addresses::\n \n >>> from smc.elements.servers import ManagementServer\n >>> mgt_server = ManagementServer.objects.first()\n >>> for contact_address in mgt_server.contact_addresses:\n ... contact_address\n ... \n ContactAddress(location=Default,addresses=[u'1.1.1.1'])\n ContactAddress(location=foolocation,addresses=[u'12.12.12.12'])\n \n :rtype: MultiContactAddress\n \"\"\"\n return MultiContactAddress(\n href=self.get_relation('contact_addresses'),\n type=self.typeof,\n name=self.name)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(cls, name, address, proxy_port=8080, username=None,\n password=None, secondary=None, comment=None):\n \"\"\"\n Create a new HTTP Proxy service. Proxy must define at least\n one primary address but can optionally also define a list\n of secondary addresses.\n \n :param str name: Name of the proxy element\n :param str address: Primary address for proxy\n :param int proxy_port: proxy port (default: 8080)\n :param str username: optional username for authentication (default: None)\n :param str password: password for username if defined (default: None)\n :param str comment: optional comment\n :param list secondary: secondary list of proxy server addresses\n :raises CreateElementFailed: Failed to create the proxy element\n :rtype: HttpProxy\n \"\"\"\n json = {\n 'name': name,\n 'address': address,\n 'comment': comment,\n 'http_proxy_port': proxy_port,\n 'http_proxy_username': username if username else '',\n 'http_proxy_password': password if password else '',\n 'secondary': secondary if secondary else []}\n \n return ElementCreator(cls, json)", "response": "Creates a new HTTP Proxy service."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for 
the following Python 3 function\ndef create(cls, name, address, time_to_live=20, update_interval=10,\n secondary=None, comment=None):\n \"\"\"\n Create a DNS Server element.\n \n :param str name: Name of DNS Server\n :param str address: IP address for DNS Server element\n :param int time_to_live: Defines how long a DNS entry can be cached\n before querying the DNS server again (default: 20)\n :param int update_interval: Defines how often the DNS entries can be\n updated to the DNS server if the link status changes constantly\n (default: 10)\n :param list secondary: a secondary set of IP address for this element\n :raises CreateElementFailed: Failed to create with reason\n :rtype: DNSServer\n \"\"\"\n json = {\n 'name': name,\n 'address': address,\n 'comment': comment,\n 'time_to_live': time_to_live,\n 'update_interval': update_interval,\n 'secondary': secondary if secondary else []}\n \n return ElementCreator(cls, json)", "response": "Creates a DNS Server element."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates a new Generic Proxy Server element.", "response": "def create(cls, name, address, inspected_service, secondary=None,\n balancing_mode='ha', proxy_service='generic', location=None,\n comment=None, add_x_forwarded_for=False, trust_host_header=False,\n **kw):\n \"\"\"\n Create a Proxy Server element\n \n :param str name: name of proxy server element\n :param str address: address of element. 
Can be a single FQDN or comma separated\n list of IP addresses\n :param list secondary: list of secondary IP addresses\n :param str balancing_mode: how to balance traffic, valid options are\n ha (first available server), src, dst, srcdst (default: ha)\n :param str proxy_service: which proxy service to use for next hop, options\n are generic or forcepoint_ap-web_cloud\n :param str,Element location: location for this proxy server\n :param bool add_x_forwarded_for: add X-Forwarded-For header when using the\n Generic Proxy forwarding method (default: False)\n :param bool trust_host_header: trust the host header when using the Generic\n Proxy forwarding method (default: False)\n :param dict inspected_service: inspection services dict. Valid keys are\n service_type and port. Service type valid values are HTTP, HTTPS, FTP or SMTP\n and are case sensitive\n :param str comment: optional comment\n :param kw: keyword arguments are used to collect settings when the proxy_service\n value is forcepoint_ap-web_cloud. Valid keys are `fp_proxy_key`, \n `fp_proxy_key_id`, `fp_proxy_user_id`. The fp_proxy_key is the password value.\n All other values are of type int\n \"\"\" \n json = {'name': name,\n 'comment': comment,\n 'secondary': secondary or [],\n 'http_proxy': proxy_service,\n 'balancing_mode': balancing_mode,\n 'inspected_service': inspected_service,\n 'trust_host_header': trust_host_header,\n 'add_x_forwarded_for': add_x_forwarded_for,\n 'location_ref': element_resolver(location)\n }\n addresses = address.split(',')\n json.update(address=addresses.pop(0))\n json.update(ip_address=addresses if 'ip_address' not in kw else kw['ip_address'])\n \n if proxy_service == 'forcepoint_ap-web_cloud':\n for key in ('fp_proxy_key', 'fp_proxy_key_id', 'fp_proxy_user_id'):\n if key not in kw:\n raise CreateElementFailed('Missing required fp key when adding a '\n 'proxy server to forward to forcepoint. 
Missing key: %s' % key)\n json[key] = kw.get(key)\n \n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef flush_parent_cache(node):\n if node._parent is None:\n node._del_cache()\n return\n node._del_cache()\n flush_parent_cache(node._parent)", "response": "Flush parent cache of the given node."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef from_meta(node):\n # Version SMC < 6.4\n if 'related_element_type' not in node.data:\n return Element.from_href(\n node.data.get('href'))\n \n # SMC Version >= 6.4 - more efficient because it builds the\n # element by meta versus requiring a query\n return Element.from_meta(\n name=node.data.get('name'),\n type=node.related_element_type,\n href=node.data.get('href'))", "response": "Returns an Element object from a routing node."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef route_level(root, level):\n def recurse(nodes):\n for node in nodes:\n if node.level == level:\n routing_node.append(node)\n else:\n recurse(node)\n\n routing_node = []\n recurse(root)\n return routing_node", "response": "This method returns the routing node of the specified level."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a list of gateways of a specific type", "response": "def gateway_by_type(self, type=None, on_network=None): # @ReservedAssignment\n \"\"\"\n Return gateways for the specified node. 
You can also\n specify type to find only gateways of a specific type.\n Valid types are: bgp_peering, netlink, ospfv2_area.\n \n :param RoutingNode self: the routing node to check\n :param str type: bgp_peering, netlink, ospfv2_area\n :param str on_network: if network is specified, should be CIDR and\n specifies a filter to only return gateways on that network when\n an interface has multiple\n :return: tuple of RoutingNode(interface,network,gateway)\n :rtype: list\n \"\"\"\n gateways = route_level(self, 'gateway')\n if not type:\n for gw in gateways:\n yield gw\n else:\n for node in gateways:\n #TODO: Change to type == node.related_element_type when\n # only supporting SMC >= 6.4\n if type == node.routing_node_element.typeof:\n # If the parent is level interface, this is a tunnel interface\n # where the gateway is bound to interface versus network\n parent = node._parent\n if parent.level == 'interface':\n interface = parent\n network = None\n else:\n network = parent\n interface = network._parent\n \n if on_network is not None:\n if network and network.ip == on_network:\n yield (interface, network, node)\n else:\n yield (interface, network, node)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _which_ip_protocol(element):\n try:\n if element.typeof in ('host', 'router'):\n return getattr(element, 'address', False), getattr(element, 'ipv6_address', False)\n elif element.typeof == 'netlink':\n gateway = element.gateway\n if gateway.typeof == 'router':\n return getattr(gateway, 'address', False), getattr(gateway, 'ipv6_address', False)\n # It's an engine, return true\n elif element.typeof == 'network':\n return getattr(element, 'ipv4_network', False), getattr(element, 'ipv6_network', False)\n except AttributeError:\n pass\n # Always return true so that the calling function assumes the element\n # is valid for the routing node. 
This could fail when submitting but\n # we don't want to prevent adding elements yet since this could change\n return True, True", "response": "Validate the protocol addresses for the element and return the protocol address."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndeletes any invalid routes that are tagged by a list of nicids.", "response": "def del_invalid_routes(engine, nicids):\n \"\"\"\n Helper method to run through and delete any routes that are tagged\n as invalid or to_delete by a list of nicids. Since we could have a\n list of routes, iterate from top level engine routing node to avoid\n fetch exceptions. Route list should be a list of nicids as str.\n \n :param list nicids: list of nicids\n :raises DeleteElementFailed: delete element failed with reason\n \"\"\"\n nicids = map(str, nicids)\n for interface in engine.routing:\n if interface.nicid in nicids:\n if getattr(interface, 'to_delete', False): # Delete the invalid interface\n interface.delete()\n continue\n for network in interface:\n if getattr(network, 'invalid', False) or \\\n getattr(network, 'to_delete', False):\n network.delete()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef related_element_type(self):\n if 'related_element_type' in self.data:\n return self.data.get('related_element_type')\n return None if self.dynamic_nicid or (self.nicid and '.' 
in self.nicid) else \\\n Element.from_href(self.data.get('href')).typeof", "response": "Return the related element type for this element."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndisplay the routing tree representation in string format", "response": "def as_tree(self, level=0):\n \"\"\"\n Display the routing tree representation in string\n format\n \n :rtype: str\n \"\"\"\n ret = '--' * level + repr(self) + '\\n'\n for routing_node in self:\n ret += routing_node.as_tree(level+1)\n return ret"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get(self, interface_id):\n for interface in self:\n if interface.nicid == str(interface_id) or \\\n interface.dynamic_nicid == str(interface_id):\n return interface\n raise InterfaceNotFound('Specified interface {} does not exist on '\n 'this engine.'.format(interface_id))", "response": "Returns the routing configuration for a specific interface by ID."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a traffic handler to a routing node.", "response": "def add_traffic_handler(self, netlink, netlink_gw=None, network=None):\n \"\"\"\n Add a traffic handler to a routing node. A traffic handler can be\n either a static netlink or a multilink traffic handler. If ``network``\n is not specified and the interface has multiple IP addresses, the \n traffic handler will be added to all ipv4 addresses.\n \n Add a pre-defined netlink to the route table of interface 0::\n \n engine = Engine('vm')\n rnode = engine.routing.get(0)\n rnode.add_traffic_handler(StaticNetlink('mynetlink'))\n \n Add a pre-defined netlink only to a specific network on an interface\n with multiple addresses. 
Specify a netlink_gw for the netlink::\n \n rnode = engine.routing.get(0)\n rnode.add_traffic_handler(\n StaticNetlink('mynetlink'),\n netlink_gw=[Router('myrtr'), Host('myhost')],\n network='172.18.1.0/24')\n \n :param StaticNetlink,Multilink netlink: netlink element\n :param list(Element) netlink_gw: list of elements that should be destinations\n for this netlink. Typically these may be of type host, router, group, server,\n network or engine. \n :param str network: if network specified, only add OSPF to this network on interface\n :raises UpdateElementFailed: failure updating routing\n :raises ModificationAborted: Change must be made at the interface level\n :raises ElementNotFound: ospf area not found\n :return: Status of whether the route table was updated\n :rtype: bool\n \"\"\"\n routing_node_gateway = RoutingNodeGateway(netlink,\n destinations=[] if not netlink_gw else netlink_gw)\n return self._add_gateway_node('netlink', routing_node_gateway, network)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nadd an OSPF Area to the routing node.", "response": "def add_ospf_area(self, ospf_area, ospf_interface_setting=None, network=None,\n communication_mode='NOT_FORCED', unicast_ref=None):\n \"\"\"\n Add OSPF Area to this routing node.\n\n Communication mode specifies how the interface will interact with the\n adjacent OSPF environment. Please see SMC API documentation for more\n in depth information on each option.\n\n If the interface has multiple networks nested below, all networks\n will receive the OSPF area by default unless the ``network`` parameter\n is specified. OSPF cannot be applied to IPv6 networks.\n\n Example of adding an area to interface routing node::\n\n area = OSPFArea('area0') #obtain area resource\n\n #Set on routing interface 0\n interface = engine.routing.get(0)\n interface.add_ospf_area(area)\n\n .. 
note:: If UNICAST is specified, you must also provide a unicast_ref\n of element type Host to identify the remote host. If no\n unicast_ref is provided, this is skipped\n\n :param OSPFArea ospf_area: OSPF area instance or href\n :param OSPFInterfaceSetting ospf_interface_setting: used to override the\n OSPF settings for this interface (optional)\n :param str network: if network specified, only add OSPF to this network\n on interface\n :param str communication_mode: NOT_FORCED|POINT_TO_POINT|PASSIVE|UNICAST\n :param Element unicast_ref: Element used as unicast gw (required for UNICAST)\n :raises ModificationAborted: Change must be made at the interface level\n :raises UpdateElementFailed: failure updating routing\n :raises ElementNotFound: ospf area not found\n :return: Status of whether the route table was updated\n :rtype: bool\n \"\"\"\n communication_mode = communication_mode.upper()\n destinations=[] if not ospf_interface_setting else [ospf_interface_setting]\n if communication_mode == 'UNICAST' and unicast_ref:\n destinations.append(unicast_ref)\n routing_node_gateway = RoutingNodeGateway(\n ospf_area, communication_mode=communication_mode,\n destinations=destinations)\n return self._add_gateway_node('ospfv2_area', routing_node_gateway, network)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef add_bgp_peering(self, bgp_peering, external_bgp_peer=None,\n network=None):\n \"\"\"\n Add a BGP configuration to this routing interface. 
\n If the interface has multiple ip addresses, all networks will receive\n the BGP peering by default unless the ``network`` parameter is\n specified.\n \n Example of adding BGP to an interface by ID::\n\n interface = engine.routing.get(0)\n interface.add_bgp_peering(\n BGPPeering('mypeer'),\n ExternalBGPPeer('neighbor'))\n\n :param BGPPeering bgp_peering: BGP Peer element\n :param ExternalBGPPeer,Engine external_bgp_peer: peer element or href\n :param str network: if network specified, only add OSPF to this network\n on interface\n :raises ModificationAborted: Change must be made at the interface level\n :raises UpdateElementFailed: failed to add BGP\n :return: Status of whether the route table was updated\n :rtype: bool\n \"\"\"\n destination = [external_bgp_peer] if external_bgp_peer else []\n routing_node_gateway = RoutingNodeGateway(bgp_peering,\n destinations=destination)\n return self._add_gateway_node('bgp_peering', routing_node_gateway, network)", "response": "Adds a BGP peering to the routing table."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_static_route(self, gateway, destination, network=None):\n routing_node_gateway = RoutingNodeGateway(gateway,\n destinations=destination) \n return self._add_gateway_node('router', routing_node_gateway, network)", "response": "Adds a static route to the routing table."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_dynamic_gateway(self, networks):\n routing_node_gateway = RoutingNodeGateway(dynamic_classid='gateway',\n destinations=networks or [])\n return self._add_gateway_node('dynamic_netlink', routing_node_gateway)", "response": "Adds a dynamic gateway to the routing table."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _add_gateway_node_on_tunnel(self, routing_node_gateway):\n modified = False\n peering = 
[next_hop for next_hop in self\n if next_hop.routing_node_element == routing_node_gateway.routing_node_element]\n if not peering:\n self.data.setdefault('routing_node', []).append( \n routing_node_gateway)\n modified = True\n # Have peering\n else:\n peers = [node.routing_node_element for peer in peering\n for node in peer]\n for destination in routing_node_gateway.destinations:\n if destination not in peers:\n peering[0].data.setdefault('routing_node', []).append(\n {'level': 'any', 'href': destination.href,\n 'name': destination.name})\n modified = True\n if modified:\n self.update()\n return modified", "response": "Adds a gateway node on a tunnel interface."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _add_gateway_node(self, gw_type, routing_node_gateway, network=None):\n if self.level != 'interface':\n raise ModificationAborted('You must make this change from the '\n 'interface routing level. Current node: {}'.format(self))\n \n if self.related_element_type == 'tunnel_interface':\n return self._add_gateway_node_on_tunnel(routing_node_gateway)\n \n # Find any existing gateways\n routing_node = list(gateway_by_type(self, type=gw_type, on_network=network))\n \n _networks = [netwk for netwk in self if netwk.ip == network] if network is \\\n not None else list(self)\n \n # Routing Node Gateway to add as Element\n gateway_element_type = routing_node_gateway.routing_node_element\n \n modified = False\n for network in _networks:\n # Short circuit for dynamic interfaces\n if getattr(network, 'dynamic_classid', None):\n network.data.setdefault('routing_node', []).append(\n routing_node_gateway)\n modified = True\n break\n \n # Used for comparison to \n this_network_node = network.routing_node_element\n \n if routing_node and any(netwk for _intf, netwk, gw in routing_node\n if netwk.routing_node_element == this_network_node and\n gateway_element_type == gw.routing_node_element):\n \n # A gateway exists 
on this network\n for gw in network:\n if gw.routing_node_element == gateway_element_type:\n existing_dests = [node.routing_node_element for node in gw]\n for destination in routing_node_gateway.destinations:\n is_valid_destination = False\n if destination not in existing_dests:\n dest_ipv4, dest_ipv6 = _which_ip_protocol(destination)\n if len(network.ip.split(':')) > 1: # IPv6\n if dest_ipv6:\n is_valid_destination = True\n else:\n if dest_ipv4:\n is_valid_destination = True\n \n if is_valid_destination:\n gw.data.setdefault('routing_node', []).append(\n {'level': 'any', 'href': destination.href,\n 'name': destination.name})\n modified = True\n \n else: # Gateway doesn't exist\n gw_ipv4, gw_ipv6 = _which_ip_protocol(gateway_element_type) # ipv4, ipv6 or both\n if len(network.ip.split(':')) > 1:\n if gw_ipv6:\n network.data.setdefault('routing_node', []).append(\n routing_node_gateway)\n modified = True\n else: # IPv4\n if gw_ipv4:\n network.data.setdefault('routing_node', []).append(\n routing_node_gateway) \n modified = True\n\n if modified:\n self.update()\n return modified", "response": "Adds a gateway node to the routing tree."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nremoves a route gateway from the routing table.", "response": "def remove_route_gateway(self, element, network=None):\n \"\"\"\n Remove a route element by href or Element. Use this if you want to\n remove a netlink or a routing element such as BGP or OSPF. 
Removing\n is done from within the routing interface context.\n ::\n \n interface0 = engine.routing.get(0)\n interface0.remove_route_gateway(StaticNetlink('mynetlink'))\n \n Only from a specific network on a multi-address interface::\n \n interface0.remove_route_gateway(\n StaticNetlink('mynetlink'),\n network='172.18.1.0/24')\n \n :param str,Element element: element to remove from this routing node\n :param str network: if network specified, only add OSPF to this\n network on interface\n :raises ModificationAborted: Change must be made at the interface level\n :raises UpdateElementFailed: failure to update routing table\n :return: Status of whether the entry was removed (i.e. or not found)\n :rtype: bool\n \"\"\"\n if self.level not in ('interface',):\n raise ModificationAborted('You must make this change from the '\n 'interface routing level. Current node: {}'.format(self))\n \n node_changed = False\n element = element_resolver(element)\n for network in self:\n # Tunnel Interface binds gateways to the interface\n if network.level == 'gateway' and network.data.get('href') == element:\n network.delete()\n node_changed = True\n break\n for gateway in network:\n if gateway.data.get('href') == element:\n gateway.delete()\n node_changed = True\n return node_changed"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds an entry to this antispoofing node level.", "response": "def add(self, element):\n \"\"\"\n Add an entry to this antispoofing node level.\n Entry can be either href or network elements specified\n in :py:class:`smc.elements.network`\n ::\n\n if0 = engine.antispoofing.get(0)\n if0.add(Network('foonet'))\n\n :param Element element: entry to add, i.e. 
Network('mynetwork'), Host(..)\n :raises CreateElementFailed: failed adding entry\n :raises ElementNotFound: element entry specified not in SMC\n :return: whether entry was added\n :rtype: bool\n \"\"\"\n if self.level == 'interface':\n for network in self:\n if from_meta(network) == element:\n return False\n \n self.data['antispoofing_node'].append({\n 'antispoofing_node': [],\n 'auto_generated': 'false',\n 'href': element.href,\n 'level': self.level,\n 'validity': 'enable',\n 'name': element.name})\n\n self.update()\n return True\n return False"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nremoving a specific user added element from the antispoofing tables of a given interface.", "response": "def remove(self, element):\n \"\"\"\n Remove a specific user added element from the antispoofing tables of\n a given interface. This will not remove autogenerated or system level\n entries.\n \n :param Element element: element to remove\n :return: remove element if it exists and return bool\n :rtype: bool\n \"\"\"\n if self.level == 'interface':\n len_before_change = len(self)\n _nodes = []\n for network in self:\n if from_meta(network) != element:\n _nodes.append(network.data)\n else:\n if network.autogenerated: # Make sure it was user added\n _nodes.append(network.data)\n if len(_nodes) != len_before_change:\n self.data['antispoofing_node'] = _nodes\n self.update()\n return True\n return False"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(self, source, destination, gateway_ip, comment=None):\n self.items.append(dict(\n source=source, destination=destination,\n gateway_ip=gateway_ip, comment=comment))", "response": "Add a new policy route to the engine."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef delete(self, **kw):\n delete_by = []\n for field, val in kw.items():\n if val is not None:\n 
delete_by.append(field)\n \n self.items[:] = [route for route in self.items\n if not all(route.get(field) == kw.get(field)\n for field in delete_by)]", "response": "Delete a policy route from the engine."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create(cls, name, address=None, ipv6_address=None,\n secondary=None, comment=None):\n \"\"\"\n Create the host element\n\n :param str name: Name of element\n :param str address: ipv4 address of host object (optional if ipv6)\n :param str ipv6_address: ipv6 address (optional if ipv4)\n :param list secondary: secondary ip addresses (optional)\n :param str comment: comment (optional)\n :raises CreateElementFailed: element creation failed with reason\n :return: instance with meta\n :rtype: Host\n \n .. note:: Either ipv4 or ipv6 address is required\n \"\"\"\n address = address if address else None\n ipv6_address = ipv6_address if ipv6_address else None\n secondaries = [] if secondary is None else secondary\n json = {'name': name,\n 'address': address,\n 'ipv6_address': ipv6_address,\n 'secondary': secondaries,\n 'comment': comment}\n\n return ElementCreator(cls, json)", "response": "Create the host element with meta data"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds secondary IP addresses to this host element.", "response": "def add_secondary(self, address, append_lists=False):\n \"\"\"\n Add secondary IP addresses to this host element. If append_list\n is True, then add to existing list. 
Otherwise overwrite.\n \n :param list address: ip addresses to add in IPv4 or IPv6 format\n :param bool append_list: add to existing or overwrite (default: append)\n :return: None\n \"\"\"\n self.update(\n secondary=address,\n append_lists=append_lists)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate an AddressRange element", "response": "def create(cls, name, ip_range, comment=None):\n \"\"\"\n Create an AddressRange element\n\n :param str name: Name of element\n :param str iprange: iprange of element\n :param str comment: comment (optional)\n :raises CreateElementFailed: element creation failed with reason\n :return: instance with meta\n :rtype: AddressRange\n \"\"\"\n json = {'name': name,\n 'ip_range': ip_range,\n 'comment': comment}\n\n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, name, ipv4_network=None, ipv6_network=None,\n comment=None):\n \"\"\"\n Create the network element\n\n :param str name: Name of element\n :param str ipv4_network: network cidr (optional if ipv6)\n :param str ipv6_network: network cidr (optional if ipv4)\n :param str comment: comment (optional)\n :raises CreateElementFailed: element creation failed with reason\n :return: instance with meta\n :rtype: Network\n \n .. 
note:: Either an ipv4_network or ipv6_network must be specified\n \"\"\"\n ipv4_network = ipv4_network if ipv4_network else None\n ipv6_network = ipv6_network if ipv6_network else None\n json = {'name': name,\n 'ipv4_network': ipv4_network,\n 'ipv6_network': ipv6_network,\n 'comment': comment}\n\n return ElementCreator(cls, json)", "response": "Create the network element in the cache"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef build_sub_expression(name, ne_ref=None, operator='union'):\n ne_ref = [] if ne_ref is None else ne_ref\n json = {'name': name,\n 'ne_ref': ne_ref,\n 'operator': operator}\n return json", "response": "Static method to build and return the proper json for a sub - expression."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef create(cls, name, ne_ref=None, operator='exclusion',\n sub_expression=None, comment=None):\n \"\"\"\n Create the expression\n\n :param str name: name of expression\n :param list ne_ref: network element references for expression\n :param str operator: 'exclusion' (negation), 'union', 'intersection'\n (default: exclusion)\n :param dict sub_expression: sub expression used\n :param str comment: optional comment\n :raises CreateElementFailed: element creation failed with reason\n :return: instance with meta\n :rtype: Expression\n \"\"\"\n sub_expression = [] if sub_expression is None else [sub_expression]\n json = {'name': name,\n 'operator': operator,\n 'ne_ref': ne_ref,\n 'sub_expression': sub_expression,\n 'comment': comment}\n\n return ElementCreator(cls, json)", "response": "Create the Expression\n class."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef download(self, filename=None, as_type='zip'):\n headers = None\n if as_type in ['zip', 'txt', 'json']:\n if as_type == 'zip':\n if filename is None:\n raise 
MissingRequiredInput('Filename must be specified when '\n 'downloading IPList as a zip file.')\n filename = '{}'.format(filename)\n elif as_type == 'txt':\n headers = {'accept': 'text/plain'}\n elif as_type == 'json':\n headers = {'accept': 'application/json'}\n \n result = self.make_request(\n FetchElementFailed,\n raw_result=True,\n resource='ip_address_list',\n filename=filename,\n headers=headers)\n \n return result.json if as_type == 'json' else result.content", "response": "Download the IPList from the AAF system."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef upload(self, filename=None, json=None, as_type='zip'):\n headers = {'content-type': 'multipart/form-data'}\n params = None\n files = None\n if filename:\n files = {'ip_addresses': open(filename, 'rb')}\n if as_type == 'json':\n headers = {'accept': 'application/json',\n 'content-type': 'application/json'}\n elif as_type == 'txt':\n params = {'format': 'txt'}\n\n self.make_request(\n CreateElementFailed,\n method='create',\n resource='ip_address_list',\n headers=headers, files=files, json=json,\n params=params)", "response": "Uploads an IPList to the SMC."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates or create an IPList.", "response": "def update_or_create(cls, append_lists=True, with_status=False, **kwargs):\n \"\"\"\n Update or create an IPList.\n \n :param bool append_lists: append to existing IP List\n :param dict kwargs: provide at minimum the name attribute\n and optionally match the create constructor values\n :raises FetchElementFailed: Reason for retrieval failure\n \"\"\"\n was_created, was_modified = False, False \n element = None\n try: \n element = cls.get(kwargs.get('name')) \n if append_lists:\n iplist = element.iplist\n diff = [i for i in kwargs.get('iplist', []) if i not in iplist]\n if diff:\n iplist.extend(diff)\n else:\n iplist = []\n else:\n iplist = 
kwargs.get('iplist', [])\n \n if iplist:\n element.upload(json={'ip': iplist}, as_type='json')\n was_modified = True\n \n except ElementNotFound:\n element = cls.create( \n kwargs.get('name'), \n iplist = kwargs.get('iplist', []))\n was_created = True\n\n if with_status: \n return element, was_modified, was_created \n return element"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef create(cls, name, iplist=None, comment=None):\n json = {'name': name, 'comment': comment}\n result = ElementCreator(cls, json)\n if result and iplist is not None:\n element = IPList(name)\n\n element.make_request(\n CreateElementFailed,\n method='create',\n resource='ip_address_list',\n json={'ip': iplist})\n \n return result", "response": "Create an IP List."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef update_or_create(cls, name, engine, translation_values=None, with_status=False):\n updated, created = False, False\n alias = cls.get(name, raise_exc=False)\n if not alias:\n alias = cls.create(name)\n created = True\n \n elements = element_resolver(translation_values) if translation_values \\\n else []\n \n if not created: # possible update\n # Does alias already exist with a value\n alias_value = [_alias\n for _alias in engine.data.get('alias_value', [])\n if _alias.get('alias_ref') == alias.href]\n \n if alias_value:\n if not elements:\n alias_value[0].update(translated_element=None)\n updated = True\n else:\n t_values = alias_value[0].get('translated_element')\n if set(t_values) ^ set(elements):\n t_values[:] = elements\n updated = True\n \n if elements and (created or not updated):\n engine.data.setdefault('alias_value', []).append(\n {'alias_ref': alias.href,\n 'translated_element': elements})\n updated = True\n \n if updated:\n engine.update()\n\n if with_status:\n return alias, updated, created\n return alias", "response": "Update or create an alias and 
its mappings."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn an alias object from the engine.", "response": "def _from_engine(cls, data, alias_list):\n \"\"\"\n Return an alias for the engine. The data is dict provided\n when calling engine.alias_resolving(). The alias list is\n the list of aliases pre-fetched from Alias.objects.all().\n This will return an Alias element by taking the alias_ref\n and finding the name in the alias list.\n \n :rtype: Alias\n \"\"\"\n for alias in alias_list:\n href = data.get('alias_ref')\n if alias.href == href:\n _alias = Alias(alias.name, href=href)\n _alias.resolved_value = data.get('resolved_value')\n _alias.typeof = alias._meta.type\n return _alias"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef resolve(self, engine):\n if not self.resolved_value:\n result = self.make_request(\n ElementNotFound,\n href=self.get_relation('resolve'),\n params={'for': engine})\n\n self.resolved_value = result.get('resolved_value')\n return self.resolved_value", "response": "Resolve this Alias to a specific value."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef fetch_as_element(self, **kw):\n clone = self.copy()\n # Replace all filters with a combined filter\n bldata = TextFormat(field_format='name')\n # Preserve resolving fields for new filter\n if 'resolving' in self.format.data:\n bldata.set_resolving(**self.format.data['resolving'])\n\n # Resolve the entry ID to match SMC\n blid = TextFormat(field_format='pretty')\n blid.field_ids([LogField.BLACKLISTENTRYID])\n \n combined = CombinedFormat(bldata=bldata, blid=blid)\n clone.update_format(combined)\n\n for list_of_results in clone.fetch_raw(**kw):\n for entry in list_of_results:\n data = entry.get('bldata')\n data.update(**entry.get('blid'))\n yield BlacklistEntry(**data)", "response": "Fetch the blacklist and return as an instance of Element."} 
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the source ports for this blacklist entry.", "response": "def source_ports(self):\n \"\"\"\n Source ports for this blacklist entry. If no ports are specified (i.e. ALL\n ports), 'ANY' is returned.\n \n :rtype: str\n \"\"\"\n start_port = self.blacklist.get('BlacklistEntrySourcePort')\n if start_port is not None:\n return '{}-{}'.format(\n start_port, self.blacklist.get('BlacklistEntrySourcePortRange'))\n return 'ANY'"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn the destination ports for this blacklist entry.", "response": "def dest_ports(self):\n \"\"\"\n Destination ports for this blacklist entry. If no ports are specified,\n 'ANY' is returned.\n \n :rtype: str\n \"\"\"\n start_port = self.blacklist.get('BlacklistEntryDestinationPort')\n if start_port is not None:\n return '{}-{}'.format(\n start_port, self.blacklist.get('BlacklistEntryDestinationPortRange'))\n return 'ANY'"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the name of the rule element.", "response": "def name(self):\n \"\"\"\n Name attribute of rule element\n \"\"\"\n return self._meta.name if self._meta.name else \\\n 'Rule @%s' % self.tag"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef move_rule_after(self, other_rule):\n self.make_request(\n CreateRuleFailed,\n href=other_rule.get_relation('add_after'),\n method='create',\n json=self)\n self.delete()", "response": "Move this rule after another rule."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a rule section in a Firewall Policy.", "response": "def create_rule_section(self, name, add_pos=None, after=None, before=None):\n \"\"\"\n Create a rule section in a Firewall Policy. 
To specify a specific numbering\n position for the rule section, use the `add_pos` field. If no position or\n before/after is specified, the rule section will be placed at the top which\n will encapsulate all rules below.\n Create a rule section for the relavant policy::\n \n policy = FirewallPolicy('mypolicy')\n policy.fw_ipv4_access_rules.create_rule_section(name='attop')\n # For NAT rules\n policy.fw_ipv4_nat_rules.create_rule_section(name='mysection', add_pos=5)\n \n :param str name: create a rule section by name\n :param int add_pos: position to insert the rule, starting with position 1.\n If the position value is greater than the number of rules, the rule is\n inserted at the bottom. If add_pos is not provided, rule is inserted in\n position 1. Mutually exclusive with ``after`` and ``before`` params. \n :param str after: Rule tag to add this rule after. Mutually exclusive with\n ``add_pos`` and ``before`` params. \n :param str before: Rule tag to add this rule before. Mutually exclusive with\n ``add_pos`` and ``after`` params. \n :raises MissingRequiredInput: when options are specified the need additional \n setting, i.e. use_vpn action requires a vpn policy be specified. 
\n :raises CreateRuleFailed: rule creation failure \n :return: the created ipv4 rule \n :rtype: IPv4Rule \n \"\"\"\n href = self.href\n params = None\n if add_pos is not None:\n href = self.add_at_position(add_pos)\n elif before or after: \n params = self.add_before_after(before, after) \n \n return ElementCreator(\n self.__class__,\n exception=CreateRuleFailed,\n href=href,\n params=params,\n json={'comment': name})"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate a new layer 3 firewall rule.", "response": "def create(self, name, sources=None, destinations=None,\n services=None, action='allow', log_options=None,\n authentication_options=None, connection_tracking=None,\n is_disabled=False, vpn_policy=None, mobile_vpn=False,\n add_pos=None, after=None, before=None,\n sub_policy=None, comment=None, **kw):\n \"\"\" \n Create a layer 3 firewall rule\n\n :param str name: name of rule\n :param sources: source/s for rule\n :type sources: list[str, Element]\n :param destinations: destination/s for rule\n :type destinations: list[str, Element]\n :param services: service/s for rule\n :type services: list[str, Element]\n :param action: allow,continue,discard,refuse,enforce_vpn,\n apply_vpn,forward_vpn, blacklist (default: allow)\n :type action: Action or str\n :param LogOptions log_options: LogOptions object\n :param ConnectionTracking connection_tracking: custom connection tracking settings\n :param AuthenticationOptions authentication_options: options for auth if any\n :param PolicyVPN,str vpn_policy: policy element or str href; required for\n enforce_vpn, use_vpn and apply_vpn actions\n :param bool mobile_vpn: if using a vpn action, you can set mobile_vpn to True and\n omit the vpn_policy setting if you want this VPN to apply to any mobile VPN based\n on the policy VPN associated with the engine\n :param str,Element sub_policy: sub policy required when rule has an action of 'jump'.\n Can be the FirewallSubPolicy element or href.\n 
:param int add_pos: position to insert the rule, starting with position 1. If\n the position value is greater than the number of rules, the rule is inserted at\n the bottom. If add_pos is not provided, rule is inserted in position 1. Mutually\n exclusive with ``after`` and ``before`` params.\n :param str after: Rule tag to add this rule after. Mutually exclusive with ``add_pos``\n and ``before`` params.\n :param str before: Rule tag to add this rule before. Mutually exclusive with ``add_pos``\n and ``after`` params.\n :param str comment: optional comment for this rule\n :raises MissingRequiredInput: when options are specified the need additional \n setting, i.e. use_vpn action requires a vpn policy be specified.\n :raises CreateRuleFailed: rule creation failure\n :return: the created ipv4 rule\n :rtype: IPv4Rule\n \"\"\"\n rule_values = self.update_targets(sources, destinations, services)\n rule_values.update(name=name, comment=comment)\n\n if isinstance(action, Action):\n rule_action = action\n else:\n rule_action = Action()\n rule_action.action = action\n\n if not rule_action.action in self._actions:\n raise CreateRuleFailed('Action specified is not valid for this '\n 'rule type; action: {}'.format(rule_action.action))\n\n if rule_action.action in ('apply_vpn', 'enforce_vpn', 'forward_vpn'):\n if vpn_policy is None and not mobile_vpn:\n raise MissingRequiredInput('You must either specify a vpn_policy or set '\n 'mobile_vpn when using a rule with a VPN action')\n if mobile_vpn:\n rule_action.mobile_vpn = True\n else:\n try:\n vpn = element_resolver(vpn_policy) # VPNPolicy\n rule_action.vpn = vpn\n except ElementNotFound:\n raise MissingRequiredInput('Cannot find VPN policy specified: {}, '\n .format(vpn_policy))\n \n elif rule_action.action == 'jump':\n try:\n rule_action.sub_policy = element_resolver(sub_policy)\n except ElementNotFound:\n raise MissingRequiredInput('Cannot find sub policy specified: {} '\n .format(sub_policy))\n \n 
#rule_values.update(action=rule_action.data)\n\n log_options = LogOptions() if not log_options else log_options\n \n if connection_tracking is not None:\n rule_action.connection_tracking_options.update(**connection_tracking)\n \n auth_options = AuthenticationOptions() if not authentication_options \\\n else authentication_options\n\n rule_values.update(\n action=rule_action.data,\n options=log_options.data,\n authentication_options=auth_options.data,\n is_disabled=is_disabled)\n\n params = None\n href = self.href\n if add_pos is not None:\n href = self.add_at_position(add_pos)\n elif before or after:\n params = self.add_before_after(before, after)\n \n return ElementCreator(\n self.__class__,\n exception=CreateRuleFailed,\n href=href,\n params=params,\n json=rule_values)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create(self, name, sources=None, destinations=None,\n services=None, action='allow', is_disabled=False,\n logical_interfaces=None, add_pos=None,\n after=None, before=None, comment=None):\n \"\"\"\n Create an Ethernet rule\n\n :param str name: name of rule\n :param sources: source/s for rule\n :type sources: list[str, Element]\n :param destinations: destination/s for rule\n :type destinations: list[str, Element]\n :param services: service/s for rule\n :type services: list[str, Element]\n :param str action: \\|allow\\|continue\\|discard\\|refuse\\|blacklist\n :param bool is_disabled: whether to disable rule or not\n :param list logical_interfaces: logical interfaces by name\n :param int add_pos: position to insert the rule, starting with position 1. If\n the position value is greater than the number of rules, the rule is inserted at\n the bottom. If add_pos is not provided, rule is inserted in position 1. Mutually\n exclusive with ``after`` and ``before`` params.\n :param str after: Rule tag to add this rule after. 
Mutually exclusive with ``add_pos``\n and ``before`` params.\n :param str before: Rule tag to add this rule before. Mutually exclusive with ``add_pos``\n and ``after`` params.\n :raises MissingReuqiredInput: when options are specified the need additional\n setting, i.e. use_vpn action requires a vpn policy be specified.\n :raises CreateRuleFailed: rule creation failure\n :return: newly created rule\n :rtype: EthernetRule\n \"\"\"\n rule_values = self.update_targets(sources, destinations, services)\n rule_values.update(name=name, comment=comment)\n rule_values.update(is_disabled=is_disabled)\n\n if isinstance(action, Action):\n rule_action = action\n else:\n rule_action = Action()\n rule_action.action = action\n\n if not rule_action.action in self._actions:\n raise CreateRuleFailed('Action specified is not valid for this '\n 'rule type; action: {}'\n .format(rule_action.action))\n\n rule_values.update(action=rule_action.data)\n\n rule_values.update(self.update_logical_if(logical_interfaces))\n\n params = None\n \n href = self.href\n if add_pos is not None:\n href = self.add_at_position(add_pos)\n else:\n params = self.add_before_after(before, after)\n \n return ElementCreator(\n self.__class__,\n exception=CreateRuleFailed,\n href=href,\n params=params,\n json=rule_values)", "response": "Creates an Ethernet rule with the specified parameters."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _severity_by_name(name):\n for intvalue, sevname in SEVERITY.items():\n if name.lower() == sevname:\n return intvalue\n return 1", "response": "Return the severity integer value by it s name."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef situation_parameters(self):\n for param in self.data.get('situation_parameters', []):\n cache = ElementCache(data=self.make_request(href=param))\n yield type('SituationParameter', (SituationParameter,), {\n 
'data': cache})(name=cache.name, type=cache.type, href=param)", "response": "A generator that returns the list of SituationParameter objects representing the detection logic for the context."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef parameter_values(self):\n for param in self.data.get('parameter_values', []):\n cache = ElementCache(data=self.make_request(href=param))\n name = '{}'.format(cache.type.title()).replace('_', '')\n yield type(name, (SituationParameterValue,), {\n 'data': cache})(name=cache.name, type=cache.type, href=param)", "response": "Yields the parameter values for this inspection situation. This correlate to\n \n the situation_context."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef create(cls, name, situation_context, attacker=None, target=None,\n severity='information', situation_type=None, description=None,\n comment=None):\n \"\"\"\n Create an inspection situation.\n \n :param str name: name of the situation\n :param InspectionSituationContext situation_context: The situation\n context type used to define this situation. Identifies the proper\n parameter that identifies how the situation is defined (i.e. regex, etc).\n :param str attacker: Attacker information, used to identify last packet\n the triggers attack and is only used for blacklisting. Values can\n be packet_source, packet_destination, connection_source, or\n connection_destination\n :param str target: Target information, used to identify the last packet\n that triggers the attack and is only used for blacklisting. Values\n can be packet_source, packet_destination, connection_source, or\n connection_destination\n :param str severity: severity for this situation. 
Valid values are\n critical, high, low, information\n :param str description: optional description\n :param str comment: optional comment\n \"\"\"\n try:\n json = {\n 'name': name, 'comment': comment,\n 'description': description,\n 'situation_context_ref': situation_context.href,\n 'attacker': attacker, 'victim': target,\n 'severity': _severity_by_name(severity)}\n \n element = ElementCreator(cls, json)\n tag = situation_type or SituationTag('User Defined Situations')\n tag.add_element(element)\n return element\n \n except ElementNotFound as e:\n raise CreateElementFailed('{}. Inspection Situation Contexts require SMC '\n 'version 6.5 and above.'.format(str(e)))", "response": "Create an inspection situation."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates a regular expression for this inspection situation.", "response": "def create_regular_expression(self, regexp):\n \"\"\"\n Create a regular expression for this inspection situation\n context. 
The inspection situation must be using an inspection\n context that supports regex.\n \n :param str regexp: regular expression string\n :raises CreateElementFailed: failed to modify the situation\n \"\"\"\n for parameter in self.situation_context.situation_parameters:\n if parameter.type == 'regexp':\n return self.add_parameter_value(\n 'reg_exp_situation_parameter_values',\n **{'parameter_ref': parameter.href,\n 'reg_exp': regexp}) # Treat as raw string\n\n raise CreateElementFailed('The situation does not support a regular '\n 'expression as a context value.')"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ndownloading the current version of the current snapshot to a file.", "response": "def download(self, filename=None):\n \"\"\"\n Download snapshot to filename\n\n :param str filename: fully qualified path including filename .zip\n :raises EngineCommandFailed: IOError occurred downloading snapshot\n :return: None\n \"\"\"\n if not filename:\n filename = '{}{}'.format(self.name, '.zip')\n \n try:\n self.make_request(\n EngineCommandFailed,\n resource='content',\n filename=filename)\n\n except IOError as e:\n raise EngineCommandFailed(\"Snapshot download failed: {}\"\n .format(e))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef upload(self, engine, timeout=5, wait_for_finish=False, **kw):\n return Task.execute(self, 'upload', params={'filter': engine},\n timeout=timeout, wait_for_finish=wait_for_finish, **kw)", "response": "Uploads policy to specific device. 
Using wait for finish\n returns a TaskOperationPoller that will be executed when the task is finished."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef search_rule(self, search):\n result = self.make_request(\n resource='search_rule',\n params={'filter': search})\n \n if result:\n results = []\n for data in result:\n typeof = data.get('type')\n if 'ethernet' in typeof:\n klazz = lookup_class('ethernet_rule')\n elif typeof in [\n 'ips_ipv4_access_rule', 'l2_interface_ipv4_access_rule']:\n klazz = lookup_class('layer2_ipv4_access_rule')\n else:\n klazz = lookup_class(typeof)\n results.append(klazz(**data))\n return results\n return []", "response": "Search a rule for a rule tag or name value value"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef rule_counters(self, engine, duration_type='one_week',\n duration=0, start_time=0):\n \"\"\"\n .. versionadded:: 0.5.6\n Obtain rule counters for this policy. Requires SMC >= 6.2\n \n Rule counters can be obtained for a given policy and duration for\n those counters can be provided in duration_type. A custom start\n range can also be provided.\n\n :param Engine engine: the target engine to obtain rule counters from\n :param str duration_type: duration for obtaining rule counters. 
Valid\n options are: one_day, one_week, one_month, six_months, one_year,\n custom, since_last_upload\n :param int duration: if custom set for duration type, specify the\n duration in seconds (Default: 0)\n :param int start_time: start time in milliseconds (Default: 0)\n :raises: ActionCommandFailed\n :return: list of rule counter objects\n :rtype: RuleCounter\n \"\"\"\n json = {'target_ref': engine.href, 'duration_type': duration_type}\n return [RuleCounter(**rule)\n for rule in self.make_request(\n method='create',\n resource='rule_counter',\n json=json)]", "response": "This method returns a list of RuleCounter objects for the given engine and duration."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(cls, name, nat=False, mobile_vpn_toplogy_mode=None,\n vpn_profile=None):\n \"\"\"\n Create a new policy based VPN\n\n :param name: name of vpn policy\n :param bool nat: whether to apply NAT to the VPN (default False)\n :param mobile_vpn_toplogy_mode: whether to allow remote vpn\n :param VPNProfile vpn_profile: reference to VPN profile, or uses default\n :rtype: PolicyVPN\n \"\"\"\n vpn_profile = element_resolver(vpn_profile) or \\\n VPNProfile('VPN-A Suite').href\n \n json = {'mobile_vpn_topology_mode': mobile_vpn_toplogy_mode,\n 'name': name,\n 'nat': nat,\n 'vpn_profile': vpn_profile}\n\n try:\n return ElementCreator(cls, json)\n except CreateElementFailed as err:\n raise CreatePolicyFailed(err)", "response": "Create a new VPN - level policy based VPN - level entry."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef enable_disable_nat(self):\n if self.nat:\n self.data['nat'] = False\n else:\n self.data['nat'] = True", "response": "Enable or disable NAT on this policy."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds a mobile VPN gateway to this policy VPN.", "response": "def 
add_mobile_gateway(self, gateway):\n \"\"\"\n Add a mobile VPN gateway to this policy VPN. \n Example of adding or removing a mobile VPN gateway::\n \n policy_vpn = PolicyVPN('myvpn')\n policy_vpn.open()\n policy_vpn.add_mobile_vpn_gateway(ExternalGateway('extgw3'))\n \n for mobile_gateway in policy_vpn.mobile_gateway_node:\n if mobile_gateway.gateway == ExternalGateway('extgw3'):\n mobile_gateway.delete()\n policy_vpn.save()\n policy_vpn.close()\n \n :param Engine,ExternalGateway gateway: An external gateway, engine or\n href for the mobile gateway\n :raises PolicyCommandFailed: could not add gateway\n \"\"\"\n try:\n gateway = gateway.vpn.internal_gateway.href # Engine\n except AttributeError:\n gateway = element_resolver(gateway) # External Gateway\n \n self.make_request(\n PolicyCommandFailed,\n method='create',\n resource='mobile_gateway_node',\n json={'gateway': gateway,\n 'node_usage': 'mobile'})"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_internal_gateway_to_vpn(internal_gateway_href, vpn_policy,\n vpn_role='central'):\n \"\"\"\n Add an internal gateway (managed engine node) to a VPN policy\n based on the internal gateway href.\n\n :param str internal_gateway_href: href for engine internal gw\n :param str vpn_policy: name of vpn policy\n :param str vpn_role: central|satellite\n :return: True for success\n :rtype: bool\n \"\"\"\n try:\n vpn = PolicyVPN(vpn_policy)\n vpn.open()\n if vpn_role == 'central':\n vpn.add_central_gateway(internal_gateway_href)\n else:\n vpn.add_satellite_gateway(internal_gateway_href)\n vpn.save()\n vpn.close()\n except ElementNotFound:\n return False\n return True", "response": "Adds an internal gateway to VPN policy using the href of the engine internal gateway."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nenabling or disable the tunnel link between endpoints.", "response": "def enable_disable(self):\n \"\"\"\n Enable or disable the tunnel 
link between endpoints.\n \n :raises UpdateElementFailed: failed with reason\n :return: None\n \"\"\"\n if self.enabled:\n self.update(enabled=False)\n else:\n self.update(enabled=True)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nprepare a blacklist entry for the given source and destination.", "response": "def prepare_blacklist(src, dst, duration=3600, src_port1=None,\n src_port2=None, src_proto='predefined_tcp',\n dst_port1=None, dst_port2=None,\n dst_proto='predefined_tcp'):\n \"\"\" \n Create a blacklist entry.\n \n A blacklist can be added directly from the engine node, or from\n the system context. If submitting from the system context, it becomes\n a global blacklist. This will return the properly formatted json\n to submit.\n \n :param src: source address, with cidr, i.e. 10.10.10.10/32 or 'any'\n :param dst: destination address with cidr, i.e. 1.1.1.1/32 or 'any'\n :param int duration: length of time to blacklist\n \n Both the system and engine context blacklist allow kw to be passed\n to provide additional functionality such as adding source and destination\n ports or port ranges and specifying the protocol. The following parameters\n define the ``kw`` that can be passed.\n \n The following example shows creating an engine context blacklist\n using additional kw::\n \n engine.blacklist('1.1.1.1/32', '2.2.2.2/32', duration=3600,\n src_port1=1000, src_port2=1500, src_proto='predefined_udp',\n dst_port1=3, dst_port2=3000, dst_proto='predefined_udp')\n \n :param int src_port1: start source port to limit blacklist\n :param int src_port2: end source port to limit blacklist\n :param str src_proto: source protocol. Either 'predefined_tcp'\n or 'predefined_udp'. (default: 'predefined_tcp')\n :param int dst_port1: start dst port to limit blacklist\n :param int dst_port2: end dst port to limit blacklist\n :param str dst_proto: dst protocol. Either 'predefined_tcp'\n or 'predefined_udp'. (default: 'predefined_tcp')\n \n .. 
note:: if blocking a range of ports, use both src_port1 and\n src_port2, otherwise providing only src_port1 is adequate. The\n same applies to dst_port1 / dst_port2. In addition, if you provide\n src_portX but not dst_portX (or vice versa), the undefined port\n side definition will default to all ports.\n \"\"\"\n\n json = {}\n \n directions = {src: 'end_point1', dst: 'end_point2'}\n \n for direction, key in directions.items():\n json[key] = {'address_mode': 'any'} if \\\n 'any' in direction.lower() else {'address_mode': 'address', 'ip_network': direction}\n \n if src_port1:\n json.setdefault('end_point1').update(\n port1=src_port1,\n port2=src_port2 or src_port1,\n port_mode=src_proto)\n\n if dst_port1:\n json.setdefault('end_point2').update(\n port1=dst_port1,\n port2=dst_port2 or dst_port1,\n port_mode=dst_proto)\n \n json.update(duration=duration)\n return json"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nadds an element to the tag.", "response": "def add_element(self, element):\n \"\"\"\n Element can be href or type :py:class:`smc.base.model.Element`\n ::\n\n >>> from smc.elements.other import Category\n >>> category = Category('foo')\n >>> category.add_element(Host('kali'))\n\n :param str,Element element: element to add to tag\n :raises: ModificationFailed: failed adding element\n :return: None\n \"\"\"\n element = element_resolver(element)\n\n self.make_request(\n ModificationFailed,\n method='create',\n resource='category_add_element',\n json={'value': element})"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nadd this category to a category tag.", "response": "def add_category_tag(self, tags, append_lists=True):\n \"\"\"\n Add this category to a category tag (group). 
This provides drop down\n filters in the SMC UI by category tag.\n \n :param list tags: category tag by name\n :param bool append_lists: append to existing tags or overwrite\n default: append)\n :type tags: list(str)\n :return: None\n \"\"\"\n tags = element_resolver(tags)\n self.update(\n category_parent_ref=tags,\n append_lists=append_lists)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef remove_category(self, categories):\n categories = element_resolver(categories)\n diff = [category for category in self.data['category_child_ref']\n if category not in categories]\n self.update(category_child_ref=diff)", "response": "Removes a category from this Category Tag."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_entry(self, src, dst, duration=3600, src_port1=None,\n src_port2=None, src_proto='predefined_tcp',\n dst_port1=None, dst_port2=None,\n dst_proto='predefined_tcp'):\n \"\"\" \n Create a blacklist entry.\n \n A blacklist can be added directly from the engine node, or from\n the system context. If submitting from the system context, it becomes\n a global blacklist. This will return the properly formatted json\n to submit.\n \n :param src: source address, with cidr, i.e. 10.10.10.10/32 or 'any'\n :param dst: destination address with cidr, i.e. 1.1.1.1/32 or 'any'\n :param int duration: length of time to blacklist\n \n Both the system and engine context blacklist allow kw to be passed\n to provide additional functionality such as adding source and destination\n ports or port ranges and specifying the protocol. 
The following parameters\n define the ``kw`` that can be passed.\n \n The following example shows creating an engine context blacklist\n using additional kw::\n \n engine.blacklist('1.1.1.1/32', '2.2.2.2/32', duration=3600,\n src_port1=1000, src_port2=1500, src_proto='predefined_udp',\n dst_port1=3, dst_port2=3000, dst_proto='predefined_udp')\n \n :param int src_port1: start source port to limit blacklist\n :param int src_port2: end source port to limit blacklist\n :param str src_proto: source protocol. Either 'predefined_tcp'\n or 'predefined_udp'. (default: 'predefined_tcp')\n :param int dst_port1: start dst port to limit blacklist\n :param int dst_port2: end dst port to limit blacklist\n :param str dst_proto: dst protocol. Either 'predefined_tcp'\n or 'predefined_udp'. (default: 'predefined_tcp')\n \n .. note:: if blocking a range of ports, use both src_port1 and\n src_port2, otherwise providing only src_port1 is adequate. The\n same applies to dst_port1 / dst_port2. In addition, if you provide\n src_portX but not dst_portX (or vice versa), the undefined port\n side definition will default to all ports.\n \"\"\"\n self.entries.setdefault('entries', []).append(prepare_blacklist(\n src, dst, duration, src_port1, src_port2, src_proto, dst_port1,\n dst_port2, dst_proto))", "response": "Add a new entry to the blacklist."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nupdate or create a group entry.", "response": "def update_or_create(cls, append_lists=True, with_status=False,\n remove_members=False, **kwargs):\n \"\"\"\n Update or create group entries. If the group exists, the members\n will be updated. Set append_lists=True to add new members to\n the list, or False to reset the list to the provided members. 
If\n setting remove_members, this will override append_lists if set.\n \n :param bool append_lists: add to existing members, if any\n :param bool remove_members: remove specified members instead of appending\n or overwriting\n :paran dict kwargs: keyword arguments to satisfy the `create`\n constructor if the group needs to be created.\n :raises CreateElementFailed: could not create element with reason \n :return: element instance by type \n :rtype: Element\n \"\"\"\n was_created, was_modified = False, False \n element = None\n try: \n element = cls.get(kwargs.get('name'))\n was_modified = element.update_members(\n kwargs.get('members', []),\n append_lists=append_lists,\n remove_members=remove_members)\n except ElementNotFound: \n element = cls.create(\n kwargs.get('name'),\n members = kwargs.get('members', []))\n was_created = True\n \n if with_status:\n return element, was_modified, was_created\n return element"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(cls, name, members=None, comment=None):\n element = [] if members is None else element_resolver(members)\n json = {'name': name,\n 'element': element,\n 'comment': comment}\n\n return ElementCreator(cls, json)", "response": "Create the TCP Service group with the given name and members."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ngenerate the report and optionally wait for results.", "response": "def generate(self, start_time=0, end_time=0, \n senders=None, wait_for_finish=False, timeout=5, **kw): # @ReservedAssignment\n \"\"\"\n Generate the report and optionally wait for results.\n You can optionally add filters to the report by providing the senders\n argument as a list of type Element::\n \n report = ReportDesign('Firewall Weekly Summary')\n begin = datetime_to_ms(datetime.strptime(\"2018-02-03T00:00:00\", \"%Y-%m-%dT%H:%M:%S\"))\n end = datetime_to_ms(datetime.strptime(\"2018-02-04T00:00:00\", 
\"%Y-%m-%dT%H:%M:%S\"))\n report.generate(start_time=begin, end_time=end, senders=[Engine('vm')])\n \n :param int period_begin: milliseconds time defining start time for report\n :param int period_end: milliseconds time defining end time for report\n :param senders: filter targets to use when generating report\n :type senders: list(Element)\n :param bool wait_for_finish: enable polling for results\n :param int timeout: timeout between polling\n :raises TaskRunFailed: refresh failed, possibly locked policy\n :rtype: TaskOperationPoller\n \"\"\"\n if start_time and end_time:\n kw.setdefault('params', {}).update(\n {'start_time': start_time,\n 'end_time': end_time})\n if senders:\n kw.setdefault('json', {}).update({'senders': element_resolver(senders)})\n return Task.execute(self, 'generate',\n timeout=timeout, wait_for_finish=wait_for_finish, **kw)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create_design(self, name):\n result = self.make_request(\n CreateElementFailed,\n resource='create_design',\n raw_result=True,\n method='create',\n params={'name': name})\n \n return ReportDesign(\n href=result.href, type=self.typeof, name=name)", "response": "Creates a report design based on an existing template."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nexporting the report in PDF format.", "response": "def export_pdf(self, filename):\n \"\"\"\n Export the report in PDF format. Specify a path for which\n to save the file, including the trailing filename.\n \n :param str filename: path including filename\n :return: None\n \"\"\"\n self.make_request(\n raw_result=True,\n resource='export',\n filename=filename, \n headers = {'accept': 'application/pdf'})"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nexport the current neccessary entry in text format. 
Optionally provide a filename to save to.", "response": "def export_text(self, filename=None):\n \"\"\"\n Export in text format. Optionally provide a filename to\n save to.\n \n :param str filename: path including filename (optional)\n :return: None\n \"\"\"\n result = self.make_request(\n resource='export',\n params={'format': 'txt'},\n filename=filename,\n raw_result=True, \n headers = {'accept': 'text/plain'})\n\n if not filename:\n return result.content"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a new VPN tunnel based on routing.", "response": "def create_ipsec_tunnel(cls, name, local_endpoint, remote_endpoint,\n preshared_key=None, monitoring_group=None,\n vpn_profile=None, mtu=0, pmtu_discovery=True,\n ttl=0, enabled=True, comment=None):\n \"\"\"\n The VPN tunnel type negotiates IPsec tunnels in the same way\n as policy-based VPNs, but traffic is selected to be sent into\n the tunnel based on routing.\n \n :param str name: name of VPN\n :param TunnelEndpoint local_endpoint: the local side endpoint for\n this VPN.\n :param TunnelEndpoint remote_endpoint: the remote side endpoint for\n this VPN.\n :param str preshared_key: required if remote endpoint is an ExternalGateway\n :param TunnelMonitoringGroup monitoring_group: the group to place\n this VPN in for monitoring. 
Default: 'Uncategorized'.\n :param VPNProfile vpn_profile: VPN profile for this VPN.\n (default: VPN-A Suite)\n :param int mtu: Set MTU for this VPN tunnel (default: 0)\n :param boolean pmtu_discovery: enable pmtu discovery (default: True)\n :param int ttl: ttl for connections on the VPN (default: 0)\n :param bool enabled: enable the RBVPN or leave it disabled\n :param str comment: optional comment\n :raises CreateVPNFailed: failed to create the VPN with reason\n :rtype: RouteVPN\n \"\"\"\n group = monitoring_group or TunnelMonitoringGroup('Uncategorized')\n profile = vpn_profile or VPNProfile('VPN-A Suite')\n \n json = {\n 'name': name,\n 'mtu': mtu,\n 'ttl': ttl,\n 'enabled': enabled,\n 'monitoring_group_ref': group.href,\n 'pmtu_discovery': pmtu_discovery,\n 'preshared_key': preshared_key,\n 'rbvpn_tunnel_side_a': local_endpoint.data,\n 'rbvpn_tunnel_side_b': remote_endpoint.data,\n 'tunnel_mode': 'vpn',\n 'comment': comment,\n 'vpn_profile_ref': profile.href\n }\n \n try:\n return ElementCreator(cls, json)\n except CreateElementFailed as err:\n raise CreateVPNFailed(err)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a GRE based tunnel mode route VPN.", "response": "def create_gre_tunnel_mode(cls, name, local_endpoint, remote_endpoint,\n policy_vpn, mtu=0, pmtu_discovery=True, ttl=0,\n enabled=True, comment=None):\n \"\"\"\n Create a GRE based tunnel mode route VPN. Tunnel mode GRE wraps the\n GRE tunnel in an IPSEC tunnel to provide encrypted end-to-end\n security. Therefore a policy based VPN is required to 'wrap' the\n GRE into IPSEC. \n \n :param str name: name of VPN\n :param TunnelEndpoint local_endpoint: the local side endpoint for\n this VPN.\n :param TunnelEndpoint remote_endpoint: the remote side endpoint for\n this VPN.\n :param PolicyVPN policy_vpn: reference to a policy VPN\n :param TunnelMonitoringGroup monitoring_group: the group to place\n this VPN in for monitoring. 
(default: 'Uncategorized')\n :param int mtu: Set MTU for this VPN tunnel (default: 0)\n :param boolean pmtu_discovery: enable pmtu discovery (default: True)\n :param int ttl: ttl for connections on the VPN (default: 0)\n :param str comment: optional comment\n :raises CreateVPNFailed: failed to create the VPN with reason\n :rtype: RouteVPN\n \"\"\"\n json = {\n 'name': name,\n 'ttl': ttl,\n 'mtu': mtu,\n 'pmtu_discovery': pmtu_discovery,\n 'tunnel_encryption': 'tunnel_mode',\n 'tunnel_mode': 'gre',\n 'enabled': enabled,\n 'comment': comment,\n 'rbvpn_tunnel_side_a': local_endpoint.data,\n 'rbvpn_tunnel_side_b': remote_endpoint.data\n }\n if policy_vpn is None:\n json['tunnel_encryption'] = 'no_encryption'\n else:\n json['tunnel_mode_vpn_ref'] = policy_vpn.href\n \n try:\n return ElementCreator(cls, json)\n except CreateElementFailed as err:\n raise CreateVPNFailed(err)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a GRE Tunnel with no encryption.", "response": "def create_gre_tunnel_no_encryption(cls, name, local_endpoint, remote_endpoint,\n mtu=0, pmtu_discovery=True, ttl=0,\n enabled=True, comment=None):\n \"\"\"\n Create a GRE Tunnel with no encryption. 
See `create_gre_tunnel_mode` for\n constructor descriptions.\n \"\"\"\n return cls.create_gre_tunnel_mode(\n name, local_endpoint, remote_endpoint, policy_vpn=None,\n mtu=mtu, pmtu_discovery=pmtu_discovery, ttl=ttl,\n enabled=enabled, comment=comment)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_preshared_key(self, new_key):\n if self.data.get('preshared_key'):\n self.update(preshared_key=new_key)", "response": "Sets the preshared key for this VPN."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the internal endpoint and external gateway that is used to specify which interface is enabled for VPN.", "response": "def endpoint(self):\n \"\"\"\n Endpoint is used to specify which interface is enabled for\n VPN. This is the InternalEndpoint property of the\n InternalGateway.\n \n :return: internal endpoint where VPN is enabled\n :rtype: InternalEndpoint,ExternalGateway\n \"\"\"\n if self.endpoint_ref and self.tunnel_interface_ref:\n return InternalEndpoint(href=self.endpoint_ref)\n return Element.from_href(self.endpoint_ref)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create_gre_tunnel_endpoint(cls, endpoint=None, tunnel_interface=None,\n remote_address=None):\n \"\"\"\n Create the GRE tunnel mode or no encryption mode endpoint.\n If the GRE tunnel mode endpoint is an SMC managed device,\n both an endpoint and a tunnel interface is required. If the\n endpoint is externally managed, only an IP address is required.\n \n :param InternalEndpoint,ExternalEndpoint endpoint: the endpoint\n element for this tunnel endpoint.\n :param TunnelInterface tunnel_interface: the tunnel interface for\n this tunnel endpoint. 
Required for SMC managed devices.\n :param str remote_address: IP address, only required if the tunnel\n endpoint is a remote gateway.\n :rtype: TunnelEndpoint\n \"\"\"\n tunnel_interface = tunnel_interface.href if tunnel_interface else None\n endpoint = endpoint.href if endpoint else None\n return TunnelEndpoint(\n tunnel_interface_ref=tunnel_interface,\n endpoint_ref=endpoint,\n ip_address=remote_address)", "response": "Create the GRE tunnel endpoint."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a GRE transport mode endpoint.", "response": "def create_gre_transport_endpoint(cls, endpoint, tunnel_interface=None):\n \"\"\"\n Create the GRE transport mode endpoint. If the GRE transport mode\n endpoint is an SMC managed device, both an endpoint and a tunnel\n interface is required. If the GRE endpoint is an externally managed\n device, only an endpoint is required.\n \n :param InternalEndpoint,ExternalEndpoint endpoint: the endpoint\n element for this tunnel endpoint.\n :param TunnelInterface tunnel_interface: the tunnel interface for\n this tunnel endpoint. Required for SMC managed devices.\n :rtype: TunnelEndpoint\n \"\"\"\n tunnel_interface = tunnel_interface.href if tunnel_interface else None\n return TunnelEndpoint(\n endpoint_ref=endpoint.href,\n tunnel_interface_ref=tunnel_interface)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate the VPN tunnel endpoint.", "response": "def create_ipsec_endpoint(cls, gateway, tunnel_interface=None):\n \"\"\" \n Create the VPN tunnel endpoint. If the VPN tunnel endpoint\n is an SMC managed device, both a gateway and a tunnel interface\n is required. If the VPN endpoint is an externally managed\n device, only a gateway is required.\n \n :param InternalGateway,ExternalGateway gateway: the gateway\n for this tunnel endpoint\n :param TunnelInterface tunnel_interface: Tunnel interface for\n this RBVPN. 
This can be None if the gateway is a non-SMC\n managed gateway.\n :rtype: TunnelEndpoint\n \"\"\"\n tunnel_interface = tunnel_interface.href if tunnel_interface else None\n return TunnelEndpoint(\n gateway_ref=gateway.href,\n tunnel_interface_ref=tunnel_interface)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset the authentication request field for the specified interface_id.", "response": "def set_auth_request(self, interface_id, address=None):\n \"\"\"\n Set the authentication request field for the specified\n engine.\n \n \"\"\"\n self.interface.set_auth_request(interface_id, address)\n self._engine.update()"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset this interface as the primary heartbeat for this master NGFW Engines only.", "response": "def set_primary_heartbeat(self, interface_id):\n \"\"\"\n Set this interface as the primary heartbeat for this engine. \n This will 'unset' the current primary heartbeat and move to\n specified interface_id.\n Clusters and Master NGFW Engines only.\n \n :param str,int interface_id: interface specified for primary mgmt\n :raises InterfaceNotFound: specified interface is not found\n :raises UpdateElementFailed: failed modifying interfaces\n :return: None\n \"\"\"\n self.interface.set_unset(interface_id, 'primary_heartbeat')\n self._engine.update()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets this interface as the backup heartbeat interface.", "response": "def set_backup_heartbeat(self, interface_id):\n \"\"\"\n Set this interface as the backup heartbeat interface.\n Clusters and Master NGFW Engines only.\n \n :param str,int interface_id: interface as backup\n :raises InterfaceNotFound: specified interface is not found\n :raises UpdateElementFailed: failure to update interface\n :return: None\n \"\"\"\n self.interface.set_unset(interface_id, 'backup_heartbeat')\n 
self._engine.update()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsets the primary management for a specific interface.", "response": "def set_primary_mgt(self, interface_id, auth_request=None,\n address=None):\n \"\"\"\n Specifies the Primary Control IP address for Management Server\n contact. For single FW and cluster FW's, this will enable 'Outgoing',\n 'Auth Request' and the 'Primary Control' interface. For clusters, the\n primary heartbeat will NOT follow this change and should be set\n separately using :meth:`.set_primary_heartbeat`.\n For virtual FW engines, only auth_request and outgoing will be set.\n For master engines, only primary control and outgoing will be set.\n \n Primary management can be set on an interface with single IP's,\n multiple IP's or VLANs.\n ::\n \n engine.interface_options.set_primary_mgt(1)\n \n Set primary management on a VLAN interface::\n \n engine.interface_options.set_primary_mgt('1.100')\n \n Set primary management and different interface for auth_request::\n \n engine.interface_options.set_primary_mgt(\n interface_id='1.100', auth_request=0)\n \n Set on specific IP address of interface VLAN with multiple addresses::\n \n engine.interface_options.set_primary_mgt(\n interface_id='3.100', address='50.50.50.1')\n \n :param str,int interface_id: interface id to make management\n :param str address: if the interface for management has more than\n one ip address, this specifies which IP to bind to.\n :param str,int auth_request: if setting primary mgt on a cluster\n interface with no CVI, you must pick another interface to set\n the auth_request field to (default: None)\n :raises InterfaceNotFound: specified interface is not found\n :raises UpdateElementFailed: updating management fails\n :return: None\n \n .. 
note:: Setting primary management on a cluster interface with no\n CVI requires you to set the interface for auth_request.\n \"\"\"\n intfattr = ['primary_mgt', 'outgoing']\n if self.interface.engine.type in ('virtual_fw',):\n intfattr.remove('primary_mgt')\n \n for attribute in intfattr:\n self.interface.set_unset(interface_id, attribute, address)\n \n if auth_request is not None:\n self.interface.set_auth_request(auth_request)\n else:\n self.interface.set_auth_request(interface_id, address)\n \n self._engine.update()"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef set_backup_mgt(self, interface_id):\n self.interface.set_unset(interface_id, 'backup_mgt')\n self._engine.update()", "response": "Set the specified interface as a backup management interface."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_outgoing(self, interface_id):\n self.interface.set_unset(interface_id, 'outgoing')\n self._engine.update()", "response": "Sets the outgoing address for the specified interface."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef dscp_marking_and_throttling(self, qos_policy):\n self._interface.data.update(qos_limit=-1,\n qos_mode='dscp',\n qos_policy_ref=qos_policy.href)", "response": "Enables DSCP marking and throttling on the interface."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ndeleting the related object from the cache.", "response": "def delete(self):\n \"\"\"\n Override delete in parent class, this will also delete\n the routing configuration referencing this interface.\n ::\n\n engine = Engine('vm')\n interface = engine.interface.get(2)\n interface.delete()\n \"\"\"\n super(Interface, self).delete()\n for route in self._engine.routing:\n if route.to_delete:\n route.delete()\n self._engine._del_cache()"} {"SOURCE": "codesearchnet", 
"instruction": "Make a summary of the following Python 3 code\ndef update(self, *args, **kw):\n super(Interface, self).update(*args, **kw)\n self._engine._del_cache()\n return self", "response": "Update this interface s information back to SMC."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning 3 - tuple with ( address network nicid )", "response": "def addresses(self):\n \"\"\"\n Return 3-tuple with (address, network, nicid)\n\n :return: address related information of interface as 3-tuple list\n :rtype: list\n \"\"\"\n addresses = []\n for i in self.all_interfaces:\n if isinstance(i, VlanInterface):\n for v in i.interfaces:\n addresses.append((v.address, v.network_value, v.nicid))\n else:\n addresses.append((i.address, i.network_value, i.nicid))\n return addresses"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef sub_interfaces(self):\n interfaces = self.all_interfaces\n sub_interfaces = []\n for interface in interfaces:\n if isinstance(interface, VlanInterface):\n if interface.has_interfaces:\n for subaddr in interface.interfaces:\n sub_interfaces.append(subaddr)\n else:\n sub_interfaces.append(interface)\n else:\n sub_interfaces.append(interface)\n \n return sub_interfaces", "response": "Flatten out all top level interfaces and only return sub interfaces."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_boolean(self, name):\n for interface in self.all_interfaces:\n if isinstance(interface, VlanInterface):\n if any(vlan for vlan in interface.interfaces\n if getattr(vlan, name)):\n return True\n else:\n if getattr(interface, name):\n return True\n return False", "response": "Get the boolean value for the attribute specified from the the\n sub interface."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ndeletes any invalid routes for this interface.", 
"response": "def delete_invalid_route(self):\n \"\"\"\n Delete any invalid routes for this interface. An invalid route is\n a left over when an interface is changed to a different network.\n \n :return: None\n \"\"\"\n try:\n routing = self._engine.routing.get(self.interface_id)\n for route in routing:\n if route.invalid or route.to_delete:\n route.delete()\n except InterfaceNotFound: # Only VLAN identifiers, so no routing\n pass"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef reset_interface(self):\n self.data['interfaces'] = []\n if self.typeof != 'tunnel_interface':\n self.data['vlanInterfaces'] = []\n self.update()\n self.delete_invalid_route()", "response": "Reset the interface by removing all assigned addresses and VLANs."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nchanging the interface ID for this interface. This method is used to change the interface ID for both CVI and NDI interfaces.", "response": "def change_interface_id(self, interface_id):\n \"\"\"\n Change the interface ID for this interface. This can be used on any\n interface type. If the interface is an Inline interface, you must\n provide the ``interface_id`` in format '1-2' to define both\n interfaces in the pair. The change is committed after calling this\n method.\n ::\n \n itf = engine.interface.get(0)\n itf.change_interface_id(10)\n \n Or inline interface pair 10-11::\n \n itf = engine.interface.get(10)\n itf.change_interface_id('20-21')\n \n :param str,int interface_id: new interface ID. 
Format can be single\n value for non-inline interfaces or '1-2' format for inline.\n :raises UpdateElementFailed: changing the interface failed with reason\n :return: None\n \"\"\"\n splitted = str(interface_id).split('-')\n if1 = splitted[0]\n \n for interface in self.all_interfaces:\n # Set top level interface, this only uses a single value which\n # will be the leftmost interface\n if isinstance(interface, VlanInterface):\n interface.interface_id = '{}.{}'.format(if1,\n interface.interface_id.split('.')[-1])\n \n if interface.has_interfaces:\n for sub_interface in interface.interfaces:\n if isinstance(sub_interface, InlineInterface):\n sub_interface.change_interface_id(interface_id)\n else:\n # VLAN interface only (i.e. CVI, NDI, etc)\n sub_interface.change_interface_id(if1) \n else:\n if isinstance(interface, InlineInterface):\n interface.change_interface_id(interface_id)\n else:\n interface.update(nicid=if1)\n \n self.interface_id = if1\n self.update()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the physical interface base settings with the values in another_interface.", "response": "def _update_interface(self, other_interface):\n \"\"\"\n Update the physical interface base settings with another interface.\n Only set updated if a value has changed. You must also call `update`\n on the interface if modifications are made. This is called from other\n interfaces (i.e. 
PhysicalInterface, ClusterPhysicalInterface, etc) to\n update the top layer.\n \n :param other_interface PhysicalInterface: an instance of an\n interface where values in this interface will overwrite values\n that are different.\n :raises UpdateElementFailed: Failed to update the element\n :rtype: bool\n \"\"\"\n updated = False\n for name, value in other_interface.data.items():\n if isinstance(value, string_types) and getattr(self, name, None) != value:\n self.data[name] = value\n updated = True\n elif value is None and getattr(self, name, None):\n self.data[name] = None\n updated = True\n return updated"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_interface(self, other_interface, ignore_mgmt=True):\n base_updated = self._update_interface(other_interface)\n \n mgmt = ('auth_request', 'backup_heartbeat', 'backup_mgt',\n 'primary_mgt', 'primary_heartbeat', 'outgoing')\n \n updated = False\n invalid_routes = []\n \n def process_interfaces(current, interface):\n updated = False\n invalid_routes = []\n \n # Ignore interfaces with multiple addresses\n if current.has_multiple_addresses:\n return updated, invalid_routes\n \n local_interfaces = current.interfaces # Existing interface\n for interface in interface.interfaces: # New values\n local_interface = None\n if not getattr(interface, 'nodeid', None): # CVI\n cvi = [itf for itf in local_interfaces if not getattr(itf, 'nodeid', None)]\n local_interface = cvi[0] if cvi else None\n else:\n local_interface = local_interfaces.get(nodeid=interface.nodeid)\n \n if local_interface: # CVI or NDI sub interfaces\n for name, value in interface.data.items():\n if getattr(local_interface, name) != value:\n if ignore_mgmt and name in mgmt:\n pass\n else:\n local_interface[name] = value\n updated = True\n if 'network_value' in name: # Only reset routes if network changed\n invalid_routes.append(interface.nicid)\n else:\n current.data.setdefault('interfaces', 
[]).append(\n {interface.typeof: interface.data})\n updated = True\n return updated, invalid_routes\n\n # Handle VLANs\n is_vlan = other_interface.has_vlan\n if is_vlan:\n vlan_interfaces = self.vlan_interface\n for pvlan in other_interface.vlan_interface:\n current = vlan_interfaces.get(pvlan.vlan_id)\n if current:\n # PhysicalVlanInterface, set any parent interface values\n if current._update_interface(pvlan):\n updated = True\n else:\n # Create new interface\n self.data.setdefault('vlanInterfaces', []).append(pvlan.data)\n updated = True\n continue # Skip sub interface check\n \n _updated, routes = process_interfaces(current, pvlan)\n if _updated: updated = True\n invalid_routes.extend(routes)\n\n else:\n _updated, routes = process_interfaces(self, other_interface)\n if _updated: updated = True\n invalid_routes.extend(routes)\n \n interface = self\n if updated or base_updated:\n interface = self.update()\n if invalid_routes: # Interface updated, check the routes\n del_invalid_routes(self._engine, invalid_routes)\n \n return interface, base_updated or updated", "response": "Update an existing interface by comparing values between two interfaces."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds an interface to the tunnel interface cache.", "response": "def _add_interface(self, interface_id, **kw):\n \"\"\"\n Create a tunnel interface. 
Kw argument list is as follows\n \"\"\"\n base_interface = ElementCache()\n\n base_interface.update(\n interface_id=str(interface_id),\n interfaces=[])\n \n if 'zone_ref' in kw:\n zone_ref = kw.pop('zone_ref')\n base_interface.update(zone_ref=zone_helper(zone_ref) if zone_ref else None)\n \n if 'comment' in kw:\n base_interface.update(comment=kw.pop('comment'))\n \n self.data = base_interface\n \n interfaces = kw.pop('interfaces', [])\n if interfaces:\n for interface in interfaces:\n if interface.get('cluster_virtual', None) or \\\n len(interface.get('nodes', [])) > 1: # Cluster\n \n kw.update(interface_id=interface_id, interfaces=interfaces)\n cvi = ClusterPhysicalInterface(**kw)\n cvi.data.pop('vlanInterfaces', None)\n self.data.update(cvi.data)\n else:\n # Single interface FW\n for node in interface.get('nodes', []):\n node.update(nodeid=1)\n sni = SingleNodeInterface.create(interface_id, **node)\n base_interface.setdefault('interfaces', []).append(\n {sni.typeof: sni.data})"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a formatted list of NDI interfaces on this engine.", "response": "def ndi_interfaces(self):\n \"\"\"\n Return a formatted dict list of NDI interfaces on this engine.\n This will ignore CVI or any inline or layer 2 interface types.\n This can be used to identify to indicate available IP addresses\n for a given interface which can be used to run services such as\n SNMP or DNS Relay.\n \n :return: list of dict items [{'address':x, 'nicid':y}]\n :rtype: list(dict)\n \"\"\"\n return [{'address': interface.address, 'nicid': interface.nicid}\n for interface in self.interfaces\n if isinstance(interface, (NodeInterface, SingleNodeInterface))]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef change_vlan_id(self, original, new):\n vlan = self.vlan_interface.get_vlan(original)\n newvlan = str(new).split('-')\n splitted = vlan.interface_id.split('.')\n 
vlan.interface_id = '{}.{}'.format(splitted[0], newvlan[0])\n for interface in vlan.interfaces:\n if isinstance(interface, InlineInterface):\n interface.change_vlan_id(new)\n else:\n interface.change_vlan_id(newvlan[0])\n self.update()", "response": "Change the VLAN ID for a single or inline VLAN or inline VLAN."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nenable aggregate mode on this LAGG entry.", "response": "def enable_aggregate_mode(self, mode, interfaces):\n \"\"\" \n Enable Aggregate (LAGG) mode on this interface. Possible LAGG\n types are 'ha' and 'lb' (load balancing). For HA, only one\n secondary interface ID is required. For load balancing mode, up\n to 7 additional are supported (8 max interfaces).\n \n :param str mode: 'lb' or 'ha'\n :param list interfaces: secondary interfaces for this LAGG\n :type interfaces: list(str,int)\n :raises UpdateElementFailed: failed adding aggregate\n :return: None\n \"\"\"\n if mode in ['lb', 'ha']:\n self.data['aggregate_mode'] = mode\n self.data['second_interface_id'] = ','.join(map(str, interfaces)) \n self.save()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds an entry to the static ARP table.", "response": "def static_arp_entry(self, ipaddress, macaddress, arp_type='static', netmask=32):\n \"\"\"\n Add an arp entry to this physical interface.\n ::\n\n interface = engine.physical_interface.get(0)\n interface.static_arp_entry(\n ipaddress='23.23.23.23',\n arp_type='static',\n macaddress='02:02:02:02:04:04')\n interface.save()\n\n :param str ipaddress: ip address for entry\n :param str macaddress: macaddress for ip address\n :param str arp_type: type of entry, 'static' or 'proxy' (default: static)\n :param str,int netmask: netmask for entry (default: 32)\n :return: None\n \"\"\"\n self.data['arp_entry'].append({\n 'ipaddress': ipaddress,\n 'macaddress': macaddress,\n 'netmask': netmask,\n 'type': arp_type})"} {"SOURCE": 
"codesearchnet", "instruction": "Write a Python 3 function that can\nadd the Cluster interface to the internal data structure.", "response": "def _add_interface(self, interface_id, mgt=None, **kw):\n \"\"\"\n Add the Cluster interface. If adding a cluster interface to\n an existing node, retrieve the existing interface and call\n this method. Use the supported format for defining an interface.\n \"\"\"\n _kw = copy.deepcopy(kw) # Preserve original kw, especially lists\n mgt = mgt if mgt else {}\n \n if 'cvi_mode' in _kw:\n self.data.update(cvi_mode=_kw.pop('cvi_mode'))\n \n if 'macaddress' in _kw:\n self.data.update(\n macaddress=_kw.pop('macaddress'))\n if 'cvi_mode' not in self.data:\n self.data.update(cvi_mode='packetdispatch')\n \n if 'zone_ref' in _kw:\n zone_ref = _kw.pop('zone_ref')\n self.data.update(zone_ref=zone_helper(zone_ref) if zone_ref else None)\n \n if 'comment' in _kw:\n self.data.update(comment=_kw.pop('comment'))\n\n interfaces = _kw.pop('interfaces', [])\n for interface in interfaces:\n vlan_id = interface.pop('vlan_id', None)\n if vlan_id:\n _interface_id = '{}.{}'.format(interface_id, vlan_id)\n else:\n _interface_id = interface_id\n \n _interface = []\n if_mgt = {k: str(v) == str(_interface_id) for k, v in mgt.items()}\n \n if 'cluster_virtual' in interface and 'network_value' in interface:\n cluster_virtual = interface.pop('cluster_virtual')\n network_value = interface.pop('network_value')\n if cluster_virtual and network_value:\n cvi = ClusterVirtualInterface.create(\n _interface_id, cluster_virtual, network_value,\n auth_request=True if if_mgt.get('primary_mgt') else False)\n \n _interface.append({cvi.typeof: cvi.data})\n \n for node in interface.pop('nodes', []):\n _node = if_mgt.copy()\n _node.update(outgoing=True if if_mgt.get('primary_mgt') else False)\n # Add node specific key/value pairs set on the node. 
This can\n # also be used to override management settings\n _node.update(node)\n ndi = NodeInterface.create(\n interface_id=_interface_id, **_node)\n _interface.append({ndi.typeof: ndi.data})\n\n if vlan_id:\n vlan_interface = {\n 'interface_id': _interface_id,\n 'zone_ref': zone_helper(interface.pop('zone_ref', None)),\n 'comment': interface.pop('comment', None),\n 'interfaces': _interface}\n # Add remaining kwargs on vlan level to VLAN physical interface\n for name, value in interface.items():\n vlan_interface[name] = value\n self.data.setdefault('vlanInterfaces', []).append(\n vlan_interface)\n else:\n self.data.setdefault('interfaces', []).extend(\n _interface)\n\n # Remaining kw go to base level interface\n for name, value in _kw.items():\n self.data[name] = value"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef delete(self):\n if self in self._parent.vlan_interface:\n self._parent.data['vlanInterfaces'] = [\n v for v in self._parent.vlan_interface\n if v != self]\n self.update()\n for route in self._parent._engine.routing:\n if route.to_delete:\n route.delete()", "response": "Delete this Vlan interface from the parent interface."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef change_vlan_id(self, vlan_id):\n intf_id, _ = self.interface_id.split('.')\n self.interface_id = '{}.{}'.format(intf_id, vlan_id)\n for interface in self.interfaces:\n interface.change_vlan_id(vlan_id)\n self.update()", "response": "Change the VLAN id for this interface."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting from the interface collection.", "response": "def get(self, *args, **kwargs):\n \"\"\"\n Get from the interface collection. It is more accurate to use\n kwargs to specify an attribute of the sub interface to retrieve\n rather than using an index value. 
If retrieving using an index,\n the collection will first check vlan interfaces and standard\n interfaces second. In most cases, if VLANs exist, standard\n interface definitions will be nested below the VLAN with exception\n of Inline Interfaces which may have both.\n \n :param int args: index to retrieve\n :param kwargs: key value for sub interface\n :rtype: SubInterface or None\n \"\"\"\n for collection in self.items:\n if args:\n index = args[0]\n if len(collection) and (index <= len(collection)-1):\n return collection[index]\n else:\n # Collection with get\n result = collection.get(**kwargs)\n if result is not None:\n return result\n return None"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_vlan(self, *args, **kwargs):\n if args:\n kwargs = {'vlan_id': str(args[0])}\n key, value = kwargs.popitem()\n for vlan in self:\n if getattr(vlan, key, None) == value:\n return vlan\n raise InterfaceNotFound('VLAN ID {} was not found on this engine.'\n .format(value))", "response": "Get the VLAN from this PhysicalInterface."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the sub interfaces for this VlanInterface", "response": "def get(self, *args, **kwargs):\n \"\"\"\n Get the sub interfaces for this VlanInterface\n \n >>> itf = engine.interface.get(3)\n >>> list(itf.vlan_interface)\n [Layer3PhysicalInterfaceVlan(name=VLAN 3.3), Layer3PhysicalInterfaceVlan(name=VLAN 3.5),\n Layer3PhysicalInterfaceVlan(name=VLAN 3.4)]\n \n :param int args: args are translated to vlan_id=args[0]\n :param kwargs: key value for sub interface\n :rtype: VlanInterface or None\n \"\"\"\n if args:\n kwargs = {'vlan_id': str(args[0])}\n key, value = kwargs.popitem()\n for item in self:\n if 'vlan_id' in key and getattr(item, key, None) == value:\n return item\n for vlan in item.interfaces:\n if getattr(vlan, key, None) == value:\n return item"} {"SOURCE": "codesearchnet", "instruction": "How would 
you code a function in Python 3 to\nfind the management interface specified and return either the string representation of the interface_id.", "response": "def find_mgmt_interface(self, mgmt):\n \"\"\"\n Find the management interface specified and return\n either the string representation of the interface_id.\n \n Valid options: primary_mgt, backup_mgt, primary_heartbeat,\n backup_heartbeat, outgoing, auth_request\n \n :return: str interface_id\n \"\"\"\n for intf in self:\n for allitf in intf.all_interfaces:\n if isinstance(allitf, VlanInterface):\n for vlan in allitf.interfaces:\n if getattr(vlan, mgmt, None):\n return allitf.interface_id\n else:\n if getattr(allitf, mgmt, None):\n return intf.interface_id"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget the interface from engine json", "response": "def get(self, interface_id):\n \"\"\"\n Get the interface from engine json\n \n :param str interface_id: interface ID to find\n :raises InterfaceNotFound: Cannot find interface\n \"\"\"\n # From within engine, skips nested iterators for this find\n # Make sure were dealing with a string\n interface_id = str(interface_id)\n for intf in self:\n if intf.interface_id == interface_id:\n intf._engine = self.engine\n return intf\n else: # Check for inline interfaces\n if '.' 
in interface_id:\n # It's a VLAN interface\n vlan = interface_id.split('.')\n # Check that we're on the right interface\n if intf.interface_id == vlan[0]:\n if intf.has_vlan:\n return intf.vlan_interface.get_vlan(vlan[-1])\n\n elif intf.has_interfaces:\n for interface in intf.interfaces:\n if isinstance(interface, InlineInterface):\n split_intf = interface.nicid.split('-')\n if interface_id == interface.nicid or \\\n str(interface_id) in split_intf:\n intf._engine = self.engine\n return intf\n\n raise InterfaceNotFound(\n 'Interface id {} was not found on this engine.'.format(interface_id))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef set_unset(self, interface_id, attribute, address=None):\n interface = self.get(interface_id) if interface_id is not None else None\n if address is not None:\n target_network = None\n sub_interface = interface.all_interfaces.get(address=address)\n if sub_interface:\n target_network = sub_interface.network_value\n\n if not target_network:\n raise InterfaceNotFound('Address specified: %s was not found on interface '\n '%s' % (address, interface_id))\n \n for interface in self:\n all_subs = interface.sub_interfaces()\n for sub_interface in all_subs:\n # Skip VLAN only interfaces (no addresses)\n if not isinstance(sub_interface, (VlanInterface, InlineInterface)):\n if getattr(sub_interface, attribute) is not None:\n if sub_interface.nicid == str(interface_id):\n if address is not None:\n if sub_interface.network_value == target_network:\n sub_interface[attribute] = True\n else:\n sub_interface[attribute] = False\n else:\n sub_interface[attribute] = True\n else: #unset\n sub_interface[attribute] = False", "response": "Set attribute to True and unset the same attribute for all other CVI interfaces."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set_auth_request(self, interface_id, address=None):\n for 
engine_type in ['master', 'layer2', 'ips']:\n if engine_type in self.engine.type:\n return\n \n interface = self.get(interface_id)\n if 'cluster' in self.engine.type:\n # Auth request on a cluster interface must have at least a CVI.\n # It cannot bind to NDI only interfaces\n if not any(isinstance(itf, ClusterVirtualInterface)\n for itf in interface.all_interfaces):\n raise InterfaceNotFound('The interface specified: %s does not have '\n 'a CVI interface defined and therefore cannot be used as an '\n 'interface for auth_requests. If setting the primary_mgt interface '\n 'provide an interface id for auth_request.' % interface_id)\n \n current_interface = self.get(\n self.engine.interface_options.auth_request)\n for itf in current_interface.all_interfaces:\n if getattr(itf, 'auth_request', False):\n itf['auth_request'] = False\n # Set\n sub_if = interface.all_interfaces\n if any(isinstance(itf, ClusterVirtualInterface) for itf in sub_if):\n for itf in sub_if:\n if isinstance(itf, ClusterVirtualInterface):\n if address:\n if getattr(itf, 'address', None) == address:\n itf['auth_request'] = True\n else:\n itf['auth_request'] = True\n else:\n for itf in sub_if:\n if getattr(itf, 'auth_request', None) is not None:\n if address:\n if getattr(itf, 'address', None) == address:\n itf['auth_request'] = True\n else:\n itf['auth_request'] = True", "response": "Set auth request on a NDI interface."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef as_dotted(dotted_str):\n #max_asn = 4294967295 (65535 * 65535)\n if '.' 
not in dotted_str:\n return dotted_str\n max_byte = 65535\n left, right = map(int, dotted_str.split('.'))\n if left > max_byte or right > max_byte:\n raise ValueError('The max low and high order value for '\n 'a 32-bit ASN is 65535')\n binval = \"{0:016b}\".format(left)\n binval += \"{0:016b}\".format(right)\n return int(binval, 2)", "response": "Return the ASN number represented by the given string as a dotted string."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nupdating the antispoofing list with the given networks.", "response": "def update_antispoofing(self, networks=None):\n \"\"\"\n Pass a list of networks to update antispoofing networks with.\n You can clear networks by providing an empty list. If networks\n are provided but already exist, no update is made.\n \n :param list networks: networks, groups or hosts for antispoofing\n :rtype: bool\n \"\"\"\n if not networks and len(self.data.get('antispoofing_ne_ref')):\n self.data.update(antispoofing_ne_ref=[])\n return True\n _networks = element_resolver(networks)\n if set(_networks) ^ set(self.data.get('antispoofing_ne_ref')):\n self.data.update(antispoofing_ne_ref=_networks)\n return True\n return False"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nenabling BGP on this engine.", "response": "def enable(self, autonomous_system, announced_networks,\n router_id=None, bgp_profile=None):\n \"\"\"\n Enable BGP on this engine. On master engine, enable BGP on the\n virtual firewall. 
When adding networks to `announced_networks`, the\n element types can be of type :class:`smc.elements.network.Host`,\n :class:`smc.elements.network.Network` or :class:`smc.elements.group.Group`.\n If passing a Group, it must have element types of host or network.\n \n Within announced_networks, you can pass a 2-tuple that provides an optional\n :class:`smc.routing.route_map.RouteMap` if additional policy is required\n for a given network.\n ::\n\n engine.dynamic_routing.bgp.enable(\n autonomous_system=AutonomousSystem('aws_as'),\n announced_networks=[Network('bgpnet'),Network('inside')],\n router_id='10.10.10.10')\n\n :param str,AutonomousSystem autonomous_system: provide the AS element\n or str href for the element\n :param str,BGPProfile bgp_profile: provide the BGPProfile element or\n str href for the element; if None, use system default\n :param list announced_networks: list of networks to advertise via BGP\n Announced networks can be single networks,host or group elements or\n a 2-tuple with the second tuple item being a routemap element\n :param str router_id: router id for BGP, should be an IP address. If not\n set, automatic discovery will use default bound interface as ID.\n :raises ElementNotFound: OSPF, AS or Networks not found\n :return: None\n \n .. 
note:: For arguments that take str or Element, the str value should be\n the href of the element.\n \"\"\"\n autonomous_system = element_resolver(autonomous_system)\n \n bgp_profile = element_resolver(bgp_profile) or \\\n BGPProfile('Default BGP Profile').href\n \n announced = self._unwrap(announced_networks)\n \n self.data.update(\n enabled=True,\n bgp_as_ref=autonomous_system,\n bgp_profile_ref=bgp_profile,\n announced_ne_setting=announced,\n router_id=router_id)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef update_configuration(self, **kwargs):\n updated = False\n if 'announced_networks' in kwargs:\n kwargs.update(announced_ne_setting=kwargs.pop('announced_networks'))\n if 'bgp_profile' in kwargs:\n kwargs.update(bgp_profile_ref=kwargs.pop('bgp_profile'))\n if 'autonomous_system' in kwargs:\n kwargs.update(bgp_as_ref=kwargs.pop('autonomous_system'))\n \n announced_ne = kwargs.pop('announced_ne_setting', None)\n \n for name, value in kwargs.items():\n _value = element_resolver(value)\n if self.data.get(name) != _value:\n self.data[name] = _value\n updated = True\n \n if announced_ne is not None:\n s = self.data.get('announced_ne_setting')\n ne = self._unwrap(announced_ne)\n \n if len(announced_ne) != len(s) or not self._equal(ne, s):\n self.data.update(announced_ne_setting=ne)\n updated = True\n \n return updated", "response": "Update the configuration of the current object based on the given kwargs."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef announced_networks(self):\n return [(Element.from_href(ne.get('announced_ne_ref')),\n Element.from_href(ne.get('announced_rm_ref')))\n for ne in self.data.get('announced_ne_setting')]", "response": "Show all announced networks for the BGP configuration."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates an AutonomousSystem with the 
specified name and AS number.", "response": "def create(cls, name, as_number, comment=None):\n \"\"\"\n Create an AS to be applied on the engine BGP configuration. An\n AS is a required parameter when creating an ExternalBGPPeer. You\n can also provide an AS number using an 'asdot' syntax::\n \n AutonomousSystem.create(name='myas', as_number='200.600')\n\n :param str name: name of this AS\n :param int as_number: AS number preferred\n :param str comment: optional string comment\n :raises CreateElementFailed: unable to create AS\n :raises ValueError: If providing AS number in dotted format and\n low/high order bytes are > 65535.\n :return: instance with meta\n :rtype: AutonomousSystem\n \"\"\"\n as_number = as_dotted(str(as_number))\n json = {'name': name,\n 'as_number': as_number,\n 'comment': comment}\n\n return ElementCreator(cls, json)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef create(cls, name, port=179, external_distance=20, internal_distance=200,\n local_distance=200, subnet_distance=None):\n \"\"\"\n Create a custom BGP Profile\n\n :param str name: name of profile\n :param int port: port for BGP process\n :param int external_distance: external administrative distance; (1-255)\n :param int internal_distance: internal administrative distance (1-255)\n :param int local_distance: local administrative distance (aggregation) (1-255)\n :param list subnet_distance: configure specific subnet's with respective distances\n :type tuple subnet_distance: (subnet element(Network), distance(int))\n :raises CreateElementFailed: reason for failure\n :return: instance with meta\n :rtype: BGPProfile\n \"\"\"\n json = {'name': name,\n 'external': external_distance,\n 'internal': internal_distance,\n 'local': local_distance,\n 'port': port}\n\n if subnet_distance:\n d = [{'distance': distance, 'subnet': subnet.href}\n for subnet, distance in subnet_distance]\n json.update(distance_entry=d)\n\n return 
ElementCreator(cls, json)", "response": "Create a custom BGP profile with specific attributes."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(cls, name, neighbor_as, neighbor_ip,\n neighbor_port=179, comment=None):\n \"\"\"\n Create an external BGP Peer. \n\n :param str name: name of peer\n :param str,AutonomousSystem neighbor_as_ref: AutonomousSystem\n element or href. \n :param str neighbor_ip: ip address of BGP peer\n :param int neighbor_port: port for BGP, default 179.\n :raises CreateElementFailed: failed creating\n :return: instance with meta\n :rtype: ExternalBGPPeer\n \"\"\"\n json = {'name': name,\n 'neighbor_ip': neighbor_ip,\n 'neighbor_port': neighbor_port,\n 'comment': comment}\n\n neighbor_as_ref = element_resolver(neighbor_as)\n json.update(neighbor_as=neighbor_as_ref)\n\n return ElementCreator(cls, json)", "response": "Create an external BGP Peer. \n"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef create(cls, name, connection_profile_ref=None,\n md5_password=None, local_as_option='not_set',\n max_prefix_option='not_enabled', send_community='no',\n connected_check='disabled', orf_option='disabled',\n next_hop_self=True, override_capability=False,\n dont_capability_negotiate=False, remote_private_as=False,\n route_reflector_client=False, soft_reconfiguration=True,\n ttl_option='disabled', comment=None):\n \"\"\"\n Create a new BGPPeering configuration.\n\n :param str name: name of peering\n :param str,BGPConnectionProfile connection_profile_ref: required BGP\n connection profile. System default used if not provided.\n :param str md5_password: optional md5_password\n :param str local_as_option: the local AS mode. Valid options are:\n 'not_set', 'prepend', 'no_prepend', 'replace_as'\n :param str max_prefix_option: The max prefix mode. 
Valid options are:\n 'not_enabled', 'enabled', 'warning_only'\n :param str send_community: the send community mode. Valid options are:\n 'no', 'standard', 'extended', 'standard_and_extended'\n :param str connected_check: the connected check mode. Valid options are:\n 'disabled', 'enabled', 'automatic'\n :param str orf_option: outbound route filtering mode. Valid options are:\n 'disabled', 'send', 'receive', 'both'\n :param bool next_hop_self: next hop self setting\n :param bool override_capability: is override received capabilities\n :param bool dont_capability_negotiate: do not send capabilities\n :param bool remote_private_as: is remote a private AS\n :param bool route_reflector_client: Route Reflector Client (iBGP only)\n :param bool soft_reconfiguration: do soft reconfiguration inbound\n :param str ttl_option: ttl check mode. Valid options are: 'disabled', \n 'ttl-security'\n :raises CreateElementFailed: failed creating profile\n :return: instance with meta\n :rtype: BGPPeering\n \"\"\"\n json = {'name': name,\n 'local_as_option': local_as_option,\n 'max_prefix_option': max_prefix_option,\n 'send_community': send_community,\n 'connected_check': connected_check,\n 'orf_option': orf_option,\n 'next_hop_self': next_hop_self,\n 'override_capability': override_capability,\n 'dont_capability_negotiate': dont_capability_negotiate,\n 'soft_reconfiguration': soft_reconfiguration,\n 'remove_private_as': remote_private_as,\n 'route_reflector_client': route_reflector_client,\n 'ttl_option': ttl_option,\n 'comment': comment}\n\n if md5_password:\n json.update(md5_password=md5_password)\n\n connection_profile_ref = element_resolver(connection_profile_ref) or \\\n BGPConnectionProfile('Default BGP Connection Profile').href\n \n json.update(connection_profile=connection_profile_ref)\n\n return ElementCreator(cls, json)", "response": "Creates a new BGP PEERing configuration."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef 
create(cls, name, md5_password=None, connect_retry=120,\n session_hold_timer=180, session_keep_alive=60):\n \"\"\"\n Create a new BGP Connection Profile.\n\n :param str name: name of profile\n :param str md5_password: optional md5 password\n :param int connect_retry: The connect retry timer, in seconds\n :param int session_hold_timer: The session hold timer, in seconds\n :param int session_keep_alive: The session keep alive timer, in seconds\n :raises CreateElementFailed: failed creating profile\n :return: instance with meta\n :rtype: BGPConnectionProfile\n \"\"\"\n json = {'name': name,\n 'connect': connect_retry,\n 'session_hold_timer': session_hold_timer,\n 'session_keep_alive': session_keep_alive}\n\n if md5_password:\n json.update(md5_password=md5_password)\n\n return ElementCreator(cls, json)", "response": "Create a new BGP Connection Profile."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef enable(self, values):\n for value in values:\n if value in self.data:\n self.data[value] = True", "response": "Enable specific permissions on this role."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ndisabling specific permissions on this role.", "response": "def disable(self, values):\n \"\"\"\n Disable specific permissions on this role. Use :py:attr:`~permissions` to\n view valid permission settings and current value/s. Change is committed\n immediately.\n \n :param list values: list of values by allowed types\n :return: None\n \"\"\"\n for value in values:\n if value in self.data:\n self.data[value] = False"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning valid permissions and setting for this role.", "response": "def permissions(self):\n \"\"\"\n Return valid permissions and setting for this role. Permissions are\n returned as a list of dict items, {permission: state}. State for the\n permission is either True or False. 
Use :meth:`~enable` and\n :meth:`~disable` to toggle role settings.\n \n :return: list of permission settings\n :rtype: list(dict)\n \"\"\"\n permissions = []\n for permission, value in self.data.items():\n if permission not in self._reserved:\n permissions.append({permission: value})\n return permissions"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nextract the port range from a string or int .", "response": "def _extract_ports(port_string):\n \"\"\"\n Return a dict for translated_value based on a string or int\n value.\n \n Value could be 80, or '80' or '80-90'.\n \n Will be returned as {'min_port': 80, 'max_port': 80} or \n {'min_port': 80, 'max_port': 90}\n \n :rtype: dict\n \"\"\"\n _ports = str(port_string)\n if '-' in _ports:\n start, end = _ports.split('-')\n return {'min_port': start, 'max_port': end}\n return {'min_port': _ports, 'max_port': _ports}"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _update_nat_field(self, source_or_dest):\n original_value = source_or_dest.all_as_href()\n if original_value:\n nat_element = None\n if 'src' in source_or_dest.typeof and self.static_src_nat.has_nat:\n nat_element = self.static_src_nat\n elif 'dst' in source_or_dest.typeof and self.static_dst_nat.has_nat:\n nat_element = self.static_dst_nat\n \n if nat_element:\n nat_element.setdefault(nat_element.typeof, {}).update(\n original_value={'element': original_value[0]})", "response": "This method updates the original value of the source or destination field of a NAT rule."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _update_field(self, natvalue):\n updated = False\n if natvalue.element and natvalue.element != self.element:\n self.update(element=natvalue.element)\n self.pop('ip_descriptor', None)\n updated = True\n elif natvalue.ip_descriptor and self.ip_descriptor and \\\n natvalue.ip_descriptor != self.ip_descriptor:\n 
self.update(ip_descriptor=natvalue.ip_descriptor)\n self.pop('element', None)\n updated = True\n\n for port in ('min_port', 'max_port'):\n _port = getattr(natvalue, port, None)\n if _port is not None and getattr(self, port, None) != _port:\n self[port] = _port\n updated = True\n \n return updated", "response": "Update this object s fields with the given NATValue."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nupdating the source field for this rule.", "response": "def update_field(self, element_or_ip_address=None, \n start_port=None, end_port=None, **kw):\n \"\"\"\n Update the source NAT translation on this rule.\n You must call `save` or `update` on the rule to make this\n modification. To update the source target for this NAT rule, update\n the source field directly using rule.sources.update_field(...).\n This will automatically update the NAT value. This method should be\n used when you want to change the translated value or the port\n mappings for dynamic source NAT.\n \n Starting and ending ports are only used for dynamic source NAT and\n define the available ports for doing PAT on the outbound connection.\n \n :param str,Element element_or_ip_address: Element or IP address that\n is the NAT target\n :param int start_port: starting port value, only used for dynamic source NAT\n :param int end_port: ending port value, only used for dynamic source NAT\n :param bool automatic_proxy: whether to enable proxy ARP (default: True)\n :return: boolean indicating whether the rule was modified\n :rtype: bool\n \"\"\"\n updated = False\n src = _resolve_nat_element(element_or_ip_address) if \\\n element_or_ip_address else {}\n \n automatic_proxy = kw.pop('automatic_proxy', None)\n # Original value is only used when creating a rule for static src NAT.\n # This should be the href of the source field to properly create\n # TODO: The SMC API should autofill this based on source field\n _original_value = kw.pop('original_value', None)\n \n 
src.update(kw)\n \n if not self.translated_value:\n # Adding to a rule\n if 'dynamic_src_nat' in self.typeof:\n src.update(\n min_port=start_port or 1024,\n max_port=end_port or 65535)\n \n self.setdefault(self.typeof, {}).update(\n automatic_proxy=automatic_proxy if automatic_proxy else True,\n **self._translated_value(src))\n \n if 'static_src_nat' in self.typeof:\n if self.rule and self.rule.sources.all_as_href():\n original_value={'element': self.rule.sources.all_as_href()[0]}\n else:\n original_value={'element': _original_value}\n \n self.setdefault(self.typeof, {}).update(original_value=original_value)\n \n updated = True\n else:\n if 'dynamic_src_nat' in self.typeof:\n src.update(min_port=start_port, max_port=end_port)\n if self.translated_value._update_field(NATValue(src)):\n updated = True\n \n if automatic_proxy is not None and self.automatic_proxy \\\n != automatic_proxy:\n self.automatic_proxy = automatic_proxy\n updated = True\n \n return updated"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the translated value for this rule.", "response": "def translated_value(self):\n \"\"\"\n The translated value for this NAT type. If this rule\n does not have a NAT value defined, this will return\n None.\n \n :return: NATValue or None\n :rtype: NATValue\n \"\"\"\n if self.typeof in self:\n return NATValue(self.get(self.typeof, {}).get(\n 'translated_value'))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nupdating the field of the destination field in the rule.", "response": "def update_field(self, element_or_ip_address=None, \n original_port=None, translated_port=None, **kw):\n \"\"\"\n Update the destination NAT translation on this rule. You must call\n `save` or `update` on the rule to make this modification. The\n destination field in the NAT rule determines which destination is\n the target of the NAT. To change the target, call the\n rule.destinations.update_field(...) method. 
This will automatically\n update the NAT value. This method should be used when you want to\n change the translated value port mappings for the service.\n \n Translated Port values can be used to provide port redirection for\n the service specified in the NAT rule. These should be provided as a\n string format either in single port format, or as a port range.\n \n For example, providing redirection from port 80 to port 8080::\n \n original_port='80'\n translated_port='8080'\n \n You can also use a range format although port range sizes much\n then match in size. The format for range of ports is:\n '80-100', '6000-6020' - port 80 translates to 6000, etc.\n \n For example, doing port range redirection using a range of ports::\n \n original_port='80-90'\n translated_port='200-210'\n \n .. note:: When using a range of ports for static destination translation, you\n must use a port range of equal length or the update will be ignored.\n \n :param str,Element element_or_ip_address: Element or IP address that\n is the NAT target\n :param str,int original_port: The original port is based on the service port\n :param str,int translated_port: The port to translate the original port to\n :param bool automatic_proxy: whether to enable proxy ARP (default: True)\n :return: boolean indicating whether the rule was modified\n :rtype: bool\n \"\"\"\n updated = False\n src = _resolve_nat_element(element_or_ip_address) if \\\n element_or_ip_address else {}\n \n automatic_proxy = kw.pop('automatic_proxy', None)\n # Original value is only used when creating a rule for static src NAT.\n # This should be the href of the source field to properly create\n # TODO: The SMC API should autofill this based on source field\n _original_value = kw.pop('original_value', None)\n \n src.update(kw)\n if translated_port is not None:\n src.update(_extract_ports(translated_port))\n \n if not self.translated_value:\n # Adding to a rule\n self.setdefault(self.typeof, {}).update(\n 
automatic_proxy=automatic_proxy if automatic_proxy else True,\n **self._translated_value(src))\n \n if self.rule and self.rule.destinations.all_as_href():\n original_value={'element': self.rule.destinations.all_as_href()[0]}\n else:\n # If creating, original_value should be href of resource\n original_value={'element': _original_value}\n \n if original_port is not None:\n original_value.update(_extract_ports(original_port))\n \n self.setdefault(self.typeof, {}).update(\n original_value=original_value)\n \n updated = True\n else:\n if self.translated_value._update_field(NATValue(src)):\n updated = True\n \n if original_port:\n if self.original_value._update_field(NATValue(\n _extract_ports(original_port))):\n updated = True\n \n if automatic_proxy is not None and self.automatic_proxy \\\n != automatic_proxy:\n self.automatic_proxy = automatic_proxy\n updated = True\n \n return updated"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create(self, name, sources=None, destinations=None, services=None,\n dynamic_src_nat=None, dynamic_src_nat_ports=(1024, 65535),\n static_src_nat=None, static_dst_nat=None,\n static_dst_nat_ports=None, is_disabled=False, used_on=None,\n add_pos=None, after=None, before=None, comment=None):\n \"\"\"\n Create a NAT rule.\n\n When providing sources/destinations or services, you can provide the\n element href, network element or services from ``smc.elements``.\n You can also mix href strings with Element types in these fields. 
\n\n :param str name: name of NAT rule\n :param list sources: list of sources by href or Element\n :type sources: list(str,Element)\n :param list destinations: list of destinations by href or Element\n :type destinations: list(str,Element)\n :param list services: list of services by href or Element\n :type services: list(str,Element)\n :param dynamic_src_nat: str ip or Element for dest NAT\n :type dynamic_src_nat: str,Element\n :param tuple dynamic_src_nat_ports: starting and ending ports for PAT.\n Default: (1024, 65535)\n :param str static_src_nat: ip or element href of used for source NAT\n :param str static_dst_nat: destination NAT IP address or element href\n :param tuple static_dst_nat_ports: ports or port range used for original\n and destination ports (only needed if a different destination port\n is used and does not match the rules service port)\n :param bool is_disabled: whether to disable rule or not\n :param str,href used_on: Can be a str href of an Element or Element of type\n AddressRange('ANY'), AddressRange('NONE') or an engine element.\n :type used_on: str,Element\n :param int add_pos: position to insert the rule, starting with position 1. If\n the position value is greater than the number of rules, the rule is inserted at\n the bottom. If add_pos is not provided, rule is inserted in position 1. Mutually\n exclusive with ``after`` and ``before`` params.\n :param str after: Rule tag to add this rule after. Mutually exclusive with ``add_pos``\n and ``before`` params.\n :param str before: Rule tag to add this rule before. 
Mutually exclusive with ``add_pos``\n and ``after`` params.\n :param str comment: optional comment for the NAT rule\n :raises InvalidRuleValue: if rule requirements are not met\n :raises CreateRuleFailed: rule creation failure\n :return: newly created NAT rule\n :rtype: IPv4NATRule\n \"\"\"\n rule_values = self.update_targets(sources, destinations, services)\n rule_values.update(name=name, comment=comment)\n rule_values.update(is_disabled=is_disabled)\n \n rule_values.update(used_on=element_resolver(used_on) if used_on else used_on)\n #rule_values.update(used_on=element_resolver(AddressRange('ANY') if not \\\n # used_on else element_resolver(used_on)))\n \n if dynamic_src_nat:\n nat = DynamicSourceNAT()\n start_port, end_port = dynamic_src_nat_ports\n nat.update_field(dynamic_src_nat, start_port=start_port,\n end_port=end_port)\n rule_values.update(options=nat)\n \n elif static_src_nat:\n sources = rule_values['sources']\n if 'any' in sources or 'none' in sources:\n raise InvalidRuleValue('Source field cannot be none or any for '\n 'static source NAT.')\n \n nat = StaticSourceNAT()\n nat.update_field(static_src_nat,\n original_value=sources.get('src')[0])\n rule_values.update(options=nat)\n \n if static_dst_nat:\n destinations = rule_values['destinations']\n if 'any' in destinations or 'none' in destinations:\n raise InvalidRuleValue('Destination field cannot be none or any for '\n 'destination NAT.')\n \n nat = StaticDestNAT()\n original_port, translated_port = None, None\n if static_dst_nat_ports:\n original_port, translated_port = static_dst_nat_ports\n \n nat.update_field(static_dst_nat,\n original_value=destinations.get('dst')[0],\n original_port=original_port,\n translated_port=translated_port)\n \n rule_values.setdefault('options', {}).update(nat)\n\n if 'options' not in rule_values: # No NAT\n rule_values.update(options=LogOptions())\n \n params = None\n href = self.href\n if add_pos is not None:\n href = self.add_at_position(add_pos)\n elif before or 
after:\n params = self.add_before_after(before, after)\n \n return ElementCreator(\n self.__class__,\n exception=CreateRuleFailed,\n href=href,\n params=params,\n json=rule_values)", "response": "Creates a new NAT rule in the national domain."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef request(self, uri, method='GET', body=None, headers=None,\n **kwargs):\n \"\"\"Implementation of httplib2's Http.request.\"\"\"\n\n _credential_refresh_attempt = kwargs.pop(\n '_credential_refresh_attempt', 0)\n\n # Make a copy of the headers. They will be modified by the credentials\n # and we want to pass the original headers if we recurse.\n request_headers = headers.copy() if headers is not None else {}\n\n self.credentials.before_request(\n self._request, method, uri, request_headers)\n\n # Check if the body is a file-like stream, and if so, save the body\n # stream position so that it can be restored in case of refresh.\n body_stream_position = None\n if all(getattr(body, stream_prop, None) for stream_prop in\n _STREAM_PROPERTIES):\n body_stream_position = body.tell()\n\n # Make the request.\n response, content = self.http.request(\n uri, method, body=body, headers=request_headers, **kwargs)\n\n # If the response indicated that the credentials needed to be\n # refreshed, then refresh the credentials and re-attempt the\n # request.\n # A stored token may expire between the time it is retrieved and\n # the time the request is made, so we may need to try twice.\n if (response.status in self._refresh_status_codes\n and _credential_refresh_attempt < self._max_refresh_attempts):\n\n _LOGGER.info(\n 'Refreshing credentials due to a %s response. 
Attempt %s/%s.',\n response.status, _credential_refresh_attempt + 1,\n self._max_refresh_attempts)\n\n self.credentials.refresh(self._request)\n\n # Restore the body's stream position if needed.\n if body_stream_position is not None:\n body.seek(body_stream_position)\n\n # Recurse. Pass in the original headers, not our modified set.\n return self.request(\n uri, method, body=body, headers=headers,\n _credential_refresh_attempt=_credential_refresh_attempt + 1,\n **kwargs)\n\n return response, content", "response": "Implementation of httplib2 s Http. request."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef connect(self, broker, port=1883, client_id=\"\", clean_session=True):\n logger.info('Connecting to %s at port %s' % (broker, port))\n self._connected = False\n self._unexpected_disconnect = False\n self._mqttc = mqtt.Client(client_id, clean_session)\n\n # set callbacks\n self._mqttc.on_connect = self._on_connect\n self._mqttc.on_disconnect = self._on_disconnect\n\n if self._username:\n self._mqttc.username_pw_set(self._username, self._password)\n\n self._mqttc.connect(broker, int(port))\n\n timer_start = time.time()\n while time.time() < timer_start + self._loop_timeout:\n if self._connected or self._unexpected_disconnect:\n break;\n self._mqttc.loop()\n\n if self._unexpected_disconnect:\n raise RuntimeError(\"The client disconnected unexpectedly\")\n logger.debug('client_id: %s' % self._mqttc._client_id)\n return self._mqttc", "response": "Connect to an MQTT broker."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef publish(self, topic, message=None, qos=0, retain=False):\n logger.info('Publish topic: %s, message: %s, qos: %s, retain: %s'\n % (topic, message, qos, retain))\n self._mid = -1\n self._mqttc.on_publish = self._on_publish\n result, mid = self._mqttc.publish(topic, message, int(qos), retain)\n if result != 0:\n raise 
RuntimeError('Error publishing: %s' % result)\n\n timer_start = time.time()\n while time.time() < timer_start + self._loop_timeout:\n if mid == self._mid:\n break;\n self._mqttc.loop()\n\n if mid != self._mid:\n logger.warn('mid wasn\\'t matched: %s' % mid)", "response": "Publish a message to a topic with specified qos and retain flag."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef subscribe(self, topic, qos, timeout=1, limit=1):\n seconds = convert_time(timeout)\n self._messages[topic] = []\n limit = int(limit)\n self._subscribed = False\n\n logger.info('Subscribing to topic: %s' % topic)\n self._mqttc.on_subscribe = self._on_subscribe\n self._mqttc.subscribe(str(topic), int(qos))\n self._mqttc.on_message = self._on_message_list\n\n if seconds == 0:\n logger.info('Starting background loop')\n self._background_mqttc = self._mqttc\n self._background_mqttc.loop_start()\n return self._messages[topic]\n\n timer_start = time.time()\n while time.time() < timer_start + seconds:\n if limit == 0 or len(self._messages[topic]) < limit:\n self._mqttc.loop()\n else:\n # workaround for client to ack the publish. 
Otherwise,\n # it seems that if client disconnects quickly, broker\n # will not get the ack and publish the message again on\n # next connect.\n time.sleep(1)\n break\n return self._messages[topic]", "response": "Subscribe to a topic and return a list of message payloads received within the specified time."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listen(self, topic, timeout=1, limit=1):\n if not self._subscribed:\n logger.warn('Cannot listen when not subscribed to a topic')\n return []\n\n if topic not in self._messages:\n logger.warn('Cannot listen when not subscribed to topic: %s' % topic)\n return []\n\n # If enough messages have already been gathered, return them\n if limit != 0 and len(self._messages[topic]) >= limit:\n messages = self._messages[topic][:] # Copy the list's contents\n self._messages[topic] = []\n return messages[-limit:]\n\n seconds = convert_time(timeout)\n limit = int(limit)\n\n logger.info('Listening on topic: %s' % topic)\n timer_start = time.time()\n while time.time() < timer_start + seconds:\n if limit == 0 or len(self._messages[topic]) < limit:\n # If the loop is running in the background\n # merely sleep here for a second or so and continue\n # otherwise, do the loop ourselves\n if self._background_mqttc:\n time.sleep(1)\n else:\n self._mqttc.loop()\n else:\n # workaround for client to ack the publish. 
Otherwise,\n # it seems that if client disconnects quickly, broker\n # will not get the ack and publish the message again on\n # next connect.\n time.sleep(1)\n break\n\n messages = self._messages[topic][:] # Copy the list's contents\n self._messages[topic] = []\n return messages[-limit:] if limit != 0 else messages", "response": "Listen to a topic and return a list of message payloads received within the specified time."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nsubscribe to a topic and validate that the specified payload is received within the specified timeout.", "response": "def subscribe_and_validate(self, topic, qos, payload, timeout=1):\n \"\"\" Subscribe to a topic and validate that the specified payload is\n received within timeout. It is required that a connection has been\n established using `Connect` keyword. The payload can be specified as\n a python regular expression. If the specified payload is not received\n within timeout, an AssertionError is thrown.\n\n `topic` topic to subscribe to\n\n `qos` quality of service for the subscription\n\n `payload` payload (message) that is expected to arrive\n\n `timeout` time to wait for the payload to arrive\n\n Examples:\n\n | Subscribe And Validate | test/test | 1 | test message |\n\n \"\"\"\n seconds = convert_time(timeout)\n self._verified = False\n\n logger.info('Subscribing to topic: %s' % topic)\n self._mqttc.subscribe(str(topic), int(qos))\n self._payload = str(payload)\n self._mqttc.on_message = self._on_message\n\n timer_start = time.time()\n while time.time() < timer_start + seconds:\n if self._verified:\n break\n self._mqttc.loop()\n\n if not self._verified:\n raise AssertionError(\"The expected payload didn't arrive in the topic\")"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef unsubscribe(self, topic):\n try:\n tmp = self._mqttc\n except AttributeError:\n logger.info('No MQTT Client instance found so nothing to 
unsubscribe from.')\n return\n\n if self._background_mqttc:\n logger.info('Closing background loop')\n self._background_mqttc.loop_stop()\n self._background_mqttc = None\n\n if topic in self._messages:\n del self._messages[topic]\n\n logger.info('Unsubscribing from topic: %s' % topic)\n self._unsubscribed = False\n self._mqttc.on_unsubscribe = self._on_unsubscribe\n self._mqttc.unsubscribe(str(topic))\n\n timer_start = time.time()\n while (not self._unsubscribed and\n time.time() < timer_start + self._loop_timeout):\n self._mqttc.loop()\n\n if not self._unsubscribed:\n logger.warn('Client didn\\'t receive an unsubscribe callback')", "response": "Unsubscribe from a topic."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ndisconnecting from the broker.", "response": "def disconnect(self):\n \"\"\" Disconnect from MQTT Broker.\n\n Example:\n | Disconnect |\n\n \"\"\"\n try:\n tmp = self._mqttc\n except AttributeError:\n logger.info('No MQTT Client instance found so nothing to disconnect from.')\n return\n\n self._disconnected = False\n self._unexpected_disconnect = False\n self._mqttc.on_disconnect = self._on_disconnect\n\n self._mqttc.disconnect()\n\n timer_start = time.time()\n while time.time() < timer_start + self._loop_timeout:\n if self._disconnected or self._unexpected_disconnect:\n break;\n self._mqttc.loop()\n if self._unexpected_disconnect:\n raise RuntimeError(\"The client disconnected unexpectedly\")"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\npublish a single message and disconnect.", "response": "def publish_single(self, topic, payload=None, qos=0, retain=False,\n hostname=\"localhost\", port=1883, client_id=\"\", keepalive=60,\n will=None, auth=None, tls=None, protocol=mqtt.MQTTv31):\n\n \"\"\" Publish a single message and disconnect. 
This keyword uses the\n [http://eclipse.org/paho/clients/python/docs/#single|single]\n function of publish module.\n\n `topic` topic to which the message will be published\n\n `payload` message payload to publish (default None)\n\n `qos` qos of the message (default 0)\n\n `retain` retain flag (True or False, default False)\n\n `hostname` MQTT broker host (default localhost)\n\n `port` broker port (default 1883)\n\n `client_id` if not specified, a random id is generated\n\n `keepalive` keepalive timeout value for client\n\n `will` a dict containing will parameters for client:\n will = {'topic': \"\", 'payload':\", 'qos':,\n 'retain':}\n\n `auth` a dict containing authentication parameters for the client:\n auth = {'username':\"\", 'password':\"\"}\n\n `tls` a dict containing TLS configuration parameters for the client:\n dict = {'ca_certs':\"\", 'certfile':\"\",\n 'keyfile':\"\", 'tls_version':\"\",\n 'ciphers':\"}\n\n `protocol` MQTT protocol version (MQTTv31 or MQTTv311)\n\n Example:\n\n Publish a message on specified topic and disconnect:\n | Publish Single | topic=t/mqtt | payload=test | hostname=127.0.0.1 |\n\n \"\"\"\n logger.info('Publishing to: %s:%s, topic: %s, payload: %s, qos: %s' %\n (hostname, port, topic, payload, qos))\n publish.single(topic, payload, qos, retain, hostname, port,\n client_id, keepalive, will, auth, tls, protocol)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef publish_multiple(self, msgs, hostname=\"localhost\", port=1883,\n client_id=\"\", keepalive=60, will=None, auth=None,\n tls=None, protocol=mqtt.MQTTv31):\n\n \"\"\" Publish multiple messages and disconnect. This keyword uses the\n [http://eclipse.org/paho/clients/python/docs/#multiple|multiple]\n function of publish module.\n\n `msgs` a list of messages to publish. Each message is either a dict\n or a tuple. 
If a dict, it must be of the form:\n msg = {'topic':\"\", 'payload':\"\", 'qos':,\n 'retain':}\n Only the topic must be present. Default values will be used\n for any missing arguments. If a tuple, then it must be of the\n form:\n (\"\", \"\", qos, retain)\n\n See `publish single` for the description of hostname, port,\n client_id, keepalive, will, auth, tls, protocol.\n\n Example:\n\n | ${msg1} | Create Dictionary | topic=${topic} | payload=message 1 |\n | ${msg2} | Create Dictionary | topic=${topic} | payload=message 2 |\n | ${msg3} | Create Dictionary | topic=${topic} | payload=message 3 |\n | @{msgs} | Create List | ${msg1} | ${msg2} | ${msg3} |\n | Publish Multiple | msgs=${msgs} | hostname=127.0.0.1 |\n\n \"\"\"\n logger.info('Publishing to: %s:%s, msgs: %s' %\n (hostname, port, msgs))\n publish.multiple(msgs, hostname, port, client_id, keepalive,\n will, auth, tls, protocol)", "response": "Publish multiple messages and disconnect."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd user to request if user logged in", "response": "def user_to_request(handler):\n '''Add user to request if user logged in'''\n @wraps(handler)\n async def decorator(*args):\n request = _get_request(args)\n request[cfg.REQUEST_USER_KEY] = await get_cur_user(request)\n return await handler(*args)\n return decorator"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_synonym(self, other):\n self.synonyms.extend(other.synonyms)\n other.synonyms = self.synonyms", "response": "Add a synonym to the list of synonyms."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef find_one_sql(table, filter, fields=None):\n '''\n >>> find_one_sql('tbl', {'foo': 10, 'bar': 'baz'})\n ('SELECT * FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 10])\n >>> find_one_sql('tbl', {'id': 10}, fields=['foo', 'bar'])\n ('SELECT foo, bar FROM tbl WHERE id=$1', [10])\n 
'''\n keys, values = _split_dict(filter)\n fields = ', '.join(fields) if fields else '*'\n where = _pairs(keys)\n sql = 'SELECT {} FROM {} WHERE {}'.format(fields, table, where)\n return sql, values", "response": "Return a SQL query that only returns one row."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef insert_sql(table, data, returning='id'):\n '''\n >>> insert_sql('tbl', {'foo': 'bar', 'id': 1})\n ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING id', ['bar', 1])\n\n >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning=None)\n ('INSERT INTO tbl (foo, id) VALUES ($1, $2)', ['bar', 1])\n\n >>> insert_sql('tbl', {'foo': 'bar', 'id': 1}, returning='pk')\n ('INSERT INTO tbl (foo, id) VALUES ($1, $2) RETURNING pk', ['bar', 1])\n '''\n keys, values = _split_dict(data)\n sql = 'INSERT INTO {} ({}) VALUES ({}){}'.format(\n table,\n ', '.join(keys),\n ', '.join(_placeholders(data)),\n ' RETURNING {}'.format(returning) if returning else '')\n return sql, values", "response": "Insert data into the specified table."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef update_sql(table, filter, updates):\n '''\n >>> update_sql('tbl', {'foo': 'a', 'bar': 1}, {'bar': 2, 'baz': 'b'})\n ('UPDATE tbl SET bar=$1, baz=$2 WHERE bar=$3 AND foo=$4', [2, 'b', 1, 'a'])\n '''\n where_keys, where_vals = _split_dict(filter)\n up_keys, up_vals = _split_dict(updates)\n changes = _pairs(up_keys, sep=', ')\n where = _pairs(where_keys, start=len(up_keys) + 1)\n sql = 'UPDATE {} SET {} WHERE {}'.format(\n table, changes, where)\n return sql, up_vals + where_vals", "response": ">>> update_sql - Updates the table with the given filter and updates"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef delete_sql(table, filter):\n '''\n >>> delete_sql('tbl', {'foo': 10, 'bar': 'baz'})\n ('DELETE FROM tbl WHERE bar=$1 AND foo=$2', ['baz', 
10])\n '''\n keys, values = _split_dict(filter)\n where = _pairs(keys)\n sql = 'DELETE FROM {} WHERE {}'.format(table, where)\n return sql, values", "response": ">>> delete_sql - Delete a table from the database with a filter"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nsplit dict into sorted keys and values >>>", "response": "def _split_dict(dic):\n '''Split dict into sorted keys and values\n\n >>> _split_dict({'b': 2, 'a': 1})\n (['a', 'b'], [1, 2])\n '''\n keys = sorted(dic.keys())\n return keys, [dic[k] for k in keys]"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nturns the Python prompt into an Adventure game.", "response": "def play(seed=None):\n \"\"\"Turn the Python prompt into an Adventure game.\n\n With optional the `seed` argument the caller can supply an integer\n to start the Python random number generator at a known state.\n\n \"\"\"\n global _game\n\n from .game import Game\n from .prompt import install_words\n\n _game = Game(seed)\n load_advent_dat(_game)\n install_words(_game)\n _game.start()\n print(_game.output[:-1])"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nappends the Unicode representation of s to our output.", "response": "def write(self, more):\n \"\"\"Append the Unicode representation of `s` to our output.\"\"\"\n if more:\n self.output += str(more).upper()\n self.output += '\\n'"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nasking a question and prepare to receive a yes - or - no answer.", "response": "def yesno(self, s, yesno_callback, casual=False):\n \"\"\"Ask a question and prepare to receive a yes-or-no answer.\"\"\"\n self.write(s)\n self.yesno_callback = yesno_callback\n self.yesno_casual = casual"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ndisplays instructions if the user wants them.", "response": "def 
start2(self, yes):\n \"\"\"Display instructions if the user wants them.\"\"\"\n if yes:\n self.write_message(1)\n self.hints[3].used = True\n self.lamp_turns = 1000\n\n self.oldloc2 = self.oldloc = self.loc = self.rooms[1]\n self.dwarves = [ Dwarf(self.rooms[n]) for n in (19, 27, 33, 44, 64) ]\n self.pirate = Pirate(self.chest_room)\n\n treasures = self.treasures\n self.treasures_not_found = len(treasures)\n for treasure in treasures:\n treasure.prop = -1\n\n self.describe_location()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef do_command(self, words):\n self.output = ''\n self._do_command(words)\n return self.output", "response": "Parse and act upon the command in the list of strings words."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning an Adventure game saved to the given file.", "response": "def resume(self, obj):\n \"\"\"Returns an Adventure game saved to the given file.\"\"\"\n if isinstance(obj, str):\n savefile = open(obj, 'rb')\n else:\n savefile = obj\n game = pickle.loads(zlib.decompress(savefile.read()))\n if savefile is not obj:\n savefile.close()\n # Reinstate the random number generator.\n game.random_generator = random.Random()\n game.random_generator.setstate(game.random_state)\n del game.random_state\n return game"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nread the Adventure data file and return a Data object.", "response": "def parse(data, datafile):\n \"\"\"Read the Adventure data file and return a ``Data`` object.\"\"\"\n data._last_travel = [0, [0]] # x and verbs used by section 3\n\n while True:\n section_number = int(datafile.readline())\n if not section_number: # no further sections\n break\n store = globals().get('section%d' % section_number)\n while True:\n fields = [ (int(field) if field.lstrip('-').isdigit() else field)\n for field in datafile.readline().strip().split('\\t') ]\n if fields[0] == -1: # end-of-section 
marker\n break\n store(data, *fields)\n\n del data._last_travel # state used by section 3\n del data._object # state used by section 5\n\n data.object_list = sorted(set(data.objects.values()), key=attrgetter('n'))\n #data.room_list = sorted(set(data.rooms.values()), key=attrgetter('n'))\n for obj in data.object_list:\n name = obj.names[0]\n if hasattr(data, name):\n name = name + '2' # create identifiers like ROD2, PLANT2\n setattr(data, name, obj)\n\n return data"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nmap the values passed in SMTP headers to API - ready ones.", "response": "def _map_smtp_headers_to_api_parameters(self, email_message):\n \"\"\"\n Map the values passed in SMTP headers to API-ready\n 2-item tuples present in HEADERS_MAP\n\n header values must be a single string or list or tuple of strings\n\n :return: 2-item tuples of the form (api_name, api_values)\n \"\"\"\n api_data = []\n for smtp_key, api_transformer in six.iteritems(self._headers_map):\n data_to_transform = email_message.extra_headers.pop(smtp_key, None)\n if data_to_transform is not None:\n if isinstance(data_to_transform, (list, tuple)):\n # map each value in the tuple/list\n for data in data_to_transform:\n api_data.append((api_transformer[0], api_transformer[1](data)))\n elif isinstance(data_to_transform, dict):\n for data in six.iteritems(data_to_transform):\n api_data.append(api_transformer(data))\n else:\n # we only have one value\n api_data.append((api_transformer[0], api_transformer[1](data_to_transform)))\n return api_data"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _send(self, email_message):\n if not email_message.recipients():\n return False\n from_email = sanitize_address(email_message.from_email, email_message.encoding)\n\n to_recipients = [sanitize_address(addr, email_message.encoding)\n for addr in email_message.to]\n\n try:\n post_data = []\n post_data.append(('to', 
(\",\".join(to_recipients)),))\n if email_message.bcc:\n bcc_recipients = [sanitize_address(addr, email_message.encoding) for addr in email_message.bcc]\n post_data.append(('bcc', (\",\".join(bcc_recipients)),))\n if email_message.cc:\n cc_recipients = [sanitize_address(addr, email_message.encoding) for addr in email_message.cc]\n post_data.append(('cc', (\",\".join(cc_recipients)),))\n post_data.append(('text', email_message.body,))\n post_data.append(('subject', email_message.subject,))\n post_data.append(('from', from_email,))\n # get our recipient variables if they were passed in\n recipient_variables = email_message.extra_headers.pop('recipient_variables', None)\n if recipient_variables is not None:\n post_data.append(('recipient-variables', recipient_variables, ))\n\n for name, value in self._map_smtp_headers_to_api_parameters(email_message):\n post_data.append((name, value, ))\n\n if hasattr(email_message, 'alternatives') and email_message.alternatives:\n for alt in email_message.alternatives:\n if alt[1] == 'text/html':\n post_data.append(('html', alt[0],))\n break\n\n # Map Reply-To header if present\n try:\n if email_message.reply_to:\n post_data.append((\n \"h:Reply-To\",\n \", \".join(map(force_text, email_message.reply_to)),\n ))\n except AttributeError:\n pass\n\n if email_message.attachments:\n for attachment in email_message.attachments:\n post_data.append(('attachment', (attachment[0], attachment[1],)))\n content, header = encode_multipart_formdata(post_data)\n headers = {'Content-Type': header}\n else:\n content = post_data\n headers = None\n\n response = requests.post(self._api_url + \"messages\",\n auth=(\"api\", self._access_key),\n data=content, headers=headers)\n except:\n if not self.fail_silently:\n raise\n return False\n\n if response.status_code != 200:\n if not self.fail_silently:\n raise MailgunAPIError(response)\n return False\n\n return True", "response": "A helper method that does the actual sending."} {"SOURCE": "codesearchnet", 
"instruction": "Can you generate the documentation for the following Python 3 function\ndef send_messages(self, email_messages):\n if not email_messages:\n return\n\n num_sent = 0\n for message in email_messages:\n if self._send(message):\n num_sent += 1\n\n return num_sent", "response": "Sends one or more EmailMessage objects and returns the number of EmailMessages sent."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef student_view(self, context=None):\n context = context or {}\n context.update({\n 'display_name': self.display_name,\n 'image_url': self.image_url,\n 'thumbnail_url': self.thumbnail_url or self.image_url,\n 'description': self.description,\n 'xblock_id': text_type(self.scope_ids.usage_id),\n 'alt_text': self.alt_text or self.display_name,\n })\n fragment = self.build_fragment(\n template='view.html',\n context=context,\n css=[\n 'view.less.css',\n URL_FONT_AWESOME_CSS,\n ],\n js=[\n 'draggabilly.pkgd.js',\n 'view.js',\n ],\n js_init='ImageModalView',\n )\n return fragment", "response": "Build the default student view fragment for the student view."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef build_fragment(\n self,\n template='',\n context=None,\n css=None,\n js=None,\n js_init=None,\n ):\n \"\"\"\n Creates a fragment for display.\n \"\"\"\n template = 'templates/' + template\n context = context or {}\n css = css or []\n js = js or []\n rendered_template = ''\n if template:\n rendered_template = self.loader.render_django_template(\n template,\n context=Context(context),\n i18n_service=self.runtime.service(self, 'i18n'),\n )\n fragment = Fragment(rendered_template)\n for item in css:\n if item.startswith('/'):\n url = item\n else:\n item = 'public/' + item\n url = self.runtime.local_resource_url(self, item)\n fragment.add_css_url(url)\n for item in js:\n item = 'public/' + item\n url = self.runtime.local_resource_url(self, item)\n 
fragment.add_javascript_url(url)\n if js_init:\n fragment.initialize_js(js_init)\n return fragment", "response": "Builds a fragment for display."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _parse_title(file_path):\n title = file_path\n title = title.split('/')[-1]\n title = '.'.join(title.split('.')[:-1])\n title = ' '.join(title.split('-'))\n title = ' '.join([\n word.capitalize()\n for word in title.split(' ')\n ])\n return title", "response": "Parse a title from a file name"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreading the contents of a list of files", "response": "def _read_files(files):\n \"\"\"\n Read the contents of a list of files\n \"\"\"\n file_contents = [\n (\n _parse_title(file_path),\n _read_file(file_path),\n )\n for file_path in files\n ]\n return file_contents"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _find_files(directory):\n pattern = \"{directory}/*.xml\".format(\n directory=directory,\n )\n files = glob(pattern)\n return files", "response": "Find XML files in the directory."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef workbench_scenarios(cls):\n module = cls.__module__\n module = module.split('.')[0]\n directory = pkg_resources.resource_filename(module, 'scenarios')\n files = _find_files(directory)\n scenarios = _read_files(files)\n return scenarios", "response": "Gather scenarios to be displayed in the workbench"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _set_session():\n global __SESSION\n global __USER_ID\n \n if __SESSION is None:\n try:\n __SESSION = AuthorizedSession(google.auth.default(['https://www.googleapis.com/auth/userinfo.profile',\n 'https://www.googleapis.com/auth/userinfo.email'])[0])\n health()\n 
__USER_ID = id_token.verify_oauth2_token(__SESSION.credentials.id_token,\n Request(session=__SESSION))['email']\n except (DefaultCredentialsError, RefreshError) as gae:\n if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'):\n raise\n logging.warning(\"Unable to determine/refresh application credentials\")\n try:\n subprocess.check_call(['gcloud', 'auth', 'application-default',\n 'login', '--no-launch-browser'])\n __SESSION = AuthorizedSession(google.auth.default(['https://www.googleapis.com/auth/userinfo.profile',\n 'https://www.googleapis.com/auth/userinfo.email'])[0])\n except subprocess.CalledProcessError as cpe:\n if cpe.returncode < 0:\n logging.exception(\"%s was terminated by signal %d\",\n cpe.cmd, -cpe.returncode)\n else:\n logging.exception(\"%s returned %d\", cpe.cmd,\n cpe.returncode)\n raise gae", "response": "Sets the global session and user id for the current user."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _fiss_agent_header(headers=None):\n _set_session()\n\n fiss_headers = {\"User-Agent\" : FISS_USER_AGENT}\n if headers is not None:\n fiss_headers.update(headers)\n return fiss_headers", "response": "Returns the FISS User - Agent header for the current session."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _check_response_code(response, codes):\n if type(codes) == int:\n codes = [codes]\n if response.status_code not in codes:\n raise FireCloudServerError(response.status_code, response.content)", "response": "Check if the http response code is expected."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nlisting the entity types present in a workspace.", "response": "def list_entity_types(namespace, workspace):\n \"\"\"List the entity types present in a workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n\n Swagger:\n 
https://api.firecloud.org/#!/Entities/getEntityTypes\n \"\"\"\n headers = _fiss_agent_header({\"Content-type\": \"application/json\"})\n uri = \"workspaces/{0}/{1}/entities\".format(namespace, workspace)\n return __get(uri, headers=headers)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nupload entities from tab - delimited string.", "response": "def upload_entities(namespace, workspace, entity_data):\n \"\"\"Upload entities from tab-delimited string.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n entity_data (str): TSV string describing entites\n\n Swagger:\n https://api.firecloud.org/#!/Entities/importEntities\n \"\"\"\n body = urlencode({\"entities\" : entity_data})\n headers = _fiss_agent_header({\n 'Content-type': \"application/x-www-form-urlencoded\"\n })\n uri = \"workspaces/{0}/{1}/importEntities\".format(namespace, workspace)\n return __post(uri, headers=headers, data=body)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nuploading entities from a tsv file.", "response": "def upload_entities_tsv(namespace, workspace, entities_tsv):\n \"\"\"Upload entities from a tsv loadfile.\n\n File-based wrapper for api.upload_entities().\n A loadfile is a tab-separated text file with a header row\n describing entity type and attribute names, followed by\n rows of entities and their attribute values.\n\n Ex:\n entity:participant_id age alive\n participant_23 25 Y\n participant_27 35 N\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n entities_tsv (file): FireCloud loadfile, see format above\n \"\"\"\n if isinstance(entities_tsv, string_types):\n with open(entities_tsv, \"r\") as tsv:\n entity_data = tsv.read()\n elif isinstance(entities_tsv, io.StringIO):\n entity_data = entities_tsv.getvalue()\n else:\n raise ValueError('Unsupported input type.')\n return upload_entities(namespace, workspace, entity_data)"} 
{"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncopy entities between workspaces", "response": "def copy_entities(from_namespace, from_workspace, to_namespace,\n to_workspace, etype, enames, link_existing_entities=False):\n \"\"\"Copy entities between workspaces\n\n Args:\n from_namespace (str): project (namespace) to which source workspace belongs\n from_workspace (str): Source workspace name\n to_namespace (str): project (namespace) to which target workspace belongs\n to_workspace (str): Target workspace name\n etype (str): Entity type\n enames (list(str)): List of entity names to copy\n link_existing_entities (boolean): Link all soft conflicts to the entities that already exist.\n\n Swagger:\n https://api.firecloud.org/#!/Entities/copyEntities\n \"\"\"\n\n uri = \"workspaces/{0}/{1}/entities/copy\".format(to_namespace, to_workspace)\n body = {\n \"sourceWorkspace\": {\n \"namespace\": from_namespace,\n \"name\": from_workspace\n },\n \"entityType\": etype,\n \"entityNames\": enames\n }\n \n return __post(uri, json=body, params={'linkExistingEntities':\n str(link_existing_entities).lower()})"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_entities(namespace, workspace, etype):\n uri = \"workspaces/{0}/{1}/entities/{2}\".format(namespace, workspace, etype)\n return __get(uri)", "response": "List entities of given type in a workspace."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget entities of given type in a workspace as TSV.", "response": "def get_entities_tsv(namespace, workspace, etype):\n \"\"\"List entities of given type in a workspace as a TSV.\n\n Identical to get_entities(), but the response is a TSV.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n etype (str): Entity type\n\n Swagger:\n https://api.firecloud.org/#!/Entities/browserDownloadEntitiesTSV\n 
\"\"\"\n uri = \"workspaces/{0}/{1}/entities/{2}/tsv\".format(namespace,\n workspace, etype)\n return __get(uri)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_entity(namespace, workspace, etype, ename):\n uri = \"workspaces/{0}/{1}/entities/{2}/{3}\".format(namespace,\n workspace, etype, ename)\n return __get(uri)", "response": "Request entity information.\n\n Gets entity metadata and attributes.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n etype (str): Entity type\n ename (str): The entity's unique id\n\n Swagger:\n https://api.firecloud.org/#!/Entities/getEntity"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ndeletes entities in a workspace", "response": "def delete_entities(namespace, workspace, json_body):\n \"\"\"Delete entities in a workspace.\n\n Note: This action is not reversible. Be careful!\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n json_body:\n [\n {\n \"entityType\": \"string\",\n \"entityName\": \"string\"\n }\n ]\n\n Swagger:\n https://api.firecloud.org/#!/Entities/deleteEntities\n \"\"\"\n\n uri = \"workspaces/{0}/{1}/entities/delete\".format(namespace, workspace)\n return __post(uri, json=json_body)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef delete_entity_type(namespace, workspace, etype, ename):\n\n uri = \"workspaces/{0}/{1}/entities/delete\".format(namespace, workspace)\n if isinstance(ename, string_types):\n body = [{\"entityType\":etype, \"entityName\":ename}]\n elif isinstance(ename, Iterable):\n body = [{\"entityType\":etype, \"entityName\":i} for i in ename]\n\n return __post(uri, json=body)", "response": "Delete entities in a workspace."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\npaginating version of 
get_entities_with_type", "response": "def get_entities_query(namespace, workspace, etype, page=1,\n page_size=100, sort_direction=\"asc\",\n filter_terms=None):\n \"\"\"Paginated version of get_entities_with_type.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n\n Swagger:\n https://api.firecloud.org/#!/Entities/entityQuery\n\n \"\"\"\n\n # Initial parameters for pagination\n params = {\n \"page\" : page,\n \"pageSize\" : page_size,\n \"sortDirection\" : sort_direction\n }\n if filter_terms:\n params['filterTerms'] = filter_terms\n\n uri = \"workspaces/{0}/{1}/entityQuery/{2}\".format(namespace,workspace,etype)\n return __get(uri, params=params)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_entity(namespace, workspace, etype, ename, updates):\n headers = _fiss_agent_header({\"Content-type\": \"application/json\"})\n uri = \"{0}workspaces/{1}/{2}/entities/{3}/{4}\".format(fcconfig.root_url,\n namespace, workspace, etype, ename)\n\n # FIXME: create __patch method, akin to __get, __delete etc\n return __SESSION.patch(uri, headers=headers, json=updates)", "response": "Update the attributes of an entity in a workspace."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nlist method configurations in workspace.", "response": "def list_workspace_configs(namespace, workspace, allRepos=False):\n \"\"\"List method configurations in workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n\n Swagger:\n https://api.firecloud.org/#!/Method_Configurations/listWorkspaceMethodConfigs\n DUPLICATE: https://api.firecloud.org/#!/Workspaces/listWorkspaceMethodConfigs\n \"\"\"\n uri = \"workspaces/{0}/{1}/methodconfigs\".format(namespace, workspace)\n return __get(uri, params={'allRepos': allRepos})"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate 
method configuration in workspace.", "response": "def create_workspace_config(namespace, workspace, body):\n \"\"\"Create method configuration in workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n body (json) : a filled-in JSON object for the new method config\n (e.g. see return value of get_workspace_config)\n\n Swagger:\n https://api.firecloud.org/#!/Method_Configurations/postWorkspaceMethodConfig\n DUPLICATE: https://api.firecloud.org/#!/Workspaces/postWorkspaceMethodConfig\n \"\"\"\n\n #json_body = {\n # \"namespace\" : mnamespace,\n # \"name\" : method,\n # \"rootEntityType\" : root_etype,\n # \"inputs\" : {},\n # \"outputs\" : {},\n # \"prerequisites\" : {}\n #}\n uri = \"workspaces/{0}/{1}/methodconfigs\".format(namespace, workspace)\n return __post(uri, json=body)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef delete_workspace_config(namespace, workspace, cnamespace, config):\n uri = \"workspaces/{0}/{1}/method_configs/{2}/{3}\".format(namespace,\n workspace, cnamespace, config)\n return __delete(uri)", "response": "Delete method configuration in workspace."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting method configuration in workspace.", "response": "def get_workspace_config(namespace, workspace, cnamespace, config):\n \"\"\"Get method configuration in workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n cnamespace (str): Config namespace\n config (str): Config name\n\n Swagger:\n https://api.firecloud.org/#!/Method_Configurations/getWorkspaceMethodConfig\n \"\"\"\n\n uri = \"workspaces/{0}/{1}/method_configs/{2}/{3}\".format(namespace,\n workspace, cnamespace, config)\n return __get(uri)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds or overwrite method configuration in workspace.", 
"response": "def overwrite_workspace_config(namespace, workspace, cnamespace, configname, body):\n \"\"\"Add or overwrite method configuration in workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n cnamespace (str): Configuration namespace\n configname (str): Configuration name\n body (json): new body (definition) of the method config\n\n Swagger:\n https://api.firecloud.org/#!/Method_Configurations/overwriteWorkspaceMethodConfig\n \"\"\"\n headers = _fiss_agent_header({\"Content-type\": \"application/json\"})\n uri = \"workspaces/{0}/{1}/method_configs/{2}/{3}\".format(namespace,\n workspace, cnamespace, configname)\n return __put(uri, headers=headers, json=body)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate method configuration in workspace.", "response": "def update_workspace_config(namespace, workspace, cnamespace, configname, body):\n \"\"\"Update method configuration in workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n cnamespace (str): Configuration namespace\n configname (str): Configuration name\n body (json): new body (definition) of the method config\n\n Swagger:\n https://api.firecloud.org/#!/Method_Configurations/updateWorkspaceMethodConfig\n \"\"\"\n uri = \"workspaces/{0}/{1}/method_configs/{2}/{3}\".format(namespace,\n workspace, cnamespace, configname)\n return __post(uri, json=body)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting syntax validation for a configuration.", "response": "def validate_config(namespace, workspace, cnamespace, config):\n \"\"\"Get syntax validation for a configuration.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n cnamespace (str): Configuration namespace\n config (str): Configuration name\n\n Swagger:\n 
https://api.firecloud.org/#!/Method_Configurations/validate_method_configuration\n \"\"\"\n uri = \"workspaces/{0}/{1}/method_configs/{2}/{3}/validate\".format(namespace,\n workspace, cnamespace, config)\n return __get(uri)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrename a method configuration in a workspace.", "response": "def rename_workspace_config(namespace, workspace, cnamespace, config,\n new_namespace, new_name):\n \"\"\"Rename a method configuration in a workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n mnamespace (str): Config namespace\n config (str): Config name\n new_namespace (str): Updated config namespace\n new_name (str): Updated method name\n\n Swagger:\n https://api.firecloud.org/#!/Method_Configurations/renameWorkspaceMethodConfig\n \"\"\"\n\n body = {\n \"namespace\" : new_namespace,\n \"name\" : new_name,\n # I have no idea why this is required by FC, but it is...\n \"workspaceName\" : {\n \"namespace\" : namespace,\n \"name\" : workspace\n }\n }\n uri = \"workspaces/{0}/{1}/method_configs/{2}/{3}/rename\".format(namespace,\n workspace, cnamespace, config)\n return __post(uri, json=body)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef copy_config_from_repo(namespace, workspace, from_cnamespace,\n from_config, from_snapshot_id, to_cnamespace,\n to_config):\n \"\"\"Copy a method config from the methods repository to a workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n from_cnamespace (str): Source configuration namespace\n from_config (str): Source configuration name\n from_snapshot_id (int): Source configuration snapshot_id\n to_cnamespace (str): Target configuration namespace\n to_config (str): Target configuration name\n\n Swagger:\n https://api.firecloud.org/#!/Method_Configurations/copyFromMethodRepo\n DUPLICATE: 
https://api.firecloud.org/#!/Method_Repository/copyFromMethodRepo\n \"\"\"\n\n body = {\n \"configurationNamespace\" : from_cnamespace,\n \"configurationName\" : from_config,\n \"configurationSnapshotId\" : from_snapshot_id,\n \"destinationNamespace\" : to_cnamespace,\n \"destinationName\" : to_config\n }\n uri = \"workspaces/{0}/{1}/method_configs/copyFromMethodRepo\".format(\n namespace, workspace)\n return __post(uri, json=body)", "response": "Copy a method config from the methods repository to a workspace."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncopying a method config from a workspace to the methods repository.", "response": "def copy_config_to_repo(namespace, workspace, from_cnamespace,\n from_config, to_cnamespace, to_config):\n \"\"\"Copy a method config from a workspace to the methods repository.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n from_cnamespace (str): Source configuration namespace\n from_config (str): Source configuration name\n to_cnamespace (str): Target configuration namespace\n to_config (str): Target configuration name\n\n Swagger:\n https://api.firecloud.org/#!/Method_Configurations/copyToMethodRepo\n DUPLICATE: https://api.firecloud.org/#!/Method_Repository/copyToMethodRepo\n \"\"\"\n\n body = {\n \"configurationNamespace\" : to_cnamespace,\n \"configurationName\" : to_config,\n \"sourceNamespace\" : from_cnamespace,\n \"sourceName\" : from_config\n }\n uri = \"workspaces/{0}/{1}/method_configs/copyToMethodRepo\".format(\n namespace, workspace)\n return __post(uri, json=body)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef list_repository_methods(namespace=None, name=None, snapshotId=None):\n params = {k:v for (k,v) in locals().items() if v is not None}\n return __get(\"methods\", params=params)", "response": "List all methods in the methods repository."} {"SOURCE": "codesearchnet", "instruction": 
"Can you generate the documentation for the following Python 3 function\ndef get_config_template(namespace, method, version):\n\n body = {\n \"methodNamespace\" : namespace,\n \"methodName\" : method,\n \"methodVersion\" : int(version)\n }\n return __post(\"template\", json=body)", "response": "Get the configuration template for a method."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets a description of the inputs and outputs for a method.", "response": "def get_inputs_outputs(namespace, method, snapshot_id):\n \"\"\"Get a description of the inputs and outputs for a method.\n\n The method should exist in the methods repository.\n\n Args:\n namespace (str): Methods namespace\n method (str): method name\n snapshot_id (int): snapshot_id of the method\n\n Swagger:\n https://api.firecloud.org/#!/Method_Repository/getMethodIO\n \"\"\"\n\n body = {\n \"methodNamespace\" : namespace,\n \"methodName\" : method,\n \"methodVersion\" : snapshot_id\n }\n return __post(\"inputsOutputs\", json=body)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting a method configuration from the methods repository.", "response": "def get_repository_config(namespace, config, snapshot_id):\n \"\"\"Get a method configuration from the methods repository.\n\n Args:\n namespace (str): Methods namespace\n config (str): config name\n snapshot_id (int): snapshot_id of the method\n\n Swagger:\n https://api.firecloud.org/#!/Method_Repository/getMethodRepositoryConfiguration\n \"\"\"\n uri = \"configurations/{0}/{1}/{2}\".format(namespace, config, snapshot_id)\n return __get(uri)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_repository_method(namespace, method, snapshot_id, wdl_only=False):\n uri = \"methods/{0}/{1}/{2}?onlyPayload={3}\".format(namespace, method,\n snapshot_id,\n str(wdl_only).lower())\n return __get(uri)", "response": "Get a method 
definition from the method repository."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate or update a workflow definition.", "response": "def update_repository_method(namespace, method, synopsis, wdl, doc=None,\n comment=\"\"):\n \"\"\"Create/Update workflow definition.\n\n FireCloud will create a new snapshot_id for the given workflow.\n\n Args:\n namespace (str): Methods namespace\n method (str): method name\n synopsis (str): short (<80 char) description of method\n wdl (file): Workflow Description Language file\n doc (file): (Optional) Additional documentation\n comment (str): (Optional) Comment specific to this snapshot\n\n Swagger:\n https://api.firecloud.org/#!/Method_Repository/post_api_methods\n\n \"\"\"\n with open(wdl, 'r') as wf:\n wdl_payload = wf.read()\n if doc is not None:\n with open (doc, 'r') as df:\n doc = df.read()\n\n body = {\n \"namespace\": namespace,\n \"name\": method,\n \"entityType\": \"Workflow\",\n \"payload\": wdl_payload,\n \"documentation\": doc,\n \"synopsis\": synopsis,\n \"snapshotComment\": comment\n }\n\n return __post(\"methods\",\n json={key: value for key, value in body.items() if value})"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef delete_repository_method(namespace, name, snapshot_id):\n uri = \"methods/{0}/{1}/{2}\".format(namespace, name, snapshot_id)\n return __delete(uri)", "response": "Redacts a method and all of its associated configurations in the methods repository."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_repository_method_acl(namespace, method, snapshot_id):\n uri = \"methods/{0}/{1}/{2}/permissions\".format(namespace,method,snapshot_id)\n return __get(uri)", "response": "Get permissions for a method in the methods repository."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef 
update_repository_method_acl(namespace, method, snapshot_id, acl_updates):\n\n uri = \"methods/{0}/{1}/{2}/permissions\".format(namespace,method,snapshot_id)\n return __post(uri, json=acl_updates)", "response": "Set the permissions of a method in the Method Repository."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_repository_config_acl(namespace, config, snapshot_id):\n uri = \"configurations/{0}/{1}/{2}/permissions\".format(namespace,\n config, snapshot_id)\n return __get(uri)", "response": "Get permissions for a specific configuration in the methods repository."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_repository_config_acl(namespace, config, snapshot_id, acl_updates):\n\n uri = \"configurations/{0}/{1}/{2}/permissions\".format(namespace,\n config, snapshot_id)\n return __post(uri, json=acl_updates)", "response": "Set the permissions of a configuration in the methods repository."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create_submission(wnamespace, workspace, cnamespace, config,\n entity, etype, expression=None, use_callcache=True):\n \"\"\"Submit job in FireCloud workspace.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n cnamespace (str): Method configuration namespace\n config (str): Method configuration name\n entity (str): Entity to submit job on. 
Should be the same type as\n the root entity type of the method config, unless an\n expression is used\n etype (str): Entity type of root_entity\n expression (str): Instead of using entity as the root entity,\n evaluate the root entity from this expression.\n use_callcache (bool): use call cache if applicable (default: true)\n\n Swagger:\n https://api.firecloud.org/#!/Submissions/createSubmission\n \"\"\"\n\n uri = \"workspaces/{0}/{1}/submissions\".format(wnamespace, workspace)\n body = {\n \"methodConfigurationNamespace\" : cnamespace,\n \"methodConfigurationName\" : config,\n \"entityType\" : etype,\n \"entityName\" : entity,\n \"useCallCache\" : use_callcache\n }\n\n if expression:\n body['expression'] = expression\n\n return __post(uri, json=body)", "response": "Submit a new job in FireCloud workspace."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef abort_submission(namespace, workspace, submission_id):\n uri = \"workspaces/{0}/{1}/submissions/{2}\".format(namespace,\n workspace, submission_id)\n return __delete(uri)", "response": "Abort running job in a workspace."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nrequest submission information. 
Args: namespace (str): project to which workspace belongs workspace (str): Workspace name submission_id (str): Submission's unique identifier Swagger: https://api.firecloud.org/#!/Submissions/monitorSubmission", "response": "def get_submission(namespace, workspace, submission_id):\n \"\"\"Request submission information.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n submission_id (str): Submission's unique identifier\n\n Swagger:\n https://api.firecloud.org/#!/Submissions/monitorSubmission\n \"\"\"\n uri = \"workspaces/{0}/{1}/submissions/{2}\".format(namespace,\n workspace, submission_id)\n return __get(uri)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nrequesting the metadata for a workflow in a submission.", "response": "def get_workflow_metadata(namespace, workspace, submission_id, workflow_id):\n \"\"\"Request the metadata for a workflow in a submission.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n submission_id (str): Submission's unique identifier\n workflow_id (str): Workflow's unique identifier.\n\n Swagger:\n https://api.firecloud.org/#!/Submissions/workflowMetadata\n \"\"\"\n uri = \"workspaces/{0}/{1}/submissions/{2}/workflows/{3}\".format(namespace,\n workspace, submission_id, workflow_id)\n return __get(uri)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_workflow_outputs(namespace, workspace, submission_id, workflow_id):\n uri = \"workspaces/{0}/{1}/\".format(namespace, workspace)\n uri += \"submissions/{0}/workflows/{1}/outputs\".format(submission_id,\n workflow_id)\n return __get(uri)", "response": "Request the outputs for a workflow in a submission."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef create_workspace(namespace, name, authorizationDomain=\"\", attributes=None):\n\n if not 
attributes:\n attributes = dict()\n\n body = {\n \"namespace\": namespace,\n \"name\": name,\n \"attributes\": attributes\n }\n if authorizationDomain:\n authDomain = [{\"membersGroupName\": authorizationDomain}]\n else:\n authDomain = []\n\n body[\"authorizationDomain\"] = authDomain\n\n return __post(\"workspaces\", json=body)", "response": "Create a new FireCloud Workspace."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update_workspace_acl(namespace, workspace, acl_updates, invite_users_not_found=False):\n uri = \"{0}workspaces/{1}/{2}/acl?inviteUsersNotFound={3}\".format(fcconfig.root_url,\n namespace, workspace, str(invite_users_not_found).lower())\n headers = _fiss_agent_header({\"Content-type\": \"application/json\"})\n # FIXME: create __patch method, akin to __get, __delete etc\n return __SESSION.patch(uri, headers=headers, data=json.dumps(acl_updates))", "response": "Update the access control list for a workspace."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncloning a FireCloud workspace.", "response": "def clone_workspace(from_namespace, from_workspace, to_namespace, to_workspace,\n authorizationDomain=\"\"):\n \"\"\"Clone a FireCloud workspace.\n\n A clone is a shallow copy of a FireCloud workspace, enabling\n easy sharing of data, such as TCGA data, without duplication.\n\n Args:\n from_namespace (str): project (namespace) to which source workspace belongs\n from_workspace (str): Source workspace's name\n to_namespace (str): project to which target workspace belongs\n to_workspace (str): Target workspace's name\n authorizationDomain: (str) required authorization domains\n\n Swagger:\n https://api.firecloud.org/#!/Workspaces/cloneWorkspace\n \"\"\"\n\n if authorizationDomain:\n if isinstance(authorizationDomain, string_types):\n authDomain = [{\"membersGroupName\": authorizationDomain}]\n else:\n authDomain = [{\"membersGroupName\": authDomain} for authDomain in 
authorizationDomain]\n else:\n authDomain = []\n\n body = {\n \"namespace\": to_namespace,\n \"name\": to_workspace,\n \"attributes\": dict(),\n \"authorizationDomain\": authDomain,\n }\n\n uri = \"workspaces/{0}/{1}/clone\".format(from_namespace, from_workspace)\n return __post(uri, json=body)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nupdate or remove workspace attributes.", "response": "def update_workspace_attributes(namespace, workspace, attrs):\n \"\"\"Update or remove workspace attributes.\n\n Args:\n namespace (str): project to which workspace belongs\n workspace (str): Workspace name\n attrs (list(dict)): List of update operations for workspace attributes.\n Use the helper dictionary construction functions to create these:\n\n _attr_set() : Set/Update attribute\n _attr_rem() : Remove attribute\n _attr_ladd() : Add list member to attribute\n _attr_lrem() : Remove list member from attribute\n\n Swagger:\n https://api.firecloud.org/#!/Workspaces/updateAttributes\n \"\"\"\n headers = _fiss_agent_header({\"Content-type\": \"application/json\"})\n uri = \"{0}workspaces/{1}/{2}/updateAttributes\".format(fcconfig.root_url,\n namespace, workspace)\n body = json.dumps(attrs)\n\n # FIXME: create __patch method, akin to __get, __delete etc\n return __SESSION.patch(uri, headers=headers, data=body)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_user_to_group(group, role, email):\n uri = \"groups/{0}/{1}/{2}\".format(group, role, email)\n return __put(uri)", "response": "Adds a user to a group"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nremove a user from a group", "response": "def remove_user_from_group(group, role, email):\n \"\"\"Remove a user from a group the caller owns\n\n Args:\n group (str): Group name\n role (str) : Role of user for group; either 'member' or 'admin'\n email (str): Email of user or group to remove\n \n Swagger:\n 
https://api.firecloud.org/#!/Groups/removeUserFromGroup\n \"\"\"\n uri = \"groups/{0}/{1}/{2}\".format(group, role, email)\n return __delete(uri)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef register_commands(self, subparsers):\n #print_(\"DEV PLUGIN: Loaded commands\")\n prsr = subparsers.add_parser(\n 'upload', description='Copy the file or directory into the given')\n prsr.add_argument('workspace', help='Workspace name')\n prsr.add_argument('source', help='File or directory to upload')\n prsr.add_argument('-s', '--show', action='store_true',\n help=\"Show the gsutil command, but don't run it\")\n\n dest_help = 'Destination relative to the bucket root. '\n dest_help += 'If omitted the file will be placed in the root directory'\n prsr.add_argument('-d', '--destination', help=dest_help)\n\n\n prsr.set_defaults(func=upload)", "response": "Register commands to a list of subparsers"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef new(namespace, name, wdl, synopsis,\n documentation=None, api_url=fapi.PROD_API_ROOT):\n \"\"\"Create new FireCloud method.\n\n If the namespace + name already exists, a new snapshot is created.\n\n Args:\n namespace (str): Method namespace for this method\n name (str): Method name\n wdl (file): WDL description\n synopsis (str): Short description of task\n documentation (file): Extra documentation for method\n \"\"\"\n r = fapi.update_workflow(namespace, name, synopsis,\n wdl, documentation, api_url)\n fapi._check_response_code(r, 201)\n d = r.json()\n return Method(namespace, name, d[\"snapshotId\"])", "response": "Create a new FireCloud method."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a method template for this method.", "response": "def template(self):\n \"\"\"Return a method template for this method.\"\"\"\n r = fapi.get_config_template(self.namespace, 
self.name,\n self.snapshot_id, self.api_url)\n fapi._check_response_code(r, 200)\n return r.json()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef inputs_outputs(self):\n r = fapi.get_inputs_outputs(self.namespace, self.name,\n self.snapshot_id, self.api_url)\n fapi._check_response_code(r, 200)\n return r.json()", "response": "Get information on method inputs & outputs."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the access control list for this method.", "response": "def acl(self):\n \"\"\"Get the access control list for this method.\"\"\"\n r = fapi.get_repository_method_acl(\n self.namespace, self.name, self.snapshot_id, self.api_url)\n fapi._check_response_code(r, 200)\n return r.json()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_acl(self, role, users):\n acl_updates = [{\"user\": user, \"role\": role} for user in users]\n r = fapi.update_repository_method_acl(\n self.namespace, self.name, self.snapshot_id,\n acl_updates, self.api_url\n )\n fapi._check_response_code(r, 200)", "response": "Set permissions for this method."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef __ensure_gcloud():\n if which('gcloud') is None:\n gcloud_path = os.path.join(os.path.expanduser('~'),\n 'google-cloud-sdk', 'bin')\n env_path = os.getenv('PATH')\n if os.path.isdir(gcloud_path):\n if env_path is not None:\n os.environ['PATH'] = gcloud_path + os.pathsep + env_path\n else:\n os.environ['PATH'] = gcloud_path", "response": "Ensure that the gcloud - sdk directory for the executables exists."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nparse a set of named config files and store the result in a config dictionary.", "response": "def config_parse(files=None, config=None, config_profile=\".fissconfig\", **kwargs):\n 
'''\n Read initial configuration state, from named config files; store\n this state within a config dictionary (which may be nested) whose keys may\n also be referenced as attributes (safely, defaulting to None if unset). A\n config object may be passed in, as a way of accumulating or overwriting\n configuration state; if one is NOT passed, the default config obj is used\n '''\n local_config = config\n config = __fcconfig\n\n cfgparser = configparser.SafeConfigParser()\n\n filenames = list()\n\n # Give personal/user followed by current working directory configuration the first say\n filenames.append(os.path.join(os.path.expanduser('~'), config_profile))\n\n filenames.append(os.path.join(os.getcwd(), config_profile))\n\n if files:\n if isinstance(files, string_types):\n filenames.append(files)\n elif isinstance(files, Iterable):\n for f in files:\n if isinstance(f, IOBase):\n f = f.name\n filenames.append(f)\n\n cfgparser.read(filenames)\n\n # [DEFAULT] defines common variables for interpolation/substitution in\n # other sections, and are stored at the root level of the config object\n for keyval in cfgparser.items('DEFAULT'):\n #print(\"config_parse: adding config variable %s=%s\" % (keyval[0], str(keyval[1])))\n __fcconfig[keyval[0]] = keyval[1]\n\n for section in cfgparser.sections():\n config[section] = attrdict()\n for option in cfgparser.options(section):\n # DEFAULT vars ALSO behave as though they were defined in every\n # section, but we purposely skip them here so that each section\n # reflects only the options explicitly defined in that section\n if not config[option]:\n config[section][option] = cfgparser.get(section, option)\n\n config.verbosity = int(config.verbosity)\n if not config.root_url.endswith('/'):\n config.root_url += '/'\n if os.path.isfile(config.credentials):\n os.environ[environment_vars.CREDENTIALS] = config.credentials\n\n # if local_config override options with passed options\n if local_config is not None:\n for key, value in 
local_config.items():\n config[key] = value\n\n # if any explict config options are passed override.\n for key, value in kwargs.items():\n config[key] = value\n\n return config"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a new FireCloud workspace.", "response": "def new(namespace, name, protected=False,\n attributes=dict(), api_url=fapi.PROD_API_ROOT):\n \"\"\"Create a new FireCloud workspace.\n\n Returns:\n Workspace: A new FireCloud workspace\n\n Raises:\n FireCloudServerError: API call failed.\n \"\"\"\n r = fapi.create_workspace(namespace, name, protected, attributes, api_url)\n fapi._check_response_code(r, 201)\n return Workspace(namespace, name, api_url)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreloading workspace metadata from firecloud.", "response": "def refresh(self):\n \"\"\"Reload workspace metadata from firecloud.\n\n Workspace metadata is cached in the data attribute of a Workspace,\n and may become stale, requiring a refresh().\n \"\"\"\n r = fapi.get_workspace(self.namespace, self.name, self.api_url)\n fapi._check_response_code(r, 200)\n self.data = r.json()\n return self"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ndelete the workspace from FireCloud.", "response": "def delete(self):\n \"\"\"Delete the workspace from FireCloud.\n\n Note:\n This action cannot be undone. Be careful!\n \"\"\"\n r = fapi.delete_workspace(self.namespace, self.name)\n fapi._check_response_code(r, 202)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nlocking this Workspace. 
This causes the workspace to behave in a read-only way, regardless of access permissions.", "response": "def lock(self):\n \"\"\"Lock this Workspace.\n\n This causes the workspace to behave in a read-only way,\n regardless of access permissions.\n \"\"\"\n r = fapi.lock_workspace(self.namespace, self.name, self.api_url)\n fapi._check_response_code(r, 204)\n self.data['workspace']['isLocked'] = True\n return self"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsets the value of a workspace attribute.", "response": "def update_attribute(self, attr, value):\n \"\"\"Set the value of a workspace attribute.\"\"\"\n update = [fapi._attr_up(attr, value)]\n r = fapi.update_workspace_attributes(self.namespace, self.name,\n update, self.api_url)\n fapi._check_response_code(r, 200)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef remove_attribute(self, attr):\n update = [fapi._attr_rem(attr)]\n r = fapi.update_workspace_attributes(self.namespace, self.name,\n update, self.api_url)\n self.data[\"workspace\"][\"attributes\"].pop(attr, None)\n fapi._check_response_code(r, 200)", "response": "Removes an attribute from a workspace."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef import_tsv(self, tsv_file):\n r = fapi.upload_entities_tsv(self.namespace, self.name,\n self.tsv_file, self.api_url)\n fapi._check_response_code(r, 201)", "response": "Upload entity data to workspace from tsv loadfile."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning entity in this workspace.", "response": "def get_entity(self, etype, entity_id):\n \"\"\"Return entity in this workspace.\n\n Args:\n etype (str): Entity type\n entity_id (str): Entity name/unique id\n \"\"\"\n r = fapi.get_entity(self.namespace, self.name, etype,\n entity_id, self.api_url)\n fapi._check_response_code(r, 200)\n dresp = r.json()\n return 
Entity(etype, entity_id, dresp['attributes'])"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ndelete an entity in this workspace.", "response": "def delete_entity(self, etype, entity_id):\n \"\"\"Delete an entity in this workspace.\n\n Args:\n etype (str): Entity type\n entity_id (str): Entity name/unique id\n \"\"\"\n r = fapi.delete_entity(self.namespace, self.name, etype,\n entity_id, self.api_url)\n fapi._check_response_code(r, 202)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef import_entities(self, entities):\n edata = Entity.create_payload(entities)\n r = fapi.upload_entities(self.namespace, self.name,\n edata, self.api_url)\n fapi._check_response_code(r, 201)", "response": "Upload entities to firecloud."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_set(self, set_id, etype, entities):\n if etype not in {\"sample\", \"pair\", \"participant\"}:\n raise ValueError(\"Unsupported entity type:\" + str(etype))\n\n payload = \"membership:\" + etype + \"_set_id\\t\" + etype + \"_id\\n\"\n\n for e in entities:\n if e.etype != etype:\n msg = \"Entity type '\" + e.etype + \"' does not match \"\n msg += \"set type '\" + etype + \"'\"\n raise ValueError(msg)\n payload += set_id + '\\t' + e.entity_id + '\\n'\n\n\n r = fapi.upload_entities(self.namespace, self.name,\n payload, self.api_url)\n fapi._check_response_code(r, 201)", "response": "Create a set of entities and upload to FireCloud."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef submissions(self):\n r = fapi.get_submissions(self.namespace, self.name, self.api_url)\n fapi._check_response_code(r, 200)\n return r.json()", "response": "List job submissions in workspace."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef entity_types(self):\n r = 
fapi.get_entity_types(self.namespace, self.name, self.api_url)\n fapi._check_response_code(r, 200)\n return r.json().keys()", "response": "List entity types in workspace."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef entities(self):\n r = fapi.get_entities_with_type(self.namespace,\n self.name, self.api_url)\n fapi._check_response_code(r, 200)\n edicts = r.json()\n return [Entity(e['entityType'], e['name'], e['attributes'])\n for e in edicts]", "response": "List all entities in workspace."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef __get_entities(self, etype):\n r = fapi.get_entities(self.namespace, self.name,\n etype, self.api_url)\n fapi._check_response_code(r, 200)\n return [Entity(e['entityType'], e['name'], e['attributes'])\n for e in r.json()]", "response": "Helper to get entities for a given type."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncopies entities from another workspace.", "response": "def copy_entities(self, from_namespace, from_workspace, etype, enames):\n \"\"\"Copy entities from another workspace.\n\n Args:\n from_namespace (str): Source workspace namespace\n from_workspace (str): Source workspace name\n etype (str): Entity type\n enames (list(str)): List of entity names to copy\n \"\"\"\n r = fapi.copy_entities(from_namespace, from_workspace,\n self.namespace, self.name, etype, enames,\n self.api_url)\n fapi._check_response_code(r, 201)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef configs(self):\n raise NotImplementedError\n r = fapi.get_configs(self.namespace, self.name, self.api_url)\n fapi._check_response_code(r, 200)\n cdata = r.json()\n configs = []\n for c in cdata:\n cnamespace = c['namespace']\n cname = c['name']\n root_etype = c['rootEntityType']\n method_namespace = c['methodRepoMethod']['methodNamespace']\n 
method_name = c['methodRepoMethod']['methodName']\n method_version = c['methodRepoMethod']['methodVersion']", "response": "Get method configurations in a workspace."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef acl(self):\n r = fapi.get_workspace_acl(self.namespace, self.name, self.api_url)\n fapi._check_response_code(r, 200)\n return r.json()", "response": "Get the access control list for this workspace."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncloning this workspace. Args: to_namespace (str): Target workspace namespace to_name (str): Target workspace name", "response": "def clone(self, to_namespace, to_name):\n \"\"\"Clone this workspace.\n\n Args:\n to_namespace (str): Target workspace namespace\n to_name (str): Target workspace name\n \"\"\"\n r = fapi.clone_workspace(self.namespace, self.name,\n to_namespace, to_name, self.api_url)\n fapi._check_response_code(r, 201)\n return Workspace(to_namespace, to_name, self.api_url)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef supervise(project, workspace, namespace, workflow,\n sample_sets, recovery_file):\n \"\"\" Supervise submission of jobs from a Firehose-style workflow of workflows\"\"\"\n # Get arguments\n\n logging.info(\"Initializing FireCloud Supervisor...\")\n logging.info(\"Saving recovery checkpoints to \" + recovery_file)\n\n # Parse workflow description\n # these three objects must be saved in order to recover the supervisor\n\n args = {\n 'project' : project,\n 'workspace': workspace,\n 'namespace': namespace,\n 'workflow' : workflow,\n 'sample_sets': sample_sets\n }\n\n monitor_data, dependencies = init_supervisor_data(workflow, sample_sets)\n recovery_data = {\n 'args' : args,\n 'monitor_data' : monitor_data,\n 'dependencies' : dependencies\n }\n\n # Monitor loop. 
Keep going until all nodes have been evaluated\n supervise_until_complete(monitor_data, dependencies, args, recovery_file)", "response": "This function is used to run a Firehose - style workflow of jobs from a Firehose - style workflow of jobs."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef init_supervisor_data(dotfile, sample_sets):\n with open(dotfile) as wf:\n graph_data = wf.read()\n\n graph = pydot.graph_from_dot_data(graph_data)[0]\n nodes = [n.get_name().strip('\"') for n in graph.get_nodes()]\n\n monitor_data = dict()\n dependencies = {n:[] for n in nodes}\n\n # Initialize empty list of dependencies\n for n in nodes:\n monitor_data[n] = dict()\n for sset in sample_sets:\n monitor_data[n][sset] = {\n 'state' : \"Not Started\",\n 'evaluated' : False,\n 'succeeded' : False\n }\n\n edges = graph.get_edges()\n\n # Iterate over the edges, and get the dependency information for each node\n for e in edges:\n source = e.get_source().strip('\"')\n dest = e.get_destination().strip('\"')\n\n dep = e.get_attributes()\n dep['upstream_task'] = source\n\n dependencies[dest].append(dep)\n\n return monitor_data, dependencies", "response": "Parse a DOT file and initialize the supervisor data structure"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nvalidate that all tasks in the supervisor are valid and that all permissions requirements are satisfied.", "response": "def validate_monitor_tasks(dependencies, args):\n \"\"\" Validate that all entries in the supervisor are valid task configurations and\n that all permissions requirements are satisfied.\n \"\"\"\n # Make a list of all task configurations needed to supervise\n sup_configs = sorted(dependencies.keys())\n\n\n try:\n logging.info(\"Validating supervisor data...\")\n # Make an api call to list task configurations in the workspace\n r = fapi.list_workspace_configs(args['project'], args['workspace'])\n fapi._check_response_code(r, 200)\n 
space_configs = r.json()\n\n # Make a dict for easy lookup later\n space_configs = { c[\"name\"]: c for c in space_configs}\n\n # Also make an api call to list methods you have view permissions for\n r = fapi.list_repository_methods()\n fapi._check_response_code(r, 200)\n repo_methods = r.json()\n\n ## Put in a form that is more easily searchable: namespace/name:snapshot\n repo_methods = {m['namespace'] + '/' + m['name'] + ':' + str(m['snapshotId'])\n for m in repo_methods if m['entityType'] == 'Workflow'}\n\n valid = True\n\n for config in sup_configs:\n # ensure config exists in the workspace\n if config not in space_configs:\n logging.error(\"No task configuration for \"\n + config + \" found in \" + args['project'] + \"/\"\n + args['workspace'])\n valid = False\n else:\n # Check access permissions for the referenced method\n m = space_configs[config]['methodRepoMethod']\n ref_method = m['methodNamespace'] + \"/\" + m['methodName'] + \":\" + str(m['methodVersion'])\n if ref_method not in repo_methods:\n logging.error(config+ \" -- You don't have permisson to run the referenced method: \"\n + ref_method)\n valid = False\n\n except Exception as e:\n logging.error(\"Exception occurred while validating supervisor: \" + str(e))\n raise\n return False\n\n return valid"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nrecovers data from recovery_file and resume monitoring", "response": "def recover_and_supervise(recovery_file):\n \"\"\" Retrieve monitor data from recovery_file and resume monitoring \"\"\"\n try:\n logging.info(\"Attempting to recover Supervisor data from \" + recovery_file)\n with open(recovery_file) as rf:\n recovery_data = json.load(rf)\n monitor_data = recovery_data['monitor_data']\n dependencies = recovery_data['dependencies']\n args = recovery_data['args']\n\n except:\n logging.error(\"Could not recover monitor data, exiting...\")\n return 1\n\n logging.info(\"Data successfully loaded, resuming Supervisor\")\n 
supervise_until_complete(monitor_data, dependencies, args, recovery_file)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef supervise_until_complete(monitor_data, dependencies, args, recovery_file):\n project = args['project']\n workspace = args['workspace']\n namespace = args['namespace']\n sample_sets = args['sample_sets']\n recovery_data = {'args': args}\n\n if not validate_monitor_tasks(dependencies, args):\n logging.error(\"Errors found, aborting...\")\n return\n\n while True:\n # There are 4 possible states for each node:\n # 1. Not Started -- In this state, check all the dependencies for the\n # node (possibly 0). If all of them have been evaluated, and the\n # satisfiedMode is met, start the task, change to \"Running\". if\n # satisfiedMode is not met, change to \"Evaluated\"\n #\n # 2. Running -- Submitted in FC. Check the submission endpoint, and\n # if it has completed, change to \"Completed\", set evaluated=True,\n # and whether the task succeeded\n # Otherwise, do nothing\n #\n # 3. Completed -- Job ran in FC and either succeeded or failed. Do nothing\n # 4. 
Evaluated -- All dependencies evaluated, but this task did not run\n # do nothing\n\n # Keep a tab of the number of jobs in each category\n running = 0\n waiting = 0\n completed = 0\n\n # Get the submissions\n r = fapi.list_submissions(project, workspace)\n sub_list = r.json()\n #TODO: filter this list by submission time first?\n sub_lookup = {s[\"submissionId\"]: s for s in sub_list}\n\n # Keys of dependencies is the list of tasks to run\n for n in dependencies:\n for sset in sample_sets:\n task_data = monitor_data[n][sset]\n if task_data['state'] == \"Not Started\":\n # See if all of the dependencies have been evaluated\n upstream_evaluated = True\n for dep in dependencies[n]:\n # Look up the status of the task\n upstream_task_data = monitor_data[dep['upstream_task']][sset]\n if not upstream_task_data.get('evaluated'):\n upstream_evaluated = False\n\n # if all of the dependencies have been evaluated, we can evaluate\n # this node\n if upstream_evaluated:\n # Now check the satisfied Mode of all the dependencies\n should_run = True\n for dep in dependencies[n]:\n upstream_task_data = monitor_data[dep['upstream_task']][sset]\n mode = dep['satisfiedMode']\n\n # Task must have succeeded for OnComplete\n if mode == '\"OnComplete\"' and not upstream_task_data['succeeded']:\n should_run = False\n # 'Always' and 'Optional' run once the deps have been\n # evaluated\n\n if should_run:\n # Submit the workflow to FC\n fc_config = n\n logging.info(\"Starting workflow \" + fc_config + \" on \" + sset)\n\n # How to handle errors at this step?\n for retry in range(3):\n r = fapi.create_submission(\n project, workspace, namespace, fc_config,\n sset, etype=\"sample_set\", expression=None\n )\n if r.status_code == 201:\n task_data['submissionId'] = r.json()['submissionId']\n task_data['state'] = \"Running\"\n running += 1\n break\n else:\n # There was an error, under certain circumstances retry\n logging.debug(\"Create_submission for \" + fc_config\n + \"failed on \" + sset + \" 
with the following response:\"\n + r.content + \"\\nRetrying...\")\n\n else:\n # None of the attempts above succeeded, log an error, mark as failed\n logging.error(\"Maximum retries exceeded\")\n task_data['state'] = 'Completed'\n task_data['evaluated'] = True\n task_data['succeeded'] = False\n\n else:\n # This task will never be able to run, mark evaluated\n task_data['state'] = \"Evaluated\"\n task_data['evaluated'] = True\n completed += 1\n else:\n waiting += 1\n\n elif task_data['state'] == \"Running\":\n submission = sub_lookup[task_data['submissionId']]\n status = submission['status']\n\n if status == \"Done\":\n # Look at the individual workflows to see if there were\n # failures\n logging.info(\"Workflow \" + n + \" completed for \" + sset)\n success = 'Failed' not in submission['workflowStatuses']\n task_data['evaluated'] = True\n task_data['succeeded'] = success\n task_data['state'] = \"Completed\"\n\n completed += 1\n else:\n # Submission isn't done, don't do anything\n running += 1\n\n else:\n # Either Completed or evaluated\n completed += 1\n\n # Save the state of the monitor for recovery purposes\n # Have to do this for every workflow + sample_set so we don't lose track of any\n recovery_data['monitor_data'] = monitor_data\n recovery_data['dependencies'] = dependencies\n\n with open(recovery_file, 'w') as rf:\n json.dump(recovery_data, rf)\n\n logging.info(\"{0} Waiting, {1} Running, {2} Completed\".format(waiting, running, completed))\n\n # If all tasks have been evaluated, we are done\n if all(monitor_data[n][sset]['evaluated']\n for n in monitor_data for sset in monitor_data[n]):\n logging.info(\"DONE.\")\n break\n time.sleep(30)", "response": "This function is a loop forever until all tasks are evaluated or completed."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef space_list(args):\n ''' List accessible workspaces, in TSV form: workspace'''\n\n r = fapi.list_workspaces()\n 
fapi._check_response_code(r, 200)\n\n spaces = []\n project = args.project\n if project:\n project = re.compile('^' + project)\n\n for space in r.json():\n ns = space['workspace']['namespace']\n if project and not project.match(ns):\n continue\n ws = space['workspace']['name']\n spaces.append(ns + '\\t' + ws)\n\n # Sort for easier downstream viewing, ignoring case\n return sorted(spaces, key=lambda s: s.lower())", "response": "List accessible workspaces in TSV form"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ndetermines if the named space exists in the given project ( namespace", "response": "def space_exists(args):\n \"\"\" Determine if the named space exists in the given project (namespace)\"\"\"\n # The return value is the INVERSE of UNIX exit status semantics, (where\n # 0 = good/true, 1 = bad/false), so to check existence in UNIX one would do\n # if ! fissfc space_exists blah ; then\n # ...\n # fi\n try:\n r = fapi.get_workspace(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n exists = True\n except FireCloudServerError as e:\n if e.code == 404:\n exists = False\n else:\n raise\n if fcconfig.verbosity:\n result = \"DOES NOT\" if not exists else \"DOES\"\n eprint('Space <%s> %s exist in project <%s>' %\n (args.workspace, result, args.project))\n return exists"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a new workspace.", "response": "def space_new(args):\n \"\"\" Create a new workspace. \"\"\"\n r = fapi.create_workspace(args.project, args.workspace,\n args.authdomain, dict())\n fapi._check_response_code(r, 201)\n if fcconfig.verbosity:\n eprint(r.content)\n return 0"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget metadata for a workspace.", "response": "def space_info(args):\n \"\"\" Get metadata for a workspace. 
\"\"\"\n r = fapi.get_workspace(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n return r.text"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef space_acl(args):\n ''' Retrieve access control list for a workspace'''\n r = fapi.get_workspace_acl(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n result = dict()\n for user, info in sorted(r.json()['acl'].items()):\n result[user] = info['accessLevel']\n return result", "response": "Retrieve access control list for a workspace"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nassigning an ACL role to list of users for a workspace", "response": "def space_set_acl(args):\n \"\"\" Assign an ACL role to list of users for a workspace \"\"\"\n acl_updates = [{\"email\": user,\n \"accessLevel\": args.role} for user in args.users]\n r = fapi.update_workspace_acl(args.project, args.workspace, acl_updates)\n fapi._check_response_code(r, 200)\n errors = r.json()['usersNotFound']\n\n if len(errors):\n eprint(\"Unable to assign role for unrecognized users:\")\n for user in errors:\n eprint(\"\\t{0}\".format(user['email']))\n return 1\n\n if fcconfig.verbosity:\n print(\"Successfully updated {0} role(s)\".format(len(acl_updates)))\n\n return 0"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsearches for workspaces matching certain criteria", "response": "def space_search(args):\n \"\"\" Search for workspaces matching certain criteria \"\"\"\n r = fapi.list_workspaces()\n fapi._check_response_code(r, 200)\n\n # Parse the JSON for workspace + namespace; then filter by\n # search terms: each term is treated as a regular expression\n workspaces = r.json()\n extra_terms = []\n if args.bucket:\n workspaces = [w for w in workspaces\n if re.search(args.bucket, w['workspace']['bucketName'])]\n extra_terms.append('bucket')\n\n # FIXME: add more filter terms\n pretty_spaces 
= []\n for space in workspaces:\n ns = space['workspace']['namespace']\n ws = space['workspace']['name']\n pspace = ns + '/' + ws\n # Always show workspace storage id\n pspace += '\\t' + space['workspace']['bucketName']\n pretty_spaces.append(pspace)\n\n # Sort for easier viewing, ignore case\n return sorted(pretty_spaces, key=lambda s: s.lower())"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupload an entity loadfile.", "response": "def entity_import(args):\n \"\"\" Upload an entity loadfile. \"\"\"\n project = args.project\n workspace = args.workspace\n chunk_size = args.chunk_size\n\n with open(args.tsvfile) as tsvf:\n headerline = tsvf.readline().strip()\n entity_data = [l.rstrip('\\n') for l in tsvf]\n\n return _batch_load(project, workspace, headerline, entity_data, chunk_size)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a list of lines that would suffice to reconstitute a container ( set ) entity if passed to entity_import.", "response": "def set_export(args):\n '''Return a list of lines in TSV form that would suffice to reconstitute a\n container (set) entity, if passed to entity_import. 
The first line in\n the list is the header, and subsequent lines are the container members.\n '''\n\n r = fapi.get_entity(args.project, args.workspace, args.entity_type, args.entity)\n fapi._check_response_code(r, 200)\n set_type = args.entity_type\n set_name = args.entity\n member_type = set_type.split('_')[0]\n members = r.json()['attributes'][member_type+'s']['items']\n\n result = [\"membership:{}_id\\t{}_id\".format(set_type, member_type)]\n result += [\"%s\\t%s\" % (set_name, m['entityName']) for m in members ]\n return result"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef entity_types(args):\n r = fapi.list_entity_types(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n return r.json().keys()", "response": "List entity types in a workspace"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nlist entities in a workspace.", "response": "def entity_list(args):\n \"\"\" List entities in a workspace. 
\"\"\"\n r = fapi.get_entities_with_type(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n return [ '{0}\\t{1}'.format(e['entityType'], e['name']) for e in r.json() ]"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nlist participants within a container", "response": "def participant_list(args):\n ''' List participants within a container'''\n\n # Case 1: retrieve participants within a named data entity\n if args.entity_type and args.entity:\n # Edge case: caller asked for participant within participant (itself)\n if args.entity_type == 'participant':\n return [ args.entity.strip() ]\n # Otherwise retrieve the container entity\n r = fapi.get_entity(args.project, args.workspace, args.entity_type, args.entity)\n fapi._check_response_code(r, 200)\n participants = r.json()['attributes'][\"participants\"]['items']\n return [ participant['entityName'] for participant in participants ]\n\n # Case 2: retrieve all participants within a workspace\n return __get_entities(args, \"participant\", page_size=2000)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nlist the pairs within a container.", "response": "def pair_list(args):\n ''' List pairs within a container. 
'''\n\n # Case 1: retrieve pairs within a named data entity\n if args.entity_type and args.entity:\n # Edge case: caller asked for pair within a pair (itself)\n if args.entity_type == 'pair':\n return [ args.entity.strip() ]\n # Edge case: pairs for a participant, which has to be done hard way\n # by iteratiing over all samples (see firecloud/discussion/9648)\n elif args.entity_type == 'participant':\n entities = _entity_paginator(args.project, args.workspace,\n 'pair', page_size=2000)\n return [ e['name'] for e in entities if\n e['attributes']['participant']['entityName'] == args.entity]\n\n # Otherwise retrieve the container entity\n r = fapi.get_entity(args.project, args.workspace, args.entity_type, args.entity)\n fapi._check_response_code(r, 200)\n pairs = r.json()['attributes'][\"pairs\"]['items']\n return [ pair['entityName'] for pair in pairs]\n\n # Case 2: retrieve all pairs within a workspace\n return __get_entities(args, \"pair\", page_size=2000)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nlisting samples within a container.", "response": "def sample_list(args):\n ''' List samples within a container. 
'''\n\n # Case 1: retrieve samples within a named data entity\n if args.entity_type and args.entity:\n # Edge case: caller asked for samples within a sample (itself)\n if args.entity_type == 'sample':\n return [ args.entity.strip() ]\n # Edge case: samples for a participant, which has to be done hard way\n # by iteratiing over all samples (see firecloud/discussion/9648)\n elif args.entity_type == 'participant':\n samples = _entity_paginator(args.project, args.workspace,\n 'sample', page_size=2000)\n return [ e['name'] for e in samples if\n e['attributes']['participant']['entityName'] == args.entity]\n\n # Otherwise retrieve the container entity\n r = fapi.get_entity(args.project, args.workspace, args.entity_type, args.entity)\n fapi._check_response_code(r, 200)\n if args.entity_type == 'pair':\n pair = r.json()['attributes']\n samples = [ pair['case_sample'], pair['control_sample'] ]\n else:\n samples = r.json()['attributes'][\"samples\"]['items']\n\n return [ sample['entityName'] for sample in samples ]\n\n # Case 2: retrieve all samples within a workspace\n return __get_entities(args, \"sample\", page_size=2000)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef entity_delete(args):\n\n msg = \"WARNING: this will delete {0} {1} in {2}/{3}\".format(\n args.entity_type, args.entity, args.project, args.workspace)\n\n if not (args.yes or _confirm_prompt(msg)):\n return\n\n json_body=[{\"entityType\": args.entity_type,\n \"entityName\": args.entity}]\n r = fapi.delete_entities(args.project, args.workspace, json_body)\n fapi._check_response_code(r, 204)\n if fcconfig.verbosity:\n print(\"Succesfully deleted \" + args.type + \" \" + args.entity)", "response": "Delete an entity in a workspace."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsubmits a new workflow to the methods repository.", "response": "def meth_new(args):\n \"\"\" Submit a new workflow (or 
update) to the methods repository. \"\"\"\n r = fapi.update_repository_method(args.namespace, args.method,\n args.synopsis, args.wdl, args.doc,\n args.comment)\n fapi._check_response_code(r, 201)\n if fcconfig.verbosity:\n print(\"Method %s installed to project %s\" % (args.method,\n args.namespace))\n return 0"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef meth_delete(args):\n message = \"WARNING: this will delete workflow \\n\\t{0}/{1}:{2}\".format(\n args.namespace, args.method, args.snapshot_id)\n if not args.yes and not _confirm_prompt(message):\n return\n\n r = fapi.delete_repository_method(args.namespace, args.method,\n args.snapshot_id)\n fapi._check_response_code(r, 200)\n if fcconfig.verbosity:\n print(\"Method %s removed from project %s\" % (args.method,args.namespace))\n return 0", "response": "Remove a method from the method repository"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef meth_wdl(args):\n ''' Retrieve WDL for given version of a repository method'''\n r = fapi.get_repository_method(args.namespace, args.method,\n args.snapshot_id, True)\n fapi._check_response_code(r, 200)\n return r.text", "response": "Retrieve WDL for given version of a repository method"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef meth_acl(args):\n ''' Retrieve access control list for given version of a repository method'''\n r = fapi.get_repository_method_acl(args.namespace, args.method,\n args.snapshot_id)\n fapi._check_response_code(r, 200)\n acls = sorted(r.json(), key=lambda k: k['user'])\n return map(lambda acl: '{0}\\t{1}'.format(acl['user'], acl['role']), acls)", "response": "Retrieve access control list for given version of a repository method"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef meth_set_acl(args):\n 
acl_updates = [{\"user\": user, \"role\": args.role} \\\n for user in set(expand_fc_groups(args.users)) \\\n if user != fapi.whoami()]\n\n id = args.snapshot_id\n if not id:\n # get the latest snapshot_id for this method from the methods repo\n r = fapi.list_repository_methods(namespace=args.namespace,\n name=args.method)\n fapi._check_response_code(r, 200)\n versions = r.json()\n if len(versions) == 0:\n if fcconfig.verbosity:\n eprint(\"method {0}/{1} not found\".format(args.namespace,\n args.method))\n return 1\n latest = sorted(versions, key=lambda m: m['snapshotId'])[-1]\n id = latest['snapshotId']\n\n r = fapi.update_repository_method_acl(args.namespace, args.method, id,\n acl_updates)\n fapi._check_response_code(r, 200)\n if fcconfig.verbosity:\n print(\"Updated ACL for {0}/{1}:{2}\".format(args.namespace, args.method,\n id))\n return 0", "response": "Assign an ACL role to a list of users for a workflow."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef expand_fc_groups(users):\n groups = None\n for user in users:\n fcgroup = None\n if '@' not in user:\n fcgroup = user\n elif user.lower().endswith('@firecloud.org'):\n if groups is None:\n r = fapi.get_groups()\n fapi._check_response_code(r, 200)\n groups = {group['groupEmail'].lower():group['groupName'] \\\n for group in r.json() if group['role'] == 'Admin'}\n if user.lower() not in groups:\n if fcconfig.verbosity:\n eprint(\"You do not have access to the members of {}\".format(user))\n yield user\n continue\n else:\n fcgroup = groups[user.lower()]\n else:\n yield user\n continue\n r = fapi.get_group(fcgroup)\n fapi._check_response_code(r, 200)\n fcgroup_data = r.json()\n for admin in fcgroup_data['adminsEmails']:\n yield admin\n for member in fcgroup_data['membersEmails']:\n yield member", "response": "Given a list of users return all members of the group."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the 
following Python 3 code\ndef meth_list(args):\n r = fapi.list_repository_methods(namespace=args.namespace,\n name=args.method,\n snapshotId=args.snapshot_id)\n fapi._check_response_code(r, 200)\n\n # Parse the JSON for the workspace + namespace\n methods = r.json()\n results = []\n for m in methods:\n ns = m['namespace']\n n = m['name']\n sn_id = m['snapshotId']\n results.append('{0}\\t{1}\\t{2}'.format(ns,n,sn_id))\n\n # Sort for easier viewing, ignore case\n return sorted(results, key=lambda s: s.lower())", "response": "List workflows in the methods repository"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ninvoke a task ( method configuration ) on given entity in given space", "response": "def config_start(args):\n '''Invoke a task (method configuration), on given entity in given space'''\n\n # Try to use call caching (job avoidance)? Flexibly accept range of answers\n cache = getattr(args, \"cache\", True)\n cache = cache is True or (cache.lower() in [\"y\", \"true\", \"yes\", \"t\", \"1\"])\n\n if not args.namespace:\n args.namespace = fcconfig.method_ns\n if not args.namespace:\n raise RuntimeError(\"namespace not provided, or configured by default\")\n\n r = fapi.create_submission(args.project, args.workspace,args.namespace,\n args.config, args.entity, args.entity_type,\n args.expression, use_callcache=cache)\n fapi._check_response_code(r, 201)\n id = r.json()['submissionId']\n\n return (\"Started {0}/{1} in {2}/{3}: id={4}\".format(\n args.namespace, args.config, args.project, args.workspace, id)), id"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\naborts a task by submission ID in given space", "response": "def config_stop(args):\n '''Abort a task (method configuration) by submission ID in given space'''\n\n r = fapi.abort_submission(args.project, args.workspace,\n args.submission_id)\n fapi._check_response_code(r, 204)\n\n return (\"Aborted {0} in {1}/{2}\".format(args.submission_id,\n 
args.project,\n args.workspace))"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nlist the method configs in the methods repository or a workspace.", "response": "def config_list(args):\n \"\"\" List configuration(s) in the methods repository or a workspace. \"\"\"\n verbose = fcconfig.verbosity\n if args.workspace:\n if verbose:\n print(\"Retrieving method configs from space {0}\".format(args.workspace))\n if not args.project:\n eprint(\"No project given, and no default project configured\")\n return 1\n r = fapi.list_workspace_configs(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n else:\n if verbose:\n print(\"Retrieving method configs from method repository\")\n r = fapi.list_repository_configs(namespace=args.namespace,\n name=args.config,\n snapshotId=args.snapshot_id)\n fapi._check_response_code(r, 200)\n\n # Parse the JSON for the workspace + namespace\n methods = r.json()\n results = []\n for m in methods:\n ns = m['namespace']\n if not ns:\n ns = '__EMPTYSTRING__'\n name = m['name']\n # Ugh: configs in workspaces look different from configs in methodRepo\n mver = m.get('methodRepoMethod', None)\n if mver:\n mver = mver.get('methodVersion', 'unknown') # space config\n else:\n mver = m.get('snapshotId', 'unknown') # repo config\n results.append('{0}\\t{1}\\tsnapshotId:{2}'.format(ns, name, mver))\n\n # Sort for easier viewing, ignore case\n return sorted(results, key=lambda s: s.lower())"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nretrieve access control list for a method configuration", "response": "def config_acl(args):\n ''' Retrieve access control list for a method configuration'''\n r = fapi.get_repository_config_acl(args.namespace, args.config,\n args.snapshot_id)\n fapi._check_response_code(r, 200)\n acls = sorted(r.json(), key=lambda k: k['user'])\n return map(lambda acl: '{0}\\t{1}'.format(acl['user'], acl['role']), acls)"} {"SOURCE": "codesearchnet", 
"instruction": "Can you create a Python 3 function that\nassigns an ACL role to a list of users for a config.", "response": "def config_set_acl(args):\n \"\"\" Assign an ACL role to a list of users for a config. \"\"\"\n acl_updates = [{\"user\": user, \"role\": args.role} \\\n for user in set(expand_fc_groups(args.users)) \\\n if user != fapi.whoami()]\n\n id = args.snapshot_id\n if not id:\n # get the latest snapshot_id for this method from the methods repo\n r = fapi.list_repository_configs(namespace=args.namespace,\n name=args.config)\n fapi._check_response_code(r, 200)\n versions = r.json()\n if len(versions) == 0:\n if fcconfig.verbosity:\n eprint(\"Configuration {0}/{1} not found\".format(args.namespace,\n args.config))\n return 1\n latest = sorted(versions, key=lambda c: c['snapshotId'])[-1]\n id = latest['snapshotId']\n\n r = fapi.update_repository_config_acl(args.namespace, args.config, id,\n acl_updates)\n fapi._check_response_code(r, 200)\n if fcconfig.verbosity:\n print(\"Updated ACL for {0}/{1}:{2}\".format(args.namespace, args.config,\n id))\n return 0"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nretrieves a method config from a workspace send stdout", "response": "def config_get(args):\n \"\"\" Retrieve a method config from a workspace, send stdout \"\"\"\n r = fapi.get_workspace_config(args.project, args.workspace,\n args.namespace, args.config)\n fapi._check_response_code(r, 200)\n # Setting ensure_ascii to False ensures unicode string returns\n return json.dumps(r.json(), indent=4, separators=(',', ': '),\n sort_keys=True, ensure_ascii=False)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nretrieve the WDL for a method config in a workspace send stdout", "response": "def config_wdl(args):\n \"\"\" Retrieve the WDL for a method config in a workspace, send stdout \"\"\"\n r = fapi.get_workspace_config(args.project, args.workspace,\n args.namespace, 
args.config)\n fapi._check_response_code(r, 200)\n \n method = r.json()[\"methodRepoMethod\"]\n args.namespace = method[\"methodNamespace\"]\n args.method = method[\"methodName\"]\n args.snapshot_id = method[\"methodVersion\"]\n \n return meth_wdl(args)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncomparing method configuration definitions across workspaces. Ignores methodConfigVersion if the verbose argument is not set", "response": "def config_diff(args):\n \"\"\"Compare method configuration definitions across workspaces. Ignores\n methodConfigVersion if the verbose argument is not set\"\"\"\n config_1 = config_get(args).splitlines()\n args.project = args.Project\n args.workspace = args.Workspace\n cfg_1_name = args.config\n if args.Config is not None:\n args.config = args.Config\n if args.Namespace is not None:\n args.namespace = args.Namespace\n config_2 = config_get(args).splitlines()\n if not args.verbose:\n config_1 = skip_cfg_ver(config_1)\n config_2 = skip_cfg_ver(config_2)\n return list(unified_diff(config_1, config_2, cfg_1_name, args.config, lineterm=''))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ninstall a valid method configuration into a workspace in one of several several ways.", "response": "def config_put(args):\n '''Install a valid method configuration into a workspace, in one of several\n ways: from a JSON file containing a config definition (both file names\n and objects are supported); as a string representing the content of such\n a JSON file; or as a dict generated from such JSON content, e.g via\n json.loads(). 
Note that the CLI supports only string & filename input.'''\n\n config = args.config\n if os.path.isfile(config):\n with open(config, 'r') as fp:\n config = json.loads(fp.read())\n elif isinstance(config, str):\n config = json.loads(config)\n elif isinstance(config, dict):\n pass\n elif hasattr(config, \"read\"):\n config = json.loads(config.read())\n else:\n raise ValueError('Input method config must be filename, string or dict')\n\n r = fapi.create_workspace_config(args.project, args.workspace, config)\n fapi._check_response_code(r, [201])\n return True"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ninstall a new method config into a workspace.", "response": "def config_new(args):\n '''Attempt to install a new method config into a workspace, by: generating\n a template from a versioned method in the methods repo, then launching\n a local editor (respecting the $EDITOR environment variable) to fill in\n the incomplete input/output fields. Returns True if the config was\n successfully installed, otherwise False'''\n\n cfg = config_template(args)\n # Iteratively try to edit/install the config: exit iteration by EITHER\n # Successful config_put() after editing\n # Leaving config unchanged in editor, e.g. 
quitting out of VI with :q\n # FIXME: put an small integer upper bound on the # of loops here\n while True:\n try:\n edited = fccore.edit_text(cfg)\n if edited == cfg:\n eprint(\"No edits made, method config not installed ...\")\n break\n if __EDITME__ in edited:\n eprint(\"Edit is incomplete, method config not installed ...\")\n time.sleep(1)\n continue\n args.config = cfg = edited\n config_put(args)\n return True\n except FireCloudServerError as fce:\n __pretty_print_fc_exception(fce)\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef config_delete(args):\n r = fapi.delete_workspace_config(args.project, args.workspace,\n args.namespace, args.config)\n fapi._check_response_code(r, [200,204])\n return r.text if r.text else None", "response": "Remove a method config from a workspace"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncopying a method config to a new name space or namespace", "response": "def config_copy(args):\n \"\"\" Copy a method config to new name/space/namespace/project (or all 4) \"\"\"\n if not (args.tospace or args.toname or args.toproject or args.tonamespace):\n raise RuntimeError('A new config name OR workspace OR project OR ' +\n 'namespace must be given (or all)')\n\n copy = fapi.get_workspace_config(args.fromproject, args.fromspace,\n args.namespace, args.config)\n fapi._check_response_code(copy, 200)\n\n copy = copy.json()\n if not args.toname:\n args.toname = args.config\n\n if not args.tonamespace:\n args.tonamespace = args.namespace\n\n if not args.toproject:\n args.toproject = args.fromproject\n\n if not args.tospace:\n args.tospace = args.fromspace\n\n copy['name'] = args.toname\n copy['namespace'] = args.tonamespace\n\n # Instantiate the copy\n r = fapi.overwrite_workspace_config(args.toproject, args.tospace,\n args.tonamespace, args.toname, copy)\n fapi._check_response_code(r, 200)\n\n if fcconfig.verbosity:\n print(\"Method %s/%s:%s copied 
to %s/%s:%s\" % (\n args.fromproject, args.fromspace, args.config,\n args.toproject, args.tospace, args.toname))\n\n return 0"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef attr_get(args):\n '''Return a dict of attribute name/value pairs: if entity name & type\n are specified then attributes will be retrieved from that entity,\n otherwise workspace-level attributes will be returned. By default all\n attributes attached to the given object will be returned, but a subset\n can be selected by specifying a list of attribute names; names which\n refer to a non-existent attribute will be silently ignored. By default\n a special __header__ entry is optionally added to the result. '''\n\n if args.entity_type and args.entity:\n r = fapi.get_entity(args.project, args.workspace, args.entity_type, args.entity)\n fapi._check_response_code(r, 200)\n attrs = r.json()['attributes']\n # It is wrong for the members of container objects to appear as metadata\n # (attributes) attached to the container, as it conflates fundamentally\n # different things: annotation vs membership. This can be seen by using\n # a suitcase model of reasoning: ATTRIBUTES are METADATA like the weight\n # & height of the suitcase, the shipping label and all of its passport\n # stamps; while MEMBERS are the actual CONTENTS INSIDE the suitcase.\n # This conflation also contradicts the design & docs (which make a clear\n # distinction between MEMBERSHIP and UPDATE loadfiles). 
For this reason\n # a change has been requested of the FireCloud dev team (via forum), and\n # until it is implemented we will elide \"list of members\" attributes here\n # (but later may provide a way for users to request such, if wanted)\n for k in [\"samples\", \"participants\", \"pairs\"]:\n attrs.pop(k, None)\n else:\n r = fapi.get_workspace(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n attrs = r.json()['workspace']['attributes']\n\n if args.attributes: # return a subset of attributes, if requested\n attrs = {k:attrs[k] for k in set(attrs).intersection(args.attributes)}\n\n # If some attributes have been collected, return in appropriate format\n if attrs:\n if args.entity: # Entity attributes\n\n def textify(thing):\n if isinstance(thing, dict):\n thing = thing.get(\"items\", thing.get(\"entityName\", \"__UNKNOWN__\"))\n return \"{0}\".format(thing)\n\n result = {args.entity : u'\\t'.join(map(textify, attrs.values()))}\n # Add \"hidden\" header of attribute names, for downstream convenience\n object_id = u'entity:%s_id' % args.entity_type\n result['__header__'] = [object_id] + list(attrs.keys())\n else:\n result = attrs # Workspace attributes\n else:\n result = {}\n\n return result", "response": "Return a dict of attribute name - value pairs for a given entity type or workspace - level object."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nretrieve names of all attributes attached to a given object either an entity type + name or workspace", "response": "def attr_list(args):\n '''Retrieve names of all attributes attached to a given object, either\n an entity (if entity type+name is provided) or workspace (if not)'''\n args.attributes = None\n result = attr_get(args)\n names = result.get(\"__header__\",[])\n if names:\n names = names[1:]\n else:\n names = result.keys()\n return sorted(names)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting key = value attributes for a single 
entity or a single workspace", "response": "def attr_set(args):\n ''' Set key=value attributes: if entity name & type are specified then\n attributes will be set upon that entity, otherwise the attribute will\n be set at the workspace level'''\n\n if args.entity_type and args.entity:\n prompt = \"Set {0}={1} for {2}:{3} in {4}/{5}?\\n[Y\\\\n]: \".format(\n args.attribute, args.value, args.entity_type,\n args.entity, args.project, args.workspace)\n\n if not (args.yes or _confirm_prompt(\"\", prompt)):\n return 0\n\n update = fapi._attr_set(args.attribute, args.value)\n r = fapi.update_entity(args.project, args.workspace, args.entity_type,\n args.entity, [update])\n fapi._check_response_code(r, 200)\n else:\n prompt = \"Set {0}={1} in {2}/{3}?\\n[Y\\\\n]: \".format(\n args.attribute, args.value, args.project, args.workspace\n )\n\n if not (args.yes or _confirm_prompt(\"\", prompt)):\n return 0\n\n update = fapi._attr_set(args.attribute, args.value)\n r = fapi.update_workspace_attributes(args.project, args.workspace,\n [update])\n fapi._check_response_code(r, 200)\n return 0"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef attr_delete(args):\n ''' Delete key=value attributes: if entity name & type are specified then\n attributes will be deleted from that entity, otherwise the attribute will\n be removed from the workspace'''\n\n if args.entity_type and args.entities:\n # Since there is no attribute deletion endpoint, we must perform 2 steps\n # here: first we retrieve the entity_ids, and any foreign keys (e.g.\n # participant_id for sample_id); and then construct a loadfile which\n # specifies which entities are to have what attributes removed. 
Note\n # that FireCloud uses the magic keyword __DELETE__ to indicate that\n # an attribute should be deleted from an entity.\n\n # Step 1: see what entities are present, and filter to those requested\n entities = _entity_paginator(args.project, args.workspace,\n args.entity_type,\n page_size=1000, filter_terms=None,\n sort_direction=\"asc\")\n if args.entities:\n entities = [e for e in entities if e['name'] in args.entities]\n\n # Step 2: construct a loadfile to delete these attributes\n attrs = sorted(args.attributes)\n etype = args.entity_type\n\n entity_data = []\n for entity_dict in entities:\n name = entity_dict['name']\n line = name\n # TODO: Fix other types?\n if etype == \"sample\":\n line += \"\\t\" + entity_dict['attributes']['participant']['entityName']\n for attr in attrs:\n line += \"\\t__DELETE__\"\n # Improve performance by only updating records that have changed\n entity_data.append(line)\n\n entity_header = [\"entity:\" + etype + \"_id\"]\n if etype == \"sample\":\n entity_header.append(\"participant_id\")\n entity_header = '\\t'.join(entity_header + list(attrs))\n\n # Remove attributes from an entity\n message = \"WARNING: this will delete these attributes:\\n\\n\" + \\\n ','.join(args.attributes) + \"\\n\\n\"\n if args.entities:\n message += 'on these {0}s:\\n\\n'.format(args.entity_type) + \\\n ', '.join(args.entities)\n else:\n message += 'on all {0}s'.format(args.entity_type)\n message += \"\\n\\nin workspace {0}/{1}\\n\".format(args.project, args.workspace)\n if not args.yes and not _confirm_prompt(message):\n return 0\n\n # TODO: reconcile with other batch updates\n # Chunk the entities into batches of 500, and upload to FC\n if args.verbose:\n print(\"Batching \" + str(len(entity_data)) + \" updates to Firecloud...\")\n chunk_len = 500\n total = int(len(entity_data) / chunk_len) + 1\n batch = 0\n for i in range(0, len(entity_data), chunk_len):\n batch += 1\n if args.verbose:\n print(\"Updating samples {0}-{1}, batch {2}/{3}\".format(\n 
i+1, min(i+chunk_len, len(entity_data)), batch, total\n ))\n this_data = entity_header + '\\n' + '\\n'.join(entity_data[i:i+chunk_len])\n\n # Now push the entity data back to firecloud\n r = fapi.upload_entities(args.project, args.workspace, this_data)\n fapi._check_response_code(r, 200)\n else:\n message = \"WARNING: this will delete the following attributes in \" + \\\n \"{0}/{1}\\n\\t\".format(args.project, args.workspace) + \\\n \"\\n\\t\".join(args.attributes)\n\n if not (args.yes or _confirm_prompt(message)):\n return 0\n\n updates = [fapi._attr_rem(a) for a in args.attributes]\n r = fapi.update_workspace_attributes(args.project, args.workspace,\n updates)\n fapi._check_response_code(r, 200)\n\n return 0", "response": "Delete attributes from an entity in a workspace."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef attr_copy(args):\n if not args.to_workspace:\n args.to_workspace = args.workspace\n if not args.to_project:\n args.to_project = args.project\n if (args.project == args.to_project\n and args.workspace == args.to_workspace):\n eprint(\"destination project and namespace must differ from\"\n \" source workspace\")\n return 1\n\n # First get the workspace attributes of the source workspace\n r = fapi.get_workspace(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n\n # Parse the attributes\n workspace_attrs = r.json()['workspace']['attributes']\n\n # If we passed attributes, only use those\n if args.attributes:\n workspace_attrs = {k:v for k, v in iteritems(workspace_attrs)\n if k in args.attributes}\n\n if len(workspace_attrs) == 0:\n print(\"No workspace attributes defined in {0}/{1}\".format(\n args.project, args.workspace))\n return 1\n\n message = \"This will copy the following workspace attributes to {0}/{1}\\n\"\n message = message.format(args.to_project, args.to_workspace)\n for k, v in sorted(iteritems(workspace_attrs)):\n message += '\\t{0}\\t{1}\\n'.format(k, 
v)\n\n if not args.yes and not _confirm_prompt(message):\n return 0\n\n # make the attributes into updates\n updates = [fapi._attr_set(k,v) for k,v in iteritems(workspace_attrs)]\n r = fapi.update_workspace_attributes(args.to_project, args.to_workspace,\n updates)\n fapi._check_response_code(r, 200)\n return 0", "response": "Copy workspace attributes between workspaces."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef attr_fill_null(args):\n NULL_SENTINEL = \"gs://broad-institute-gdac/GDAC_FC_NULL\"\n attrs = args.attributes\n\n if not attrs:\n print(\"Error: provide at least one attribute to set\")\n return 1\n\n if 'participant' in attrs or 'samples' in attrs:\n print(\"Error: can't assign null to samples or participant\")\n return 1\n\n # Set entity attributes\n if args.entity_type is not None:\n print(\"Collecting entity data...\")\n # Get existing attributes\n entities = _entity_paginator(args.project, args.workspace,\n args.entity_type,\n page_size=1000, filter_terms=None,\n sort_direction=\"asc\")\n\n # samples need participant_id as well\n #TODO: This may need more fixing for other types\n orig_attrs = list(attrs)\n if args.entity_type == \"sample\":\n attrs.insert(0, \"participant_id\")\n\n header = \"entity:\" + args.entity_type + \"_id\\t\" + \"\\t\".join(attrs)\n # Book keep the number of updates for each attribute\n attr_update_counts = {a : 0 for a in orig_attrs}\n\n # construct new entity data by inserting null sentinel, and counting\n # the number of updates\n entity_data = []\n for entity_dict in entities:\n name = entity_dict['name']\n etype = entity_dict['entityType']\n e_attrs = entity_dict['attributes']\n line = name\n altered = False\n for attr in attrs:\n if attr == \"participant_id\":\n line += \"\\t\" + e_attrs['participant']['entityName']\n continue # This attribute is never updated by fill_null\n if attr not in e_attrs:\n altered = True\n attr_update_counts[attr] += 
1\n line += \"\\t\" + str(e_attrs.get(attr, NULL_SENTINEL))\n # Improve performance by only updating records that have changed\n if altered:\n entity_data.append(line)\n\n # Check to see if all entities are being set to null for any attributes\n # This is usually a mistake, so warn the user\n num_entities = len(entities)\n prompt = \"Continue? [Y\\\\n]: \"\n for attr in orig_attrs:\n if num_entities == attr_update_counts[attr]:\n message = \"WARNING: no {0}s with attribute '{1}'\\n\".format(\n args.entity_type, attr\n )\n if not args.yes and not _confirm_prompt(message, prompt):\n return\n\n # check to see if no sentinels are necessary\n if not any(c != 0 for c in itervalues(attr_update_counts)):\n print(\"No null sentinels required, exiting...\")\n return 0\n\n if args.to_loadfile:\n print(\"Saving loadfile to \" + args.to_loadfile)\n with open(args.to_loadfile, \"w\") as f:\n f.write(header + '\\n')\n f.write(\"\\n\".join(entity_data))\n return 0\n\n updates_table = \" count attribute\\n\"\n for attr in sorted(attr_update_counts):\n count = attr_update_counts[attr]\n updates_table += \"{0:>10} {1}\\n\".format(count, attr)\n\n message = \"WARNING: This will insert null sentinels for \" \\\n \"these attributes:\\n\" + updates_table\n if not args.yes and not _confirm_prompt(message):\n return 0\n\n # Chunk the entities into batches of 500, and upload to FC\n print(\"Batching \" + str(len(entity_data)) + \" updates to Firecloud...\")\n chunk_len = 500\n total = int(len(entity_data) / chunk_len) + 1\n batch = 0\n for i in range(0, len(entity_data), chunk_len):\n batch += 1\n print(\"Updating samples {0}-{1}, batch {2}/{3}\".format(\n i+1, min(i+chunk_len, len(entity_data)), batch, total\n ))\n this_data = header + '\\n' + '\\n'.join(entity_data[i:i+chunk_len])\n\n # Now push the entity data back to firecloud\n r = fapi.upload_entities(args.project, args.workspace, this_data)\n fapi._check_response_code(r, 200)\n\n return 0\n else:\n # TODO: set workspace attributes\n 
print(\"attr_fill_null requires an entity type\")\n return 1", "response": "Assign the null sentinel value for all entities which do not have a value for all attributes."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef mop(args):\n ''' Clean up unreferenced data in a workspace'''\n # First retrieve the workspace to get bucket information\n if args.verbose:\n print(\"Retrieving workspace information...\")\n r = fapi.get_workspace(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n workspace = r.json()\n bucket = workspace['workspace']['bucketName']\n bucket_prefix = 'gs://' + bucket\n workspace_name = workspace['workspace']['name']\n\n if args.verbose:\n print(\"{0} -- {1}\".format(workspace_name, bucket_prefix))\n\n referenced_files = set()\n for value in workspace['workspace']['attributes'].values():\n if isinstance(value, string_types) and value.startswith(bucket_prefix):\n referenced_files.add(value)\n\n # TODO: Make this more efficient with a native api call?\n # # Now run a gsutil ls to list files present in the bucket\n try:\n gsutil_args = ['gsutil', 'ls', 'gs://' + bucket + '/**']\n if args.verbose:\n print(' '.join(gsutil_args))\n bucket_files = subprocess.check_output(gsutil_args, stderr=subprocess.PIPE)\n # Check output produces a string in Py2, Bytes in Py3, so decode if necessary\n if type(bucket_files) == bytes:\n bucket_files = bucket_files.decode()\n\n except subprocess.CalledProcessError as e:\n eprint(\"Error retrieving files from bucket: \" + str(e))\n return 1\n\n bucket_files = set(bucket_files.strip().split('\\n'))\n if args.verbose:\n num = len(bucket_files)\n if args.verbose:\n print(\"Found {0} files in bucket {1}\".format(num, bucket))\n\n # Now build a set of files that are referenced in the bucket\n # 1. 
Get a list of the entity types in the workspace\n r = fapi.list_entity_types(args.project, args.workspace)\n fapi._check_response_code(r, 200)\n entity_types = r.json().keys()\n\n # 2. For each entity type, request all the entities\n for etype in entity_types:\n if args.verbose:\n print(\"Getting annotations for \" + etype + \" entities...\")\n # use the paginated version of the query\n entities = _entity_paginator(args.project, args.workspace, etype,\n page_size=1000, filter_terms=None,\n sort_direction=\"asc\")\n\n for entity in entities:\n for value in entity['attributes'].values():\n if isinstance(value, string_types) and value.startswith(bucket_prefix):\n # 'value' is a file in this bucket\n referenced_files.add(value)\n\n if args.verbose:\n num = len(referenced_files)\n print(\"Found {0} referenced files in workspace {1}\".format(num, workspace_name))\n\n # Set difference shows files in bucket that aren't referenced\n unreferenced_files = bucket_files - referenced_files\n\n # Filter out files like .logs and rc.txt\n def can_delete(f):\n '''Return true if this file should not be deleted in a mop.'''\n # Don't delete logs\n if f.endswith('.log'):\n return False\n # Don't delete return codes from jobs\n if f.endswith('-rc.txt'):\n return False\n # Don't delete tool's exec.sh\n if f.endswith('exec.sh'):\n return False\n\n return True\n\n deleteable_files = [f for f in unreferenced_files if can_delete(f)]\n\n if len(deleteable_files) == 0:\n if args.verbose:\n print(\"No files to mop in \" + workspace['workspace']['name'])\n return 0\n\n if args.verbose or args.dry_run:\n print(\"Found {0} files to delete:\\n\".format(len(deleteable_files))\n + \"\\n\".join(deleteable_files ) + '\\n')\n\n message = \"WARNING: Delete {0} files in {1} ({2})\".format(\n len(deleteable_files), bucket_prefix, workspace['workspace']['name'])\n if args.dry_run or (not args.yes and not _confirm_prompt(message)):\n return 0\n\n # Pipe the deleteable_files into gsutil rm to remove them\n 
gsrm_args = ['gsutil', '-m', 'rm', '-I']\n PIPE = subprocess.PIPE\n STDOUT=subprocess.STDOUT\n if args.verbose:\n print(\"Deleting files with gsutil...\")\n gsrm_proc = subprocess.Popen(gsrm_args, stdin=PIPE, stdout=PIPE, stderr=STDOUT)\n # Pipe the deleteable_files into gsutil\n result = gsrm_proc.communicate(input='\\n'.join(deleteable_files).encode())[0]\n if args.verbose:\n if type(result) == bytes:\n result = result.decode()\n print(result.rstrip())\n return 0", "response": "Clean up unreferenced data in a workspace"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nloops over all sample sets in a workspace performing a func", "response": "def sset_loop(args):\n ''' Loop over all sample sets in a workspace, performing a func '''\n # Ensure that the requested action is a valid fiss_cmd\n fiss_func = __cmd_to_func(args.action)\n if not fiss_func:\n eprint(\"invalid FISS cmd '\" + args.action + \"'\")\n return 1\n\n # First get the sample set names\n r = fapi.get_entities(args.project, args.workspace, \"sample_set\")\n fapi._check_response_code(r, 200)\n\n sample_sets = [entity['name'] for entity in r.json()]\n\n args.entity_type = \"sample_set\"\n for sset in sample_sets:\n print('\\n# {0}::{1}/{2} {3}'.format(args.project, args.workspace, sset,\n args.action))\n args.entity = sset\n # Note how this code is similar to how args.func is called in\n # main so it may make sense to try to a common method for both\n try:\n result = fiss_func(args)\n except Exception as e:\n status = __pretty_print_fc_exception(e)\n if not args.keep_going:\n return status\n printToCLI(result)\n\n return 0"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef monitor(args):\n ''' Retrieve status of jobs submitted from a given workspace, as a list\n of TSV lines sorted by descending order of job submission date'''\n r = fapi.list_submissions(args.project, args.workspace)\n 
fapi._check_response_code(r, 200)\n statuses = sorted(r.json(), key=lambda k: k['submissionDate'], reverse=True)\n header = '\\t'.join(list(statuses[0].keys()))\n expander = lambda v: '{0}'.format(v)\n\n def expander(thing):\n if isinstance(thing, dict):\n entityType = thing.get(\"entityType\", None)\n if entityType:\n return \"{0}:{1}\".format(entityType, thing['entityName'])\n return \"{0}\".format(thing)\n\n # FIXME: this will generally return different column order between Python 2/3\n return [header] + ['\\t'.join( map(expander, v.values())) for v in statuses]", "response": "Retrieve status of jobs submitted from a given workspace as a list\n sorted by descending order of job submission date"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrun legacy Firehose - style workflow of workflows", "response": "def supervise(args):\n ''' Run legacy, Firehose-style workflow of workflows'''\n project = args.project\n workspace = args.workspace\n namespace = args.namespace\n workflow = args.workflow\n sample_sets = args.sample_sets\n recovery_file = args.json_checkpoint\n\n # If no sample sets are provided, run on all sample sets\n if not sample_sets:\n r = fapi.get_entities(args.project, args.workspace, \"sample_set\")\n fapi._check_response_code(r, 200)\n sample_sets = [s['name'] for s in r.json()]\n\n message = \"Sample Sets ({}):\\n\\t\".format(len(sample_sets)) + \\\n \"\\n\\t\".join(sample_sets)\n\n prompt = \"\\nLaunch workflow in \" + project + \"/\" + workspace + \\\n \" on these sample sets? 
[Y\\\\n]: \"\n\n if not args.yes and not _confirm_prompt(message, prompt):\n return\n\n return supervisor.supervise(project, workspace, namespace, workflow,\n sample_sets, recovery_file)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef entity_copy(args):\n if not args.to_workspace:\n args.to_workspace = args.workspace\n if not args.to_project:\n args.to_project = args.project\n if (args.project == args.to_project\n and args.workspace == args.to_workspace):\n eprint(\"destination project and namespace must differ from\"\n \" source workspace\")\n return 1\n\n if not args.entities:\n # get a list of entities from source workspace matching entity_type\n ents = _entity_paginator(args.project, args.workspace, args.entity_type,\n page_size=500, filter_terms=None,\n sort_direction='asc')\n args.entities = [e['name'] for e in ents]\n\n prompt = \"Copy {0} {1}(s) from {2}/{3} to {4}/{5}?\\n[Y\\\\n]: \"\n prompt = prompt.format(len(args.entities), args.entity_type, args.project,\n args.workspace, args.to_project, args.to_workspace)\n\n if not args.yes and not _confirm_prompt(\"\", prompt):\n return\n\n r = fapi.copy_entities(args.project, args.workspace, args.to_project,\n args.to_workspace, args.entity_type, args.entities,\n link_existing_entities=args.link)\n fapi._check_response_code(r, 201)\n return 0", "response": "Copy entities from one workspace to another."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef proj_list(args):\n '''Retrieve the list of billing projects accessible to the caller/user, and\n show the level of access granted for each (e.g. 
Owner, User, ...)'''\n projects = fapi.list_billing_projects()\n fapi._check_response_code(projects, 200)\n projects = sorted(projects.json(), key=lambda d: d['projectName'])\n l = map(lambda p: '{0}\\t{1}'.format(p['projectName'], p['role']), projects)\n # FIXME: add username col to output, for when iterating over multiple users\n return [\"Project\\tRole\"] + l", "response": "Retrieve the list of billing projects accessible to the caller and the caller s user and the level of access granted for each"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef config_validate(args):\n ''' Validate a workspace configuration: if an entity was specified (i.e.\n upon which the configuration should operate), then also validate that\n the entity has the necessary attributes'''\n\n r = fapi.validate_config(args.project, args.workspace, args.namespace,\n args.config)\n fapi._check_response_code(r, 200)\n entity_d = None\n config_d = r.json()\n if args.entity:\n entity_type = config_d['methodConfiguration']['rootEntityType']\n entity_r = fapi.get_entity(args.project, args.workspace,\n entity_type, args.entity)\n fapi._check_response_code(entity_r, [200,404])\n if entity_r.status_code == 404:\n eprint(\"Error: No {0} named '{1}'\".format(entity_type, args.entity))\n return 2\n else:\n entity_d = entity_r.json()\n\n # also get the workspace info\n w = fapi.get_workspace(args.project, args.workspace)\n fapi._check_response_code(w, 200)\n workspace_d = w.json()\n\n ii, io, ma, mwa = _validate_helper(args, config_d, workspace_d, entity_d)\n ii_msg = \"\\nInvalid inputs:\"\n io_msg = \"\\nInvalid outputs:\"\n ma_msg = \"\\n{0} {1} doesn't satisfy the following inputs:\".format(entity_type, args.entity) if args.entity else \"\"\n mwa_msg = \"\\nWorkspace {0}/{1} doesn't satisfy following inputs:\".format(args.project, args.workspace)\n\n for errs, msg in zip([ii, io, ma, mwa], [ii_msg, io_msg, ma_msg, mwa_msg]):\n if errs:\n print(msg)\n for 
inp, val in errs:\n print(\"{0} -> {1}\".format(inp, val))\n\n if ii + io + ma + mwa:\n return 1", "response": "Validate a workspace configuration"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns FISSFC validation information on config for a certain entity", "response": "def _validate_helper(args, config_d, workspace_d, entity_d=None):\n \"\"\" Return FISSFC validation information on config for a certain entity \"\"\"\n # 4 ways to have invalid config:\n invalid_inputs = sorted(config_d[\"invalidInputs\"])\n invalid_outputs = sorted(config_d[\"invalidOutputs\"])\n\n # Also insert values for invalid i/o\n invalid_inputs = [(i, config_d['methodConfiguration']['inputs'][i]) for i in invalid_inputs]\n invalid_outputs = [(i, config_d['methodConfiguration']['outputs'][i]) for i in invalid_outputs]\n\n missing_attrs = []\n missing_wksp_attrs = []\n\n # If an entity was provided, also check to see if that entity has the necessary inputs\n if entity_d:\n entity_type = config_d['methodConfiguration']['rootEntityType']\n\n # If the attribute is listed here, it has an entry\n entity_attrs = set(entity_d['attributes'])\n\n # Optimization, only get the workspace attrs if the method config has any\n workspace_attrs = workspace_d['workspace']['attributes']\n\n # So now iterate over the inputs\n for inp, val in iteritems(config_d['methodConfiguration']['inputs']):\n # Must be an attribute on the entity\n if val.startswith(\"this.\"):\n # Normally, the value is of the form 'this.attribute',\n # but for operations on sets, e.g. one can also do\n # 'this.samples.attr'. But even in this case, there must be a\n # 'samples' attribute on the sample set, so checking for the middle\n # value works as expected. 
Other pathological cases would've been\n # caught above by the validation endpoint\n expected_attr = val.split('.')[1]\n # 'name' is special, it really means '_id', which everything has\n if expected_attr == \"name\":\n continue\n if expected_attr not in entity_attrs:\n missing_attrs.append((inp, val))\n\n if val.startswith(\"workspace.\"):\n # Anything not matching this format will be caught above\n expected_attr = val.split('.')[1]\n if expected_attr not in workspace_attrs:\n missing_wksp_attrs.append((inp, val))\n # Anything else is a literal\n\n return invalid_inputs, invalid_outputs, missing_attrs, missing_wksp_attrs"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nshow me what can be run in a given workspace", "response": "def runnable(args):\n \"\"\" Show me what can be run in a given workspace \"\"\"\n w = fapi.get_workspace(args.project, args.workspace)\n fapi._check_response_code(w, 200)\n workspace_d = w.json()\n\n if args.config and args.namespace and not args.entity:\n # See what entities I can run on with this config\n r = fapi.validate_config(args.project, args.workspace, args.namespace,\n args.config)\n fapi._check_response_code(r, 200)\n config_d = r.json()\n\n\n\n # First validate without any sample sets\n errs = sum(_validate_helper(args, config_d, workspace_d, None), [])\n if errs:\n print(\"Configuration contains invalid expressions\")\n return 1\n\n # Now get all the possible entities, and evaluate each\n entity_type = config_d['methodConfiguration']['rootEntityType']\n ent_r = fapi.get_entities(args.project, args.workspace, entity_type)\n fapi._check_response_code(r, 200)\n entities = ent_r.json()\n\n can_run_on = []\n cannot_run_on = []\n\n # Validate every entity\n for entity_d in entities:\n # If there are errors in the validation\n if sum(_validate_helper(args, config_d, workspace_d, entity_d), []):\n cannot_run_on.append(entity_d['name'])\n else:\n can_run_on.append(entity_d['name'])\n\n # Print 
what can be run\n if can_run_on:\n print(\"{0} CAN be run on {1} {2}(s):\".format(args.config, len(can_run_on), entity_type))\n print(\"\\n\".join(can_run_on)+\"\\n\")\n\n print(\"{0} CANNOT be run on {1} {2}(s)\".format(args.config, len(cannot_run_on), entity_type))\n #print(\"\\n\".join(cannot_run_on))\n\n # See what method configs are possible for the given sample set\n elif args.entity and args.entity_type and not args.config:\n entity_r = fapi.get_entity(args.project, args.workspace,\n args.entity_type, args.entity)\n fapi._check_response_code(entity_r, [200,404])\n if entity_r.status_code == 404:\n print(\"Error: No {0} named '{1}'\".format(args.entity_type, args.entity))\n return 2\n entity_d = entity_r.json()\n\n # Now get all the method configs in the workspace\n conf_r = fapi.list_workspace_configs(args.project, args.workspace)\n fapi._check_response_code(conf_r, 200)\n\n # Iterate over configs in the workspace, and validate against them\n for cfg in conf_r.json():\n # If we limit search to a particular namespace, skip ones that don't match\n if args.namespace and cfg['namespace'] != args.namespace:\n continue\n\n # But we have to get the full description\n r = fapi.validate_config(args.project, args.workspace,\n cfg['namespace'], cfg['name'])\n fapi._check_response_code(r, [200, 404])\n if r.status_code == 404:\n # Permission error, continue\n continue\n config_d = r.json()\n errs = sum(_validate_helper(args, config_d, workspace_d, entity_d),[])\n if not errs:\n print(cfg['namespace'] + \"/\" + cfg['name'])\n\n elif args.entity_type:\n # Last mode, build a matrix of everything based on the entity type\n # Get all of the entity_type\n ent_r = fapi.get_entities(args.project, args.workspace, args.entity_type)\n fapi._check_response_code(ent_r, 200)\n entities = ent_r.json()\n entity_names = sorted(e['name'] for e in entities)\n\n conf_r = fapi.list_workspace_configs(args.project, args.workspace)\n fapi._check_response_code(conf_r, 200)\n conf_list = 
conf_r.json()\n config_names = sorted(c['namespace'] + '/' + c['name'] for c in conf_list)\n mat = {c:dict() for c in config_names}\n\n # Now iterate over configs, building up the matrix\n # Iterate over configs in the workspace, and validate against them\n for cfg in conf_list:\n\n # If we limit search to a particular namespace, skip ones that don't match\n if args.namespace and cfg['namespace'] != args.namespace:\n continue\n # But we have to get the full description\n r = fapi.validate_config(args.project, args.workspace,\n cfg['namespace'], cfg['name'])\n fapi._check_response_code(r, [200, 404])\n if r.status_code == 404:\n # Permission error, continue\n continue\n config_d = r.json()\n\n # Validate against every entity\n for entity_d in entities:\n errs = sum(_validate_helper(args, config_d, workspace_d, entity_d),[])\n #TODO: True/False? Y/N?\n symbol = \"X\" if not errs else \"\"\n cfg_name = cfg['namespace'] + '/' + cfg['name']\n mat[cfg_name][entity_d['name']] = symbol\n\n # Now print the validation matrix\n # headers\n print(\"Namespace/Method Config\\t\" + \"\\t\".join(entity_names))\n for conf in config_names:\n print(conf + \"\\t\" + \"\\t\".join(mat[conf][e] for e in entity_names))\n\n\n else:\n print(\"runnable requires a namespace+configuration or entity type\")\n return 1"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _confirm_prompt(message, prompt=\"\\nAre you sure? 
[y/yes (default: no)]: \",\n affirmations=(\"Y\", \"Yes\", \"yes\", \"y\")):\n \"\"\"\n Display a message, then confirmation prompt, and return true\n if the user responds with one of the affirmations.\n \"\"\"\n answer = input(message + prompt)\n return answer in affirmations", "response": "Display a message and then return true\n if the user responds with one of the affirmations."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _entity_paginator(namespace, workspace, etype, page_size=500,\n filter_terms=None, sort_direction=\"asc\"):\n \"\"\"Pages through the get_entities_query endpoint to get all entities in\n the workspace without crashing.\n \"\"\"\n\n page = 1\n all_entities = []\n # Make initial request\n r = fapi.get_entities_query(namespace, workspace, etype, page=page,\n page_size=page_size, sort_direction=sort_direction,\n filter_terms=filter_terms)\n fapi._check_response_code(r, 200)\n\n response_body = r.json()\n # Get the total number of pages\n total_pages = response_body['resultMetadata']['filteredPageCount']\n\n # append the first set of results\n entities = response_body['results']\n all_entities.extend(entities)\n # Now iterate over remaining pages to retrieve all the results\n page = 2\n while page <= total_pages:\n r = fapi.get_entities_query(namespace, workspace, etype, page=page,\n page_size=page_size, sort_direction=sort_direction,\n filter_terms=filter_terms)\n fapi._check_response_code(r, 200)\n entities = r.json()['results']\n all_entities.extend(entities)\n page += 1\n\n return all_entities", "response": "Page through the get_entities_query endpoint to get all entities in a workspace without crashing."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the function object in this module matching cmd.", "response": "def __cmd_to_func(cmd):\n \"\"\" Returns the function object in this module matching cmd. 
\"\"\"\n fiss_module = sys.modules[__name__]\n # Returns None if string is not a recognized FISS command\n func = getattr(fiss_module, cmd, None)\n if func and not hasattr(func, 'fiss_cmd'):\n func = None\n return func"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn true if the given string is a valid loadfile header", "response": "def _valid_headerline(l):\n \"\"\"return true if the given string is a valid loadfile header\"\"\"\n\n if not l:\n return False\n headers = l.split('\\t')\n first_col = headers[0]\n\n tsplit = first_col.split(':')\n if len(tsplit) != 2:\n return False\n\n if tsplit[0] in ('entity', 'update'):\n return tsplit[1] in ('participant_id', 'participant_set_id',\n 'sample_id', 'sample_set_id',\n 'pair_id', 'pair_set_id')\n elif tsplit[0] == 'membership':\n if len(headers) < 2:\n return False\n # membership:sample_set_id sample_id, e.g.\n return tsplit[1].replace('set_', '') == headers[1]\n else:\n return False"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nbatching load a set of entity data into a single object", "response": "def _batch_load(project, workspace, headerline, entity_data, chunk_size=500):\n \"\"\" Submit a large number of entity updates in batches of chunk_size \"\"\"\n\n\n if fcconfig.verbosity:\n print(\"Batching \" + str(len(entity_data)) + \" updates to Firecloud...\")\n\n # Parse the entity type from the first cell, e.g. 
\"entity:sample_id\"\n # First check that the header is valid\n if not _valid_headerline(headerline):\n eprint(\"Invalid loadfile header:\\n\" + headerline)\n return 1\n\n update_type = \"membership\" if headerline.startswith(\"membership\") else \"entitie\"\n etype = headerline.split('\\t')[0].split(':')[1].replace(\"_id\", \"\")\n\n # Split entity_data into chunks\n total = int(len(entity_data) / chunk_size) + 1\n batch = 0\n for i in range(0, len(entity_data), chunk_size):\n batch += 1\n if fcconfig.verbosity:\n print(\"Updating {0} {1}s {2}-{3}, batch {4}/{5}\".format(\n etype, update_type, i+1, min(i+chunk_size, len(entity_data)),\n batch, total))\n this_data = headerline + '\\n' + '\\n'.join(entity_data[i:i+chunk_size])\n\n # Now push the entity data to firecloud\n r = fapi.upload_entities(project, workspace, this_data)\n fapi._check_response_code(r, 200)\n\n return 0"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef main_as_cli(argv=None):\n '''Use this entry point to call HL fiss funcs as though from the UNIX CLI.\n (see firecloud/tests/highlevel_tests.py:call_cli for usage examples)'''\n try:\n result = main(argv)\n except Exception as e:\n result = __pretty_print_fc_exception(e)\n # FIXME: we should invert True/False return values to 0/1 here, to comply\n # with UNIX exit code semantics (and avoid problems with make, scripts, etc)\n return printToCLI(result)", "response": "Use this entry point to call HL fiss funcs as though from the UNIX CLI."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create_payload(entities):\n #First check that all entities are of the same type\n types = {e.etype for e in entities}\n if len(types) != 1:\n raise ValueError(\"Can't create payload with \" +\n str(len(types)) + \" types\")\n\n all_attrs = set()\n for e in entities:\n all_attrs.update(set(e.attrs.keys()))\n\n #Write a header line\n all_attrs = list(all_attrs)\n header = 
\"entity:\" + entities[0].etype + \"_id\"\n payload = '\\t'.join([header] + all_attrs) + '\\n'\n\n for e in entities:\n line = e.entity_id\n for a in all_attrs:\n line += '\\t' + e.attrs.get(a, \"\")\n payload += line + '\\n'\n\n return payload", "response": "Create a TSV payload describing the given list of entities."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create_loadfile(entities, f):\n with open(f, 'w') as out:\n out.write(Entity.create_payload(entities))", "response": "Create payload and save to file."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning true if gcloud is unavailable and needed for authentication.", "response": "def needs_gcloud(self):\n \"\"\"Returns true if gcloud is unavailable and needed for\n authentication.\"\"\"\n gcloud_default_path = ['google-cloud-sdk', 'bin']\n if platform.system() != \"Windows\":\n gcloud_default_path = os.path.join(os.path.expanduser('~'),\n *gcloud_default_path)\n else:\n gcloud_default_path = os.path.join(os.environ['LOCALAPPDATA'],\n 'Google', 'Cloud SDK',\n *gcloud_default_path)\n return not os.getenv('SERVER_SOFTWARE',\n '').startswith('Google App Engine/') \\\n and gcloud_default_path not in os.environ[\"PATH\"].split(os.pathsep) \\\n and which('gcloud') is None"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nrunning mogrify. Most of the action is in convert, this just creates a temp file for the output.", "response": "def action(arguments):\n \"\"\"\n Run mogrify. 
Most of the action is in convert, this just creates a temp\n file for the output.\n \"\"\"\n for input_file in arguments.input_files:\n logging.info(input_file)\n # Generate a temporary file\n with common.atomic_write(\n input_file.name, file_factory=common.FileType('wt')) as tf:\n convert.transform_file(input_file, tf, arguments)\n if hasattr(input_file, 'close'):\n input_file.close()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef all_unambiguous(sequence_str):\n result = [[]]\n for c in sequence_str:\n result = [i + [a] for i in result for a in _AMBIGUOUS_MAP.get(c, c)]\n return [''.join(i) for i in result]", "response": "Returns a list of all unambiguous versions of sequence_str."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nbuild a subparser for the command line", "response": "def build_parser(parser):\n \"\"\"\n Generate a subparser\n \"\"\"\n parser.add_argument(\n 'sequence_file',\n type=FileType('r'),\n help=\"\"\"Input fastq file. A fasta-format file may also be provided\n if --input-qual is also specified.\"\"\")\n parser.add_argument(\n '--input-qual',\n type=FileType('r'),\n help=\"\"\"The quality scores associated with the input file. Only\n used if input file is fasta.\"\"\")\n parser.add_argument(\n 'output_file',\n type=FileType('w'),\n help=\"\"\"Output file. 
Format determined from extension.\"\"\")\n\n output_group = parser.add_argument_group(\"Output\")\n output_group.add_argument(\n '--report-out',\n type=FileType('w'),\n default=sys.stdout,\n help=\"\"\"Output file for report [default:\n stdout]\"\"\")\n output_group.add_argument(\n '--details-out',\n type=FileType('w'),\n help=\"\"\"Output file to report fate of each sequence\"\"\")\n output_group.add_argument(\n '--no-details-comment',\n action='store_false',\n default=True,\n dest='details_comment',\n help=\"\"\"Do not write comment\n lines with version and call to start --details-out\"\"\")\n\n parser.add_argument(\n '--min-mean-quality',\n metavar='QUALITY',\n type=float,\n default=DEFAULT_MEAN_SCORE,\n help=\"\"\"Minimum mean quality score for\n each read [default: %(default)s]\"\"\")\n parser.add_argument(\n '--min-length',\n metavar='LENGTH',\n type=int,\n default=200,\n help=\"\"\"Minimum length to keep sequence [default:\n %(default)s]\"\"\")\n parser.add_argument(\n '--max-length',\n metavar='LENGTH',\n type=int,\n default=1000,\n help=\"\"\"Maximum length to keep before truncating\n [default: %(default)s]. This operation occurs before\n --max-ambiguous\"\"\")\n\n window_group = parser.add_argument_group('Quality window options')\n window_group.add_argument(\n '--quality-window-mean-qual',\n type=float,\n help=\"\"\"Minimum quality score within the window defined by\n --quality-window. [default: same as --min-mean-quality]\"\"\")\n window_group.add_argument(\n '--quality-window-prop',\n help=\"\"\"Proportion of\n reads within quality window to that must pass filter. Floats are [default:\n %(default).1f]\"\"\",\n default=1.0,\n type=typed_range(float, 0.0, 1.0))\n window_group.add_argument(\n '--quality-window',\n type=int,\n metavar='WINDOW_SIZE',\n default=0,\n help=\"\"\"Window size for truncating sequences. 
When set\n to a non-zero value, sequences are truncated where the mean mean\n quality within the window drops below --min-mean-quality.\n [default: %(default)s]\"\"\")\n\n parser.add_argument(\n '--ambiguous-action',\n choices=('truncate', 'drop'),\n help=\"\"\"Action to take on ambiguous base in sequence (N's).\n [default: no action]\"\"\")\n parser.add_argument(\n '--max-ambiguous',\n default=None,\n help=\"\"\"Maximum number\n of ambiguous bases in a sequence. Sequences exceeding this count\n will be removed.\"\"\",\n type=int)\n parser.add_argument(\n '--pct-ambiguous',\n help=\"\"\"Maximun percent of\n ambiguous bases in a sequence. Sequences exceeding this percent\n will be removed.\"\"\",\n type=float)\n\n barcode_group = parser.add_argument_group('Barcode/Primer')\n primer_group = barcode_group.add_mutually_exclusive_group()\n primer_group.add_argument(\n '--primer', help=\"\"\"IUPAC ambiguous primer to\n require\"\"\")\n primer_group.add_argument(\n '--no-primer',\n help=\"\"\"Do not use a primer.\"\"\",\n action='store_const',\n const='',\n dest='primer')\n barcode_group.add_argument(\n '--barcode-file',\n help=\"\"\"CSV file containing\n sample_id,barcode[,primer] in the rows. A single primer for all\n sequences may be specified with `--primer`, or `--no-primer` may be\n used to indicate barcodes should be used without a primer\n check.\"\"\",\n type=FileType('r'))\n barcode_group.add_argument(\n '--barcode-header',\n action='store_true',\n default=False,\n help=\"\"\"Barcodes have a header row [default:\n %(default)s]\"\"\")\n barcode_group.add_argument(\n '--map-out',\n help=\"\"\"Path to write\n sequence_id,sample_id pairs\"\"\",\n type=FileType('w'),\n metavar='SAMPLE_MAP')\n barcode_group.add_argument(\n '--quoting',\n help=\"\"\"A string naming an\n attribute of the csv module defining the quoting behavior for\n `SAMPLE_MAP`. 
[default: %(default)s]\"\"\",\n default='QUOTE_MINIMAL',\n choices=[s for s in dir(csv) if s.startswith('QUOTE_')])"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nload label barcode primer records from a CSV file.", "response": "def parse_barcode_file(fp, primer=None, header=False):\n \"\"\"\n Load label, barcode, primer records from a CSV file.\n\n Returns a map from barcode -> label\n\n Any additional columns are ignored\n \"\"\"\n tr = trie.trie()\n reader = csv.reader(fp)\n\n if header:\n # Skip header\n next(reader)\n\n # Skip blank rows\n records = (record for record in reader if record)\n\n for record in records:\n specimen, barcode = record[:2]\n if primer is not None:\n pr = primer\n else:\n pr = record[2]\n for sequence in all_unambiguous(barcode + pr):\n if sequence in tr:\n raise ValueError(\"Duplicate sample: {0}, {1} both have {2}\",\n specimen, tr[sequence], sequence)\n logging.info('%s->%s', sequence, specimen)\n tr[sequence] = specimen\n\n return tr"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ngive parsed arguments filter input files and return a new tree of BioXML objects.", "response": "def action(arguments):\n \"\"\"\n Given parsed arguments, filter input files.\n \"\"\"\n if arguments.quality_window_mean_qual and not arguments.quality_window:\n raise ValueError(\"--quality-window-mean-qual specified without \"\n \"--quality-window\")\n\n if trie is None or triefind is None:\n raise ValueError(\n 'Missing Bio.trie and/or Bio.triefind modules. 
Cannot continue')\n\n filters = []\n input_type = fileformat.from_handle(arguments.sequence_file)\n output_type = fileformat.from_handle(arguments.output_file)\n with arguments.sequence_file as fp:\n if arguments.input_qual:\n sequences = QualityIO.PairedFastaQualIterator(\n fp, arguments.input_qual)\n else:\n sequences = SeqIO.parse(fp, input_type)\n\n listener = RecordEventListener()\n if arguments.details_out:\n rh = RecordReportHandler(arguments.details_out, arguments.argv,\n arguments.details_comment)\n rh.register_with(listener)\n\n # Track read sequences\n sequences = listener.iterable_hook('read', sequences)\n\n # Add filters\n if arguments.min_mean_quality and input_type == 'fastq':\n qfilter = QualityScoreFilter(arguments.min_mean_quality)\n filters.append(qfilter)\n if arguments.max_length:\n max_length_filter = MaxLengthFilter(arguments.max_length)\n filters.append(max_length_filter)\n if arguments.min_length:\n min_length_filter = MinLengthFilter(arguments.min_length)\n filters.append(min_length_filter)\n if arguments.max_ambiguous is not None:\n max_ambig_filter = MaxAmbiguousFilter(arguments.max_ambiguous)\n filters.append(max_ambig_filter)\n if arguments.pct_ambiguous is not None:\n pct_ambig_filter = PctAmbiguousFilter(arguments.pct_ambiguous)\n filters.append(pct_ambig_filter)\n if arguments.ambiguous_action:\n ambiguous_filter = AmbiguousBaseFilter(arguments.ambiguous_action)\n filters.append(ambiguous_filter)\n if arguments.quality_window:\n min_qual = (arguments.quality_window_mean_qual or\n arguments.min_mean_quality)\n window_filter = WindowQualityScoreFilter(arguments.quality_window,\n min_qual)\n filters.insert(0, window_filter)\n\n if arguments.barcode_file:\n with arguments.barcode_file:\n tr = parse_barcode_file(arguments.barcode_file,\n arguments.primer,\n arguments.barcode_header)\n f = PrimerBarcodeFilter(tr)\n filters.append(f)\n\n if arguments.map_out:\n barcode_writer = csv.writer(\n arguments.map_out,\n quoting=getattr(csv, 
arguments.quoting),\n lineterminator='\\n')\n\n def barcode_handler(record, sample, barcode=None):\n barcode_writer.writerow((record.id, sample))\n\n listener.register_handler('found_barcode', barcode_handler)\n for f in filters:\n f.listener = listener\n sequences = f.filter_records(sequences)\n\n # Track sequences which passed all filters\n sequences = listener.iterable_hook('write', sequences)\n\n with arguments.output_file:\n SeqIO.write(sequences, arguments.output_file, output_type)\n\n rpt_rows = (f.report_dict() for f in filters)\n\n # Write report\n with arguments.report_out as fp:\n writer = csv.DictWriter(\n fp, BaseFilter.report_fields, lineterminator='\\n', delimiter='\\t')\n writer.writeheader()\n writer.writerows(rpt_rows)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nfiring an event named ``name`` with each item in iterable", "response": "def iterable_hook(self, name, iterable):\n \"\"\"\n Fire an event named ``name`` with each item in iterable\n \"\"\"\n for record in iterable:\n self(name, record)\n yield record"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nhook called when barcode is found", "response": "def _found_barcode(self, record, sample, barcode=None):\n \"\"\"Hook called when barcode is found\"\"\"\n assert record.id == self.current_record['sequence_name']\n self.current_record['sample'] = sample"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef filter_records(self, records):\n for record in records:\n try:\n filtered = self.filter_record(record)\n assert (filtered)\n # Quick tracking whether the sequence was modified\n if filtered.seq == record.seq:\n self.passed_unchanged += 1\n else:\n self.passed_changed += 1\n yield filtered\n except FailedFilter as e:\n self.failed += 1\n v = e.value\n if self.listener:\n self.listener(\n 'failed_filter',\n record,\n filter_name=self.name,\n value=v)", "response": "Apply the filter 
to the records and yield the filtered records."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef filter_record(self, record):\n quality_scores = record.letter_annotations['phred_quality']\n\n mean_score = mean(quality_scores)\n if mean_score >= self.min_mean_score:\n return record\n else:\n raise FailedFilter(mean_score)", "response": "Filter a single record by the mean score."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef filter_record(self, record):\n quality_scores = record.letter_annotations['phred_quality']\n\n # Simple case - window covers whole sequence\n if len(record) <= self.window_size:\n mean_score = mean(quality_scores)\n if mean_score >= self.min_mean_score:\n return record\n else:\n raise FailedFilter(mean_score)\n\n # Find the right clipping point. Start clipping at the beginning of the\n # sequence, then extend the window to include regions with acceptable\n # mean quality scores.\n clip_right = 0\n for i, a in enumerate(\n moving_average(quality_scores, self.window_size)):\n\n if a >= self.min_mean_score:\n clip_right = i + self.window_size\n else:\n break\n\n if clip_right:\n return record[:clip_right]\n else:\n # First window failed - record fails\n raise FailedFilter()", "response": "Filter a single record by the minimum and acceptable mean score."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef filter_record(self, record):\n nloc = record.seq.find('N')\n if nloc == -1:\n return record\n elif self.action == 'truncate':\n return record[:nloc]\n elif self.action == 'drop':\n raise FailedFilter()\n else:\n assert False", "response": "Filter a record by truncating or dropping at an N"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nfilter the record to only contain any that don t meet minimum length.", "response": "def 
filter_record(self, record):\n \"\"\"\n Filter record, dropping any that don't meet minimum length\n \"\"\"\n\n if len(record) >= self.min_length:\n return record\n else:\n raise FailedFilter(len(record))"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nfilter the record to only contain the ones that are longer than self. max_length.", "response": "def filter_record(self, record):\n \"\"\"\n Filter record, truncating any over some maximum length\n \"\"\"\n if len(record) >= self.max_length:\n return record[:self.max_length]\n else:\n return record"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsummarizing a sequence file returning a tuple containing the name whether the file is an alignment minimum sequence length maximum sequence length average length number of sequences.", "response": "def summarize_sequence_file(source_file, file_type=None):\n \"\"\"\n Summarizes a sequence file, returning a tuple containing the name,\n whether the file is an alignment, minimum sequence length, maximum\n sequence length, average length, number of sequences.\n \"\"\"\n is_alignment = True\n avg_length = None\n min_length = sys.maxsize\n max_length = 0\n sequence_count = 0\n\n # Get an iterator and analyze the data.\n with common.FileType('rt')(source_file) as fp:\n if not file_type:\n file_type = fileformat.from_handle(fp)\n for record in SeqIO.parse(fp, file_type):\n sequence_count += 1\n sequence_length = len(record)\n if max_length != 0:\n # If even one sequence is not the same length as the others,\n # we don't consider this an alignment.\n if sequence_length != max_length:\n is_alignment = False\n\n # Lengths\n if sequence_length > max_length:\n max_length = sequence_length\n if sequence_length < min_length:\n min_length = sequence_length\n\n # Average length\n if sequence_count == 1:\n avg_length = float(sequence_length)\n else:\n avg_length = avg_length + ((sequence_length - avg_length) /\n 
sequence_count)\n\n # Handle an empty file:\n if avg_length is None:\n min_length = max_length = avg_length = 0\n if sequence_count <= 1:\n is_alignment = False\n\n return (source_file, str(is_alignment).upper(), min_length,\n max_length, avg_length, sequence_count)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef action(arguments):\n # Ignore SIGPIPE, for head support\n common.exit_on_sigpipe()\n common.exit_on_sigint()\n\n handle = arguments.destination_file\n output_format = arguments.output_format\n if not output_format:\n try:\n output_format = 'align' if handle.isatty() else 'tab'\n except AttributeError:\n output_format = 'tab'\n\n writer_cls = _WRITERS[output_format]\n\n ssf = partial(summarize_sequence_file, file_type = arguments.input_format)\n\n # if only one thread, do not use the multithreading so parent process\n # can be terminated using ctrl+c\n if arguments.threads > 1:\n pool = multiprocessing.Pool(processes=arguments.threads)\n summary = pool.imap(ssf, arguments.source_files)\n else:\n summary = (ssf(f) for f in arguments.source_files)\n\n with handle:\n writer = writer_cls(arguments.source_files, summary, handle)\n writer.write()", "response": "This function is called by the command line interface to determine if the file is an alignment or tab - delimited."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nbuffering for transform functions which require multiple passes through data.", "response": "def _record_buffer(records, buffer_size=DEFAULT_BUFFER_SIZE):\n \"\"\"\n Buffer for transform functions which require multiple passes through data.\n\n Value returned by context manager is a function which returns an iterator\n through records.\n \"\"\"\n with tempfile.SpooledTemporaryFile(buffer_size, mode='wb+') as tf:\n pickler = pickle.Pickler(tf)\n for record in records:\n pickler.dump(record)\n\n def record_iter():\n tf.seek(0)\n unpickler = 
pickle.Unpickler(tf)\n while True:\n try:\n yield unpickler.load()\n except EOFError:\n break\n\n yield record_iter"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef dashes_cleanup(records, prune_chars='.:?~'):\n logging.info(\n \"Applying _dashes_cleanup: converting any of '{}' to '-'.\".format(prune_chars))\n translation_table = {ord(c): '-' for c in prune_chars}\n for record in records:\n record.seq = Seq(str(record.seq).translate(translation_table),\n record.seq.alphabet)\n yield record", "response": "Take an alignment and convert any undesirable characters such as? or ~ to\n -."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef deduplicate_sequences(records, out_file):\n\n logging.info('Applying _deduplicate_sequences generator: '\n 'removing any duplicate records with identical sequences.')\n checksum_sequences = collections.defaultdict(list)\n for record in records:\n checksum = seguid(record.seq)\n sequences = checksum_sequences[checksum]\n if not sequences:\n yield record\n sequences.append(record.id)\n\n if out_file is not None:\n with out_file:\n for sequences in checksum_sequences.values():\n out_file.write('%s\\n' % (' '.join(sequences),))", "response": "Remove any duplicate records with identical sequences keep the first\nPortal instance seen and discard additional occurences."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nremoving any duplicate records with identical IDs keep the first duplicate record seen and discard additional occurences.", "response": "def deduplicate_taxa(records):\n \"\"\"\n Remove any duplicate records with identical IDs, keep the first\n instance seen and discard additional occurences.\n \"\"\"\n logging.info('Applying _deduplicate_taxa generator: ' + \\\n 'removing any duplicate records with identical IDs.')\n taxa = set()\n for record in records:\n # Default to 
full ID, split if | is found.\n taxid = record.id\n if '|' in record.id:\n try:\n taxid = int(record.id.split(\"|\")[0])\n except:\n # If we couldn't parse an integer from the ID, just fall back\n # on the ID\n logging.warn(\"Unable to parse integer taxid from %s\",\n taxid)\n if taxid in taxa:\n continue\n taxa.add(taxid)\n yield record"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef first_name_capture(records):\n logging.info('Applying _first_name_capture generator: '\n 'making sure ID only contains the first whitespace-delimited '\n 'word.')\n whitespace = re.compile(r'\\s+')\n for record in records:\n if whitespace.search(record.description):\n yield SeqRecord(record.seq, id=record.id,\n description=\"\")\n else:\n yield record", "response": "Yields SeqRecords that contain only the first whitespace - delimited word as the name of the sequence."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nfilter the records keeping only sequences whose ID is contained in the handle.", "response": "def include_from_file(records, handle):\n \"\"\"\n Filter the records, keeping only sequences whose ID is contained in the\n handle.\n \"\"\"\n ids = set(i.strip() for i in handle)\n\n for record in records:\n if record.id.strip() in ids:\n yield record"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nyield sequences from the given start and end indices.", "response": "def isolate_region(sequences, start, end, gap_char='-'):\n \"\"\"\n Replace regions before and after start:end with gap chars\n \"\"\"\n # Check arguments\n if end <= start:\n raise ValueError(\"start of slice must precede end ({0} !> {1})\".format(\n end, start))\n\n for sequence in sequences:\n seq = sequence.seq\n start_gap = gap_char * start\n end_gap = gap_char * (len(seq) - end)\n seq = Seq(start_gap + str(seq[start:end]) + end_gap,\n alphabet=seq.alphabet)\n sequence.seq = 
seq\n yield sequence"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndrop all columns present in slices from records", "response": "def drop_columns(records, slices):\n \"\"\"\n Drop all columns present in ``slices`` from records\n \"\"\"\n for record in records:\n # Generate a set of indices to remove\n drop = set(i for slice in slices\n for i in range(*slice.indices(len(record))))\n keep = [i not in drop for i in range(len(record))]\n record.seq = Seq(''.join(itertools.compress(record.seq, keep)), record.seq.alphabet)\n yield record"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncutting records to slices indexed by non - gap positions in record_id", "response": "def cut_sequences_relative(records, slices, record_id):\n \"\"\"\n Cuts records to slices, indexed by non-gap positions in record_id\n \"\"\"\n with _record_buffer(records) as r:\n try:\n record = next(i for i in r() if i.id == record_id)\n except StopIteration:\n raise ValueError(\"Record with id {0} not found.\".format(record_id))\n\n new_slices = _update_slices(record, slices)\n for record in multi_cut_sequences(r(), new_slices):\n yield record"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nmasks sequences in a list of records by the given slices.", "response": "def multi_mask_sequences(records, slices):\n \"\"\"\n Replace characters sliced by slices with gap characters.\n \"\"\"\n for record in records:\n record_indices = list(range(len(record)))\n keep_indices = reduce(lambda i, s: i - frozenset(record_indices[s]),\n slices, frozenset(record_indices))\n seq = ''.join(b if i in keep_indices else '-'\n for i, b in enumerate(str(record.seq)))\n record.seq = Seq(seq)\n yield record"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef prune_empty(records):\n for record in records:\n if not all(c == '-' for c in 
str(record.seq)):\n yield record", "response": "Remove any empty sequences from the sequence list."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _reverse_annotations(old_record, new_record):\n # Copy the annotations over\n for k, v in list(old_record.annotations.items()):\n # Trim if appropriate\n if isinstance(v, (tuple, list)) and len(v) == len(old_record):\n assert len(v) == len(old_record)\n v = v[::-1]\n new_record.annotations[k] = v\n\n # Letter annotations must be lists / tuples / strings of the same\n # length as the sequence\n for k, v in list(old_record.letter_annotations.items()):\n assert len(v) == len(old_record)\n new_record.letter_annotations[k] = v[::-1]", "response": "Reverse the annotations of the old_record to the new_record."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef reverse_sequences(records):\n logging.info('Applying _reverse_sequences generator: '\n 'reversing the order of sites in sequences.')\n for record in records:\n rev_record = SeqRecord(record.seq[::-1], id=record.id,\n name=record.name,\n description=record.description)\n # Copy the annotations over\n _reverse_annotations(record, rev_record)\n\n yield rev_record", "response": "Reverse the order of sites in sequences."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntransforms sequences into reverse complements.", "response": "def reverse_complement_sequences(records):\n \"\"\"\n Transform sequences into reverse complements.\n \"\"\"\n logging.info('Applying _reverse_complement_sequences generator: '\n 'transforming sequences into reverse complements.')\n for record in records:\n rev_record = SeqRecord(record.seq.reverse_complement(),\n id=record.id, name=record.name,\n description=record.description)\n # Copy the annotations over\n _reverse_annotations(record, rev_record)\n\n yield rev_record"} {"SOURCE": "codesearchnet", 
"instruction": "Explain what the following Python 3 code does\ndef ungap_sequences(records, gap_chars=GAP_TABLE):\n logging.info('Applying _ungap_sequences generator: removing all gap characters')\n for record in records:\n yield ungap_all(record, gap_chars)", "response": "Remove gaps from sequences given an alignment."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _update_id(record, new_id):\n old_id = record.id\n record.id = new_id\n\n # At least for FASTA, record ID starts the description\n record.description = re.sub('^' + re.escape(old_id), new_id, record.description)\n return record", "response": "Update a record ID to new_id"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngives a set of sequences append a suffix for each sequence s name.", "response": "def name_append_suffix(records, suffix):\n \"\"\"\n Given a set of sequences, append a suffix for each sequence's name.\n \"\"\"\n logging.info('Applying _name_append_suffix generator: '\n 'Appending suffix ' + suffix + ' to all '\n 'sequence IDs.')\n for record in records:\n new_id = record.id + suffix\n _update_id(record, new_id)\n yield record"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ngive a set of sequences insert a prefix for each sequence s name.", "response": "def name_insert_prefix(records, prefix):\n \"\"\"\n Given a set of sequences, insert a prefix for each sequence's name.\n \"\"\"\n logging.info('Applying _name_insert_prefix generator: '\n 'Inserting prefix ' + prefix + ' for all '\n 'sequence IDs.')\n for record in records:\n new_id = prefix + record.id\n _update_id(record, new_id)\n yield record"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngives a set of sequences and a regular expression filter out any sequences with names that do not match the regular expression.", "response": 
"def name_include(records, filter_regex):\n \"\"\"\n Given a set of sequences, filter out any sequences with names\n that do not match the specified regular expression. Ignore case.\n \"\"\"\n logging.info('Applying _name_include generator: '\n 'including only IDs matching ' + filter_regex +\n ' in results.')\n regex = re.compile(filter_regex)\n for record in records:\n if regex.search(record.id) or regex.search(record.description):\n yield record"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef name_replace(records, search_regex, replace_pattern):\n regex = re.compile(search_regex)\n for record in records:\n maybe_id = record.description.split(None, 1)[0]\n if maybe_id == record.id:\n record.description = regex.sub(replace_pattern, record.description)\n record.id = record.description.split(None, 1)[0]\n else:\n record.id = regex.sub(replace_pattern, record.id)\n record.description = regex.sub(replace_pattern, record.description)\n yield record", "response": "Given a set of sequences replace all occurrences of search_regex with replace_pattern. Ignore case."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef seq_include(records, filter_regex):\n regex = re.compile(filter_regex)\n for record in records:\n if regex.search(str(record.seq)):\n yield record", "response": "Filter any sequences who s seq does not match the filter_regex. 
Ignore case."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef sample(records, k, random_seed=None):\n\n if random_seed is not None:\n random.seed(random_seed)\n\n result = []\n for i, record in enumerate(records):\n if len(result) < k:\n result.append(record)\n else:\n r = random.randint(0, i)\n if r < k:\n result[r] = record\n return result", "response": "Choose a length - k subset of records retaining the input\n order."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nyield the records from the specified head.", "response": "def head(records, head):\n \"\"\"\n Limit results to the top N records.\n With the leading `-', print all but the last N records.\n \"\"\"\n logging.info('Applying _head generator: '\n 'limiting results to top ' + head + ' records.')\n\n if head == '-0':\n for record in records:\n yield record\n elif '-' in head:\n with _record_buffer(records) as r:\n record_count = sum(1 for record in r())\n end_index = max(record_count + int(head), 0)\n for record in itertools.islice(r(), end_index):\n yield record\n else:\n for record in itertools.islice(records, int(head)):\n yield record"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nyielding the records from the top N records.", "response": "def tail(records, tail):\n \"\"\"\n Limit results to the bottom N records.\n Use +N to output records starting with the Nth.\n \"\"\"\n logging.info('Applying _tail generator: '\n 'limiting results to top ' + tail + ' records.')\n\n if tail == '+0':\n for record in records:\n yield record\n elif '+' in tail:\n tail = int(tail) - 1\n for record in itertools.islice(records, tail, None):\n yield record\n else:\n with _record_buffer(records) as r:\n record_count = sum(1 for record in r())\n start_index = max(record_count - int(tail), 0)\n for record in itertools.islice(r(), start_index, None):\n yield record"} {"SOURCE": "codesearchnet", 
"instruction": "How would you code a function in Python 3 to\ngenerate a list with the proportion of gaps by index in a set of sequences.", "response": "def gap_proportion(sequences, gap_chars='-'):\n \"\"\"\n Generates a list with the proportion of gaps by index in a set of\n sequences.\n \"\"\"\n aln_len = None\n gaps = []\n for i, sequence in enumerate(sequences):\n if aln_len is None:\n aln_len = len(sequence)\n gaps = [0] * aln_len\n else:\n if not len(sequence) == aln_len:\n raise ValueError((\"Unexpected sequence length {0}. Is this \"\n \"an alignment?\").format(len(sequence)))\n\n # Update any gap positions in gap list\n for j, char in enumerate(sequence.seq):\n if char in gap_chars:\n gaps[j] += 1\n\n sequence_count = float(i + 1)\n gap_props = [i / sequence_count for i in gaps]\n return gap_props"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntakes a sequence iterator for determining gap between all sequences AttributeNames and trims any gaps that are present in the same position across all sequences AttributeNames.", "response": "def squeeze(records, gap_threshold=1.0):\n \"\"\"\n Remove any gaps that are present in the same position across all sequences\n in an alignment. Takes a second sequence iterator for determining gap\n positions.\n \"\"\"\n with _record_buffer(records) as r:\n gap_proportions = gap_proportion(r())\n\n keep_columns = [g < gap_threshold for g in gap_proportions]\n\n for record in r():\n sequence = str(record.seq)\n # Trim\n squeezed = itertools.compress(sequence, keep_columns)\n yield SeqRecord(Seq(''.join(squeezed)), id=record.id,\n description=record.description)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ngive a list of SeqRecords yield SeqRecords that are in the order they appear in the ID.", "response": "def strip_range(records):\n \"\"\"\n Cut off trailing /- ranges from IDs. 
Ranges must be 1-indexed and\n the stop integer must not be less than the start integer.\n \"\"\"\n logging.info('Applying _strip_range generator: '\n 'removing /- ranges from IDs')\n # Split up and be greedy.\n cut_regex = re.compile(r\"(?P.*)\\/(?P\\d+)\\-(?P\\d+)\")\n for record in records:\n name = record.id\n match = cut_regex.match(str(record.id))\n if match:\n sequence_id = match.group('id')\n start = int(match.group('start'))\n stop = int(match.group('stop'))\n if start > 0 and start <= stop:\n name = sequence_id\n yield SeqRecord(record.seq, id=name,\n description='')"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nyielding the records in the sequence that are transcribed by the transcribe operation.", "response": "def transcribe(records, transcribe):\n \"\"\"\n Perform transcription or back-transcription.\n transcribe must be one of the following:\n dna2rna\n rna2dna\n \"\"\"\n logging.info('Applying _transcribe generator: '\n 'operation to perform is ' + transcribe + '.')\n for record in records:\n sequence = str(record.seq)\n description = record.description\n name = record.id\n if transcribe == 'dna2rna':\n dna = Seq(sequence, IUPAC.ambiguous_dna)\n rna = dna.transcribe()\n yield SeqRecord(rna, id=name, description=description)\n elif transcribe == 'rna2dna':\n rna = Seq(sequence, IUPAC.ambiguous_rna)\n dna = rna.back_transcribe()\n yield SeqRecord(dna, id=name, description=description)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ntranslating a list of records into a sequence sequence.", "response": "def translate(records, translate):\n \"\"\"\n Perform translation from generic DNA/RNA to proteins. Bio.Seq\n does not perform back-translation because the codons would\n more-or-less be arbitrary. Option to translate only up until\n reaching a stop codon. 
translate must be one of the following:\n dna2protein\n dna2proteinstop\n rna2protein\n rna2proteinstop\n \"\"\"\n logging.info('Applying translation generator: '\n 'operation to perform is ' + translate + '.')\n\n to_stop = translate.endswith('stop')\n\n source_type = translate[:3]\n alphabet = {'dna': IUPAC.ambiguous_dna, 'rna': IUPAC.ambiguous_rna}[source_type]\n\n # Get a translation table\n table = {'dna': CodonTable.ambiguous_dna_by_name[\"Standard\"],\n 'rna': CodonTable.ambiguous_rna_by_name[\"Standard\"]}[source_type]\n\n # Handle ambiguities by replacing ambiguous codons with 'X'\n # TODO: this copy operation causes infinite recursion with python3.6 -\n # not sure why it was here to begin with.\n # table = copy.deepcopy(table)\n table.forward_table = CodonWarningTable(table.forward_table)\n\n for record in records:\n sequence = str(record.seq)\n seq = Seq(sequence, alphabet)\n protein = seq.translate(table, to_stop=to_stop)\n yield SeqRecord(protein, id=record.id, description=record.description)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ndiscarding any records that are longer than max_length.", "response": "def max_length_discard(records, max_length):\n \"\"\"\n Discard any records that are longer than max_length.\n \"\"\"\n logging.info('Applying _max_length_discard generator: '\n 'discarding records longer than '\n '.')\n for record in records:\n if len(record) > max_length:\n # Discard\n logging.debug('Discarding long sequence: %s, length=%d',\n record.id, len(record))\n else:\n yield record"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ndiscard any records that are shorter than min_length.", "response": "def min_length_discard(records, min_length):\n \"\"\"\n Discard any records that are shorter than min_length.\n \"\"\"\n logging.info('Applying _min_length_discard generator: '\n 'discarding records shorter than %d.', min_length)\n for record in records:\n if len(record) < 
min_length:\n logging.debug('Discarding short sequence: %s, length=%d',\n record.id, len(record))\n else:\n yield record"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef min_ungap_length_discard(records, min_length):\n for record in records:\n if len(ungap_all(record)) >= min_length:\n yield record", "response": "Discard any records that are shorter than min_length after removing gaps."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nsort sequences by length.", "response": "def sort_length(source_file, source_file_type, direction=1):\n \"\"\"\n Sort sequences by length. 1 is ascending (default) and 0 is descending.\n \"\"\"\n direction_text = 'ascending' if direction == 1 else 'descending'\n\n logging.info('Indexing sequences by length: %s', direction_text)\n\n # Adapted from the Biopython tutorial example.\n\n # Get the lengths and ids, and sort on length\n len_and_ids = sorted((len(rec), rec.id)\n for rec in SeqIO.parse(source_file, source_file_type))\n\n if direction == 0:\n ids = reversed([seq_id for (length, seq_id) in len_and_ids])\n else:\n ids = [seq_id for (length, seq_id) in len_and_ids]\n del len_and_ids # free this memory\n\n # SeqIO.index does not handle gzip instances\n if isinstance(source_file, gzip.GzipFile):\n tmpfile = tempfile.NamedTemporaryFile()\n source_file.seek(0)\n tmpfile.write(source_file.read())\n tmpfile.seek(0)\n source_file = tmpfile\n\n record_index = SeqIO.index(source_file.name, source_file_type)\n\n for seq_id in ids:\n yield record_index[seq_id]"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef batch(iterable, chunk_size):\n i = iter(iterable)\n while True:\n r = list(itertools.islice(i, chunk_size))\n if not r:\n break\n yield r", "response": "Yields items from iterable in chunk_size bits."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nrun 
the alignment of a single sequence.", "response": "def action(arguments):\n \"\"\"\n Run\n \"\"\"\n # Ignore SIGPIPE, for head support\n common.exit_on_sigpipe()\n logging.basicConfig()\n\n prot_sequences = SeqIO.parse(arguments.protein_align,\n fileformat.from_handle(arguments.protein_align))\n nucl_sequences = SeqIO.parse(arguments.nucl_align,\n fileformat.from_handle(arguments.nucl_align))\n\n instance = AlignmentMapper(TRANSLATION_TABLES[arguments.translation_table],\n arguments.fail_action)\n\n SeqIO.write(instance.map_all(prot_sequences, nucl_sequences),\n arguments.out_file, fileformat.from_filename(arguments.out_file.name))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nvalidating that the translation table is valid for the given protein and nucleotide.", "response": "def _validate_translation(self, aligned_prot, aligned_nucl):\n \"\"\"\n Given a seq for protein and nucleotide, ensure that the translation holds\n \"\"\"\n codons = [''.join(i) for i in batch(str(aligned_nucl), 3)]\n for codon, aa in zip(codons, str(aligned_prot)):\n # Check gaps\n if codon == '---' and aa == '-':\n continue\n\n try:\n trans = self.translation_table.forward_table[codon]\n if not trans == aa:\n raise ValueError(\"Codon {0} translates to {1}, not {2}\".format(\n codon, trans, aa))\n except (KeyError, CodonTable.TranslationError):\n if aa != 'X':\n if self.unknown_action == 'fail':\n raise ValueError(\"Unknown codon: {0} mapped to {1}\".format(\n codon, aa))\n elif self.unknown_action == 'warn':\n logging.warn('Cannot verify that unknown codon %s '\n 'maps to %s', codon, aa)\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nmaps alignment to the original sequence.", "response": "def map_alignment(self, prot_seq, nucl_seq):\n \"\"\"\n Use aligned prot_seq to align nucl_seq\n \"\"\"\n if prot_seq.id != nucl_seq.id:\n logging.warning(\n 'ID mismatch: %s != %s. 
Are the sequences in the same order?',\n prot_seq.id, nucl_seq.id)\n\n # Ungap nucleotides\n codons = batch(str(nucl_seq.seq.ungap('-')), 3)\n codons = [''.join(i) for i in codons]\n codon_iter = iter(codons)\n\n ungapped_prot = str(prot_seq.seq).replace('-', '')\n\n if len(ungapped_prot) != len(codons):\n table = self.translation_table.forward_table\n prot_str = ' '.join(' ' + p + ' ' for p in ungapped_prot)\n codon_str = ' '.join(codons)\n trans_str = ' '.join(' ' + table.get(codon, 'X') + ' '\n for codon in codons)\n raise ValueError(\"\"\"Length of codon sequence ({0}) does not match \\\nlength of protein sequence ({1}) for {2}\nProtein: {3}\nCodons: {4}\nTrans. Codons: {5}\"\"\".format(len(codons), len(ungapped_prot), nucl_seq.id, prot_str,\n codon_str, trans_str))\n\n try:\n nucl_align = ['---' if p == '-' else next(codon_iter)\n for p in str(prot_seq.seq)]\n except StopIteration:\n assert False # Should be checked above\n\n result = SeqRecord(Seq(''.join(nucl_align)), id=nucl_seq.id,\n description=nucl_seq.description)\n\n # Validate\n self._validate_translation(prot_seq.seq.upper(), result.seq.upper())\n\n return result"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nmaps all sequences in the protein alignment to nucleotide sequences.", "response": "def map_all(self, prot_alignment, nucl_sequences):\n \"\"\"\n Convert protein sequences to nucleotide alignment\n \"\"\"\n zipped = itertools.zip_longest(prot_alignment, nucl_sequences)\n for p, n in zipped:\n if p is None:\n raise ValueError(\"Exhausted protein sequences\")\n elif n is None:\n raise ValueError(\"Exhausted nucleotide sequences\")\n yield self.map_alignment(p, n)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the BioPython file type corresponding with the input extension.", "response": "def from_extension(extension):\n \"\"\"\n Look up the BioPython file type corresponding with input extension.\n\n Look up is 
case insensitive.\n \"\"\"\n if not extension.startswith('.'):\n raise ValueError(\"Extensions must begin with a period.\")\n try:\n return EXTENSION_TO_TYPE[extension.lower()]\n except KeyError:\n raise UnknownExtensionError(\n \"seqmagick does not know how to handle \" +\n \"files with extensions like this: \" + extension)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef from_filename(file_name):\n base, extension = os.path.splitext(file_name)\n if extension in COMPRESS_EXT:\n # Compressed file\n extension = os.path.splitext(base)[1]\n return from_extension(extension)", "response": "Returns a BioPython object corresponding to the file name."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef from_handle(fh, stream_default='fasta'):\n if fh in (sys.stdin, sys.stdout, sys.stderr):\n return stream_default\n return from_filename(fh.name)", "response": "Return a BioPython object from a file - like object."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nparses command - line arguments for different actions.", "response": "def parse_arguments(argv):\n \"\"\"\n Extract command-line arguments for different actions.\n \"\"\"\n parser = argparse.ArgumentParser(description='seqmagick - Manipulate ' + \\\n ' sequence files.', prog='seqmagick')\n\n parser.add_argument('-V', '--version', action='version',\n version='seqmagick v' + version,\n help=\"Print the version number and exit\")\n parser.add_argument('-v', '--verbose', dest='verbosity',\n action='count', default=1,\n help=\"Be more verbose. 
Specify -vv or -vvv for even more\")\n parser.add_argument('-q', '--quiet', action='store_const', const=0,\n dest='verbosity', help=\"Suppress output\")\n\n # Subparsers\n subparsers = parser.add_subparsers(dest='subparser_name')\n\n parser_help = subparsers.add_parser('help',\n help='Detailed help for actions using help ')\n\n parser_help.add_argument('action')\n\n # Add actions\n actions = {}\n for name, mod in subcommands.itermodules():\n subparser = subparsers.add_parser(name, help=mod.__doc__,\n description=mod.__doc__)\n mod.build_parser(subparser)\n actions[name] = mod.action\n\n arguments = parser.parse_args(argv)\n arguments.argv = argv\n action = arguments.subparser_name\n\n if action == 'help':\n return parse_arguments([str(arguments.action), '-h'])\n\n return actions[action], arguments"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning a dict mapping from an index in the ungapped sequence to an index in the gapped sequence.", "response": "def ungap_index_map(sequence, gap_chars='-'):\n \"\"\"\n Returns a dict mapping from an index in the ungapped sequence to an index\n in the gapped sequence.\n\n >>> ungap_index_map('AC-TG-')\n {0: 0, 1: 1, 2: 3, 3: 4}\n \"\"\"\n counter = itertools.count(0).__next__\n ungap_indexes = [\n counter() if c not in gap_chars else None for c in iter(sequence)\n ]\n return dict(\n (ungapped, gapped)\n for ungapped, gapped in zip(ungap_indexes, range(len(sequence)))\n if ungapped is not None)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning mapping from gapped index to ungapped index.", "response": "def gap_index_map(sequence, gap_chars='-'):\n \"\"\"\n Opposite of ungap_index_map: returns mapping from gapped index to ungapped\n index.\n\n >>> gap_index_map('AC-TG-')\n {0: 0, 1: 1, 3: 2, 4: 3}\n \"\"\"\n return dict(\n (v, k) for k, v in list(ungap_index_map(sequence, gap_chars).items()))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 
function to\ntest if two bases are equal.", "response": "def _iupac_ambiguous_equal(ambig_base, unambig_base):\n \"\"\"\n Tests two bases for equality, accounting for IUPAC ambiguous DNA\n\n ambiguous base may be IUPAC ambiguous, unambiguous must be one of ACGT\n \"\"\"\n iupac_translation = {\n 'A': 'A',\n 'C': 'C',\n 'G': 'G',\n 'T': 'T',\n 'U': 'U',\n 'R': 'AG',\n 'Y': 'CT',\n 'S': 'GC',\n 'W': 'AT',\n 'K': 'GT',\n 'M': 'AC',\n 'B': 'CGT',\n 'D': 'AGT',\n 'H': 'ACT',\n 'V': 'ACG',\n 'N': 'ACGT',\n '-': '-'\n }\n for i in (ambig_base, unambig_base):\n if not len(i) == 1:\n raise ValueError(\"only one base may be passed.\")\n\n return unambig_base.upper() in iupac_translation[ambig_base.upper()]"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef hamming_distance(s1, s2, equality_function=operator.eq):\n if not len(s1) == len(s2):\n raise ValueError(\"String lengths are not equal\")\n\n # Number of non-matching characters:\n return sum(not equality_function(c1, c2) for c1, c2 in zip(s1, s2))", "response": "Returns the hamming distance between two strings."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nfinds the forward and reverse primers in a set of sequences.", "response": "def locate_primers(sequences, forward_primer, reverse_primer,\n reverse_complement, max_hamming_distance):\n \"\"\"\n Find forward and reverse primers in a set of sequences, return two tuples:\n (forward_start, forward_end), (reverse_start, reverse_end)\n \"\"\"\n forward_loc = None\n reverse_loc = None\n seq_length = None\n\n # Reverse complement the reverse primer, if appropriate\n if reverse_complement:\n reverse_primer = reverse_primer.reverse_complement()\n\n forward_aligner = PrimerAligner(forward_primer)\n reverse_aligner = PrimerAligner(reverse_primer)\n\n for i, sequence in enumerate(sequences):\n if seq_length is None:\n seq_length = len(sequence)\n elif len(sequence) != 
seq_length:\n raise ValueError((\"Sequence Length Heterogeneity: {0} != {1}. \"\n \"Is this an alignment?\").format(\n len(sequence), seq_length))\n index_map = ungap_index_map(sequence.seq)\n if forward_loc is None:\n ham_dist, start, end = forward_aligner.align(sequence.seq.ungap())\n if ham_dist <= max_hamming_distance:\n forward_loc = index_map[start], index_map[end]\n logging.info(\"Forward in sequence %d: indexes %d to %d\", i + 1,\n *forward_loc)\n if reverse_loc is None:\n ham_dist, start, end = reverse_aligner.align(sequence.seq.ungap())\n if ham_dist <= max_hamming_distance:\n reverse_loc = index_map[start], index_map[end]\n logging.info(\"Reverse in sequence %d: indexes %d to %d\", i + 1,\n *reverse_loc)\n if forward_loc and reverse_loc:\n # Both found\n # Check order\n if forward_loc[0] > reverse_loc[0]:\n raise PrimerOrderError(forward_loc[0], reverse_loc[0])\n return forward_loc, reverse_loc\n else:\n logging.debug(\n \"Sequence %d: %d/2 primers found\", i + 1,\n sum(j is not None for j in (forward_loc, reverse_loc)))\n\n # Did not find either the forward or reverse primer:\n if not forward_loc:\n raise PrimerNotFound(forward_primer)\n else:\n raise PrimerNotFound(reverse_primer)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nslicing the input sequences from start to end", "response": "def trim(sequences, start, end):\n \"\"\"\n Slice the input sequences from start to end\n \"\"\"\n logging.info(\"Trimming from %d to %d\", start, end)\n return (sequence[start:end] for sequence in sequences)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef action(arguments):\n # Determine file format for input and output\n source_format = (arguments.source_format or\n fileformat.from_handle(arguments.source_file))\n output_format = (arguments.output_format or\n fileformat.from_handle(arguments.output_file))\n\n # Load the alignment\n with arguments.source_file:\n 
sequences = SeqIO.parse(\n arguments.source_file,\n source_format,\n alphabet=Alphabet.Gapped(Alphabet.single_letter_alphabet))\n\n # Locate primers\n (forward_start, forward_end), (reverse_start, reverse_end) = locate_primers(\n sequences, arguments.forward_primer,\n arguments.reverse_primer, arguments.reverse_complement,\n arguments.max_hamming_distance)\n\n # Generate slice indexes\n if arguments.include_primers:\n start = forward_start\n end = reverse_end + 1\n else:\n start = forward_end + 1\n end = reverse_start\n\n # Rewind the input file\n arguments.source_file.seek(0)\n sequences = SeqIO.parse(\n arguments.source_file,\n source_format,\n alphabet=Alphabet.Gapped(Alphabet.single_letter_alphabet))\n\n # Apply the transformation\n prune_action = _ACTIONS[arguments.prune_action]\n transformed_sequences = prune_action(sequences, start, end)\n\n with arguments.output_file:\n SeqIO.write(transformed_sequences, arguments.output_file,\n output_format)", "response": "This function performs the alignment processing."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\naligns the primer to the input sequence returning a tuple of hamming distance start and end index.", "response": "def align(self, sequence):\n \"\"\"\n Aligns the primer to the given query sequence, returning a tuple of:\n\n hamming_distance, start, end\n\n Where hamming distance is the distance between the primer and aligned\n sequence, and start and end give the start and end index of the primer\n relative to the input sequence.\n \"\"\"\n seq_aln, primer_aln, score, start, end = pairwise2.align.globalms(\n str(sequence).upper(), str(self.primer).upper(),\n self.match, self.difference, self.gap_open,\n self.gap_extend, one_alignment_only=True,\n penalize_end_gaps=self.penalize_end_gaps)[0]\n\n # Get an ungapped mapping on the sequence\n index_map = gap_index_map(seq_aln)\n ungap_map = ungap_index_map(primer_aln)\n\n # Trim to primer\n start = ungap_map[0]\n end 
= ungap_map[len(self.primer) - 1]\n\n trimmed = seq_aln[start:end + 1]\n\n ham_dist = hamming_distance(primer_aln[start:end + 1], trimmed,\n _iupac_ambiguous_equal)\n # assert primer_aln[start:end].replace('-', '') == str(self.primer)\n\n # TODO: handle start or end being gap better. For now, just give up\n # and return maxint for the hamming distance\n if trimmed.endswith('-'):\n tail = len(trimmed) - len(trimmed.rstrip('-'))\n end = index_map[end - tail] + 1\n ham_dist = sys.maxsize\n else:\n end = index_map[end]\n if trimmed.startswith('-'):\n start = 0\n ham_dist = sys.maxsize\n else:\n start = index_map[start]\n\n return ham_dist, start, end"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef hash_starts_numeric(records):\n for record in records:\n seq_hash = hashlib.sha1(str(record.seq)).hexdigest()\n if seq_hash[0].isdigit():\n yield record", "response": "A simple function that only accepts records with a numeric start to\nCTYPE their sha - 1 hash."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nopen a file for atomic writing. Generates a temp file, renames to value of ``path``. 
Arguments: ``permissions``: Permissions to set (default: umask) ``file_factory``: If given, the handle yielded will be the result of calling file_factory(path) Additional arguments are passed to tempfile.NamedTemporaryFile", "response": "def atomic_write(path, mode='wt', permissions=None, file_factory=None, **kwargs):\n \"\"\"\n Open a file for atomic writing.\n\n Generates a temp file, renames to value of ``path``.\n\n Arguments:\n ``permissions``: Permissions to set (default: umask)\n ``file_factory``: If given, the handle yielded will be the result of\n calling file_factory(path)\n\n Additional arguments are passed to tempfile.NamedTemporaryFile\n \"\"\"\n if permissions is None:\n permissions = apply_umask()\n # Handle stdout:\n if path == '-':\n yield sys.stdout\n else:\n base_dir = os.path.dirname(path)\n kwargs['suffix'] = os.path.basename(path)\n tf = tempfile.NamedTemporaryFile(\n dir=base_dir, mode=mode, delete=False, **kwargs)\n\n # If a file_factory is given, close, and re-open a handle using the\n # file_factory\n if file_factory is not None:\n tf.close()\n tf = file_factory(tf.name)\n try:\n with tf:\n yield tf\n # Move\n os.rename(tf.name, path)\n os.chmod(path, permissions)\n except:\n os.remove(tf.name)\n raise"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef cut_range(string):\n value_range = string.split(':')\n if len(value_range) == 1:\n start = int(value_range[0])\n stop = start\n elif len(value_range) == 2:\n start, stop = tuple(int(i) if i else None for i in value_range)\n else:\n msg = \"{0} is not a valid, 1-indexed range.\".format(string)\n raise argparse.ArgumentTypeError(msg)\n\n if start == 0 or (stop or sys.maxsize) < (start or 0):\n msg = \"{0} is not a valid, 1-indexed range.\".format(string)\n raise argparse.ArgumentTypeError(msg)\n\n # Convert from 1-indexed to 0-indexed\n if start is not None and start > 0:\n start -= 1\n\n return slice(start, stop)", 
"response": "A custom argparse type to deal with sequences ranges such as 5 : 500."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef typed_range(type_func, minimum, maximum):\n @functools.wraps(type_func)\n def inner(string):\n result = type_func(string)\n if not result >= minimum and result <= maximum:\n raise argparse.ArgumentTypeError(\n \"Please provide a value between {0} and {1}\".format(\n minimum, maximum))\n return result\n return inner", "response": "Require variables to be of the specified type, between minimum and maximum"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef partial_append_action(fn, argument_keys=None):\n if isinstance(argument_keys, str):\n argument_keys = [argument_keys]\n argument_keys = argument_keys or []\n\n class PartialAppendAction(argparse.Action):\n def __init__(self,\n option_strings,\n dest,\n const=None,\n default=None,\n required=False,\n help=None,\n type=None,\n metavar=None,\n nargs=None,\n **kwargs):\n super(PartialAppendAction, self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=len(argument_keys),\n const=const,\n default=default,\n required=required,\n metavar=metavar,\n type=type,\n help=help, **kwargs)\n\n def __call__(self, parser, namespace, values, option_string=None):\n items = copy.copy(getattr(namespace, self.dest, None)) or []\n\n # If no value was set default to empty list\n if values is None:\n values = []\n elif not isinstance(values, list):\n values = [values]\n\n if len(argument_keys) != len(values):\n raise ValueError(\"Unexpected number of values\")\n\n # Generate keyword arguments for the input function\n kwargs = dict(list(zip(argument_keys, values)))\n f = functools.partial(fn, **kwargs)\n items.append(f)\n setattr(namespace, self.dest, items)\n\n return PartialAppendAction", "response": "Returns an argparse. 
Action which appends a partially - applied function to dest."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef positive_value(target_type):\n def inner(string):\n value = target_type(string)\n if not value >= 0:\n raise argparse.ArgumentTypeError(\"Invalid positive number: \" +\n string)\n return value\n\n return inner", "response": "Returns a function that checks that the argument is a positive number."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nadding the options to the parser that will be used to filter the input sequences.", "response": "def add_options(parser):\n \"\"\"\n Add optional arguments to the parser\n \"\"\"\n partial_action = common.partial_append_action\n file_mods = parser.add_argument_group(\"Sequence File Modification\")\n file_mods.add_argument('--line-wrap', dest='line_wrap', metavar='N',\n type=int, help='Adjust line wrap for sequence strings. '\n 'When N is 0, all line breaks are removed. Only fasta files '\n 'are supported for the output format.')\n file_mods.add_argument('--sort', dest='sort',\n choices=['length-asc', 'length-desc', 'name-asc', 'name-desc'],\n help='Perform sorting by length or name, ascending or descending. '\n 'ASCII sorting is performed for names')\n\n parser.epilog = \"\"\"Filters using regular expressions are case-sensitive\n by default. Append \"(?i)\" to a pattern to make it case-insensitive.\"\"\"\n\n seq_mods = parser.add_argument_group(\"Sequence Modificaton\")\n seq_mods.add_argument('--apply-function', type=module_function,\n metavar='/path/to/module.py:function_name[:parameter]',\n help=\"\"\"Specify a custom function to apply to the input sequences,\n specified as /path/to/file.py:function_name. Function should accept\n an iterable of Bio.SeqRecord objects, and yield SeqRecords. If the\n parameter is specified, it will be passed as a string as the second\n argument to the function. 
Specify more than one to chain.\"\"\",\n default=[], action='append')\n seq_mods.add_argument('--cut', dest='transforms',\n metavar=\"start:end[,start2:end2]\",\n type=common.sequence_slices,\n action=partial_action(transform.multi_cut_sequences, 'slices'),\n help=\"\"\"Keep only the residues within the 1-indexed start and end\n positions specified, : separated. Includes last item. Start or end\n can be left unspecified to indicate start/end of sequence. A\n negative start may be provided to indicate an offset from the end\n of the sequence. Note that to prevent negative numbers being\n interpreted as flags, this should be written with an equals\n sign between `--cut` and the argument, e.g.: `--cut=-10:`\"\"\")\n seq_mods.add_argument('--relative-to', dest='cut_relative', metavar='ID',\n help=\"\"\"Apply --cut relative to the indexes of non-gap residues in\n sequence identified by ID\"\"\")\n seq_mods.add_argument('--drop', dest='transforms',\n metavar='start:end[,start2:end2]',\n type=common.sequence_slices,\n action=partial_action(transform.drop_columns, 'slices'),\n help=\"\"\"Remove the residues at the specified indices. Same format as `--cut`.\"\"\")\n seq_mods.add_argument('--dash-gap',\n action=partial_action(transform.dashes_cleanup), dest='transforms',\n help=\"\"\"Replace any of the characters \"?.:~\" with a \"-\" for all\n sequences\"\"\")\n seq_mods.add_argument('--lower',\n action=partial_action(transform.lower_sequences),\n dest='transforms', help='Translate the sequences to lower case')\n seq_mods.add_argument('--mask', metavar=\"start1:end1[,start2:end2]\",\n action=partial_action(transform.multi_mask_sequences, 'slices'),\n type=common.sequence_slices, dest='transforms', help=\"\"\"Replace\n residues in 1-indexed slice with gap-characters. 
If --relative-to\n is also specified, coordinates are relative to the sequence ID\n provided.\"\"\")\n seq_mods.add_argument('--reverse',\n action=partial_action(transform.reverse_sequences),\n dest='transforms', help='Reverse the order of sites in sequences')\n seq_mods.add_argument('--reverse-complement', dest='transforms',\n action=partial_action(transform.reverse_complement_sequences),\n help='Convert sequences into reverse complements')\n seq_mods.add_argument('--squeeze', action=partial_action(transform.squeeze),\n dest='transforms',\n help='''Remove any gaps that are present in the same\n position across all sequences in an alignment (equivalent to\n --squeeze-threshold=1.0)''')\n seq_mods.add_argument('--squeeze-threshold', dest='transforms',\n action=partial_action(transform.squeeze, 'gap_threshold'),\n type=common.typed_range(float, 0.0, 1.0),\n metavar='PROP', help=\"\"\"Trim columns from an alignment which\n have gaps in least the specified proportion of sequences.\"\"\")\n seq_mods.add_argument('--transcribe', dest='transforms',\n action=partial_action(transform.transcribe, 'transcribe'),\n choices=('dna2rna', 'rna2dna'), help=\"\"\"Transcription and back\n transcription for generic DNA and RNA. Source sequences must be the\n correct alphabet or this action will likely produce incorrect\n results.\"\"\")\n seq_mods.add_argument('--translate', dest='transforms',\n action=partial_action(transform.translate, 'translate'),\n choices=['dna2protein', 'rna2protein', 'dna2proteinstop',\n 'rna2proteinstop'], help=\"\"\"Translate from generic DNA/RNA to\n proteins. Options with \"stop\" suffix will NOT translate through\n stop codons . 
Source sequences must be the correct alphabet or\n this action will likely produce incorrect results.\"\"\")\n seq_mods.add_argument('--ungap',\n action=partial_action(transform.ungap_sequences),\n dest='transforms', help='Remove gaps in the sequence alignment')\n seq_mods.add_argument('--upper',\n action=partial_action(transform.upper_sequences),\n dest='transforms', help='Translate the sequences to upper case')\n\n seq_select = parser.add_argument_group(\"Record Selection\")\n\n seq_select.add_argument('--deduplicate-sequences',\n action='store_const', const=None, default=False,\n dest='deduplicate_sequences', help='Remove any duplicate sequences '\n 'by sequence content, keep the first instance seen')\n seq_select.add_argument('--deduplicated-sequences-file', action='store',\n metavar='FILE', dest='deduplicate_sequences', default=False,\n type=common.FileType('wt'),\n help='Write all of the deduplicated sequences to a file')\n seq_select.add_argument('--deduplicate-taxa',\n action=partial_action(transform.deduplicate_taxa),\n dest='transforms', help=\"\"\"Remove any duplicate sequences by ID,\n keep the first instance seen\"\"\")\n seq_select.add_argument('--exclude-from-file', metavar='FILE',\n type=common.FileType('rt'), help=\"\"\"Filter sequences, removing\n those sequence IDs in the specified file\"\"\", dest='transforms',\n action=partial_action(transform.exclude_from_file, 'handle'))\n seq_select.add_argument('--include-from-file', metavar='FILE',\n type=common.FileType('rt'), help=\"\"\"Filter sequences, keeping only\n those sequence IDs in the specified file\"\"\", dest='transforms',\n action=partial_action(transform.include_from_file, 'handle'))\n seq_select.add_argument('--head', metavar='N', dest='transforms',\n action=partial_action(transform.head, 'head'), help=\"\"\"Trim\n down to top N sequences. 
With the leading `-', print all but the last N sequences.\"\"\")\n seq_select.add_argument('--max-length', dest='transforms', metavar='N',\n action=partial_action(transform.max_length_discard, 'max_length'),\n type=int, help=\"\"\"Discard any sequences beyond the specified\n maximum length. This operation occurs *before* all length-changing\n options such as cut and squeeze.\"\"\")\n seq_select.add_argument('--min-length', dest='transforms', metavar='N',\n action=partial_action(transform.min_length_discard, 'min_length'),\n type=int, help=\"\"\"Discard any sequences less than the specified\n minimum length. This operation occurs *before* cut and squeeze.\"\"\")\n seq_select.add_argument('--min-ungapped-length', metavar='N',\n action=partial_action(transform.min_ungap_length_discard,\n 'min_length'), type=int, help=\"\"\"Discard any sequences less\n than the specified minimum length, excluding gaps. This\n operation occurs *before* cut and squeeze.\"\"\",\n dest='transforms')\n seq_select.add_argument('--pattern-include', metavar='REGEX',\n action=partial_action(transform.name_include, 'filter_regex'),\n dest='transforms', help=\"\"\"Filter the sequences by regular\n expression in ID or description\"\"\")\n seq_select.add_argument('--pattern-exclude', metavar='REGEX',\n action=partial_action(transform.name_exclude, 'filter_regex'),\n dest='transforms', help=\"\"\"Filter the sequences by regular\n expression in ID or description\"\"\")\n seq_select.add_argument('--prune-empty',\n action=partial_action(transform.prune_empty), dest='transforms',\n help=\"Prune sequences containing only gaps ('-')\")\n seq_select.add_argument('--sample', metavar='N', dest='transforms', type=int,\n action=partial_action(transform.sample, 'k'),\n help = \"\"\" Select a random sampling of sequences \"\"\")\n seq_select.add_argument('--sample-seed', metavar='N', type=int,\n help = \"\"\"Set random seed for sampling of sequences\"\"\")\n seq_select.add_argument('--seq-pattern-include', 
metavar='REGEX',\n action=partial_action(transform.seq_include, 'filter_regex'),\n dest='transforms', help=\"\"\"Filter the sequences by regular\n expression in sequence\"\"\")\n seq_select.add_argument('--seq-pattern-exclude', metavar='REGEX',\n action=partial_action(transform.seq_exclude, 'filter_regex'),\n dest='transforms', help=\"\"\"Filter the sequences by regular\n expression in sequence\"\"\")\n seq_select.add_argument('--tail', metavar='N', dest='transforms',\n action=partial_action(transform.tail, 'tail'),\n help=\"\"\"Trim down to bottom N sequences. Use +N to output sequences starting with the Nth.\"\"\")\n\n id_mods = parser.add_argument_group(\"Sequence ID Modification\")\n id_mods.add_argument('--first-name',\n action=partial_action(transform.first_name_capture),\n dest='transforms', help='''Take only the first whitespace-delimited\n word as the name of the sequence''')\n id_mods.add_argument('--name-suffix', metavar='SUFFIX',\n action=partial_action(transform.name_append_suffix, 'suffix'),\n dest='transforms', help='Append a suffix to all IDs.')\n id_mods.add_argument('--name-prefix', metavar='PREFIX',\n action=partial_action(transform.name_insert_prefix, 'prefix'),\n dest='transforms', help=\"\"\"Insert a prefix for all\n IDs.\"\"\")\n id_mods.add_argument('--pattern-replace', nargs=2,\n metavar=('search_pattern', 'replace_pattern'),\n action=partial_action(transform.name_replace, ('search_regex',\n 'replace_pattern')),\n dest='transforms', help=\"\"\"Replace regex pattern \"search_pattern\"\n with \"replace_pattern\" in sequence ID and description\"\"\")\n id_mods.add_argument('--strip-range', dest='transforms',\n action=partial_action(transform.strip_range), help=\"\"\"Strip ranges\n from sequences IDs, matching \"\"\")\n\n format_group = parser.add_argument_group('Format Options')\n format_group.add_argument('--input-format', metavar='FORMAT',\n help=\"Input file format (default: determine from extension)\")\n 
format_group.add_argument('--output-format', metavar='FORMAT',\n help=\"Output file format (default: determine from extension)\")\n\n parser.add_argument('--alphabet', choices=ALPHABETS,\n help=\"\"\"Input alphabet. Required for writing NEXUS.\"\"\")\n\n return parser"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef build_parser(parser):\n add_options(parser)\n parser.add_argument('source_file', type=common.FileType('rt'),\n help=\"Input sequence file\")\n parser.add_argument('dest_file', help=\"Output file\")\n\n return parser", "response": "Build the parser for the\n command line."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef module_function(string):\n parts = string.split(':', 2)\n if len(parts) < 2:\n raise ValueError(\n \"Illegal specification. Should be module:function[:parameter]\")\n module_path, function_name = parts[:2]\n\n # Import the module\n module_vars = {}\n exec(compile(open(module_path).read(), module_path, 'exec'), module_vars)\n\n try:\n function = module_vars[function_name]\n except KeyError:\n raise argparse.ArgumentTypeError(\"{0} has no attribute '{1}'\".format(\n module_path, function_name))\n\n if len(parts) == 3:\n old_function = function\n function = lambda r: old_function(r, parts[2])\n\n return function", "response": "Load a function from a python module using a file name function name"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef parse_cgmlst_alleles(cgmlst_fasta):\n out = defaultdict(list)\n for header, seq in parse_fasta(cgmlst_fasta):\n if not '|' in header:\n raise Exception('Unexpected format for cgMLST fasta file header. No \"|\" (pipe) delimiter present! 
Header=\"{}\"'.format(header))\n marker_name, allele_name = header.split('|')\n out[marker_name].append(seq)\n return out", "response": "Parse cgMLST alleles from cgMLST FASTA file."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nparses a fasta file returning a generator yielding tuples of header to sequences", "response": "def parse_fasta(filepath):\n '''\n Parse a fasta file returning a generator yielding tuples of fasta headers to sequences.\n\n Note:\n This function should give equivalent results to SeqIO from BioPython\n\n .. code-block:: python\n\n from Bio import SeqIO\n # biopython to dict of header-seq\n hseqs_bio = {r.description:str(r.seq) for r in SeqIO.parse(fasta_path, 'fasta')}\n # this func to dict of header-seq\n hseqs = {header:seq for header, seq in parse_fasta(fasta_path)}\n # both methods should return the same dict\n assert hseqs == hseqs_bio\n\n Args:\n filepath (str): Fasta file path\n\n Returns:\n generator: yields tuples of (, )\n '''\n with open(filepath, 'r') as f:\n seqs = []\n header = ''\n for line in f:\n line = line.strip()\n if line == '':\n continue\n if line[0] == '>':\n if header == '':\n header = line.replace('>','')\n else:\n yield header, ''.join(seqs)\n seqs = []\n header = line.replace('>','')\n else:\n seqs.append(line)\n yield header, ''.join(seqs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck that a file is valid FASTA format.", "response": "def fasta_format_check(fasta_path, logger):\n \"\"\"\n Check that a file is valid FASTA format.\n\n - First non-blank line needs to begin with a '>' header character.\n - Sequence can only contain valid IUPAC nucleotide characters\n\n Args:\n fasta_str (str): FASTA file contents string\n\n Raises:\n Exception: If invalid FASTA format\n \"\"\"\n\n header_count = 0\n line_count = 1\n nt_count = 0\n with open(fasta_path) as f:\n for l in f:\n l = l.strip()\n if l == '':\n continue\n if l[0] == '>':\n 
header_count += 1\n continue\n if header_count == 0 and l[0] != '>':\n error_msg = 'First non-blank line (L:{line_count}) does not contain FASTA header. Line beginning with \">\" expected.' \\\n .format(line_count=line_count)\n logger.error(error_msg)\n raise Exception(error_msg)\n non_nucleotide_chars_in_line = set(l) - VALID_NUCLEOTIDES\n\n if len(non_nucleotide_chars_in_line) > 0:\n error_msg = 'Line {line} contains the following non-nucleotide characters: {non_nt_chars}' \\\n .format(line=line_count,\n non_nt_chars=', '.join([x for x in non_nucleotide_chars_in_line]))\n logger.error(error_msg)\n raise Exception(error_msg)\n nt_count += len(l)\n line_count += 1\n\n if nt_count == 0:\n error_msg = 'File \"{}\" does not contain any nucleotide sequence.'.format(fasta_path)\n logger.error(error_msg)\n raise Exception(error_msg)\n\n logger.info('Valid FASTA format \"{}\" ({} bp)'.format(fasta_path, nt_count))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nconvert list of ACGT strings to matix of 1 - 4 ints", "response": "def seq_int_arr(seqs):\n \"\"\"Convert list of ACGT strings to matix of 1-4 ints\n\n Args:\n seqs (list of str): nucleotide sequences with only 'ACGT' characters\n\n Returns:\n numpy.array of int: matrix of integers from 1 to 4 inclusive representing A, C, G, and T\n str: nucleotide sequence string\n \"\"\"\n return np.array([[NT_TO_INT[c] for c in x.upper()] for x in seqs])"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngroups alleles by start and end indices", "response": "def group_alleles_by_start_end_Xbp(arr, bp=28):\n \"\"\"Group alleles by matching ends\n\n Args:\n arr (numpy.array): 2D int matrix of alleles\n bp (int): length of ends to group by\n\n Returns:\n dict of lists: key of start + end strings to list of indices of alleles with matching ends\n \"\"\"\n starts = arr[:,0:bp]\n ends = arr[:,-bp:]\n starts_ends_idxs = defaultdict(list)\n l, seq_len = arr.shape\n for i 
in range(l):\n start_i = starts[i]\n end_i = ends[i]\n start_i_str = ''.join([str(x) for x in start_i])\n end_i_str = ''.join([str(x) for x in end_i])\n starts_ends_idxs[start_i_str + end_i_str].append(i)\n return starts_ends_idxs"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nfinds the index of the row with the minimum row distance sum This should return the index of the row with the least distance sum to all other rows.", "response": "def min_row_dist_sum_idx(dists):\n \"\"\"Find the index of the row with the minimum row distance sum \n\n This should return the index of the row index with the least distance overall \n to all other rows. \n\n Args:\n dists (np.array): must be square distance matrix\n\n Returns:\n int: index of row with min dist row sum\n \"\"\"\n row_sums = np.apply_along_axis(arr=dists, axis=0, func1d=np.sum)\n return row_sums.argmin()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreduces list of alleles to set of centroid alleles based on size grouping ends matching and hierarchical clustering", "response": "def find_centroid_alleles(alleles, bp=28, t=0.025):\n \"\"\"Reduce list of alleles to set of centroid alleles based on size grouping, ends matching and hierarchical clustering\n\n Workflow for finding centroid alleles:\n\n - grouping by size (e.g. 
100bp, 101bp, 103bp, etc)\n - then grouped by `bp` nucleotides at ends matching\n - size and ends grouped alleles hierarchically clustered (Hamming distance, complete linkage)\n - tree cutting at threshold `t`\n - select allele with minimum distance to other alleles in cluster as centroid\n\n Args:\n alleles (iterable): collection of allele nucleotide sequences\n bp (int): number of bp matching at allele ends for size grouping (default=28 due to default blastn megablast word size)\n t (float): cluster generation (tree cutting) distance threshold for size grouped alleles\n\n Returns:\n set of str: centroid alleles\n \"\"\"\n centroid_alleles = set()\n len_allele = group_alleles_by_size(alleles)\n for length, seqs in len_allele.items():\n # if only one alelle of a particular size, add as centroid, move onto next size group\n if len(seqs) == 1:\n centroid_alleles.add(seqs[0])\n continue\n # convert allele nucleotide sequences to integer matrix\n seq_arr = seq_int_arr(seqs)\n # group alleles by matching ends\n starts_ends_idxs = group_alleles_by_start_end_Xbp(seq_arr, bp=bp)\n for k, idxs in starts_ends_idxs.items():\n # if only one allele for a particular matching ends group, then add as centroid and move onto next ends group\n if len(idxs) == 1:\n centroid_alleles.add(seqs[idxs[0]])\n continue\n # fetch subset of int allele sequences for a matching ends group\n seq_arr_subset = seq_arr[idxs]\n # Hamming distances between alleles\n dists = pdist(seq_arr_subset, 'hamming')\n # create flat clusters (tree cut) at t threshold\n cl = allele_clusters(dists, t=t)\n # for each allele cluster\n dm_sq = squareform(dists)\n for cl_key, cl_idxs in cl.items():\n # if only 1 or 2 alleles in cluster then return first\n if len(cl_idxs) == 1 or len(cl_idxs) == 2:\n # get first cluster index and get nt seq for that index\n centroid_alleles.add(seq_int_arr_to_nt(seq_arr_subset[cl_idxs[0]]))\n continue\n # else find allele with min distances to all other alleles in cluster\n dm_sub = 
dm_subset(dm_sq, cl_idxs)\n min_idx = min_row_dist_sum_idx(dm_sub)\n # add nucleotide seq for cluster centroid allele to centroids set\n centroid_alleles.add(seq_int_arr_to_nt(seq_arr_subset[min_idx]))\n #end for cl_key, cl_idxs in cl.iteritems():\n #end for k, idxs in starts_ends_idxs.iteritems():\n #end for length, seqs in alleles.iteritems():\n return centroid_alleles"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncomputing Mash distances of sketch file of genome fasta to RefSeq sketch DB.", "response": "def mash_dist_trusted(fasta_path):\n \"\"\"\n Compute Mash distances of sketch file of genome fasta to RefSeq sketch DB.\n\n Args:\n mash_bin (str): Mash binary path\n\n Returns:\n (str): Mash STDOUT string\n \"\"\"\n args = [MASH_BIN,\n 'dist',\n MASH_SKETCH_FILE,\n fasta_path]\n p = Popen(args, stderr=PIPE, stdout=PIPE)\n (stdout, stderr) = p.communicate()\n retcode = p.returncode\n if retcode != 0:\n raise Exception('Could not run Mash dist {}'.format(stderr))\n\n return stdout"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a condensed cgMLST pairwise distance matrix for the specified genomes.", "response": "def nr_profiles(arr, genomes):\n \"\"\"\n Get a condensed cgMLST pairwise distance matrix for specified Genomes_\n where condensed means redundant cgMLST profiles are only represented once in the distance matrix.\n\n Args:\n user_name (list): List of Genome_ names to retrieve condensed distance matrix for\n\n Returns:\n (numpy.array, list): tuple of condensed cgMLST distance matrix and list of grouped Genomes_\n \"\"\"\n gs_collapse = []\n genome_idx_dict = {}\n indices = []\n patt_dict = {}\n for i, g in enumerate(genomes):\n p = arr[i, :].tostring()\n if p in patt_dict:\n parent = patt_dict[p]\n idx = genome_idx_dict[parent]\n gs_collapse[idx].append(g)\n else:\n indices.append(i)\n patt_dict[p] = g\n genome_idx_dict[g] = len(gs_collapse)\n 
gs_collapse.append([g])\n return arr[indices, :], gs_collapse"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef overall_serovar_call(serovar_prediction, antigen_predictor):\n assert isinstance(serovar_prediction, SerovarPrediction)\n assert isinstance(antigen_predictor, SerovarPredictor)\n\n h1 = antigen_predictor.h1\n h2 = antigen_predictor.h2\n sg = antigen_predictor.serogroup\n spp = serovar_prediction.cgmlst_subspecies\n if spp is None:\n if 'mash_match' in serovar_prediction.__dict__:\n spp = serovar_prediction.__dict__['mash_subspecies']\n\n serovar_prediction.serovar_antigen = antigen_predictor.serovar\n\n cgmlst_serovar = serovar_prediction.serovar_cgmlst\n cgmlst_distance = float(serovar_prediction.cgmlst_distance)\n\n null_result = '-:-:-'\n\n\n try:\n spp_roman = spp_name_to_roman[spp]\n except:\n spp_roman = None\n\n is_antigen_null = lambda x: (x is None or x == '' or x == '-')\n\n\n if antigen_predictor.serovar is None:\n if is_antigen_null(sg) and is_antigen_null(h1) and is_antigen_null(h2):\n if spp_roman is not None:\n serovar_prediction.serovar = '{} {}:{}:{}'.format(spp_roman, sg, h1, h2)\n else:\n serovar_prediction.serovar = '{}:{}:{}'.format(spp_roman, sg, h1, h2)\n elif cgmlst_serovar is not None and cgmlst_distance <= CGMLST_DISTANCE_THRESHOLD:\n serovar_prediction.serovar = cgmlst_serovar\n else:\n serovar_prediction.serovar = null_result\n if 'mash_match' in serovar_prediction.__dict__:\n spd = serovar_prediction.__dict__\n mash_dist = float(spd['mash_distance'])\n if mash_dist <= MASH_DISTANCE_THRESHOLD:\n serovar_prediction.serovar = spd['mash_serovar']\n else:\n serovars_from_antigen = antigen_predictor.serovar.split('|')\n if not isinstance(serovars_from_antigen, list):\n serovars_from_antigen = [serovars_from_antigen]\n if cgmlst_serovar is not None:\n if cgmlst_serovar in serovars_from_antigen:\n serovar_prediction.serovar = cgmlst_serovar\n else:\n if 
float(cgmlst_distance) <= CGMLST_DISTANCE_THRESHOLD:\n serovar_prediction.serovar = cgmlst_serovar\n elif 'mash_match' in serovar_prediction.__dict__:\n spd = serovar_prediction.__dict__\n mash_serovar = spd['mash_serovar']\n mash_dist = float(spd['mash_distance'])\n if mash_serovar in serovars_from_antigen:\n serovar_prediction.serovar = mash_serovar\n else:\n if mash_dist <= MASH_DISTANCE_THRESHOLD:\n serovar_prediction.serovar = mash_serovar\n\n if serovar_prediction.serovar is None:\n serovar_prediction.serovar = serovar_prediction.serovar_antigen\n\n if serovar_prediction.h1 is None:\n serovar_prediction.h1 = '-'\n if serovar_prediction.h2 is None:\n serovar_prediction.h2 = '-'\n if serovar_prediction.serogroup is None:\n serovar_prediction.serogroup = '-'\n if serovar_prediction.serovar_antigen is None:\n if spp_roman is not None:\n serovar_prediction.serovar_antigen = '{} -:-:-'.format(spp_roman)\n else:\n serovar_prediction.serovar_antigen = '-:-:-'\n if serovar_prediction.serovar is None:\n serovar_prediction.serovar = serovar_prediction.serovar_antigen\n return serovar_prediction", "response": "This function is used to get the overall serovar from cgMLST cluster membership analysis and antigen BLAST results."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef process_cgmlst_results(df):\n assert isinstance(df, pd.DataFrame)\n markers = []\n alleles = []\n for x in df['qseqid']:\n marker, allele = x.split('|')\n markers.append(marker)\n alleles.append(int(allele))\n df.loc[:, 'marker'] = markers\n df.loc[:, 'allele'] = alleles\n df.loc[:, 'is_match'] = (df['coverage'] >= 1.0) & (df['pident'] >= 90.0) & ~(df['is_trunc'])\n df.loc[:, 'allele_name'] = df.apply(lambda x: allele_name(x.sseq.replace('-', '')), axis=1)\n df.loc[:, 'is_perfect'] = (df['coverage'] == 1.0) & (df['pident'] == 100.0)\n df_perf = df[df['is_perfect']]\n perf_markers = df_perf['marker'].unique()\n df.loc[:, 
'has_perfect_match'] = df['marker'].isin(perf_markers)\n start_idxs, end_idxs, needs_revcomps, trunc, is_extended = extend_subj_match_vec(df)\n df.loc[:, 'start_idx'] = start_idxs\n df.loc[:, 'end_idx'] = end_idxs\n df.loc[:, 'needs_revcomp'] = needs_revcomps\n df.loc[:, 'trunc'] = trunc\n df.loc[:, 'is_extended'] = is_extended\n df.loc[:, 'sseq_msa_gaps'] = np.zeros(df.shape[0], dtype=np.int64)\n df.loc[:, 'sseq_msa_p_gaps'] = np.zeros(df.shape[0], dtype=np.float64)\n df.loc[:, 'too_many_gaps'] = trunc\n\n return df", "response": "Append informative fields to cgMLST330 BLAST results DataFrame"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef alleles_to_retrieve(df):\n contig_blastn_records = defaultdict(list)\n markers = df.marker.unique()\n for m in markers:\n dfsub = df[df.marker == m]\n for i, r in dfsub.iterrows():\n if r.coverage < 1.0:\n contig_blastn_records[r.stitle].append(r)\n break\n return contig_blastn_records", "response": "Alleles to retrieve from genome fasta\n "} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nparse perfect BLAST matches to marker results dict.", "response": "def matches_to_marker_results(df):\n \"\"\"Perfect BLAST matches to marker results dict\n\n Parse perfect BLAST matches to marker results dict.\n\n\n Args:\n df (pandas.DataFrame): DataFrame of perfect BLAST matches\n\n Returns:\n dict: cgMLST330 marker names to matching allele numbers\n \"\"\"\n assert isinstance(df, pd.DataFrame)\n from collections import defaultdict\n d = defaultdict(list)\n for idx, row in df.iterrows():\n marker = row['marker']\n d[marker].append(row)\n\n marker_results = {}\n for k,v in d.items():\n if len(v) > 1:\n logging.debug('Multiple potential cgMLST allele matches (n=%s) found for marker %s. 
Selecting match on longest contig.', len(v), k)\n df_marker = pd.DataFrame(v)\n df_marker.sort_values('slen', ascending=False, inplace=True)\n for i,r in df_marker.iterrows():\n allele = r['allele_name']\n slen = r['slen']\n logging.debug('Selecting allele %s from contig with length %s', allele, slen)\n seq = r['sseq']\n if '-' in seq:\n logging.warning('Gaps found in allele. Removing gaps. %s', r)\n seq = seq.replace('-', '').upper()\n allele = allele_name(seq)\n marker_results[k] = allele_result_dict(allele, seq, r.to_dict())\n break\n elif len(v) == 1:\n row = v[0]\n seq = row['sseq']\n if '-' in seq:\n logging.warning('Gaps found in allele. Removing gaps. %s', row)\n seq = seq.replace('-', '').upper()\n allele = allele_name(seq)\n marker_results[k] = allele_result_dict(allele, seq, row.to_dict())\n else:\n err_msg = 'Empty list of matches for marker {}'.format(k)\n logging.error(err_msg)\n raise Exception(err_msg)\n return marker_results"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef cgmlst_subspecies_call(df_relatives):\n\n closest_distance = df_relatives['distance'].min()\n\n if closest_distance > CGMLST_SUBSPECIATION_DISTANCE_THRESHOLD:\n logging.warning('Min cgMLST distance (%s) above subspeciation distance threshold (%s)',\n closest_distance,\n CGMLST_SUBSPECIATION_DISTANCE_THRESHOLD)\n return None\n else:\n df_relatives = df_relatives.loc[df_relatives.distance <= CGMLST_SUBSPECIATION_DISTANCE_THRESHOLD, :]\n df_relatives = df_relatives.sort_values('distance', ascending=True)\n logging.debug('df_relatives by cgmlst %s', df_relatives.head())\n genome_spp = genomes_to_subspecies()\n subspecies_below_threshold = [genome_spp[member_genome] if member_genome in genome_spp else None for member_genome in df_relatives.index]\n subspecies_below_threshold = filter(None, subspecies_below_threshold)\n subspecies_counter = Counter(subspecies_below_threshold)\n logging.debug('Subspecies counter: %s', subspecies_counter)\n 
return (subspecies_counter.most_common(1)[0][0], closest_distance, dict(subspecies_counter))", "response": "This method attempts to find the majority subspecies type within a curated group by cgMLST and returns the most common subspecies type within the cgMLST group."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef run_cgmlst(blast_runner, full=False):\n from sistr.src.serovar_prediction.constants import genomes_to_serovar\n\n df_cgmlst_profiles = ref_cgmlst_profiles()\n\n logging.debug('{} distinct cgMLST330 profiles'.format(df_cgmlst_profiles.shape[0]))\n\n logging.info('Running BLAST on serovar predictive cgMLST330 alleles')\n cgmlst_fasta_path = CGMLST_CENTROID_FASTA_PATH if not full else CGMLST_FULL_FASTA_PATH\n blast_outfile = blast_runner.blast_against_query(cgmlst_fasta_path)\n logging.info('Reading BLAST output file \"{}\"'.format(blast_outfile))\n blast_reader = BlastReader(blast_outfile)\n if blast_reader.df is None:\n logging.error('No cgMLST330 alleles found!')\n return ({'distance': 1.0,\n 'genome_match': None,\n 'serovar': None,\n 'matching_alleles': 0,\n 'subspecies': None,\n 'cgmlst330_ST': None,},\n {}, )\n logging.info('Found {} cgMLST330 allele BLAST results'.format(blast_reader.df.shape[0]))\n\n\n df_cgmlst_blastn = process_cgmlst_results(blast_reader.df)\n\n marker_match_results = matches_to_marker_results(df_cgmlst_blastn[df_cgmlst_blastn.is_match])\n contig_blastn_records = alleles_to_retrieve(df_cgmlst_blastn)\n retrieved_marker_alleles = get_allele_sequences(blast_runner.fasta_path,\n contig_blastn_records,\n full=full)\n logging.info('Type retrieved_marker_alleles %s', type(retrieved_marker_alleles))\n all_marker_results = marker_match_results.copy()\n for marker, res in retrieved_marker_alleles.items():\n all_marker_results[marker] = res\n for marker in df_cgmlst_profiles.columns:\n if marker not in all_marker_results:\n all_marker_results[marker] = {'blast_result': None,\n 'name': None,\n 
'seq': None,}\n cgmlst_results = {}\n for marker, res in all_marker_results.items():\n try:\n cgmlst_results[marker] = int(res['name'])\n except:\n logging.error('Missing cgmlst_results for %s', marker)\n logging.debug(res)\n\n logging.info('Calculating number of matching alleles to serovar predictive cgMLST330 profiles')\n df_relatives = find_closest_related_genome(cgmlst_results, df_cgmlst_profiles)\n genome_serovar_dict = genomes_to_serovar()\n df_relatives['serovar'] = [genome_serovar_dict[genome] for genome in df_relatives.index]\n logging.debug('Top 5 serovar predictive cgMLST profiles:\\n{}'.format(df_relatives.head()))\n spp = None\n subspeciation_tuple = cgmlst_subspecies_call(df_relatives)\n if subspeciation_tuple is not None:\n spp, distance, spp_counter = subspeciation_tuple\n logging.info('Top subspecies by cgMLST is \"{}\" (min dist={}, Counter={})'.format(spp, distance, spp_counter))\n else:\n logging.warning('Subspeciation by cgMLST was not possible!')\n\n cgmlst_serovar = None\n cgmlst_matching_genome = None\n cgmlst_matching_alleles = 0\n cgmlst_distance = 1.0\n for idx, row in df_relatives.iterrows():\n cgmlst_distance = row['distance']\n cgmlst_matching_alleles = row['matching']\n cgmlst_serovar = row['serovar'] if cgmlst_distance <= 1.0 else None\n cgmlst_matching_genome = idx if cgmlst_distance <= 1.0 else None\n logging.info('Top serovar by cgMLST profile matching: \"{}\" with {} matching alleles, distance={:.1%}'.format(\n cgmlst_serovar,\n cgmlst_matching_alleles,\n cgmlst_distance\n ))\n break\n\n cgmlst_st = None\n cgmlst_markers_sorted = sorted(all_marker_results.keys())\n cgmlst_allele_names = []\n marker = None\n for marker in cgmlst_markers_sorted:\n try:\n aname = all_marker_results[marker]['name']\n if aname:\n cgmlst_allele_names.append(str(aname))\n else:\n break\n except:\n break\n if len(cgmlst_allele_names) == len(cgmlst_markers_sorted):\n cgmlst_st = allele_name('-'.join(cgmlst_allele_names))\n logging.info('cgMLST330 Sequence 
Type=%s', cgmlst_st)\n else:\n logging.warning('Could not compute cgMLST330 Sequence Type due to missing data (marker %s)', marker)\n return ({'distance': cgmlst_distance,\n 'genome_match': cgmlst_matching_genome,\n 'serovar': cgmlst_serovar,\n 'matching_alleles': cgmlst_matching_alleles,\n 'subspecies': spp,\n 'cgmlst330_ST': cgmlst_st,},\n all_marker_results, )", "response": "Perform in silico cgMLST on an input genome\n Returns a dictionary containing the result of the BLAST on success Return None if no results were found"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef genome_name_from_fasta_path(fasta_path):\n filename = os.path.basename(fasta_path)\n return re.sub(r'(\\.fa$)|(\\.fas$)|(\\.fasta$)|(\\.fna$)|(\\.\\w{1,}$)', '', filename)", "response": "Extract genome name from fasta file path"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef extend_subj_match_vec(df):\n needs_revcomp = df.sstart > df.send\n add_to_end = df.qlen - df.qend\n add_to_start = df.qstart - 1\n ssum2 = (df.send + df.sstart) / 2.0\n sabs2 = np.abs(df.send - df.sstart) / 2.0\n end_idx = ssum2 + sabs2 - 1\n start_idx = ssum2 - sabs2 - 1\n start_idx[needs_revcomp] -= add_to_end\n start_idx[~needs_revcomp] -= add_to_start\n end_idx[needs_revcomp] += add_to_start\n end_idx[~needs_revcomp] += add_to_end\n clipped_start_idx = np.clip(start_idx, 0, (df.slen - 1))\n clipped_end_idx = np.clip(end_idx, 0, (df.slen - 1))\n trunc = (clipped_start_idx != start_idx) | (clipped_end_idx != end_idx)\n is_extended = (add_to_start > 0) | (add_to_end > 0)\n return clipped_start_idx, clipped_end_idx, needs_revcomp, trunc, is_extended", "response": "This function returns the extended start and end indices of the extended sequence that is used to extract the subject sequence from the blastn results dataframe."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 
function doing\ndef listattrs(x):\n return [attr for attr in dir(x) if not attr.startswith(\"__\") and not callable(getattr(x, attr))]", "response": "Get all instance and class attributes of an object."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntransforms a nested object into a regular dict and pickle don t like to serialize", "response": "def to_dict(x, depth, exclude_keys=set(), depth_threshold=8):\n \"\"\"Transform a nested object/dict/list into a regular dict\n \n json.dump(s) and pickle don't like to un/serialize regular Python objects so\n this function should handle arbitrarily nested objects to be serialized to\n regular string, float, int, bool, None values.\n \n This is a recursive function so by default it will exit at a certain depth (depth_threshold=8).\n \n Args:\n x (object): Some object to dict-ify unless x is a scalar/literal then return x as is\n depth (int): Starting depth must be 0 (cannot supply default value due to weird Pythonisms)\n exclude_keys (set): Keys to avoid adding to the output dict\n depth_threshold (int): object/dict nesting depth to stop at\n \n Returns:\n dict: dict with only scalar/literal leaf values \n \"\"\"\n if x is None or isinstance(x, (str, int, float, bool)):\n return x\n if isinstance(x, np.int_):\n return int(x)\n if isinstance(x, np.int64):\n return int(x)\n if isinstance(x, np.float_):\n return float(x)\n if isinstance(x, np.float64):\n return float(x)\n if isinstance(x, np.bool_):\n return bool(x)\n if depth + 1 > depth_threshold: return {}\n if isinstance(x, list):\n out = []\n for v in x:\n tmp = to_dict(v, depth + 1, exclude_keys, depth_threshold)\n if tmp == {}: continue\n out.append(tmp)\n return out\n out = {}\n if isinstance(x, dict):\n for k, v in x.items():\n if k in exclude_keys: continue\n if not isinstance(k, (str,)):\n k = str(k)\n tmp = to_dict(v, depth + 1, exclude_keys, depth_threshold)\n if tmp == {}: continue\n out[k] = tmp\n return out\n for attr in 
listattrs(x):\n if attr in exclude_keys: continue\n v = getattr(x, attr)\n tmp = to_dict(v, depth + 1, exclude_keys, depth_threshold)\n if tmp == {}: continue\n out[attr] = tmp\n return out"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nflattening an arbitrarily nested dict as output by to_dict", "response": "def flatten_dict(x):\n \"\"\"Flatten a dict\n \n Flatten an arbitrarily nested dict as output by to_dict\n \n .. note:: \n \n Keys in the flattened dict may get very long. \n \n Args:\n x (dict): Arbitrarily nested dict (maybe resembling a tree) with literal/scalar leaf values\n \n Returns:\n dict: flattened 1D dict \n \"\"\"\n out = {}\n for k, v in x.items():\n out = _recur_flatten(k, v, out)\n return out"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef df_first_row_to_dict(df):\n if df is not None:\n return [dict(r) for i, r in df.head(1).iterrows()][0]", "response": "Returns a list of dict that looks like\n\n "} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef is_blast_result_trunc(qstart, qend, sstart, send, qlen, slen):\n q_match_len = abs(qstart - qend) + 1\n s_max = max(sstart, send)\n s_min = min(sstart, send)\n return (q_match_len < qlen) and (s_max >= slen or s_min <= 1)", "response": "Check if a query sequence is truncated by the end of a subject sequence"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef trunc(qstart, qend, sstart, send, qlen, slen):\n ssum2 = (send + sstart) / 2.0\n sabs2 = np.abs(send - sstart) / 2.0\n smax = ssum2 + sabs2\n smin = ssum2 - sabs2\n q_match_len = np.abs(qstart - qend) + 1\n return (q_match_len < qlen) & ((smax >= slen) | (smin <= 1))", "response": "Check if a query sequence is truncated by the end of a subject sequence"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain 
what it does\ndef perfect_matches(self):\n if self.is_missing:\n return None\n\n df_perfect_matches = self.df[(self.df['coverage'] == 1.0) & (self.df['pident'] == 100.0)]\n if df_perfect_matches.shape[0] == 0:\n return None\n return df_perfect_matches", "response": "Return pandas DataFrame with perfect BLAST matches"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef top_result(self):\n\n if self.is_missing:\n return None\n\n df_perfect_matches = self.df[(self.df['coverage'] == 1.0) & (self.df['pident'] == 100.0)]\n if df_perfect_matches.shape[0]:\n self.is_perfect_match = True\n return BlastReader.df_first_row_to_dict(df_perfect_matches)\n\n # Return the result with the highest bitscore.\n # This is the first result in dataframe since the df is ordered by\n # bitscore in descending order.\n result_dict = BlastReader.df_first_row_to_dict(self.df)\n result_trunc = BlastReader.is_blast_result_trunc(qstart=result_dict['qstart'],\n qend=result_dict['qend'],\n sstart=result_dict['sstart'],\n send=result_dict['send'],\n qlen=result_dict['qlen'],\n slen=result_dict['slen'])\n self.is_trunc = result_trunc\n return result_dict", "response": "Return the top result of the BLASTN analysis."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate a Mash sketch from an input fasta file.", "response": "def sketch_fasta(fasta_path, outdir):\n \"\"\"Create a Mash sketch from an input fasta file\n\n Args:\n fasta_path (str): input fasta file path. 
Genome name in fasta filename\n outdir (str): output directory path to write Mash sketch file to\n\n Returns:\n str: output Mash sketch file path\n \"\"\"\n genome_name = genome_name_from_fasta_path(fasta_path)\n outpath = os.path.join(outdir, genome_name)\n args = ['mash', 'sketch', '-o', outpath, fasta_path]\n logging.info('Running Mash sketch with command: %s', ' '.join(args))\n p = Popen(args)\n p.wait()\n sketch_path = outpath + '.msh'\n assert os.path.exists(sketch_path), 'Mash sketch for genome {} was not created at {}'.format(\n genome_name,\n sketch_path)\n return sketch_path"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nmerges new Mash sketches with current Mash sketches", "response": "def merge_sketches(outdir, sketch_paths):\n \"\"\"Merge new Mash sketches with current Mash sketches\n\n Args:\n outdir (str): output directory to write merged Mash sketch file\n sketch_paths (list of str): Mash sketch file paths for input fasta files\n\n Returns:\n str: output path for Mash sketch file with new and old sketches\n \"\"\"\n merge_sketch_path = os.path.join(outdir, 'sistr.msh')\n args = ['mash', 'paste', merge_sketch_path]\n for x in sketch_paths:\n args.append(x)\n args.append(MASH_SKETCH_FILE)\n logging.info('Running Mash paste with command: %s', ' '.join(args))\n p = Popen(args)\n p.wait()\n assert os.path.exists(merge_sketch_path), 'Merged sketch was not created at {}'.format(merge_sketch_path)\n return merge_sketch_path"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef put(self, key, data):\n try:\n return self._dstore.put(key, data)\n finally:\n self.cache.delete(key)", "response": "Implementation of :meth:`~simplekv.KeyValueStore.put`.\n\n Will store the value in the backing store. 
After a successful or\n unsuccessful store, the cache will be invalidated by deleting the key\n from it."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef put_file(self, key, file):\n try:\n return self._dstore.put_file(key, file)\n finally:\n self.cache.delete(key)", "response": "Implementation of :meth:`~simplekv.KeyValueStore.put_file`.\n\n Will store the value in the backing store. After a successful or\n unsuccessful store, the cache will be invalidated by deleting the key\n from it."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nwrites contents of key to file VatEntry", "response": "def get_file(self, key, file):\n \"\"\"Write contents of key to file\n\n Like :meth:`.KeyValueStore.put_file`, this method allows backends to\n implement a specialized function if data needs to be written to disk or\n streamed.\n\n If *file* is a string, contents of *key* are written to a newly\n created file with the filename *file*. Otherwise, the data will be\n written using the *write* method of *file*.\n\n :param key: The key to be read\n :param file: Output filename or an object with a *write* method.\n\n :raises exceptions.ValueError: If the key is not valid.\n :raises exceptions.IOError: If there was a problem reading or writing\n data.\n :raises exceptions.KeyError: If the key was not found.\n \"\"\"\n self._check_valid_key(key)\n if isinstance(file, str):\n return self._get_filename(key, file)\n else:\n return self._get_file(key, file)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nstoring data from a source file into a key on disk.", "response": "def put_file(self, key, file):\n \"\"\"Store into key from file on disk\n\n Stores data from a source into key. 
*file* can either be a string,\n which will be interpretet as a filename, or an object with a *read()*\n method.\n\n If the passed object has a *fileno()* method, it may be used to speed\n up the operation.\n\n The file specified by *file*, if it is a filename, may be removed in\n the process, to avoid copying if possible. If you need to make a copy,\n pass the opened file instead.\n\n :param key: The key under which the data is to be stored\n :param file: A filename or an object with a read method. If a filename,\n may be removed\n\n :returns: The key under which data was stored\n\n :raises exceptions.ValueError: If the key is not valid.\n :raises exceptions.IOError: If there was a problem moving the file in.\n \"\"\"\n # FIXME: shouldn't we call self._check_valid_key here?\n if isinstance(file, str):\n return self._put_filename(key, file)\n else:\n return self._put_file(key, file)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nchecks if a key is valid and raises a ValueError if it is not.", "response": "def _check_valid_key(self, key):\n \"\"\"Checks if a key is valid and raises a ValueError if its not.\n\n When in need of checking a key for validity, always use this\n method if possible.\n\n :param key: The key to be checked\n \"\"\"\n if not isinstance(key, key_type):\n raise ValueError('%r is not a valid key type' % key)\n if not VALID_KEY_RE.match(key):\n raise ValueError('%r contains illegal characters' % key)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _get_file(self, key, file):\n bufsize = 1024 * 1024\n\n # note: we do not use a context manager here or close the source.\n # the source goes out of scope shortly after, taking care of the issue\n # this allows us to support file-like objects without close as well,\n # such as BytesIO.\n source = self.open(key)\n try:\n while True:\n buf = source.read(bufsize)\n file.write(buf)\n\n if 
len(buf) < bufsize:\n break\n finally:\n source.close()", "response": "Write key to file - like object file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _get_filename(self, key, filename):\n with open(filename, 'wb') as dest:\n return self._get_file(key, dest)", "response": "Write key to file."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef put(self, key, data, ttl_secs=None):\n self._check_valid_key(key)\n if not isinstance(data, bytes):\n raise IOError(\"Provided data is not of type bytes\")\n return self._put(key, data, self._valid_ttl(ttl_secs))", "response": "Like : meth : ~simplekv. KeyValueStore. put but with an optional ttl_secs parameter."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nliking : meth : ~simplekv. KeyValueStore. put_file but with an optional ttl_secs parameter.", "response": "def put_file(self, key, file, ttl_secs=None):\n \"\"\"Like :meth:`~simplekv.KeyValueStore.put_file`, but with an\n additional parameter:\n\n :param ttl_secs: Number of seconds until the key expires. See above\n for valid values.\n :raises exceptions.ValueError: If ``ttl_secs`` is invalid.\n \"\"\"\n if ttl_secs is None:\n ttl_secs = self.default_ttl_secs\n\n self._check_valid_key(key)\n\n if isinstance(file, str):\n return self._put_filename(key, file, self._valid_ttl(ttl_secs))\n else:\n return self._put_file(key, file, self._valid_ttl(ttl_secs))"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncopies a key. The destination key is overwritten if does exist.", "response": "def copy(self, source, dest):\n \"\"\"Copies a key. 
The destination is overwritten if does exist.\n\n :param source: The source key to copy\n :param dest: The destination for the copy\n\n :returns: The destination key\n\n :raises: exceptions.ValueError: If the source or target key are not valid\n :raises: exceptions.KeyError: If the source key was not found\"\"\"\n self._check_valid_key(source)\n self._check_valid_key(dest)\n return self._copy(source, dest)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef open(self):\n try:\n if self.transport in ('http', 'https'):\n connection = pyeapi.client.connect(\n transport=self.transport,\n host=self.hostname,\n username=self.username,\n password=self.password,\n port=self.port,\n timeout=self.timeout\n )\n elif self.transport == 'socket':\n connection = pyeapi.client.connect(transport=self.transport)\n else:\n raise ConnectionException(\"Unknown transport: {}\".format(self.transport))\n\n if self.device is None:\n self.device = pyeapi.client.Node(connection, enablepwd=self.enablepwd)\n # does not raise an Exception if unusable\n\n # let's try to run a very simple command\n self.device.run_commands(['show clock'], encoding='text')\n except ConnectionError as ce:\n # and this is raised either if device not avaiable\n # either if HTTP(S) agent is not enabled\n # show management api http-commands\n raise ConnectionException(ce.message)", "response": "Implementation of NAPALM method open."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef compare_config(self):\n if self.config_session is None:\n return ''\n else:\n commands = ['show session-config named %s diffs' % self.config_session]\n result = self.device.run_commands(commands, encoding='text')[0]['output']\n\n result = '\\n'.join(result.splitlines()[2:])\n\n return result.strip()", "response": "Implementation of NAPALM method compare_config."} {"SOURCE": "codesearchnet", "instruction": "Given the following 
Python 3 function, write the documentation\ndef discard_config(self):\n if self.config_session is not None:\n commands = []\n commands.append('configure session {}'.format(self.config_session))\n commands.append('abort')\n self.device.run_commands(commands)\n self.config_session = None", "response": "Implementation of NAPALM method discard_config."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_snmp_information(self):\n\n # Default values\n snmp_dict = {\n 'chassis_id': '',\n 'location': '',\n 'contact': '',\n 'community': {}\n }\n\n commands = [\n 'show snmp chassis',\n 'show snmp location',\n 'show snmp contact'\n ]\n snmp_config = self.device.run_commands(commands, encoding='json')\n for line in snmp_config:\n for k, v in line.items():\n if k == 'chassisId':\n snmp_dict['chassis_id'] = v\n else:\n # Some EOS versions add extra quotes\n snmp_dict[k] = v.strip('\"')\n\n commands = ['show running-config | section snmp-server community']\n raw_snmp_config = self.device.run_commands(commands, encoding='text')[0].get('output', '')\n for line in raw_snmp_config.splitlines():\n match = self._RE_SNMP_COMM.search(line)\n if match:\n matches = match.groupdict('')\n snmp_dict['community'][match.group('community')] = {\n 'acl': py23_compat.text_type(matches['v4_acl']),\n 'mode': py23_compat.text_type(matches['access'])\n }\n\n return snmp_dict", "response": "Return the SNMP information for the current EOS version."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a json structure of BGP neighbors.", "response": "def get_bgp_neighbors_detail(self, neighbor_address=''):\n \"\"\"Implementation of get_bgp_neighbors_detail\"\"\"\n def _parse_per_peer_bgp_detail(peer_output):\n \"\"\"This function parses the raw data per peer and returns a\n json structure per peer.\n \"\"\"\n\n int_fields = ['local_as', 'remote_as',\n 'local_port', 'remote_port', 'local_port',\n 
'input_messages', 'output_messages', 'input_updates',\n 'output_updates', 'messages_queued_out', 'holdtime',\n 'configured_holdtime', 'keepalive',\n 'configured_keepalive', 'advertised_prefix_count',\n 'received_prefix_count']\n\n peer_details = []\n\n # Using preset template to extract peer info\n peer_info = (\n napalm_base.helpers.textfsm_extractor(\n self, 'bgp_detail', peer_output))\n\n for item in peer_info:\n\n # Determining a few other fields in the final peer_info\n item['up'] = (\n True if item['up'] == \"up\" else False)\n item['local_address_configured'] = (\n True if item['local_address'] else False)\n item['multihop'] = (\n False if item['multihop'] == 0 or\n item['multihop'] == '' else True)\n\n # TODO: The below fields need to be retrieved\n # Currently defaulting their values to False or 0\n item['multipath'] = False\n item['remove_private_as'] = False\n item['suppress_4byte_as'] = False\n item['local_as_prepend'] = False\n item['flap_count'] = 0\n item['active_prefix_count'] = 0\n item['suppressed_prefix_count'] = 0\n\n # Converting certain fields into int\n for key in int_fields:\n item[key] = napalm_base.helpers.convert(int, item[key], 0)\n\n # Conforming with the datatypes defined by the base class\n item['export_policy'] = (\n napalm_base.helpers.convert(\n py23_compat.text_type, item['export_policy']))\n item['last_event'] = (\n napalm_base.helpers.convert(\n py23_compat.text_type, item['last_event']))\n item['remote_address'] = napalm_base.helpers.ip(item['remote_address'])\n item['previous_connection_state'] = (\n napalm_base.helpers.convert(\n py23_compat.text_type, item['previous_connection_state']))\n item['import_policy'] = (\n napalm_base.helpers.convert(\n py23_compat.text_type, item['import_policy']))\n item['connection_state'] = (\n napalm_base.helpers.convert(\n py23_compat.text_type, item['connection_state']))\n item['routing_table'] = (\n napalm_base.helpers.convert(\n py23_compat.text_type, item['routing_table']))\n 
item['router_id'] = napalm_base.helpers.ip(item['router_id'])\n item['local_address'] = napalm_base.helpers.convert(\n napalm_base.helpers.ip, item['local_address'])\n\n peer_details.append(item)\n\n return peer_details\n\n def _append(bgp_dict, peer_info):\n\n remote_as = peer_info['remote_as']\n vrf_name = peer_info['routing_table']\n\n if vrf_name not in bgp_dict.keys():\n bgp_dict[vrf_name] = {}\n if remote_as not in bgp_dict[vrf_name].keys():\n bgp_dict[vrf_name][remote_as] = []\n\n bgp_dict[vrf_name][remote_as].append(peer_info)\n\n commands = []\n summary_commands = []\n if not neighbor_address:\n commands.append('show ip bgp neighbors vrf all')\n commands.append('show ipv6 bgp neighbors vrf all')\n summary_commands.append('show ip bgp summary vrf all')\n summary_commands.append('show ipv6 bgp summary vrf all')\n else:\n try:\n peer_ver = IPAddress(neighbor_address).version\n except Exception as e:\n raise e\n\n if peer_ver == 4:\n commands.append('show ip bgp neighbors %s vrf all' %\n neighbor_address)\n summary_commands.append('show ip bgp summary vrf all')\n elif peer_ver == 6:\n commands.append('show ipv6 bgp neighbors %s vrf all' %\n neighbor_address)\n summary_commands.append('show ipv6 bgp summary vrf all')\n\n raw_output = (\n self.device.run_commands(commands, encoding='text'))\n bgp_summary = (\n self.device.run_commands(summary_commands, encoding='json'))\n\n bgp_detail_info = {}\n\n v4_peer_info = []\n v6_peer_info = []\n\n if neighbor_address:\n peer_info = _parse_per_peer_bgp_detail(raw_output[0]['output'])\n\n if peer_ver == 4:\n v4_peer_info.append(peer_info[0])\n else:\n v6_peer_info.append(peer_info[0])\n\n else:\n # Using preset template to extract peer info\n v4_peer_info = _parse_per_peer_bgp_detail(raw_output[0]['output'])\n v6_peer_info = _parse_per_peer_bgp_detail(raw_output[1]['output'])\n\n for peer_info in v4_peer_info:\n\n vrf_name = peer_info['routing_table']\n peer_remote_addr = peer_info['remote_address']\n 
peer_info['accepted_prefix_count'] = (\n bgp_summary[0]['vrfs'][vrf_name]['peers'][peer_remote_addr]['prefixAccepted']\n if peer_remote_addr in bgp_summary[0]['vrfs'][vrf_name]['peers'].keys()\n else 0\n )\n\n _append(bgp_detail_info, peer_info)\n\n for peer_info in v6_peer_info:\n\n vrf_name = peer_info['routing_table']\n peer_remote_addr = peer_info['remote_address']\n peer_info['accepted_prefix_count'] = (\n bgp_summary[1]['vrfs'][vrf_name]['peers'][peer_remote_addr]['prefixAccepted']\n if peer_remote_addr in bgp_summary[1]['vrfs'][vrf_name]['peers'].keys()\n else 0\n )\n\n _append(bgp_detail_info, peer_info)\n\n return bgp_detail_info"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_config(self, retrieve=\"all\"):\n get_startup = retrieve == \"all\" or retrieve == \"startup\"\n get_running = retrieve == \"all\" or retrieve == \"running\"\n get_candidate = (retrieve == \"all\" or retrieve == \"candidate\") and self.config_session\n\n if retrieve == \"all\":\n commands = ['show startup-config',\n 'show running-config']\n\n if self.config_session:\n commands.append('show session-config named {}'.format(self.config_session))\n\n output = self.device.run_commands(commands, encoding=\"text\")\n return {\n 'startup': py23_compat.text_type(output[0]['output']) if get_startup else u\"\",\n 'running': py23_compat.text_type(output[1]['output']) if get_running else u\"\",\n 'candidate': py23_compat.text_type(output[2]['output']) if get_candidate else u\"\",\n }\n elif get_startup or get_running:\n commands = ['show {}-config'.format(retrieve)]\n output = self.device.run_commands(commands, encoding=\"text\")\n return {\n 'startup': py23_compat.text_type(output[0]['output']) if get_startup else u\"\",\n 'running': py23_compat.text_type(output[0]['output']) if get_running else u\"\",\n 'candidate': \"\",\n }\n elif get_candidate:\n commands = ['show session-config named {}'.format(self.config_session)]\n 
output = self.device.run_commands(commands, encoding=\"text\")\n return {\n 'startup': \"\",\n 'running': \"\",\n 'candidate': py23_compat.text_type(output[0]['output']),\n }\n elif retrieve == \"candidate\":\n # If we get here it means that we want the candidate but there is none.\n return {\n 'startup': \"\",\n 'running': \"\",\n 'candidate': \"\",\n }\n else:\n raise Exception(\"Wrong retrieve filter: {}\".format(retrieve))", "response": "get_config implementation for EOS."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef map_boto_exceptions(key=None, exc_pass=()):\n from boto.exception import BotoClientError, BotoServerError, \\\n StorageResponseError\n try:\n yield\n except StorageResponseError as e:\n if e.code == 'NoSuchKey':\n raise KeyError(key)\n raise IOError(str(e))\n except (BotoClientError, BotoServerError) as e:\n if e.__class__.__name__ not in exc_pass:\n raise IOError(str(e))", "response": "Map boto - specific exceptions to simplekv - API."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nchecking if a key is valid and raises a ValueError if it is not.", "response": "def _check_valid_key(self, key):\n \"\"\"Checks if a key is valid and raises a ValueError if its not.\n\n When in need of checking a key for validity, always use this\n method if possible.\n\n :param key: The key to be checked\n \"\"\"\n if not isinstance(key, key_type) and key is not None:\n raise ValueError('%r is not a valid key type' % key)\n if not VALID_KEY_RE_EXTENDED.match(key) or key == u'/':\n raise ValueError('%r contains illegal characters' % key)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _file_md5(file_):\n md5 = hashlib.md5()\n chunk_size = 128 * md5.block_size\n for chunk in iter(lambda: file_.read(chunk_size), b''):\n md5.update(chunk)\n file_.seek(0)\n byte_digest = md5.digest()\n return 
base64.b64encode(byte_digest).decode()", "response": "Compute the md5 digest of a file in base64 encoding."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncompute the md5 digest of a byte buffer in base64 encoding.", "response": "def _byte_buffer_md5(buffer_):\n \"\"\"\n Computes the md5 digest of a byte buffer in base64 encoding.\n \"\"\"\n md5 = hashlib.md5(buffer_)\n byte_digest = md5.digest()\n return base64.b64encode(byte_digest).decode()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef map_azure_exceptions(key=None, exc_pass=()):\n from azure.common import AzureMissingResourceHttpError, AzureHttpError,\\\n AzureException\n try:\n yield\n except AzureMissingResourceHttpError as ex:\n if ex.__class__.__name__ not in exc_pass:\n s = str(ex)\n if s.startswith(u\"The specified container does not exist.\"):\n raise IOError(s)\n raise KeyError(key)\n except AzureHttpError as ex:\n if ex.__class__.__name__ not in exc_pass:\n raise IOError(str(ex))\n except AzureException as ex:\n if ex.__class__.__name__ not in exc_pass:\n raise IOError(str(ex))", "response": "Map Azure - specific exceptions to the simplekv - API."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreads the next size bytes from the file.", "response": "def read(self, size=-1):\n \"\"\"Returns 'size' amount of bytes or less if there is no more data.\n If no size is given all data is returned. 
size can be >= 0.\"\"\"\n if self.closed:\n raise ValueError(\"I/O operation on closed file\")\n with map_azure_exceptions(key=self.key):\n if size < 0:\n size = self.size - self.pos\n\n end = min(self.pos + size - 1, self.size - 1)\n if self.pos > end:\n return b''\n b = self.block_blob_service.get_blob_to_bytes(\n container_name=self.container_name,\n blob_name=self.key,\n start_range=self.pos,\n end_range=end, # end_range is inclusive\n max_connections=self.max_connections,\n )\n self.pos += len(b.content)\n return b.content"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef seek(self, offset, whence=0):\n if self.closed:\n raise ValueError(\"I/O operation on closed file\")\n if whence == 0:\n if offset < 0:\n raise IOError('seek would move position outside the file')\n self.pos = offset\n elif whence == 1:\n if self.pos + offset < 0:\n raise IOError('seek would move position outside the file')\n self.pos += offset\n elif whence == 2:\n if self.size + offset < 0:\n raise IOError('seek would move position outside the file')\n self.pos = self.size + offset\n return self.pos", "response": "Move to a new offset either relative or absolute."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _on_tree(repo, tree, components, obj):\n\n # pattern-matching:\n if len(components) == 1:\n if isinstance(obj, Blob):\n mode = 0o100644\n elif isinstance(obj, Tree):\n mode = 0o040000\n elif obj is None:\n mode = None\n else:\n raise TypeError('Can only mount Blobs or Trees')\n name = components[0]\n\n if mode is not None:\n tree[name] = mode, obj.id\n return [tree]\n if name in tree:\n del tree[name]\n return [tree]\n elif len(components) > 1:\n a, bc = components[0], components[1:]\n if a in tree:\n a_tree = repo[tree[a][1]]\n if not isinstance(a_tree, Tree):\n a_tree = Tree()\n else:\n a_tree = Tree()\n res = _on_tree(repo, a_tree, bc, obj)\n a_tree_new = res[-1]\n\n if 
a_tree_new.items():\n tree[a] = 0o040000, a_tree_new.id\n return res + [tree]\n\n # tree is empty\n if a in tree:\n del tree[a]\n return [tree]\n else:\n raise ValueError('Components can\\'t be empty.')", "response": "Mounts an object on a tree using the given path components."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nbuild the Transgenic Line Classes.", "response": "def build_transgenic_lines(self):\n \"\"\"\n init class | \"transgenic_line_source_name\":\"stock_number\" a Class\n add superClass | rdfs:subClassOf ilxtr:transgenicLine\n add *order* | ilxtr:useObjectProperty ilxtr:\n add name | rdfs:label \"name\"\n add def | definition: \"description\"\n add transtype | rdfs:hasTransgenicType \"transgenic_line_type_name\"\n \"\"\"\n allen_namespaces = {\n 'JAX': 'http://jaxmice.jax.org/strain/',\n 'MMRRC': 'http://www.mmrrc.org/catalog/getSDS.jsp?mmrrc_id=',\n 'AIBS': 'http://api.brain-map.org/api/v2/data/TransgenicLine/',\n }\n for prefix, iri in allen_namespaces.items():\n self.g.add_namespace(prefix, iri)\n for cell_line in self.neuron_data[:]:\n for tl in cell_line['donor']['transgenic_lines']:\n _id = tl['stock_number'] if tl['stock_number'] else tl['id']\n prefix = tl['transgenic_line_source_name']\n line_type = tl['transgenic_line_type_name']\n if prefix not in ['JAX', 'MMRRC', 'AIBS']:\n continue\n _class = prefix + ':' + str(_id)\n self.g.add_class(_class)\n self.g.add_trip(_class, 'rdfs:label', tl['name'])\n self.g.add_trip(_class, 'definition:', tl['description'])\n self.g.add_trip(_class, 'rdfs:subClassOf', 'ilxtr:transgenicLine')\n self.g.add_trip(_class, 'ilxtr:hasTransgenicType', 'ilxtr:' + line_type + 'Line')\n self.g.write()"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef datagetter(cls):\n with open('myfile', 'rt') as f:\n rows = [r for r in csv.reader(f)]\n dothing = lambda _: [i for i, v in enumerate(_)]\n rows = [dothing(_) for _ 
in rows]\n raise NotImplementedError('You need to implement this yourlself!')\n return rows", "response": "example datagetter function make any local modifications here"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef dataproc(cls, graph, data):\n for thing in data:\n graph.add_trip(*thing)\n raise NotImplementedError('You need to implement this yourlself!')", "response": "example datagetter function make any local modifications here"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nconnects to ec2 resource.", "response": "def _connect(self):\n \"\"\"Connect to ec2 resource.\"\"\"\n resource = None\n try:\n resource = boto3.resource(\n 'ec2',\n aws_access_key_id=self.access_key_id,\n aws_secret_access_key=self.secret_access_key,\n region_name=self.region\n )\n # boto3 resource is lazy so attempt method to test connection\n resource.meta.client.describe_account_attributes()\n except Exception:\n raise EC2CloudException(\n 'Could not connect to region: %s' % self.region\n )\n return resource"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nretrieve instance matching instance_id.", "response": "def _get_instance(self):\n \"\"\"Retrieve instance matching instance_id.\"\"\"\n resource = self._connect()\n\n try:\n instance = resource.Instance(self.running_instance_id)\n except Exception:\n raise EC2CloudException(\n 'Instance with ID: {instance_id} not found.'.format(\n instance_id=self.running_instance_id\n )\n )\n return instance"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the state of the instance.", "response": "def _get_instance_state(self):\n \"\"\"\n Attempt to retrieve the state of the instance.\n Raises:\n EC2CloudException: If the instance cannot be found.\n \"\"\"\n instance = self._get_instance()\n state = None\n\n try:\n state = instance.state['Name']\n except Exception:\n raise EC2CloudException(\n 
'Instance with id: {instance_id}, '\n 'cannot be found.'.format(\n instance_id=self.running_instance_id\n )\n )\n\n return state"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns formatted bash script string.", "response": "def _get_user_data(self):\n \"\"\"\n Return formatted bash script string.\n\n The public ssh key is added by cloud init to the instance based on\n the ssh user and private key file.\n \"\"\"\n key = ipa_utils.generate_public_ssh_key(\n self.ssh_private_key_file\n ).decode()\n script = BASH_SSH_SCRIPT.format(user=self.ssh_user, key=key)\n return script"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _launch_instance(self):\n resource = self._connect()\n\n instance_name = ipa_utils.generate_instance_name('ec2-ipa-test')\n kwargs = {\n 'InstanceType': self.instance_type or EC2_DEFAULT_TYPE,\n 'ImageId': self.image_id,\n 'MaxCount': 1,\n 'MinCount': 1,\n 'TagSpecifications': [\n {\n 'ResourceType': 'instance',\n 'Tags': [\n {\n 'Key': 'Name',\n 'Value': instance_name\n }\n ]\n }\n ]\n }\n\n if self.zone:\n kwargs['Placement'] = {'AvailabilityZone': self.zone}\n\n if self.ssh_key_name:\n kwargs['KeyName'] = self.ssh_key_name\n else:\n kwargs['UserData'] = self._get_user_data()\n\n if self.subnet_id:\n kwargs['SubnetId'] = self.subnet_id\n\n if self.security_group_id:\n kwargs['SecurityGroupIds'] = [self.security_group_id]\n\n try:\n instances = resource.create_instances(**kwargs)\n except Exception as error:\n raise EC2CloudException(\n 'Unable to create instance: {0}.'.format(error)\n )\n\n self.running_instance_id = instances[0].instance_id\n self.logger.debug('ID of instance: %s' % self.running_instance_id)\n self._wait_on_instance('running', self.timeout)", "response": "Launch an instance of the given image."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef 
_set_instance_ip(self):\n instance = self._get_instance()\n\n # ipv6\n try:\n ipv6 = instance.network_interfaces[0].ipv6_addresses[0]\n except (IndexError, TypeError):\n ipv6 = None\n\n self.instance_ip = instance.public_ip_address or \\\n ipv6 or instance.private_ip_address\n\n if not self.instance_ip:\n raise EC2CloudException(\n 'IP address for instance cannot be found.'\n )", "response": "Retrieve instance ip and cache it."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ndetermine the init system of distribution.", "response": "def _set_init_system(self, client):\n \"\"\"Determine the init system of distribution.\"\"\"\n if not self.init_system:\n try:\n out = ipa_utils.execute_ssh_command(\n client,\n 'ps -p 1 -o comm='\n )\n except Exception as e:\n raise IpaDistroException(\n 'An error occurred while retrieving'\n ' the distro init system: %s' % e\n )\n if out:\n self.init_system = out.strip()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef install_package(self, client, package):\n install_cmd = \"{sudo} '{install} {package}'\".format(\n sudo=self.get_sudo_exec_wrapper(),\n install=self.get_install_cmd(),\n package=package\n )\n\n try:\n out = ipa_utils.execute_ssh_command(\n client,\n install_cmd\n )\n except Exception as error:\n raise IpaDistroException(\n 'An error occurred installing package {package} '\n 'on instance: {error}'.format(\n package=package,\n error=error\n )\n )\n else:\n return out", "response": "Install package on instance."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef reboot(self, client):\n self._set_init_system(client)\n\n reboot_cmd = \"{sudo} '{stop_ssh};{reboot}'\".format(\n sudo=self.get_sudo_exec_wrapper(),\n stop_ssh=self.get_stop_ssh_service_cmd(),\n reboot=self.get_reboot_cmd()\n )\n\n try:\n ipa_utils.execute_ssh_command(\n client,\n reboot_cmd\n )\n except Exception as error:\n 
raise IpaDistroException(\n 'An error occurred rebooting instance: %s' % error\n )\n ipa_utils.clear_cache()", "response": "Execute reboot command on instance."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nexecutes update command on instance.", "response": "def update(self, client):\n \"\"\"Execute update command on instance.\"\"\"\n update_cmd = \"{sudo} '{refresh};{update}'\".format(\n sudo=self.get_sudo_exec_wrapper(),\n refresh=self.get_refresh_repo_cmd(),\n update=self.get_update_cmd()\n )\n\n out = ''\n try:\n out = ipa_utils.execute_ssh_command(\n client,\n update_cmd\n )\n except Exception as error:\n raise IpaDistroException(\n 'An error occurred updating instance: %s' % error\n )\n return out"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nannotate text from the specified content with the specified categories.", "response": "def annotate(self, content, includeCat=None, excludeCat=None, minLength=None, longestOnly=None, includeAbbrev=None, includeAcronym=None, includeNumbers=None, output='text/plain; charset=utf-8'):\n \"\"\" Annotate text from: /annotations\n\n Arguments:\n content: The content to annotate\n includeCat: A set of categories to include\n excludeCat: A set of categories to exclude\n minLength: The minimum number of characters in annotated entities\n longestOnly: Should only the longest entity be returned for an overlapping group\n includeAbbrev: Should abbreviations be included\n includeAcronym: Should acronyms be included\n includeNumbers: Should numbers be included\n outputs:\n text/plain; charset=utf-8\n \"\"\"\n\n kwargs = {'content':content, 'includeCat':includeCat, 'excludeCat':excludeCat, 'minLength':minLength, 'longestOnly':longestOnly, 'includeAbbrev':includeAbbrev, 'includeAcronym':includeAcronym, 'includeNumbers':includeNumbers}\n kwargs = {k:dumps(v) if builtins.type(v) is dict else v for k, v in kwargs.items()}\n param_rest = self._make_rest(None, 
**kwargs)\n url = self._basePath + ('/annotations').format(**kwargs)\n requests_params = kwargs\n output = self._get('GET', url, requests_params, output)\n return output if output else None"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets all the nodes reachable from a starting point.", "response": "def reachableFrom(self, id, hint=None, relationships=None, lbls=None, callback=None, output='application/json'):\n \"\"\" Get all the nodes reachable from a starting point, traversing the provided edges. from: /graph/reachablefrom/{id}\n\n Arguments:\n id: The type of the edge\n hint: A label hint to find the start node.\n relationships: A list of relationships to traverse, in order. Supports cypher\n operations such as relA|relB or relA*.\n lbls: A list of node labels to filter.\n callback: Name of the JSONP callback ('fn' by default). Supplying this\n parameter or requesting a javascript media type will cause a\n JSONP response to be rendered.\n outputs:\n application/json\n application/graphson\n application/xml\n application/graphml+xml\n application/xgmml\n text/gml\n text/csv\n text/tab-separated-values\n image/jpeg\n image/png\n \"\"\"\n\n if id and id.startswith('http:'):\n id = parse.quote(id, safe='')\n kwargs = {'id':id, 'hint':hint, 'relationships':relationships, 'lbls':lbls, 'callback':callback}\n kwargs = {k:dumps(v) if builtins.type(v) is dict else v for k, v in kwargs.items()}\n param_rest = self._make_rest('id', **kwargs)\n url = self._basePath + ('/graph/reachablefrom/{id}').format(**kwargs)\n requests_params = {k:v for k, v in kwargs.items() if k != 'id'}\n output = self._get('GET', url, requests_params, output)\n return output if output else []"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef ordered(start, edges, predicate=None, inverse=False):\n s, o = 'sub', 'obj'\n if inverse:\n s, o = o, s\n for edge in edges:\n if 
predicate is not None and edge['pred'] != predicate:\n print('scoop!')\n continue\n\n if edge[s] == start:\n yield edge\n yield from Graph.ordered(edge[o], edges, predicate=predicate)", "response": "Iterate over the edges in a SciGraph response."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget len of transitive closure assume type items is tree...", "response": "def tcsort(item): # FIXME SUCH WOW SO INEFFICIENT O_O\n \"\"\" get len of transitive closure assume type items is tree... \"\"\"\n return len(item[1]) + sum(tcsort(kv) for kv in item[1].items())"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nfind a single node in the tree", "response": "def get_node(start, tree, pnames):\n \"\"\" for each parent find a single branch to root \"\"\"\n def get_first_branch(node):\n if node not in pnames: # one way to hit a root\n return []\n if pnames[node]: # mmmm names\n fp = pnames[node][0]\n if cycle_check(node, fp, pnames):\n fp = pnames[node][1] # if there are double cycles I WILL KILL FOR THE PLEASURE IF IT\n print(fp)\n return [fp] + get_first_branch(fp)\n else:\n return []\n\n branch = get_first_branch(start)\n\n for n in branch[::-1]:\n tree = tree[n]\n\n assert start in tree, \"our start wasnt in the tree! OH NO!\"\n branch = [start] + branch\n print('branch', branch)\n return tree, branch"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef dematerialize(parent_name, parent_node): # FIXME we need to demat more than just leaves!\n #FIXME still an issue: Fornix, Striatum, Diagonal Band\n \"\"\" Remove nodes higher in the tree that occur further down the\n SAME branch. If they occur down OTHER branchs leave them alone.\n\n NOTE: modifies in place!\n \"\"\"\n lleaves = {}\n children = parent_node[parent_name]\n\n if not children: # children could be empty ? 
i think this only happens @ root?\n #print('at bottom', parent_name)\n lleaves[parent_name] = None\n return lleaves\n\n children_ord = reversed(sorted(sorted(((k, v)\n for k, v in children.items()),\n key=alphasortkey),\n #key=lambda a: f'{a[0]}'.split('>')[1] if '>' in f'{a[0]}' else f'a[0]'),\n #key=lambda a: a[0].split('>') if '>' in a[0] else a[0]),\n key=tcsort)) # make sure we hit deepest first\n\n for child_name, _ in children_ord: # get list so we can go ahead and pop\n #print(child_name)\n new_lleaves = dematerialize(child_name, children)\n if child_name == 'magnetic resonance imaging': # debugging failing demat\n pass\n #embed()\n\n if child_name in new_lleaves or all(l in lleaves for l in new_lleaves):\n # if it is a leaf or all childs are leaves as well\n if child_name in lleaves: # if it has previously been identified as a leaf!\n #print('MATERIALIZATION DETECTED! LOWER PARENT:',\n #lleaves[child_name],'ZAPPING!:', child_name,\n #'OF PARENT:', parent_name)\n children.pop(child_name)\n #print('cn', child_name, 'pn', parent_name, 'BOTTOM')\n #else: # if it has NOT previously been identified as a leaf, add the parent!\n #new_lleaves[child_name] = parent_name # pass it back up to nodes above\n #print('cn', child_name, 'pn', parent_name)\n #else: # it is a node but we want to dematerizlize them too!\n lleaves[child_name] = parent_name\n\n lleaves.update(new_lleaves)\n\n return lleaves", "response": "This function dematerialize a tree from a parent node."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nswitches obj and sub for a set of edges", "response": "def inv_edges(json):\n \"\"\"Switch obj/sub for a set of edges (makes fixing known inverse edges MUCH easier)\"\"\"\n for edge in json['edges']:\n sub, obj = edge['sub'], edge['obj']\n edge['sub'] = obj\n edge['obj'] = sub\n\n edge['pred'] += 'INVERTED'"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the result for json 
file.", "response": "def collect_results(results_file):\n \"\"\"Return the result (pass/fail) for json file.\"\"\"\n with open(results_file, 'r') as results:\n data = json.load(results)\n return data"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_terms(self):\n ''' GROUP BY is a shortcut to only getting the first in every list of group '''\n if not self.terms.empty:\n return self.terms\n if self.from_backup:\n self.terms = open_pickle(TERMS_BACKUP_PATH)\n return self.terms\n engine = create_engine(self.db_url)\n data = \"\"\"\n SELECT t.id as tid, t.ilx, t.label, t.definition, t.type, t.comment, t.version\n FROM terms t\n GROUP BY t.ilx\n \"\"\"\n self.terms = pd.read_sql(data, engine)\n create_pickle(self.terms, TERMS_BACKUP_PATH)\n return self.terms", "response": "get the list of terms"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget complete entity data like term view", "response": "def get_terms_complete(self) -> pd.DataFrame:\n ''' Gets complete entity data like term/view '''\n if not self.terms_complete.empty:\n return self.terms_complete\n if self.from_backup:\n self.terms_complete = open_pickle(TERMS_COMPLETE_BACKUP_PATH)\n return self.terms_complete\n ilx2synonyms = self.get_ilx2synonyms()\n ilx2existing_ids = self.get_ilx2existing_ids()\n ilx2annotations = self.get_ilx2annotations()\n ilx2superclass = self.get_ilx2superclass()\n ilx_complete = []\n header = ['Index'] + list(self.fetch_terms().columns)\n for row in self.fetch_terms().itertuples():\n row = {header[i]:val for i, val in enumerate(row)}\n row['synonyms'] = ilx2synonyms.get(row['ilx'])\n row['existing_ids'] = ilx2existing_ids[row['ilx']] # if breaks we have worse problems\n row['annotations'] = ilx2annotations.get(row['ilx'])\n row['superclass'] = ilx2superclass.get(row['ilx'])\n ilx_complete.append(row)\n terms_complete = pd.DataFrame(ilx_complete)\n create_pickle(terms_complete, TERMS_COMPLETE_BACKUP_PATH)\n 
return terms_complete"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a dictionary mapping term_ilx to list of superclasses.", "response": "def get_ilx2superclass(self, clean:bool=True):\n ''' clean: for list of literals only '''\n ilx2superclass = defaultdict(list)\n header = ['Index'] + list(self.fetch_superclasses().columns)\n for row in self.fetch_superclasses().itertuples():\n row = {header[i]:val for i, val in enumerate(row)}\n if clean:\n superclass = {\n 'tid': row['superclass_tid'],\n 'ilx': row['superclass_ilx'],\n }\n ilx2superclass[row['term_ilx']].append(superclass)\n elif not clean:\n ilx2superclass[row['term_ilx']].append(row)\n return ilx2superclass"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_tid2annotations(self, clean:bool=True):\n ''' clean: for list of literals only '''\n tid2annotations = defaultdict(list)\n header = ['Index'] + list(self.fetch_annotations().columns)\n for row in self.fetch_annotations().itertuples():\n row = {header[i]:val for i, val in enumerate(row)}\n if clean:\n annotation = {\n 'tid': row['tid'],\n 'annotation_type_tid': row['annotation_type_tid'],\n 'value': row['value'],\n 'annotation_type_label': row['annotation_type_label'],\n }\n tid2annotations[row['tid']].append(annotation)\n elif not clean:\n tid2annotations[row['tid']].append(row)\n return tid2annotations", "response": "Returns a dictionary mapping tid to list of annotations."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a dictionary of the TID - > Synonym - > list of lists of literals only", "response": "def get_tid2synonyms(self, clean:bool=True):\n ''' clean: for list of literals only '''\n tid2synonyms = {}\n header = ['Index'] + list(self.fetch_synonyms().columns)\n for row in self.fetch_synonyms().itertuples():\n row = {header[i]:val for i, val in enumerate(row)}\n if clean:\n synonym = {'literal':row['literal'], 
'type':row['type']}\n tid2synonyms[row['tid']].append(synonym)\n elif not clean:\n tid2synonyms[row['tid']].append(row)\n return tid2synonyms"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_ilx2synonyms(self, clean:bool=True):\n ''' clean: for list of literals only '''\n ilx2synonyms = defaultdict(list)\n header = ['Index'] + list(self.fetch_synonyms().columns)\n for row in self.fetch_synonyms().itertuples():\n row = {header[i]:val for i, val in enumerate(row)}\n if clean:\n synonym = {'literal':row['literal'], 'type':row['type']}\n ilx2synonyms[row['ilx']].append(synonym)\n elif not clean:\n ilx2synonyms[row['ilx']].append(row)\n return ilx2synonyms", "response": "Returns a dictionary mapping ILX to Synonym objects."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nfixes superclass_tid in superclass_tid", "response": "def superclasses_bug_fix(data):\n ''' PHP returns \"id\" in superclass but only accepts superclass_tid '''\n for i, value in enumerate(data['superclasses']):\n data['superclasses'][i]['superclass_tid'] = data['superclasses'][i].pop('id')\n return data"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get(self, url):\n ''' Requests data from database '''\n req = r.get(url,\n headers = self.headers,\n auth = self.auth)\n return self.process_request(req)", "response": "Requests data from database"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngives data to database", "response": "def post(self, url, data):\n ''' Gives data to database '''\n data.update({'key': self.APIKEY})\n req = r.post(url,\n data = json.dumps(data),\n headers = self.headers,\n auth = self.auth)\n return self.process_request(req)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncheck to see if data returned from database is 
useable '''", "response": "def process_request(self, req):\n ''' Checks to see if data returned from database is useable '''\n # Check status code of request\n req.raise_for_status() # if codes not in 200s; error raise\n # Proper status code, but check if server returned a warning\n try:\n output = req.json()\n except:\n exit(req.text) # server returned html error\n # Try to find an error msg in the server response\n try:\n error = output['data'].get('errormsg')\n except:\n error = output.get('errormsg') # server has 2 variations of errormsg\n finally:\n if error:\n exit(error)\n return output"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget full meta data from an ILX ID", "response": "def get_data_from_ilx(self, ilx_id):\n ''' Gets full meta data (expect their annotations and relationships) from is ILX ID '''\n ilx_id = self.fix_ilx(ilx_id)\n url_base = self.base_path + \"ilx/search/identifier/{identifier}?key={APIKEY}\"\n url = url_base.format(identifier=ilx_id, APIKEY=self.APIKEY)\n output = self.get(url)\n # Can be a successful request, but not a successful response\n success = self.check_success(output)\n return output, success"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef search_by_label(self, label):\n ''' Server returns anything that is simlar in any catagory '''\n url_base = self.base_path + 'term/search/{term}?key={api_key}'\n url = url_base.format(term=label, api_key=self.APIKEY)\n return self.get(url)", "response": "Search for a single category by label"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef are_ilx(self, ilx_ids):\n ''' Checks list of objects to see if they are usable ILX IDs '''\n total_data = []\n for ilx_id in ilx_ids:\n ilx_id = ilx_id.replace('http', '').replace('.', '').replace('/', '')\n data, success = self.get_data_from_ilx(ilx_id)\n if success:\n 
total_data.append(data['data'])\n else:\n total_data.append({})\n return total_data", "response": "Checks list of objects to see if they are usable ILX IDs"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_triple(self, subj, pred, obj):\n ''' Adds an entity property to an existing entity '''\n subj_data, pred_data, obj_data = self.are_ilx([subj, pred, obj])\n # RELATIONSHIP PROPERTY\n if subj_data.get('id') and pred_data.get('id') and obj_data.get('id'):\n if pred_data['type'] != 'relationship':\n return self.test_check('Adding a relationship as formate \\\n \"term1_ilx relationship_ilx term2_ilx\"')\n return self.add_relationship(term1=subj_data,\n relationship=pred_data,\n term2=obj_data)\n # ANNOTATION PROPERTY\n elif subj_data.get('id') and pred_data.get('id'):\n if pred_data['type'] != 'annotation':\n return self.test_check('Adding a relationship as formate \\\n \"term_ilx annotation_ilx value\"')\n return self.add_annotation(entity=subj_data,\n annotation=pred_data,\n value=obj)\n # UPDATE ENTITY\n elif subj_data.get('id'):\n data = subj_data\n _pred = self.ttl2sci_map.get(pred)\n if not _pred:\n error = pred + \" doesnt not have correct RDF format or It is not an option\"\n return self.test_check(error)\n data = self.custom_update(data, _pred, obj)\n if data == 'failed': # for debugging custom_update\n return data\n data = superclasses_bug_fix(data)\n url_base = self.base_path + 'term/edit/{id}'\n url = url_base.format(id=data['id'])\n return self.post(url, data)\n else:\n return self.test_check('The ILX ID(s) provided do not exist')", "response": "Adds an entity property to an existing entity"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_relationship(self, term1, relationship, term2):\n ''' Creates a relationship between 3 entities in database '''\n url = self.base_path + 'term/add-relationship'\n data = {'term1_id': 
term1['id'],\n 'relationship_tid': relationship['id'],\n 'term2_id': term2['id'],\n 'term1_version': term1['version'],\n 'relationship_term_version': relationship['version'],\n 'term2_version': term2['version']}\n return self.post(url, data)", "response": "Creates a relationship between 3 entities in database"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds an annotation proprty to an existing entity", "response": "def add_annotation(self, entity, annotation, value):\n ''' Adds an annotation proprty to existing entity '''\n url = self.base_path + 'term/add-annotation'\n data = {'tid': entity['id'],\n 'annotation_tid': annotation['id'],\n 'value': value,\n 'term_version': entity['version'],\n 'annotation_term_version': annotation['version']}\n return self.post(url, data)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef custom_update(self, data, pred, obj):\n ''' Updates existing entity proprty based on the predicate input '''\n if isinstance(data[pred], str): # for all simple properties of str value\n data[pred] = str(obj)\n else: # synonyms, superclasses, and existing_ids have special requirements\n if pred == 'synonyms':\n literals = [d['literal'] for d in data[pred]]\n if obj not in literals:\n data[pred].append({'literal': obj}) # synonyms req for post\n elif pred == 'superclasses':\n ilx_ids = [d['ilx'] for d in data[pred]]\n if obj not in ilx_ids:\n _obj = obj.replace('ILX:', 'ilx_')\n super_data, success = self.get_data_from_ilx(ilx_id=_obj)\n super_data = super_data['data']\n if success:\n # superclass req post\n data[pred].append({'id': super_data['id'], 'ilx': _obj})\n else:\n return self.test_check('Your superclass ILX ID '\n + _obj + ' does not exist.')\n elif pred == 'existing_ids': # FIXME need to autogenerate curies from a map\n iris = [d['iri'] for d in data[pred]]\n if obj not in iris:\n if 'http' not in obj:\n return self.test_check('exisiting id value 
must \\\n be a uri containing \"http\"')\n data[pred].append({\n 'curie': self.qname(obj),\n 'iri': obj,\n 'preferred': '0' # preferred is auto generated by preferred_change\n })\n #data[pred] = []\n data = self.preferred_change(data) # One ex id is determined to be preferred\n else:\n # Somehow broke this code\n return self.test_check(pred + ' Has slipped through the cracks')\n return data", "response": "Custom update of existing entity proprty based on the predicate input"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds an entity to the database.", "response": "def add_entity(self, rdf_type, superclass, label, definition=None):\n ''' Adds entity as long as it doesn't exist and has a usable\n superclass ILX ID and rdf:type\n '''\n # Checks if you inputed the right type\n rdf_type = rdf_type.lower().strip().replace('owl:Class', 'term')\n accepted_types = ['owl:Class', 'term', 'cde', 'annotation', 'relationship', 'fde']\n if rdf_type not in accepted_types:\n error = 'rdf_type must be one of the following: {accepted_types}'\n return self.test_check(error.format(accepted_types=accepted_types))\n\n # Pulls superclass data out and checks if it exists\n superclass_data, success = self.get_data_from_ilx(ilx_id=superclass)\n superclass_data = superclass_data['data']\n if not success:\n error = '{superclass} is does not exist and cannot be used as a superclass.'\n return self.test_check(error.format(superclass=superclass))\n\n # Searchs database to see if the term exists. Will return anything similar,\n # but we want only what is_equal\n search_results = self.search_by_label(label)['data']\n search_results = [sr for sr in search_results\n if self.is_equal(sr['label'], label_bug_fix(label))]\n\n # If search_results is not empty, we need to see if the type and superclass are also a\n # match. If not, you can create this entity. HOWEVER. 
If you are the creator of an entity,\n # you can only have one label of any type or superclass\n if search_results:\n search_hits = 0\n for entity in search_results: # garunteed to only have one match if any\n entity, success = self.get_data_from_ilx(ilx_id = entity['ilx']) # all metadata\n entity = entity['data']\n user_url = 'https://scicrunch.org/api/1/user/info?key={api_key}'\n user_data = self.get(user_url.format(api_key=self.APIKEY))\n user_data = user_data['data']\n if str(entity['uid']) == str(user_data['id']): # creator check\n bp = 'Entity {label} already created by you with ILX ID {ilx_id} and of type {rdf_type}'\n return self.test_check(bp.format(label = label,\n ilx_id = entity['ilx'],\n rdf_type = entity['type']))\n types_equal = self.is_equal(entity['type'], rdf_type) # type check\n if 'superclasses' in entity and entity['superclasses']:\n entity_super_ilx = entity['superclasses'][0]['ilx']\n else:\n entity_super_ilx = ''\n supers_equal = self.is_equal(entity_super_ilx, superclass_data['ilx'])\n if types_equal and supers_equal:\n bp = 'Entity {label} already exisits with ILX ID {ilx_id} and of type {rdf_type}'\n return self.test_check(bp.format(label = label,\n ilx_id = self.fix_ilx(entity['ilx']),\n rdf_type = entity['type']))\n\n # Generates ILX ID and does a validation check\n url = self.base_path + 'ilx/add'\n data = {'term': label,\n 'superclasses': [{\n 'id': superclass_data['id'],\n 'ilx': superclass_data['ilx']}],\n 'type': rdf_type,}\n data = superclasses_bug_fix(data)\n output = self.post(url, data)['data']\n if output.get('ilx'):\n ilx_id = output['ilx']\n else:\n ilx_id = output['fragment'] # archetype of beta\n\n # Uses generated ILX ID to make a formal row in the database\n url = self.base_path + 'term/add'\n data = {'label': label.replace(''', \"'\").replace('"', '\"'),\n 'ilx': ilx_id,\n 'superclasses': [{\n 'id': superclass_data['id'],\n 'ilx': superclass_data['ilx']}],\n 'type': rdf_type}\n data = superclasses_bug_fix(data)\n if 
definition:\n data.update({'definition':definition})\n return self.post(url, data)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef sortProperties(self, properties): # modified to sort objects using their global rank\n # Sort object lists\n for prop, objects in properties.items():\n objects.sort(key=self._globalSortKey)\n\n # Make sorted list of properties\n return sorted(properties, key=lambda p: self.predicate_rank[p])", "response": "Take a hash from predicate uris to lists of values."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nbuild a hash key by predicate to a list of objects for the given subject.", "response": "def _buildPredicateHash(self, subject): # XXX unmodified\n \"\"\"\n Build a hash key by predicate to a list of objects for the given\n subject\n \"\"\"\n properties = {}\n for s, p, o in self.store.triples((subject, None, None)):\n oList = properties.get(p, [])\n oList.append(o)\n properties[p] = oList\n\n return properties"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncheck if a list is a valid RDF list.", "response": "def isValidList(self, l): # modified to flatten lists specified using [ a rdf:List; ] syntax\n \"\"\"\n Checks if l is a valid RDF list, i.e. 
no nodes have other properties.\n \"\"\"\n try:\n if self.store.value(l, RDF.first) is None:\n return False\n except:\n return False\n while l:\n if l != RDF.nil:\n po = list(self.store.predicate_objects(l))\n if (RDF.type, RDF.List) in po and len(po) == 3:\n pass\n elif len(po) != 2:\n return False\n l = self.store.value(l, RDF.rest)\n return True"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _write(self, value):\n if ' ' in value:\n s = inspect.stack()\n fn = s[1].function\n super().write('%%DEBUG {} %%'.format(fn))\n super().write(value)", "response": "rename to write and import inspect to debut the callstack"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nserializes the object to HTML.", "response": "def serialize(self, *args, **kwargs):\n \"\"\" Modified to allow additional labels to be passed in. \"\"\"\n if 'labels' in kwargs:\n # populate labels from outside the local graph\n self._labels.update(kwargs['labels'])\n super(HtmlTurtleSerializer, self).serialize(*args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nadd a local name for a phenotype from a pair of identifiers", "response": "def addLNT(LocalName, phenoId, predicate, g=None): # XXX deprecated\n \"\"\" Add a local name for a phenotype from a pair of identifiers \"\"\"\n if g is None:\n s = inspect.stack(0) # horribly inefficient\n checkCalledInside('LocalNameManager', s)\n g = s[1][0].f_locals # get globals of calling scope\n addLN(LocalName, Phenotype(phenoId, predicate), g)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef resetLocalNames(g=None):\n if g is None:\n g = inspect.stack(0)[1][0].f_locals # get globals of calling scope\n for k in list(graphBase.LocalNames.keys()):\n try:\n g.pop(k)\n except KeyError:\n raise KeyError('%s not in globals, are you calling resetLocalNames from a local 
scope?' % k)\n graphBase.LocalNames.pop(k)", "response": "Reset all local names in the current scope."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nadvancing usage allows loading multiple sets of neurons and using a config object to keep track of the different graphs", "response": "def load_existing(self):\n \"\"\" advanced usage allows loading multiple sets of neurons and using a config\n object to keep track of the different graphs \"\"\"\n from pyontutils.closed_namespaces import rdfs\n # bag existing\n\n try:\n next(iter(self.neurons()))\n raise self.ExistingNeuronsError('Existing neurons detected. Please '\n 'load from file before creating neurons!')\n except StopIteration:\n pass\n\n def getClassType(s):\n graph = self.load_graph\n Class = infixowl.Class(s, graph=graph)\n for ec in Class.equivalentClass:\n if isinstance(ec.identifier, rdflib.BNode):\n bc = infixowl.CastClass(ec, graph=graph)\n if isinstance(bc, infixowl.BooleanClass):\n for id_ in bc._rdfList:\n if isinstance(id_, rdflib.URIRef):\n yield id_ # its one of our types\n\n # bug is that I am not wiping graphBase.knownClasses and swapping it for each config\n # OR the bug is that self.load_graph is persisting, either way the call to type()\n # below seems to be the primary suspect for the issue\n if not graphBase.ignore_existing:\n ogp = Path(graphBase.ng.filename) # FIXME ng.filename <-> out_graph_path property ...\n if ogp.exists():\n from itertools import chain\n from rdflib import Graph # FIXME\n self.load_graph = Graph().parse(graphBase.ng.filename, format='turtle')\n graphBase.load_graph = self.load_graph\n # FIXME memory inefficiency here ...\n _ = [graphBase.in_graph.add(t) for t in graphBase.load_graph] # FIXME use conjuctive ...\n if len(graphBase.python_subclasses) == 2: # FIXME magic number for Neuron and NeuronCUT\n ebms = [type(OntId(s).suffix, (NeuronCUT,), dict(owlClass=s))\n for s in self.load_graph[:rdfs.subClassOf:NeuronEBM.owlClass]\n if not 
graphBase.knownClasses.append(s)]\n else:\n ebms = []\n\n class_types = [(type, s) for s in self.load_graph[:rdf.type:owl.Class]\n for type in getClassType(s) if type]\n sc = None\n for sc in chain(graphBase.python_subclasses, ebms):\n sc.owlClass\n iris = [s for type, s in class_types if type == sc.owlClass]\n if iris:\n sc._load_existing(iris)\n\n if sc is None:\n raise ImportError(f'Failed to find any neurons to load in {graphBase.ng.filename}')"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef configGraphIO(remote_base,\n local_base= None,\n branch= 'master',\n core_graph_paths= tuple(),\n core_graph= None,\n in_graph_paths= tuple(),\n out_graph_path= None,\n out_imports= tuple(),\n out_graph= None,\n prefixes= tuple(),\n force_remote= False,\n checkout_ok= ont_checkout_ok,\n scigraph= None,\n iri= None,\n sources= tuple(),\n source_file= None,\n use_local_import_paths=True,\n compiled_location= (PPath('/tmp/neurondm/compiled')\n if working_dir is None else\n PPath(working_dir, 'neurondm/neurondm/compiled')),\n ignore_existing= False,\n local_conventions= False,):\n # FIXME suffixes seem like a bad way to have done this :/\n \"\"\" We set this up to work this way because we can't\n instantiate graphBase, it is a super class that needs\n to be configurable and it needs to do so globally.\n All the default values here are examples and not real.\n You should write a local `def config` function as part\n of your local setup that replicates that arguments of\n configureGraphIO.\n\n Example:\n def config(remote_base= 'http://someurl.org/remote/ontology/',\n local_base= '/home/user/git/ontology/',\n branch= 'master',\n core_graph_paths= ['local/path/localCore.ttl',\n 'local/path/localClasses.ttl'],\n core_graph= None,\n in_graph_paths= tuple(),\n out_graph_path= '/tmp/outputGraph.ttl',\n out_imports= ['local/path/localCore.ttl'],\n out_graph= None,\n prefixes= {'hello':'http://world.org/'}\n 
force_remote= False,\n checkout_ok= False,\n scigraph= 'http://scigraph.mydomain.org:9000/scigraph'):\n graphBase.configGraphIO(remote_base, local_base, branch,\n core_graph_paths, core_graph,\n in_graph_paths,\n out_graph_path, out_imports, out_graph,\n force_remote, checkout_ok, scigraph)\n\n \"\"\"\n\n graphBase.local_conventions = local_conventions\n\n if local_base is None:\n local_base = devconfig.ontology_local_repo\n graphBase.local_base = Path(local_base).expanduser().resolve()\n graphBase.remote_base = remote_base\n\n def makeLocalRemote(suffixes):\n remote = [os.path.join(graphBase.remote_base, branch, s)\n if '://' not in s else # 'remote' is file:// or http[s]://\n s for s in suffixes]\n # TODO the whole thing needs to be reworked to not use suffixes...\n local = [(graphBase.local_base / s).as_posix()\n if '://' not in s else\n ((graphBase.local_base / s.replace(graphBase.remote_base, '').strip('/')).as_uri()\n if graphBase.remote_base in s else s) # FIXME this breaks the semanics of local?\n for s in suffixes]\n return remote, local\n\n # file location setup\n remote_core_paths, local_core_paths = makeLocalRemote(core_graph_paths)\n remote_in_paths, local_in_paths = makeLocalRemote(in_graph_paths)\n remote_out_imports, local_out_imports = makeLocalRemote(out_imports)\n\n out_graph_paths = [out_graph_path]\n remote_out_paths, local_out_paths = makeLocalRemote(out_graph_paths) # XXX fail w/ tmp\n remote_out_paths = local_out_paths # can't write to a remote server without magic\n\n if (not force_remote\n and graphBase.local_base == Path(devconfig.ontology_local_repo)\n and graphBase.local_base.exists()):\n\n repo = Repo(graphBase.local_base.as_posix())\n if repo.active_branch.name != branch and not checkout_ok:\n raise graphBase.GitRepoOnWrongBranch(\n 'Local git repo not on %s branch!\\n'\n 'Please run `git checkout %s` in %s, '\n 'set NIFSTD_CHECKOUT_OK= via export or '\n 'at runtime, or set checkout_ok=True.'\n % (branch, branch, repo.working_dir))\n 
elif checkout_ok:\n graphBase.repo = repo\n graphBase.working_branch = next(h for h in repo.heads\n if h.name == branch)\n graphBase.original_branch = repo.active_branch\n graphBase.set_repo_state()\n use_core_paths = local_core_paths\n use_in_paths = local_in_paths\n else:\n if not force_remote and not graphBase.local_base.exists():\n log.warning(f'Warning local ontology path {local_base!r} not found!')\n use_core_paths = remote_core_paths\n use_in_paths = remote_in_paths\n\n if local_base is not None:\n log.warning(f'Warning local base has been set manually you are on your own!')\n try:\n repo = Repo(graphBase.local_base.as_posix())\n except (git.exc.InvalidGitRepositoryError, git.exc.NoSuchPathError) as e:\n local_working_dir = get_working_dir(graphBase.local_base)\n if local_working_dir is None:\n raise e\n else:\n msg = (f'{graphBase.local_base} is already contained in a git repository '\n 'located in {local_working_dir} if you wish to use this repo please '\n 'set local_base to {local_working_dir}.')\n raise git.exc.InvalidGitRepositoryError(msg) from e\n\n graphBase.repo = repo\n # FIXME repo init when branch set still an issue\n # ideally remove _all_ of this code though because WOW\n # it is a mess\n graphBase.set_repo_state()\n\n # core graph setup\n if core_graph is None:\n core_graph = rdflib.ConjunctiveGraph()\n for cg in use_core_paths:\n try:\n core_graph.parse(cg, format='turtle')\n except (FileNotFoundError, HTTPError) as e:\n # TODO failover to local if we were remote?\n #print(tc.red('WARNING:'), f'no file found for core graph at {cg}')\n log.warning(f'no file found for core graph at {cg}')\n graphBase.core_graph = core_graph\n if RDFL not in [type(s) for s in OntTerm.query.services]:\n # FIXME ah subtle differences between graphs >_<\n # need a much more consistent way to handle the local graphs\n # switching everything out for a single RDFL instance seems\n # the most attractive ...\n OntTerm.query.ladd(RDFL(core_graph, OntId)) # ladd for 
higher priority\n\n # store prefixes\n if isinstance(prefixes, dict):\n graphBase.prefixes = prefixes\n else:\n graphBase.prefixes = makePrefixes(*prefixes)\n\n PREFIXES = {**graphBase.prefixes, **uPREFIXES}\n OntCuries(PREFIXES)\n\n # input graph setup\n in_graph = core_graph\n for ig in use_in_paths:\n in_graph.parse(ig, format='turtle')\n\n nin_graph = makeGraph('', prefixes=PREFIXES, graph=in_graph)\n graphBase.in_graph = in_graph\n graphBase.ignore_existing = ignore_existing\n\n # output graph setup\n if out_graph is None:\n _sources = sources\n _source_file = source_file\n class NeurOnt(Ont): # FIXME this is super misleading wrt the source ...\n path = 'ttl/generated/neurons/'\n #filename = 'to-be-set-later'\n prefixes = PREFIXES\n sources = _sources\n source_file = _source_file\n # FIXME temp fix for issue with wgb in core\n #wasGeneratedBy = ('https://github.com/tgbugs/pyontutils/blob/'\n #'{commit}/'\n #'{filepath}'\n #'{hash_L_line}')\n\n no = NeurOnt()\n out_graph = no.graph\n graphBase.ng = no._graph\n\n #out_graph = rdflib.Graph()\n # in thise case we also want to wipe any existing python Neuron entires\n # that we use to serialize so that behavior is consistent\n NeuronBase.existing_pes = {}\n NeuronBase.existing_ids = {}\n else:\n no = None\n graphBase.ng = makeGraph('', prefixes=PREFIXES, graph=out_graph)\n #new_graph = makeGraph('', prefixes=PREFIXES, graph=out_graph)\n graphBase.out_graph = out_graph\n\n # python output setup\n graphBase.compiled_location = compiled_location\n\n # makeGraph setup\n new_graph = graphBase.ng #= new_graph\n new_graph.filename = out_graph_path\n\n if iri is not None:\n ontid = rdflib.URIRef(iri)\n else:\n ontid = rdflib.URIRef('file://' + out_graph_path) # do not use Path().absolute() it will leak\n\n if use_local_import_paths:\n new_graph.add_trip(ontid, rdf.type, owl.Ontology)\n for local_out_import in local_out_imports: # TODO flip switch between local and remote import behavior\n new_graph.add_trip(ontid, 
owl.imports, rdflib.URIRef(local_out_import)) # core should be in the import closure\n else:\n new_graph.add_trip(ontid, rdf.type, owl.Ontology)\n for remote_out_import in remote_out_imports: # TODO flip switch between local and remote import behavior\n new_graph.add_trip(ontid, owl.imports, rdflib.URIRef(remote_out_import)) # core should be in the import closure\n\n if no is not None:\n no() # populate generated by info\n\n # set predicates\n graphBase._predicates = getPhenotypePredicates(graphBase.core_graph)\n\n # scigraph setup\n if scigraph is not None:\n graphBase._sgv = Vocabulary(cache=True, basePath=scigraph)\n else:\n graphBase._sgv = Vocabulary(cache=True)", "response": "This function is a simple function that creates a graphIO class that can be used to configure a graphIO."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a python header for the class.", "response": "def python_header(cls):\n out = '#!/usr/bin/env python3.6\\n'\n out += f'from {cls.__import_name__} import *\\n\\n'\n\n all_types = set(type(n) for n in cls.neurons())\n _subs = [inspect.getsource(c) for c in subclasses(Neuron)\n if c in all_types and Path(inspect.getfile(c)).exists()]\n subs = '\\n' + '\\n\\n'.join(_subs) + '\\n\\n' if _subs else ''\n #log.debug(str(all_types))\n #log.debug(f'python header for {cls.filename_python()}:\\n{subs}')\n out += subs\n\n ind = '\\n' + ' ' * len('config = Config(')\n _prefixes = {k:str(v) for k, v in cls.ng.namespaces.items()\n if k not in uPREFIXES and k != 'xml' and k != 'xsd'} # FIXME don't hardcode xml xsd\n len_thing = len(f'config = Config({cls.ng.name!r}, prefixes={{')\n '}}'\n prefixes = (f',{ind}prefixes={pformat(_prefixes, 0)}'.replace('\\n', '\\n' + ' ' * len_thing)\n if _prefixes\n else '')\n\n tel = ttl_export_dir = Path(cls.ng.filename).parent.as_posix()\n ttlexp = f',{ind}ttl_export_dir={tel!r}'\n\n # FIXME prefixes should be separate so they are accessible in the namespace\n # FIXME ilxtr needs to be 
detected as well\n # FIXME this doesn't trigger when run as an import?\n out += f'config = Config({cls.ng.name!r},{ind}file=__file__{ttlexp}{prefixes})\\n\\n' # FIXME this is from neurons.lang\n\n return out"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding the equivalent class of this neuron with the set of other neurons", "response": "def equivalentClass(self, *others):\n \"\"\" as implemented this acts as a permenant bag union operator\n and therefore should be used with extreme caution since\n in any given context the computed label/identifier will\n no longer reflect the entailed/reasoned bag\n\n In a static context this means that we might want to have\n an ilxtr:assertedAlwaysImplies -> bag union neuron\n \"\"\"\n # FIXME this makes the data model mutable!\n # TODO symmetry here\n\n # serious modelling issue\n # If you make two bags equivalent to eachother\n # then in the set theory model it becomes impossible\n # to have a set that is _just_ one or the other of the bags\n # which I do not think that we want\n for other in others:\n if isinstance(other, self.__class__):\n #if isinstance(other, NegPhenotype): # FIXME maybe this is the issue with neg equivs?\n otherid = other.id_\n else:\n otherid = other\n\n self.out_graph.add((self.id_, owl.equivalentClass, otherid))\n self._equivalent_bags_ids.add(otherid)\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a labelMaker object for this graphBase", "response": "def label_maker(self):\n \"\"\" needed to defer loading of local conventions to avoid circular dependency issue \"\"\"\n if (not hasattr(graphBase, '_label_maker') or\n graphBase._label_maker.local_conventions != graphBase.local_conventions):\n graphBase._label_maker = LabelMaker(graphBase.local_conventions)\n\n return graphBase._label_maker"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nlifts phenotypeEdges to 
Restrictions", "response": "def _graphify(self, *args, graph=None): # defined\n \"\"\" Lift phenotypeEdges to Restrictions \"\"\"\n if graph is None:\n graph = self.out_graph\n\n ################## LABELS ARE DEFINED HERE ##################\n gl = self.genLabel\n ll = self.localLabel\n ol = self.origLabel\n graph.add((self.id_, ilxtr.genLabel, rdflib.Literal(gl)))\n if ll != gl:\n graph.add((self.id_, ilxtr.localLabel, rdflib.Literal(ll)))\n\n if ol and ol != gl:\n graph.add((self.id_, ilxtr.origLabel, rdflib.Literal(ol)))\n\n members = [self.expand(self.owlClass)]\n for pe in self.pes:\n target = pe._graphify(graph=graph)\n if isinstance(pe, NegPhenotype): # isinstance will match NegPhenotype -> Phenotype\n #self.Class.disjointWith = [target] # FIXME for defined neurons this is what we need and I think it is strong than the complementOf version\n djc = infixowl.Class(graph=graph) # TODO for generic neurons this is what we need\n djc.complementOf = target\n members.append(djc)\n else:\n members.append(target) # FIXME negative logical phenotypes :/\n intersection = infixowl.BooleanClass(members=members, graph=graph) # FIXME dupes\n #existing = list(self.Class.equivalentClass)\n #if existing or str(pe.pLabel) == 'Htr3a':\n #embed()\n ec = [intersection]\n self.Class.equivalentClass = ec\n return self.Class"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrun the build server and returns the current version of the current node.", "response": "def run(args):\n if args['--debug']:\n print(args)\n # ignoring bamboo sequecing for the moment...\n # check the build server to see if we have built the latest (or the specified commit)\n # if yes just scp those to services\n # if no check the web endpoint to see if latest matches build\n # if yes fetch\n # else build (push to web endpoint)\n\n 'localhost:~/files/ontology-packages/scigraph/'\n 'tom@orpheus:~/files/ontology-packages/graph/'\n #b = Builder(build_host, services_host, build_user, 
services_user,\n #graph_latest_url, scigraph_latest_url,\n #scp, sscp, git_local, repo_name,\n #services_folder, graph_folder, services_config_folder\n #)\n\n kwargs = {k.strip('--').strip('<').rstrip('>').replace('-','_'):v for k, v in args.items()}\n b = Builder(args, **kwargs)\n if b.mode is not None:\n code = b.run()\n else:\n if args['--view-defaults']:\n for k, v in combined_defaults.items():\n print(f'{k:<22} {v}')\n return\n\n if b.debug:\n FILE = '/tmp/test.sh'\n with open(FILE, 'wt') as f:\n f.write('#!/usr/bin/env bash\\n' + code)\n os.system(f\"emacs -batch {FILE} --eval '(indent-region (point-min) (point-max) nil)' -f save-buffer\")\n print()\n with open(FILE, 'rt') as f:\n print(f.read())\n #embed()\n if b.local:\n return\n #if b.check_built:\n #return\n else:\n print(code)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef runOnExecutor(self, *commands, oper=ACCEPT, defer_shell_expansion=False):\n return self.makeOutput(EXE, commands, oper=oper, defer_shell_expansion=defer_shell_expansion)", "response": "This method runs the given commands on the current executor."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef build(self, mode=None, check=False):\n kwargs = {}\n if not self.build_only: # don't try to deploy twice\n kwargs[' --build-only'] = True\n if not self.local:\n kwargs['--local'] = True\n if check:\n kwargs['--check-built'] = True\n remote_args = self.formatted_args(self.args, mode, **kwargs)\n cmds = tuple() if self.check_built or self._updated else self.cmds_pyontutils()\n if not self._updated:\n self._updated = True\n return self.runOnBuild(*cmds,\n f'scigraph-deploy {remote_args}',\n defer_shell_expansion=True,\n oper=AND)", "response": "Shuffle the current call off to the build server with the specified mode."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef 
swanson():\n\n source = Path(devconfig.resources, 'swanson_aligned.txt').as_posix()\n ONT_PATH = 'http://ontology.neuinfo.org/NIF/ttl/generated/'\n filename = 'swanson_hierarchies'\n ontid = ONT_PATH + filename + '.ttl'\n PREFIXES = SwansonLabels.prefixes\n new_graph = makeGraph(filename, PREFIXES, writeloc='/tmp/')\n new_graph.add_ont(ontid,\n 'Swanson brain partomies',\n 'Swanson 2014 Partonomies',\n 'This file is automatically generated from ' + source + '.' + '**FIXME**',\n 'now')\n\n # FIXME citations should really go on the ... anatomy? scheme artifact\n definingCitation = 'Swanson, Larry W. Neuroanatomical Terminology: a lexicon of classical origins and historical foundations. Oxford University Press, USA, 2014.'\n definingCitationID = 'ISBN:9780195340624'\n new_graph.add_trip(ontid, 'NIFRID:definingCitation', definingCitation)\n new_graph.add_trip(ontid, 'NIFRID:definingCitationID', definingCitationID)\n\n with open(source, 'rt') as f:\n lines = [l.strip() for l in f.readlines()]\n\n # join header on page 794\n lines[635] += ' ' + lines.pop(636)\n #fix for capitalization since this header is reused\n fixed = ' or '.join([' ('.join([n.capitalize() for n in _.split(' (')]) for _ in lines[635].lower().split(' or ')]).replace('human','HUMAN')\n lines[635] = fixed\n\n data = []\n for l in lines:\n if not l.startswith('#'):\n level = l.count('.'*5)\n l = l.strip('.')\n if ' (' in l:\n if ') or' in l:\n n1, l = l.split(') or')\n area_name, citationP = n1.strip().split(' (')\n citation = citationP.rstrip(')')\n d = (level, area_name, citation, 'NEXT SYN')\n data.append(d)\n #print(tc.red(tc.bold(repr(d))))\n\n area_name, citationP = l.strip().split(' (')\n citation = citationP.rstrip(')')\n else:\n area_name = l\n citation = None\n\n d = (level, area_name, citation, None)\n #print(d)\n data.append(d)\n results = async_getter(sgv.findByTerm, [(d[1],) for d in data])\n #results = [None] * len(data)\n curies = [[r['curie'] for r in _ if 'UBERON' in r['curie']] if _ 
else [] for _ in results]\n output = [_[0] if _ else None for _ in curies]\n\n header = ['Depth', 'Name', 'Citation', 'NextSyn', 'Uberon']\n zoop = [header] + [r for r in zip(*zip(*data), output)] + \\\n [(0, 'Appendix END None', None, None, None)] # needed to add last appendix\n\n # TODO annotate the appendicies and the classes with these\n appendix_root_mapping = (1, 1, 1, 1, 30, 83, 69, 70, 74, 1) # should generate?\n\n class SP(rowParse):\n def __init__(self):\n self.nodes = defaultdict(dict)\n self._appendix = 0\n self.appendicies = {}\n self._last_at_level = {}\n self.names = defaultdict(set)\n self.children = defaultdict(set)\n self.parents = defaultdict(set)\n self.next_syn = False\n super().__init__(zoop)\n\n def Depth(self, value):\n if self.next_syn:\n self.synonym = self.next_syn\n else:\n self.synonym = False\n self.depth = value\n\n def Name(self, value):\n self.name = value\n\n def Citation(self, value):\n self.citation = value\n\n def NextSyn(self, value):\n if value:\n self.next_syn = self._rowind\n else:\n self.next_syn = False\n\n def Uberon(self, value):\n self.uberon = value\n\n def _row_post(self):\n # check if we are in the next appendix\n # may want to xref ids between appendicies as well...\n if self.depth == 0:\n if self.name.startswith('Appendix'):\n if self._appendix:\n self.appendicies[self._appendix]['children'] = dict(self.children)\n self.appendicies[self._appendix]['parents'] = dict(self.parents)\n self._last_at_level = {}\n self.children = defaultdict(set)\n self.parents = defaultdict(set)\n _, num, apname = self.name.split(' ', 2)\n if num == 'END':\n return\n self._appendix = int(num)\n self.appendicies[self._appendix] = {\n 'name':apname.capitalize(),\n 'type':self.citation.capitalize() if self.citation else None}\n return\n else:\n if ' [' in self.name:\n name, taxonB = self.name.split(' [')\n self.name = name\n self.appendicies[self._appendix]['taxon'] = taxonB.rstrip(']').capitalize()\n else: # top level is animalia\n 
self.appendicies[self._appendix]['taxon'] = 'ANIMALIA'.capitalize()\n\n self.name = self.name.capitalize()\n self.citation = self.citation.capitalize()\n # nodes\n if self.synonym:\n self.nodes[self.synonym]['synonym'] = self.name\n self.nodes[self.synonym]['syn-cite'] = self.citation\n self.nodes[self.synonym]['syn-uberon'] = self.uberon\n return\n else:\n if self.citation: # Transverse Longitudinal etc all @ lvl4\n self.names[self.name + ' ' + self.citation].add(self._rowind)\n else:\n self.name += str(self._appendix) + self.nodes[self._last_at_level[self.depth - 1]]['label']\n #print(level, self.name)\n # can't return here because they are their own level\n # replace with actually doing something...\n self.nodes[self._rowind]['label'] = self.name\n self.nodes[self._rowind]['citation'] = self.citation\n self.nodes[self._rowind]['uberon'] = self.uberon\n # edges\n self._last_at_level[self.depth] = self._rowind\n # TODO will need something to deal with the Lateral/\n if self.depth > 0:\n try:\n parent = self._last_at_level[self.depth - 1]\n except:\n embed()\n self.children[parent].add(self._rowind)\n self.parents[self._rowind].add(parent)\n\n def _end(self):\n replace = {}\n for asdf in [sorted(n) for k,n in self.names.items() if len(n) > 1]:\n replace_with, to_replace = asdf[0], asdf[1:]\n for r in to_replace:\n replace[r] = replace_with\n\n for r, rw in replace.items():\n #print(self.nodes[rw])\n o = self.nodes.pop(r)\n #print(o)\n\n for vals in self.appendicies.values():\n children = vals['children']\n parents = vals['parents']\n # need reversed so children are corrected before swap\n for r, rw in reversed(sorted(replace.items())):\n if r in parents:\n child = r\n new_child = rw\n parent = parents.pop(child)\n parents[new_child] = parent\n parent = list(parent)[0]\n children[parent].remove(child)\n children[parent].add(new_child)\n if r in children:\n parent = r\n new_parent = rw\n childs = children.pop(parent)\n children[new_parent] = childs\n for child in 
childs:\n parents[child] = {new_parent}\n\n self.nodes = dict(self.nodes)\n\n sp = SP()\n tp = [_ for _ in sorted(['{: <50}'.format(n['label']) + n['uberon'] if n['uberon'] else n['label'] for n in sp.nodes.values()])]\n #print('\\n'.join(tp))\n #print(sp.appendicies[1].keys())\n #print(sp.nodes[1].keys())\n nbase = PREFIXES['SWAN'] + '%s'\n json_ = {'nodes':[],'edges':[]}\n parent = ilxtr.swansonBrainRegionConcept\n for node, anns in sp.nodes.items():\n nid = nbase % node\n new_graph.add_class(nid, parent, label=anns['label'])\n new_graph.add_trip(nid, 'NIFRID:definingCitation', anns['citation'])\n json_['nodes'].append({'lbl':anns['label'],'id':'SWA:' + str(node)})\n #if anns['uberon']:\n #new_graph.add_trip(nid, owl.equivalentClass, anns['uberon']) # issues arrise here...\n\n for appendix, data in sp.appendicies.items():\n aid = PREFIXES['SWAA'] + str(appendix)\n new_graph.add_class(aid, label=data['name'].capitalize())\n new_graph.add_trip(aid, 'ilxtr:hasTaxonRank', data['taxon']) # FIXME appendix is the data artifact...\n children = data['children']\n ahp = 'swanr:hasPart' + str(appendix)\n apo = 'swanr:partOf' + str(appendix)\n new_graph.add_op(ahp, transitive=True)\n new_graph.add_op(apo, inverse=ahp, transitive=True)\n for parent, childs in children.items(): # FIXME does this give complete coverage?\n pid = nbase % parent\n for child in childs:\n cid = nbase % child\n new_graph.add_restriction(pid, ahp, cid) # note hierarhcy inverts direction\n new_graph.add_restriction(cid, apo, pid)\n json_['edges'].append({'sub':'SWA:' + str(child),'pred':apo,'obj':'SWA:' + str(parent)})\n\n return new_graph", "response": "This function creates a graph that contains the swanson - aligned file and returns the NIFRID of the next page."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nbuilding a list of all the tables in the ontology.", "response": "def build(*onts, fail=False, n_jobs=9, write=True):\n \"\"\" Set n_jobs=1 for debug or embed() 
will crash. \"\"\"\n tail = lambda:tuple()\n lonts = len(onts)\n if lonts > 1:\n for i, ont in enumerate(onts):\n if ont.__name__ == 'parcBridge':\n onts = onts[:-1]\n def tail(o=ont):\n return o.setup(),\n if i != lonts - 1:\n raise ValueError('parcBridge should be built last to avoid weird errors!')\n # ont_setup must be run first on all ontologies\n # or we will get weird import errors\n if n_jobs == 1 or True:\n return tuple(ont.make(fail=fail, write=write) for ont in\n tuple(ont.setup() for ont in onts) + tail())\n\n # have to use a listcomp so that all calls to setup()\n # finish before parallel goes to work\n return Parallel(n_jobs=n_jobs)(delayed(o.make)(fail=fail, write=write)\n for o in\n #[ont_setup(ont) for ont in onts])\n (tuple(Async()(deferred(ont.setup)()\n for ont in onts)) + tail()\n if n_jobs > 1\n else [ont.setup()\n for ont in onts]))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef qname(uri, warning=False):\n if warning:\n print(tc.red('WARNING:'), tc.yellow(f'qname({uri}) is deprecated! please use OntId({uri}).curie'))\n return __helper_graph.qname(uri)", "response": "compute qname from defaults"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef cull_prefixes(graph, prefixes={k:v for k, v in uPREFIXES.items() if k != 'NIFTTL'},\n cleanup=lambda ps, graph: None, keep=False):\n \"\"\" Remove unused curie prefixes and normalize to a standard set. 
\"\"\"\n prefs = ['']\n if keep:\n prefixes.update({p:str(n) for p, n in graph.namespaces()})\n\n if '' not in prefixes:\n prefixes[''] = null_prefix # null prefix\n\n pi = {v:k for k, v in prefixes.items()}\n asdf = {} #{v:k for k, v in ps.items()}\n asdf.update(pi)\n # determine which prefixes we need\n for uri in set((e for t in graph for e in t)):\n if uri.endswith('.owl') or uri.endswith('.ttl') or uri.endswith('$$ID$$'):\n continue # don't prefix imports or templates\n for rn, rp in sorted(asdf.items(), key=lambda a: -len(a[0])): # make sure we get longest first\n lrn = len(rn)\n if type(uri) == rdflib.BNode:\n continue\n elif uri.startswith(rn) and '#' not in uri[lrn:] and '/' not in uri[lrn:]: # prevent prefixing when there is another sep\n prefs.append(rp)\n break\n\n ps = {p:prefixes[p] for p in prefs}\n\n cleanup(ps, graph)\n\n ng = makeGraph('', prefixes=ps)\n [ng.g.add(t) for t in graph]\n return ng", "response": "Remove unused curie prefixes and normalize to a standard set."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ndisplaying a list of triples in a nice way.", "response": "def displayTriples(triples, qname=qname):\n \"\"\" triples can also be an rdflib Graph instance \"\"\"\n [print(*(e[:5]\n if isinstance(e, rdflib.BNode) else\n qname(e)\n for e in t), '.')\n for t in sorted(triples)]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nserialize self. g and write to self. 
filename", "response": "def write(self, cull=False):\n \"\"\" Serialize self.g and write to self.filename, set cull to true to remove unwanted prefixes \"\"\"\n if cull:\n cull_prefixes(self).write()\n else:\n ser = self.g.serialize(format='nifttl')\n with open(self.filename, 'wb') as f:\n f.write(ser)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nadd an annotation property to the resource.", "response": "def add_ap(self, id_, label=None, addPrefix=True):\n \"\"\" Add id_ as an owl:AnnotationProperty\"\"\"\n self.add_trip(id_, rdf.type, owl.AnnotationProperty)\n if label:\n self.add_trip(id_, rdfs.label, label)\n if addPrefix:\n prefix = ''.join([s.capitalize() for s in label.split()])\n namespace = self.expand(id_)\n self.add_namespace(prefix, namespace)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds an operation to the rdflib object.", "response": "def add_op(self, id_, label=None, subPropertyOf=None, inverse=None, transitive=False, addPrefix=True):\n \"\"\" Add id_ as an owl:ObjectProperty\"\"\"\n self.add_trip(id_, rdf.type, owl.ObjectProperty)\n if inverse:\n self.add_trip(id_, owl.inverseOf, inverse)\n if subPropertyOf:\n self.add_trip(id_, rdfs.subPropertyOf, subPropertyOf)\n if label:\n self.add_trip(id_, rdfs.label, label)\n if addPrefix:\n prefix = ''.join([s.capitalize() for s in label.split()])\n namespace = self.expand(id_)\n self.add_namespace(prefix, namespace)\n if transitive:\n self.add_trip(id_, rdf.type, owl.TransitiveProperty)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_hierarchy(self, parent, edge, child): # XXX DEPRECATED\n if type(parent) != rdflib.URIRef:\n parent = self.check_thing(parent)\n\n if type(edge) != rdflib.URIRef:\n edge = self.check_thing(edge)\n\n if type(child) != infixowl.Class:\n if type(child) != rdflib.URIRef:\n child = self.check_thing(child)\n child = 
infixowl.Class(child, graph=self.g)\n\n restriction = infixowl.Restriction(edge, graph=self.g, someValuesFrom=parent)\n child.subClassOf = [restriction] + [c for c in child.subClassOf]", "response": "Add a hierarchy of object properties to the graph."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nadd a restriction to the subject.", "response": "def add_restriction(self, subject, predicate, object_):\n \"\"\" Lift normal triples into restrictions using someValuesFrom. \"\"\"\n if type(object_) != rdflib.URIRef:\n object_ = self.check_thing(object_)\n\n if type(predicate) != rdflib.URIRef:\n predicate = self.check_thing(predicate)\n\n if type(subject) != infixowl.Class:\n if type(subject) != rdflib.URIRef:\n subject = self.check_thing(subject)\n subject = infixowl.Class(subject, graph=self.g)\n\n restriction = infixowl.Restriction(predicate, graph=self.g, someValuesFrom=object_)\n subject.subClassOf = [restriction] + [c for c in subject.subClassOf]"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nget equivelant classes where curie is in an intersection", "response": "def get_equiv_inter(self, curie):\n \"\"\" get equivelant classes where curie is in an intersection \"\"\"\n start = self.qname(self.expand(curie)) # in case something is misaligned\n qstring = \"\"\"\n SELECT DISTINCT ?match WHERE {\n ?match owl:equivalentClass/owl:intersectionOf/rdf:rest*/rdf:first %s .\n }\"\"\" % start\n return [_ for (_,) in self.g.query(qstring)]"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngives a uri return the qname if it exists otherwise return the uri.", "response": "def qname(self, uri, generate=False):\n \"\"\" Given a uri return the qname if it exists, otherwise return the uri. 
\"\"\"\n try:\n prefix, namespace, name = self.g.namespace_manager.compute_qname(uri, generate=generate)\n qname = ':'.join((prefix, name))\n return qname\n except (KeyError, ValueError) as e:\n return uri.toPython() if isinstance(uri, rdflib.URIRef) else uri"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\narchive the log and results files for the given history item to the given destination directory.", "response": "def archive_history_item(item, destination, no_color):\n \"\"\"\n Archive the log and results file for the given history item.\n\n Copy the files and update the results file in destination directory.\n \"\"\"\n log_src, description = split_history_item(item.strip())\n\n # Get relative path for log:\n # {provider}/{image}/{instance}/{timestamp}.log\n log_dest = os.path.sep.join(log_src.rsplit(os.path.sep, 4)[1:])\n\n # Get results src and destination based on log paths.\n results_src = log_src.rsplit('.', 1)[0] + '.results'\n results_dest = log_dest.rsplit('.', 1)[0] + '.results'\n\n destination_path = os.path.join(destination, log_dest)\n log_dir = os.path.dirname(destination_path)\n\n try:\n if not os.path.isdir(log_dir):\n os.makedirs(log_dir)\n\n # Copy results and log files to archive directory.\n shutil.copyfile(log_src, destination_path)\n shutil.copyfile(results_src, os.path.join(destination, results_dest))\n except Exception as error:\n echo_style(\n 'Unable to archive history item: %s' % error,\n no_color,\n fg='red'\n )\n sys.exit(1)\n else:\n # Only update the archive results log if no error occur.\n update_history_log(\n os.path.join(destination, '.history'),\n description=description,\n test_log=log_dest\n )"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nprint test results in nagios style format.", "response": "def echo_results(data, no_color, verbose=False):\n \"\"\"Print test results in nagios style format.\"\"\"\n try:\n summary = data['summary']\n except KeyError as error:\n 
click.secho(\n 'The results json is missing key: %s' % error,\n fg='red'\n )\n sys.exit(1)\n\n if 'failed' in summary or 'error' in summary:\n fg = 'red'\n status = 'FAILED'\n else:\n fg = 'green'\n status = 'PASSED'\n\n results = '{} tests={}|pass={}|skip={}|fail={}|error={}'.format(\n status,\n str(summary.get('num_tests', 0)),\n str(summary.get('passed', 0)),\n str(summary.get('skipped', 0)),\n str(summary.get('failed', 0)),\n str(summary.get('error', 0))\n )\n echo_style(results, no_color, fg=fg)\n\n if verbose:\n echo_verbose_results(data, no_color)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nprinting test results in nagios style format.", "response": "def echo_results_file(results_file, no_color, verbose=False):\n \"\"\"Print test results in nagios style format.\"\"\"\n try:\n data = collect_results(results_file)\n except ValueError:\n echo_style(\n 'The results file is not the proper json format.',\n no_color,\n fg='red'\n )\n sys.exit(1)\n except Exception as error:\n echo_style(\n 'Unable to process results file: %s' % error,\n no_color,\n fg='red'\n )\n sys.exit(1)\n\n echo_results(data, no_color, verbose)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nprints list of tests and result of each test.", "response": "def echo_verbose_results(data, no_color):\n \"\"\"Print list of tests and result of each test.\"\"\"\n click.echo()\n click.echo(\n '\\n'.join(\n '{}: {}'.format(key, val) for key, val in data['info'].items()\n )\n )\n click.echo()\n for test in data['tests']:\n if test['outcome'] == 'passed':\n fg = 'green'\n elif test['outcome'] == 'skipped':\n fg = 'yellow'\n else:\n fg = 'red'\n\n name = parse_test_name(test['name'])\n echo_style(\n '{} {}'.format(name, test['outcome'].upper()),\n no_color,\n fg=fg\n )"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_log_file_from_item(history):\n try:\n log_file, 
description = shlex.split(history)\n except ValueError:\n log_file = history.strip()\n\n return log_file", "response": "Return the log file based on the provided history item."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef results_history(history_log, no_color):\n try:\n with open(history_log, 'r') as f:\n lines = f.readlines()\n except Exception as error:\n echo_style(\n 'Unable to process results history log: %s' % error,\n no_color,\n fg='red'\n )\n sys.exit(1)\n\n index = len(lines)\n for item in lines:\n click.echo('{} {}'.format(index, item), nl=False)\n index -= 1", "response": "Display a list of ipa test results history."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nsplit the history item into log file and optional description.", "response": "def split_history_item(history):\n \"\"\"\n Return the log file and optional description for item.\n \"\"\"\n try:\n log_file, description = shlex.split(history)\n except ValueError:\n log_file = history.strip()\n description = None\n\n return log_file, description"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_working_dir(script__file__):\n start = Path(script__file__).resolve()\n _root = Path(start.root)\n working_dir = start\n while not list(working_dir.glob('.git')):\n if working_dir == _root:\n return\n\n working_dir = working_dir.parent\n\n return working_dir", "response": "hardcoded sets the equivalent working directory if not in git"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef sysidpath(ignore_options=False):\n # in the event we have to make our own\n # this should not be passed in a as a parameter\n # since we need these definitions to be more or less static\n failover = Path('/tmp/machine-id')\n\n if not ignore_options:\n options = (\n Path('/etc/machine-id'),\n failover, # always read to see if we 
somehow managed to persist this\n )\n for option in options:\n if (option.exists() and\n os.access(option, os.R_OK) and\n option.stat().st_size > 0):\n return option\n\n uuid = uuid4()\n with open(failover, 'wt') as f:\n f.write(uuid.hex)\n\n return failover", "response": "get a unique identifier for the machine running this function"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsplitting a list into size pieces.", "response": "def chunk_list(list_, size):\n \"\"\" Split a list list_ into sublists of length size.\n NOTE: len(chunks[-1]) <= size. \"\"\"\n ll = len(list_)\n if ll <= size:\n return [list_]\n elif size == 0:\n return list_ # or None ??\n elif size == 1:\n return [[l] for l in list_]\n else:\n chunks = []\n for start, stop in zip(range(0, ll, size), range(size, ll, size)):\n chunks.append(list_[start:stop])\n chunks.append(list_[stop:]) # snag unaligned chunks from last stop\n return chunks"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef NICKNAME(selfPARAMSDEFAULT_OUTPUT):\n {params_conditional}\n kwargs = {param_rest}\n kwargs = {dict_comp}\n param_rest = self._make_rest({required}, **kwargs)\n url = self._basePath + ('{path}').format(**kwargs)\n requests_params = {dict_comp2}\n output = self._get('{method}', url, requests_params, {output})\n return output if output else {empty_return_type}", "response": "This method returns the NICKNAME of the current user."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nrun this to generate the code", "response": "def gencode(self):\n \"\"\" Run this to generate the code \"\"\"\n ledict = requests.get(self.api_url).json()\n ledict = self.dotopdict(ledict)\n out = self.dodict(ledict)\n self._code = out"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nrewriting the 2. 0 json to match what we feed the code for 1. 
2", "response": "def dotopdict(self, dict_):\n \"\"\" Rewrite the 2.0 json to match what we feed the code for 1.2 \"\"\"\n mlookup = {'get':'GET', 'post':'POST'}\n def rearrange(path, method_dict, method):\n oid = method_dict['operationId']\n self._paths[oid] = path\n method_dict['nickname'] = oid\n method_dict['method'] = mlookup[method]\n\n paths = dict_['paths']\n for path, path_dict in paths.items():\n if self.path_prefix and self.path_prefix not in path:\n continue\n path_dict['operations'] = []\n for method, method_dict in sorted(path_dict.items()):\n if method == 'operations':\n continue\n rearrange(path, method_dict, method)\n #print(self.operation(method_dict))\n path_dict['operations'].append(method_dict)\n path_dict['path'] = path\n\n def setp(v, lenp=len(self.path_prefix)):\n v['path'] = v['path'][lenp:]\n return v\n\n dict_['apis'] = []\n for tag_dict in dict_['tags']:\n path = '/' + tag_dict['name']\n d = {'path':path,\n 'description':tag_dict['description'],\n 'class_json':{\n 'docstring':tag_dict['description'],\n 'resourcePath':path,\n 'apis':[setp(v) for k, v in paths.items()\n if k.startswith(self.path_prefix + path)]},\n }\n dict_['apis'].append(d)\n\n # make sure this is run first so we don't get key errors\n self._swagger(dict_['swagger'])\n self._info(dict_['info'])\n self._definitions(dict_['definitions'])\n\n return dict_"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef local_imports(remote_base, local_base, ontologies, local_versions=tuple(), readonly=False, dobig=False, revert=False):\n done = []\n triples = set()\n imported_iri_vs_ontology_iri = {}\n p = owl.imports\n oi = b'owl:imports'\n oo = b'owl:Ontology'\n def inner(local_filepath, remote=False):\n if noneMembers(local_filepath, *bigleaves) or dobig:\n ext = os.path.splitext(local_filepath)[-1]\n if ext == '.ttl':\n infmt = 'turtle'\n else:\n print(ext, local_filepath)\n infmt = None\n if remote:\n resp = requests.get(local_filepath) 
# TODO nonblocking pull these out, fetch, run inner again until done\n raw = resp.text.encode()\n else:\n try:\n with open(local_filepath, 'rb') as f:\n raw = f.read()\n except FileNotFoundError as e:\n if local_filepath.startswith('file://'):\n print('local_imports has already been run, skipping', local_filepath)\n return\n #raise ValueError('local_imports has already been run') from e\n else:\n print(e) # TODO raise a warning if the file cannot be matched\n # seems like good practice to have any imported ontology under\n # version control so all imports are guaranteed to have good\n # provenance and not split the prior informaiton between the\n # scigraph config and the repository, the repository remains\n # the source of truth, load.yaml files can then pick a subset\n # of the properly tracked files to load as they see fit, but\n # not add to them (at least in pyontutils land)\n raw = b''\n if oo in raw: # we only care if there are imports or an ontology iri\n scratch = rdflib.Graph()\n if infmt == 'turtle':\n data, rest = raw.split(b'###', 1)\n elif infmt == None: # assume xml\n xml_tree = etree.parse(BytesIO(raw))\n xml_root = xml_tree.getroot()\n xml_ontology = xml_tree.xpath(\"/*[local-name()='RDF']/*[local-name()='Ontology']\")\n xml_root.clear()\n xml_root.append(xml_ontology[0])\n data = etree.tostring(xml_root)\n scratch.parse(data=data, format=infmt)\n for s in scratch.subjects(rdf.type, owl.Ontology):\n triples.add((s, owl.sameAs, rdflib.URIRef(local_filepath)))\n # somehow this breaks computing the chain\n #for p in (rdfs.comment, skos.definition, definition, dc.title, rdfs.label):\n #for o in scratch[s:p]:\n #triples.add((s, p, o))\n for s, o in sorted(scratch.subject_objects(p)):\n if revert:\n raise NotImplemented('TODO')\n nlfp = o.replace(remote_base, local_base)\n triples.add((s, p, o))\n if 'http://' in local_filepath or 'external' in local_filepath: # FIXME what to do about https used inconsistently :/\n if 'external' in local_filepath:\n 
imported_iri = rdflib.URIRef(local_filepath.replace(local_base, remote_base)) # inefficient\n else:\n imported_iri = rdflib.URIRef(local_filepath)\n if s != imported_iri:\n imported_iri_vs_ontology_iri[imported_iri] = s # kept for the record\n triples.add((imported_iri, p, s)) # bridge imported != ontology iri\n if local_base in nlfp and 'file://' not in o: # FIXME file:// should not be slipping through here...\n scratch.add((s, p, rdflib.URIRef('file://' + nlfp)))\n scratch.remove((s, p, o))\n if nlfp not in done:\n done.append(nlfp)\n if local_base in nlfp and 'external' not in nlfp: # skip externals TODO\n inner(nlfp)\n elif readonly: # read external imports\n if 'external' in nlfp:\n inner(nlfp)\n else:\n inner(nlfp, remote=True)\n if not readonly:\n ttl = scratch.serialize(format='nifttl')\n ndata, comment = ttl.split(b'###', 1)\n out = ndata + b'###' + rest\n with open(local_filepath, 'wb') as f:\n f.write(out)\n\n for start in ontologies:\n print('START', start)\n done.append(start)\n inner(start)\n return sorted(triples)", "response": "Read the import closure and use the local versions of the files."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef id_fix(value):\n if value.startswith('KSC_M'):\n pass\n else:\n value = value.replace(':','_')\n if value.startswith('ERO') or value.startswith('OBI') or value.startswith('GO') or value.startswith('UBERON') or value.startswith('IAO'):\n value = 'obo:' + value\n elif value.startswith('birnlex') or value.startswith('nlx'):\n value = 'NIFSTD:' + value\n elif value.startswith('MESH'):\n value = ':'.join(value.split('_'))\n else:\n value = ':' + value\n\n return OntId(value).URIRef", "response": "fix values for ttl"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nwrite the current object to a file.", "response": "def write(self, filename, type_='obo'): #FIXME this is bugged\n \"\"\" Write file, will not overwrite files 
with the same name\n outputs to obo by default but can also output to ttl if\n passed type_='ttl' when called.\n \"\"\"\n if os.path.exists(filename):\n name, ext = filename.rsplit('.',1)\n try:\n prefix, num = name.rsplit('_',1)\n n = int(num)\n n += 1\n filename = prefix + '_' + str(n) + '.' + ext\n except ValueError:\n filename = name + '_1.' + ext\n print('file exists, renaming to %s' % filename)\n self.write(filename, type_)\n\n else:\n with open(filename, 'wt', encoding='utf-8') as f:\n if type_ == 'obo':\n f.write(str(self)) # FIXME this is incredibly slow for big files :/\n elif type_ == 'ttl':\n f.write(self.__ttl__())\n else:\n raise TypeError('No exporter for file type %s!' % type_)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ntake a string and returns a list of strings.", "response": "def string_profiler(string, start_delimiter='(', end_delimiter=')', remove=True):\n '''\n long = '(life is is good) love world \"(blah) blah\" \"here I am\" once again \"yes\" blah '\n print(string_profiler(long))\n null = ''\n print(string_profiler(null))\n short = '(life love) yes(and much more)'\n print(string_profiler(short))\n short = 'yes \"life love\"'\n print(string_profiler(short))\n '''\n mark = 0\n string_list = []\n tmp_string = ''\n for i in range(len(string)):\n curr_index = i + mark\n if curr_index == len(string):\n break\n if string[curr_index] == start_delimiter:\n flag = True\n else:\n flag = False\n if flag:\n if tmp_string:\n string_list.extend(tmp_string.strip().split())\n tmp_string = ''\n quoted_string = ''\n for j in range(curr_index+1, len(string)):\n mark += 1\n if string[j] == end_delimiter:\n break\n quoted_string += string[j]\n if not remove:\n string_list.append(quoted_string)\n else:\n tmp_string += string[curr_index]\n if tmp_string:\n string_list.extend(tmp_string.strip().split())\n return string_list"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the 
following Python 3 code\ndef make_predicate_object_combinator(function, p, o):\n def predicate_object_combinator(subject):\n return function(subject, p, o)\n return predicate_object_combinator", "response": "Create a predicate object combinator that can be used to defer production of a triple until the missing pieces are supplied and then\n is supplied."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef serialize(self, subject, *objects_or_combinators):\n ec_s = rdflib.BNode()\n if self.operator is not None:\n if subject is not None:\n yield subject, self.predicate, ec_s\n yield from oc(ec_s)\n yield from self._list.serialize(ec_s, self.operator, *objects_or_combinators)\n else:\n for thing in objects_or_combinators:\n if isinstance(thing, Combinator):\n object = rdflib.BNode()\n #anything = list(thing(object))\n #if anything:\n #[print(_) for _ in anything]\n hasType = False\n for t in thing(object):\n if t[1] == rdf.type:\n hasType = True\n yield t\n\n if not hasType:\n yield object, rdf.type, owl.Class\n else:\n object = thing\n\n yield subject, self.predicate, object", "response": "Yields RDF objects for the given subject and objects_or_combinators."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update_with(self, **query):\n for k, v in self._filter_attrs(query).items():\n setattr(self, k, v)\n return self.save()", "response": "update with a query"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _filter_attrs(cls, attrs):\n if cls.__attr_whitelist__:\n whitelist = cls.__attr_accessible__ - cls.__attr_protected__\n return {k: v for k, v in attrs.items() if k in whitelist}\n else:\n blacklist = cls.__attr_protected__ - cls.__attr_accessible__\n return {k: v for k, v in attrs.items() if k not in blacklist}", "response": "Filter the attrs dictionary to only include the attributes that are not in the whitelist 
or in the blacklist."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nvalidate model data and save errors", "response": "def _validate(self):\n \"\"\"Validate model data and save errors\n \"\"\"\n errors = {}\n\n for name, validator in self._validators.items():\n value = getattr(self, name)\n\n try:\n validator(self, value)\n except ValidationError as e:\n errors[name] = str(e)\n\n self._validate_errors = errors"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nconvert a Penn Treebank tag to a simplified Wordnet tag", "response": "def penn_to_wn(tag):\n \"\"\" Convert between a Penn Treebank tag to a simplified Wordnet tag \"\"\"\n if tag.startswith('N'):\n return 'n'\n\n if tag.startswith('V'):\n return 'v'\n\n if tag.startswith('J'):\n return 'a'\n\n if tag.startswith('R'):\n return 'r'\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef sentence_similarity(sentence1, sentence2):\n # Tokenize and tag\n sentence1 = pos_tag(word_tokenize(sentence1))\n sentence2 = pos_tag(word_tokenize(sentence2))\n\n # Get the synsets for the tagged words\n synsets1 = [tagged_to_synset(*tagged_word) for tagged_word in sentence1]\n synsets2 = [tagged_to_synset(*tagged_word) for tagged_word in sentence2]\n\n # Filter out the Nones\n synsets1 = [ss for ss in synsets1 if ss]\n synsets2 = [ss for ss in synsets2 if ss]\n #print(synsets1)\n #print(synsets2)\n score, count = 0.0, 0.0\n\n # For each word in the first sentence\n for synset in synsets1:\n # Get the similarity value of the most similar word in the other sentence\n\n best_score=[synset.path_similarity(ss) for ss in synsets2 if synset.path_similarity(ss)]\n\n # Check that the similarity could have been computed\n if best_score:\n score += max(best_score)\n count += 1\n\n # Average the values\n if count > 0:\n score /= count\n else:\n score = 0\n\n return score", "response": "Compute the 
sentence similarity using Wordnet"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef save(self, foldername: str, path_to_folder: str=None) -> None:\n ''' Saves entities into multiple files within the same folder because of pickle-recursive\n errors that would happen if squeezed into one '''\n self.create_pickle((self.g.namespaces, ))\n self.df.to_pickle(output)", "response": "Saves the current state of the object into a pickle file."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef qname(self, uri: str) -> str:\n ''' Returns qname of uri in rdflib graph while also saving it '''\n try:\n prefix, namespace, name = self.g.compute_qname(uri)\n qname = prefix + ':' + name\n return qname\n except:\n try:\n print('prefix:', prefix)\n print('namespace:', namespace)\n print('name:', name)\n except:\n print('Could not print from compute_qname')\n exit('No qname for ' + uri)", "response": "Returns qname of uri in rdflib graph while also saving it"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef Graph2Pandas_converter(self):\n '''Updates self.g or self.path bc you could only choose 1'''\n\n if isinstance(self.path, str) or isinstance(self.path, p):\n self.path = str(self.path)\n filetype = p(self.path).suffix\n if filetype == '.pickle':\n self.g = pickle.load(open(self.path, 'rb'))\n if isinstance(self.g, rdflib.graph.Graph):\n return self.get_sparql_dataframe()\n else:\n print('WARNING:: function df() wont work unless an ontology source is loaded')\n return self.g\n elif filetype == '.ttl' or filetype == '.rdf':\n self.g = rdflib.Graph()\n self.g.parse(self.path, format='turtle')\n return self.get_sparql_dataframe()\n elif filetype == '.nt':\n self.g = rdflib.Graph()\n self.g.parse(self.path, format='nt')\n return self.get_sparql_dataframe()\n elif filetype == '.owl' or filetype == '.xrdf':\n 
self.g = rdflib.Graph()\n try:\n self.g.parse(self.path, format='xml')\n except:\n # some owl formats are more rdf than owl\n self.g.parse(self.path, format='turtle')\n return self.get_sparql_dataframe()\n else:\n exit('Format options: owl, ttl, df_pickle, rdflib.Graph()')\n try:\n return self.get_sparql_dataframe()\n self.path = None\n except:\n exit('Format options: owl, ttl, df_pickle, rdflib.Graph()')\n\n elif isinstance(self.g, rdflib.graph.Graph):\n self.path = None\n return self.get_sparql_dataframe()\n\n else:\n exit('Obj given is not str, pathlib obj, or an rdflib.Graph()')", "response": "Updates self. g or self. path bc you could only choose 1"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\niterating through the sparql table and condenses it into a Pandas DataFrame", "response": "def get_sparql_dataframe( self ):\n ''' Iterates through the sparql table and condenses it into a Pandas DataFrame '''\n self.result = self.g.query(self.query)\n cols = set() # set(['qname'])\n indx = set()\n data = {}\n curr_subj = None # place marker for first subj to be processed\n bindings = []\n\n for i, binding in enumerate(self.result.bindings):\n subj_binding = binding[rdflib.term.Variable('subj')]\n pred_binding = binding[rdflib.term.Variable('pred')]\n obj_binding = binding[rdflib.term.Variable('obj')]\n\n subj = subj_binding\n pred = pred_binding # self.qname(pred_binding) if self.predicate_as_qname else pred_binding\n obj = obj_binding\n\n # stops at BNodes; could be exanded here\n if isinstance(subj, BNode):\n continue\n elif isinstance(pred, BNode):\n continue\n elif isinstance(obj, BNode) and obj:\n continue\n # else:\n # subj = str(subj)\n # pred = str(pred)\n # obj = str(obj)\n\n cols.add(pred)\n indx.add(subj)\n bindings.append(binding)\n\n bindings = sorted(bindings, key=lambda k: k[rdflib.term.Variable('subj')])\n df = pd.DataFrame(columns=cols, index=indx)\n\n for i, binding in enumerate(bindings):\n\n subj_binding = 
binding[rdflib.term.Variable('subj')]\n pred_binding = binding[rdflib.term.Variable('pred')]\n obj_binding = binding[rdflib.term.Variable('obj')]\n\n subj = subj_binding\n pred = pred_binding # self.qname(pred_binding) if self.qname_predicates else pred_binding\n obj = obj_binding\n\n # stops at BNodes; could be exanded here\n if isinstance(subj, BNode):\n continue\n elif isinstance(pred, BNode):\n continue\n elif isinstance(obj, BNode) and obj:\n continue\n # elif self.value_type:\n # subj = str(subj)\n # pred = str(pred)\n # obj = str(obj)\n\n if curr_subj == None:\n curr_subj = subj\n if not data.get(subj): # Prepare defaultdict home if it doesn't exist\n data[subj] = defaultdict(list)\n data[subj][pred].append(obj)\n elif curr_subj != subj:\n curr_subj = subj\n for data_subj, data_pred_objs in data.items():\n for data_pred, data_objs in data_pred_objs.items():\n if len(data_objs) == 1: # clean lists of just 1 value\n data_pred_objs[data_pred] = data_objs[0]\n df.loc[data_subj] = pd.Series(data_pred_objs)\n data = {}\n if not data.get(subj): # Prepare defaultdict home if it doesn't exist\n data[subj] = defaultdict(list)\n data[subj][pred].append(obj)\n else:\n if not data.get(subj): # Prepare defaultdict home if it doesn't exist\n data[subj] = defaultdict(list)\n data[subj][pred].append(obj)\n\n for data_subj, data_pred_objs in data.items():\n for data_pred, data_objs in data_pred_objs.items():\n if len(data_objs) == 1: # clean lists of just 1 value\n data_pred_objs[data_pred] = data_objs[0]\n df.loc[data_subj] = pd.Series(data_pred_objs)\n\n df = df.where((pd.notnull(df)), None) # default Null is fricken Float NaN\n\n return df"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef df(self, qname_predicates:bool=False, keep_variable_type:bool=True) -> pd.DataFrame:\n ''' Multi funcitonal DataFrame with settings '''\n local_df = self.df.copy()\n if qname_predicates:\n for col in self.columns:\n 
local_df.rename({col: self.g.qname(col)})\n if not keep_variable_type:\n pass\n # convert all to strings, watch out for lists\n return local_df", "response": "Multi funcitonal DataFrame with settings"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_namespaces(self, namespaces: Dict[str, str]) -> None:\n for prefix, uri in namespaces.items():\n self.add_namespace(prefix=prefix, uri=uri)", "response": "Adds a prefix to uri mapping in bulk\n "} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget qualified name of uri in rdflib graph while also saving it", "response": "def qname(self, iri: str) -> str:\n \"\"\" Get qualified name of uri in rdflib graph while also saving it\n\n Args: iri: The iri that you want to replace the uri with a known prefix with\n\n Returns: qualified name of the iri to be used as the new predicate\n \"\"\"\n prefix, namespace, name = self.g.compute_qname(uri)\n qname = prefix + ':' + name\n self.rqname[qname] = iri\n return qname"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nadd a prefix to uri mapping", "response": "def add_namespace(self, prefix: str, uri: str) -> Namespace:\n \"\"\" Adds a prefix to uri mapping (namespace)\n\n Adds a namespace to replace any uris in iris with shortened prefixes\n in order to make the file more readable. 
Not techniqually necessary.\n\n Args:\n prefix: prefix that will substitute the uri in the iri\n uri: the uri in the iri to be substituted by the prefix\n\n Returns:\n A namespace of the uri\n\n Example:\n add_namespace(\n prefix = \"rdfs\",\n uri = 'http://www.w3.org/2000/01/rdf-schema#',\n )\n makes\n \"http://www.w3.org/2000/01/rdf-schema#label 'neuron'@en ;\",\n become\n \"rdfs:label 'neuron'@en ;\"\n \"\"\"\n ns = Namespace(uri)\n if not self.namespaces.get(prefix):\n self.namespaces[prefix] = ns\n self.g.bind(prefix, uri)\n return ns"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef find_prefix(self, iri: Union[URIRef, Literal, str]) -> Union[None, str]:\n iri = str(iri)\n max_iri_len = 0\n max_prefix = None\n for prefix, uri in common_namespaces.items():\n if uri in iri and max_iri_len < len(uri): # if matched uri is larger; replace as king\n max_prefix = prefix\n max_iri_len = len(uri)\n return max_prefix", "response": "Finds the prefix of a known uri in the localCOOKIE store."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_annotation(\n self,\n subj: URIRef,\n pred: URIRef,\n obj: Union[Literal, URIRef],\n a_p: URIRef ,\n a_o: Union[Literal, URIRef],\n ) -> BNode:\n \"\"\" Adds annotation to rdflib graph.\n\n The annotation axiom will filled in if this is a new annotation for the triple.\n\n Args:\n subj: Entity subject to be annotated\n pref: Entities Predicate Anchor to be annotated\n obj: Entities Object Anchor to be annotated\n a_p: Annotation predicate\n a_o: Annotation object\n\n Returns:\n A BNode which is an address to the location in the RDF graph that is storing the\n annotation information.\n \"\"\"\n bnode: BNode = self.triple2annotation_bnode.get( (subj, pred, obj) )\n if not bnode:\n a_s: BNode = BNode()\n self.triple2annotation_bnode[(subj, pred, obj)]: BNode = a_s\n self.g.add((a_s, RDF.type, OWL.Axiom))\n 
self.g.add((a_s, OWL.annotatedSource, self.process_subj_or_pred(subj)))\n self.g.add((a_s, OWL.annotatedProperty,self.process_subj_or_pred(pred)))\n self.g.add((a_s, OWL.annotatedTarget, self.process_obj(obj)))\n else:\n a_s: BNode = bnode\n self.g.add((a_s, self.process_subj_or_pred(a_p), self.process_obj(a_o)))\n return bnode", "response": "Adds an annotation to the graph."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_triple(\n self,\n subj: Union[URIRef, str],\n pred: Union[URIRef, str],\n obj: Union[URIRef, Literal, str]\n ) -> None:\n \"\"\" Adds triple to rdflib Graph\n\n Triple can be of any subject, predicate, and object of the entity without a need for order.\n\n Args:\n subj: Entity subject\n pred: Entity predicate\n obj: Entity object\n\n Example:\n In [1]: add_triple(\n ...: 'http://uri.interlex.org/base/ilx_0101431',\n ...: RDF.type,\n ...: 'http://www.w3.org/2002/07/owl#Class')\n ...: )\n \"\"\"\n if obj in [None, \"\", \" \"]: return # Empty objects are bad practice\n _subj = self.process_subj_or_pred(subj)\n _pred = self.process_subj_or_pred(pred)\n _obj = self.process_obj(obj)\n self.g.add( (_subj, _pred, _obj) )", "response": "Adds triple to Graph"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a namespace to the graph if it does not already exist", "response": "def process_prefix(self, prefix: str) -> Union[Namespace, None]:\n \"\"\" Add namespace to graph if it has a local match\n\n This allows qnames to be used without adding their respected namespaces if they are in\n the common_namespaces local dict. 
This is is to save a butt-ton of time trying to see what\n the ontology has as far as uris go.\n\n Args: prefix: prefix of the uri in the rdflib namespace to be checked if it exists in\n the local dict of common_namespaces.\n\n Returns: Namespace of uri if add or already exists; else None\n \"\"\"\n if self.namespaces.get(prefix):\n return self.namespaces[prefix]\n iri: str = common_namespaces.get(prefix)\n if iri:\n return self.add_namespace(prefix, iri)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding viable uri from iri or expands viable qname to iri to be triple ready", "response": "def process_subj_or_pred(self, component: Union[URIRef, str]) -> URIRef:\n \"\"\" Adds viable uri from iri or expands viable qname to iri to be triple ready\n\n Need to have a viable qualified name (qname) in order to use a qname. You can make it\n viable by either add the namespace beforehand with add_namespace(s) or if its already\n in the local common_namespaces preloaded.\n\n Args:\n component: entity subject or predicate to be expanded or have its uri saved.\n\n Returns:\n rdflib URIRef ready subject or predicate to be put into a triple.\n\n Raises:\n SystemExit: When expecting a qname to be expanded, but is not valid or if\n component is not a qualified name or a iri.\n \"\"\"\n if 'http' in component:\n prefix = self.find_prefix(component) # Find uri in iri based on common_namespaces\n if prefix: self.process_prefix(prefix) # if match, will add to Graph namespaces\n return URIRef(component)\n elif ':' in component:\n presumed_prefix, info = component.split(':', 1)\n namespace: Union[Namespace, None] = self.process_prefix(presumed_prefix)\n if not namespace: exit(component + ': qname namespace does\\'t exist yet.')\n return namespace[info]\n exit(component + ': is not a valid subject or predicate')"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef process_obj(self, obj: 
Union[URIRef, Literal, str]) -> Union[URIRef, Literal]:\n if isinstance(obj, dict) or isinstance(obj, list):\n exit(str(obj) + ': should be str or intended to be a URIRef or Literal.')\n\n if isinstance(obj, Literal) or isinstance(obj, URIRef):\n prefix = self.find_prefix(obj)\n if prefix: self.process_prefix(prefix)\n return obj\n\n if len(obj) > 8:\n if 'http' == obj[:4] and '://' in obj and ' ' not in obj:\n prefix = self.find_prefix(obj)\n if prefix: self.process_prefix(prefix)\n return URIRef(obj)\n\n if ':' in str(obj):\n presumed_prefix, info = obj.split(':', 1)\n namespace: Union[Namespace, None] = self.process_prefix(presumed_prefix)\n if namespace: return namespace[info]\n\n return Literal(obj)", "response": "Takes an entity object and returns the appropriate node type."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef serialize(self, **kwargs) -> str:\n kwargs = {key: str(value) for key, value in kwargs.items()}\n return self.g.serialize(**kwargs)", "response": "Serialize the graph into a string."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef remove_triple(\n self,\n subj: URIRef,\n pred: URIRef,\n obj: Union[URIRef, Literal]\n ) -> None:\n \"\"\" Removes triple from rdflib Graph\n\n You must input the triple in its URIRef or Literal form for each node exactly the way it\n was inputed or it will not delete the triple.\n\n Args:\n subj: Entity subject to be removed it its the only node with this subject; else this is\n just going to delete a desciption I.E. 
predicate_object of this entity.\n pred: Entity predicate to be removed\n obj: Entity object to be removed\n \"\"\"\n self.g.remove( (subj, pred, obj) )", "response": "Removes a triple from the Graph."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef print_graph(self, format: str = 'turtle') -> str:\n print(self.g.serialize(format=format).decode('utf-8'))", "response": "prints serialized RDFlib Graph"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsearches for a list of term ids", "response": "def identifierSearches(self,\n ids=None,\n LIMIT=25,\n _print=True,\n crawl=False):\n \"\"\"parameters( data = \"list of term_ids\" )\"\"\"\n url_base = self.base_url + '/api/1/term/view/{id}' + '?key=' + self.api_key\n urls = [url_base.format(id=str(_id)) for _id in ids]\n return self.get(\n urls=urls,\n LIMIT=LIMIT,\n action='Searching For Terms',\n crawl=crawl,\n _print=_print)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef ilxSearches(self,\n ilx_ids=None,\n LIMIT=25,\n _print=True,\n crawl=False):\n \"\"\"parameters( data = \"list of ilx_ids\" )\"\"\"\n url_base = self.base_url + \"/api/1/ilx/search/identifier/{identifier}?key={APIKEY}\"\n urls = [url_base.format(identifier=ilx_id.replace('ILX:', 'ilx_'), APIKEY=self.api_key) for ilx_id in ilx_ids]\n return self.get(\n urls=urls,\n LIMIT=LIMIT,\n action='Searching For Terms',\n crawl=crawl,\n _print=_print)", "response": "This method searches for ILX."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef updateTerms(self, data:list, LIMIT:int=20, _print:bool=True, crawl:bool=False,) -> list:\n\n url_base = self.base_url + '/api/1/term/edit/{id}'\n merged_data = []\n\n # PHP on the server is is LOADED with bugs. 
Best to just duplicate entity data and change\n # what you need in it before re-upserting the data.\n old_data = self.identifierSearches(\n [d['id'] for d in data], # just need the ids\n LIMIT = LIMIT,\n _print = _print,\n crawl = crawl,\n )\n\n for d in data: # d for dictionary\n url = url_base.format(id=str(d['id']))\n\n # Reason this exists is to avoid contradictions in case you are using a local reference\n if d['ilx'] != old_data[int(d['id'])]['ilx']:\n print(d['ilx'], old_data[int(d['id'])]['ilx'])\n exit('You might be using beta insead of production!')\n\n merged = scicrunch_client_helper.merge(new=d, old=old_data[int(d['id'])])\n merged = scicrunch_client_helper.superclasses_bug_fix(merged) # BUG: superclass output diff than input needed\n merged_data.append((url, merged))\n\n resp = self.post(\n merged_data,\n LIMIT = LIMIT,\n action = 'Updating Terms', # forced input from each function\n _print = _print,\n crawl = crawl,\n )\n return resp", "response": "Updates existing entities with new ones."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef addTerms(self, data, LIMIT=25, _print=True, crawl=False):\n needed = set([\n 'label',\n 'type',\n ])\n\n url_base = self.base_url + '/api/1/ilx/add'\n terms = []\n for d in data:\n if (set(list(d)) & needed) != needed:\n exit('You need keys: '+ str(needed - set(list(d))))\n\n if not d.get('label') or not d.get('type'): # php wont catch empty type!\n exit('=== Data is missing label or type! 
===')\n\n d['term'] = d.pop('label') # ilx only accepts term, will need to replaced back\n #d['batch-elastic'] = 'True' # term/add and edit should be ready now\n terms.append((url_base, d))\n\n primer_responses = self.post(\n terms,\n action='Priming Terms',\n LIMIT=LIMIT,\n _print=_print,\n crawl=crawl)\n\n ilx = {}\n for primer_response in primer_responses:\n primer_response['term'] = primer_response['term'].replace(''', \"'\")\n primer_response['term'] = primer_response['term'].replace('"', '\"')\n primer_response['label'] = primer_response.pop('term')\n ilx[primer_response['label'].lower()] = primer_response\n\n url_base = self.base_url + '/api/1/term/add'\n terms = []\n for d in data:\n d['label'] = d.pop('term')\n d = scicrunch_client_helper.superclasses_bug_fix(d)\n if not ilx.get(d['label'].lower()): # ilx can be incomplete if errored term\n continue\n try:\n d.update({'ilx': ilx[d['label'].lower()]['ilx']})\n except:\n d.update({'ilx': ilx[d['label'].lower()]['fragment']})\n terms.append((url_base, d))\n\n return self.post(\n terms,\n action='Adding Terms',\n LIMIT=LIMIT,\n _print=_print,\n crawl=crawl)", "response": "Adds terms to the internal state of the object."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef getAnnotations_via_tid(self,\n tids,\n LIMIT=25,\n _print=True,\n crawl=False):\n \"\"\"\n tids = list of term ids that possess the annoations\n \"\"\"\n url_base = self.base_url + \\\n '/api/1/term/get-annotations/{tid}?key=' + self.api_key\n urls = [url_base.format(tid=str(tid)) for tid in tids]\n return self.get(urls,\n LIMIT=LIMIT,\n _print=_print,\n crawl=crawl)", "response": "Get the list of annotations for a set of term ids"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef getAnnotations_via_id(self,\n annotation_ids,\n LIMIT=25,\n _print=True,\n crawl=False):\n \"\"\"tids = list of strings or ints that 
are the ids of the annotations themselves\"\"\"\n url_base = self.base_url + \\\n '/api/1/term/get-annotation/{id}?key=' + self.api_key\n urls = [\n url_base.format(id=str(annotation_id))\n for annotation_id in annotation_ids\n ]\n return self.get(urls, LIMIT=LIMIT, _print=_print, crawl=crawl)", "response": "Get the list of annotations via the ids of the annotations."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef updateAnnotations(self,\n data,\n LIMIT=25,\n _print=True,\n crawl=False,):\n \"\"\"data = [{'id', 'tid', 'annotation_tid', 'value', 'comment', 'upvote', 'downvote',\n 'curator_status', 'withdrawn', 'term_version', 'annotation_term_version', 'orig_uid',\n 'orig_time'}]\n \"\"\"\n url_base = self.base_url + \\\n '/api/1/term/edit-annotation/{id}' # id of annotation not term id\n annotations = self.getAnnotations_via_id([d['id'] for d in data],\n LIMIT=LIMIT,\n _print=_print,\n crawl=crawl)\n\n annotations_to_update = []\n for d in data:\n annotation = annotations[int(d['id'])]\n annotation.update({**d})\n url = url_base.format(id=annotation['id'])\n annotations_to_update.append((url, annotation))\n self.post(annotations_to_update,\n LIMIT=LIMIT,\n action='Updating Annotations',\n _print=_print,\n crawl=crawl)", "response": "Update the annotations of a specific term."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ndeleting the specified annotations.", "response": "def deleteAnnotations(self,\n annotation_ids,\n LIMIT=25,\n _print=True,\n crawl=False,):\n \"\"\"data = list of ids\"\"\"\n url_base = self.base_url + \\\n '/api/1/term/edit-annotation/{annotation_id}' # id of annotation not term id; thx past troy!\n annotations = self.getAnnotations_via_id(annotation_ids,\n LIMIT=LIMIT,\n _print=_print,\n crawl=crawl)\n annotations_to_delete = []\n for annotation_id in annotation_ids:\n annotation = annotations[int(annotation_id)]\n params = {\n 'value': ' ', 
# for delete\n 'annotation_tid': ' ', # for delete\n 'tid': ' ', # for delete\n 'term_version': '1',\n 'annotation_term_version': '1',\n }\n url = url_base.format(annotation_id=annotation_id)\n annotation.update({**params})\n annotations_to_delete.append((url, annotation))\n return self.post(annotations_to_delete,\n LIMIT=LIMIT,\n _print=_print,\n crawl=crawl)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nadds relationships to the related terms.", "response": "def addRelationships(\n self,\n data: list,\n LIMIT: int = 20,\n _print: bool = True,\n crawl: bool = False,\n ) -> list:\n \"\"\"\n data = [{\n \"term1_id\", \"term2_id\", \"relationship_tid\",\n \"term1_version\", \"term2_version\",\n \"relationship_term_version\",}]\n \"\"\"\n url_base = self.base_url + '/api/1/term/add-relationship'\n relationships = []\n for relationship in data:\n relationship.update({\n 'term1_version': relationship['term1_version'],\n 'term2_version': relationship['term2_version'],\n 'relationship_term_version': relationship['relationship_term_version']\n })\n relationships.append((url_base, relationship))\n return self.post(\n relationships,\n LIMIT = LIMIT,\n action = 'Adding Relationships',\n _print = _print,\n crawl = crawl,\n )"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ndeprecates an entity in interlex.", "response": "def deprecate_entity(\n self,\n ilx_id: str,\n note = None,\n ) -> None:\n \"\"\" Tagged term in interlex to warn this term is no longer used\n\n There isn't an proper way to delete a term and so we have to mark it so I can\n extrapolate that in mysql/ttl loads.\n\n Args:\n term_id: id of the term of which to be deprecated\n term_version: version of the term of which to be deprecated\n\n Example: deprecateTerm('ilx_0101431', '6')\n \"\"\"\n\n term_id, term_version = [(d['id'], d['version'])\n for d in self.ilxSearches([ilx_id], crawl=True, 
_print=False).values()][0]\n\n annotations = [{\n 'tid': term_id,\n 'annotation_tid': '306375', # id for annotation \"deprecated\"\n 'value': 'True',\n 'term_version': term_version,\n 'annotation_term_version': '1', # term version for annotation \"deprecated\"\n }]\n if note:\n editor_note = {\n 'tid': term_id,\n 'annotation_tid': '306378', # id for annotation \"editorNote\"\n 'value': note,\n 'term_version': term_version,\n 'annotation_term_version': '1', # term version for annotation \"deprecated\"\n }\n annotations.append(editor_note)\n self.addAnnotations(annotations, crawl=True, _print=False)\n print(annotations)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef force_add_term(self, entity: dict):\n needed = set([\n 'label',\n 'type',\n ])\n url_ilx_add = self.base_url + '/api/1/ilx/add'\n url_term_add = self.base_url + '/api/1/term/add'\n url_term_update = self.base_url + '/api/1/term/edit/{id}'\n\n if (set(list(entity)) & needed) != needed:\n exit('You need keys: '+ str(needed - set(list(d))))\n\n # to ensure uniqueness\n random_string = ''.join(random.choices(string.ascii_uppercase + string.digits, k=25))\n real_label = entity['label']\n entity['label'] = entity['label'] + '_' + random_string\n\n entity['term'] = entity.pop('label') # ilx only accepts term, will need to replaced back\n primer_response = self.post([(url_ilx_add, entity.copy())], _print=False, crawl=True)[0]\n entity['label'] = entity.pop('term')\n entity['ilx'] = primer_response['fragment'] if primer_response.get('fragment') else primer_response['ilx']\n entity = scicrunch_client_helper.superclasses_bug_fix(entity)\n\n response = self.post([(url_term_add, entity.copy())], _print=False, crawl=True)[0]\n\n old_data = self.identifierSearches(\n [response['id']], # just need the ids\n _print = False,\n crawl = True,\n )[response['id']]\n\n old_data['label'] = real_label\n entity = old_data.copy()\n url_term_update = 
url_term_update.format(id=entity['id'])\n return self.post([(url_term_update, entity)], _print=False, crawl=True)", "response": "Force an entity to add a term to InterLex"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_df_file_with_query(self, query, output):\n chunk_size = 100000\n offset = 0\n data = defaultdict(lambda : defaultdict(list))\n\n with open(output, 'wb') as outfile:\n query = query.replace(';', '')\n query += \"\"\" LIMIT {chunk_size} OFFSET {offset};\"\"\"\n while True:\n print(offset)\n query = query.format(\n chunk_size=chunk_size,\n offset=offset\n )\n df = pd.read_sql(query, self.engine)\n pickle.dump(df, outfile)\n offset += chunk_size\n if len(df) < chunk_size:\n break\n outfile.close()", "response": "Create a dataframe with query and save it to file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef diff(s1, s2):\n ''' --word-diff=porcelain clone'''\n delta = difflib.Differ().compare(s1.split(), s2.split())\n difflist = []\n fullline = ''\n for line in delta:\n if line[0] == '?':\n continue\n elif line[0] == ' ':\n fullline += line.strip() + ' '\n else:\n if fullline:\n difflist.append(fullline[:-1])\n fullline = ''\n difflist.append(line)\n if fullline:\n difflist.append(fullline[:-1])\n return [l[:] for l in '\\n'.join(difflist).splitlines() if l]", "response": "Returns a list of the differences between two words."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef diffcolor(s1, s2):\n ''' --word-diff=color clone '''\n string = ''\n for line in diff(s1, s2):\n if line[0] == '-':\n string += ' ' + TermColors.red(line[2:])\n elif line[0] == '+':\n string += ' ' + TermColors.green(line[2:])\n else:\n string += ' ' + line\n return string[1:]", "response": "Colorize two words in a tree."} {"SOURCE": "codesearchnet", "instruction": 
"Can you implement a function in Python 3 that\ncreates basic html based on the diff of two strings", "response": "def create_html(s1, s2, output='test.html'):\n ''' creates basic html based on the diff of 2 strings '''\n html = difflib.HtmlDiff().make_file(s1.split(), s2.split())\n with open(output, 'w') as f:\n f.write(html)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef traverse_data(obj, key_target):\n ''' will traverse nested list and dicts until key_target equals the current dict key '''\n if isinstance(obj, str) and '.json' in str(obj):\n obj = json.load(open(obj, 'r'))\n if isinstance(obj, list):\n queue = obj.copy()\n elif isinstance(obj, dict):\n queue = [obj.copy()]\n else:\n sys.exit('obj needs to be a list or dict')\n count = 0\n ''' BFS '''\n while not queue or count != 1000:\n count += 1\n curr_obj = queue.pop()\n if isinstance(curr_obj, dict):\n for key, value in curr_obj.items():\n if key == key_target:\n return curr_obj\n else:\n queue.append(curr_obj[key])\n elif isinstance(curr_obj, list):\n for co in curr_obj:\n queue.append(co)\n if count == 1000:\n sys.exit('traverse_data needs to be updated...')\n return False", "response": "traverse nested list and dicts until key_target equals the current dict key"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef json_diff(json1, json2, key_target, get_just_diff=True, porcelain=False):\n ''' creates a (keyname + diff) key within the json of the same layer which key_target resides.\n Ex: json1={'definition':'data of key_target'}, json2={'definition':'data of key_target'}\n key_target = 'definition'\n\n Usage:\n json_diff (\n json_data1, json_data1 can be both [{..}] and {[..]} or json file path\n json_data2, json_data2 can be both [{..}] and {[..]} or json file path\n key_target, of a key within a dict that holds the string data for comparison; EX: 'definition'\n 
get_just_diff=True, default=True; will return just the color diff of the 2 strings\n porcelain=False default=False; porcelain clone as output only as optional\n )\n '''\n json1 = json_secretary(json1)\n json2 = json_secretary(json2)\n\n obj1 = traverse_data(json1, key_target)\n obj2 = traverse_data(json2, key_target)\n\n output = diffcolor(obj1[key_target], obj2[key_target])\n\n if porcelain:\n return diff(obj1[key_target], obj2[key_target])\n\n if get_just_diff:\n return output\n\n obj1[key_target + '_diff'] = output\n obj2[key_target + '_diff'] = output\n return json1, json2, output", "response": "returns a json object with the diff color of the two objects"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncheck if there is enough memory for vms_max_kb", "response": "def memoryCheck(vms_max_kb):\n \"\"\" Lookup vms_max using getCurrentVMSKb \"\"\"\n safety_factor = 1.2\n vms_max = vms_max_kb\n vms_gigs = vms_max / 1024 ** 2\n buffer = safety_factor * vms_max\n buffer_gigs = buffer / 1024 ** 2\n vm = psutil.virtual_memory()\n free_gigs = vm.available / 1024 ** 2\n if vm.available < buffer:\n raise MemoryError('Running this requires quite a bit of memory ~ '\n f'{vms_gigs:.2f}, you have {free_gigs:.2f} of the '\n f'{buffer_gigs:.2f} needed')"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nquerying all sequence rows", "response": "def _sequence_query(self):\n \"\"\"\n query all sequence rows\n \"\"\"\n klass = self.__class__\n query = klass.select().where(klass.sequence.is_null(False))\n seq_scope_field_names =\\\n (self.__seq_scope_field_name__ or '').split(',')\n for name in seq_scope_field_names:\n seq_scope_field = getattr(klass, name, None)\n if seq_scope_field:\n seq_scope_field_value = getattr(self, name)\n query = query.where(seq_scope_field == seq_scope_field_value)\n return query"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nupdating the values of a single 
spreadsheet.", "response": "def update_sheet_values(spreadsheet_name, sheet_name, values, spreadsheet_service=None):\n SPREADSHEET_ID = devconfig.secrets(spreadsheet_name)\n if spreadsheet_service is None:\n service = get_oauth_service(readonly=False)\n ss = service.spreadsheets()\n else:\n ss = spreadsheet_service\n \"\"\"\n requests = [\n {'updateCells': {\n 'start': {'sheetId': TODO,\n 'rowIndex': 0,\n 'columnIndex': 0}\n 'rows': {'values'}\n }\n }]\n response = ss.batchUpdate(\n spreadsheetId=SPREADSHEET_ID, range=sheet_name,\n body=body).execute()\n\n \"\"\"\n body = {'values': values}\n\n response = ss.values().update(\n spreadsheetId=SPREADSHEET_ID, range=sheet_name,\n valueInputOption='USER_ENTERED', body=body).execute()\n\n return response"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nfetching the values from the remote data store", "response": "def fetch(self, fetch_notes=None):\n \"\"\" update remote values (called automatically at __init__) \"\"\"\n if fetch_notes is None:\n fetch_notes = self.fetch_notes\n values, notes_index = get_sheet_values(self.name, self.sheet_name,\n spreadsheet_service=self._spreadsheet_service,\n get_notes=fetch_notes)\n self.raw_values = values\n self.values = [list(r) for r in zip(*itertools.zip_longest(*self.raw_values, fillvalue=''))]\n self.byCol = byCol(self.values, to_index=self.index_columns)\n self.notes_index = notes_index"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_types(graph, phenotypes): # TODO missing expression phenotypes! 
also basket type somehow :(\n\n collect = defaultdict(set)\n def recurse(id_, start, level=0):\n #print(level)\n for t in graph.g.triples((None, None, id_)):\n if level == 0:\n if t[1] != rdflib.term.URIRef('http://www.w3.org/2002/07/owl#someValuesFrom'):\n continue\n if type_check(t, (rdflib.term.URIRef, rdflib.term.URIRef, rdflib.term.BNode)):\n #print(start, t[0])\n collect[start].add(t[0])\n return # we're done here, otherwise we hit instantiated subclasses\n if level > 1:\n if t[1] == rdflib.URIRef('http://www.w3.org/1999/02/22-rdf-syntax-ns#first') or \\\n t[1] == rdflib.URIRef('http://www.w3.org/1999/02/22-rdf-syntax-ns#rest'):\n continue\n\n recurse(t[0], start, level + 1)\n\n\n for phenotype in phenotypes:\n recurse(phenotype, phenotype)\n\n return collect", "response": "Add types to the set of classes that are not in the hierarchy."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _rest_make_phenotypes():\n #phenotype sources\n neuroner = Path(devconfig.git_local_base,\n 'neuroNER/resources/bluima/neuroner/hbp_morphology_ontology.obo').as_posix()\n neuroner1 = Path(devconfig.git_local_base,\n 'neuroNER/resources/bluima/neuroner/hbp_electrophysiology_ontology.obo').as_posix()\n neuroner2 = Path(devconfig.git_local_base,\n 'neuroNER/resources/bluima/neuroner/hbp_electrophysiology-triggers_ontology.obo').as_posix()\n nif_qual = Path(devconfig.ontology_local_repo,\n 'ttl/NIF-Quality.ttl').as_posix()\n\n mo = OboFile(os.path.expanduser(neuroner))\n mo1 = OboFile(os.path.expanduser(neuroner1))\n mo2 = OboFile(os.path.expanduser(neuroner2))\n mo_ttl = mo.__ttl__() + mo1.__ttl__() + mo2.__ttl__()\n\n mo_ttl = \"\"\"\\\n @prefix : .\n @prefix nsu: .\n @prefix rdfs: .\n @prefix owl: .\n @prefix rdf: .\n \"\"\" + mo_ttl\n\n #sio = io.StringIO()\n #sio.write(mo_ttl)\n\n ng = rdflib.Graph()\n ng.parse(data=mo_ttl, format='turtle')\n ng.parse(os.path.expanduser(nif_qual), format='turtle')\n 
#ng.namespace_manager.bind('default1', None, override=False, replace=True)\n ng.remove((None, rdflib.OWL.imports, None))\n\n bad_match = {\n 'http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#nlx_qual_20090505',\n 'http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#sao1693353776',\n 'http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#sao1288413465',\n 'http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#sao4459136323',\n 'http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#nlx_qual_20090507',\n }\n\n exact = []\n similar = []\n quals = []\n s2 = {}\n\n for subject, label in sorted(ng.subject_objects(rdflib.RDFS.label)):\n\n syns = set([a for a in ng.objects(subject, rdflib.URIRef('http://www.FIXME.org/nsupper#synonym'))])\n syns.update(set([a for a in ng.objects(subject, rdflib.URIRef('http://ontology.neuinfo.org/NIF/Backend/OBO_annotation_properties.owl#synonym'))]))\n #if syns:\n #print(syns)\n #print(subject)\n #print(label.lower())\n if 'quality' in label.lower():\n quals.append((subject, label))\n subpre = ng.namespace_manager.compute_qname(subject)[1]\n llower = rdflib.Literal(label.lower(), lang='en')\n for s in ng.subjects(rdflib.RDFS.label, llower):\n if s != subject:\n exact.append((subject, s, label, llower))\n for s, p, o in sorted(ng.triples((None, rdflib.RDFS.label, None))):\n spre = ng.namespace_manager.compute_qname(s)[1]\n if subject != s and label.lower() in o.lower().split(' ') and spre != subpre:\n if s.toPython() in bad_match or subject.toPython() in bad_match:\n continue\n #print()\n #print(spre, subpre)\n similar.append((subject, s, label, o))\n if subpre.toPython() == 'http://FIXME.org/':\n print('YAY')\n print(label, ',', o)\n print(subject, s)\n subject, s = s, subject\n label, o = o, label\n\n if subject in s2:\n #print('YES IT EXISTS')\n #print(syns, label, [subject, s])\n s2[subject]['syns'].update(syns)\n s2[subject]['syns'].add(label)\n 
s2[subject]['xrefs'] += [subject, s]\n else:\n s2[subject] = {'label': label.toPython(), 'o': o.toPython(), 'xrefs':[subject, s], 'syns':syns} # FIXME overwrites\n\n pprint(quals)\n \"\"\" print stuff\n print('matches')\n pprint(exact)\n pprint(similar)\n\n #print('EXACT', exact)\n\n print()\n for k, v in s2.items():\n print(k)\n for k, v2 in sorted(v.items()):\n print(' ', k, ':', v2)\n #\"\"\"\n\n desired_nif_terms = set() #{\n #'NIFQUAL:sao1959705051', # dendrite\n #'NIFQUAL:sao2088691397', # axon\n #'NIFQUAL:sao1057800815', # morphological\n #'NIFQUAL:sao-1126011106', # soma\n #'NIFQUAL:',\n #'NIFQUAL:',\n #}\n starts = [\n #\"NIFQUAL:sao2088691397\",\n #\"NIFQUAL:sao1278200674\",\n #\"NIFQUAL:sao2088691397\",\n #\"NIFQUAL:sao-1126011106\", # FIXME WTF IS THIS NONSENSE (scigraph bug?)\n quote(\"http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#sao1959705051\").replace('/','%2F'),\n quote(\"http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#sao2088691397\").replace('/','%2F'),\n quote(\"http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#sao1278200674\").replace('/','%2F'),\n quote(\"http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#sao2088691397\").replace('/','%2F'),\n quote(\"http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#sao-1126011106\").replace('/','%2F'),\n ]\n\n for id_ in starts:\n want = sgg.getNeighbors(id_, relationshipType='subClassOf', direction='INCOMING', depth=5)\n #print(id_, want)\n desired_nif_terms.update([n['id'] for n in want['nodes']])\n\n print(desired_nif_terms)\n\n\n ilx_start = 50114\n print(ilx_base.format(ilx_start))\n new_terms = {}\n dg = makeGraph('uwotm8', prefixes=PREFIXES)\n xr = makeGraph('xrefs', prefixes=PREFIXES)\n for s, o in sorted(ng.subject_objects(rdflib.RDFS.label))[::-1]:\n spre = ng.namespace_manager.compute_qname(s)[1]\n #if spre.toPython() == g.namespaces['NIFQUAL']:\n #print('skipping', s)\n #continue # TODO\n if s in 
new_terms:\n print(s, 'already in as xref probably')\n continue\n #elif spre.toPython() != 'http://uri.interlex.org/base/ilx_' or spre.toPython() != 'http://FIXME.org/' and s.toPython() not in desired_nif_terms:\n #elif spre.toPython() != 'http://FIXME.org/' and s.toPython() not in desired_nif_terms:\n #print('DO NOT WANT', s, spre)\n #continue\n\n syns = set([s for s in ng.objects(s, dg.namespaces['nsu']['synonym'])])\n #data['syns'] += syns\n\n data = {}\n id_ = ilx_base.format(ilx_start)\n ilx_start += 1\n if s in s2:\n d = s2[s]\n syns.update(d['syns'])\n new_terms[d['xrefs'][0]] = {'replaced_by':id_}\n xr.add_trip(d['xrefs'][0], 'oboInOwl:replacedBy', id_)\n #dg.add_trip(d['xrefs'][0], 'oboInOwl:replacedBy', id_)\n new_terms[d['xrefs'][1]] = {'replaced_by':id_}\n xr.add_trip(d['xrefs'][1], 'oboInOwl:replacedBy', id_)\n #dg.add_trip(d['xrefs'][1], 'oboInOwl:replacedBy', id_)\n\n data['labels'] = [d['label'], d['o']]\n #dg.add_trip(id_, rdflib.RDFS.label, d['label'])\n dg.add_trip(id_, rdflib.RDFS.label, d['o'])\n data['xrefs'] = d['xrefs']\n for x in d['xrefs']: # FIXME... 
expecting order of evaluation errors here...\n dg.add_trip(id_, 'oboInOwl:hasDbXref', x) # xr\n xr.add_trip(id_, 'oboInOwl:hasDbXref', x) # x\n\n elif spre.toPython() != 'http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-Quality.owl#' or ng.namespace_manager.qname(s).replace('default1','NIFQUAL') in desired_nif_terms: # skip non-xref quals\n #print(ng.namespace_manager.qname(s).replace('default1','NIFQUAL'))\n new_terms[s] = {'replaced_by':id_}\n xr.add_trip(s, 'oboInOwl:replacedBy', id_)\n data['labels'] = [o.toPython()]\n dg.add_trip(id_, rdflib.RDFS.label, o.toPython())\n data['xrefs'] = [s]\n dg.add_trip(id_, 'oboInOwl:hasDbXref', s) # xr\n xr.add_trip(id_, 'oboInOwl:hasDbXref', s) # xr\n else:\n ilx_start -= 1\n continue\n\n new_terms[id_] = data\n dg.add_trip(id_, rdflib.RDF.type, rdflib.OWL.Class)\n xr.add_trip(id_, rdflib.RDF.type, rdflib.OWL.Class)\n for syn in syns:\n if syn.toPython() not in data['labels']:\n if len(syn) > 3:\n dg.add_trip(id_, 'NIFRID:synonym', syn)\n elif syn:\n dg.add_trip(id_, 'NIFRID:abbrev', syn)\n\n if 'EPHYS' in s or any(['EPHYS' in x for x in data['xrefs']]):\n dg.add_trip(id_, rdflib.RDFS.subClassOf, ephys_phenotype)\n elif 'MORPHOLOGY' in s or any(['MORPHOLOGY' in x for x in data['xrefs']]):\n dg.add_trip(id_, rdflib.RDFS.subClassOf, morpho_phenotype)\n\n #dg.write(convert=False)\n xr.write(convert=False)\n\n #skip this for now, we can use DG to do lookups later\n #for t in dg.g.triples((None, None, None)):\n #g.add_trip(*t) # only way to clean prefixes :/\n add_phenotypes(g)\n g.write(convert=False)\n\n g2 = makeGraph('pheno-comp', PREFIXES)\n for t in ng.triples((None, None, None)):\n g2.add_trip(*t) # only way to clean prefixes :/\n\n g2.write(convert=False)\n\n syn_mappings = {}\n for sub, syn in [_ for _ in g.g.subject_objects(g.expand('NIFRID:synonym'))] + [_ for _ in g.g.subject_objects(rdflib.RDFS.label)]:\n syn = syn.toPython()\n if syn in syn_mappings:\n log.error(f'duplicate synonym! 
{syn} {sub}')\n syn_mappings[syn] = sub\n\n #embed()\n return syn_mappings, pedges, ilx_start", "response": "Make the phenotype of the neuron."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef config(self):\n # TODO more efficient to read once and put watch on the file\n config = {}\n if self.config_file.exists():\n with open(self.config_file.as_posix(), 'rt') as f: # 3.5/pypy3 can't open Path directly\n config = {k:self._override[k] if\n k in self._override else\n v for k, v in yaml.safe_load(f).items()}\n\n return config", "response": "Allows changing the config on the fly"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _get_service_account_info(self):\n with open(self.service_account_file, 'r') as f:\n info = json.load(f)\n\n self.service_account_email = info.get('client_email')\n if not self.service_account_email:\n raise GCECloudException(\n 'Service account JSON file is invalid for GCE. '\n 'client_email key is expected. See getting started '\n 'docs for information on GCE configuration.'\n )\n\n self.service_account_project = info.get('project_id')\n if not self.service_account_project:\n raise GCECloudException(\n 'Service account JSON file is invalid for GCE. '\n 'project_id key is expected. 
See getting started '\n 'docs for information on GCE configuration.'\n )", "response": "Retrieve json dict from service account file."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _get_driver(self):\n ComputeEngine = get_driver(Provider.GCE)\n return ComputeEngine(\n self.service_account_email,\n self.service_account_file,\n project=self.service_account_project\n )", "response": "Get authenticated GCE driver."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _get_instance(self):\n try:\n instance = self.compute_driver.ex_get_node(\n self.running_instance_id,\n zone=self.region\n )\n except ResourceNotFoundError as e:\n raise GCECloudException(\n 'Instance with id: {id} cannot be found: {error}'.format(\n id=self.running_instance_id, error=e\n )\n )\n\n return instance", "response": "Retrieve instance matching instance_id."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ngenerate SSH public key from private key.", "response": "def _get_ssh_public_key(self):\n \"\"\"Generate SSH public key from private key.\"\"\"\n key = ipa_utils.generate_public_ssh_key(self.ssh_private_key_file)\n return '{user}:{key} {user}'.format(\n user=self.ssh_user,\n key=key.decode()\n )"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _launch_instance(self):\n metadata = {'key': 'ssh-keys', 'value': self.ssh_public_key}\n self.running_instance_id = ipa_utils.generate_instance_name(\n 'gce-ipa-test'\n )\n self.logger.debug('ID of instance: %s' % self.running_instance_id)\n\n kwargs = {\n 'location': self.region,\n 'ex_metadata': metadata,\n 'ex_service_accounts': [{\n 'email': self.service_account_email,\n 'scopes': ['storage-ro']\n }]\n }\n\n if self.subnet_id:\n kwargs['ex_subnetwork'] = self._get_subnet(self.subnet_id)\n kwargs['ex_network'] = 
kwargs['ex_subnetwork'].network\n\n try:\n instance = self.compute_driver.create_node(\n self.running_instance_id,\n self.instance_type or GCE_DEFAULT_TYPE,\n self.image_id,\n **kwargs\n )\n except ResourceNotFoundError as error:\n try:\n message = error.value['message']\n except TypeError:\n message = error\n\n raise GCECloudException(\n 'An error occurred launching instance: {message}.'.format(\n message=message\n )\n )\n\n self.compute_driver.wait_until_running(\n [instance],\n timeout=self.timeout\n )", "response": "Launch an instance of the given image."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nvalidates region was passed in and is a valid GCE zone.", "response": "def _validate_region(self):\n \"\"\"Validate region was passed in and is a valid GCE zone.\"\"\"\n if not self.region:\n raise GCECloudException(\n 'Zone is required for GCE cloud framework: '\n 'Example: us-west1-a'\n )\n\n try:\n zone = self.compute_driver.ex_get_zone(self.region)\n except Exception:\n zone = None\n\n if not zone:\n raise GCECloudException(\n '{region} is not a valid GCE zone. 
'\n 'Example: us-west1-a'.format(\n region=self.region\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _set_instance_ip(self):\n instance = self._get_instance()\n\n if instance.public_ips:\n self.instance_ip = instance.public_ips[0]\n elif instance.private_ips:\n self.instance_ip = instance.private_ips[0]\n else:\n raise GCECloudException(\n 'IP address for instance: %s cannot be found.'\n % self.running_instance_id\n )", "response": "Retrieve and set the instance ip address."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef grab_rdflib_graph_version(g: Graph) -> str:\n ''' Crap-shot for ontology iri if its properly in the header and correctly formated '''\n version = g.subject_objects( predicate = URIRef( OWL.versionIRI ) )\n version = [o for s, o in version]\n if len(version) != 1:\n print('versioning isn\\'t correct')\n else:\n version = str(version[0])\n return version", "response": "Grab the version of the ontology from the graph"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nfix ILX ID in database only excepts lower case and underscore version of ID", "response": "def fix_ilx(self, ilx_id: str) -> str:\n ''' Database only excepts lower case and underscore version of ID '''\n ilx_id = ilx_id.replace('http://uri.interlex.org/base/', '')\n if ilx_id[:4] not in ['TMP:', 'tmp_', 'ILX:', 'ilx_']:\n raise ValueError(\n 'Need to provide ilx ID with format ilx_# or ILX:# for given ID ' + ilx_id)\n return ilx_id.replace('ILX:', 'ilx_').replace('TMP:', 'tmp_')"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef pull_int_tail(self, string: str) -> str:\n ''' Useful for IDs that have giberish in the front of the real ID '''\n int_tail = ''\n for element in string[::-1]:\n try:\n int(element)\n int_tail = element + int_tail\n except:\n pass\n 
return int_tail", "response": "Used for getting the tail of the string from the end of the string."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef extract_fragment(self, iri: str) -> str:\n ''' Pulls only for code/ID from the iri\n\n I only add the str() conversion for the iri because rdflib objects need to be converted.\n '''\n fragment = str(iri).rsplit('/')[-1].split(':', 1)[-1].split('#', 1)[-1].split('_', 1)[-1]\n return fragment", "response": "Extracts the fragment from the given IRI"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsearching the InterLex table for the specified curie and returns the row in InterLex associated with the curie.", "response": "def curie_search(self, curie:str) -> dict:\n ''' Returns the row in InterLex associated with the curie\n\n Note:\n Pressumed to not have duplicate curies in InterLex\n Args:\n curie: The \"prefix:fragment_id\" of the existing_id pertaining to the ontology\n Returns:\n None or dict\n '''\n ilx_row = self.curie2row.get(curie)\n if not ilx_row:\n return None\n else:\n return ilx_row"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the rows in InterLex associated with the fragment_id", "response": "def fragment_search(self, fragement:str) -> List[dict]:\n ''' Returns the rows in InterLex associated with the fragment\n\n Note:\n Pressumed to have duplicate fragements in InterLex\n Args:\n fragment: The fragment_id of the curie pertaining to the ontology\n Returns:\n None or List[dict]\n '''\n fragement = self.extract_fragment(fragement)\n ilx_rows = self.fragment2rows.get(fragement)\n if not ilx_rows:\n return None\n else:\n return ilx_rows"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef label_search(self, label:str) -> List[dict]:\n ''' Returns the rows in InterLex associated with that label\n\n Note:\n 
Pressumed to have duplicated labels in InterLex\n Args:\n label: label of the entity you want to find\n Returns:\n None or List[dict]\n '''\n ilx_rows = self.label2rows(self.local_degrade(label))\n if not ilx_rows:\n return None\n else:\n return ilx_rows", "response": "Returns the rows in InterLex associated with that label"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef exhaustive_label_check( self,\n ontology:pd.DataFrame,\n label_predicate='rdfs:label',\n diff:bool=True, ) -> Tuple[list]:\n ''' All entities with conflicting labels gets a full diff\n\n Args:\n ontology: pandas DataFrame created from an ontology where the colnames are predicates\n and if classes exist it is also thrown into a the colnames.\n label_predicate: usually in qname form and is the colname of the DataFrame for the label\n diff: complete exhaustive diff if between curie matches... will take FOREVER if there are a lot -> n^2\n Returns:\n inside: entities that are inside of InterLex\n outside: entities NOT in InterLex\n diff (optional): List[List[dict]]... 
so complicated but usefull diff between matches only '''\n\n inside, outside = [], []\n header = ['Index'] + list(ontology.columns)\n for row in ontology.itertuples():\n\n row = {header[i]:val for i, val in enumerate(row)}\n label_obj = row[label_predicate]\n if isinstance(label_obj, list):\n if len(label_obj) != 1:\n exit('Need to have only 1 label in the cell from the onotology.')\n else:\n label_obj = label_obj[0]\n entity_label = self.local_degrade(label_obj)\n ilx_rows = self.label2rows.get(entity_label)\n if ilx_rows:\n inside.append({\n 'external_ontology_row': row,\n 'ilx_rows': ilx_rows,\n })\n else:\n outside.append(row)\n\n if diff:\n diff = self.__exhaustive_diff(inside)\n return inside, outside, diff\n return inside, outside", "response": "Check if the label_predicate is in the label2rows of the entity and if so get a full diff between the curie matches"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck if all entities with conflicting curies get a full diff to see if they belong.", "response": "def exhaustive_curie_check( self,\n ontology:pd.DataFrame,\n curie_predicate:str,\n curie_prefix:str,\n diff:bool=True, ) -> Tuple[list]:\n ''' All entities with conflicting curies gets a full diff to see if they belong\n\n Args:\n ontology: pandas DataFrame created from an ontology where the colnames are predicates\n and if classes exist it is also thrown into a the colnames.\n curie_predicate: usually in qname form and is the colname of the DataFrame\n curie_prefix: Not all cells in the DataFrame will have complete curies so we extract\n the fragement from the cell and use the prefix to complete it.\n diff: complete exhaustive diff if between curie matches... will take FOREVER if there are a lot -> n^2\n Returns:\n inside: entities that are inside of InterLex\n outside: entities NOT in InterLex\n diff (optional): List[List[dict]]... 
so complicated but usefull diff between matches only '''\n inside, outside = [], []\n curie_prefix = curie_prefix.replace(':', '') # just in case I forget a colon isnt in a prefix\n header = ['Index'] + list(ontology.columns)\n for row in ontology.itertuples():\n row = {header[i]:val for i, val in enumerate(row)}\n entity_curie = row[curie_predicate]\n if isinstance(entity_curie, list):\n if len(entity_curie) != 0:\n exit('Need to have only 1 iri in the cell from the onotology.')\n else:\n entity_curie = entity_curie[0]\n entity_curie = curie_prefix + ':' + self.extract_fragment(entity_curie)\n\n ilx_row = self.curie2row.get(entity_curie)\n if ilx_row:\n inside.append({\n 'external_ontology_row': row,\n 'ilx_rows': [ilx_row],\n })\n else:\n outside.append(row)\n if diff:\n diff = self.__exhaustive_diff(inside)\n return inside, outside, diff\n return inside, outside"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nchecking if entities with conflicting fragments get a full diff to see if they belong", "response": "def exhaustive_fragment_check( self,\n ontology:pd.DataFrame,\n iri_curie_fragment_predicate:str = 'iri',\n cross_reference_iris:bool = False,\n cross_reference_fragments:bool = False,\n diff:bool = True, ) -> Tuple[list]:\n ''' All entities with conflicting fragments gets a full diff to see if they belong\n\n Args:\n ontology: pandas DataFrame created from an ontology where the colnames are predicates\n and if classes exist it is also thrown into a the colnames.\n iri_curie_fragment_predicate: usually in qname form and is the colname of the DataFrame for iri\n Default is \"iri\" for graph2pandas module\n diff: complete exhaustive diff if between curie matches... will take FOREVER if there are a lot -> n^2\n Returns:\n inside: entities that are inside of InterLex\n outside: entities NOT in InterLex\n diff (optional): List[List[dict]]... 
so complicated but usefull diff between matches only '''\n\n inside, outside = [], []\n header = ['Index'] + list(ontology.columns)\n for row in ontology.itertuples():\n row = {header[i]:val for i, val in enumerate(row)}\n entity_suffix = row[iri_curie_fragment_predicate]\n if isinstance(entity_suffix, list):\n if len(entity_suffix) != 0:\n exit('Need to have only 1 iri in the cell from the onotology.')\n else:\n entity_suffix = entity_suffix[0]\n entity_fragment = self.extract_fragment(entity_suffix)\n ilx_rows = self.fragment2rows.get(entity_fragment)\n if cross_reference_fragments and ilx_rows:\n ilx_rows = [row for row in ilx_rows if entity_fragment.lower() in row['iri'].lower()]\n if cross_reference_iris and ilx_rows:\n # true suffix of iris\n ilx_rows = [row for row in ilx_rows if entity_suffix.rsplit('/', 1)[-1].lower() in row['iri'].lower()]\n if ilx_rows:\n inside.append({\n 'external_ontology_row': row,\n 'ilx_rows': ilx_rows,\n })\n else:\n outside.append(row)\n if diff:\n diff = self.__exhaustive_diff(inside)\n return inside, outside, diff\n return inside, outside"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef exhaustive_ontology_ilx_diff_row_only( self, ontology_row: dict ) -> dict:\n ''' WARNING RUNTIME IS AWEFUL '''\n results = []\n header = ['Index'] + list(self.existing_ids.columns)\n for row in self.existing_ids.itertuples():\n row = {header[i]:val for i, val in enumerate(row)}\n check_list = [\n {\n 'external_ontology_row': ontology_row,\n 'ilx_rows': [row],\n },\n ]\n # First layer for each external row. Second is for each potential ilx row. 
It's simple here 1-1.\n result = self.__exhaustive_diff(check_list)[0][0]\n if result['same']:\n results.append(result)\n return results", "response": "This method is used to exhaustive the ontology ilx diff for each row of an ontology."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncombines the label & definition exhaustive check out of convenience", "response": "def combo_exhaustive_label_definition_check( self,\n ontology: pd.DataFrame,\n label_predicate:str,\n definition_predicates:str,\n diff = True) -> List[List[dict]]:\n ''' Combo of label & definition exhaustive check out of convenience\n\n Args:\n ontology: pandas DataFrame created from an ontology where the colnames are predicates\n and if classes exist it is also thrown into a the colnames.\n label_predicate: usually in qname form and is the colname of the DataFrame for the label\n diff: complete exhaustive diff if between curie matches... will take FOREVER if there are a lot -> n^2\n Returns:\n inside: entities that are inside of InterLex\n outside: entities NOT in InterLex\n diff (optional): List[List[dict]]... 
so complicated but usefull diff between matches only '''\n\n inside, outside = [], []\n header = ['Index'] + list(ontology.columns)\n for row in ontology.itertuples():\n\n row = {header[i]:val for i, val in enumerate(row)}\n\n label_obj = row[label_predicate]\n if isinstance(label_obj, list):\n if len(label_obj) != 1:\n exit('Need to have only 1 label in the cell from the onotology.')\n else:\n label_obj = label_obj[0]\n entity_label = self.local_degrade(label_obj)\n label_search_results = self.label2rows.get(entity_label)\n label_ilx_rows = label_search_results if label_search_results else []\n\n definition_ilx_rows = []\n for definition_predicate in definition_predicates:\n definition_objs = row[definition_predicate]\n if not definition_objs:\n continue\n definition_objs = [definition_objs] if not isinstance(definition_objs, list) else definition_objs\n for definition_obj in definition_objs:\n definition_obj = self.local_degrade(definition_obj)\n definition_search_results = self.definition2rows.get(definition_obj)\n if definition_search_results:\n definition_ilx_rows.extend(definition_search_results)\n\n ilx_rows = [dict(t) for t in {tuple(d.items()) for d in (label_ilx_rows + definition_ilx_rows)}]\n if ilx_rows:\n inside.append({\n 'external_ontology_row': row,\n 'ilx_rows': ilx_rows,\n })\n else:\n outside.append(row)\n\n if diff:\n diff = self.__exhaustive_diff(inside)\n return inside, outside, diff\n return inside, outside"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nclear the client cache or remove key matching the given ip.", "response": "def clear_cache(ip=None):\n \"\"\"Clear the client cache or remove key matching the given ip.\"\"\"\n if ip:\n with ignored(Exception):\n client = CLIENT_CACHE[ip]\n del CLIENT_CACHE[ip]\n client.close()\n else:\n for client in CLIENT_CACHE.values():\n with ignored(Exception):\n client.close()\n CLIENT_CACHE.clear()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 
function for\nestablishing SSH connection to the instance.", "response": "def establish_ssh_connection(ip,\n ssh_private_key_file,\n ssh_user,\n port,\n attempts=5,\n timeout=None):\n \"\"\"\n Establish ssh connection and return paramiko client.\n\n Raises:\n IpaSSHException: If connection cannot be established\n in given number of attempts.\n \"\"\"\n client = paramiko.SSHClient()\n client.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n\n while attempts:\n try:\n client.connect(\n ip,\n port=port,\n username=ssh_user,\n key_filename=ssh_private_key_file,\n timeout=timeout\n )\n except: # noqa: E722\n attempts -= 1\n time.sleep(10)\n else:\n return client\n\n raise IpaSSHException(\n 'Failed to establish SSH connection to instance.'\n )"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nexecuting given command using paramiko.", "response": "def execute_ssh_command(client, cmd):\n \"\"\"\n Execute given command using paramiko.\n\n Returns:\n String output of cmd execution.\n Raises:\n IpaSSHException: If stderr returns a non-empty string.\n \"\"\"\n try:\n stdin, stdout, stderr = client.exec_command(cmd)\n err = stderr.read()\n out = stdout.read()\n if err:\n raise IpaSSHException(out.decode() + err.decode())\n except: # noqa: E722\n raise\n return out.decode()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef extract_archive(client, archive_path, extract_path=None):\n command = 'tar -xf {path}'.format(path=archive_path)\n\n if extract_path:\n command += ' -C {extract_path}'.format(extract_path=extract_path)\n\n out = execute_ssh_command(client, command)\n return out", "response": "Extract the archive in current path using the provided client."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ngenerate SSH public key from private key file.", "response": "def generate_public_ssh_key(ssh_private_key_file):\n 
\"\"\"Generate SSH public key from private key file.\"\"\"\n try:\n with open(ssh_private_key_file, \"rb\") as key_file:\n key = key_file.read()\n except FileNotFoundError:\n raise IpaUtilsException(\n 'SSH private key file: %s cannot be found.' % ssh_private_key_file\n )\n\n try:\n private_key = serialization.load_pem_private_key(\n key,\n password=None,\n backend=default_backend()\n )\n except ValueError:\n raise IpaUtilsException(\n 'SSH private key file: %s is not a valid key file.'\n % ssh_private_key_file\n )\n\n return private_key.public_key().public_bytes(\n serialization.Encoding.OpenSSH,\n serialization.PublicFormat.OpenSSH\n )"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nparse ini config file and return a dict of values.", "response": "def get_config_values(config_path, section, default='default'):\n \"\"\"\n Parse ini config file and return a dict of values.\n\n The provided section overrides any values in default section.\n \"\"\"\n values = {}\n\n if not os.path.isfile(config_path):\n raise IpaUtilsException(\n 'Config file not found: %s' % config_path\n )\n\n config = configparser.ConfigParser()\n\n try:\n config.read(config_path)\n except Exception:\n raise IpaUtilsException(\n 'Config file format invalid.'\n )\n\n try:\n values.update(config.items(default))\n except Exception:\n pass\n\n try:\n values.update(config.items(section))\n except Exception:\n pass\n\n return values"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nestablishing and test SSH connection.", "response": "def get_ssh_client(ip,\n ssh_private_key_file,\n ssh_user='root',\n port=22,\n timeout=600,\n wait_period=10):\n \"\"\"Attempt to establish and test ssh connection.\"\"\"\n if ip in CLIENT_CACHE:\n return CLIENT_CACHE[ip]\n\n start = time.time()\n end = start + timeout\n\n client = None\n while time.time() < end:\n try:\n client = establish_ssh_connection(\n ip,\n ssh_private_key_file,\n ssh_user,\n port,\n 
timeout=wait_period\n )\n execute_ssh_command(client, 'ls')\n except: # noqa: E722\n if client:\n client.close()\n wait_period += wait_period\n else:\n CLIENT_CACHE[ip] = client\n return client\n\n raise IpaSSHException(\n 'Attempt to establish SSH connection failed.'\n )"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nload yaml config file and return dictionary.", "response": "def get_yaml_config(config_path):\n \"\"\"\n Load yaml config file and return dictionary.\n\n Todo:\n * This will need refactoring similar to the test search.\n \"\"\"\n config_path = os.path.expanduser(config_path)\n if not os.path.isfile(config_path):\n raise IpaUtilsException(\n 'Config file not found: %s' % config_path\n )\n\n with open(config_path, 'r') as f:\n config = yaml.safe_load(f)\n return config"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nparses sync points into list of test files.", "response": "def parse_sync_points(names, tests):\n \"\"\"\n Slice list of test names on sync points.\n\n If test is test file find full path to file.\n\n Returns:\n A list of test file sets and sync point strings.\n Examples:\n ['test_hard_reboot']\n [set('test1', 'test2')]\n [set('test1', 'test2'), 'test_soft_reboot']\n [set('test1', 'test2'), 'test_soft_reboot', set('test3')]\n \"\"\"\n test_files = []\n section = set()\n\n for name in names:\n if name in SYNC_POINTS:\n if section:\n test_files.append(section)\n\n test_files.append(name)\n section = set()\n else:\n section.add(find_test_file(name, tests))\n\n if section:\n test_files.append(section)\n return test_files"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef put_file(client, source_file, destination_file):\n try:\n sftp_client = client.open_sftp()\n sftp_client.put(source_file, destination_file)\n except Exception as error:\n raise IpaUtilsException(\n 'Error copying file to instance: {0}.'.format(error)\n )\n 
finally:\n with ignored(Exception):\n sftp_client.close()", "response": "Copy file to instance using Paramiko client connection."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nredirecting standard out to file.", "response": "def redirect_output(fileobj):\n \"\"\"Redirect standard out to file.\"\"\"\n old = sys.stdout\n sys.stdout = fileobj\n try:\n yield fileobj\n finally:\n sys.stdout = old"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates temporary ssh config file.", "response": "def ssh_config(ssh_user, ssh_private_key_file):\n \"\"\"Create temporary ssh config file.\"\"\"\n try:\n ssh_file = NamedTemporaryFile(delete=False, mode='w+')\n ssh_file.write('Host *\\n')\n ssh_file.write(' IdentityFile %s\\n' % ssh_private_key_file)\n ssh_file.write(' User %s' % ssh_user)\n ssh_file.close()\n yield ssh_file.name\n finally:\n with ignored(OSError):\n os.remove(ssh_file.name)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the history log file with the item.", "response": "def update_history_log(history_log,\n clear=False,\n description=None,\n test_log=None):\n \"\"\"\n Update the history log file with item.\n If clear flag is provided the log file is deleted.\n \"\"\"\n if not test_log and not clear:\n raise IpaUtilsException(\n 'A test log or clear flag must be provided.'\n )\n\n if clear:\n with ignored(OSError):\n os.remove(history_log)\n\n else:\n history_dir = os.path.dirname(history_log)\n if not os.path.isdir(history_dir):\n try:\n os.makedirs(history_dir)\n except OSError as error:\n raise IpaUtilsException(\n 'Unable to create directory: %s' % error\n )\n\n with open(history_log, 'a+') as f:\n # Using append mode creates file if it does not exist\n if description:\n description = '\"%s\"' % description\n\n out = '{} {}'.format(\n test_log,\n description or ''\n )\n f.write(out.strip() + '\\n')"} {"SOURCE": 
"codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef validate(self, value):\n if not self._compiled_regex.match(value):\n raise ValidationError(\n 'value {:s} not match r\"{:s}\"'.format(value, self._regex))", "response": "Validate string by regex."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef ontology2df(self):\n '''Updates self.g or self.path bc you could only choose 1'''\n\n if isinstance(self.path, str) or isinstance(self.path, p):\n self.path = str(self.path)\n filetype = p(self.path).suffix\n if filetype == '.json':\n self.g = None\n try:\n records = open_json(self.path)\n return pd.DataFrame(records)\n except:\n exit('Json file is not in records format.')\n if filetype == '.pickle':\n self.g = None\n return pickle.load(open(self.path, 'rb'))\n elif filetype == '.ttl' or filetype == '.rdf':\n self.g = rdflib.Graph()\n self.g.parse(self.path, format='turtle')\n return self.get_sparql_dataframe()\n elif filetype == '.nt':\n self.g = rdflib.Graph()\n self.g.parse(self.path, format='nt')\n return self.get_sparql_dataframe()\n elif filetype == '.owl' or filetype == '.xrdf':\n self.g = rdflib.Graph()\n try:\n self.g.parse(self.path, format='xml')\n except:\n # some owl formats are more rdf than owl\n self.g.parse(self.path, format='turtle')\n return self.get_sparql_dataframe()\n else:\n exit('Format options: owl, ttl, df_pickle, rdflib.Graph()')\n try:\n return self.get_sparql_dataframe()\n self.path = None\n except:\n exit('Format options: owl, ttl, df_pickle, rdflib.Graph()')\n\n elif isinstance(self.g, rdflib.graph.Graph):\n self.path = None\n return self.get_sparql_dataframe()\n\n else:\n exit('Obj given is not str, pathlib obj, or an rdflib.Graph()')", "response": "Updates self. g or self. 
path bc you could only choose 1"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ntaking list linked to common name and maps common name to accepted predicate and their respected suffixes to decrease sensitivity.", "response": "def create_pred2common(self):\n ''' Takes list linked to common name and maps common name to accepted predicate\n and their respected suffixes to decrease sensitivity.\n '''\n self.pred2common = {}\n for common_name, ext_preds in self.common2preds.items():\n for pred in ext_preds:\n pred = pred.lower().strip()\n self.pred2common[pred] = common_name"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef clean_pred(self, pred, ignore_warning=False):\n ''' Takes the predicate and returns the suffix, lower case, stripped version\n '''\n original_pred = pred\n pred = pred.lower().strip()\n if 'http' in pred:\n pred = pred.split('/')[-1]\n elif ':' in pred:\n if pred[-1] != ':': # some matches are \"prefix:\" only\n pred = pred.split(':')[-1]\n else:\n if not ignore_warning:\n exit('Not a valid predicate: ' + original_pred + '. 
Needs to be an iri \"/\" or curie \":\".')\n return pred", "response": "Takes the predicate and returns the suffix lower case stripped version\n "} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_common_pred(self, pred):\n ''' Gets version of predicate and sees if we have a translation to a common relation.\n INPUT:\n pred = predicate from the triple\n OUTPUT:\n Common relationship or None\n '''\n pred = self.clean_pred(pred)\n common_pred = self.pred2common.get(pred)\n return common_pred", "response": "Gets version of predicate and sees if we have a translation to a common relation."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a network interface in the resource group.", "response": "def _create_network_interface(\n self, ip_config_name, nic_name, public_ip, region,\n resource_group_name, subnet, accelerated_networking=False\n ):\n \"\"\"\n Create a network interface in the resource group.\n\n Attach NIC to the subnet and public IP provided.\n \"\"\"\n nic_config = {\n 'location': region,\n 'ip_configurations': [{\n 'name': ip_config_name,\n 'private_ip_allocation_method': 'Dynamic',\n 'subnet': {\n 'id': subnet.id\n },\n 'public_ip_address': {\n 'id': public_ip.id\n },\n }]\n }\n\n if accelerated_networking:\n nic_config['enable_accelerated_networking'] = True\n\n try:\n nic_setup = self.network.network_interfaces.create_or_update(\n resource_group_name, nic_name, nic_config\n )\n except Exception as error:\n raise AzureCloudException(\n 'Unable to create network interface: {0}.'.format(\n error\n )\n )\n\n return nic_setup.result()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _create_public_ip(self, public_ip_name, resource_group_name, region):\n public_ip_config = {\n 'location': region,\n 'public_ip_allocation_method': 'Dynamic'\n }\n\n try:\n public_ip_setup = \\\n 
self.network.public_ip_addresses.create_or_update(\n resource_group_name, public_ip_name, public_ip_config\n )\n except Exception as error:\n raise AzureCloudException(\n 'Unable to create public IP: {0}.'.format(error)\n )\n\n return public_ip_setup.result()", "response": "Create public IP address in the resource group."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _create_resource_group(self, region, resource_group_name):\n resource_group_config = {'location': region}\n\n try:\n self.resource.resource_groups.create_or_update(\n resource_group_name, resource_group_config\n )\n except Exception as error:\n raise AzureCloudException(\n 'Unable to create resource group: {0}.'.format(error)\n )", "response": "Create resource group if it does not exist."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate the storage profile for the instance.", "response": "def _create_storage_profile(self):\n \"\"\"\n Create the storage profile for the instance.\n\n Image reference can be a custom image name or a published urn.\n \"\"\"\n if self.image_publisher:\n storage_profile = {\n 'image_reference': {\n 'publisher': self.image_publisher,\n 'offer': self.image_offer,\n 'sku': self.image_sku,\n 'version': self.image_version\n },\n }\n else:\n for image in self.compute.images.list():\n if image.name == self.image_id:\n image_id = image.id\n break\n else:\n raise AzureCloudException(\n 'Image with name {0} not found.'.format(self.image_id)\n )\n\n storage_profile = {\n 'image_reference': {\n 'id': image_id\n }\n }\n\n return storage_profile"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncreating a subnet in the provided vnet and resource group.", "response": "def _create_subnet(self, resource_group_name, subnet_id, vnet_name):\n \"\"\"\n Create a subnet in the provided vnet and resource group.\n \"\"\"\n subnet_config = 
{'address_prefix': '10.0.0.0/29'}\n\n try:\n subnet_setup = self.network.subnets.create_or_update(\n resource_group_name, vnet_name, subnet_id, subnet_config\n )\n except Exception as error:\n raise AzureCloudException(\n 'Unable to create subnet: {0}.'.format(error)\n )\n\n return subnet_setup.result()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _create_virtual_network(self, region, resource_group_name, vnet_name):\n vnet_config = {\n 'location': region,\n 'address_space': {\n 'address_prefixes': ['10.0.0.0/27']\n }\n }\n\n try:\n vnet_setup = self.network.virtual_networks.create_or_update(\n resource_group_name, vnet_name, vnet_config\n )\n except Exception as error:\n raise AzureCloudException(\n 'Unable to create vnet: {0}.'.format(error)\n )\n\n vnet_setup.wait()", "response": "Create a vnet in the given resource group with default address space."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating or update a VM instance based on vm_parameters config.", "response": "def _create_vm(self, vm_config):\n \"\"\"\n Attempt to create or update VM instance based on vm_parameters config.\n \"\"\"\n try:\n vm_setup = self.compute.virtual_machines.create_or_update(\n self.running_instance_id, self.running_instance_id,\n vm_config\n )\n except Exception as error:\n raise AzureCloudException(\n 'An exception occurred creating virtual machine: {0}'.format(\n error\n )\n )\n\n vm_setup.wait()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates the VM config dictionary.", "response": "def _create_vm_config(self, interface):\n \"\"\"\n Create the VM config dictionary.\n\n Requires an existing network interface object.\n \"\"\"\n # Split image ID into it's components.\n self._process_image_id()\n\n hardware_profile = {\n 'vm_size': self.instance_type or AZURE_DEFAULT_TYPE\n }\n\n network_profile = {\n 
'network_interfaces': [{\n 'id': interface.id,\n 'primary': True\n }]\n }\n\n storage_profile = self._create_storage_profile()\n\n os_profile = {\n 'computer_name': self.running_instance_id,\n 'admin_username': self.ssh_user,\n 'linux_configuration': {\n 'disable_password_authentication': True,\n 'ssh': {\n 'public_keys': [{\n 'path': '/home/{0}/.ssh/authorized_keys'.format(\n self.ssh_user\n ),\n 'key_data': self.ssh_public_key\n }]\n }\n }\n }\n\n vm_config = {\n 'location': self.region,\n 'os_profile': os_profile,\n 'hardware_profile': hardware_profile,\n 'storage_profile': storage_profile,\n 'network_profile': network_profile\n }\n\n return vm_config"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the instance matching the running_instance_id.", "response": "def _get_instance(self):\n \"\"\"\n Return the instance matching the running_instance_id.\n \"\"\"\n try:\n instance = self.compute.virtual_machines.get(\n self.running_instance_id, self.running_instance_id,\n expand='instanceView'\n )\n except Exception as error:\n raise AzureCloudException(\n 'Unable to retrieve instance: {0}'.format(error)\n )\n\n return instance"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _get_instance_state(self):\n instance = self._get_instance()\n statuses = instance.instance_view.statuses\n\n for status in statuses:\n if status.code.startswith('PowerState'):\n return status.display_status", "response": "Retrieve state of the instance."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _get_management_client(self, client_class):\n try:\n client = get_client_from_auth_file(\n client_class, auth_path=self.service_account_file\n )\n except ValueError as error:\n raise AzureCloudException(\n 'Service account file format is invalid: {0}.'.format(error)\n )\n except KeyError as error:\n raise AzureCloudException(\n 
'Service account file missing key: {0}.'.format(error)\n )\n except Exception as error:\n raise AzureCloudException(\n 'Unable to create resource management client: '\n '{0}.'.format(error)\n )\n\n return client", "response": "Get instance of resource management client."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate new test instance in resource group.", "response": "def _launch_instance(self):\n \"\"\"\n Create new test instance in a resource group with the same name.\n \"\"\"\n self.running_instance_id = ipa_utils.generate_instance_name(\n 'azure-ipa-test'\n )\n self.logger.debug('ID of instance: %s' % self.running_instance_id)\n self._set_default_resource_names()\n\n try:\n # Try block acts as a transaction. If an exception is raised\n # attempt to cleanup the resource group and all created resources.\n\n # Create resource group.\n self._create_resource_group(self.region, self.running_instance_id)\n\n if self.subnet_id:\n # Use existing vnet/subnet.\n subnet = self.network.subnets.get(\n self.vnet_resource_group, self.vnet_name, self.subnet_id\n )\n else:\n self.subnet_id = ''.join([self.running_instance_id, '-subnet'])\n self.vnet_name = ''.join([self.running_instance_id, '-vnet'])\n\n # Create new vnet\n self._create_virtual_network(\n self.region, self.running_instance_id, self.vnet_name\n )\n\n # Create new subnet in new vnet\n subnet = self._create_subnet(\n self.running_instance_id, self.subnet_id, self.vnet_name\n )\n\n # Setup interface and public ip in resource group.\n public_ip = self._create_public_ip(\n self.public_ip_name, self.running_instance_id, self.region\n )\n interface = self._create_network_interface(\n self.ip_config_name, self.nic_name, public_ip, self.region,\n self.running_instance_id, subnet, self.accelerated_networking\n )\n\n # Get dictionary of VM parameters and create instance.\n vm_config = self._create_vm_config(interface)\n self._create_vm(vm_config)\n except Exception:\n try:\n 
self._terminate_instance()\n except Exception:\n pass\n raise\n else:\n # Ensure VM is in the running state.\n self._wait_on_instance('VM running', timeout=self.timeout)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nprocesses the image id into publisher offer sku and version.", "response": "def _process_image_id(self):\n \"\"\"\n Split image id into component values.\n\n Example: SUSE:SLES:12-SP3:2018.01.04\n Publisher:Offer:Sku:Version\n\n Raises:\n If image_id is not a valid format.\n \"\"\"\n try:\n image_info = self.image_id.strip().split(':')\n self.image_publisher = image_info[0]\n self.image_offer = image_info[1]\n self.image_sku = image_info[2]\n self.image_version = image_info[3]\n except Exception:\n self.image_publisher = None"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngenerating names for resources based on the running_instance_id.", "response": "def _set_default_resource_names(self):\n \"\"\"\n Generate names for resources based on the running_instance_id.\n \"\"\"\n self.ip_config_name = ''.join([\n self.running_instance_id, '-ip-config'\n ])\n self.nic_name = ''.join([self.running_instance_id, '-nic'])\n self.public_ip_name = ''.join([self.running_instance_id, '-public-ip'])"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _set_image_id(self):\n instance = self._get_instance()\n image_info = instance.storage_profile.image_reference\n\n if image_info.publisher:\n self.image_id = ':'.join([\n image_info.publisher, image_info.offer,\n image_info.sku, image_info.version\n ])\n else:\n self.image_id = image_info.id.rsplit('/', maxsplit=1)[1]", "response": "Set the image id from the instance info."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _set_instance_ip(self):\n try:\n ip_address = self.network.public_ip_addresses.get(\n self.running_instance_id, 
self.public_ip_name\n ).ip_address\n except Exception:\n try:\n ip_address = self.network.network_interfaces.get(\n self.running_instance_id, self.nic_name\n ).ip_configurations[0].private_ip_address\n except Exception as error:\n raise AzureCloudException(\n 'Unable to retrieve instance IP address: {0}.'.format(\n error\n )\n )\n\n self.instance_ip = ip_address", "response": "Get the public IP address based on instance ID."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _terminate_instance(self):\n try:\n self.resource.resource_groups.delete(self.running_instance_id)\n except Exception as error:\n raise AzureCloudException(\n 'Unable to terminate resource group: {0}.'.format(error)\n )", "response": "Terminate the resource group and instance."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef preferred_change(data):\n ''' Determines preferred existing id based on curie prefix in the ranking list '''\n ranking = [\n 'CHEBI',\n 'NCBITaxon',\n 'COGPO',\n 'CAO',\n 'DICOM',\n 'UBERON',\n 'NLX',\n 'NLXANAT',\n 'NLXCELL',\n 'NLXFUNC',\n 'NLXINV',\n 'NLXORG',\n 'NLXRES',\n 'NLXSUB'\n 'BIRNLEX',\n 'SAO',\n 'NDA.CDE',\n 'PR',\n 'IAO',\n 'NIFEXT',\n 'OEN',\n 'ILX',\n ]\n mock_rank = ranking[::-1]\n score = []\n old_pref_index = None\n for i, d in enumerate(data['existing_ids']):\n if not d.get('preferred'): # db allows None or '' which will cause a problem\n d['preferred'] = 0\n if int(d['preferred']) == 1:\n old_pref_index = i\n if d.get('curie'):\n pref = d['curie'].split(':')[0]\n if pref in mock_rank:\n score.append(mock_rank.index(pref))\n else:\n score.append(-1)\n else:\n score.append(-1)\n\n new_pref_index = score.index(max(score))\n new_pref_iri = data['existing_ids'][new_pref_index]['iri']\n if new_pref_iri.rsplit('/', 1)[0] == 'http://uri.interlex.org/base':\n if old_pref_index:\n if old_pref_index != new_pref_index:\n return data\n for e in 
data['existing_ids']:\n e['preferred'] = 0\n data['existing_ids'][new_pref_index]['preferred'] = 1\n return data", "response": "Determines preferred existing id based on curie prefix in the ranking list."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nmerging two object bug that can create duplicates if in the same batch", "response": "def merge(new, old):\n ''' synonyms and existing_ids are part of an object bug that can create duplicates if in the same batch '''\n\n for k, vals in new.items():\n\n if k == 'synonyms':\n new_synonyms = vals\n if old['synonyms']:\n old_literals = [syn['literal'].lower().strip() for syn in old['synonyms']]\n for new_synonym in new_synonyms:\n if new_synonym['literal'].lower().strip() not in old_literals:\n old['synonyms'].append(new_synonym) # default is a list in SciCrunch, that's why this works without initing old['synonyms']\n else:\n old['synonyms'].extend(new['synonyms'])\n\n elif k == 'existing_ids':\n iris = [e['iri'] for e in old['existing_ids']]\n for new_existing_id in vals:\n\n new_existing_id['preferred'] = 0\n\n if 'change' not in list(new_existing_id): # notion that you want to add it\n new_existing_id['change'] = False\n\n if new_existing_id.get('delete') == True:\n if new_existing_id['iri'] in iris:\n new_existing_ids = []\n for e in old['existing_ids']:\n if e['iri'] != new_existing_id['iri']:\n new_existing_ids.append(e)\n old['existing_ids'] = new_existing_ids\n else:\n print(new_existing_id)\n sys.exit(\"You want to delete an iri that doesn't exist\")\n\n elif new_existing_id.get('replace') == True:\n if not new_existing_id.get('old_iri'):\n sys.exit(\n 'Need to have old_iri as a key to have a ref for replace'\n )\n old_iri = new_existing_id.pop('old_iri')\n if old_iri in iris:\n new_existing_ids = []\n for e in old['existing_ids']:\n if e['iri'] == old_iri:\n if new_existing_id.get('curie'):\n e['curie'] = new_existing_id['curie']\n if new_existing_id.get('iri'):\n e['iri'] = 
new_existing_id['iri']\n new_existing_ids.append(e)\n old['existing_ids'] = new_existing_ids\n else:\n print(new_existing_id)\n sys.exit(\"You want to replace an iri that doesn't exist\", '\\n', new)\n\n else:\n if new_existing_id['iri'] not in iris and new_existing_id['change'] == True:\n sys.exit('You want to change iri that doesnt exist ' + str(new))\n elif new_existing_id['iri'] not in iris and new_existing_id['change'] == False:\n old['existing_ids'].append(new_existing_id)\n elif new_existing_id['iri'] in iris and new_existing_id['change'] == True:\n new_existing_ids = []\n for e in old['existing_ids']:\n if e['iri'] == new_existing_id['iri']:\n if not new_existing_id.get('curie'):\n new_existing_id['curie'] = e['curie']\n new_existing_ids.append(new_existing_id)\n else:\n new_existing_ids.append(e)\n old['existing_ids'] = new_existing_ids\n elif new_existing_id['iri'] in iris and new_existing_id['change'] == False:\n pass # for sanity readability\n else:\n sys.exit('Something broke while merging in existing_ids')\n\n elif k in ['definition', 'superclasses', 'id', 'type', 'comment', 'label', 'uid', 'ontologies']:\n old[k] = vals\n\n # TODO: still need to mark them... 
but when batch elastic for update works\n # old['uid'] = 34142 # DEBUG: need to mark as mine manually until all Old terms are fixed\n\n ''' REMOVE REPEATS; needs to exist due to server overloads '''\n if old.get('synonyms'):\n visited = {}\n new_synonyms = []\n for synonym in old['synonyms']:\n if not visited.get(synonym.get('literal')):\n new_synonyms.append(synonym)\n visited[synonym['literal']] = True\n old['synonyms'] = new_synonyms\n\n visited = {}\n new_existing_ids = []\n for e in old['existing_ids']:\n if not visited.get(e['iri']):\n new_existing_ids.append(e)\n visited[e['iri']] = True\n old['existing_ids'] = new_existing_ids\n old = preferred_change(old)\n\n return old"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nprocessing provided history log and results files.", "response": "def results(context, history_log):\n \"\"\"Process provided history log and results files.\"\"\"\n if context.obj is None:\n context.obj = {}\n context.obj['history_log'] = history_log\n\n if context.invoked_subcommand is None:\n context.invoke(show, item=1)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef archive(context, clear_log, items, path, name):\n history_log = context.obj['history_log']\n no_color = context.obj['no_color']\n\n with open(history_log, 'r') as f:\n # Get history items\n history_items = f.readlines()\n\n if items:\n # Split comma separated list and cast indices to integer.\n items = [int(item) for item in items.split(',')]\n\n lines = []\n for index in items:\n lines.append(history_items[len(history_items) - index])\n history_items = lines\n\n with tempfile.TemporaryDirectory() as temp_dir:\n for item in history_items:\n # Copy log and results file,\n # update results file with relative path.\n archive_history_item(item, temp_dir, no_color)\n\n file_name = ''.join([name, '.tar.gz'])\n archive_path = os.path.join(path, file_name)\n\n with tarfile.open(archive_path, \"w:gz\") as tar:\n 
# Create tar archive\n tar.add(temp_dir, arcname='results')\n\n if clear_log:\n if items:\n # Remove duplicates to prevent unwanted deletion.\n items = list(set(items))\n\n # Must delete items from bottom to top of history file\n # to preserve indices. (Index 0 is last item in file)\n items.sort()\n for index in items:\n context.invoke(delete, item=index)\n else:\n context.invoke(clear)\n\n click.echo(\n 'Exported results history to archive: {0}'.format(archive_path)\n )", "response": "Archive the history log and all results files."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef delete(context, item):\n history_log = context.obj['history_log']\n no_color = context.obj['no_color']\n try:\n with open(history_log, 'r+') as f:\n lines = f.readlines()\n history = lines.pop(len(lines) - item)\n f.seek(0)\n f.write(''.join(lines))\n f.flush()\n f.truncate()\n except IndexError:\n echo_style(\n 'History result at index %s does not exist.' % item,\n no_color,\n fg='red'\n )\n sys.exit(1)\n except Exception as error:\n echo_style(\n 'Unable to delete result item {0}. 
{1}'.format(item, error),\n no_color,\n fg='red'\n )\n sys.exit(1)\n\n log_file = get_log_file_from_item(history)\n try:\n os.remove(log_file)\n except Exception:\n echo_style(\n 'Unable to delete results file for item {0}.'.format(item),\n no_color,\n fg='red'\n )\n\n try:\n os.remove(log_file.rsplit('.', 1)[0] + '.results')\n except Exception:\n echo_style(\n 'Unable to delete log file for item {0}.'.format(item),\n no_color,\n fg='red'\n )", "response": "Delete the specified history item from the history log."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef show(context,\n log,\n results_file,\n verbose,\n item):\n \"\"\"\n Print test results info from provided results json file.\n\n If no results file is supplied echo results from most recent\n test in history if it exists.\n\n If verbose option selected, echo all test cases.\n\n If log option selected echo test log.\n \"\"\"\n history_log = context.obj['history_log']\n no_color = context.obj['no_color']\n if not results_file:\n # Find results/log file from history\n # Default -1 is most recent test run\n try:\n with open(history_log, 'r') as f:\n lines = f.readlines()\n history = lines[len(lines) - item]\n except IndexError:\n echo_style(\n 'History result at index %s does not exist.' 
% item,\n no_color,\n fg='red'\n )\n sys.exit(1)\n except Exception:\n echo_style(\n 'Unable to retrieve results history, '\n 'provide results file or re-run test.',\n no_color,\n fg='red'\n )\n sys.exit(1)\n\n log_file = get_log_file_from_item(history)\n if log:\n echo_log(log_file, no_color)\n else:\n echo_results_file(\n log_file.rsplit('.', 1)[0] + '.results',\n no_color,\n verbose\n )\n\n elif log:\n # Log file provided\n echo_log(results_file, no_color)\n else:\n # Results file provided\n echo_results_file(results_file, no_color, verbose)", "response": "Prints test results info from provided results json file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _get_ssh_client(self):\n return ipa_utils.get_ssh_client(\n self.instance_ip,\n self.ssh_private_key_file,\n self.ssh_user,\n timeout=self.timeout\n )", "response": "Return a new SSH client for given ip."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\noutput test run information to top of log file.", "response": "def _log_info(self):\n \"\"\"Output test run information to top of log file.\"\"\"\n if self.cloud == 'ssh':\n self.results['info'] = {\n 'platform': self.cloud,\n 'distro': self.distro_name,\n 'image': self.instance_ip,\n 'timestamp': self.time_stamp,\n 'log_file': self.log_file,\n 'results_file': self.results_file\n }\n else:\n self.results['info'] = {\n 'platform': self.cloud,\n 'region': self.region,\n 'distro': self.distro_name,\n 'image': self.image_id,\n 'instance': self.running_instance_id,\n 'timestamp': self.time_stamp,\n 'log_file': self.log_file,\n 'results_file': self.results_file\n }\n\n self._write_to_log(\n '\\n'.join(\n '%s: %s' % (key, val) for key, val\n in self.results['info'].items()\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _write_to_log(self, output):\n with open(self.log_file, 'a') as log_file:\n 
log_file.write('\\n')\n log_file.write(output)\n log_file.write('\\n')", "response": "Write the output string to the log file."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _merge_results(self, results):\n self.results['tests'] += results['tests']\n\n for key, value in results['summary'].items():\n self.results['summary'][key] += value", "response": "Combine results of test run with exisiting dict."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _save_results(self):\n with open(self.results_file, 'w') as results_file:\n json.dump(self.results, results_file)", "response": "Save results dictionary to json file."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _set_distro(self):\n if self.distro_name == 'sles':\n self.distro = SLES()\n elif self.distro_name == 'opensuse_leap':\n self.distro = openSUSE_Leap()\n else:\n raise IpaCloudException(\n 'Distribution: %s, not supported.' 
% self.distro_name\n )", "response": "Determine distro for image and create instance of class."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates results directory if not exists.", "response": "def _set_results_dir(self):\n \"\"\"Create results directory if not exists.\"\"\"\n if self.running_instance_id:\n self.results_dir = os.path.join(\n self.results_dir,\n self.cloud,\n self.image_id,\n self.running_instance_id\n )\n else:\n self.results_dir = os.path.join(\n self.results_dir,\n self.cloud,\n self.instance_ip\n )\n\n try:\n os.makedirs(self.results_dir)\n except OSError as error:\n if not os.path.isdir(self.results_dir):\n raise IpaCloudException(\n 'Unable to create ipa results directory: %s' % error\n )\n\n self.time_stamp = datetime.now().strftime('%Y%m%d%H%M%S')\n self.log_file = ''.join(\n [self.results_dir, os.sep, self.time_stamp, '.log']\n )\n self.logger.debug('Created log file %s' % self.log_file)\n\n self.results_file = ''.join(\n [self.results_dir, os.sep, self.time_stamp, '.results']\n )\n self.logger.debug('Created results file %s' % self.results_file)\n\n # Add log file handler\n file_handler = logging.FileHandler(self.log_file)\n file_handler.setLevel(logging.DEBUG)\n file_handler.setFormatter(logging.Formatter('\\n%(message)s\\n'))\n self.logger.addHandler(file_handler)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngathers basic info about VM", "response": "def _collect_vm_info(self):\n \"\"\"\n Gather basic info about VM\n \"\"\"\n self.logger.info('Collecting basic info about VM')\n client = self._get_ssh_client()\n\n out = self.distro.get_vm_info(client)\n self._write_to_log(out)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _update_history(self):\n ipa_utils.update_history_log(\n self.history_log,\n description=self.description,\n test_log=self.log_file\n )", "response": 
"Save the current test information to history json."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nwait until instance is in given state.", "response": "def _wait_on_instance(self, state, timeout=600, wait_period=10):\n \"\"\"Wait until instance is in given state.\"\"\"\n current_state = 'Undefined'\n start = time.time()\n end = start + timeout\n\n while time.time() < end:\n current_state = self._get_instance_state()\n\n if state.lower() == current_state.lower():\n return\n\n time.sleep(wait_period)\n\n raise IpaCloudException(\n 'Instance has not arrived at the given state: {state}'.format(\n state=state\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nexecutes the provided command and log output.", "response": "def execute_ssh_command(self, client, command):\n \"\"\"Execute the provided command and log output.\"\"\"\n try:\n out = ipa_utils.execute_ssh_command(client, command)\n except Exception as error:\n raise IpaCloudException(\n 'Command: \"{0}\", failed execution: {1}.'.format(\n command, error\n )\n )\n else:\n self._write_to_log(out)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nextract the archive files using the client in the current path.", "response": "def extract_archive(self, client, archive_path, extract_path=None):\n \"\"\"Extract the archive files using the client in the current path.\"\"\"\n try:\n out = ipa_utils.extract_archive(client, archive_path, extract_path)\n except Exception as error:\n raise IpaCloudException(\n 'Failed to extract archive, \"{0}\": {1}.'.format(\n archive_path, error\n )\n )\n\n else:\n self._write_to_log(out)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef hard_reboot_instance(self):\n self._stop_instance()\n self._start_instance()\n self._set_instance_ip()\n self.logger.debug('IP of instance: %s' % self.instance_ip)\n 
ipa_utils.clear_cache()", "response": "Stop then start the instance."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninstalling package using distro specific install method.", "response": "def install_package(self, client, package):\n \"\"\"\n Install package using distro specific install method.\n \"\"\"\n try:\n out = self.distro.install_package(client, package)\n except Exception as error:\n raise IpaCloudException(\n 'Failed installing package, \"{0}\"; {1}.'.format(\n package, error\n )\n )\n else:\n self._write_to_log(out)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef process_injection_file(self, client):\n configuration = ipa_utils.get_yaml_config(self.inject)\n\n if configuration.get('inject_packages'):\n inject_packages = configuration['inject_packages']\n\n if not isinstance(inject_packages, list):\n inject_packages = [inject_packages]\n\n for package in inject_packages:\n package_path = self.put_file(client, package)\n self.install_package(client, package_path)\n\n if configuration.get('inject_archives'):\n inject_archives = configuration['inject_archives']\n\n if not isinstance(inject_archives, list):\n inject_archives = [inject_archives]\n\n for archive in inject_archives:\n archive_path = self.put_file(client, archive)\n self.extract_archive(client, archive_path)\n\n if configuration.get('inject_files'):\n inject_files = configuration['inject_files']\n\n if not isinstance(inject_files, list):\n inject_files = [inject_files]\n\n for file_path in inject_files:\n self.put_file(client, file_path)\n\n if configuration.get('execute'):\n execute = configuration['execute']\n\n if not isinstance(execute, list):\n execute = [execute]\n\n for command in execute:\n self.execute_ssh_command(client, command)\n\n if configuration.get('install'):\n install = configuration['install']\n\n if not isinstance(install, list):\n install = [install]\n\n for package in install:\n 
self.install_package(client, package)", "response": "Loads the injection file and processes it."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef put_file(self, client, source_file):\n try:\n file_name = os.path.basename(source_file)\n ipa_utils.put_file(client, source_file, file_name)\n except Exception as error:\n raise IpaCloudException(\n 'Failed copying file, \"{0}\"; {1}.'.format(\n source_file, error\n )\n )\n else:\n return file_name", "response": "Put file on instance in default SSH directory."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef build_transgenic_lines(self):\n\n triples = []\n for cell_line in self.neuron_data:\n for tl in cell_line['donor']['transgenic_lines']:\n _id = tl['stock_number'] if tl['stock_number'] else tl['id']\n prefix = tl['transgenic_line_source_name']\n line_type = tl['transgenic_line_type_name']\n if prefix not in ['JAX', 'MMRRC', 'AIBS']:\n print(tc.red('WARNING:'), 'unknown prefix', prefix, json.dumps(tl, indent=4))\n continue\n elif prefix == 'AIBS':\n prefix = 'AllenTL'\n\n _class = self.ns[prefix][str(_id)]\n triples.append((_class, rdf.type, owl.Class))\n triples.append((_class, rdfs.label, rdflib.Literal(tl['name'])))\n triples.append((_class, definition, rdflib.Literal(tl['description'])))\n triples.append((_class, rdfs.subClassOf, ilxtr.transgenicLine))\n triples.append((_class, ilxtr.hasTransgenicType, ilxtr[line_type + 'Line']))\n\n # TODO aspects.ttl?\n transgenic_lines = simpleOnt(filename='allen-transgenic-lines',\n path='ttl/generated/',\n prefixes=self.prefixes,\n triples=triples,\n comment='Allen transgenic lines for cell types',\n branch=self.branch)\n\n transgenic_lines._graph.write()", "response": "Build the Transgenic Lines for allen cell types."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndecodes the response from 
the ILX server into a list of IlX IDs and resource IDs.", "response": "def decodeIlxResp(resp):\n \"\"\" We need this until we can get json back directly and this is SUPER nasty\"\"\"\n lines = [_ for _ in resp.text.split('\\n') if _] # strip empties\n\n if 'successfull' in lines[0]:\n return [(_.split('\"')[1],\n ilxIdFix(_.split(': ')[-1]))\n for _ in lines[1:]]\n elif 'errors' in lines[0]:\n return [(_.split('\"')[1],\n ilxIdFix(_.split('(')[1].split(')')[0]))\n for _ in lines[1:]]"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef getSubOrder(existing):\n alpha = list(zip(*sorted(((k, v['rec']['label']) for k, v in existing.items()), key=lambda a: a[1])))[0]\n depths = {}\n def getDepth(id_):\n if id_ in depths:\n return depths[id_]\n else:\n if id_ in existing:\n names_above = getDepth(existing[id_]['sc'])\n depths[id_] = names_above + [existing[id_]['rec']['label']]\n return depths[id_]\n else:\n return ['']\n\n for id_ in existing:\n getDepth(id_)\n\n print(sorted(depths.values()))\n\n def key_(id_):\n return depths[id_]\n\n return sorted(depths, key=key_)", "response": "Alpha sort by the full chain of parents."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nclean the string to remove odd noise", "response": "def clean(string):\n ''' Begining of the string can sometimes have odd noise '''\n # manual fixes in the source\n # 24/1.png\n # 9/1.png\n # 3/1.png\n #if ')' in string: # fix in the source data\n #string = string.split(')')[0] + ')' # remove trailing garbage\n return (string\n .replace('_', '')\n .replace('-', '')\n .replace('\u2014', '')\n .replace('.', '')\n .replace('=', '')\n .replace('\\u2018',\"'\") # LEFT SINGLE QUOTATION MARK\n .replace('\\u2019', \"'\") # RIGHT SINGLE QUOTATION MARK\n .strip())"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef loadData(cls):\n \"\"\" Have to run this out here because 
resSource is handicapped \"\"\"\n data = []\n if cls.source_images.exists():\n for folder in cls.source_images.glob('*'):\n plate_num = int(folder.stem)\n text_file = cls.source / f'{plate_num}.txt'\n if not text_file.exists() or cls.run_ocr:\n legends = []\n raw_text = ''\n for img in folder.glob('*.png'):\n print('num', plate_num, img.stem)\n p = subprocess.Popen(('tesseract',\n img.as_posix(),\n 'stdout', '-l', 'eng', '--oem', '2', '--psm', '6'),\n stdout=subprocess.PIPE)\n bytes_text, err = p.communicate()\n raw_text += bytes_text.decode() + '\\n'\n\n with open(text_file, 'wt') as f:\n f.write(raw_text)\n else:\n with open(text_file, 'rt') as f:\n raw_text = f.read()\n\n legends = get_legends(raw_text)\n data.append((plate_num, legends))\n\n elif cls.source.exists():\n for text_file in cls.source.glob('*.txt'):\n plate_num = int(text_file.stem)\n with open(text_file, 'rt') as f:\n raw_text = f.read()\n\n legends = get_legends(raw_text)\n data.append((plate_num, legends))\n\n return data", "response": "This method reads the source and source files and returns a list of tuples each containing the plate number and legends."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef ilx_conv(graph, prefix, ilx_start):\n to_sub = set()\n for subject in graph.subjects(rdflib.RDF.type, rdflib.OWL.Class):\n if PREFIXES[prefix] in subject:\n to_sub.add(subject)\n\n ilx_base = 'ilx_{:0>7}'\n ILX_base = 'ILX:{:0>7}' # ah rdflib/owlapi, you infuriate me\n\n ilx_labels = {}\n replace = {}\n for sub in sorted(to_sub):\n ilx_format = ilx_base.format(ilx_start)\n ILX_format = ILX_base.format(ilx_start)\n ilx_start += 1\n prefix, url, suffix = graph.namespace_manager.compute_qname(sub)\n curie = prefix + ':' + suffix\n replace[curie] = ILX_format\n label = [_ for _ in graph.objects(sub, rdflib.RDFS.label)][0]\n ilx_labels[ilx_format] = label\n new_sub = expand('ilx:' + ilx_format)\n for p, o in 
graph.predicate_objects(sub):\n graph.remove((sub, p, o))\n graph.add((new_sub, p, o))\n\n for s, p in graph.subject_predicates(sub):\n graph.remove((s, p, sub))\n graph.add((s, p, new_sub))\n\n\n return ilx_labels, replace", "response": "converts a set of temporary identifiers to ilx and modify the graph in place"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning true if there is an owl. Class with the old id", "response": "def alreadyHasEntry(oldClassString, og):\n \"\"\" Return true if there is already an owl:Class with the old id\"\"\"\n namespace = oldClassString.split(':')[0]\n if namespace == 'http':\n target = rdflib.URIRef(oldClassString)\n print('OLD CLASS ID IS A URL', oldClassString)\n else:\n try:\n og.add_known_namespaces(namespace)\n target = og.expand(oldClassString)\n except KeyError:\n print('MISSING NAMESPACE', namespace, oldClassString)\n return True # we only want known namespaces\n return (target, rdf.type, owl.Class) in og.g"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef config(remote_base= 'https://raw.githubusercontent.com/SciCrunch/NIF-Ontology/',\n local_base= None, # devconfig.ontology_local_repo by default\n branch= devconfig.neurons_branch,\n core_graph_paths= ['ttl/phenotype-core.ttl',\n 'ttl/phenotypes.ttl'],\n core_graph= None,\n in_graph_paths= tuple(),\n out_graph_path= '/tmp/_Neurons.ttl',\n out_imports= ['ttl/phenotype-core.ttl'],\n out_graph= None,\n prefixes= tuple(),\n force_remote= False,\n checkout_ok= ont_checkout_ok,\n scigraph= None, # defaults to devconfig.scigraph_api\n iri= None,\n sources= tuple(),\n source_file= None,\n use_local_import_paths=True,\n ignore_existing= True):\n \"\"\" Wraps graphBase.configGraphIO to provide a set of sane defaults\n for input ontologies and output files. 
\"\"\"\n graphBase.configGraphIO(remote_base=remote_base,\n local_base=local_base,\n branch=branch,\n core_graph_paths=core_graph_paths,\n core_graph=core_graph,\n in_graph_paths=in_graph_paths,\n out_graph_path=out_graph_path,\n out_imports=out_imports,\n out_graph=out_graph,\n prefixes=prefixes,\n force_remote=force_remote,\n checkout_ok=checkout_ok,\n scigraph=scigraph,\n iri=iri,\n sources=sources,\n source_file=source_file,\n use_local_import_paths=use_local_import_paths,\n ignore_existing=ignore_existing)\n\n pred = graphBase._predicates\n return pred", "response": "A function that creates a set of sane defaults for the input ontologies and output files."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding the versionIRI to the given epoch.", "response": "def add_version_iri(graph, epoch):\n \"\"\" Also remove the previous versionIRI if there was one.\"\"\"\n for ont in graph.subjects(rdf.type, owl.Ontology):\n for versionIRI in graph.objects(ont, owl.versionIRI):\n graph.remove((ont, owl.versionIRI, versionIRI))\n t = ont, owl.versionIRI, make_version_iri_from_iri(ont, epoch)\n graph.add(t)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn credentials for current Bitbucket user.", "response": "def auth(self):\n \"\"\" Return credentials for current Bitbucket user. 
\"\"\"\n if self.oauth:\n return self.oauth\n return (self.username, self.password)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncalling this with your consumer key, secret and callback URL, to generate a token for verification.", "response": "def authorize(self, consumer_key, consumer_secret, callback_url=None,\n access_token=None, access_token_secret=None):\n \"\"\"\n Call this with your consumer key, secret and callback URL, to\n generate a token for verification.\n \"\"\"\n self.consumer_key = consumer_key\n self.consumer_secret = consumer_secret\n\n if not access_token and not access_token_secret:\n if not callback_url:\n return (False, \"Callback URL required\")\n oauth = OAuth1(\n consumer_key,\n client_secret=consumer_secret,\n callback_uri=callback_url)\n r = requests.post(self.url('REQUEST_TOKEN'), auth=oauth)\n if r.status_code == 200:\n creds = parse_qs(r.content)\n\n self.access_token = creds.get('oauth_token')[0]\n self.access_token_secret = creds.get('oauth_token_secret')[0]\n else:\n return (False, r.content)\n else:\n self.finalize_oauth(access_token, access_token_secret)\n\n return (True, None)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nverifying the token and return the response.", "response": "def verify(self, verifier, consumer_key=None, consumer_secret=None,\n access_token=None, access_token_secret=None):\n \"\"\"\n After converting the token into verifier, call this to finalize the\n authorization.\n \"\"\"\n #\u00a0Stored values can be supplied to verify\n self.consumer_key = consumer_key or self.consumer_key\n self.consumer_secret = consumer_secret or self.consumer_secret\n self.access_token = access_token or self.access_token\n self.access_token_secret = access_token_secret or self.access_token_secret\n\n oauth = OAuth1(\n self.consumer_key,\n client_secret=self.consumer_secret,\n resource_owner_key=self.access_token,\n 
resource_owner_secret=self.access_token_secret,\n verifier=verifier)\n r = requests.post(self.url('ACCESS_TOKEN'), auth=oauth)\n if r.status_code == 200:\n creds = parse_qs(r.content)\n else:\n return (False, r.content)\n\n self.finalize_oauth(creds.get('oauth_token')[0],\n creds.get('oauth_token_secret')[0])\n return (True, None)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nfinalize the OAuth object.", "response": "def finalize_oauth(self, access_token, access_token_secret):\n \"\"\" Called internally once auth process is complete. \"\"\"\n self.access_token = access_token\n self.access_token_secret = access_token_secret\n\n # Final OAuth object\n self.oauth = OAuth1(\n self.consumer_key,\n client_secret=self.consumer_secret,\n resource_owner_key=self.access_token,\n resource_owner_secret=self.access_token_secret)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsending HTTP request to the given URL and return the success and the result on success.", "response": "def dispatch(self, method, url, auth=None, params=None, **kwargs):\n \"\"\" Send HTTP request, with given method,\n credentials and data to the given URL,\n and return the success and the result on success.\n \"\"\"\n r = Request(\n method=method,\n url=url,\n auth=auth,\n params=params,\n data=kwargs)\n s = Session()\n resp = s.send(r.prepare())\n status = resp.status_code\n text = resp.text\n error = resp.reason\n if status >= 200 and status < 300:\n if text:\n try:\n return (True, json.loads(text))\n except TypeError:\n pass\n except ValueError:\n pass\n return (True, text)\n elif status >= 300 and status < 400:\n return (\n False,\n 'Unauthorized access, '\n 'please check your credentials.')\n elif status >= 400 and status < 500:\n return (False, 'Service not found.')\n elif status >= 500 and status < 600:\n return (False, 'Server error.')\n else:\n return (False, error)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 
function for\nconstructing and return the URL for a specific API service.", "response": "def url(self, action, **kwargs):\n \"\"\" Construct and return the URL for a specific API service. \"\"\"\n # TODO : should be static method ?\n return self.URLS['BASE'] % self.URLS[action] % kwargs"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning user informations. If username is not defined, tries to return own informations.", "response": "def get_user(self, username=None):\n \"\"\" Returns user informations.\n If username is not defined, tries to return own informations.\n \"\"\"\n username = username or self.username or ''\n url = self.url('GET_USER', username=username)\n response = self.dispatch('GET', url)\n try:\n return (response[0], response[1]['user'])\n except TypeError:\n pass\n return response"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets a single repository on Bitbucket and return its tags.", "response": "def get_tags(self, repo_slug=None):\n \"\"\" Get a single repository on Bitbucket and return its tags.\"\"\"\n repo_slug = repo_slug or self.repo_slug or ''\n url = self.url('GET_TAGS', username=self.username, repo_slug=repo_slug)\n return self.dispatch('GET', url, auth=self.auth)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_branches(self, repo_slug=None):\n repo_slug = repo_slug or self.repo_slug or ''\n url = self.url('GET_BRANCHES',\n username=self.username,\n repo_slug=repo_slug)\n return self.dispatch('GET', url, auth=self.auth)", "response": "Get a single repository on Bitbucket and return its branches."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_privileges(self):\n url = self.url('GET_USER_PRIVILEGES')\n return self.dispatch('GET', url, auth=self.auth)", "response": "Get all the privledges for this user."} {"SOURCE": 
"codesearchnet", "instruction": "Implement a Python 3 function for\nassociating an ssh key with your repo and return it.", "response": "def create(self, repo_slug=None, key=None, label=None):\n \"\"\" Associate an ssh key with your repo and return it.\n \"\"\"\n key = '%s' % key\n repo_slug = repo_slug or self.bitbucket.repo_slug or ''\n url = self.bitbucket.url('SET_DEPLOY_KEY',\n username=self.bitbucket.username,\n repo_slug=repo_slug)\n return self.bitbucket.dispatch('POST',\n url,\n auth=self.bitbucket.auth,\n key=key,\n label=label)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef public(self, username=None):\n username = username or self.bitbucket.username or ''\n url = self.bitbucket.url('GET_USER', username=username)\n response = self.bitbucket.dispatch('GET', url)\n try:\n return (response[0], response[1]['repositories'])\n except TypeError:\n pass\n return response", "response": "Returns all public repositories from an user."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(self, repo_name, scm='git', private=True, **kwargs):\n url = self.bitbucket.url('CREATE_REPO')\n return self.bitbucket.dispatch('POST', url, auth=self.bitbucket.auth, name=repo_name, scm=scm, is_private=private, **kwargs)", "response": "Creates a new repository on own Bitbucket account and returns it."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget one of your repositories and compress it as an archive.", "response": "def archive(self, repo_slug=None, format='zip', prefix=''):\n \"\"\" Get one of your repositories and compress it as an archive.\n Return the path of the archive.\n\n format parameter is curently not supported.\n \"\"\"\n prefix = '%s'.lstrip('/') % prefix\n self._get_files_in_dir(repo_slug=repo_slug, dir='/')\n if self.bitbucket.repo_tree:\n with NamedTemporaryFile(delete=False) as archive:\n with ZipFile(archive, 'w') as 
zip_archive:\n for name, file in self.bitbucket.repo_tree.items():\n with NamedTemporaryFile(delete=False) as temp_file:\n temp_file.write(file.encode('utf-8'))\n zip_archive.write(temp_file.name, prefix + name)\n return (True, archive.name)\n return (False, 'Could not archive your project.')"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncreating a new issue comment.", "response": "def create(self, issue_id=None, repo_slug=None, **kwargs):\n \"\"\" Add an issue comment to one of your repositories.\n Each issue comment require only the content data field\n the system autopopulate the rest.\n \"\"\"\n issue_id = issue_id or self.issue_id\n repo_slug = repo_slug or self.bitbucket.repo_slug or ''\n url = self.bitbucket.url('CREATE_COMMENT',\n username=self.bitbucket.username,\n repo_slug=repo_slug,\n issue_id=issue_id)\n return self.bitbucket.dispatch('POST', url, auth=self.bitbucket.auth, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef delete(self, comment_id, issue_id=None, repo_slug=None):\n issue_id = issue_id or self.issue_id\n repo_slug = repo_slug or self.bitbucket.repo_slug or ''\n url = self.bitbucket.url('DELETE_COMMENT',\n username=self.bitbucket.username,\n repo_slug=repo_slug,\n issue_id=issue_id,\n comment_id=comment_id)\n return self.bitbucket.dispatch('DELETE', url, auth=self.bitbucket.auth)", "response": "Delete an issue from one of your repositories."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget all SSH keys associated with your account.", "response": "def all(self):\n \"\"\" Get all ssh keys associated with your account.\n \"\"\"\n url = self.bitbucket.url('GET_SSH_KEYS')\n return self.bitbucket.dispatch('GET', url, auth=self.bitbucket.auth)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get(self, key_id=None):\n url = 
self.bitbucket.url('GET_SSH_KEY', key_id=key_id)\n return self.bitbucket.dispatch('GET', url, auth=self.bitbucket.auth)", "response": "Get one of the ssh keys associated with your account."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef create(self, key=None, label=None):\n key = '%s' % key\n url = self.bitbucket.url('SET_SSH_KEY')\n return self.bitbucket.dispatch('POST', url, auth=self.bitbucket.auth, key=key, label=label)", "response": "Associate an ssh key with your account and return it."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef delete(self, key_id=None):\n url = self.bitbucket.url('DELETE_SSH_KEY', key_id=key_id)\n return self.bitbucket.dispatch('DELETE', url, auth=self.bitbucket.auth)", "response": "Delete one of the ssh keys associated with your account."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nemulate unix which command.", "response": "def which(program):\n '''\n Emulate unix 'which' command. If program is a path to an executable file\n (i.e. it contains any directory components, like './myscript'), return\n program. 
Otherwise, if an executable file matching program is found in one\n of the directories in the PATH environment variable, return the first match\n found.\n\n On Windows, if PATHEXT is defined and program does not include an\n extension, include the extensions in PATHEXT when searching for a matching\n executable file.\n\n Return None if no executable file is found.\n\n http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python/377028#377028\n https://github.com/webcoyote/vagrant/blob/f70507062e3b30c00db1f0d8b90f9245c4c997d4/lib/vagrant/util/file_util.rb\n Python3.3+ implementation:\n https://hg.python.org/cpython/file/default/Lib/shutil.py\n '''\n def is_exe(fpath):\n return os.path.isfile(fpath) and os.access(fpath, os.X_OK)\n\n # Shortcut: If program contains any dir components, do not search the path\n # e.g. './backup', '/bin/ls'\n if os.path.dirname(program):\n if is_exe(program):\n return program\n else:\n return None\n\n # Are we on windows?\n # http://stackoverflow.com/questions/1325581/how-do-i-check-if-im-running-on-windows-in-python\n windows = (os.name == 'nt')\n # Or cygwin?\n # https://docs.python.org/2/library/sys.html#sys.platform\n cygwin = sys.platform.startswith('cygwin')\n\n # Paths: a list of directories\n path_str = os.environ.get('PATH', os.defpath)\n if not path_str:\n paths = []\n else:\n paths = path_str.split(os.pathsep)\n # The current directory takes precedence on Windows.\n if windows:\n paths.insert(0, os.curdir)\n\n # Only search PATH if there is one to search.\n if not paths:\n return None\n\n # Files: add any necessary extensions to program\n # On cygwin and non-windows systems do not add extensions when searching\n # for the executable\n if cygwin or not windows:\n files = [program]\n else:\n # windows path extensions in PATHEXT.\n # e.g. ['.EXE', '.CMD', '.BAT']\n # http://environmentvariables.org/PathExt\n # This might not properly use extensions that have been \"registered\" in\n # Windows. 
In the future it might make sense to use one of the many\n # \"which\" packages on PyPI.\n exts = os.environ.get('PATHEXT', '').split(os.pathsep)\n\n # if the program ends with one of the extensions, only test that one.\n # otherwise test all the extensions.\n matching_exts = [ext for ext in exts if\n program.lower().endswith(ext.lower())]\n if matching_exts:\n files = [program + ext for ext in matching_exts]\n else:\n files = [program + ext for ext in exts]\n\n # Check each combination of path, program, and extension, returning\n # the first combination that exists and is executable.\n for path in paths:\n for f in files:\n fpath = os.path.normcase(os.path.join(path, f))\n if is_exe(fpath):\n return fpath\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a contextmanager that opens a file for appending and yield the open filehandle after yielding it.", "response": "def make_file_cm(filename, mode='a'):\n '''\n Open a file for appending and yield the open filehandle. Close the \n filehandle after yielding it. This is useful for creating a context\n manager for logging the output of a `Vagrant` instance.\n\n filename: a path to a file\n mode: The mode in which to open the file. Defaults to 'a', append\n\n Usage example:\n\n log_cm = make_file_cm('application.log')\n v = Vagrant(out_cm=log_cm, err_cm=log_cm)\n '''\n @contextlib.contextmanager\n def cm():\n with open(filename, mode=mode) as fh:\n yield fh\n\n return cm"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef version(self):\n '''\n Return the installed vagrant version, as a string, e.g. '1.5.0'\n '''\n output = self._run_vagrant_command(['--version'])\n m = re.search(r'^Vagrant (?P.+)$', output)\n if m is None:\n raise Exception('Failed to parse vagrant --version output. 
output={!r}'.format(output))\n return m.group('version')", "response": "Return the installed vagrant version as a string e. g. 1. 5. 0"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef up(self, no_provision=False, provider=None, vm_name=None,\n provision=None, provision_with=None, stream_output=False):\n '''\n Invoke `vagrant up` to start a box or boxes, possibly streaming the\n command output.\n vm_name=None: name of VM.\n provision_with: optional list of provisioners to enable.\n provider: Back the machine with a specific provider\n no_provision: if True, disable provisioning. Same as 'provision=False'.\n provision: optional boolean. Enable or disable provisioning. Default\n behavior is to use the underlying vagrant default.\n stream_output: if True, return a generator that yields each line of the\n output of running the command. Consume the generator or the\n subprocess might hang. if False, None is returned and the command\n is run to completion without streaming the output. 
Defaults to\n False.\n Note: If provision and no_provision are not None, no_provision will be\n ignored.\n returns: None or a generator yielding lines of output.\n '''\n provider_arg = '--provider=%s' % provider if provider else None\n prov_with_arg = None if provision_with is None else '--provision-with'\n providers_arg = None if provision_with is None else ','.join(provision_with)\n\n # For the sake of backward compatibility, no_provision is allowed.\n # However it is ignored if provision is set.\n if provision is not None:\n no_provision = None\n no_provision_arg = '--no-provision' if no_provision else None\n provision_arg = None if provision is None else '--provision' if provision else '--no-provision'\n\n args = ['up', vm_name, no_provision_arg, provision_arg, provider_arg, prov_with_arg, providers_arg]\n if stream_output:\n generator = self._stream_vagrant_command(args)\n else:\n self._call_vagrant_command(args)\n\n self._cached_conf[vm_name] = None # remove cached configuration\n return generator if stream_output else None", "response": "Start a box or boxes with the specified provider."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nrunning the provisioners defined in the Vagrantfile.", "response": "def provision(self, vm_name=None, provision_with=None):\n '''\n Runs the provisioners defined in the Vagrantfile.\n vm_name: optional VM name string.\n provision_with: optional list of provisioners to enable.\n e.g. 
['shell', 'chef_solo']\n '''\n prov_with_arg = None if provision_with is None else '--provision-with'\n providers_arg = None if provision_with is None else ','.join(provision_with)\n self._call_vagrant_command(['provision', vm_name, prov_with_arg,\n providers_arg])"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreloads the configuration of a Vagrantfile.", "response": "def reload(self, vm_name=None, provision=None, provision_with=None,\n stream_output=False):\n '''\n Quoting from Vagrant docs:\n > The equivalent of running a halt followed by an up.\n > This command is usually required for changes made in the Vagrantfile\n to take effect. After making any modifications to the Vagrantfile, a\n reload should be called.\n > The configured provisioners will not run again, by default. You can\n force the provisioners to re-run by specifying the --provision flag.\n\n provision: optional boolean. Enable or disable provisioning. Default\n behavior is to use the underlying vagrant default.\n provision_with: optional list of provisioners to enable.\n e.g. ['shell', 'chef_solo']\n stream_output: if True, return a generator that yields each line of the\n output of running the command. Consume the generator or the\n subprocess might hang. if False, None is returned and the command\n is run to completion without streaming the output. 
Defaults to\n False.\n returns: None or a generator yielding lines of output.\n '''\n prov_with_arg = None if provision_with is None else '--provision-with'\n providers_arg = None if provision_with is None else ','.join(provision_with)\n provision_arg = None if provision is None else '--provision' if provision else '--no-provision'\n\n args = ['reload', vm_name, provision_arg, prov_with_arg, providers_arg]\n if stream_output:\n generator = self._stream_vagrant_command(args)\n else:\n self._call_vagrant_command(args)\n\n self._cached_conf[vm_name] = None # remove cached configuration\n return generator if stream_output else None"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef halt(self, vm_name=None, force=False):\n '''\n Halt the Vagrant box.\n\n force: If True, force shut down.\n '''\n force_opt = '--force' if force else None\n self._call_vagrant_command(['halt', vm_name, force_opt])\n self._cached_conf[vm_name] = None", "response": "Halt the Vagrant box."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef status(self, vm_name=None):\n '''\n Return the results of a `vagrant status` call as a list of one or more\n Status objects. A Status contains the following attributes:\n\n - name: The VM name in a multi-vm environment. 'default' otherwise.\n - state: The state of the underlying guest machine (i.e. VM).\n - provider: the name of the VM provider, e.g. 'virtualbox'. 
None\n if no provider is output by vagrant.\n\n Example return values for a multi-VM environment:\n\n [Status(name='web', state='not created', provider='virtualbox'),\n Status(name='db', state='not created', provider='virtualbox')]\n\n And for a single-VM environment:\n\n [Status(name='default', state='not created', provider='virtualbox')]\n\n Possible states include, but are not limited to (since new states are\n being added as Vagrant evolves):\n\n - 'not_created' if the vm is destroyed\n - 'running' if the vm is up\n - 'poweroff' if the vm is halted\n - 'saved' if the vm is suspended\n - 'aborted' if the vm is aborted\n\n Implementation Details:\n\n This command uses the `--machine-readable` flag added in\n Vagrant 1.5, mapping the target name, state, and provider-name\n to a Status object.\n\n Example with no VM name and multi-vm Vagrantfile:\n\n $ vagrant status --machine-readable\n 1424098924,web,provider-name,virtualbox\n 1424098924,web,state,running\n 1424098924,web,state-human-short,running\n 1424098924,web,state-human-long,The VM is running. To stop this VM%!(VAGRANT_COMMA) you can run `vagrant halt` to\\nshut it down forcefully%!(VAGRANT_COMMA) or you can run `vagrant suspend` to simply\\nsuspend the virtual machine. In either case%!(VAGRANT_COMMA) to restart it again%!(VAGRANT_COMMA)\\nsimply run `vagrant up`.\n 1424098924,db,provider-name,virtualbox\n 1424098924,db,state,not_created\n 1424098924,db,state-human-short,not created\n 1424098924,db,state-human-long,The environment has not yet been created. Run `vagrant up` to\\ncreate the environment. If a machine is not created%!(VAGRANT_COMMA) only the\\ndefault provider will be shown. 
So if a provider is not listed%!(VAGRANT_COMMA)\\nthen the machine is not created for that environment.\n\n Example with VM name:\n\n $ vagrant status --machine-readable web\n 1424099027,web,provider-name,virtualbox\n 1424099027,web,state,running\n 1424099027,web,state-human-short,running\n 1424099027,web,state-human-long,The VM is running. To stop this VM%!(VAGRANT_COMMA) you can run `vagrant halt` to\\nshut it down forcefully%!(VAGRANT_COMMA) or you can run `vagrant suspend` to simply\\nsuspend the virtual machine. In either case%!(VAGRANT_COMMA) to restart it again%!(VAGRANT_COMMA)\\nsimply run `vagrant up`.\n\n Example with no VM name and single-vm Vagrantfile:\n\n $ vagrant status --machine-readable\n 1424100021,default,provider-name,virtualbox\n 1424100021,default,state,not_created\n 1424100021,default,state-human-short,not created\n 1424100021,default,state-human-long,The environment has not yet been created. Run `vagrant up` to\\ncreate the environment. If a machine is not created%!(VAGRANT_COMMA) only the\\ndefault provider will be shown. So if a provider is not listed%!(VAGRANT_COMMA)\\nthen the machine is not created for that environment.\n\n Error example with incorrect VM name:\n\n $ vagrant status --machine-readable api\n 1424099042,,error-exit,Vagrant::Errors::MachineNotFound,The machine with the name 'api' was not found configured for\\nthis Vagrant environment.\n\n Error example with missing Vagrantfile:\n\n $ vagrant status --machine-readable\n 1424099094,,error-exit,Vagrant::Errors::NoEnvironmentError,A Vagrant environment or target machine is required to run this\\ncommand. Run `vagrant init` to create a new Vagrant environment. Or%!(VAGRANT_COMMA)\\nget an ID of a target machine from `vagrant global-status` to run\\nthis command on. 
A final option is to change to a directory with a\\nVagrantfile and to try again.\n '''\n # machine-readable output are CSV lines\n output = self._run_vagrant_command(['status', '--machine-readable', vm_name])\n return self._parse_status(output)", "response": "Return the results of a vagrant status call."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _parse_status(self, output):\n '''\n Unit testing is so much easier when Vagrant is removed from the\n equation.\n '''\n parsed = self._parse_machine_readable_output(output)\n statuses = []\n # group tuples by target name\n # assuming tuples are sorted by target name, this should group all\n # the tuples with info for each target.\n for target, tuples in itertools.groupby(parsed, lambda tup: tup[1]):\n # transform tuples into a dict mapping \"type\" to \"data\"\n info = {kind: data for timestamp, _, kind, data in tuples}\n status = Status(name=target, state=info.get('state'),\n provider=info.get('provider-name'))\n statuses.append(status)\n\n return statuses", "response": "Parse the status output from the Vagrant status command."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef conf(self, ssh_config=None, vm_name=None):\n '''\n Parse ssh_config into a dict containing the keys defined in ssh_config,\n which should include these keys (listed with example values): 'User'\n (e.g. 'vagrant'), 'HostName' (e.g. 'localhost'), 'Port' (e.g. '2222'),\n 'IdentityFile' (e.g. '/home/todd/.ssh/id_dsa'). Cache the parsed\n configuration dict. Return the dict.\n\n If ssh_config is not given, return the cached dict. If there is no\n cached configuration, call ssh_config() to get the configuration, then\n parse, cache, and return the config dict. Calling ssh_config() raises\n an Exception if the Vagrant box has not yet been created or has been\n destroyed.\n\n vm_name: required in a Multi-VM Vagrant environment. 
This name will be\n used to get the configuration for the named vm and associate the config\n with the vm name in the cache.\n\n ssh_config: a valid ssh confige file host section. Defaults to\n the value returned from ssh_config(). For speed, the configuration\n parsed from ssh_config is cached for subsequent calls.\n '''\n if self._cached_conf.get(vm_name) is None or ssh_config is not None:\n if ssh_config is None:\n ssh_config = self.ssh_config(vm_name=vm_name)\n conf = self._parse_config(ssh_config)\n self._cached_conf[vm_name] = conf\n\n return self._cached_conf[vm_name]", "response": "Parse the ssh config into a dictionary containing the keys defined in ssh_config and the values defined in vm_name."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef user_hostname(self, vm_name=None):\n '''\n Return a string combining user and hostname, e.g. 'vagrant@127.0.0.1'.\n This string is suitable for use in an ssh commmand. If user is None\n or empty, it will be left out of the string, e.g. 'localhost'. If\n hostname is None, have bigger problems.\n\n Raises an Exception if the Vagrant box has not yet been created or\n has been destroyed.\n '''\n user = self.user(vm_name=vm_name)\n user_prefix = user + '@' if user else ''\n return user_prefix + self.hostname(vm_name=vm_name)", "response": "Return a string combining user and hostname e. g. vagrant@127. 0. 1."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef user_hostname_port(self, vm_name=None):\n '''\n Return a string combining user, hostname and port, e.g.\n 'vagrant@127.0.0.1:2222'. This string is suitable for use with Fabric,\n in env.hosts. If user or port is None or empty, they will be left\n out of the string. E.g. 'vagrant@localhost', or 'localhost:2222' or\n 'localhost'. 
If hostname is None, you have bigger problems.\n\n\n Raises an Exception if the Vagrant box has not yet been created or\n has been destroyed.\n '''\n user = self.user(vm_name=vm_name)\n port = self.port(vm_name=vm_name)\n user_prefix = user + '@' if user else ''\n port_suffix = ':' + port if port else ''\n return user_prefix + self.hostname(vm_name=vm_name) + port_suffix", "response": "Return a string combining user hostname and port."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a box with given name from given url.", "response": "def box_add(self, name, url, provider=None, force=False):\n '''\n Adds a box with given name, from given url.\n\n force: If True, overwrite an existing box if it exists.\n '''\n force_opt = '--force' if force else None\n cmd = ['box', 'add', name, url, force_opt]\n if provider is not None:\n cmd += ['--provider', provider]\n\n self._call_vagrant_command(cmd)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef package(self, vm_name=None, base=None, output=None, vagrantfile=None):\n '''\n Packages a running vagrant environment into a box.\n\n vm_name=None: name of VM.\n base=None: name of a VM in virtualbox to package as a base box\n output=None: name of the file to output\n vagrantfile=None: Vagrantfile to package with this box\n '''\n cmd = ['package', vm_name]\n if output is not None:\n cmd += ['--output', output]\n if vagrantfile is not None:\n cmd += ['--vagrantfile', vagrantfile]\n\n self._call_vagrant_command(cmd)", "response": "Packages a running vagrant environment into a virtualbox."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef snapshot_pop(self):\n '''\n This command is the inverse of vagrant snapshot push: it will restore the pushed state.\n '''\n NO_SNAPSHOTS_PUSHED = 'No pushed snapshot found!'\n output = self._run_vagrant_command(['snapshot', 'pop'])\n if 
NO_SNAPSHOTS_PUSHED in output:\n raise RuntimeError(NO_SNAPSHOTS_PUSHED)", "response": "This command will pop the pushed state of the current resource."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nexecuting a command via ssh on the vm specified.", "response": "def ssh(self, vm_name=None, command=None, extra_ssh_args=None):\n '''\n Execute a command via ssh on the vm specified.\n command: The command to execute via ssh.\n extra_ssh_args: Corresponds to '--' option in the vagrant ssh command\n Returns the output of running the command.\n '''\n cmd = ['ssh', vm_name, '--command', command]\n if extra_ssh_args is not None:\n cmd += ['--', extra_ssh_args]\n\n return self._run_vagrant_command(cmd)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nparse the output of the box list command.", "response": "def _parse_box_list(self, output):\n '''\n Remove Vagrant usage for unit testing\n '''\n # Parse box list output\n\n boxes = []\n # initialize box values\n name = provider = version = None\n for timestamp, target, kind, data in self._parse_machine_readable_output(output):\n if kind == 'box-name':\n # finish the previous box, if any\n if name is not None:\n boxes.append(Box(name=name, provider=provider, version=version))\n\n # start a new box\n name = data # box name\n provider = version = None\n elif kind == 'box-provider':\n provider = data\n elif kind == 'box-version':\n version = data\n\n # finish the previous box, if any\n if name is not None:\n boxes.append(Box(name=name, provider=provider, version=version))\n\n return boxes"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nparsing the output of the plugin list command.", "response": "def _parse_plugin_list(self, output):\n '''\n Remove Vagrant from the equation for unit testing.\n '''\n ENCODED_COMMA = '%!(VAGRANT_COMMA)'\n\n plugins = []\n # initialize plugin values\n name = None\n version = None\n system = False\n for 
timestamp, target, kind, data in self._parse_machine_readable_output(output):\n if kind == 'plugin-name':\n # finish the previous plugin, if any\n if name is not None:\n plugins.append(Plugin(name=name, version=version, system=system))\n\n # start a new plugin\n name = data # plugin name\n version = None\n system = False\n elif kind == 'plugin-version':\n if ENCODED_COMMA in data:\n version, etc = data.split(ENCODED_COMMA)\n system = (etc.strip().lower() == 'system')\n else:\n version = data\n system = False\n\n # finish the previous plugin, if any\n if name is not None:\n plugins.append(Plugin(name=name, version=version, system=system))\n\n return plugins"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nparse the output of the vagrant machine - readable command.", "response": "def _parse_machine_readable_output(self, output):\n '''\n param output: a string containing the output of a vagrant command with the `--machine-readable` option.\n\n returns: a dict mapping each 'target' in the machine readable output to\n a dict. The dict of each target, maps each target line type/kind to\n its data.\n\n Machine-readable output is a collection of CSV lines in the format:\n\n timestamp, target, kind, data\n\n Target is a VM name, possibly 'default', or ''. The empty string\n denotes information not specific to a particular VM, such as the\n results of `vagrant box list`.\n '''\n # each line is a tuple of (timestamp, target, type, data)\n # target is the VM name\n # type is the type of data, e.g. 'provider-name', 'box-version'\n # data is a (possibly comma separated) type-specific value, e.g. 
'virtualbox', '0'\n parsed_lines = [line.split(',', 4) for line in output.splitlines() if line.strip()]\n # vagrant 1.8 adds additional fields that aren't required,\n # and will break parsing if included in the status lines.\n # filter them out pending future implementation.\n parsed_lines = list(filter(lambda x: x[2] not in [\"metadata\", \"ui\", \"action\"], parsed_lines))\n return parsed_lines"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _parse_config(self, ssh_config):\n '''\n This lame parser does not parse the full grammar of an ssh config\n file. It makes assumptions that are (hopefully) correct for the output\n of `vagrant ssh-config [vm-name]`. Specifically it assumes that there\n is only one Host section, the default vagrant host. It assumes that\n the parameters of the ssh config are not changing.\n every line is of the form 'key value', where key is a single token\n without any whitespace and value is the remaining part of the line.\n Value may optionally be surrounded in double quotes. All leading and\n trailing whitespace is removed from key and value. Example lines:\n\n ' User vagrant\\n'\n ' IdentityFile \"/home/robert/.vagrant.d/insecure_private_key\"\\n'\n\n Lines with '#' as the first non-whitespace character are considered\n comments and ignored. Whitespace-only lines are ignored. This parser\n does NOT handle using an '=' in options. 
Values surrounded in double\n quotes will have the double quotes removed.\n\n See https://github.com/bitprophet/ssh/blob/master/ssh/config.py for a\n more compliant ssh config file parser.\n '''\n conf = dict()\n started_parsing = False\n for line in ssh_config.splitlines():\n if line.strip().startswith('Host ') and not started_parsing:\n started_parsing = True\n if not started_parsing or not line.strip() or line.strip().startswith('#'):\n continue\n key, value = line.strip().split(None, 1)\n # Remove leading and trailing \" from the values\n conf[key] = value.strip('\"')\n return conf", "response": "This lame parser does not parse the full grammar of an ssh config file. It does not parse the full grammar of an ssh config file."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nrunning a vagrant command. Return None.", "response": "def _call_vagrant_command(self, args):\n '''\n Run a vagrant command. Return None.\n args: A sequence of arguments to a vagrant command line.\n\n '''\n # Make subprocess command\n command = self._make_vagrant_command(args)\n with self.out_cm() as out_fh, self.err_cm() as err_fh:\n subprocess.check_call(command, cwd=self.root, stdout=out_fh,\n stderr=err_fh, env=self.env)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nrunning a vagrant command and return its stdout.", "response": "def _run_vagrant_command(self, args):\n '''\n Run a vagrant command and return its stdout.\n args: A sequence of arguments to a vagrant command line.\n e.g. 
['up', 'my_vm_name', '--no-provision'] or\n ['up', None, '--no-provision'] for a non-Multi-VM environment.\n '''\n # Make subprocess command\n command = self._make_vagrant_command(args)\n with self.err_cm() as err_fh:\n return compat.decode(subprocess.check_output(command, cwd=self.root,\n env=self.env, stderr=err_fh))"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nexecutes a vagrant command and return a generator of the output lines.", "response": "def _stream_vagrant_command(self, args):\n \"\"\"\n Execute a vagrant command, returning a generator of the output lines.\n Caller should consume the entire generator to avoid the hanging the\n subprocess.\n\n :param args: Arguments for the Vagrant command.\n :return: generator that yields each line of the command stdout.\n :rtype: generator iterator\n \"\"\"\n py3 = sys.version_info > (3, 0)\n\n # Make subprocess command\n command = self._make_vagrant_command(args)\n with self.err_cm() as err_fh:\n sp_args = dict(args=command, cwd=self.root, env=self.env,\n stdout=subprocess.PIPE, stderr=err_fh, bufsize=1)\n\n # Iterate over output lines.\n # See http://stackoverflow.com/questions/2715847/python-read-streaming-input-from-subprocess-communicate#17698359\n p = subprocess.Popen(**sp_args)\n with p.stdout:\n for line in iter(p.stdout.readline, b''):\n yield compat.decode(line) # if PY3 decode bytestrings\n p.wait()\n # Raise CalledProcessError for consistency with _call_vagrant_command\n if p.returncode != 0:\n raise subprocess.CalledProcessError(p.returncode, command)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef sandbox_status(self, vm_name=None):\n '''\n Returns the status of the sandbox mode.\n\n Possible values are:\n - on\n - off\n - unknown\n - not installed\n '''\n vagrant_sandbox_output = self._run_sandbox_command(['status', vm_name])\n return 
self._parse_vagrant_sandbox_status(vagrant_sandbox_output)", "response": "Returns the status of the sandbox mode."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _parse_vagrant_sandbox_status(self, vagrant_output):\n '''\n Returns the status of the sandbox mode given output from\n 'vagrant sandbox status'.\n '''\n # typical output\n # [default] - snapshot mode is off\n # or\n # [default] - machine not created\n # if the box VM is down\n tokens = [token.strip() for token in vagrant_output.split(' ')]\n if tokens[0] == 'Usage:':\n sahara_status = 'not installed'\n elif \"{} {}\".format(tokens[-2], tokens[-1]) == 'not created':\n sahara_status = 'unknown'\n else:\n sahara_status = tokens[-1]\n return sahara_status", "response": "Parses the vagrant sandbox status output from vagrant sandbox status."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nconverting JSON dicts back into objects.", "response": "def deserialize(obj):\n \"\"\"Convert JSON dicts back into objects.\"\"\"\n # Be careful of shallow copy here\n target = dict(obj)\n class_name = None\n if '__class__' in target:\n class_name = target.pop('__class__')\n if '__module__' in obj:\n obj.pop('__module__')\n # Use getattr(module, class_name) for custom types if needed\n if class_name == 'datetime':\n return datetime.datetime(tzinfo=utc, **target)\n if class_name == 'StreamingBody':\n return StringIO(target['body'])\n # Return unrecognized structures as-is\n return obj"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef serialize(obj):\n # Record class and module information for deserialization\n result = {'__class__': obj.__class__.__name__}\n try:\n result['__module__'] = obj.__module__\n except AttributeError:\n pass\n # Convert objects to dictionary representation based on type\n if isinstance(obj, datetime.datetime):\n result['year'] = obj.year\n result['month'] = obj.month\n 
result['day'] = obj.day\n result['hour'] = obj.hour\n result['minute'] = obj.minute\n result['second'] = obj.second\n result['microsecond'] = obj.microsecond\n return result\n if isinstance(obj, StreamingBody):\n result['body'] = obj.read()\n obj._raw_stream = StringIO(result['body'])\n obj._amount_read = 0\n return result\n # Raise a TypeError if the object isn't recognized\n raise TypeError(\"Type not serializable\")", "response": "Convert an object into a dictionary representation based on the object s type."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _serialize_json(obj, fp):\n json.dump(obj, fp, indent=4, default=serialize)", "response": "Serialize obj as a JSON formatted stream to fp."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_serializer(serializer_format):\n if serializer_format == Format.JSON:\n return _serialize_json\n if serializer_format == Format.PICKLE:\n return _serialize_pickle", "response": "Get the serializer for a specific format"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_deserializer(serializer_format):\n if serializer_format == Format.JSON:\n return _deserialize_json\n if serializer_format == Format.PICKLE:\n return _deserialize_pickle", "response": "Get the deserializer for a specific format"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsetting the logger to be debug.", "response": "def _set_logger(logger_name, level=logging.INFO):\n \"\"\"\n Convenience function to quickly configure full debug output\n to go to the console.\n \"\"\"\n log = logging.getLogger(logger_name)\n log.setLevel(level)\n\n ch = logging.StreamHandler(None)\n ch.setLevel(level)\n\n formatter = logging.Formatter(DebugFmtString)\n\n # add formatter to ch\n ch.setFormatter(formatter)\n\n # add ch to logger\n 
log.addHandler(ch)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef find_file_format(file_name):\n for file_format in Format.ALLOWED:\n file_path = '.'.join((file_name, file_format))\n if os.path.exists(file_path):\n return file_path, file_format\n return None, None", "response": "Find the file format and the file path and format of the given file name."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_next_file_path(self, service, operation):\n base_name = '{0}.{1}'.format(service, operation)\n if self.prefix:\n base_name = '{0}.{1}'.format(self.prefix, base_name)\n LOG.debug('get_next_file_path: %s', base_name)\n next_file = None\n serializer_format = None\n index = self._index.setdefault(base_name, 1)\n\n while not next_file:\n file_name = os.path.join(\n self._data_path, base_name + '_{0}'.format(index))\n next_file, serializer_format = self.find_file_format(file_name)\n if next_file:\n self._index[base_name] += 1\n elif index != 1:\n index = 1\n self._index[base_name] = 1\n else:\n raise IOError('response file ({0}.[{1}]) not found'.format(\n file_name, \"|\".join(Format.ALLOWED)))\n\n return next_file, serializer_format", "response": "Returns the next file to read and the serializer for the current entry."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef save_response(self, service, operation, response_data,\n http_response=200):\n \"\"\"\n Store a response to the data directory. The ``operation``\n should be the name of the operation in the service API (e.g.\n DescribeInstances), the ``response_data`` should a value you want\n to return from a placebo call and the ``http_response`` should be\n the HTTP status code returned from the service. 
You can add\n multiple responses for a given operation and they will be\n returned in order.\n \"\"\"\n LOG.debug('save_response: %s.%s', service, operation)\n filepath = self.get_new_file_path(service, operation)\n LOG.debug('save_response: path=%s', filepath)\n data = {'status_code': http_response,\n 'data': response_data}\n with open(filepath, Format.write_mode(self.record_format)) as fp:\n self._serializer(data, fp)", "response": "Save a response to the data directory."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nparsing and verifies an authentication request.", "response": "def parse_authentication_request(self, request_body, http_headers=None):\n # type: (str, Optional[Mapping[str, str]]) -> oic.oic.message.AuthorizationRequest\n \"\"\"\n Parses and verifies an authentication request.\n\n :param request_body: urlencoded authentication request\n :param http_headers: http headers\n \"\"\"\n\n auth_req = AuthorizationRequest().deserialize(request_body)\n\n for validator in self.authentication_request_validators:\n validator(auth_req)\n\n logger.debug('parsed authentication_request: %s', auth_req)\n return auth_req"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates an Authorization Response for the specified authentication request and local identifier of the authenticated user.", "response": "def authorize(self, authentication_request, # type: oic.oic.message.AuthorizationRequest\n user_id, # type: str\n extra_id_token_claims=None\n # type: Optional[Union[Mapping[str, Union[str, List[str]]], Callable[[str, str], Mapping[str, Union[str, List[str]]]]]\n ):\n # type: (...) 
-> oic.oic.message.AuthorizationResponse\n \"\"\"\n Creates an Authentication Response for the specified authentication request and local identifier of the\n authenticated user.\n \"\"\"\n custom_sub = self.userinfo[user_id].get('sub')\n if custom_sub:\n self.authz_state.subject_identifiers[user_id] = {'public': custom_sub}\n sub = custom_sub\n else:\n sub = self._create_subject_identifier(user_id, authentication_request['client_id'],\n authentication_request['redirect_uri'])\n\n self._check_subject_identifier_matches_requested(authentication_request, sub)\n response = AuthorizationResponse()\n\n authz_code = None\n if 'code' in authentication_request['response_type']:\n authz_code = self.authz_state.create_authorization_code(authentication_request, sub)\n response['code'] = authz_code\n\n access_token_value = None\n if 'token' in authentication_request['response_type']:\n access_token = self.authz_state.create_access_token(authentication_request, sub)\n access_token_value = access_token.value\n self._add_access_token_to_response(response, access_token)\n\n if 'id_token' in authentication_request['response_type']:\n if extra_id_token_claims is None:\n extra_id_token_claims = {}\n elif callable(extra_id_token_claims):\n extra_id_token_claims = extra_id_token_claims(user_id, authentication_request['client_id'])\n\n requested_claims = self._get_requested_claims_in(authentication_request, 'id_token')\n if len(authentication_request['response_type']) == 1:\n # only id token is issued -> no way of doing userinfo request, so include all claims in ID Token,\n # even those requested by the scope parameter\n requested_claims.update(\n scope2claims(\n authentication_request['scope'], extra_scope_dict=self.extra_scopes\n )\n )\n\n user_claims = self.userinfo.get_claims_for(user_id, requested_claims)\n response['id_token'] = self._create_signed_id_token(authentication_request['client_id'], sub,\n user_claims,\n authentication_request.get('nonce'),\n authz_code, 
access_token_value, extra_id_token_claims)\n logger.debug('issued id_token=%s from requested_claims=%s userinfo=%s extra_claims=%s',\n response['id_token'], requested_claims, user_claims, extra_id_token_claims)\n\n if 'state' in authentication_request:\n response['state'] = authentication_request['state']\n return response"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds the Access Token and associated parameters to the Token Response.", "response": "def _add_access_token_to_response(self, response, access_token):\n # type: (oic.message.AccessTokenResponse, se_leg_op.access_token.AccessToken) -> None\n \"\"\"\n Adds the Access Token and the associated parameters to the Token Response.\n \"\"\"\n response['access_token'] = access_token.value\n response['token_type'] = access_token.type\n response['expires_in'] = access_token.expires_in"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a subject identifier for the specified user and client.", "response": "def _create_subject_identifier(self, user_id, client_id, redirect_uri):\n # type (str, str, str) -> str\n \"\"\"\n Creates a subject identifier for the specified client and user\n see \n \"OpenID Connect Core 1.0\", Section 1.2.\n :param user_id: local user identifier\n :param client_id: which client to generate a subject identifier for\n :param redirect_uri: the clients' redirect_uri\n :return: a subject identifier for the user intended for client who made the authentication request\n \"\"\"\n supported_subject_types = self.configuration_information['subject_types_supported'][0]\n subject_type = self.clients[client_id].get('subject_type', supported_subject_types)\n sector_identifier = urlparse(redirect_uri).netloc\n return self.authz_state.get_subject_identifier(subject_type, user_id, sector_identifier)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nparse any claims requested using 
the claims request parameter see", "response": "def _get_requested_claims_in(self, authentication_request, response_method):\n # type (oic.oic.message.AuthorizationRequest, str) -> Mapping[str, Optional[Mapping[str, Union[str, List[str]]]]\n \"\"\"\n Parses any claims requested using the 'claims' request parameter, see\n \n \"OpenID Connect Core 1.0\", Section 5.5.\n :param authentication_request: the authentication request\n :param response_method: 'id_token' or 'userinfo'\n \"\"\"\n if response_method != 'id_token' and response_method != 'userinfo':\n raise ValueError('response_method must be \\'id_token\\' or \\'userinfo\\'')\n\n requested_claims = {}\n\n if 'claims' in authentication_request and response_method in authentication_request['claims']:\n requested_claims.update(authentication_request['claims'][response_method])\n return requested_claims"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _create_signed_id_token(self,\n client_id, # type: str\n sub, # type: str\n user_claims=None, # type: Optional[Mapping[str, Union[str, List[str]]]]\n nonce=None, # type: Optional[str]\n authorization_code=None, # type: Optional[str]\n access_token_value=None, # type: Optional[str]\n extra_id_token_claims=None): # type: Optional[Mappings[str, Union[str, List[str]]]]\n # type: (...) 
-> str\n \"\"\"\n Creates a signed ID Token.\n :param client_id: who the ID Token is intended for\n :param sub: who the ID Token is regarding\n :param user_claims: any claims about the user to be included\n :param nonce: nonce from the authentication request\n :param authorization_code: the authorization code issued together with this ID Token\n :param access_token_value: the access token issued together with this ID Token\n :param extra_id_token_claims: any extra claims that should be included in the ID Token\n :return: a JWS, containing the ID Token as payload\n \"\"\"\n\n alg = self.clients[client_id].get('id_token_signed_response_alg',\n self.configuration_information['id_token_signing_alg_values_supported'][0])\n args = {}\n\n hash_alg = 'HS{}'.format(alg[-3:])\n if authorization_code:\n args['c_hash'] = jws.left_hash(authorization_code.encode('utf-8'), hash_alg)\n if access_token_value:\n args['at_hash'] = jws.left_hash(access_token_value.encode('utf-8'), hash_alg)\n\n if user_claims:\n args.update(user_claims)\n\n if extra_id_token_claims:\n args.update(extra_id_token_claims)\n\n id_token = IdToken(iss=self.configuration_information['issuer'],\n sub=sub,\n aud=client_id,\n iat=int(time.time()),\n exp=int(time.time()) + self.id_token_lifetime,\n **args)\n\n if nonce:\n id_token['nonce'] = nonce\n\n logger.debug('signed id_token with kid=%s using alg=%s', self.signing_key, alg)\n return id_token.to_jwt([self.signing_key], alg)", "response": "Create a signed ID Token."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nchecks that the subject identifier matches the requested one.", "response": "def _check_subject_identifier_matches_requested(self, authentication_request, sub):\n # type (oic.message.AuthorizationRequest, str) -> None\n \"\"\"\n Verifies the subject identifier against any requested subject identifier using the claims request parameter.\n :param authentication_request: authentication request\n :param sub: 
subject identifier\n :raise AuthorizationError: if the subject identifier does not match the requested one\n \"\"\"\n if 'claims' in authentication_request:\n requested_id_token_sub = authentication_request['claims'].get('id_token', {}).get('sub')\n requested_userinfo_sub = authentication_request['claims'].get('userinfo', {}).get('sub')\n if requested_id_token_sub and requested_userinfo_sub and requested_id_token_sub != requested_userinfo_sub:\n raise AuthorizationError('Requested different subject identifier for IDToken and userinfo: {} != {}'\n .format(requested_id_token_sub, requested_userinfo_sub))\n\n requested_sub = requested_id_token_sub or requested_userinfo_sub\n if requested_sub and sub != requested_sub:\n raise AuthorizationError('Requested subject identifier \\'{}\\' could not be matched'\n .format(requested_sub))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nhandling a token request.", "response": "def handle_token_request(self, request_body, # type: str\n http_headers=None, # type: Optional[Mapping[str, str]]\n extra_id_token_claims=None\n # type: Optional[Union[Mapping[str, Union[str, List[str]]], Callable[[str, str], Mapping[str, Union[str, List[str]]]]]\n ):\n # type: (...) 
-> oic.oic.message.AccessTokenResponse\n \"\"\"\n Handles a token request, either for exchanging an authorization code or using a refresh token.\n :param request_body: urlencoded token request\n :param http_headers: http headers\n :param extra_id_token_claims: extra claims to include in the signed ID Token\n \"\"\"\n\n token_request = self._verify_client_authentication(request_body, http_headers)\n\n if 'grant_type' not in token_request:\n raise InvalidTokenRequest('grant_type missing', token_request)\n elif token_request['grant_type'] == 'authorization_code':\n return self._do_code_exchange(token_request, extra_id_token_claims)\n elif token_request['grant_type'] == 'refresh_token':\n return self._do_token_refresh(token_request)\n\n raise InvalidTokenRequest('grant_type \\'{}\\' unknown'.format(token_request['grant_type']), token_request,\n oauth_error='unsupported_grant_type')"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _do_code_exchange(self, request, # type: Dict[str, str]\n extra_id_token_claims=None\n # type: Optional[Union[Mapping[str, Union[str, List[str]]], Callable[[str, str], Mapping[str, Union[str, List[str]]]]]\n ):\n # type: (...) 
-> oic.message.AccessTokenResponse\n \"\"\"\n Handles a token request for exchanging an authorization code for an access token\n (grant_type=authorization_code).\n :param request: parsed http request parameters\n :param extra_id_token_claims: any extra parameters to include in the signed ID Token, either as a dict-like\n object or as a callable object accepting the local user identifier and client identifier which returns\n any extra claims which might depend on the user id and/or client id.\n :return: a token response containing a signed ID Token, an Access Token, and a Refresh Token\n :raise InvalidTokenRequest: if the token request is invalid\n \"\"\"\n token_request = AccessTokenRequest().from_dict(request)\n try:\n token_request.verify()\n except MessageException as e:\n raise InvalidTokenRequest(str(e), token_request) from e\n\n authentication_request = self.authz_state.get_authorization_request_for_code(token_request['code'])\n\n if token_request['client_id'] != authentication_request['client_id']:\n logger.info('Authorization code \\'%s\\' belonging to \\'%s\\' was used by \\'%s\\'',\n token_request['code'], authentication_request['client_id'], token_request['client_id'])\n raise InvalidAuthorizationCode('{} unknown'.format(token_request['code']))\n if token_request['redirect_uri'] != authentication_request['redirect_uri']:\n raise InvalidTokenRequest('Invalid redirect_uri: {} != {}'.format(token_request['redirect_uri'],\n authentication_request['redirect_uri']),\n token_request)\n\n sub = self.authz_state.get_subject_identifier_for_code(token_request['code'])\n user_id = self.authz_state.get_user_id_for_subject_identifier(sub)\n\n response = AccessTokenResponse()\n\n access_token = self.authz_state.exchange_code_for_token(token_request['code'])\n self._add_access_token_to_response(response, access_token)\n refresh_token = self.authz_state.create_refresh_token(access_token.value)\n if refresh_token is not None:\n response['refresh_token'] = 
refresh_token\n\n if extra_id_token_claims is None:\n extra_id_token_claims = {}\n elif callable(extra_id_token_claims):\n extra_id_token_claims = extra_id_token_claims(user_id, authentication_request['client_id'])\n requested_claims = self._get_requested_claims_in(authentication_request, 'id_token')\n user_claims = self.userinfo.get_claims_for(user_id, requested_claims)\n response['id_token'] = self._create_signed_id_token(authentication_request['client_id'], sub,\n user_claims,\n authentication_request.get('nonce'),\n None, access_token.value,\n extra_id_token_claims)\n logger.debug('issued id_token=%s from requested_claims=%s userinfo=%s extra_claims=%s',\n response['id_token'], requested_claims, user_claims, extra_id_token_claims)\n\n return response", "response": "Handles an authorization code request for an access token and returns a token response containing a signed ID Token an Access Token and a Refresh Token."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nhandles a request for refreshing an access token", "response": "def _do_token_refresh(self, request):\n # type: (Mapping[str, str]) -> oic.oic.message.AccessTokenResponse\n \"\"\"\n Handles a token request for refreshing an access token (grant_type=refresh_token).\n :param request: parsed http request parameters\n :return: a token response containing a new Access Token and possibly a new Refresh Token\n :raise InvalidTokenRequest: if the token request is invalid\n \"\"\"\n token_request = RefreshAccessTokenRequest().from_dict(request)\n try:\n token_request.verify()\n except MessageException as e:\n raise InvalidTokenRequest(str(e), token_request) from e\n\n response = AccessTokenResponse()\n\n access_token, refresh_token = self.authz_state.use_refresh_token(token_request['refresh_token'],\n scope=token_request.get('scope'))\n self._add_access_token_to_response(response, access_token)\n if refresh_token:\n response['refresh_token'] = refresh_token\n\n 
return response"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nverify the client authentication.", "response": "def _verify_client_authentication(self, request_body, http_headers=None):\n # type (str, Optional[Mapping[str, str]] -> Mapping[str, str]\n \"\"\"\n Verifies the client authentication.\n :param request_body: urlencoded token request\n :param http_headers:\n :return: The parsed request body.\n \"\"\"\n if http_headers is None:\n http_headers = {}\n token_request = dict(parse_qsl(request_body))\n token_request['client_id'] = verify_client_authentication(self.clients, token_request, http_headers.get('Authorization'))\n return token_request"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nhandle a userinfo request.", "response": "def handle_userinfo_request(self, request=None, http_headers=None):\n # type: (Optional[str], Optional[Mapping[str, str]]) -> oic.oic.message.OpenIDSchema\n \"\"\"\n Handles a userinfo request.\n :param request: urlencoded request (either query string or POST body)\n :param http_headers: http headers\n \"\"\"\n if http_headers is None:\n http_headers = {}\n userinfo_request = dict(parse_qsl(request))\n bearer_token = extract_bearer_token_from_http_request(userinfo_request, http_headers.get('Authorization'))\n\n introspection = self.authz_state.introspect_access_token(bearer_token)\n if not introspection['active']:\n raise InvalidAccessToken('The access token has expired')\n scopes = introspection['scope'].split()\n user_id = self.authz_state.get_user_id_for_subject_identifier(introspection['sub'])\n\n requested_claims = scope2claims(scopes, extra_scope_dict=self.extra_scopes)\n authentication_request = self.authz_state.get_authorization_request_for_access_token(bearer_token)\n requested_claims.update(self._get_requested_claims_in(authentication_request, 'userinfo'))\n user_claims = self.userinfo.get_claims_for(user_id, requested_claims)\n\n 
user_claims.setdefault('sub', introspection['sub'])\n response = OpenIDSchema(**user_claims)\n logger.debug('userinfo=%s from requested_claims=%s userinfo=%s',\n response, requested_claims, user_claims)\n return response"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef match_client_preferences_with_provider_capabilities(self, client_preferences):\n # type: (oic.message.RegistrationRequest) -> Mapping[str, Union[str, List[str]]]\n \"\"\"\n Match as many as of the client preferences as possible.\n :param client_preferences: requested preferences from client registration request\n :return: the matched preferences selected by the provider\n \"\"\"\n matched_prefs = client_preferences.to_dict()\n for pref in ['response_types', 'default_acr_values']:\n if pref not in client_preferences:\n continue\n\n capability = PREFERENCE2PROVIDER[pref]\n # only preserve the common values\n matched_values = find_common_values(client_preferences[pref], self.configuration_information[capability])\n # deal with space separated values\n matched_prefs[pref] = [' '.join(v) for v in matched_values]\n\n return matched_prefs", "response": "Match client preferences with provider capabilities."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nhandle a client registration request.", "response": "def handle_client_registration_request(self, request, http_headers=None):\n # type: (Optional[str], Optional[Mapping[str, str]]) -> oic.oic.message.RegistrationResponse\n \"\"\"\n Handles a client registration request.\n :param request: JSON request from POST body\n :param http_headers: http headers\n \"\"\"\n registration_req = RegistrationRequest().deserialize(request, 'json')\n\n for validator in self.registration_request_validators:\n validator(registration_req)\n logger.debug('parsed authentication_request: %s', registration_req)\n\n client_id, client_secret = self._issue_new_client()\n credentials = {\n 
'client_id': client_id,\n 'client_id_issued_at': int(time.time()),\n 'client_secret': client_secret,\n 'client_secret_expires_at': 0 # never expires\n }\n\n response_params = self.match_client_preferences_with_provider_capabilities(registration_req)\n response_params.update(credentials)\n self.clients[client_id] = copy.deepcopy(response_params)\n\n registration_resp = RegistrationResponse(**response_params)\n logger.debug('registration_resp=%s from registration_req=%s', registration_resp, registration_req)\n return registration_resp"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef extract_bearer_token_from_http_request(parsed_request=None, authz_header=None):\n # type (Optional[Mapping[str, str]], Optional[str] -> str\n \"\"\"\n Extracts a Bearer token from an http request\n :param parsed_request: parsed request (URL query part of request body)\n :param authz_header: HTTP Authorization header\n :return: Bearer access token, if found\n :raise BearerTokenError: if no Bearer token could be extracted from the request\n \"\"\"\n if authz_header:\n # Authorization Request Header Field: https://tools.ietf.org/html/rfc6750#section-2.1\n if authz_header.startswith(AccessToken.BEARER_TOKEN_TYPE):\n access_token = authz_header[len(AccessToken.BEARER_TOKEN_TYPE) + 1:]\n logger.debug('found access token %s in authz header', access_token)\n return access_token\n elif parsed_request:\n if 'access_token' in parsed_request:\n \"\"\"\n Form-Encoded Body Parameter: https://tools.ietf.org/html/rfc6750#section-2.2, and\n URI Query Parameter: https://tools.ietf.org/html/rfc6750#section-2.3\n \"\"\"\n access_token = parsed_request['access_token']\n logger.debug('found access token %s in request', access_token)\n return access_token\n\n raise BearerTokenError('Bearer Token could not be found in the request')", "response": "Extracts a Bearer token from an HTTP request."} {"SOURCE": "codesearchnet", "instruction": "Can you 
generate a brief explanation for the following Python 3 code\ndef _format_mongodb_uri(parsed_uri):\n user_pass = ''\n if parsed_uri.get('username') and parsed_uri.get('password'):\n user_pass = '{username!s}:{password!s}@'.format(**parsed_uri)\n\n _nodes = []\n for host, port in parsed_uri.get('nodelist'):\n if ':' in host and not host.endswith(']'):\n # IPv6 address without brackets\n host = '[{!s}]'.format(host)\n if port == 27017:\n _nodes.append(host)\n else:\n _nodes.append('{!s}:{!s}'.format(host, port))\n nodelist = ','.join(_nodes)\n\n options = ''\n if parsed_uri.get('options'):\n _opt_list = []\n for key, value in parsed_uri.get('options').items():\n if isinstance(value, bool):\n value = str(value).lower()\n _opt_list.append('{!s}={!s}'.format(key, value))\n options = '?' + '&'.join(_opt_list)\n\n db_name = parsed_uri.get('database') or ''\n\n res = \"mongodb://{user_pass!s}{nodelist!s}/{db_name!s}{options!s}\".format(\n user_pass=user_pass,\n nodelist=nodelist,\n db_name=db_name,\n # collection is ignored\n options=options)\n return res", "response": "Reconstructs a MongoDB URI from a parsed dict."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the database URI we re using in a format sensible for logging etc.", "response": "def sanitized_uri(self):\n \"\"\"\n Return the database URI we're using in a format sensible for logging etc.\n\n :return: db_uri\n \"\"\"\n if self._sanitized_uri is None:\n _parsed = copy.copy(self._parsed_uri)\n if 'username' in _parsed:\n _parsed['password'] = 'secret'\n _parsed['nodelist'] = [_parsed['nodelist'][0]]\n self._sanitized_uri = _format_mongodb_uri(_parsed)\n return self._sanitized_uri"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_database(self, database_name=None, username=None, password=None):\n if database_name is None:\n database_name = self._database_name\n if database_name is None:\n raise ValueError('No 
database_name supplied, and no default provided to __init__')\n db = self._connection[database_name]\n if username and password:\n db.authenticate(username, password)\n elif self._parsed_uri.get(\"username\", None):\n if 'authSource' in self._options and self._options['authSource'] is not None:\n db.authenticate(\n self._parsed_uri.get(\"username\", None),\n self._parsed_uri.get(\"password\", None),\n source=self._options['authSource']\n )\n else:\n db.authenticate(\n self._parsed_uri.get(\"username\", None),\n self._parsed_uri.get(\"password\", None)\n )\n return db", "response": "Get a pymongo database handle after authenticating."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nget a pymongo collection handle.", "response": "def get_collection(self, collection, database_name=None, username=None, password=None):\n \"\"\"\n Get a pymongo collection handle.\n\n :param collection: Name of collection\n :param database_name: (optional) Name of database\n :param username: (optional) Username to login with\n :param password: (optional) Password to login with\n :return: Pymongo collection object\n \"\"\"\n _db = self.get_database(database_name, username, password)\n return _db[collection]"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef balanced_binary_tree(n_leaves):\n def _balanced_subtree(leaves):\n if len(leaves) == 1:\n return leaves[0]\n elif len(leaves) == 2:\n return (leaves[0], leaves[1])\n else:\n split = len(leaves) // 2\n return (_balanced_subtree(leaves[:split]),\n _balanced_subtree(leaves[split:]))\n\n return _balanced_subtree(np.arange(n_leaves))", "response": "Create a balanced binary tree."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef decision_list(n_leaves):\n def _list(leaves):\n if len(leaves) == 2:\n return (leaves[0], leaves[1])\n else:\n return (leaves[0], 
_list(leaves[1:]))\n return _list(np.arange(n_leaves))", "response": "Create a decision list of n_leaves elements."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef random_tree(n_leaves):\n def _random_subtree(leaves):\n if len(leaves) == 1:\n return leaves[0]\n elif len(leaves) == 2:\n return (leaves[0], leaves[1])\n else:\n split = npr.randint(1, len(leaves)-1)\n return (_random_subtree(leaves[:split]),\n _random_subtree(leaves[split:]))\n\n return _random_subtree(np.arange(n_leaves))", "response": "Randomly partition the nodes into n_leaves nodes"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef leaves(tree):\n lvs = []\n def _leaves(node):\n if np.isscalar(node):\n lvs.append(node)\n elif isinstance(node, tuple) and len(node) == 2:\n _leaves(node[0])\n _leaves(node[1])\n else:\n raise Exception(\"Not a tree!\")\n\n _leaves(tree)\n return lvs", "response": "Return the leaves in this subtree."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget the address of each leaf node in terms of internal node choices", "response": "def choices(tree):\n \"\"\"\n Get the 'address' of each leaf node in terms of internal\n node choices\n \"\"\"\n n = len(leaves(tree))\n addr = np.nan * np.ones((n, n-1))\n def _addresses(node, index, choices):\n # index is the index of the current internal node\n # choices is a list of (indice, 0/1) choices made\n if np.isscalar(node):\n for i, choice in choices:\n addr[node, i] = choice\n return index\n\n elif isinstance(node, tuple) and len(node) == 2:\n newindex = _addresses(node[0], index+1, choices + [(index, 0)])\n newindex = _addresses(node[1], newindex, choices + [(index, 1)])\n return newindex\n\n else:\n raise Exception(\"Not a tree!\")\n\n _addresses(tree, 0, [])\n return addr"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconstructing the adjacency 
matrix of the tree", "response": "def adjacency(tree):\n \"\"\"\n Construct the adjacency matrix of the tree\n :param tree:\n :return:\n \"\"\"\n dd = ids(tree)\n N = len(dd)\n A = np.zeros((N, N))\n\n def _adj(node):\n if np.isscalar(node):\n return\n elif isinstance(node, tuple) and len(node) == 2:\n A[dd[node], dd[node[0]]] = 1\n A[dd[node[0]], dd[node]] = 1\n _adj(node[0])\n\n A[dd[node], dd[node[1]]] = 1\n A[dd[node[1]], dd[node]] = 1\n _adj(node[1])\n\n _adj(tree)\n return A"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef max_likelihood(self, data, weights=None, stats=None, lmbda=0.1):\n import autograd.numpy as anp\n from autograd import value_and_grad, hessian_vector_product\n from scipy.optimize import minimize\n\n assert weights is None\n assert stats is None\n if not isinstance(data, list):\n assert isinstance(data, tuple) and len(data) == 2\n data = [data]\n\n # Define a helper function for the log of the logistic fn\n def loglogistic(psi):\n return psi - anp.log(1+anp.exp(psi))\n\n # optimize each row of A and b\n for n in range(self.D_out):\n\n # Define an objective function for the n-th row of hstack((A, b))\n # This is the negative log likelihood of the n-th column of data.\n def nll(abn):\n an, bn = abn[:-1], abn[-1]\n T = 0\n ll = 0\n for (x, y) in data:\n T += x.shape[0]\n yn = y[:, n]\n psi = anp.dot(x, an) + bn\n ll += anp.sum(yn * loglogistic(psi))\n ll += anp.sum((1 - yn) * loglogistic(-1. 
* psi))\n\n # Include a penalty on the weights\n ll -= lmbda * T * anp.sum(an**2)\n ll -= lmbda * T * bn**2\n\n return -1 * ll / T\n\n abn0 = np.concatenate((self.A[n], self.b[n]))\n res = minimize(value_and_grad(nll), abn0,\n tol=1e-3,\n method=\"Newton-CG\",\n jac=True,\n hessp=hessian_vector_product(nll))\n\n assert res.success\n self.A[n] = res.x[:-1]\n self.b[n] = res.x[-1]", "response": "This method calculates the maximum likelihood of a set of data."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a dictionary of claims that are available for the given user_id.", "response": "def get_claims_for(self, user_id, requested_claims):\n # type: (str, Mapping[str, Optional[Mapping[str, Union[str, List[str]]]]) -> Dict[str, Union[str, List[str]]]\n \"\"\"\n Filter the userinfo based on which claims where requested.\n :param user_id: user identifier\n :param requested_claims: see \n \"OpenID Connect Core 1.0\", Section 5.5 for structure\n :return: All requested claims available from the userinfo.\n \"\"\"\n\n userinfo = self._db[user_id]\n claims = {claim: userinfo[claim] for claim in requested_claims if claim in userinfo}\n return claims"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate an authorization code bound to the authorization request and the authenticated user identified by the subject identifier.", "response": "def create_authorization_code(self, authorization_request, subject_identifier, scope=None):\n # type: (oic.oic.message.AuthorizationRequest, str, Optional[List[str]]) -> str\n \"\"\"\n Creates an authorization code bound to the authorization request and the authenticated user identified\n by the subject identifier.\n \"\"\"\n\n if not self._is_valid_subject_identifier(subject_identifier):\n raise InvalidSubjectIdentifier('{} unknown'.format(subject_identifier))\n\n scope = ' '.join(scope or authorization_request['scope'])\n logger.debug('creating authz code for 
scope=%s', scope)\n\n authorization_code = rand_str()\n authz_info = {\n 'used': False,\n 'exp': int(time.time()) + self.authorization_code_lifetime,\n 'sub': subject_identifier,\n 'granted_scope': scope,\n self.KEY_AUTHORIZATION_REQUEST: authorization_request.to_dict()\n }\n self.authorization_codes[authorization_code] = authz_info\n logger.debug('new authz_code=%s to client_id=%s for sub=%s valid_until=%s', authorization_code,\n authorization_request['client_id'], subject_identifier, authz_info['exp'])\n return authorization_code"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create_access_token(self, authorization_request, subject_identifier, scope=None):\n # type: (oic.oic.message.AuthorizationRequest, str, Optional[List[str]]) -> se_leg_op.access_token.AccessToken\n \"\"\"\n Creates an access token bound to the authentication request and the authenticated user identified by the\n subject identifier.\n \"\"\"\n if not self._is_valid_subject_identifier(subject_identifier):\n raise InvalidSubjectIdentifier('{} unknown'.format(subject_identifier))\n\n scope = scope or authorization_request['scope']\n\n return self._create_access_token(subject_identifier, authorization_request.to_dict(), ' '.join(scope))", "response": "Creates an access token bound to the given authorization request and the authenticated user identified by the subject identifier."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate an access token bound to the subject identifier client id and requested scope.", "response": "def _create_access_token(self, subject_identifier, auth_req, granted_scope, current_scope=None):\n # type: (str, Mapping[str, Union[str, List[str]]], str, Optional[str]) -> se_leg_op.access_token.AccessToken\n \"\"\"\n Creates an access token bound to the subject identifier, client id and requested scope.\n \"\"\"\n access_token = AccessToken(rand_str(), self.access_token_lifetime)\n\n 
scope = current_scope or granted_scope\n logger.debug('creating access token for scope=%s', scope)\n\n authz_info = {\n 'iat': int(time.time()),\n 'exp': int(time.time()) + self.access_token_lifetime,\n 'sub': subject_identifier,\n 'client_id': auth_req['client_id'],\n 'aud': [auth_req['client_id']],\n 'scope': scope,\n 'granted_scope': granted_scope,\n 'token_type': access_token.BEARER_TOKEN_TYPE,\n self.KEY_AUTHORIZATION_REQUEST: auth_req\n }\n self.access_tokens[access_token.value] = authz_info\n\n logger.debug('new access_token=%s to client_id=%s for sub=%s valid_until=%s',\n access_token.value, auth_req['client_id'], subject_identifier, authz_info['exp'])\n return access_token"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef exchange_code_for_token(self, authorization_code):\n # type: (str) -> se_leg_op.access_token.AccessToken\n \"\"\"\n Exchanges an authorization code for an access token.\n \"\"\"\n if authorization_code not in self.authorization_codes:\n raise InvalidAuthorizationCode('{} unknown'.format(authorization_code))\n\n authz_info = self.authorization_codes[authorization_code]\n if authz_info['used']:\n logger.debug('detected already used authz_code=%s', authorization_code)\n raise InvalidAuthorizationCode('{} has already been used'.format(authorization_code))\n elif authz_info['exp'] < int(time.time()):\n logger.debug('detected expired authz_code=%s, now=%s > exp=%s ',\n authorization_code, int(time.time()), authz_info['exp'])\n raise InvalidAuthorizationCode('{} has expired'.format(authorization_code))\n\n authz_info['used'] = True\n\n access_token = self._create_access_token(authz_info['sub'], authz_info[self.KEY_AUTHORIZATION_REQUEST],\n authz_info['granted_scope'])\n\n logger.debug('authz_code=%s exchanged to access_token=%s', authorization_code, access_token.value)\n return access_token", "response": "Returns an access token object for the given authorization code."} {"SOURCE": "codesearchnet", 
"instruction": "Make a summary of the following Python 3 code\ndef introspect_access_token(self, access_token_value):\n # type: (str) -> Dict[str, Union[str, List[str]]]\n \"\"\"\n Returns authorization data associated with the access token.\n See \"Token Introspection\", Section 2.2.\n \"\"\"\n if access_token_value not in self.access_tokens:\n raise InvalidAccessToken('{} unknown'.format(access_token_value))\n\n authz_info = self.access_tokens[access_token_value]\n\n introspection = {'active': authz_info['exp'] >= int(time.time())}\n\n introspection_params = {k: v for k, v in authz_info.items() if k in TokenIntrospectionResponse.c_param}\n introspection.update(introspection_params)\n return introspection", "response": "Returns the Introspection data associated with the access token."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncreating a new refresh token bound to the specified access token.", "response": "def create_refresh_token(self, access_token_value):\n # type: (str) -> str\n \"\"\"\n Creates an refresh token bound to the specified access token.\n \"\"\"\n if access_token_value not in self.access_tokens:\n raise InvalidAccessToken('{} unknown'.format(access_token_value))\n\n if not self.refresh_token_lifetime:\n logger.debug('no refresh token issued for for access_token=%s', access_token_value)\n return None\n\n refresh_token = rand_str()\n authz_info = {'access_token': access_token_value, 'exp': int(time.time()) + self.refresh_token_lifetime}\n self.refresh_tokens[refresh_token] = authz_info\n\n logger.debug('issued refresh_token=%s expiring=%d for access_token=%s', refresh_token, authz_info['exp'],\n access_token_value)\n return refresh_token"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef use_refresh_token(self, refresh_token, scope=None):\n # type (str, Optional[List[str]]) -> Tuple[se_leg_op.access_token.AccessToken, Optional[str]]\n \"\"\"\n Creates 
a new access token, and refresh token, based on the supplied refresh token.\n :return: new access token and new refresh token if the old one had an expiration time\n \"\"\"\n\n if refresh_token not in self.refresh_tokens:\n raise InvalidRefreshToken('{} unknown'.format(refresh_token))\n\n refresh_token_info = self.refresh_tokens[refresh_token]\n if 'exp' in refresh_token_info and refresh_token_info['exp'] < int(time.time()):\n raise InvalidRefreshToken('{} has expired'.format(refresh_token))\n\n authz_info = self.access_tokens[refresh_token_info['access_token']]\n\n if scope:\n if not requested_scope_is_allowed(scope, authz_info['granted_scope']):\n logger.debug('trying to refresh token with superset scope, requested_scope=%s, granted_scope=%s',\n scope, authz_info['granted_scope'])\n raise InvalidScope('Requested scope includes non-granted value')\n scope = ' '.join(scope)\n logger.debug('refreshing token with new scope, old_scope=%s -> new_scope=%s', authz_info['scope'], scope)\n else:\n # OAuth 2.0: scope: \"[...] 
if omitted is treated as equal to the scope originally granted by the resource owner\"\n scope = authz_info['granted_scope']\n\n new_access_token = self._create_access_token(authz_info['sub'], authz_info[self.KEY_AUTHORIZATION_REQUEST],\n authz_info['granted_scope'], scope)\n\n new_refresh_token = None\n if self.refresh_token_threshold \\\n and 'exp' in refresh_token_info \\\n and refresh_token_info['exp'] - int(time.time()) < self.refresh_token_threshold:\n # refresh token is close to expiry, issue a new one\n new_refresh_token = self.create_refresh_token(new_access_token.value)\n else:\n self.refresh_tokens[refresh_token]['access_token'] = new_access_token.value\n\n logger.debug('refreshed tokens, new_access_token=%s new_refresh_token=%s old_refresh_token=%s',\n new_access_token, new_refresh_token, refresh_token)\n return new_access_token, new_refresh_token", "response": "Creates a new access token and a new refresh token based on the supplied refresh token."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn the subject identifier for the given user identifier.", "response": "def get_subject_identifier(self, subject_type, user_id, sector_identifier=None):\n # type: (str, str, str) -> str\n \"\"\"\n Returns a subject identifier for the local user identifier.\n :param subject_type: 'pairwise' or 'public', see\n \n \"OpenID Connect Core 1.0\", Section 8.\n :param user_id: local user identifier\n :param sector_identifier: the client's sector identifier,\n see \n \"OpenID Connect Core 1.0\", Section 1.2\n \"\"\"\n\n if user_id not in self.subject_identifiers:\n self.subject_identifiers[user_id] = {}\n\n if subject_type == 'public':\n if 'public' not in self.subject_identifiers[user_id]:\n new_sub = self._subject_identifier_factory.create_public_identifier(user_id)\n self.subject_identifiers[user_id] = {'public': new_sub}\n\n logger.debug('created new public sub=% for user_id=%s',\n 
self.subject_identifiers[user_id]['public'], user_id)\n sub = self.subject_identifiers[user_id]['public']\n logger.debug('returning public sub=%s', sub)\n return sub\n elif subject_type == 'pairwise':\n if not sector_identifier:\n raise ValueError('sector_identifier cannot be None or empty')\n\n subject_id = self._subject_identifier_factory.create_pairwise_identifier(user_id, sector_identifier)\n logger.debug('returning pairwise sub=%s for user_id=%s and sector_identifier=%s',\n subject_id, user_id, sector_identifier)\n sub = self.subject_identifiers[user_id]\n pairwise_set = set(sub.get('pairwise', []))\n pairwise_set.add(subject_id)\n sub['pairwise'] = list(pairwise_set)\n self.subject_identifiers[user_id] = sub\n return subject_id\n\n raise ValueError('Unknown subject_type={}'.format(subject_type))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef authorization_request_verify(authentication_request):\n try:\n authentication_request.verify()\n except MessageException as e:\n raise InvalidAuthenticationRequest(str(e), authentication_request, oauth_error='invalid_request') from e", "response": "Verifies that all required parameters and correct values are included in the authentication request."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef client_id_is_known(provider, authentication_request):\n if authentication_request['client_id'] not in provider.clients:\n logger.error('Unknown client_id \\'{}\\''.format(authentication_request['client_id']))\n raise InvalidAuthenticationRequest('Unknown client_id',\n authentication_request,\n oauth_error='unauthorized_client')", "response": "Verifies the client identifier is known."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef redirect_uri_is_in_registered_redirect_uris(provider, authentication_request):\n error = InvalidAuthenticationRequest('Redirect uri is 
not registered',\n authentication_request,\n oauth_error=\"invalid_request\")\n try:\n allowed_redirect_uris = provider.clients[authentication_request['client_id']]['redirect_uris']\n except KeyError as e:\n logger.error('client metadata is missing redirect_uris')\n raise error\n\n if authentication_request['redirect_uri'] not in allowed_redirect_uris:\n logger.error(\"Redirect uri \\'{0}\\' is not registered for this client\".format(authentication_request['redirect_uri']))\n raise error", "response": "Verifies that the redirect uri is registered for the client making the request."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nverifies that the requested response type is allowed for the client making the request.", "response": "def response_type_is_in_registered_response_types(provider, authentication_request):\n \"\"\"\n Verifies that the requested response type is allowed for the client making the request.\n :param provider: provider instance\n :param authentication_request: authentication request to verify\n :raise InvalidAuthenticationRequest: if the response type is not allowed\n \"\"\"\n error = InvalidAuthenticationRequest('Response type is not registered',\n authentication_request, \n oauth_error='invalid_request')\n try:\n allowed_response_types = provider.clients[authentication_request['client_id']]['response_types']\n except KeyError as e:\n logger.error('client metadata is missing response_types')\n raise error\n\n if not is_allowed_response_type(authentication_request['response_type'], allowed_response_types):\n logger.error('Response type \\'{}\\' is not registered'.format(' '.join(authentication_request['response_type'])))\n raise error"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\naccording to \"OpenID Connect Core 1.0\", Section 5.5: \"When the userinfo member is used, the request MUST also use a response_type value that results in an Access Token being issued to the 
Client for use at the UserInfo Endpoint.\" :param authentication_request: the authentication request to verify :raise InvalidAuthenticationRequest: if the requested claims can not be returned according to the request", "response": "def userinfo_claims_only_specified_when_access_token_is_issued(authentication_request):\n \"\"\"\n According to \n \"OpenID Connect Core 1.0\", Section 5.5: \"When the userinfo member is used, the request MUST\n also use a response_type value that results in an Access Token being issued to the Client for\n use at the UserInfo Endpoint.\"\n :param authentication_request: the authentication request to verify\n :raise InvalidAuthenticationRequest: if the requested claims can not be returned according to the request\n \"\"\"\n will_issue_access_token = authentication_request['response_type'] != ['id_token']\n contains_userinfo_claims_request = 'claims' in authentication_request and 'userinfo' in authentication_request[\n 'claims']\n if not will_issue_access_token and contains_userinfo_claims_request:\n raise InvalidAuthenticationRequest('Userinfo claims cannot be requested, when response_type=\\'id_token\\'',\n authentication_request,\n oauth_error='invalid_request')"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nverify that all required parameters and correct values are included in the client registration request.", "response": "def registration_request_verify(registration_request):\n \"\"\"\n Verifies that all required parameters and correct values are included in the client registration request.\n :param registration_request: the authentication request to verify\n :raise InvalidClientRegistrationRequest: if the registration is incorrect\n \"\"\"\n try:\n registration_request.verify()\n except MessageException as e:\n raise InvalidClientRegistrationRequest(str(e), registration_request, oauth_error='invalid_request') from e"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 
to\nverify that all requested preferences in the client metadata can be fulfilled by this provider.", "response": "def client_preferences_match_provider_capabilities(provider, registration_request):\n \"\"\"\n Verifies that all requested preferences in the client metadata can be fulfilled by this provider.\n :param registration_request: the authentication request to verify\n :raise InvalidClientRegistrationRequest: if the registration is incorrect\n \"\"\"\n\n def match(client_preference, provider_capability):\n if isinstance(client_preference, list):\n # deal with comparing space separated values, e.g. 'response_types', without considering the order\n # at least one requested preference must be matched\n return len(find_common_values(client_preference, provider_capability)) > 0\n\n return client_preference in provider_capability\n\n for client_preference in registration_request.keys():\n if client_preference not in PREFERENCE2PROVIDER:\n # metadata parameter that shouldn't be matched\n continue\n\n provider_capability = PREFERENCE2PROVIDER[client_preference]\n if not match(registration_request[client_preference], provider.configuration_information[provider_capability]):\n raise InvalidClientRegistrationRequest(\n 'Could not match client preference {}={} with provider capability {}={}'.format(\n client_preference, registration_request[client_preference], provider_capability,\n provider.configuration_information[provider_capability]),\n registration_request,\n oauth_error='invalid_request')"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncompute the n - th term in the infinite sum of the Jacobi density.", "response": "def _psi_n(x, n, b):\n \"\"\"\n Compute the n-th term in the infinite sum of\n the Jacobi density.\n \"\"\"\n return 2**(b-1) / gamma(b) * (-1)**n * \\\n np.exp(gammaln(n+b) -\n gammaln(n+1) +\n np.log(2*n+b) -\n 0.5 * np.log(2*np.pi*x**3) -\n (2*n+b)**2 / (8.*x))"} {"SOURCE": "codesearchnet", "instruction": "Can you 
generate the documentation for the following Python 3 function\ndef _tilt(omega, b, psi):\n return np.cosh(psi/2.0)**b * np.exp(-psi**2/2.0 * omega)", "response": "Compute the tilt of the PG density for value omega\n and tilt psi."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncomputing the density log PG of omega | b psi", "response": "def pgpdf(omega, b, psi, trunc=200):\n \"\"\"\n Approximate the density log PG(omega | b, psi) using a\n truncation of the density written as an infinite sum.\n\n :param omega: point at which to evaluate density\n :param b: first parameter of PG\n :param psi: tilting of PG\n :param trunc: number of terms in sum\n \"\"\"\n ns = np.arange(trunc)\n psi_ns = np.array([_psi_n(omega, n, b) for n in ns])\n pdf = np.sum(psi_ns, axis=0)\n\n # Account for tilting\n pdf *= _tilt(omega, b, psi)\n\n return pdf"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nconverts a psi array to a probability vector pi.", "response": "def psi_to_pi(psi, axis=None):\n \"\"\"\n Convert psi to a probability vector pi\n :param psi: Length K-1 vector\n :return: Length K normalized probability vector\n \"\"\"\n if axis is None:\n if psi.ndim == 1:\n K = psi.size + 1\n pi = np.zeros(K)\n\n # Set pi[1..K-1]\n stick = 1.0\n for k in range(K-1):\n pi[k] = logistic(psi[k]) * stick\n stick -= pi[k]\n\n # Set the last output\n pi[-1] = stick\n # DEBUG\n assert np.allclose(pi.sum(), 1.0)\n\n elif psi.ndim == 2:\n M, Km1 = psi.shape\n K = Km1 + 1\n pi = np.zeros((M,K))\n\n # Set pi[1..K-1]\n stick = np.ones(M)\n for k in range(K-1):\n pi[:,k] = logistic(psi[:,k]) * stick\n stick -= pi[:,k]\n\n # Set the last output\n pi[:,-1] = stick\n\n # DEBUG\n assert np.allclose(pi.sum(axis=1), 1.0)\n\n else:\n raise ValueError(\"psi must be 1 or 2D\")\n else:\n K = psi.shape[axis] + 1\n pi = np.zeros([psi.shape[dim] if dim != axis else K for dim in range(psi.ndim)])\n stick = np.ones(psi.shape[:axis] + 
psi.shape[axis+1:])\n for k in range(K-1):\n inds = [slice(None) if dim != axis else k for dim in range(psi.ndim)]\n pi[inds] = logistic(psi[inds]) * stick\n stick -= pi[inds]\n pi[[slice(None) if dim != axis else -1 for dim in range(psi.ndim)]] = stick\n assert np.allclose(pi.sum(axis=axis), 1.)\n\n return pi"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef verify_client_authentication(clients, parsed_request, authz_header=None):\n # type: (Mapping[str, str], Mapping[str, Mapping[str, Any]], Optional[str]) -> bool\n \"\"\"\n Verifies client authentication at the token endpoint, see\n \"The OAuth 2.0 Authorization Framework\",\n Section 2.3.1\n :param parsed_request: key-value pairs from parsed urlencoded request\n :param clients: clients db\n :param authz_header: the HTTP Authorization header value\n :return: the unmodified parsed request\n :raise InvalidClientAuthentication: if the client authentication was incorrect\n \"\"\"\n client_id = None\n client_secret = None\n authn_method = None\n if authz_header:\n logger.debug('client authentication in Authorization header %s', authz_header)\n\n authz_scheme = authz_header.split(maxsplit=1)[0]\n if authz_scheme == 'Basic':\n authn_method = 'client_secret_basic'\n credentials = authz_header[len('Basic '):]\n missing_padding = 4 - len(credentials) % 4\n if missing_padding:\n credentials += '=' * missing_padding\n try:\n auth = base64.urlsafe_b64decode(credentials.encode('utf-8')).decode('utf-8')\n except UnicodeDecodeError as e:\n raise InvalidClientAuthentication('Could not userid/password from authorization header'.format(authz_scheme))\n client_id, client_secret = auth.split(':')\n else:\n raise InvalidClientAuthentication('Unknown scheme in authorization header, {} != Basic'.format(authz_scheme))\n elif 'client_id' in parsed_request:\n logger.debug('client authentication in request body %s', parsed_request)\n\n client_id = parsed_request['client_id']\n if 
'client_secret' in parsed_request:\n authn_method = 'client_secret_post'\n client_secret = parsed_request['client_secret']\n else:\n authn_method = 'none'\n client_secret = None\n\n if client_id not in clients:\n raise InvalidClientAuthentication('client_id \\'{}\\' unknown'.format(client_id))\n\n client_info = clients[client_id]\n if client_secret != client_info.get('client_secret', None):\n raise InvalidClientAuthentication('Incorrect client_secret')\n\n expected_authn_method = client_info.get('token_endpoint_auth_method', 'client_secret_basic')\n if authn_method != expected_authn_method:\n raise InvalidClientAuthentication(\n 'Wrong authentication method used, MUST use \\'{}\\''.format(expected_authn_method))\n\n return client_id", "response": "Verifies the client authentication at the token endpoint."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_visualization(self, visualization, size_x=6, size_y=3, col=0, row=0):\n new_panel_index = self.get_max_index()+1\n if col and row:\n new_panel = {\n 'col': col, 'row': row,\n 'size_x': size_x, 'size_y': size_y,\n 'panelIndex': new_panel_index,\n 'type': 'visualization',\n 'id': visualization.id\n }\n self.panels.append(new_panel)\n return new_panel\n else:\n new_panel = append_panel(self.panels, size_x, size_y)\n if new_panel:\n new_panel['id'] = visualization.id\n new_panel['panelIndex'] = new_panel_index\n new_panel['type'] = 'visualization'\n return new_panel", "response": "Adds a visualization to the dashboard."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nremove all visualizations with the specified id from the dashboard.", "response": "def remove_visualization(self, visualization_id):\n \"\"\"\n Removes all visualizations with the specified id from the dashboard\n :param visualization_id:\n :return:\n \"\"\"\n for i, panel in enumerate(self.panels):\n if panel['id'] == visualization_id:\n del self.panels[i]"} {"SOURCE": 
"codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a list of all visualizations", "response": "def get_all(self):\n \"\"\"\n Returns a list of all visualizations\n :return: list of the Visualization instances\n \"\"\"\n res = self.es.search(index=self.index, doc_type=self.doc_type, body={'query': {'match_all': {}}})\n if not res['hits']['total']:\n return []\n return [Visualization.from_kibana(hit) for hit in res['hits']['hits']]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a new visualization to the index", "response": "def add(self, visualization):\n \"\"\"\n Creates a new visualization\n :param visualization: instance of Visualization\n :return:\n \"\"\"\n res = self.es.create(index=self.index,\n id=visualization.id or str(uuid.uuid1()),\n doc_type=self.doc_type,\n body=visualization.to_kibana(), refresh=True)\n return res"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update(self, visualization):\n res = self.es.update(index=self.index, id=visualization.id, doc_type=self.doc_type,\n body={'doc': visualization.to_kibana()},\n refresh=True)\n return res", "response": "Updates existing visualization with new data"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndeletes the visualization s related items from the index", "response": "def remove(self, visualization):\n \"\"\"\n Deletes the visualization\n :param visualization: instance of Visualization that was previously loaded\n :return:\n \"\"\"\n res = self.es.delete(index=self.index, id=visualization.id, doc_type=self.doc_type, refresh=True)\n return res"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nfinding bottom lines of all panels in a", "response": "def bottoms(panels):\n \"\"\"\n Finds bottom lines of all panels\n :param panels:\n :return: sorted by row list of tuples representing lines (col, row , col + 
len, row)\n \"\"\"\n bottom_lines = [(p['col'], p['row'] + p['size_y'], p['col'] + p['size_x'], p['row'] + p['size_y']) for p in panels]\n return sorted(bottom_lines, key=lambda l: l[1], reverse=True)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef find_shape(bottom_lines, max_len):\n shape = [1] * max_len\n for i in range(max_len):\n for line in bottom_lines:\n if line[0] <= i + 1 < line[2]:\n shape[i] = line[1]\n break\n return shape", "response": "Find a shape of lowest horizontal lines with step = 1\n "} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef longest_lines(shape):\n lines = []\n for level in set(shape):\n count = 0\n for i in range(len(shape)):\n if shape[i] <= level:\n count += 1\n elif count:\n lines.append({'row': level, 'col': i - count + 1, 'len': count})\n count = 0\n if count:\n lines.append({'row': level, 'col': i - count + 2, 'len': count})\n\n return sorted(lines, key=lambda l: l['row'])", "response": "Creates lines from shape containing longest available data."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nappend a panel to the list of panels.", "response": "def append_panel(panels, size_x, size_y, max_col=12):\n \"\"\"\n Appends a panel to the list of panels. 
Finds the highest palce at the left for the new panel.\n :param panels:\n :param size_x:\n :param size_y:\n :param max_col:\n :return: a new panel or None if it is not possible to place a panel with such size_x\n \"\"\"\n bottom_lines = bottoms(panels)\n shape = find_shape(bottom_lines, max_col)\n lines = longest_lines(shape)\n line = find_place(lines, size_x)\n if not line:\n return\n panel = {\n 'col': line['col'],\n 'row': line['row'],\n 'size_x': size_x,\n 'size_y': size_y,\n }\n panels.append(panel)\n return panel"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef run_from_argv(self, argv):\n\n self.test_runner = test_runner_class\n super(Command, self).run_from_argv(argv)", "response": "Set the default test runner for the command."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef handle(self, *test_labels, **options):\n if not options.get('testrunner', None):\n options['testrunner'] = test_runner_class\n\n return super(Command, self).handle(*test_labels, **options)", "response": "Set the default Gherkin test runner."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef django_url(step, url=None):\n\n base_url = step.test.live_server_url\n\n if url:\n return urljoin(base_url, url)\n else:\n return base_url", "response": "Returns the URL for a page from the test server."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nfiltering by namespace. Try to guess which field to use in lookup. Accept 'to' argument if you need to specify.", "response": "def namespace(self, namespace, to=None):\n \"\"\"\n Filter by namespace. 
Try to guess which field to use in lookup.\n Accept 'to' argument if you need to specify.\n \"\"\"\n fields = get_apphook_field_names(self.model)\n if not fields:\n raise ValueError(\n ugettext(\n 'Can\\'t find any relation to an ApphookConfig model in {0}'\n ).format(self.model.__name__)\n )\n if to and to not in fields:\n raise ValueError(\n ugettext(\n 'Can\\'t find relation to ApphookConfig model named '\n '\"{0}\" in \"{1}\"'\n ).format(to, self.model.__name__)\n )\n if len(fields) > 1 and to not in fields:\n raise ValueError(\n ugettext(\n '\"{0}\" has {1} relations to an ApphookConfig model.'\n ' Please, specify which one to use in argument \"to\".'\n ' Choices are: {2}'\n ).format(\n self.model.__name__, len(fields), ', '.join(fields)\n )\n )\n else:\n if not to:\n to = fields[0]\n\n lookup = '{0}__namespace'.format(to)\n kwargs = {lookup: namespace}\n return self.filter(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn the select value for apphook configs clf_id", "response": "def _app_config_select(self, request, obj):\n \"\"\"\n Return the select value for apphook configs\n\n :param request: request object\n :param obj: current object\n :return: False if no preselected value is available (more than one or no apphook\n config is present), apphook config instance if exactly one apphook\n config is defined or apphook config defined in the request or in the current\n object, False otherwise\n \"\"\"\n if not obj and not request.GET.get(self.app_config_attribute, False):\n config_model = get_apphook_model(self.model, self.app_config_attribute)\n if config_model.objects.count() == 1:\n return config_model.objects.first()\n return None\n elif obj and getattr(obj, self.app_config_attribute, False):\n return getattr(obj, self.app_config_attribute)\n elif request.GET.get(self.app_config_attribute, False):\n config_model = get_apphook_model(self.model, self.app_config_attribute)\n return 
config_model.objects.get(\n pk=int(request.GET.get(self.app_config_attribute, False))\n )\n return False"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting the form value according to the apphook config values.", "response": "def _set_config_defaults(self, request, form, obj=None):\n \"\"\"\n Cycle through app_config_values and sets the form value according to the\n options in the current apphook config.\n\n self.app_config_values is a dictionary containing config options as keys, form fields as\n values::\n\n app_config_values = {\n 'apphook_config': 'form_field',\n ...\n }\n\n :param request: request object\n :param form: model form for the current model\n :param obj: current object\n :return: form with defaults set\n \"\"\"\n for config_option, field in self.app_config_values.items():\n if field in form.base_fields:\n form.base_fields[field].initial = self.get_config_data(request, obj, config_option)\n return form"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a list of fieldset objects for the apphook config.", "response": "def get_fieldsets(self, request, obj=None):\n \"\"\"\n If the apphook config must be selected first, returns a fieldset with just the\n app config field and help text\n :param request:\n :param obj:\n :return:\n \"\"\"\n app_config_default = self._app_config_select(request, obj)\n if app_config_default is None and request.method == 'GET':\n return (_(self.app_config_selection_title),\n {'fields': (self.app_config_attribute, ),\n 'description': _(self.app_config_selection_desc)}),\n else:\n return super(ModelAppHookConfig, self).get_fieldsets(request, obj)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_form(self, request, obj=None, **kwargs):\n form = super(ModelAppHookConfig, self).get_form(request, obj, **kwargs)\n if self.app_config_attribute not in form.base_fields:\n return form\n 
app_config_default = self._app_config_select(request, obj)\n if app_config_default:\n form.base_fields[self.app_config_attribute].initial = app_config_default\n get = copy.copy(request.GET)\n get[self.app_config_attribute] = app_config_default.pk\n request.GET = get\n elif app_config_default is None and request.method == 'GET':\n class InitialForm(form):\n class Meta(form.Meta):\n fields = (self.app_config_attribute,)\n form = InitialForm\n form = self._set_config_defaults(request, form, obj)\n return form", "response": "Returns the right form according to the context."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngenerates a hash of verbose names to models", "response": "def _models_generator():\n \"\"\"\n Build a hash of model verbose names to models\n \"\"\"\n\n for app in apps.get_app_configs():\n for model in app.get_models():\n yield (str(model._meta.verbose_name).lower(), model)\n yield (str(model._meta.verbose_name_plural).lower(), model)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a model class by its verbose name.", "response": "def get_model(name):\n \"\"\"\n Convert a model's verbose name to the model class. 
This allows us to\n use the models verbose name in steps.\n \"\"\"\n\n model = MODELS.get(name.lower(), None)\n\n assert model, \"Could not locate model by name '%s'\" % name\n\n return model"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nresets the ID sequence for a given model.", "response": "def reset_sequence(model):\n \"\"\"\n Reset the ID sequence for a model.\n \"\"\"\n sql = connection.ops.sequence_reset_sql(no_style(), [model])\n for cmd in sql:\n connection.cursor().execute(cmd)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _dump_model(model, attrs=None):\n\n fields = []\n\n for field in model._meta.fields:\n fields.append((field.name, str(getattr(model, field.name))))\n\n if attrs is not None:\n for attr in attrs:\n fields.append((attr, str(getattr(model, attr))))\n\n for field in model._meta.many_to_many:\n vals = getattr(model, field.name)\n fields.append((field.name, '{val} ({count})'.format(\n val=', '.join(map(str, vals.all())),\n count=vals.count(),\n )))\n\n print(', '.join(\n '{0}={1}'.format(field, value)\n for field, value in fields\n ))", "response": "Dump the model fields for debugging."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _model_exists_step(self, model, should_exist):\n\n model = get_model(model)\n data = guess_types(self.hashes)\n\n queryset = model.objects\n\n try:\n existence_check = _TEST_MODEL[model]\n except KeyError:\n existence_check = test_existence\n\n failed = 0\n try:\n for hash_ in data:\n match = existence_check(queryset, hash_)\n\n if should_exist:\n assert match, \\\n \"%s does not exist: %s\" % (model.__name__, hash_)\n else:\n assert not match, \\\n \"%s exists: %s\" % (model.__name__, hash_)\n\n except AssertionError as exc:\n print(exc)\n failed += 1\n\n if failed:\n print(\"Rows in DB are:\")\n for existing_model in queryset.all():\n 
_dump_model(existing_model,\n attrs=[k[1:]\n for k in data[0].keys()\n if k.startswith('@')])\n\n if should_exist:\n raise AssertionError(\"%i rows missing\" % failed)\n else:\n raise AssertionError(\"%i rows found\" % failed)", "response": "Test for existence of a model matching the given data."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef write_models(model, data, field):\n written = []\n\n for hash_ in data:\n if field:\n if field not in hash_:\n raise KeyError((\"The \\\"%s\\\" field is required for all update \"\n \"operations\") % field)\n\n model_kwargs = {field: hash_[field]}\n model_obj = model.objects.get(**model_kwargs)\n\n for to_set, val in hash_.items():\n setattr(model_obj, to_set, val)\n\n model_obj.save()\n\n else:\n model_obj = model.objects.create(**hash_)\n\n written.append(model_obj)\n\n reset_sequence(model)\n return written", "response": "Writes models for each data hash to the database."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _write_models_step(self, model, field=None):\n\n model = get_model(model)\n data = guess_types(self.hashes)\n\n try:\n func = _WRITE_MODEL[model]\n except KeyError:\n func = partial(write_models, model)\n\n func(data, field)", "response": "Write or update a model."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate a new model linked to the given model.", "response": "def _create_models_for_relation_step(self, rel_model_name,\n rel_key, rel_value, model):\n \"\"\"\n Create a new model linked to the given model.\n\n Syntax:\n\n And `model` with `field` \"`value`\" has `new model` in the database:\n\n Example:\n\n .. 
code-block:: gherkin\n\n And project with name \"Ball Project\" has goals in the database:\n | description |\n | To have fun playing with balls of twine |\n \"\"\"\n\n model = get_model(model)\n lookup = {rel_key: rel_value}\n rel_model = get_model(rel_model_name).objects.get(**lookup)\n\n data = guess_types(self.hashes)\n\n for hash_ in data:\n hash_['%s' % rel_model_name] = rel_model\n\n try:\n func = _WRITE_MODEL[model]\n except KeyError:\n func = partial(write_models, model)\n\n func(data, None)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _create_m2m_links_step(self, rel_model_name,\n rel_key, rel_value, relation_name):\n \"\"\"\n Link many-to-many models together.\n\n Syntax:\n\n And `model` with `field` \"`value`\" is linked to `other model` in the\n database:\n\n Example:\n\n .. code-block:: gherkin\n\n And article with name \"Guidelines\" is linked to tags in the database:\n | name |\n | coding |\n | style |\n \"\"\"\n\n lookup = {rel_key: rel_value}\n rel_model = get_model(rel_model_name).objects.get(**lookup)\n relation = None\n for m2m in rel_model._meta.many_to_many:\n if relation_name in (m2m.name, m2m.verbose_name):\n relation = getattr(rel_model, m2m.name)\n break\n if not relation:\n try:\n relation = getattr(rel_model, relation_name)\n except AttributeError:\n pass\n assert relation, \\\n \"%s does not have a many-to-many relation named '%s'\" % (\n rel_model._meta.verbose_name.capitalize(),\n relation_name,\n )\n m2m_model = relation.model\n\n for hash_ in self.hashes:\n relation.add(m2m_model.objects.get(**hash_))", "response": "Create many - to - many links for a given model."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _model_count_step(self, count, model):\n\n model = get_model(model)\n\n expected = int(count)\n found = model.objects.count()\n assert found == expected, \"Expected %d %s, found %d.\" % \\\n (expected, 
model._meta.verbose_name_plural, found)", "response": "Count the number of models in the database."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ntesting that count mails have been sent.", "response": "def mail_sent_count(self, count):\n \"\"\"\n Test that `count` mails have been sent.\n\n Syntax:\n\n I have sent `count` emails\n\n Example:\n\n .. code-block:: gherkin\n\n Then I have sent 2 emails\n \"\"\"\n expected = int(count)\n actual = len(mail.outbox)\n assert expected == actual, \\\n \"Expected to send {0} email(s), got {1}.\".format(expected, actual)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ntesting an email contains the given text in the relevant", "response": "def mail_sent_content(self, text, part):\n \"\"\"\n Test an email contains (assert text in) the given text in the relevant\n message part (accessible as an attribute on the email object).\n\n This step strictly applies whitespace.\n\n Syntax:\n\n I have sent an email with \"`text`\" in the `part`\n\n Example:\n\n .. code-block:: gherkin\n\n Then I have sent an email with \"pandas\" in the body\n \"\"\"\n if not any(text in getattr(email, part) for email in mail.outbox):\n dump_emails(part)\n raise AssertionError(\n \"No email contained expected text in the {0}.\".format(part))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ntesting that an email has the HTML in the multiline as", "response": "def mail_sent_contains_html(self):\n \"\"\"\n Test that an email contains the HTML (assert HTML in) in the multiline as\n one of its MIME alternatives.\n\n The HTML is normalised by passing through Django's\n :func:`django.test.html.parse_html`.\n\n Example:\n\n .. code-block:: gherkin\n\n And I have sent an email with the following HTML alternative:\n \\\"\\\"\\\"\n

Name: Sir Panda

\n

Phone: 0400000000

\n

Email: sir.panda@pand.as

\n \\\"\\\"\\\"\n \"\"\"\n\n for email in mail.outbox:\n try:\n html = next(content for content, mime in email.alternatives\n if mime == 'text/html')\n dom1 = parse_html(html)\n dom2 = parse_html(self.multiline)\n\n assert_in(dom1, dom2)\n\n except AssertionError as exc:\n print(\"Email did not match\", exc)\n # we intentionally eat the exception\n continue\n\n return True\n\n raise AssertionError(\"No email contained the HTML\")"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nshowing the sent emails to aid in debugging.", "response": "def dump_emails(part):\n \"\"\"Show the sent emails' tested parts, to aid in debugging.\"\"\"\n\n print(\"Sent emails:\")\n for email in mail.outbox:\n print(getattr(email, part))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning an absolute URL matching named view with its parameters and the application instance namespace.", "response": "def namespace_url(context, view_name, *args, **kwargs):\n \"\"\"\n Returns an absolute URL matching named view with its parameters and the\n provided application instance namespace.\n\n If no namespace is passed as a kwarg (or it is \"\" or None), this templatetag\n will look into the request object for the app_config's namespace. If there\n is still no namespace found, this tag will act like the normal {% url ... %}\n template tag.\n\n Normally, this tag will return whatever is returned by the ultimate call to\n reverse, which also means it will raise NoReverseMatch if reverse() cannot\n find a match. 
This behaviour can be override by suppling a 'default' kwarg\n with the value of what should be returned when no match is found.\n \"\"\"\n\n namespace = kwargs.pop('namespace', None)\n\n if not namespace:\n namespace, __ = get_app_instance(context['request'])\n\n if namespace:\n namespace += ':'\n\n reverse = partial(\n urls.reverse, '{0:s}{1:s}'.format(namespace, view_name))\n\n # We're explicitly NOT happy to just re-raise the exception, as that may\n # adversely affect stack traces.\n if 'default' not in kwargs:\n if kwargs:\n return reverse(kwargs=kwargs)\n elif args:\n return reverse(args=args)\n else:\n return reverse()\n\n default = kwargs.pop('default', None)\n try:\n if kwargs:\n return reverse(kwargs=kwargs)\n elif args:\n return reverse(args=args)\n else:\n return reverse()\n except urls.NoReverseMatch:\n return default"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_app_instance(request):\n app = None\n if getattr(request, 'current_page', None) and request.current_page.application_urls:\n app = apphook_pool.get_apphook(request.current_page.application_urls)\n if app and app.app_config:\n try:\n config = None\n with override(get_language_from_request(request, check_path=True)):\n namespace = resolve(request.path_info).namespace\n config = app.get_config(namespace)\n return namespace, config\n except Resolver404:\n pass\n return '', None", "response": "Returns the current namespace and AppHookConfig instance of the current AppHook"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nregistering the provided form as config form for the provided config model", "response": "def setup_config(form_class, config_model=None):\n \"\"\"\n Register the provided form as config form for the provided config model\n\n This can be used as a decorator by adding a `model` attribute to the config form::\n\n @setup_config\n class ExampleConfigForm(AppDataForm):\n model = 
ExampleConfig\n\n :param form_class: Form class derived from AppDataForm\n :param config_model: Model class derived from AppHookConfig\n :return:\n \"\"\"\n # allow use as a decorator\n if config_model is None:\n return setup_config(form_class, form_class.model)\n\n app_registry.register('config', AppDataContainer.from_form(form_class), config_model)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _get_apphook_field_names(model):\n from .models import AppHookConfig # avoid circular dependencies\n fields = []\n for field in model._meta.fields:\n if isinstance(field, ForeignKey) and issubclass(field.remote_field.model, AppHookConfig):\n fields.append(field)\n return [field.name for field in fields]", "response": "Return all foreign key field names for a AppHookConfig based model"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_apphook_field_names(model):\n key = APP_CONFIG_FIELDS_KEY.format(\n app_label=model._meta.app_label,\n model_name=model._meta.object_name\n ).lower()\n if not hasattr(model, key):\n field_names = _get_apphook_field_names(model)\n setattr(model, key, field_names)\n return getattr(model, key)", "response": "Cache app - hook field names on model\n "} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_apphook_configs(obj):\n keys = get_apphook_field_names(obj)\n return [getattr(obj, key) for key in keys] if keys else []", "response": "Get apphook configs for a given object"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreceiving data from the protocol.", "response": "def data_received(self, data):\n \"\"\"Receive data from the protocol.\n\n Called when asyncio.Protocol detects received data from network.\n \"\"\"\n _LOGGER.debug(\"Starting: data_received\")\n _LOGGER.debug('Received %d bytes from PLM: %s',\n len(data), binascii.hexlify(data))\n 
self._buffer.put_nowait(data)\n asyncio.ensure_future(self._peel_messages_from_buffer(),\n loop=self._loop)\n\n _LOGGER.debug(\"Finishing: data_received\")"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncalls when the connection to the transport is lost.", "response": "def connection_lost(self, exc):\n \"\"\"Reestablish the connection to the transport.\n\n Called when asyncio.Protocol loses the network connection.\n \"\"\"\n if exc is None:\n _LOGGER.warning('End of file received from Insteon Modem')\n else:\n _LOGGER.warning('Lost connection to Insteon Modem: %s', exc)\n\n self.transport = None\n asyncio.ensure_future(self.pause_writing(), loop=self.loop)\n\n if self._connection_lost_callback:\n self._connection_lost_callback()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_all_link_done_callback(self, callback):\n _LOGGER.debug('Added new callback %s ', callback)\n self._cb_load_all_link_db_done.append(callback)", "response": "Register a callback to be invoked when the ALDB is loaded."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nregister callback to be invoked when a device is not responding.", "response": "def add_device_not_active_callback(self, callback):\n \"\"\"Register callback to be invoked when a device is not responding.\"\"\"\n _LOGGER.debug('Added new callback %s ', callback)\n self._cb_device_not_active.append(callback)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef poll_devices(self):\n for addr in self.devices:\n device = self.devices[addr]\n if not device.address.is_x10:\n device.async_refresh_state()", "response": "Request status updates from each device."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef send_msg(self, msg, wait_nak=True, wait_timeout=WAIT_TIMEOUT):\n msg_info = 
MessageInfo(msg=msg, wait_nak=wait_nak,\n wait_timeout=wait_timeout)\n _LOGGER.debug(\"Queueing msg: %s\", msg)\n self._send_queue.put_nowait(msg_info)", "response": "Send a message to the send queue."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nstarting All - Linking mode.", "response": "def start_all_linking(self, mode, group):\n \"\"\"Put the IM into All-Linking mode.\n\n Puts the IM into All-Linking mode for 4 minutes.\n\n Parameters:\n mode: 0 | 1 | 3 | 255\n 0 - PLM is responder\n 1 - PLM is controller\n 3 - Device that initiated All-Linking is Controller\n 255 = Delete All-Link\n group: All-Link group number (0 - 255)\n\n \"\"\"\n msg = StartAllLinking(mode, group)\n self.send_msg(msg)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_x10_device(self, housecode, unitcode, feature='OnOff'):\n device = insteonplm.devices.create_x10(self, housecode,\n unitcode, feature)\n if device:\n self.devices[device.address.id] = device\n return device", "response": "Add an X10 device based on a feature description."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nremoving device from ALDB device list.", "response": "def aldb_device_handled(self, addr):\n \"\"\"Remove device from ALDB device list.\"\"\"\n if isinstance(addr, Address):\n remove_addr = addr.id\n else:\n remove_addr = addr\n try:\n self._aldb_devices.pop(remove_addr)\n _LOGGER.debug('Removed ALDB device %s', remove_addr)\n except KeyError:\n _LOGGER.debug('Device %s not in ALDB device list', remove_addr)\n _LOGGER.debug('ALDB device count: %d', len(self._aldb_devices))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef manage_aldb_record(self, control_code, control_flags, group, address,\n data1, data2, data3):\n \"\"\"Update an IM All-Link record.\n\n Control Code values:\n - 0x00 Find First Starting at the top of the ALDB, 
search for the\n first ALL-Link Record matching the and in bytes\n 5 \u2013 8. The search ignores byte 4, . You will\n receive an ACK at the end of the returned message if such an ALL-Link\n Record exists, or else a NAK if it doesn\u2019t. If the record exists, the\n IM will return it in an ALL-Link Record Response (0x51) message.\n\n - 0x01 Find Next Search for the next ALL-Link Record following the one\n found using 0x00 above. This allows you to find both\n Controller and Responder records for a given and\n . Be sure to use the same and (bytes 5 \u2013 8)\n as you used for 0x00. You will receive an ACK at the\n end of the returned message if another matching ALL-Link Record\n exists, or else a NAK if it doesn\u2019t. If the record exists, the IM\n will return it in an ALL-Link Record Response (0x51) message.\n\n - 0x20 Modify First Found or Add Modify an existing or else add a new\n ALL-Link Record for either a Controller or Responder. Starting at the\n top of the ALDB, search for the first ALL-Link Record matching the\n and in bytes 5 \u2013 8. The search ignores byte 4,\n . If such an ALL-Link Record exists, overwrite\n it with the data in bytes 4 \u2013 11; otherwise, create a new ALL-Link\n Record using bytes 4 \u2013 11. Note that the IM will copy\n you supplied in byte 4 below directly into the\n byte of the ALL-Link Record in an ALDB-L\n (linear) database. Use caution, because you can damage an ALDB-L if\n you misuse this Command. For instance, if you zero the\n byte in the first ALL-Link Record, the IM\u2019s\n ALDB-L database will then appear empty.\n\n - 0x40 Modify First Controller Found or Add Modify an existing or else\n add a new Controller (master) ALL-Link Record. Starting at the top of\n the ALDB, search for the first ALL-Link Controller Record matching the\n and in bytes 5 \u2013 8. An ALL-Link Controller\n Record has bit 6 of its byte set to 1. 
If such\n a Controller ALL-Link Record exists, overwrite it with the data in\n bytes 5 \u2013 11; otherwise, create a new ALL-Link Record using bytes\n 5 \u2013 11. In either case, the IM will set bit 6 of the\n byte in the ALL-Link Record to 1 to indicate\n that the record is for a Controller.\n\n - 0x41 Modify First Responder Found or Add Modify an existing or else\n add a new Responder (slave) ALLLink Record. Starting at the top of the\n ALDB, search for the first ALL-Link Responder Record matching the\n and in bytes 5 \u2013 8. An ALL-Link Responder\n Record has bit 6 of its byte cleared to 0. If\n such a Responder ALL-Link Record exists, overwrite it with the data in\n bytes 5 \u2013 11; otherwise, create a new ALL-Link Record using bytes\n 5 \u2013 11. In either case, The IM will clear bit 6 of the\n byte in the ALL-Link Record to 0 to indicate\n that the record is for a Responder.\n\n - 0x80 Delete First Found Delete an ALL-Link Record. Starting at the\n top of the ALDB, search for the first ALL-Link Record matching the\n and in bytes 5 \u2013 8. The search ignores byte 4,\n . You will receive an ACK at the end of the\n returned message if such an ALL-Link Record existed and was deleted,\n or else a NAK no such record exists.\n \"\"\"\n msg = ManageAllLinkRecord(control_code, control_flags, group, address,\n data1, data2, data3)\n self.send_msg(msg)", "response": "This function is called by the IM when it is about to update the ALL - Link record. 
It is called by the IM when it is about to update the ALL - Link record."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _load_all_link_database(self):\n _LOGGER.debug(\"Starting: _load_all_link_database\")\n self.devices.state = 'loading'\n self._get_first_all_link_record()\n _LOGGER.debug(\"Ending: _load_all_link_database\")", "response": "Load the ALL - Link Database into object."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nrequests first ALL - Link record.", "response": "def _get_first_all_link_record(self):\n \"\"\"Request first ALL-Link record.\"\"\"\n _LOGGER.debug(\"Starting: _get_first_all_link_record\")\n _LOGGER.info('Requesting ALL-Link Records')\n if self.aldb.status == ALDBStatus.LOADED:\n self._next_all_link_rec_nak_retries = 3\n self._handle_get_next_all_link_record_nak(None)\n return\n self.aldb.clear()\n self._next_all_link_rec_nak_retries = 0\n msg = GetFirstAllLinkRecord()\n self.send_msg(msg, wait_nak=True, wait_timeout=.5)\n _LOGGER.debug(\"Ending: _get_first_all_link_record\")"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _get_next_all_link_record(self):\n _LOGGER.debug(\"Starting: _get_next_all_link_record\")\n _LOGGER.debug(\"Requesting Next All-Link Record\")\n msg = GetNextAllLinkRecord()\n self.send_msg(msg, wait_nak=True, wait_timeout=.5)\n _LOGGER.debug(\"Ending: _get_next_all_link_record\")", "response": "Request next ALL - Link record."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsending the X10 All Units Off command.", "response": "def x10_all_units_off(self, housecode):\n \"\"\"Send the X10 All Units Off command.\"\"\"\n if isinstance(housecode, str):\n housecode = housecode.upper()\n else:\n raise TypeError('Housecode must be a string')\n msg = X10Send.command_msg(housecode, X10_COMMAND_ALL_UNITS_OFF)\n 
self.send_msg(msg)\n self._x10_command_to_device(housecode, X10_COMMAND_ALL_UNITS_OFF, msg)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsends the X10 All Lights Off command to the specified resource.", "response": "def x10_all_lights_off(self, housecode):\n \"\"\"Send the X10 All Lights Off command.\"\"\"\n msg = X10Send.command_msg(housecode, X10_COMMAND_ALL_LIGHTS_OFF)\n self.send_msg(msg)\n self._x10_command_to_device(housecode, X10_COMMAND_ALL_LIGHTS_OFF, msg)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef x10_all_lights_on(self, housecode):\n msg = X10Send.command_msg(housecode, X10_COMMAND_ALL_LIGHTS_ON)\n self.send_msg(msg)\n self._x10_command_to_device(housecode, X10_COMMAND_ALL_LIGHTS_ON, msg)", "response": "Send the X10 All Lights Off command."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncalls when asyncio. Protocol establishes the network connection.", "response": "def connection_made(self, transport):\n \"\"\"Start the PLM connection process.\n\n Called when asyncio.Protocol establishes the network connection.\n \"\"\"\n _LOGGER.info('Connection established to PLM')\n self.transport = transport\n\n self._restart_writer = True\n self.restart_writing()\n\n # Testing to see if this fixes the 2413S issue\n self.transport.serial.timeout = 1\n self.transport.serial.write_timeout = 1\n self.transport.set_write_buffer_limits(128)\n # limit = self.transport.get_write_buffer_size()\n # _LOGGER.debug('Write buffer size is %d', limit)\n if self._aldb.status != ALDBStatus.LOADED:\n asyncio.ensure_future(self._setup_devices(), loop=self._loop)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncall when asyncio. 
Protocol establishes the network connection.", "response": "def connection_made(self, transport):\n \"\"\"Start the Hub connection process.\n\n Called when asyncio.Protocol establishes the network connection.\n \"\"\"\n _LOGGER.info('Connection established to Hub')\n _LOGGER.debug('Transport: %s', transport)\n self.transport = transport\n\n self._restart_writer = True\n self.restart_writing()\n\n if self._aldb.status != ALDBStatus.LOADED:\n asyncio.ensure_future(self._setup_devices(), loop=self._loop)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate a device from device info data.", "response": "def create(plm, address, cat, subcat, firmware=None):\n \"\"\"Create a device from device info data.\"\"\"\n from insteonplm.devices.ipdb import IPDB\n ipdb = IPDB()\n product = ipdb[[cat, subcat]]\n deviceclass = product.deviceclass\n device = None\n if deviceclass is not None:\n device = deviceclass(plm, address, cat, subcat,\n product.product_key,\n product.description,\n product.model)\n return device"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_x10(plm, housecode, unitcode, feature):\n from insteonplm.devices.ipdb import IPDB\n ipdb = IPDB()\n product = ipdb.x10(feature)\n deviceclass = product.deviceclass\n device = None\n if deviceclass:\n device = deviceclass(plm, housecode, unitcode)\n return device", "response": "Create an X10 device from a feature definition."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef id_request(self):\n import inspect\n curframe = inspect.currentframe()\n calframe = inspect.getouterframes(curframe, 2)\n _LOGGER.debug('caller name: %s', calframe[1][3])\n msg = StandardSend(self.address, COMMAND_ID_REQUEST_0X10_0X00)\n self._plm.send_msg(msg)", "response": "Request a device ID from a device."} {"SOURCE": "codesearchnet", "instruction": "Given the following 
Python 3 function, write the documentation\ndef product_data_request(self):\n msg = StandardSend(self._address,\n COMMAND_PRODUCT_DATA_REQUEST_0X03_0X00)\n self._send_msg(msg)", "response": "Request product data from a device."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nassign a device to an All - Link Group.", "response": "def assign_to_all_link_group(self, group=0x01):\n \"\"\"Assign a device to an All-Link Group.\n\n The default is group 0x01.\n \"\"\"\n msg = StandardSend(self._address,\n COMMAND_ASSIGN_TO_ALL_LINK_GROUP_0X01_NONE,\n cmd2=group)\n self._send_msg(msg)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef delete_from_all_link_group(self, group):\n msg = StandardSend(self._address,\n COMMAND_DELETE_FROM_ALL_LINK_GROUP_0X02_NONE,\n cmd2=group)\n self._send_msg(msg)", "response": "Delete a device from an All - Link Group."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef fx_username(self):\n msg = StandardSend(self._address, COMMAND_FX_USERNAME_0X03_0X01)\n self._send_msg(msg)", "response": "Get FX Username.\n\n Only required for devices that support FX Commands.\n FX Addressee responds with an ED 0x0301 FX Username Response message"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsends a FX Username Response message to the device.", "response": "def device_text_string_request(self):\n \"\"\"Get FX Username.\n\n Only required for devices that support FX Commands.\n FX Addressee responds with an ED 0x0301 FX Username Response message.\n \"\"\"\n msg = StandardSend(self._address, COMMAND_FX_USERNAME_0X03_0X01)\n self._send_msg(msg)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ntelling a device to enter All - Linking Mode.", "response": "def enter_linking_mode(self, group=0x01):\n \"\"\"Tell a device to enter All-Linking Mode.\n\n Same as holding down 
the Set button for 10 sec.\n Default group is 0x01.\n\n Not supported by i1 devices.\n \"\"\"\n msg = ExtendedSend(self._address,\n COMMAND_ENTER_LINKING_MODE_0X09_NONE,\n cmd2=group,\n userdata=Userdata())\n msg.set_checksum()\n self._send_msg(msg)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef enter_unlinking_mode(self, group):\n msg = StandardSend(self._address,\n COMMAND_ENTER_UNLINKING_MODE_0X0A_NONE,\n cmd2=group)\n self._send_msg(msg)", "response": "Unlink a device from an All - Link group."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting the device engine version.", "response": "def get_engine_version(self):\n \"\"\"Get the device engine version.\"\"\"\n msg = StandardSend(self._address,\n COMMAND_GET_INSTEON_ENGINE_VERSION_0X0D_0X00)\n self._send_msg(msg)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create_default_links(self):\n self._plm.manage_aldb_record(0x40, 0xe2, 0x00, self.address,\n self.cat, self.subcat, self.product_key)\n self.manage_aldb_record(0x41, 0xa2, 0x00, self._plm.address,\n self._plm.cat, self._plm.subcat,\n self._plm.product_key)\n for link in self._stateList:\n state = self._stateList[link]\n if state.is_responder:\n # IM is controller\n self._plm.manage_aldb_record(0x40, 0xe2, link, self._address,\n 0x00, 0x00, 0x00)\n # Device is responder\n self.manage_aldb_record(0x41, 0xa2, link, self._plm.address,\n state.linkdata1, state.linkdata2,\n state.linkdata3)\n if state.is_controller:\n # IM is responder\n self._plm.manage_aldb_record(0x41, 0xa2, link, self._address,\n 0x00, 0x00, 0x00)\n # Device is controller\n self.manage_aldb_record(0x40, 0xe2, link, self._plm.address,\n 0x00, 0x00, 0x00)\n self.read_aldb()", "response": "Create the default links between the IM and the device."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the 
following Python 3 function\ndef read_aldb(self, mem_addr=0x0000, num_recs=0):\n if self._aldb.version == ALDBVersion.Null:\n _LOGGER.info('Device %s does not contain an All-Link Database',\n self._address.human)\n else:\n _LOGGER.info('Reading All-Link Database for device %s',\n self._address.human)\n asyncio.ensure_future(self._aldb.load(mem_addr, num_recs),\n loop=self._plm.loop)\n self._aldb.add_loaded_callback(self._aldb_loaded_callback)", "response": "Read the device All - Link Database."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef manage_aldb_record(self, control_code, control_flags, group, address,\n data1, data2, data3):\n \"\"\"Update an IM All-Link record.\n\n Control Code values:\n - 0x00 Find First Starting at the top of the ALDB, search for the\n first ALL-Link Record matching the and in bytes\n 5 \u2013 8. The search ignores byte 4, . You will\n receive an ACK at the end of the returned message if such an ALL-Link\n Record exists, or else a NAK if it doesn\u2019t. If the record exists, the\n IM will return it in an ALL-Link Record Response (0x51) message.\n\n - 0x01 Find Next Search for the next ALL-Link Record following the one\n found using 0x00 above. This allows you to find both\n Controller and Responder records for a given and\n . Be sure to use the same and (bytes 5 \u2013 8)\n as you used for 0x00. You will receive an ACK at the\n end of the returned message if another matching ALL-Link Record\n exists, or else a NAK if it doesn\u2019t. If the record exists, the IM\n will return it in an ALL-Link Record Response (0x51) message.\n\n - 0x20 Modify First Found or Add Modify an existing or else add a new\n ALL-Link Record for either a Controller or Responder. Starting at the\n top of the ALDB, search for the first ALL-Link Record matching the\n and in bytes 5 \u2013 8. The search ignores byte 4,\n . 
If such an ALL-Link Record exists, overwrite\n it with the data in bytes 4 \u2013 11; otherwise, create a new ALL-Link\n Record using bytes 4 \u2013 11. Note that the IM will copy\n you supplied in byte 4 below directly into the\n byte of the ALL-Link Record in an ALDB-L\n (linear) database. Use caution, because you can damage an ALDB-L if\n you misuse this Command. For instance, if you zero the\n byte in the first ALL-Link Record, the IM\u2019s\n ALDB-L database will then appear empty.\n\n - 0x40 Modify First Controller Found or Add Modify an existing or else\n add a new Controller (master) ALL-Link Record. Starting at the top of\n the ALDB, search for the first ALL-Link Controller Record matching the\n and in bytes 5 \u2013 8. An ALL-Link Controller\n Record has bit 6 of its byte set to 1. If such\n a Controller ALL-Link Record exists, overwrite it with the data in\n bytes 5 \u2013 11; otherwise, create a new ALL-Link Record using bytes\n 5 \u2013 11. In either case, the IM will set bit 6 of the\n byte in the ALL-Link Record to 1 to indicate\n that the record is for a Controller.\n\n - 0x41 Modify First Responder Found or Add Modify an existing or else\n add a new Responder (slave) ALLLink Record. Starting at the top of the\n ALDB, search for the first ALL-Link Responder Record matching the\n and in bytes 5 \u2013 8. An ALL-Link Responder\n Record has bit 6 of its byte cleared to 0. If\n such a Responder ALL-Link Record exists, overwrite it with the data in\n bytes 5 \u2013 11; otherwise, create a new ALL-Link Record using bytes\n 5 \u2013 11. In either case, The IM will clear bit 6 of the\n byte in the ALL-Link Record to 0 to indicate\n that the record is for a Responder.\n\n - 0x80 Delete First Found Delete an ALL-Link Record. Starting at the\n top of the ALDB, search for the first ALL-Link Record matching the\n and in bytes 5 \u2013 8. The search ignores byte 4,\n . 
You will receive an ACK at the end of the\n returned message if such an ALL-Link Record existed and was deleted,\n or else a NAK no such record exists.\n \"\"\"\n found_first = False\n for memaddr in self.aldb:\n found = False\n rec = self.aldb[memaddr]\n found = Address(address) == rec.address\n found = found & int(group) == rec.group\n if control_code == 0x00:\n if found:\n return (True, rec)\n if rec.control_flags.is_high_water_mark:\n return (False, None)\n if control_code == 0x01:\n if found & found_first:\n return (True, rec)\n if rec.control_flags.is_high_water_mark:\n return (False, None)\n if found:\n found_first = True\n elif control_code == 0x20:\n if found or rec.control_flags.is_high_water_mark:\n new_rec = self._write_new_aldb_rec(\n rec.mem_addr, control_flags, group, address,\n data1, data2, data3)\n return (found, new_rec)\n elif control_code == 0x40:\n found = found & rec.control_flags.is_controller\n if found or rec.control_flags.is_high_water_mark:\n new_rec = self._write_new_aldb_rec(\n rec.mem_addr, control_flags, group, address,\n data1, data2, data3)\n return (found, new_rec)\n elif control_code == 0x41:\n found = found & rec.control_flags.is_responder\n if found or rec.control_flags.is_high_water_mark:\n new_rec = self._write_new_aldb_rec(\n rec.mem_addr, control_flags, group, address,\n data1, data2, data3)\n return (found, new_rec)\n elif control_code == 0x80:\n if found:\n cf = rec.control_flags\n new_control_flags = ControlFlags(False, cf.is_controller,\n cf.is_high_water_mark,\n False, False)\n new_rec = self._write_new_aldb_rec(\n rec.mem_addr, new_control_flags, rec.group,\n rec.address, rec.data1, rec.data2, rec.data3)\n return (found, new_rec)\n return (False, rec)", "response": "This function is called by the IM when it is about to update the ALL - Link record. 
It is called by the IM when it is about to update the ALL - Link record."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nwrite the record to the All - Link Database.", "response": "def write_aldb(self, mem_addr: int, mode: str, group: int, target,\n data1=0x00, data2=0x00, data3=0x00):\n \"\"\"Write to the device All-Link Database.\n\n Parameters:\n Required:\n mode: r - device is a responder of target\n c - device is a controller of target\n group: Link group\n target: Address of the other device\n\n Optional:\n data1: Device dependant\n data2: Device dependant\n data3: Device dependant\n\n \"\"\"\n if isinstance(mode, str) and mode.lower() in ['c', 'r']:\n pass\n else:\n _LOGGER.error('Insteon link mode: %s', mode)\n raise ValueError(\"Mode must be 'c' or 'r'\")\n if isinstance(group, int):\n pass\n else:\n raise ValueError(\"Group must be an integer\")\n\n target_addr = Address(target)\n\n _LOGGER.debug('calling aldb write_record')\n self._aldb.write_record(mem_addr, mode, group, target_addr,\n data1, data2, data3)\n self._aldb.add_loaded_callback(self._aldb_loaded_callback)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndelete an All - Link Database record.", "response": "def del_aldb(self, mem_addr: int):\n \"\"\"Delete an All-Link Database record.\"\"\"\n self._aldb.del_record(mem_addr)\n self._aldb.add_loaded_callback(self._aldb_loaded_callback)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _refresh_aldb_records(self, linkcode, address, group):\n if self.aldb.status in [ALDBStatus.LOADED, ALDBStatus.PARTIAL]:\n for mem_addr in self.aldb:\n rec = self.aldb[mem_addr]\n if linkcode in [0, 1, 3]:\n if rec.control_flags.is_high_water_mark:\n _LOGGER.debug('Removing HWM record %04x', mem_addr)\n self.aldb.pop(mem_addr)\n elif not rec.control_flags.is_in_use:\n _LOGGER.debug('Removing not in use record %04x',\n mem_addr)\n 
self.aldb.pop(mem_addr)\n else:\n if rec.address == self.address and rec.group == group:\n _LOGGER.debug('Removing record %04x with addr %s and '\n 'group %d', mem_addr, rec.address,\n rec.group)\n self.aldb.pop(mem_addr)\n self.read_aldb()", "response": "Refresh the IM and device ALDB records."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreceive a message from the device.", "response": "def receive_message(self, msg):\n \"\"\"Receive a messages sent to this device.\"\"\"\n _LOGGER.debug('Starting Device.receive_message')\n if hasattr(msg, 'isack') and msg.isack:\n _LOGGER.debug('Got Message ACK')\n if self._sent_msg_wait_for_directACK.get('callback') is not None:\n _LOGGER.debug('Look for direct ACK')\n asyncio.ensure_future(self._wait_for_direct_ACK(),\n loop=self._plm.loop)\n else:\n _LOGGER.debug('DA queue: %s',\n self._sent_msg_wait_for_directACK)\n _LOGGER.debug('Message ACK with no callback')\n if (hasattr(msg, 'flags') and\n hasattr(msg.flags, 'isDirectACK') and\n msg.flags.isDirectACK):\n _LOGGER.debug('Got Direct ACK message')\n if self._send_msg_lock.locked():\n self._directACK_received_queue.put_nowait(msg)\n else:\n _LOGGER.debug('But Direct ACK not expected')\n if not self._is_duplicate(msg):\n callbacks = self._message_callbacks.get_callbacks_from_message(msg)\n for callback in callbacks:\n _LOGGER.debug('Scheduling msg callback: %s', callback)\n self._plm.loop.call_soon(callback, msg)\n else:\n _LOGGER.debug('msg is duplicate')\n _LOGGER.debug(msg)\n self._last_communication_received = datetime.datetime.now()\n _LOGGER.debug('Ending Device.receive_message')"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef receive_message(self, msg):\n _LOGGER.debug('Starting X10Device.receive_message')\n if hasattr(msg, 'isack') and msg.isack:\n _LOGGER.debug('Got Message ACK')\n if self._send_msg_lock.locked():\n self._send_msg_lock.release()\n callbacks = 
self._message_callbacks.get_callbacks_from_message(msg)\n _LOGGER.debug('Found %d callbacks for msg %s', len(callbacks), msg)\n for callback in callbacks:\n _LOGGER.debug('Scheduling msg callback: %s', callback)\n self._plm.loop.call_soon(callback, msg)\n self._last_communication_received = datetime.datetime.now()\n _LOGGER.debug('Ending Device.receive_message')", "response": "Receive a message from the device."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add(self, plm, device, stateType, stateName, group, defaultValue=None):\n self._stateList[group] = stateType(plm, device, stateName, group,\n defaultValue=defaultValue)", "response": "Add a state to the StateList."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create_from_userdata(userdata):\n memhi = userdata.get('d3')\n memlo = userdata.get('d4')\n memory = memhi << 8 | memlo\n control_flags = userdata.get('d6')\n group = userdata.get('d7')\n addrhi = userdata.get('d8')\n addrmed = userdata.get('d9')\n addrlo = userdata.get('d10')\n addr = Address(bytearray([addrhi, addrmed, addrlo]))\n data1 = userdata.get('d11')\n data2 = userdata.get('d12')\n data3 = userdata.get('d13')\n return ALDBRecord(memory, control_flags, group, addr,\n data1, data2, data3)", "response": "Create ALDB Record from the userdata dictionary."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a Userdata dictionary.", "response": "def to_userdata(self):\n \"\"\"Return a Userdata dictionary.\"\"\"\n userdata = Userdata({'d3': self.memhi,\n 'd4': self.memlo,\n 'd6': self.control_flags,\n 'd7': self.group,\n 'd8': self.address.bytes[2],\n 'd9': self.address.bytes[1],\n 'd10': self.address.bytes[0],\n 'd11': self.data1,\n 'd12': self.data2,\n 'd13': self.data3})\n return userdata"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a byte representation of ControlFlags.", 
"response": "def byte(self):\n \"\"\"Return a byte representation of ControlFlags.\"\"\"\n flags = int(self._in_use) << 7 \\\n | int(self._controller) << 6 \\\n | int(self._bit5) << 5 \\\n | int(self._bit4) << 4 \\\n | int(self._used_before) << 1\n return flags"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create_from_byte(control_flags):\n in_use = bool(control_flags & 1 << 7)\n controller = bool(control_flags & 1 << 6)\n bit5 = bool(control_flags & 1 << 5)\n bit4 = bool(control_flags & 1 << 4)\n used_before = bool(control_flags & 1 << 1)\n flags = ControlFlags(in_use, controller, used_before,\n bit5=bit5, bit4=bit4)\n return flags", "response": "Create a ControlFlags class from a control flags byte."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a callback to be run when the ALDB load is complete.", "response": "def add_loaded_callback(self, callback):\n \"\"\"Add a callback to be run when the ALDB load is complete.\"\"\"\n if callback not in self._cb_aldb_loaded:\n self._cb_aldb_loaded.append(callback)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreads the device database and load.", "response": "async def load(self, mem_addr=0x0000, rec_count=0, retry=0):\n \"\"\"Read the device database and load.\"\"\"\n if self._version == ALDBVersion.Null:\n self._status = ALDBStatus.LOADED\n _LOGGER.debug('Device has no ALDB')\n else:\n self._status = ALDBStatus.LOADING\n _LOGGER.debug('Tring to lock from load')\n await self._rec_mgr_lock\n _LOGGER.debug('load yielded lock')\n\n mem_hi = mem_addr >> 8\n mem_lo = mem_addr & 0xff\n log_output = 'ALDB read'\n max_retries = 0\n if rec_count:\n max_retries = ALDB_RECORD_RETRIES\n if mem_addr == 0x0000:\n log_output = '{:s} first record'.format(log_output)\n else:\n log_output = '{:s} record {:04x}'.format(log_output,\n mem_addr)\n else:\n max_retries = ALDB_ALL_RECORD_RETRIES\n 
log_output = '{:s} all records'.format(log_output)\n\n if retry:\n log_output = '{:s} retry {:d} of {:d}'.format(log_output,\n retry,\n max_retries)\n _LOGGER.info(log_output)\n userdata = Userdata({'d1': 0,\n 'd2': 0,\n 'd3': mem_hi,\n 'd4': mem_lo,\n 'd5': rec_count})\n msg = ExtendedSend(self._address,\n COMMAND_EXTENDED_READ_WRITE_ALDB_0X2F_0X00,\n userdata=userdata)\n msg.set_checksum()\n self._send_method(msg, self._handle_read_aldb_ack, True)\n\n if not self._load_action:\n self._set_load_action(mem_addr, rec_count, -1, False)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nwrite an All - Link database record.", "response": "def write_record(self, mem_addr: int, mode: str, group: int, target,\n data1=0x00, data2=0x00, data3=0x00):\n \"\"\"Write an All-Link database record.\"\"\"\n if not (self._have_first_record() and self._have_last_record()):\n _LOGGER.error('Must load the Insteon All-Link Database before '\n 'writing to it')\n else:\n self._prior_status = self._status\n self._status = ALDBStatus.LOADING\n mem_hi = mem_addr >> 8\n mem_lo = mem_addr & 0xff\n controller = mode == 'c'\n control_flag = ControlFlags(True, controller, True, False, False)\n addr = Address(target)\n addr_lo = addr.bytes[0]\n addr_mid = addr.bytes[1]\n addr_hi = addr.bytes[2]\n userdata = Userdata({'d1': 0,\n 'd2': 0x02,\n 'd3': mem_hi,\n 'd4': mem_lo,\n 'd5': 0x08,\n 'd6': control_flag.byte,\n 'd7': group,\n 'd8': addr_lo,\n 'd9': addr_mid,\n 'd10': addr_hi,\n 'd11': data1,\n 'd12': data2,\n 'd13': data3})\n msg = ExtendedSend(self._address,\n COMMAND_EXTENDED_READ_WRITE_ALDB_0X2F_0X00,\n userdata=userdata)\n msg.set_checksum()\n _LOGGER.debug('writing message %s', msg)\n self._send_method(msg, self._handle_write_aldb_ack, True)\n self._load_action = LoadAction(mem_addr, 1, 0)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef del_record(self, mem_addr: int):\n record 
= self._records.get(mem_addr)\n if not record:\n _LOGGER.error('Must load the Insteon All-Link Database record '\n 'before deleting it')\n else:\n self._prior_status = self._status\n self._status = ALDBStatus.LOADING\n mem_hi = mem_addr >> 8\n mem_lo = mem_addr & 0xff\n controller = record.control_flags.is_controller\n control_flag = ControlFlags(False, controller, True, False, False)\n addr = record.address\n addr_lo = addr.bytes[0]\n addr_mid = addr.bytes[1]\n addr_hi = addr.bytes[2]\n group = record.group\n data1 = record.data1\n data2 = record.data2\n data3 = record.data3\n userdata = Userdata({'d1': 0,\n 'd2': 0x02,\n 'd3': mem_hi,\n 'd4': mem_lo,\n 'd5': 0x08,\n 'd6': control_flag.byte,\n 'd7': group,\n 'd8': addr_lo,\n 'd9': addr_mid,\n 'd10': addr_hi,\n 'd11': data1,\n 'd12': data2,\n 'd13': data3})\n msg = ExtendedSend(self._address,\n COMMAND_EXTENDED_READ_WRITE_ALDB_0X2F_0X00,\n userdata=userdata)\n msg.set_checksum()\n _LOGGER.debug('writing message %s', msg)\n self._send_method(msg, self._handle_write_aldb_ack, True)\n self._load_action = LoadAction(mem_addr, 1, 0)", "response": "Write an All - Link database record."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef find_matching_link(self, mode, group, addr):\n found_rec = None\n mode_test = None\n if mode.lower() in ['c', 'r']:\n link_group = int(group)\n link_addr = Address(addr)\n for mem_addr in self:\n rec = self[mem_addr]\n if mode.lower() == 'r':\n mode_test = rec.control_flags.is_controller\n else:\n mode_test = rec.control_flags.is_responder\n if (mode_test and\n rec.group == link_group and\n rec.address == link_addr):\n found_rec = rec\n return found_rec", "response": "Find a matching link in the current device."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef record_received(self, msg):\n release_lock = False\n userdata = msg.userdata\n rec = 
ALDBRecord.create_from_userdata(userdata)\n self._records[rec.mem_addr] = rec\n\n _LOGGER.debug('ALDB Record: %s', rec)\n\n rec_count = self._load_action.rec_count\n if rec_count == 1 or self._have_all_records():\n release_lock = True\n\n if self._is_first_record(rec):\n self._mem_addr = rec.mem_addr\n\n if release_lock and self._rec_mgr_lock.locked():\n _LOGGER.debug('Releasing lock because record received')\n self._rec_mgr_lock.release()", "response": "Handle ALDB record received from device."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nload ALDB records from a set of saved records.", "response": "def load_saved_records(self, status, records):\n \"\"\"Load ALDB records from a set of saved records.\"\"\"\n if isinstance(status, ALDBStatus):\n self._status = status\n else:\n self._status = ALDBStatus(status)\n for mem_addr in records:\n rec = records[mem_addr]\n control_flags = int(rec.get('control_flags', 0))\n group = int(rec.get('group', 0))\n rec_addr = rec.get('address', '000000')\n data1 = int(rec.get('data1', 0))\n data2 = int(rec.get('data2', 0))\n data3 = int(rec.get('data3', 0))\n self[int(mem_addr)] = ALDBRecord(int(mem_addr), control_flags,\n group, rec_addr,\n data1, data2, data3)\n if self._status == ALDBStatus.LOADED:\n keys = list(self._records.keys())\n keys.sort(reverse=True)\n first_key = keys[0]\n self._mem_addr = first_key"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncalculate the next record to read.", "response": "def _set_load_action(self, mem_addr, rec_count, retries,\n read_complete=False):\n \"\"\"Calculate the next record to read.\n\n If the last record was successful and one record was being read then\n look for the next record until we get to the high water mark.\n\n If the last read was successful and all records were being read then\n look for the first record.\n\n if the last read was unsuccessful and one record was being read then\n repeat the last read until max 
retries\n\n If the last read was unsuccessful and all records were being read then\n repeat the last read until max retries or look for the first record.\n \"\"\"\n if self._have_all_records():\n mem_addr = None\n rec_count = 0\n retries = 0\n elif read_complete:\n retries = 0\n if rec_count:\n mem_addr = self._next_address(mem_addr)\n else:\n mem_addr = self._next_address(mem_addr)\n rec_count = 1\n retries = 0\n elif rec_count and retries < ALDB_RECORD_RETRIES:\n retries = retries + 1\n elif not rec_count and retries < ALDB_ALL_RECORD_RETRIES:\n retries = retries + 1\n elif not rec_count and retries >= ALDB_ALL_RECORD_RETRIES:\n mem_addr = self._next_address(mem_addr)\n rec_count = 1\n retries = 0\n else:\n mem_addr = None\n rec_count = 0\n retries = 0\n\n self._load_action = LoadAction(mem_addr, rec_count, retries)\n if mem_addr is not None:\n _LOGGER.debug('Load action: addr: %04x rec_count: %d retries: %d',\n self._load_action.mem_addr,\n self._load_action.rec_count,\n self._load_action.retries)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef register_updates(self, callback):\n _LOGGER.debug(\"Registered callback for state: %s\", self._stateName)\n self._observer_callbacks.append(callback)", "response": "Register a callback to notify a listener of state changes."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsaving state value and notify subscribers of the change.", "response": "def _update_subscribers(self, val):\n \"\"\"Save state value and notify listeners of the change.\"\"\"\n self._value = val\n for callback in self._observer_callbacks:\n callback(self._address, self._group, val)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef from_raw_message(cls, rawmessage):\n userdata = Userdata.from_raw_message(rawmessage[11:25])\n return ExtendedReceive(rawmessage[2:5],\n rawmessage[5:8],\n {'cmd1': rawmessage[9],\n 'cmd2': 
rawmessage[10]},\n userdata,\n flags=rawmessage[8])", "response": "Create a new message from a raw byte stream."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef template(cls, address=None, target=None, commandtuple=None,\n userdata=None, cmd2=-1, flags=None):\n \"\"\"Create message template for callbacks.\"\"\"\n msgraw = bytearray([0x02, cls._code])\n msgraw.extend(bytes(cls._receivedSize))\n msg = ExtendedReceive.from_raw_message(msgraw)\n\n if commandtuple:\n cmd1 = commandtuple.get('cmd1')\n cmd2out = commandtuple.get('cmd2')\n else:\n cmd1 = None\n cmd2out = None\n\n if cmd2 is not -1:\n cmd2out = cmd2\n\n msg._address = Address(address)\n msg._target = Address(target)\n msg._messageFlags = MessageFlags(flags)\n msg._cmd1 = cmd1\n msg._cmd2 = cmd2out\n msg._userdata = Userdata.create_pattern(userdata)\n return msg", "response": "Create message template for callbacks."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef targetLow(self):\n low_byte = None\n if self.target is not None and self._messageFlags.isBroadcast:\n low_byte = self.target.bytes[0]\n return low_byte", "response": "Return the low byte of the target address field."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_level(self, val):\n if val == 0:\n self.off()\n else:\n setlevel = 255\n if val < 1:\n setlevel = val * 100\n elif val <= 0xff:\n setlevel = val\n set_command = StandardSend(\n self._address, COMMAND_LIGHT_ON_0X11_NONE, cmd2=setlevel)\n self._send_method(set_command, self._on_message_received)", "response": "Set the devive ON LEVEL."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef brighten(self):\n brighten_command = StandardSend(\n self._address, COMMAND_LIGHT_BRIGHTEN_ONE_STEP_0X15_0X00)\n 
self._send_method(brighten_command)", "response": "Brighten the device one step."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndim the device one step.", "response": "def dim(self):\n \"\"\"Dim the device one step.\"\"\"\n dim_command = StandardSend(\n self._address, COMMAND_LIGHT_DIM_ONE_STEP_0X16_0X00)\n self._send_method(dim_command)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting the fan speed.", "response": "def set_level(self, val):\n \"\"\"Set the fan speed.\"\"\"\n speed = self._value_to_fan_speed(val)\n if val == 0:\n self.off()\n else:\n set_command = ExtendedSend(self._address,\n COMMAND_LIGHT_ON_0X11_NONE,\n self._udata, cmd2=speed)\n set_command.set_checksum()\n self._send_method(set_command, self._on_message_received)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nturning the device ON.", "response": "def open(self):\n \"\"\"Turn the device ON.\"\"\"\n open_command = StandardSend(self._address,\n COMMAND_LIGHT_ON_0X11_NONE, cmd2=0xff)\n self._send_method(open_command, self._open_message_received)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef open_fast(self):\n open_command = StandardSend(self._address,\n COMMAND_LIGHT_ON_FAST_0X12_NONE, cmd2=0xff)\n self._send_method(open_command, self._open_message_received)", "response": "Turn the device ON Fast."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nturns the device off.", "response": "def close(self):\n \"\"\"Turn the device off.\"\"\"\n close_command = StandardSend(self._address,\n COMMAND_LIGHT_OFF_0X13_0X00)\n self._send_method(close_command, self._closed_message_received)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef close_fast(self):\n close_command = StandardSend(self._address,\n COMMAND_LIGHT_OFF_FAST_0X14_0X00)\n self._send_method(close_command, 
self._closed_message_received)", "response": "Turn the device off fast."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef set_position(self, val):\n if val == 0:\n self.close()\n else:\n setlevel = 255\n if val < 1:\n setlevel = val * 100\n elif val <= 0xff:\n setlevel = val\n set_command = StandardSend(\n self._address, COMMAND_LIGHT_ON_0X11_NONE, cmd2=setlevel)\n self._send_method(set_command, self._open_message_received)", "response": "Set the devive OPEN LEVEL."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_position_fast(self, val):\n if val == 0:\n self.close_fast()\n else:\n setlevel = 255\n if val < 1:\n setlevel = val * 100\n elif val <= 0xff:\n setlevel = val\n set_command = StandardSend(\n self._address, COMMAND_LIGHT_ON_FAST_0X12_NONE, cmd2=setlevel)\n self._send_method(set_command, self._open_message_received)", "response": "Set the devive OPEN LEVEL."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nrequest each state to provide status update.", "response": "def async_refresh_state(self):\n \"\"\"Request each state to provide status update.\"\"\"\n _LOGGER.debug('Setting up extended status')\n ext_status = ExtendedSend(\n address=self._address,\n commandtuple=COMMAND_EXTENDED_GET_SET_0X2E_0X00,\n cmd2=0x02,\n userdata=Userdata())\n ext_status.set_crc()\n _LOGGER.debug('Sending ext status: %s', ext_status)\n self._send_msg(ext_status)\n _LOGGER.debug('Sending temp status request')\n self.temperature.async_refresh_state()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef byte_to_housecode(bytecode):\n hc = list(HC_LOOKUP.keys())[list(HC_LOOKUP.values()).index(bytecode)]\n return hc.upper()", "response": "Return an X10 housecode value from a byte value."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation 
for the following Python 3 function\ndef byte_to_unitcode(bytecode):\n return list(UC_LOOKUP.keys())[list(UC_LOOKUP.values()).index(bytecode)]", "response": "Return an X10 unitcode value from a byte value."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef x10_command_type(command):\n command_type = X10CommandType.DIRECT\n if command in [X10_COMMAND_ALL_UNITS_OFF,\n X10_COMMAND_ALL_LIGHTS_ON,\n X10_COMMAND_ALL_LIGHTS_OFF]:\n command_type = X10CommandType.BROADCAST\n return command_type", "response": "Return the X10 command type from an X10 command."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef set_bit(bitmask, bit, is_on):\n bitshift = bit - 1\n if is_on:\n return bitmask | (1 << bitshift)\n return bitmask & (0xff & ~(1 << bitshift))", "response": "Set the value of a bit in a bitmask on or off."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncreating a message from a raw byte stream.", "response": "def from_raw_message(cls, rawmessage):\n \"\"\"Create a message from a raw byte stream.\"\"\"\n if (rawmessage[5] &\n MESSAGE_FLAG_EXTENDED_0X10) == MESSAGE_FLAG_EXTENDED_0X10:\n if len(rawmessage) >= ExtendedSend.receivedSize:\n msg = ExtendedSend.from_raw_message(rawmessage)\n else:\n msg = None\n else:\n msg = StandardSend(rawmessage[2:5],\n {'cmd1': rawmessage[6],\n 'cmd2': rawmessage[7]},\n flags=rawmessage[5],\n acknak=rawmessage[8:9])\n return msg"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef from_raw_message(cls, rawmessage):\n userdata_dict = Userdata(rawmessage[8:22])\n return ExtendedSend(rawmessage[2:5],\n {'cmd1': rawmessage[6],\n 'cmd2': rawmessage[7]},\n userdata_dict,\n flags=rawmessage[5],\n acknak=rawmessage[22:23])", "response": "Create a message from a raw byte stream."} {"SOURCE": "codesearchnet", 
"instruction": "Make a summary of the following Python 3 code\ndef template(cls, address=None, commandtuple=None,\n userdata=None, cmd2=-1, flags=None, acknak=None):\n \"\"\"Create a message template used for callbacks.\"\"\"\n msgraw = bytearray([0x02, cls._code])\n msgraw.extend(bytes(cls._receivedSize))\n msg = ExtendedSend.from_raw_message(msgraw)\n\n if commandtuple:\n cmd1 = commandtuple.get('cmd1')\n cmd2out = commandtuple.get('cmd2')\n else:\n cmd1 = None\n cmd2out = None\n\n if cmd2 is not -1:\n cmd2out = cmd2\n\n msg._address = Address(address)\n msg._messageFlags = MessageFlags(flags)\n msg._messageFlags.extended = 1\n msg._cmd1 = cmd1\n msg._cmd2 = cmd2out\n msg._userdata = Userdata.template(userdata)\n msg._acknak = acknak\n return msg", "response": "Create a message template used for callbacks."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef set_checksum(self):\n data_sum = self.cmd1 + self.cmd2\n for i in range(1, 14):\n data_sum += self._userdata['d{:d}'.format(i)]\n chksum = 0xff - (data_sum & 0xff) + 1\n self._userdata['d14'] = chksum", "response": "Set byte 14 of the userdata to a checksum value."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset Userdata [ 13 and 14 to the CRC value.", "response": "def set_crc(self):\n \"\"\"Set Userdata[13] and Userdata[14] to the CRC value.\"\"\"\n data = self.bytes[6:20]\n crc = int(0)\n for b in data:\n # pylint: disable=unused-variable\n for bit in range(0, 8):\n fb = b & 0x01\n fb = fb ^ 0x01 if (crc & 0x8000) else fb\n fb = fb ^ 0x01 if (crc & 0x4000) else fb\n fb = fb ^ 0x01 if (crc & 0x1000) else fb\n fb = fb ^ 0x01 if (crc & 0x0008) else fb\n crc = ((crc << 1) | fb) & 0xffff\n b = b >> 1\n self._userdata['d13'] = (crc >> 8) & 0xff\n self._userdata['d14'] = crc & 0xff"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a message from a raw 
byte stream.", "response": "def from_raw_message(cls, rawmessage):\n \"\"\"Create message from a raw byte stream.\"\"\"\n return StandardReceive(rawmessage[2:5],\n rawmessage[5:8],\n {'cmd1': rawmessage[9],\n 'cmd2': rawmessage[10]},\n flags=rawmessage[8])"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the middle byte of the target message property.", "response": "def targetMed(self):\n \"\"\"Return the middle byte of the target message property.\n\n Used in All-Link Cleanup message types.\n \"\"\"\n med_byte = None\n if self.target.addr is not None and self._messageFlags.isBroadcast:\n med_byte = self.target.bytes[1]\n return med_byte"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns the high byte of the target message property.", "response": "def targetHi(self):\n \"\"\"Return the high byte of the target message property.\n\n Used in All-Link Cleanup message types.\n \"\"\"\n hi_byte = None\n if self.target.addr is not None and self._messageFlags.isBroadcast:\n hi_byte = self.target.bytes[2]\n return hi_byte"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncreating a message from a raw byte stream.", "response": "def from_raw_message(cls, rawmessage):\n \"\"\"Create a message from a raw byte stream.\"\"\"\n return AllLinkComplete(rawmessage[2],\n rawmessage[3],\n rawmessage[4:7],\n rawmessage[7],\n rawmessage[8],\n rawmessage[9])"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create(rawmessage):\n rawmessage = _trim_buffer_garbage(rawmessage)\n\n if len(rawmessage) < 2:\n return (None, rawmessage)\n\n code = rawmessage[1]\n msgclass = _get_msg_class(code)\n\n msg = None\n\n remaining_data = rawmessage\n if msgclass is None:\n _LOGGER.debug('Did not find message class 0x%02x', rawmessage[1])\n rawmessage = rawmessage[1:]\n rawmessage = _trim_buffer_garbage(rawmessage, False)\n if 
rawmessage:\n _LOGGER.debug('Create: %s', create)\n _LOGGER.debug('rawmessage: %s', binascii.hexlify(rawmessage))\n msg, remaining_data = create(rawmessage)\n else:\n remaining_data = rawmessage\n else:\n if iscomplete(rawmessage):\n msg = msgclass.from_raw_message(rawmessage)\n if msg:\n remaining_data = rawmessage[len(msg.bytes):]\n # _LOGGER.debug(\"Returning msg: %s\", msg)\n # _LOGGER.debug('Returning buffer: %s', binascii.hexlify(remaining_data))\n return (msg, remaining_data)", "response": "Return an INSTEON message class based on a raw byte stream."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ntesting if the raw message is a complete message.", "response": "def iscomplete(rawmessage):\n \"\"\"Test if the raw message is a complete message.\"\"\"\n if len(rawmessage) < 2:\n return False\n\n if rawmessage[0] != 0x02:\n raise ValueError('message does not start with 0x02')\n\n messageBuffer = bytearray()\n filler = bytearray(30)\n messageBuffer.extend(rawmessage)\n messageBuffer.extend(filler)\n\n msg = _get_msg_class(rawmessage[1])\n\n if hasattr(msg, 'receivedSize') and msg.receivedSize:\n expectedSize = msg.receivedSize\n else:\n _LOGGER.error('Unable to find a receivedSize for code 0x%x',\n rawmessage[1])\n return ValueError\n\n is_expected_size = False\n if len(rawmessage) >= expectedSize:\n is_expected_size = True\n\n return is_expected_size"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget the message class based on the message code.", "response": "def _get_msg_class(code):\n \"\"\"Get the message class based on the message code.\"\"\"\n msg_classes = {}\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_STANDARD_MESSAGE_RECEIVED_0X50,\n StandardReceive)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_EXTENDED_MESSAGE_RECEIVED_0X51,\n ExtendedReceive)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_X10_MESSAGE_RECEIVED_0X52,\n X10Received)\n msg_classes = 
_add_msg_class(msg_classes,\n MESSAGE_ALL_LINKING_COMPLETED_0X53,\n AllLinkComplete)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_BUTTON_EVENT_REPORT_0X54,\n ButtonEventReport)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_USER_RESET_DETECTED_0X55,\n UserReset)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_ALL_LINK_CEANUP_FAILURE_REPORT_0X56,\n AllLinkCleanupFailureReport)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_ALL_LINK_RECORD_RESPONSE_0X57,\n AllLinkRecordResponse)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_ALL_LINK_CLEANUP_STATUS_REPORT_0X58,\n AllLinkCleanupStatusReport)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_GET_IM_INFO_0X60,\n GetImInfo)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_SEND_ALL_LINK_COMMAND_0X61,\n SendAllLinkCommand)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_SEND_STANDARD_MESSAGE_0X62,\n StandardSend)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_X10_MESSAGE_SEND_0X63,\n X10Send)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_START_ALL_LINKING_0X64,\n StartAllLinking)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_CANCEL_ALL_LINKING_0X65,\n CancelAllLinking)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_RESET_IM_0X67,\n ResetIM)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_GET_FIRST_ALL_LINK_RECORD_0X69,\n GetFirstAllLinkRecord)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_GET_NEXT_ALL_LINK_RECORD_0X6A,\n GetNextAllLinkRecord)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_MANAGE_ALL_LINK_RECORD_0X6F,\n ManageAllLinkRecord)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_SET_IM_CONFIGURATION_0X6B,\n SetIMConfiguration)\n msg_classes = _add_msg_class(msg_classes,\n MESSAGE_GET_IM_CONFIGURATION_0X73,\n GetImConfiguration)\n\n return msg_classes.get(code, None)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _trim_buffer_garbage(rawmessage, debug=True):\n 
while rawmessage and rawmessage[0] != MESSAGE_START_CODE_0X02:\n if debug:\n _LOGGER.debug('Buffer content: %s', binascii.hexlify(rawmessage))\n _LOGGER.debug('Trimming leading buffer garbage')\n rawmessage = rawmessage[1:]\n return rawmessage", "response": "Trim leading bytes from a proper message byte stream."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ntest if current message matches a patterns or template.", "response": "def matches_pattern(self, other):\n \"\"\"Test if current message match a patterns or template.\"\"\"\n if hasattr(other, 'messageType'):\n messageTypeIsEqual = False\n if self.messageType is None or other.messageType is None:\n messageTypeIsEqual = True\n else:\n messageTypeIsEqual = (self.messageType == other.messageType)\n extendedIsEqual = False\n if self.extended is None or other.extended is None:\n extendedIsEqual = True\n else:\n extendedIsEqual = (self.extended == other.extended)\n return messageTypeIsEqual and extendedIsEqual\n return False"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets all properties of the MessageFlags class.", "response": "def get_properties(cls):\n \"\"\"Get all properties of the MessageFlags class.\"\"\"\n property_names = [p for p in dir(cls)\n if isinstance(getattr(cls, p), property)]\n return property_names"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef isDirect(self):\n direct = (self._messageType == 0x00)\n if self.isDirectACK or self.isDirectNAK:\n direct = True\n return direct", "response": "Test if the message is a direct message type."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create(cls, messageType, extended, hopsleft=3, hopsmax=3):\n flags = MessageFlags(None)\n if messageType < 8:\n flags._messageType = messageType\n else:\n flags._messageType = 
messageType >> 5\n if extended in [0, 1, True, False]:\n if extended:\n flags._extended = 1\n else:\n flags._extended = 0\n else:\n flags._extended = extended >> 4\n flags._hopsLeft = hopsleft\n flags._hopsMax = hopsmax\n return flags", "response": "Create a new message flags."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a byte representation of the message flags.", "response": "def bytes(self):\n \"\"\"Return a byte representation of the message flags.\"\"\"\n flagByte = 0x00\n messageType = 0\n if self._messageType is not None:\n messageType = self._messageType << 5\n extendedBit = 0 if self._extended is None else self._extended << 4\n hopsMax = 0 if self._hopsMax is None else self._hopsMax\n hopsLeft = 0 if self._hopsLeft is None else (self._hopsLeft << 2)\n flagByte = flagByte | messageType | extendedBit | hopsLeft | hopsMax\n return bytes([flagByte])"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ntake any format of flags and turn it into a hex string.", "response": "def _normalize(self, flags):\n \"\"\"Take any format of flags and turn it into a hex string.\"\"\"\n norm = None\n if isinstance(flags, MessageFlags):\n norm = flags.bytes\n elif isinstance(flags, bytearray):\n norm = binascii.hexlify(flags)\n elif isinstance(flags, int):\n norm = bytes([flags])\n elif isinstance(flags, bytes):\n norm = binascii.hexlify(flags)\n elif isinstance(flags, str):\n flags = flags[0:2]\n norm = binascii.hexlify(binascii.unhexlify(flags.lower()))\n elif flags is None:\n norm = None\n else:\n _LOGGER.warning('MessageFlags with unknown type %s: %r',\n type(flags), flags)\n return norm"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _set_properties(self, flags):\n flagByte = self._normalize(flags)\n\n if flagByte is not None:\n self._messageType = (flagByte[0] & 0xe0) >> 5\n self._extended = (flagByte[0] & MESSAGE_FLAG_EXTENDED_0X10) >> 4\n 
self._hopsLeft = (flagByte[0] & 0x0c) >> 2\n self._hopsMax = flagByte[0] & 0x03\n else:\n self._messageType = None\n self._extended = None\n self._hopsLeft = None\n self._hopsMax = None", "response": "Sets the properties of the message flags based on a byte input."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef set_value(self, dry_wet: LeakSensorState):\n value = 0\n if dry_wet == self._dry_wet_type:\n value = 1\n self._update_subscribers(value)", "response": "Set the value of the state to dry or wet."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreport a dry or a wet state.", "response": "def _dry_wet_message_received(self, msg):\n \"\"\"Report a dry or a wet state.\"\"\"\n for callback in self._dry_wet_callbacks:\n callback(self._dry_wet_type)\n self._update_subscribers(0x01)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets the state to dry or wet.", "response": "def set_value(self, dry_wet: LeakSensorState):\n \"\"\"Set the state to wet or dry.\"\"\"\n if dry_wet == LeakSensorState.DRY:\n self._update_subscribers(0x11)\n else:\n self._update_subscribers(0x13)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _dry_message_received(self, msg):\n for callback in self._dry_wet_callbacks:\n callback(LeakSensorState.DRY)\n self._update_subscribers(0x11)", "response": "Report a dry state."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _wet_message_received(self, msg):\n for callback in self._dry_wet_callbacks:\n callback(LeakSensorState.WET)\n self._update_subscribers(0x13)", "response": "Report a wet state."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef add(self, msg, callback, override=False):\n if override:\n if isinstance(callback, 
list):\n self._dict[msg] = callback\n else:\n self._dict[msg] = [callback]\n else:\n cb = self[msg]\n cb.append(callback)\n self._dict[msg] = cb", "response": "Add a callback to the callback list."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nremove a callback from the callback list.", "response": "def remove(self, msg, callback):\n \"\"\"Remove a callback from the callback list.\n\n msg: Message template\n callback: Callback method to remove.\n\n If callback is None, all callbacks for the message template are\n removed.\n \"\"\"\n if callback is None:\n self._dict.pop(msg, None)\n else:\n cb = self._dict.get(msg, [])\n try:\n cb.remove(callback)\n except ValueError:\n pass\n if cb:\n _LOGGER.debug('%d callbacks for message: %s', len(cb), msg)\n self.add(msg, cb, True)\n else:\n self._dict.pop(msg, None)\n _LOGGER.debug('Removed all callbacks for message: %s', msg)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the callbacks associated with a message template.", "response": "def get_callbacks_from_message(self, msg):\n \"\"\"Return the callbacks associated with a message template.\"\"\"\n callbacks = []\n for key in self._find_matching_keys(msg):\n for callback in self[key]:\n callbacks.append(callback)\n return callbacks"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning an X10 device based on a feature.", "response": "def x10(self, feature):\n \"\"\"Return an X10 device based on a feature.\n\n Current features:\n - OnOff\n - Dimmable\n \"\"\"\n x10_product = None\n for product in self._x10_products:\n if feature.lower() == product.feature:\n x10_product = product\n\n if not x10_product:\n x10_product = X10Product(feature, None)\n\n return x10_product"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef from_raw_message(cls, rawmessage):\n return AllLinkRecordResponse(rawmessage[2],\n rawmessage[3],\n 
rawmessage[4:7],\n rawmessage[7],\n rawmessage[8],\n rawmessage[9])", "response": "Create message from raw byte stream."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef monitor():\n parser = argparse.ArgumentParser(description=__doc__)\n parser.add_argument('--device', default='/dev/ttyUSB0',\n help='Path to PLM device')\n parser.add_argument('--verbose', '-v', action='count',\n help='Set logging level to verbose')\n parser.add_argument('-l', '--logfile', default='',\n help='Log file name')\n parser.add_argument('--workdir', default='',\n help='Working directory for reading and saving '\n 'device information.')\n\n args = parser.parse_args()\n loop = asyncio.get_event_loop()\n monTool = Tools(loop, args)\n asyncio.ensure_future(monTool.monitor_mode())\n try:\n loop.run_forever()\n except KeyboardInterrupt:\n if monTool.plm:\n if monTool.plm.transport:\n _LOGGING.info('Closing the session')\n asyncio.ensure_future(monTool.plm.transport.close(), loop=loop)\n loop.stop()\n pending = asyncio.Task.all_tasks(loop=loop)\n for task in pending:\n task.cancel()\n try:\n loop.run_until_complete(task)\n except asyncio.CancelledError:\n pass\n except KeyboardInterrupt:\n pass\n loop.close()", "response": "Connect to receiver and show events as they occur."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef interactive():\n parser = argparse.ArgumentParser(description=__doc__)\n parser.add_argument('--device', default='/dev/ttyUSB0',\n help='Path to PLM device')\n parser.add_argument('-v', '--verbose', action='count',\n help='Set logging level to verbose')\n parser.add_argument('-l', '--logfile', default='',\n help='Log file name')\n parser.add_argument('--workdir', default='',\n help='Working directory for reading and saving '\n 'device information.')\n args = parser.parse_args()\n\n loop = asyncio.get_event_loop()\n cmd = Commander(loop, args)\n cmd.start()\n try:\n 
loop.run_forever()\n except KeyboardInterrupt:\n if cmd.tools.plm:\n if cmd.tools.plm.transport:\n # _LOGGING.info('Closing the session')\n cmd.tools.plm.transport.close()\n loop.stop()\n pending = asyncio.Task.all_tasks(loop=loop)\n for task in pending:\n task.cancel()\n try:\n loop.run_until_complete(task)\n except asyncio.CancelledError:\n pass\n except KeyboardInterrupt:\n pass\n loop.close()", "response": "Create an interactive command line tool."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\nasync def connect(self, poll_devices=False, device=None, workdir=None):\n await self.aldb_load_lock.acquire()\n device = self.host if self.host else self.device\n _LOGGING.info('Connecting to Insteon Modem at %s', device)\n self.device = device if device else self.device\n self.workdir = workdir if workdir else self.workdir\n conn = await insteonplm.Connection.create(\n device=self.device,\n host=self.host,\n port=self.port,\n username=self.username,\n password=self.password,\n loop=self.loop,\n poll_devices=poll_devices,\n workdir=self.workdir)\n _LOGGING.info('Connecton made to Insteon Modem at %s', device)\n conn.protocol.add_device_callback(self.async_new_device_callback)\n conn.protocol.add_all_link_done_callback(\n self.async_aldb_loaded_callback)\n self.plm = conn.protocol\n await self.aldb_load_lock\n if self.aldb_load_lock.locked():\n self.aldb_load_lock.release()", "response": "Connect to the IM."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\nasync def monitor_mode(self, poll_devices=False, device=None,\n workdir=None):\n \"\"\"Place the IM in monitoring mode.\"\"\"\n print(\"Running monitor mode\")\n await self.connect(poll_devices, device, workdir)\n self.plm.monitor_mode()", "response": "Place the IM in monitoring mode."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef 
async_new_device_callback(self, device):\n _LOGGING.info(\n 'New Device: %s cat: 0x%02x subcat: 0x%02x desc: %s, model: %s',\n device.id, device.cat, device.subcat,\n device.description, device.model)\n for state in device.states:\n device.states[state].register_updates(\n self.async_state_change_callback)", "response": "Log that our new device callback worked."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nlog the state change.", "response": "def async_state_change_callback(self, addr, state, value):\n \"\"\"Log the state change.\"\"\"\n _LOGGING.info('Device %s state %s value is changed to %s',\n addr, state, value)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef async_aldb_loaded_callback(self):\n if self.aldb_load_lock.locked():\n self.aldb_load_lock.release()\n _LOGGING.info('ALDB Loaded')", "response": "Unlock the ALDB load lock when loading is complete."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\nasync def start_all_linking(self, linkcode, group, address=None):\n _LOGGING.info('Starting the All-Linking process')\n if address:\n linkdevice = self.plm.devices[Address(address).id]\n if not linkdevice:\n linkdevice = create(self.plm, address, None, None)\n _LOGGING.info('Attempting to link the PLM to device %s. ',\n address)\n self.plm.start_all_linking(linkcode, group)\n asyncio.sleep(.5, loop=self.loop)\n linkdevice.enter_linking_mode(group=group)\n else:\n _LOGGING.info('Starting All-Linking on PLM. 
'\n 'Waiting for button press')\n self.plm.start_all_linking(linkcode, group)\n await asyncio.sleep(self.wait_time, loop=self.loop)\n\n _LOGGING.info('%d devices added to the All-Link Database',\n len(self.plm.devices))\n await asyncio.sleep(.1, loop=self.loop)", "response": "Start the All - Linking process with the IM and device."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef list_devices(self):\n if self.plm.devices:\n for addr in self.plm.devices:\n device = self.plm.devices[addr]\n if device.address.is_x10:\n _LOGGING.info('Device: %s %s', device.address.human,\n device.description)\n else:\n _LOGGING.info('Device: %s cat: 0x%02x subcat: 0x%02x '\n 'desc: %s, model: %s',\n device.address.human, device.cat,\n device.subcat, device.description,\n device.model)\n else:\n _LOGGING.info('No devices found')\n if not self.plm.transport:\n _LOGGING.info('IM connection has not been made.')\n _LOGGING.info('Use `connect [device]` to open the connection')", "response": "List devices in the ALDB."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef print_device_aldb(self, addr):\n if Address(addr).id == self.plm.address.id:\n device = self.plm\n else:\n dev_addr = Address(addr)\n device = self.plm.devices[dev_addr.id]\n if device:\n if device.aldb.status in [ALDBStatus.LOADED, ALDBStatus.PARTIAL]:\n if device.aldb.status == ALDBStatus.PARTIAL:\n _LOGGING.info('ALDB partially loaded for device %s', addr)\n for mem_addr in device.aldb:\n record = device.aldb[mem_addr]\n _LOGGING.debug('mem_addr: %s', mem_addr)\n _LOGGING.info('ALDB record: %s', record)\n else:\n _LOGGING.info('ALDB not loaded. 
'\n 'Use `load_aldb %s` first.',\n device.address.id)\n else:\n _LOGGING.info('Device not found.')", "response": "Diplay the All - Link database for a device."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\nasync def load_device_aldb(self, addr, clear=True):\n dev_addr = Address(addr)\n device = None\n if dev_addr == self.plm.address:\n device = self.plm\n else:\n device = self.plm.devices[dev_addr.id]\n if device:\n if clear:\n device.aldb.clear()\n device.read_aldb()\n await asyncio.sleep(1, loop=self.loop)\n while device.aldb.status == ALDBStatus.LOADING:\n await asyncio.sleep(1, loop=self.loop)\n if device.aldb.status == ALDBStatus.LOADED:\n _LOGGING.info('ALDB loaded for device %s', addr)\n self.print_device_aldb(addr)\n else:\n _LOGGING.error('Could not find device %s', addr)", "response": "Load the ALDB for a specific device."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\nasync def load_all_aldb(self, clear=True):\n for addr in self.plm.devices:\n await self.load_device_aldb(addr, clear)", "response": "Read all ALDB devices."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\nasync def write_aldb(self, addr, mem_addr: int, mode: str, group: int,\n target, data1=0x00, data2=0x00, data3=0x00):\n \"\"\"Write a device All-Link record.\"\"\"\n dev_addr = Address(addr)\n target_addr = Address(target)\n device = self.plm.devices[dev_addr.id]\n if device:\n _LOGGING.debug('calling device write_aldb')\n device.write_aldb(mem_addr, mode, group, target_addr,\n data1, data2, data3)\n await asyncio.sleep(1, loop=self.loop)\n while device.aldb.status == ALDBStatus.LOADING:\n await asyncio.sleep(1, loop=self.loop)\n self.print_device_aldb(addr)", "response": "Write a device All - Link record."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\nasync 
def del_aldb(self, addr, mem_addr: int):\n dev_addr = Address(addr)\n device = self.plm.devices[dev_addr.id]\n if device:\n _LOGGING.debug('calling device del_aldb')\n device.del_aldb(mem_addr)\n await asyncio.sleep(1, loop=self.loop)\n while device.aldb.status == ALDBStatus.LOADING:\n await asyncio.sleep(1, loop=self.loop)\n self.print_device_aldb(addr)", "response": "Write a device All - Link record."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a device override to the PLM.", "response": "def add_device_override(self, addr, cat, subcat, firmware=None):\n \"\"\"Add a device override to the PLM.\"\"\"\n self.plm.devices.add_override(addr, 'cat', cat)\n self.plm.devices.add_override(addr, 'subcat', subcat)\n if firmware:\n self.plm.devices.add_override(addr, 'firmware', firmware)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_x10_device(self, housecode, unitcode, dev_type):\n device = None\n try:\n device = self.plm.devices.add_x10_device(self.plm, housecode,\n unitcode, dev_type)\n except ValueError:\n pass\n return device", "response": "Add an X10 device to the PLM."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting the status of a KPL button.", "response": "def kpl_status(self, address, group):\n \"\"\"Get the status of a KPL button.\"\"\"\n addr = Address(address)\n device = self.plm.devices[addr.id]\n device.states[group].async_refresh_state()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef kpl_on(self, address, group):\n addr = Address(address)\n device = self.plm.devices[addr.id]\n device.states[group].on()", "response": "Get the status of a KPL button."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the status of a KPL button.", "response": "def kpl_off(self, address, group):\n \"\"\"Get the status of a KPL button.\"\"\"\n addr = 
Address(address)\n device = self.plm.devices[addr.id]\n device.states[group].off()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef kpl_set_on_mask(self, address, group, mask):\n addr = Address(address)\n device = self.plm.devices[addr.id]\n device.states[group].set_on_mask(mask)", "response": "Set the status of a KPL button on a given mask."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nstarts the command process loop.", "response": "def start(self):\n \"\"\"Start the command process loop.\"\"\"\n self.loop.create_task(self._read_line())\n self.loop.create_task(self._greeting())"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\nasync def do_connect(self, args):\n params = args.split()\n device = '/dev/ttyUSB0'\n workdir = None\n\n try:\n device = params[0]\n except IndexError:\n if self.tools.device:\n device = self.tools.device\n try:\n workdir = params[1]\n except IndexError:\n if self.tools.workdir:\n workdir = self.tools.workdir\n\n if device:\n await self.tools.connect(False, device=device, workdir=workdir)\n _LOGGING.info('Connection complete.')", "response": "Connect to the PLM device."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef do_running_tasks(self, arg):\n for task in asyncio.Task.all_tasks(loop=self.loop):\n _LOGGING.info(task)", "response": "List tasks running in the background."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef do_add_all_link(self, args):\n linkcode = 1\n group = 0\n addr = None\n params = args.split()\n if params:\n try:\n linkcode = int(params[0])\n except IndexError:\n linkcode = 1\n except ValueError:\n linkcode = None\n\n try:\n group = int(params[1])\n except IndexError:\n group = 0\n except ValueError:\n group = None\n\n try:\n addr = params[2]\n 
except IndexError:\n addr = None\n\n if linkcode in [0, 1, 3] and 255 >= group >= 0:\n self.loop.create_task(\n self.tools.start_all_linking(linkcode, group, addr))\n else:\n _LOGGING.error('Link code %d or group number %d not valid',\n linkcode, group)\n self.do_help('add_all_link')", "response": "Add an All - Link record to the IM and a device."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nprint the All - Link database for a specific device.", "response": "def do_print_aldb(self, args):\n \"\"\"Print the All-Link database for a device.\n\n Usage:\n print_aldb address|plm|all\n Arguments:\n address: INSTEON address of the device\n plm: Print the All-Link database for the PLM\n all: Print the All-Link database for all devices\n\n This method requires that the device ALDB has been loaded.\n To load the device ALDB use the command:\n load_aldb address|plm|all\n \"\"\"\n params = args.split()\n addr = None\n\n try:\n addr = params[0]\n except IndexError:\n _LOGGING.error('Device address required.')\n self.do_help('print_aldb')\n\n if addr:\n if addr.lower() == 'all':\n self.tools.print_all_aldb()\n elif addr.lower() == 'plm':\n addr = self.tools.plm.address.id\n self.tools.print_device_aldb(addr)\n else:\n self.tools.print_device_aldb(addr)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef do_set_hub_connection(self, args):\n params = args.split()\n username = None\n password = None\n host = None\n port = None\n\n try:\n username = params[0]\n password = params[1]\n host = params[2]\n port = params[3]\n except IndexError:\n pass\n\n if username and password and host:\n if not port:\n port = 25105\n self.tools.username = username\n self.tools.password = password\n self.tools.host = host\n self.tools.port = port\n else:\n _LOGGING.error('username password host are required')\n self.do_help('set_hub_connection')", "response": "Set Hub connection parameters."} {"SOURCE": 
"codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef do_set_log_file(self, args):\n params = args.split()\n try:\n filename = params[0]\n logging.basicConfig(filename=filename)\n except IndexError:\n self.do_help('set_log_file')", "response": "Set the log file."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\nasync def do_load_aldb(self, args):\n params = args.split()\n addr = None\n clear = True\n\n try:\n addr = params[0]\n except IndexError:\n _LOGGING.error('Device address required.')\n self.do_help('load_aldb')\n\n try:\n clear_prior = params[1]\n _LOGGING.info('param clear_prior %s', clear_prior)\n if clear_prior.lower() == 'y':\n clear = True\n elif clear_prior.lower() == 'n':\n clear = False\n else:\n _LOGGING.error('Invalid value for parameter `clear_prior`')\n _LOGGING.error('Valid values are `y` or `n`')\n except IndexError:\n pass\n\n if addr:\n if addr.lower() == 'all':\n await self.tools.load_all_aldb(clear)\n else:\n await self.tools.load_device_aldb(addr, clear)\n else:\n self.do_help('load_aldb')", "response": "Load the All - Link database for a device."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nwriting device All - Link record.", "response": "async def do_write_aldb(self, args):\n \"\"\"Write device All-Link record.\n\n WARNING THIS METHOD CAN DAMAGE YOUR DEVICE IF USED INCORRECTLY.\n Please ensure the memory id is appropriate for the device.\n You must load the ALDB of the device before using this method.\n The memory id must be an existing memory id in the ALDB or this\n method will return an error.\n\n If you are looking to create a new link between two devices,\n use the `link_devices` command or the `start_all_linking` command.\n\n Usage:\n write_aldb addr memory mode group target [data1 data2 data3]\n\n Required Parameters:\n addr: Inseon address of the device to write\n memory: record ID of the record 
to write (i.e. 0fff)\n mode: r | c\n r = Device is a responder of target\n c = Device is a controller of target\n group: All-Link group integer\n target: Insteon address of the link target device\n\n Optional Parameters:\n data1: int = Device sepcific\n data2: int = Device specific\n data3: int = Device specific\n \"\"\"\n params = args.split()\n addr = None\n mem_bytes = None\n memory = None\n mode = None\n group = None\n target = None\n data1 = 0x00\n data2 = 0x00\n data3 = 0x00\n\n try:\n addr = Address(params[0])\n mem_bytes = binascii.unhexlify(params[1])\n memory = int.from_bytes(mem_bytes, byteorder='big')\n mode = params[2]\n group = int(params[3])\n target = Address(params[4])\n\n _LOGGING.info('address: %s', addr)\n _LOGGING.info('memory: %04x', memory)\n _LOGGING.info('mode: %s', mode)\n _LOGGING.info('group: %d', group)\n _LOGGING.info('target: %s', target)\n\n except IndexError:\n _LOGGING.error('Device address memory mode group and target '\n 'are all required.')\n self.do_help('write_aldb')\n except ValueError:\n _LOGGING.error('Value error - Check parameters')\n self.do_help('write_aldb')\n\n try:\n data1 = int(params[5])\n data2 = int(params[6])\n data3 = int(params[7])\n except IndexError:\n pass\n except ValueError:\n addr = None\n _LOGGING.error('Value error - Check parameters')\n self.do_help('write_aldb')\n return\n\n if addr and memory and mode and isinstance(group, int) and target:\n await self.tools.write_aldb(addr, memory, mode, group, target,\n data1, data2, data3)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndelete device All - Link record.", "response": "async def do_del_aldb(self, args):\n \"\"\"Delete device All-Link record.\n\n WARNING THIS METHOD CAN DAMAGE YOUR DEVICE IF USED INCORRECTLY.\n Please ensure the memory id is appropriate for the device.\n You must load the ALDB of the device before using this method.\n The memory id must be an existing memory id in the ALDB or this\n method will 
return an error.\n\n If you are looking to create a new link between two devices,\n use the `link_devices` command or the `start_all_linking` command.\n\n Usage:\n del_aldb addr memory\n\n Required Parameters:\n addr: Inseon address of the device to write\n memory: record ID of the record to write (i.e. 0fff)\n \"\"\"\n params = args.split()\n addr = None\n mem_bytes = None\n memory = None\n\n try:\n addr = Address(params[0])\n mem_bytes = binascii.unhexlify(params[1])\n memory = int.from_bytes(mem_bytes, byteorder='big')\n\n _LOGGING.info('address: %s', addr)\n _LOGGING.info('memory: %04x', memory)\n\n except IndexError:\n _LOGGING.error('Device address and memory are required.')\n self.do_help('del_aldb')\n except ValueError:\n _LOGGING.error('Value error - Check parameters')\n self.do_help('write_aldb')\n\n if addr and memory:\n await self.tools.del_aldb(addr, memory)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting the log level.", "response": "def do_set_log_level(self, arg):\n \"\"\"Set the log level.\n\n Usage:\n set_log_level i|v\n Parameters:\n log_level: i - info | v - verbose\n\n \"\"\"\n if arg in ['i', 'v']:\n _LOGGING.info('Setting log level to %s', arg)\n if arg == 'i':\n _LOGGING.setLevel(logging.INFO)\n _INSTEONPLM_LOGGING.setLevel(logging.INFO)\n else:\n _LOGGING.setLevel(logging.DEBUG)\n _INSTEONPLM_LOGGING.setLevel(logging.DEBUG)\n else:\n _LOGGING.error('Log level value error.')\n self.do_help('set_log_level')"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef do_set_device(self, args):\n params = args.split()\n device = None\n\n try:\n device = params[0]\n except IndexError:\n _LOGGING.error('Device name required.')\n self.do_help('set_device')\n\n if device:\n self.tools.device = device", "response": "Set the PLM OS device."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nset the working directory.", "response": 
"def do_set_workdir(self, args):\n \"\"\"Set the working directory.\n\n The working directory is used to load and save known devices\n to improve startup times. During startup the application\n loads and saves a file `insteon_plm_device_info.dat`. This file\n is saved in the working directory.\n\n The working directory has no default value. If the working directory is\n not set, the `insteon_plm_device_info.dat` file is not loaded or saved.\n\n Usage:\n set_workdir workdir\n Arguments:\n workdir: Required - Working directory to load and save devie list\n \"\"\"\n params = args.split()\n workdir = None\n\n try:\n workdir = params[0]\n except IndexError:\n _LOGGING.error('Device name required.')\n self.do_help('set_workdir')\n\n if workdir:\n self.tools.workdir = workdir"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef do_help(self, arg):\n cmds = arg.split()\n\n if cmds:\n func = getattr(self, 'do_{}'.format(cmds[0]))\n if func:\n _LOGGING.info(func.__doc__)\n else:\n _LOGGING.error('Command %s not found', cmds[0])\n else:\n _LOGGING.info(\"Available command list: \")\n for curr_cmd in dir(self.__class__):\n if curr_cmd.startswith(\"do_\") and not curr_cmd == 'do_test':\n print(\" - \", curr_cmd[3:])\n _LOGGING.info(\"For help with a command type `help command`\")", "response": "Help command.\n\n Usage:\n help [command]\n Parameters:\n command: Optional - command name to display detailed help"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef do_add_device_override(self, args):\n params = args.split()\n addr = None\n cat = None\n subcat = None\n firmware = None\n error = None\n\n try:\n addr = Address(params[0])\n cat = binascii.unhexlify(params[1][2:])\n subcat = binascii.unhexlify(params[2][2:])\n firmware = binascii.unhexlify(params[3][2:])\n except IndexError:\n error = 'missing'\n except ValueError:\n error = 'value'\n\n 
if addr and cat and subcat:\n self.tools.add_device_override(addr, cat, subcat, firmware)\n else:\n if error == 'missing':\n _LOGGING.error('Device address, category and subcategory are '\n 'required.')\n else:\n _LOGGING.error('Check the vales for address, category and '\n 'subcategory.')\n self.do_help('add_device_override')", "response": "Add a device override to the IM."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef do_add_x10_device(self, args):\n params = args.split()\n housecode = None\n unitcode = None\n dev_type = None\n\n try:\n housecode = params[0]\n unitcode = int(params[1])\n if unitcode not in range(1, 17):\n raise ValueError\n dev_type = params[2]\n except IndexError:\n pass\n except ValueError:\n _LOGGING.error('X10 unit code must be an integer 1 - 16')\n unitcode = None\n\n if housecode and unitcode and dev_type:\n device = self.tools.add_x10_device(housecode, unitcode, dev_type)\n if not device:\n _LOGGING.error('Device not added. 
Please check the '\n 'information you provided.')\n self.do_help('add_x10_device')\n else:\n _LOGGING.error('Device housecode, unitcode and type are '\n 'required.')\n self.do_help('add_x10_device')", "response": "Add an X10 device to the IM."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef do_kpl_status(self, args):\n params = args.split()\n address = None\n group = None\n\n try:\n address = params[0]\n group = int(params[1])\n except IndexError:\n _LOGGING.error(\"Address and group are regquired\")\n self.do_help('kpl_status')\n except TypeError:\n _LOGGING.error(\"Group must be an integer\")\n self.do_help('kpl_status')\n\n if address and group:\n self.tools.kpl_status(address, group)", "response": "Get the status of a KeypadLinc button."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef do_kpl_on(self, args):\n params = args.split()\n address = None\n group = None\n\n try:\n address = params[0]\n group = int(params[1])\n except IndexError:\n _LOGGING.error(\"Address and group are regquired\")\n self.do_help('kpl_status')\n except TypeError:\n _LOGGING.error(\"Group must be an integer\")\n self.do_help('kpl_status')\n\n if address and group:\n self.tools.kpl_on(address, group)", "response": "Turn on a KeypadLinc button."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nturn off a KeypadLinc button.", "response": "def do_kpl_off(self, args):\n \"\"\"Turn off a KeypadLinc button.\n\n Usage:\n kpl_on address group\n \"\"\"\n params = args.split()\n address = None\n group = None\n\n try:\n address = params[0]\n group = int(params[1])\n except IndexError:\n _LOGGING.error(\"Address and group are regquired\")\n self.do_help('kpl_status')\n except TypeError:\n _LOGGING.error(\"Group must be an integer\")\n self.do_help('kpl_status')\n\n if address and group:\n self.tools.kpl_off(address, group)"} 
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef do_kpl_set_on_mask(self, args):\n params = args.split()\n address = None\n group = None\n mask_string = None\n mask = None\n\n try:\n address = params[0]\n group = int(params[1])\n mask_string = params[2]\n if mask_string[0:2].lower() == '0x':\n mask = binascii.unhexlify(mask_string[2:])\n else:\n mask = int(mask_string)\n except IndexError:\n _LOGGING.error(\"Address, group and mask are regquired\")\n self.do_help('kpl_status')\n except TypeError:\n _LOGGING.error(\"Group must be an integer\")\n self.do_help('kpl_status')\n\n if address and group and mask:\n self.tools.kpl_set_on_mask(address, group, mask)", "response": "Set the on mask for a KeypadLinc button."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nemits the address in human - readible format.", "response": "def human(self):\n \"\"\"Emit the address in human-readible format (AA.BB.CC).\"\"\"\n strout = ''\n first = True\n for i in range(0, 28, 2):\n if first:\n first = False\n else:\n strout = strout + '.'\n strout = strout + self.hex[i:i + 2]\n return strout"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef bytes(self):\n byteout = bytearray()\n for i in range(1, 15):\n key = 'd' + str(i)\n if self._userdata[key] is not None:\n byteout.append(self._userdata[key])\n else:\n byteout.append(0x00)\n return byteout", "response": "Emit the address in bytes format."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a user data instance from a raw byte stream.", "response": "def from_raw_message(cls, rawmessage):\n \"\"\"Create a user data instance from a raw byte stream.\"\"\"\n empty = cls.create_empty(0x00)\n userdata_dict = cls.normalize(empty, rawmessage)\n return Userdata(userdata_dict)"} {"SOURCE": "codesearchnet", "instruction": "Given the 
following Python 3 function, write the documentation\ndef create_pattern(cls, userdata):\n empty = cls.create_empty(None)\n userdata_dict = cls.normalize(empty, userdata)\n return Userdata(userdata_dict)", "response": "Create a user data instance with all values the same."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef template(cls, userdata):\n ud = Userdata(cls.normalize(cls.create_empty(None), userdata))\n return ud", "response": "Create a template instance used for message callbacks."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ntest if the current instance matches a template instance.", "response": "def matches_pattern(self, other):\n \"\"\"Test if the current instance matches a template instance.\"\"\"\n ismatch = False\n if isinstance(other, Userdata):\n for key in self._userdata:\n if self._userdata[key] is None or other[key] is None:\n ismatch = True\n elif self._userdata[key] == other[key]:\n ismatch = True\n else:\n ismatch = False\n break\n return ismatch"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates an empty Userdata object.", "response": "def create_empty(cls, val=0x00):\n \"\"\"Create an empty Userdata object.\n\n val: value to fill the empty user data fields with (default is 0x00)\n \"\"\"\n userdata_dict = {}\n for i in range(1, 15):\n key = 'd{}'.format(i)\n userdata_dict.update({key: val})\n return userdata_dict"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn normalized user data as a dictionary.", "response": "def normalize(cls, empty, userdata):\n \"\"\"Return normalized user data as a dictionary.\n\n empty: an empty dictionary\n userdata: data in the form of Userdata, dict or None\n \"\"\"\n if isinstance(userdata, Userdata):\n return userdata.to_dict()\n if isinstance(userdata, dict):\n return cls._dict_to_dict(empty, userdata)\n if 
isinstance(userdata, (bytes, bytearray)):\n return cls._bytes_to_dict(empty, userdata)\n if userdata is None:\n return empty\n raise ValueError"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef hex(self):\n props = self._message_properties()\n msg = bytearray([MESSAGE_START_CODE_0X02, self._code])\n\n for prop in props:\n # pylint: disable=unused-variable\n for key, val in prop.items():\n if val is None:\n pass\n elif isinstance(val, int):\n msg.append(val)\n elif isinstance(val, Address):\n if val.addr is None:\n pass\n else:\n msg.extend(val.bytes)\n elif isinstance(val, MessageFlags):\n msg.extend(val.bytes)\n elif isinstance(val, bytearray):\n msg.extend(val)\n elif isinstance(val, bytes):\n msg.extend(val)\n elif isinstance(val, Userdata):\n msg.extend(val.bytes)\n\n return binascii.hexlify(msg).decode()", "response": "Hexideciaml representation of the message in bytes."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef matches_pattern(self, other):\n properties = self._message_properties()\n ismatch = False\n if isinstance(other, Message) and self.code == other.code:\n for prop in properties:\n for key, prop_val in prop.items():\n if hasattr(other, key):\n key_val = getattr(other, key)\n ismatch = self._test_match(prop_val, key_val)\n else:\n ismatch = False\n if not ismatch:\n break\n if not ismatch:\n break\n return ismatch", "response": "Return True if the current message matches a message template."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _register_messages(self):\n template_on_group = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_ON_0X11_NONE,\n address=self._address,\n target=bytearray([0x00, 0x00, self._group]),\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_BROADCAST, None))\n template_fast_on_group = StandardReceive.template(\n 
commandtuple=COMMAND_LIGHT_ON_FAST_0X12_NONE,\n address=self._address,\n target=bytearray([0x00, 0x00, self._group]),\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_BROADCAST, None))\n template_off_group = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_OFF_0X13_0X00,\n address=self._address,\n target=bytearray([0x00, 0x00, self._group]),\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_BROADCAST, None),\n cmd2=None)\n template_fast_off_group = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_OFF_FAST_0X14_0X00,\n address=self._address,\n target=bytearray([0x00, 0x00, self._group]),\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_BROADCAST, None),\n cmd2=None)\n template_manual_on_group = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_MANUALLY_TURNED_ON_0X23_0X00,\n address=self._address,\n target=bytearray([0x00, 0x00, self._group]),\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_BROADCAST, None),\n cmd2=None)\n template_manual_off_group = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_MANUALLY_TURNED_OFF_0X22_0X00,\n address=self._address,\n target=bytearray([0x00, 0x00, self._group]),\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_BROADCAST, None),\n cmd2=None)\n\n template_on_cleanup = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_ON_0X11_NONE,\n address=self._address,\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_CLEANUP, None),\n cmd2=self._group)\n template_fast_on_cleanup = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_ON_FAST_0X12_NONE,\n address=self._address,\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_CLEANUP, None),\n cmd2=self._group)\n template_off_cleanup = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_OFF_0X13_0X00,\n address=self._address,\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_CLEANUP, None),\n cmd2=self._group)\n template_fast_off_cleanup = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_OFF_FAST_0X14_0X00,\n address=self._address,\n 
flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_CLEANUP, None),\n cmd2=self._group)\n template_manual_on_cleanup = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_MANUALLY_TURNED_ON_0X23_0X00,\n address=self._address,\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_CLEANUP, None),\n cmd2=self._group)\n template_manual_off_cleanup = StandardReceive.template(\n commandtuple=COMMAND_LIGHT_MANUALLY_TURNED_OFF_0X22_0X00,\n address=self._address,\n flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_CLEANUP, None),\n cmd2=self._group)\n\n self._message_callbacks.add(template_on_group,\n self._on_message_received)\n self._message_callbacks.add(template_fast_on_group,\n self._on_message_received)\n self._message_callbacks.add(template_off_group,\n self._off_message_received)\n self._message_callbacks.add(template_fast_off_group,\n self._off_message_received)\n self._message_callbacks.add(template_manual_on_group,\n self._manual_change_received)\n self._message_callbacks.add(template_manual_off_group,\n self._manual_change_received)\n\n self._message_callbacks.add(template_on_cleanup,\n self._on_message_received)\n self._message_callbacks.add(template_fast_on_cleanup,\n self._on_message_received)\n self._message_callbacks.add(template_off_cleanup,\n self._off_message_received)\n self._message_callbacks.add(template_fast_off_cleanup,\n self._off_message_received)\n self._message_callbacks.add(template_manual_on_cleanup,\n self._manual_change_received)\n self._message_callbacks.add(template_manual_off_cleanup,\n self._manual_change_received)", "response": "Register messages to listen for."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nsend ON command to device.", "response": "def on(self):\n \"\"\"Send ON command to device.\"\"\"\n on_command = StandardSend(self._address,\n COMMAND_LIGHT_ON_0X11_NONE, 0xff)\n self._send_method(on_command,\n self._on_message_received)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 
function for\nsending OFF command to device.", "response": "def off(self):\n \"\"\"Send OFF command to device.\"\"\"\n self._send_method(StandardSend(self._address,\n COMMAND_LIGHT_OFF_0X13_0X00),\n self._off_message_received)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _send_status_0x01_request(self):\n status_command = StandardSend(self._address,\n COMMAND_LIGHT_STATUS_REQUEST_0X19_0X01)\n self._send_method(status_command, self._status_message_0x01_received)", "response": "Send status request to device."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _status_message_0x01_received(self, msg):\n if msg.cmd2 == 0x00 or msg.cmd2 == 0x02:\n self._update_subscribers(0x00)\n elif msg.cmd2 == 0x01 or msg.cmd2 == 0x03:\n self._update_subscribers(0xff)\n else:\n raise ValueError", "response": "Handle status received messages."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef on(self):\n on_command = ExtendedSend(self._address,\n COMMAND_LIGHT_ON_0X11_NONE,\n self._udata,\n cmd2=0xff)\n on_command.set_checksum()\n self._send_method(on_command, self._on_message_received)", "response": "Send an ON message to the device group."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef off(self):\n off_command = ExtendedSend(self._address,\n COMMAND_LIGHT_OFF_0X13_0X00,\n self._udata)\n off_command.set_checksum()\n self._send_method(off_command, self._off_message_received)", "response": "Send an OFF message to the device group."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsending a status request.", "response": "def _send_status_0x01_request(self):\n \"\"\"Send a status request.\"\"\"\n status_command = StandardSend(self._address,\n COMMAND_LIGHT_STATUS_REQUEST_0X19_0X01)\n self._send_method(status_command, 
self._status_message_received)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreceiving a status message.", "response": "def _status_message_received(self, msg):\n \"\"\"Receive a status message.\n\n The following status values can be received:\n 0x00 = Both Outlets Off\n 0x01 = Only Top Outlet On\n 0x02 = Only Bottom Outlet On\n 0x03 = Both Outlets On\n \"\"\"\n if msg.cmd2 == 0x00 or msg.cmd2 == 0x01:\n self._update_subscribers(0x00)\n elif msg.cmd2 == 0x02 or msg.cmd2 == 0x03:\n self._update_subscribers(0xff)\n else:\n raise ValueError"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef close(self):\n close_command = StandardSend(self._address,\n COMMAND_LIGHT_OFF_0X13_0X00)\n self._send_method(close_command, self._close_message_received)", "response": "Send CLOSE command to device."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef led_changed(self, addr, group, val):\n _LOGGER.debug(\"Button %d LED changed from %d to %d\",\n self._group, self._value, val)\n led_on = bool(val)\n if led_on != bool(self._value):\n self._update_subscribers(int(led_on))", "response": "Capture a change to the LED for this button."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef set_on_mask(self, mask):\n set_cmd = self._create_set_property_msg('_on_mask', 0x02, mask)\n self._send_method(set_cmd, self._property_set)", "response": "Set the on mask for the current group and button."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets the off mask for the current group and button.", "response": "def set_off_mask(self, mask):\n \"\"\"Set the off mask for the current group/button.\"\"\"\n set_cmd = self._create_set_property_msg('_off_mask', 0x03, mask)\n self._send_method(set_cmd, self._property_set)"} {"SOURCE": "codesearchnet", 
"instruction": "Make a summary of the following Python 3 code\ndef set_x10_address(self, x10address):\n set_cmd = self._create_set_property_msg('_x10_house_code', 0x04,\n x10address)\n self._send_method(set_cmd, self._property_set)", "response": "Set the X10 address for the current group and button."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsets the X10 ramp rate for the current group and button.", "response": "def set_ramp_rate(self, ramp_rate):\n \"\"\"Set the X10 address for the current group/button.\"\"\"\n set_cmd = self._create_set_property_msg('_ramp_rate', 0x05,\n ramp_rate)\n self._send_method(set_cmd, self._property_set)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_on_level(self, val):\n on_cmd = self._create_set_property_msg(\"_on_level\", 0x06,\n val)\n self._send_method(on_cmd, self._property_set)\n self._send_method(on_cmd, self._on_message_received)", "response": "Set on level for the button group."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef set_led_brightness(self, brightness):\n set_cmd = self._create_set_property_msg(\"_led_brightness\", 0x07,\n brightness)\n self._send_method(set_cmd, self._property_set)", "response": "Set the LED brightness for the current group or button."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset the non - toggle mask for the current group and button.", "response": "def set_non_toggle_mask(self, non_toggle_mask):\n \"\"\"Set the non_toggle_mask for the current group/button.\"\"\"\n set_cmd = self._create_set_property_msg(\"_non_toggle_mask\", 0x08,\n non_toggle_mask)\n self._send_method(set_cmd, self._property_set)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_x10_all_bit_mask(self, x10_all_bit_mask):\n set_cmd = 
self._create_set_property_msg(\"_x10_all_bit_mask\", 0x0a,\n x10_all_bit_mask)\n self._send_method(set_cmd, self._property_set)", "response": "Set the x10_all_bit_mask for the current group and button."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set_trigger_group_bit_mask(self, trigger_group_bit_mask):\n set_cmd = self._create_set_property_msg(\"_trigger_group_bit_mask\",\n 0x0c, trigger_group_bit_mask)\n self._send_method(set_cmd, self._property_set)", "response": "Set the trigger_group_bit_mask for the current group and button."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef scene_on(self):\n user_data = Userdata({'d1': self._group,\n 'd2': 0x00,\n 'd3': 0x00,\n 'd4': 0x11,\n 'd5': 0xff,\n 'd6': 0x00})\n self._set_sent_property(DIMMABLE_KEYPAD_SCENE_ON_LEVEL, 0xff)\n cmd = ExtendedSend(self._address,\n COMMAND_EXTENDED_TRIGGER_ALL_LINK_0X30_0X00,\n user_data)\n cmd.set_checksum()\n _LOGGER.debug('Calling scene_on and sending response to '\n '_received_scene_triggered')\n self._send_method(cmd, self._received_scene_triggered)", "response": "Trigger group / scene to ON level."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef scene_off(self):\n user_data = Userdata({'d1': self._group,\n 'd2': 0x00,\n 'd3': 0x00,\n 'd4': 0x13,\n 'd5': 0x00,\n 'd6': 0x00})\n self._set_sent_property(DIMMABLE_KEYPAD_SCENE_ON_LEVEL, 0x00)\n cmd = ExtendedSend(self._address,\n COMMAND_EXTENDED_TRIGGER_ALL_LINK_0X30_0X00,\n user_data)\n cmd.set_checksum()\n self._send_method(cmd, self._received_scene_triggered)", "response": "Trigger group / scene to OFF level."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef extended_status_request(self):\n self._status_received = False\n user_data = Userdata({'d1': self.group,\n 'd2': 0x00})\n 
cmd = ExtendedSend(self._address,\n COMMAND_EXTENDED_GET_SET_0X2E_0X00,\n userdata=user_data)\n cmd.set_checksum()\n self._send_method(cmd, self._status_message_received, True)", "response": "Send extended status request for group and button."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreceive a status message from the server.", "response": "def _status_message_received(self, msg):\n \"\"\"Receive confirmation that the status message is coming.\n\n The real status message is the extended direct message.\n \"\"\"\n if not self._status_received:\n asyncio.ensure_future(self._confirm_status_received(),\n loop=self._loop)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _status_extended_message_received(self, msg):\n self._status_received = True\n self._status_retries = 0\n _LOGGER.debug(\"Extended status message received\")\n if self._status_response_lock.locked():\n self._status_response_lock.release()\n user_data = msg.userdata\n # self._update_subscribers(user_data['d8'])\n self._set_status_data(user_data)", "response": "Receeive an extended status message."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nset property of the object.", "response": "def _property_set(self, msg):\n \"\"\"Set command received and acknowledged.\"\"\"\n prop = self._sent_property.get('prop')\n if prop and hasattr(self, prop):\n setattr(self, prop, self._sent_property.get('val'))\n self._sent_property = {}"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _set_status_data(self, userdata):\n self._on_mask = userdata['d3']\n self._off_mask = userdata['d4']\n self._x10_house_code = userdata['d5']\n self._x10_unit = userdata['d6']\n self._ramp_rate = userdata['d7']\n self._on_level = userdata['d8']\n self._led_brightness = userdata['d9']\n self._non_toggle_mask = userdata['d10']\n 
self._led_bit_mask = userdata['d11']\n self._x10_all_bit_mask = userdata['d12']\n self._on_off_bit_mask = userdata['d13']\n self._trigger_group_bit_mask = userdata['d14']", "response": "Set status properties from userdata response."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating an extended message to set a property.", "response": "def _create_set_property_msg(self, prop, cmd, val):\n \"\"\"Create an extended message to set a property.\n\n Create an extended message with:\n cmd1: 0x2e\n cmd2: 0x00\n flags: Direct Extended\n d1: group\n d2: cmd\n d3: val\n d4 - d14: 0x00\n\n Parameters:\n prop: Property name to update\n cmd: Command value\n 0x02: on mask\n 0x03: off mask\n 0x04: x10 house code\n 0x05: ramp rate\n 0x06: on level\n 0x07: LED brightness\n 0x08: Non-Toggle mask\n 0x09: LED bit mask (Do not use in this class. Use LED class)\n 0x0a: X10 All bit mask\n 0x0c: Trigger group bit mask\n val: New property value\n\n \"\"\"\n user_data = Userdata({'d1': self.group,\n 'd2': cmd,\n 'd3': val})\n msg = ExtendedSend(self._address,\n COMMAND_EXTENDED_GET_SET_0X2E_0X00,\n user_data)\n msg.set_checksum()\n self._set_sent_property(prop, val)\n return msg"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nturns the LED on for a group.", "response": "def on(self, group):\n \"\"\"Turn the LED on for a group.\"\"\"\n asyncio.ensure_future(self._send_led_on_off_request(group, 1),\n loop=self._loop)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef off(self, group):\n asyncio.ensure_future(self._send_led_on_off_request(group, 0),\n loop=self._loop)", "response": "Turn the LED off for a group."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef register_led_updates(self, callback, button):\n button_callbacks = self._button_observer_callbacks.get(button)\n if not button_callbacks:\n 
self._button_observer_callbacks[button] = []\n _LOGGER.debug('New callback for button %d', button)\n self._button_observer_callbacks[button].append(callback)", "response": "Register a callback when a specific button LED changes."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _set_led_value(self, group, val):\n new_bitmask = set_bit(self._value, group, bool(val))\n self._set_led_bitmask(new_bitmask)", "response": "Set the LED value and confirm with a status check."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _bit_value(self, group, val):\n bitshift = group - 1\n if val:\n new_value = self._value | (1 << bitshift)\n else:\n new_value = self._value & (0xff & ~(1 << bitshift))\n return new_value", "response": "Set the LED on or off value from the LED bitmap."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set(self, mode):\n new_mode = None\n if mode == ThermostatMode.OFF:\n new_mode = COMMAND_THERMOSTAT_CONTROL_OFF_ALL_0X6B_0X09\n elif mode == ThermostatMode.HEAT:\n new_mode = COMMAND_THERMOSTAT_CONTROL_ON_HEAT_0X6B_0X04\n elif mode == ThermostatMode.COOL:\n new_mode = COMMAND_THERMOSTAT_CONTROL_ON_COOL_0X6B_0X05\n elif mode == ThermostatMode.AUTO:\n new_mode = COMMAND_THERMOSTAT_CONTROL_ON_AUTO_0X6B_0X06\n if new_mode:\n msg = ExtendedSend(address=self._address,\n commandtuple=new_mode,\n userdata=Userdata())\n msg.set_checksum()\n self._send_method(msg, self._mode_change_ack)", "response": "Set the thermostat mode."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets the thermostat mode.", "response": "def set(self, mode):\n \"\"\"Set the thermostat mode.\n\n Mode optons:\n OFF = 0x00,\n HEAT = 0x01,\n COOL = 0x02,\n AUTO = 0x03,\n FAN_AUTO = 0x04,\n FAN_ALWAYS_ON = 0x8\n \"\"\"\n if mode == ThermostatMode.FAN_AUTO:\n new_mode = 
COMMAND_THERMOSTAT_CONTROL_OFF_FAN_0X6B_0X08\n elif mode == ThermostatMode.FAN_ALWAYS_ON:\n new_mode = COMMAND_THERMOSTAT_CONTROL_ON_FAN_0X6B_0X07\n if new_mode:\n msg = ExtendedSend(address=self._address,\n commandtuple=new_mode,\n userdata=Userdata())\n msg.set_checksum()\n self._send_method(msg, self._mode_change_ack)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets the cool set point.", "response": "def set(self, val):\n \"\"\"Set the cool set point.\"\"\"\n msg = ExtendedSend(\n address=self._address,\n commandtuple=COMMAND_THERMOSTAT_SET_COOL_SETPOINT_0X6C_NONE,\n cmd2=int(val * 2),\n userdata=Userdata())\n msg.set_checksum()\n self._send_method(msg, self._set_cool_point_ack)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets the heat set point.", "response": "def set(self, val):\n \"\"\"Set the heat set point.\"\"\"\n msg = ExtendedSend(\n address=self._address,\n commandtuple=COMMAND_THERMOSTAT_SET_HEAT_SETPOINT_0X6D_NONE,\n cmd2=int(val * 2),\n userdata=Userdata())\n msg.set_checksum()\n self._send_method(msg, self._set_heat_point_ack)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_device_callback(self, callback):\n _LOGGER.debug('Added new callback %s ', callback)\n self._cb_new_device.append(callback)", "response": "Register a callback to be invoked when a new device appears."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nregister an attribute override for a device.", "response": "def add_override(self, addr, key, value):\n \"\"\"Register an attribute override for a device.\"\"\"\n address = Address(str(addr)).id\n _LOGGER.debug('New override for %s %s is %s', address, key, value)\n device_override = self._overrides.get(address, {})\n device_override[key] = value\n self._overrides[address] = device_override"} {"SOURCE": "codesearchnet", "instruction": "Given the 
following Python 3 function, write the documentation\ndef create_device_from_category(self, plm, addr, cat, subcat,\n product_key=0x00):\n \"\"\"Create a new device from the cat, subcat and product_key data.\"\"\"\n saved_device = self._saved_devices.get(Address(addr).id, {})\n cat = saved_device.get('cat', cat)\n subcat = saved_device.get('subcat', subcat)\n product_key = saved_device.get('product_key', product_key)\n\n device_override = self._overrides.get(Address(addr).id, {})\n cat = device_override.get('cat', cat)\n subcat = device_override.get('subcat', subcat)\n product_key = device_override.get('firmware', product_key)\n product_key = device_override.get('product_key', product_key)\n\n return insteonplm.devices.create(plm, addr, cat, subcat, product_key)", "response": "Create a new device from the cat subcat and product_key data."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef has_saved(self, addr):\n saved = False\n if self._saved_devices.get(addr, None) is not None:\n saved = True\n return saved", "response": "Test if a device has data from the saved data file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ntesting if a device has a device override setting.", "response": "def has_override(self, addr):\n \"\"\"Test if device has data from a device override setting.\"\"\"\n override = False\n if self._overrides.get(addr, None) is not None:\n override = True\n return override"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding devices from the saved devices or from the device overrides.", "response": "def add_known_devices(self, plm):\n \"\"\"Add devices from the saved devices or from the device overrides.\"\"\"\n from insteonplm.devices import ALDBStatus\n for addr in self._saved_devices:\n if not self._devices.get(addr):\n saved_device = self._saved_devices.get(Address(addr).id, {})\n cat = saved_device.get('cat')\n subcat = 
saved_device.get('subcat')\n product_key = saved_device.get('firmware')\n product_key = saved_device.get('product_key', product_key)\n device = self.create_device_from_category(\n plm, addr, cat, subcat, product_key)\n if device:\n _LOGGER.debug('Device with id %s added to device list '\n 'from saved device data.', addr)\n aldb_status = saved_device.get('aldb_status', 0)\n device.aldb.status = ALDBStatus(aldb_status)\n aldb = saved_device.get('aldb', {})\n device.aldb.load_saved_records(aldb_status, aldb)\n self[addr] = device\n for addr in self._overrides:\n if not self._devices.get(addr):\n device_override = self._overrides.get(Address(addr).id, {})\n cat = device_override.get('cat')\n subcat = device_override.get('subcat')\n product_key = device_override.get('firmware')\n product_key = device_override.get('product_key', product_key)\n device = self.create_device_from_category(\n plm, addr, cat, subcat, product_key)\n if device:\n _LOGGER.debug('Device with id %s added to device list '\n 'from device override data.', addr)\n self[addr] = device"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef save_device_info(self):\n if self._workdir is not None:\n devices = []\n for addr in self._devices:\n device = self._devices.get(addr)\n if not device.address.is_x10:\n aldb = {}\n for mem in device.aldb:\n rec = device.aldb[mem]\n if rec:\n aldbRec = {'memory': mem,\n 'control_flags': rec.control_flags.byte,\n 'group': rec.group,\n 'address': rec.address.id,\n 'data1': rec.data1,\n 'data2': rec.data2,\n 'data3': rec.data3}\n aldb[mem] = aldbRec\n deviceInfo = {'address': device.address.id,\n 'cat': device.cat,\n 'subcat': device.subcat,\n 'product_key': device.product_key,\n 'aldb_status': device.aldb.status.value,\n 'aldb': aldb}\n devices.append(deviceInfo)\n asyncio.ensure_future(self._write_saved_device_info(devices),\n loop=self._loop)", "response": "Save all device information to the device info 
file."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _add_saved_device_info(self, **kwarg):\n addr = kwarg.get('address')\n _LOGGER.debug('Found saved device with address %s', addr)\n self._saved_devices[addr] = kwarg", "response": "Register device info from the saved data file."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\nasync def load_saved_device_info(self):\n _LOGGER.debug(\"Loading saved device info.\")\n deviceinfo = []\n if self._workdir:\n _LOGGER.debug(\"Really Loading saved device info.\")\n try:\n device_file = '{}/{}'.format(self._workdir, DEVICE_INFO_FILE)\n with open(device_file, 'r') as infile:\n try:\n deviceinfo = json.load(infile)\n _LOGGER.debug(\"Saved device file loaded\")\n except json.decoder.JSONDecodeError:\n _LOGGER.debug(\"Loading saved device file failed\")\n except FileNotFoundError:\n _LOGGER.debug(\"Saved device file not found\")\n for device in deviceinfo:\n self._add_saved_device_info(**device)", "response": "Load the saved device info from the device info file."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates an X10 message to send the house code and unit code.", "response": "def unit_code_msg(housecode, unitcode):\n \"\"\"Create an X10 message to send the house code and unit code.\"\"\"\n house_byte = 0\n unit_byte = 0\n if isinstance(housecode, str):\n house_byte = insteonplm.utils.housecode_to_byte(housecode) << 4\n unit_byte = insteonplm.utils.unitcode_to_byte(unitcode)\n elif isinstance(housecode, int) and housecode < 16:\n house_byte = housecode << 4\n unit_byte = unitcode\n else:\n house_byte = housecode\n unit_byte = unitcode\n return X10Received(house_byte + unit_byte, 0x00)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate an X10 message to send the house code and a command code.", "response": "def 
command_msg(housecode, command):\n \"\"\"Create an X10 message to send the house code and a command code.\"\"\"\n house_byte = 0\n if isinstance(housecode, str):\n house_byte = insteonplm.utils.housecode_to_byte(housecode) << 4\n elif isinstance(housecode, int) and housecode < 16:\n house_byte = housecode << 4\n else:\n house_byte = housecode\n return X10Received(house_byte + command, 0x80)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\nasync def create_http_connection(loop, protocol_factory, host, port=25105,\n auth=None):\n \"\"\"Create an HTTP session used to connect to the Insteon Hub.\"\"\"\n protocol = protocol_factory()\n transport = HttpTransport(loop, protocol, host, port, auth)\n _LOGGER.debug(\"create_http_connection Finished creating connection\")\n return (transport, protocol)", "response": "Create an HTTP connection to the Insteon Hub."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a new connection to a specific device.", "response": "async def create(cls, device='/dev/ttyUSB0', host=None,\n username=None, password=None, port=25010, hub_version=2,\n auto_reconnect=True, loop=None, workdir=None,\n poll_devices=True, load_aldb=True):\n \"\"\"Create a connection to a specific device.\n\n Here is where we supply the device and callback callables we\n expect for this PLM class object.\n\n :param device:\n Unix device where the PLM is attached\n :param address:\n IP Address of the Hub\n :param username:\n User name for connecting to the Hub\n :param password:\n Password for connecting to the Hub\n :param auto_reconnect:\n Should the Connection try to automatically reconnect if needed?\n :param loop:\n asyncio.loop for async operation\n :param load_aldb:\n Should the ALDB be loaded on connect\n\n :type device:\n str\n :type auto_reconnect:\n boolean\n :type loop:\n asyncio.loop\n :type update_callback:\n callable\n \"\"\"\n 
_LOGGER.debug(\"Starting Modified Connection.create\")\n conn = cls(device=device, host=host, username=username,\n password=password, port=port, hub_version=hub_version,\n loop=loop, retry_interval=1, auto_reconnect=auto_reconnect)\n\n def connection_lost():\n \"\"\"Respond to Protocol connection lost.\"\"\"\n if conn.auto_reconnect and not conn.closing:\n _LOGGER.debug(\"Reconnecting to transport\")\n asyncio.ensure_future(conn.reconnect(), loop=conn.loop)\n\n protocol_class = PLM\n if conn.host and conn.hub_version == 2:\n protocol_class = Hub\n conn.protocol = protocol_class(\n connection_lost_callback=connection_lost,\n loop=conn.loop,\n workdir=workdir,\n poll_devices=poll_devices,\n load_aldb=load_aldb)\n\n await conn.reconnect()\n\n _LOGGER.debug(\"Ending Connection.create\")\n return conn"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\nasync def reconnect(self):\n _LOGGER.debug('starting Connection.reconnect')\n await self._connect()\n while self._closed:\n await self._retry_connection()\n _LOGGER.debug('ending Connection.reconnect')", "response": "Reconnect to the modem."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nclose the PLM device connection and don t try to reconnect.", "response": "async def close(self, event):\n \"\"\"Close the PLM device connection and don't try to reconnect.\"\"\"\n _LOGGER.info('Closing connection to Insteon Modem')\n self._closing = True\n self._auto_reconnect = False\n await self.protocol.close()\n if self.protocol.transport:\n self.protocol.transport.close()\n await asyncio.sleep(0, loop=self._loop)\n _LOGGER.info('Insteon Modem connection closed')"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef on(self):\n msg = X10Send.unit_code_msg(self.address.x10_housecode,\n self.address.x10_unitcode)\n self._send_method(msg)\n\n msg = 
X10Send.command_msg(self.address.x10_housecode,\n X10_COMMAND_ON)\n self._send_method(msg, False)\n self._update_subscribers(0xff)", "response": "Send the On command to an X10 device."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsends the Off command to an X10 device.", "response": "def off(self):\n \"\"\"Send the Off command to an X10 device.\"\"\"\n msg = X10Send.unit_code_msg(self.address.x10_housecode,\n self.address.x10_unitcode)\n self._send_method(msg)\n\n msg = X10Send.command_msg(self.address.x10_housecode,\n X10_COMMAND_OFF)\n self._send_method(msg, False)\n self._update_subscribers(0x00)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_level(self, val):\n if val == 0:\n self.off()\n elif val == 255:\n self.on()\n else:\n setlevel = 255\n if val < 1:\n setlevel = val * 255\n elif val <= 0xff:\n setlevel = val\n change = setlevel - self._value\n increment = 255 / self._steps\n steps = round(abs(change) / increment)\n print('Steps: ', steps)\n if change > 0:\n method = self.brighten\n self._value += round(steps * increment)\n self._value = min(255, self._value)\n else:\n method = self.dim\n self._value -= round(steps * increment)\n self._value = max(0, self._value)\n # pylint: disable=unused-variable\n for step in range(0, steps):\n method(True)\n self._update_subscribers(self._value)", "response": "Set the device ON LEVEL."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef brighten(self, defer_update=False):\n msg = X10Send.unit_code_msg(self.address.x10_housecode,\n self.address.x10_unitcode)\n self._send_method(msg)\n\n msg = X10Send.command_msg(self.address.x10_housecode,\n X10_COMMAND_BRIGHT)\n self._send_method(msg, False)\n if not defer_update:\n self._update_subscribers(self._value + 255 / self._steps)", "response": "Brighten the device one step."} {"SOURCE": "codesearchnet", 
"instruction": "Can you implement a function in Python 3 that\ndims the device one step.", "response": "def dim(self, defer_update=False):\n \"\"\"Dim the device one step.\"\"\"\n msg = X10Send.unit_code_msg(self.address.x10_housecode,\n self.address.x10_unitcode)\n self._send_method(msg)\n\n msg = X10Send.command_msg(self.address.x10_housecode,\n X10_COMMAND_DIM)\n self._send_method(msg, False)\n if not defer_update:\n self._update_subscribers(self._value - 255 / self._steps)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef matches_pattern(self, other):\n matches = False\n if hasattr(other, 'addr'):\n if self.addr is None or other.addr is None:\n matches = True\n else:\n matches = self.addr == other.addr\n return matches", "response": "Test Address object matches the pattern of another object."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ntaking any format of address and turn it into a hex string.", "response": "def _normalize(self, addr):\n \"\"\"Take any format of address and turn it into a hex string.\"\"\"\n normalize = None\n if isinstance(addr, Address):\n normalize = addr.addr\n self._is_x10 = addr.is_x10\n\n elif isinstance(addr, bytearray):\n normalize = binascii.unhexlify(binascii.hexlify(addr).decode())\n\n elif isinstance(addr, bytes):\n normalize = addr\n\n elif isinstance(addr, str):\n addr = addr.replace('.', '')\n addr = addr[0:6]\n if addr[0:3].lower() == 'x10':\n x10_addr = Address.x10(addr[3:4], int(addr[4:6]))\n normalize = x10_addr.addr\n self._is_x10 = True\n else:\n normalize = binascii.unhexlify(addr.lower())\n\n elif addr is None:\n normalize = None\n\n else:\n _LOGGER.warning('Address class init with unknown type %s: %r',\n type(addr), addr)\n return normalize"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef human(self):\n addrstr = '00.00.00'\n if self.addr:\n if 
self._is_x10:\n housecode_byte = self.addr[1]\n housecode = insteonplm.utils.byte_to_housecode(housecode_byte)\n unitcode_byte = self.addr[2]\n unitcode = insteonplm.utils.byte_to_unitcode(unitcode_byte)\n addrstr = 'X10.{}.{:02d}'.format(housecode.upper(), unitcode)\n else:\n addrstr = '{}.{}.{}'.format(self.hex[0:2],\n self.hex[2:4],\n self.hex[4:6]).upper()\n return addrstr", "response": "Emit the address in human - readible format."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef hex(self):\n addrstr = '000000'\n if self.addr is not None:\n addrstr = binascii.hexlify(self.addr).decode()\n return addrstr", "response": "Emit the address in bare hex format."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nemit the address in bytes format.", "response": "def bytes(self):\n \"\"\"Emit the address in bytes format.\"\"\"\n addrbyte = b'\\x00\\x00\\x00'\n if self.addr is not None:\n addrbyte = self.addr\n return addrbyte"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef id(self):\n dev_id = ''\n if self._is_x10:\n dev_id = 'x10{}{:02d}'.format(self.x10_housecode,\n self.x10_unitcode)\n else:\n dev_id = self.hex\n return dev_id", "response": "Return the ID of the device address."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef x10_housecode(self):\n housecode = None\n if self.is_x10:\n housecode = insteonplm.utils.byte_to_housecode(self.addr[1])\n return housecode", "response": "Emit the X10 house code."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef x10_unitcode(self):\n unitcode = None\n if self.is_x10:\n unitcode = insteonplm.utils.byte_to_unitcode(self.addr[2])\n return unitcode", "response": "Emit the X10 unit code."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it 
does\ndef x10(cls, housecode, unitcode):\n if housecode.lower() in ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',\n 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p']:\n byte_housecode = insteonplm.utils.housecode_to_byte(housecode)\n else:\n if isinstance(housecode, str):\n _LOGGER.error('X10 house code error: %s', housecode)\n else:\n _LOGGER.error('X10 house code is not a string')\n raise ValueError\n\n # 20, 21 and 22 for All Units Off, All Lights On and All Lights Off\n # 'fake' units\n if unitcode in range(1, 17) or unitcode in range(20, 23):\n byte_unitcode = insteonplm.utils.unitcode_to_byte(unitcode)\n else:\n if isinstance(unitcode, int):\n _LOGGER.error('X10 unit code error: %d', unitcode)\n else:\n _LOGGER.error('X10 unit code is not an integer 1 - 16')\n raise ValueError\n\n addr = Address(bytearray([0x00, byte_housecode, byte_unitcode]))\n addr.is_x10 = True\n return addr", "response": "Create an X10 device address."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating message from raw byte stream.", "response": "def from_raw_message(cls, rawmessage):\n \"\"\"Create message from raw byte stream.\"\"\"\n return ManageAllLinkRecord(rawmessage[2:3],\n rawmessage[3:4],\n rawmessage[4:7],\n rawmessage[7:8],\n rawmessage[8:9],\n rawmessage[9:10],\n rawmessage[10:11])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _term_to_xapian_value(term, field_type):\n assert field_type in FIELD_TYPES\n\n def strf(dt):\n \"\"\"\n Equivalent to datetime.datetime.strptime(dt, DATETIME_FORMAT)\n but accepts years below 1900 (see http://stackoverflow.com/q/10263956/931303)\n \"\"\"\n return '%04d%02d%02d%02d%02d%02d' % (\n dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)\n\n if field_type == 'boolean':\n assert isinstance(term, bool)\n if term:\n value = 't'\n else:\n value = 'f'\n\n elif field_type == 'integer':\n value = INTEGER_FORMAT % term\n elif field_type == 'float':\n 
value = xapian.sortable_serialise(term)\n elif field_type == 'date' or field_type == 'datetime':\n if field_type == 'date':\n # http://stackoverflow.com/a/1937636/931303 and comments\n term = datetime.datetime.combine(term, datetime.time())\n value = strf(term)\n else: # field_type == 'text'\n value = _to_xapian_term(term)\n\n return value", "response": "Converts a term to a serialized Xapian value based on the field_type."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _from_xapian_value(value, field_type):\n assert field_type in FIELD_TYPES\n if field_type == 'boolean':\n if value == 't':\n return True\n elif value == 'f':\n return False\n else:\n InvalidIndexError('Field type \"%d\" does not accept value \"%s\"' % (field_type, value))\n elif field_type == 'integer':\n return int(value)\n elif field_type == 'float':\n return xapian.sortable_unserialise(value)\n elif field_type == 'date' or field_type == 'datetime':\n datetime_value = datetime.datetime.strptime(value, DATETIME_FORMAT)\n if field_type == 'datetime':\n return datetime_value\n else:\n return datetime_value.date()\n else: # field_type == 'text'\n return value", "response": "Converts a serialized Xapian value to a Python equivalent based on the field_type."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _update_cache(self):\n fields = connections[self.connection_alias].get_unified_index().all_searchfields()\n if self._fields != fields:\n self._fields = fields\n self._content_field_name, self._schema = self.build_schema(self._fields)", "response": "Update the internal cache with the current search fields."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the index with any objects in iterable.", "response": "def update(self, index, iterable, commit=True):\n \"\"\"\n Updates the `index` with any objects in `iterable` by adding/updating\n the 
database as needed.\n\n Required arguments:\n `index` -- The `SearchIndex` to process\n `iterable` -- An iterable of model instances to index\n Optional arguments:\n `commit` -- ignored\n\n For each object in `iterable`, a document is created containing all\n of the terms extracted from `index.full_prepare(obj)` with field prefixes,\n and 'as-is' as needed. Also, if the field type is 'text' it will be\n stemmed and stored with the 'Z' prefix as well.\n\n eg. `content:Testing` ==> `testing, Ztest, ZXCONTENTtest, XCONTENTtest`\n\n Each document also contains an extra term in the format:\n\n `XCONTENTTYPE.`\n\n As well as a unique identifier in the the format:\n\n `Q..`\n\n eg.: foo.bar (pk=1) ==> `Qfoo.bar.1`, `XCONTENTTYPEfoo.bar`\n\n This is useful for querying for a specific document corresponding to\n a model instance.\n\n The document also contains a pickled version of the object itself and\n the document ID in the document data field.\n\n Finally, we also store field values to be used for sorting data. We\n store these in the document value slots (position zero is reserver\n for the document ID). 
All values are stored as unicode strings with\n conversion of float, int, double, values being done by Xapian itself\n through the use of the :method:xapian.sortable_serialise method.\n \"\"\"\n database = self._database(writable=True)\n\n try:\n term_generator = xapian.TermGenerator()\n term_generator.set_database(database)\n term_generator.set_stemmer(xapian.Stem(self.language))\n try:\n term_generator.set_stemming_strategy(self.stemming_strategy)\n except AttributeError: \n # Versions before Xapian 1.2.11 do not support stemming strategies for TermGenerator\n pass\n if self.include_spelling is True:\n term_generator.set_flags(xapian.TermGenerator.FLAG_SPELLING)\n\n def _add_text(termpos, text, weight, prefix=''):\n \"\"\"\n indexes text appending 2 extra terms\n to identify beginning and ending of the text.\n \"\"\"\n term_generator.set_termpos(termpos)\n\n start_term = '%s^' % prefix\n end_term = '%s$' % prefix\n # add begin\n document.add_posting(start_term, termpos, weight)\n # add text\n term_generator.index_text(text, weight, prefix)\n termpos = term_generator.get_termpos()\n # add ending\n termpos += 1\n document.add_posting(end_term, termpos, weight)\n\n # increase termpos\n term_generator.set_termpos(termpos)\n term_generator.increase_termpos(TERMPOS_DISTANCE)\n\n return term_generator.get_termpos()\n\n def _add_literal_text(termpos, text, weight, prefix=''):\n \"\"\"\n Adds sentence to the document with positional information\n but without processing.\n\n The sentence is bounded by \"^\" \"$\" to allow exact matches.\n \"\"\"\n text = '^ %s $' % text\n for word in text.split():\n term = '%s%s' % (prefix, word)\n document.add_posting(term, termpos, weight)\n termpos += 1\n termpos += TERMPOS_DISTANCE\n return termpos\n\n def add_text(termpos, prefix, text, weight):\n \"\"\"\n Adds text to the document with positional information\n and processing (e.g. 
stemming).\n \"\"\"\n termpos = _add_text(termpos, text, weight, prefix=prefix)\n termpos = _add_text(termpos, text, weight, prefix='')\n termpos = _add_literal_text(termpos, text, weight, prefix=prefix)\n termpos = _add_literal_text(termpos, text, weight, prefix='')\n return termpos\n\n def _get_ngram_lengths(value):\n values = value.split()\n for item in values:\n for ngram_length in six.moves.range(NGRAM_MIN_LENGTH, NGRAM_MAX_LENGTH + 1):\n yield item, ngram_length\n\n for obj in iterable:\n document = xapian.Document()\n term_generator.set_document(document)\n\n def ngram_terms(value):\n for item, length in _get_ngram_lengths(value):\n item_length = len(item)\n for start in six.moves.range(0, item_length - length + 1):\n for size in six.moves.range(length, length + 1):\n end = start + size\n if end > item_length:\n continue\n yield _to_xapian_term(item[start:end])\n\n def edge_ngram_terms(value):\n for item, length in _get_ngram_lengths(value):\n yield _to_xapian_term(item[0:length])\n\n def add_edge_ngram_to_document(prefix, value, weight):\n \"\"\"\n Splits the term in ngrams and adds each ngram to the index.\n The minimum and maximum size of the ngram is respectively\n NGRAM_MIN_LENGTH and NGRAM_MAX_LENGTH.\n \"\"\"\n for term in edge_ngram_terms(value):\n document.add_term(term, weight)\n document.add_term(prefix + term, weight)\n\n def add_ngram_to_document(prefix, value, weight):\n \"\"\"\n Splits the term in ngrams and adds each ngram to the index.\n The minimum and maximum size of the ngram is respectively\n NGRAM_MIN_LENGTH and NGRAM_MAX_LENGTH.\n \"\"\"\n for term in ngram_terms(value):\n document.add_term(term, weight)\n document.add_term(prefix + term, weight)\n\n def add_non_text_to_document(prefix, term, weight):\n \"\"\"\n Adds term to the document without positional information\n and without processing.\n\n If the term is alone, also adds it as \"^$\"\n to allow exact matches on single terms.\n \"\"\"\n document.add_term(term, weight)\n 
document.add_term(prefix + term, weight)\n\n def add_datetime_to_document(termpos, prefix, term, weight):\n \"\"\"\n Adds a datetime to document with positional order\n to allow exact matches on it.\n \"\"\"\n date, time = term.split()\n document.add_posting(date, termpos, weight)\n termpos += 1\n document.add_posting(time, termpos, weight)\n termpos += 1\n document.add_posting(prefix + date, termpos, weight)\n termpos += 1\n document.add_posting(prefix + time, termpos, weight)\n termpos += TERMPOS_DISTANCE + 1\n return termpos\n\n data = index.full_prepare(obj)\n weights = index.get_field_weights()\n\n termpos = term_generator.get_termpos() # identifies the current position in the document.\n for field in self.schema:\n if field['field_name'] not in list(data.keys()):\n # not supported fields are ignored.\n continue\n\n if field['field_name'] in weights:\n weight = int(weights[field['field_name']])\n else:\n weight = 1\n\n value = data[field['field_name']]\n\n if field['field_name'] in (ID, DJANGO_ID, DJANGO_CT):\n # Private fields are indexed in a different way:\n # `django_id` is an int and `django_ct` is text;\n # besides, they are indexed by their (unstemmed) value.\n if field['field_name'] == DJANGO_ID:\n value = int(value)\n value = _term_to_xapian_value(value, field['type'])\n\n document.add_term(TERM_PREFIXES[field['field_name']] + value, weight)\n document.add_value(field['column'], value)\n continue\n else:\n prefix = TERM_PREFIXES['field'] + field['field_name'].upper()\n\n # if not multi_valued, we add as a document value\n # for sorting and facets\n if field['multi_valued'] == 'false':\n document.add_value(field['column'], _term_to_xapian_value(value, field['type']))\n else:\n for t in value:\n # add the exact match of each value\n term = _to_xapian_term(t)\n termpos = add_text(termpos, prefix, term, weight)\n continue\n\n term = _to_xapian_term(value)\n if term == '':\n continue\n # from here on the term is a string;\n # we now decide how it is 
indexed\n\n if field['type'] == 'text':\n # text is indexed with positional information\n termpos = add_text(termpos, prefix, term, weight)\n elif field['type'] == 'datetime':\n termpos = add_datetime_to_document(termpos, prefix, term, weight)\n elif field['type'] == 'ngram':\n add_ngram_to_document(prefix, value, weight)\n elif field['type'] == 'edge_ngram':\n add_edge_ngram_to_document(prefix, value, weight)\n else:\n # all other terms are added without positional information\n add_non_text_to_document(prefix, term, weight)\n\n # store data without indexing it\n document.set_data(pickle.dumps(\n (obj._meta.app_label, obj._meta.model_name, obj.pk, data),\n pickle.HIGHEST_PROTOCOL\n ))\n\n # add the id of the document\n document_id = TERM_PREFIXES[ID] + get_identifier(obj)\n document.add_term(document_id)\n\n # finally, replace or add the document to the database\n database.replace_document(document_id, document)\n\n except UnicodeDecodeError:\n sys.stderr.write('Chunk failed.\\n')\n pass\n\n finally:\n database.close()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nremoving indexes for obj from the database.", "response": "def remove(self, obj, commit=True):\n \"\"\"\n Remove indexes for `obj` from the database.\n\n We delete all instances of `Q..` which\n should be unique to this object.\n\n Optional arguments:\n `commit` -- ignored\n \"\"\"\n database = self._database(writable=True)\n database.delete_document(TERM_PREFIXES[ID] + get_identifier(obj))\n database.close()"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef clear(self, models=(), commit=True):\n if not models:\n # Because there does not appear to be a \"clear all\" method,\n # it's much quicker to remove the contents of the `self.path`\n # folder than it is to remove each document one at a time.\n if os.path.exists(self.path):\n shutil.rmtree(self.path)\n else:\n database = self._database(writable=True)\n for 
model in models:\n database.delete_document(TERM_PREFIXES[DJANGO_CT] + get_model_ct(model))\n database.close()", "response": "Clears all instances of models from the database or all models."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _build_models_query(self, query):\n registered_models_ct = self.build_models_list()\n if registered_models_ct:\n restrictions = [xapian.Query('%s%s' % (TERM_PREFIXES[DJANGO_CT], model_ct))\n for model_ct in registered_models_ct]\n limit_query = xapian.Query(xapian.Query.OP_OR, restrictions)\n\n query = xapian.Query(xapian.Query.OP_AND, query, limit_query)\n\n return query", "response": "Builds a query from query that filters to documents only from registered models."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _check_field_names(self, field_names):\n if field_names:\n for field_name in field_names:\n try:\n self.column[field_name]\n except KeyError:\n raise InvalidIndexError('Trying to use non indexed field \"%s\"' % field_name)", "response": "Checks that the given field names are indexed."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nexecute a query and returns a dictionary of search results.", "response": "def search(self, query, sort_by=None, start_offset=0, end_offset=None,\n fields='', highlight=False, facets=None, date_facets=None,\n query_facets=None, narrow_queries=None, spelling_query=None,\n limit_to_registered_models=None, result_class=None, **kwargs):\n \"\"\"\n Executes the Xapian::query as defined in `query`.\n\n Required arguments:\n `query` -- Search query to execute\n\n Optional arguments:\n `sort_by` -- Sort results by specified field (default = None)\n `start_offset` -- Slice results from `start_offset` (default = 0)\n `end_offset` -- Slice results at `end_offset` (default = None), if None, then all documents\n `fields` -- Filter results on `fields` 
(default = '')\n `highlight` -- Highlight terms in results (default = False)\n `facets` -- Facet results on fields (default = None)\n `date_facets` -- Facet results on date ranges (default = None)\n `query_facets` -- Facet results on queries (default = None)\n `narrow_queries` -- Narrow queries (default = None)\n `spelling_query` -- An optional query to execute spelling suggestion on\n `limit_to_registered_models` -- Limit returned results to models registered in\n the current `SearchSite` (default = True)\n\n Returns:\n A dictionary with the following keys:\n `results` -- A list of `SearchResult`\n `hits` -- The total available results\n `facets` - A dictionary of facets with the following keys:\n `fields` -- A list of field facets\n `dates` -- A list of date facets\n `queries` -- A list of query facets\n If faceting was not used, the `facets` key will not be present\n\n If `query` is None, returns no results.\n\n If `INCLUDE_SPELLING` was enabled in the connection options, the\n extra flag `FLAG_SPELLING_CORRECTION` will be passed to the query parser\n and any suggestions for spell correction will be returned as well as\n the results.\n \"\"\"\n if xapian.Query.empty(query):\n return {\n 'results': [],\n 'hits': 0,\n }\n\n self._check_field_names(facets)\n self._check_field_names(date_facets)\n self._check_field_names(query_facets)\n\n database = self._database()\n\n if limit_to_registered_models is None:\n limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)\n\n if result_class is None:\n result_class = SearchResult\n\n if self.include_spelling is True:\n spelling_suggestion = self._do_spelling_suggestion(database, query, spelling_query)\n else:\n spelling_suggestion = ''\n\n if narrow_queries is not None:\n query = xapian.Query(\n xapian.Query.OP_AND, query, xapian.Query(\n xapian.Query.OP_AND, [self.parse_query(narrow_query) for narrow_query in narrow_queries]\n )\n )\n\n if limit_to_registered_models:\n query = 
self._build_models_query(query)\n\n enquire = xapian.Enquire(database)\n if hasattr(settings, 'HAYSTACK_XAPIAN_WEIGHTING_SCHEME'):\n enquire.set_weighting_scheme(xapian.BM25Weight(*settings.HAYSTACK_XAPIAN_WEIGHTING_SCHEME))\n enquire.set_query(query)\n\n if sort_by:\n try:\n _xapian_sort(enquire, sort_by, self.column)\n except NotSupportedError:\n _old_xapian_sort(enquire, sort_by, self.column)\n\n results = []\n facets_dict = {\n 'fields': {},\n 'dates': {},\n 'queries': {},\n }\n\n if not end_offset:\n end_offset = database.get_doccount() - start_offset\n\n ## prepare spies in case of facets\n if facets:\n facets_spies = self._prepare_facet_field_spies(facets)\n for spy in facets_spies:\n enquire.add_matchspy(spy)\n\n # print enquire.get_query()\n\n matches = self._get_enquire_mset(database, enquire, start_offset, end_offset)\n\n for match in matches:\n app_label, model_name, pk, model_data = pickle.loads(self._get_document_data(database, match.document))\n if highlight:\n model_data['highlighted'] = {\n self.content_field_name: self._do_highlight(\n model_data.get(self.content_field_name), query\n )\n }\n results.append(\n result_class(app_label, model_name, pk, match.percent, **model_data)\n )\n\n if facets:\n # pick single valued facets from spies\n single_facets_dict = self._process_facet_field_spies(facets_spies)\n\n # pick multivalued valued facets from results\n multi_facets_dict = self._do_multivalued_field_facets(results, facets)\n\n # merge both results (http://stackoverflow.com/a/38990/931303)\n facets_dict['fields'] = dict(list(single_facets_dict.items()) + list(multi_facets_dict.items()))\n\n if date_facets:\n facets_dict['dates'] = self._do_date_facets(results, date_facets)\n\n if query_facets:\n facets_dict['queries'] = self._do_query_facets(results, query_facets)\n\n return {\n 'results': results,\n 'hits': self._get_hit_count(database, enquire),\n 'facets': facets_dict,\n 'spelling_suggestion': spelling_suggestion,\n }"} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef more_like_this(self, model_instance, additional_query=None,\n start_offset=0, end_offset=None,\n limit_to_registered_models=True, result_class=None, **kwargs):\n \"\"\"\n Given a model instance, returns a result set of similar documents.\n\n Required arguments:\n `model_instance` -- The model instance to use as a basis for\n retrieving similar documents.\n\n Optional arguments:\n `additional_query` -- An additional query to narrow results\n `start_offset` -- The starting offset (default=0)\n `end_offset` -- The ending offset (default=None), if None, then all documents\n `limit_to_registered_models` -- Limit returned results to models registered in the search (default = True)\n\n Returns:\n A dictionary with the following keys:\n `results` -- A list of `SearchResult`\n `hits` -- The total available results\n\n Opens a database connection, then builds a simple query using the\n `model_instance` to build the unique identifier.\n\n For each document retrieved(should always be one), adds an entry into\n an RSet (relevance set) with the document id, then, uses the RSet\n to query for an ESet (A set of terms that can be used to suggest\n expansions to the original query), omitting any document that was in\n the original query.\n\n Finally, processes the resulting matches and returns.\n \"\"\"\n database = self._database()\n\n if result_class is None:\n result_class = SearchResult\n\n query = xapian.Query(TERM_PREFIXES[ID] + get_identifier(model_instance))\n\n enquire = xapian.Enquire(database)\n enquire.set_query(query)\n\n rset = xapian.RSet()\n\n if not end_offset:\n end_offset = database.get_doccount()\n\n match = None\n for match in self._get_enquire_mset(database, enquire, 0, end_offset):\n rset.add_document(match.docid)\n\n if match is None:\n if not self.silently_fail:\n raise InvalidIndexError('Instance %s with id \"%d\" not indexed' %\n 
(get_identifier(model_instance), model_instance.id))\n else:\n return {'results': [],\n 'hits': 0}\n\n query = xapian.Query(\n xapian.Query.OP_ELITE_SET,\n [expand.term for expand in enquire.get_eset(match.document.termlist_count(), rset, XHExpandDecider())],\n match.document.termlist_count()\n )\n query = xapian.Query(\n xapian.Query.OP_AND_NOT, [query, TERM_PREFIXES[ID] + get_identifier(model_instance)]\n )\n\n if limit_to_registered_models:\n query = self._build_models_query(query)\n\n if additional_query:\n query = xapian.Query(\n xapian.Query.OP_AND, query, additional_query\n )\n\n enquire.set_query(query)\n\n results = []\n matches = self._get_enquire_mset(database, enquire, start_offset, end_offset)\n\n for match in matches:\n app_label, model_name, pk, model_data = pickle.loads(self._get_document_data(database, match.document))\n results.append(\n result_class(app_label, model_name, pk, match.percent, **model_data)\n )\n\n return {\n 'results': results,\n 'hits': self._get_hit_count(database, enquire),\n 'facets': {\n 'fields': {},\n 'dates': {},\n 'queries': {},\n },\n 'spelling_suggestion': None,\n }", "response": "Returns a result set of similar documents."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nparse a query string and return a xapian. 
Query object", "response": "def parse_query(self, query_string):\n \"\"\"\n Given a `query_string`, will attempt to return a xapian.Query\n\n Required arguments:\n ``query_string`` -- A query string to parse\n\n Returns a xapian.Query\n \"\"\"\n if query_string == '*':\n return xapian.Query('') # Match everything\n elif query_string == '':\n return xapian.Query() # Match nothing\n\n qp = xapian.QueryParser()\n qp.set_database(self._database())\n qp.set_stemmer(xapian.Stem(self.language))\n qp.set_stemming_strategy(self.stemming_strategy)\n qp.set_default_op(XAPIAN_OPTS[DEFAULT_OPERATOR])\n qp.add_boolean_prefix(DJANGO_CT, TERM_PREFIXES[DJANGO_CT])\n\n for field_dict in self.schema:\n # since 'django_ct' has a boolean_prefix,\n # we ignore it here.\n if field_dict['field_name'] == DJANGO_CT:\n continue\n\n qp.add_prefix(\n field_dict['field_name'],\n TERM_PREFIXES['field'] + field_dict['field_name'].upper()\n )\n\n vrp = XHValueRangeProcessor(self)\n qp.add_valuerangeprocessor(vrp)\n\n return qp.parse_query(query_string, self.flags)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef build_schema(self, fields):\n content_field_name = ''\n schema_fields = [\n {'field_name': ID,\n 'type': 'text',\n 'multi_valued': 'false',\n 'column': 0},\n {'field_name': DJANGO_ID,\n 'type': 'integer',\n 'multi_valued': 'false',\n 'column': 1},\n {'field_name': DJANGO_CT,\n 'type': 'text',\n 'multi_valued': 'false',\n 'column': 2},\n ]\n self._columns[ID] = 0\n self._columns[DJANGO_ID] = 1\n self._columns[DJANGO_CT] = 2\n\n column = len(schema_fields)\n\n for field_name, field_class in sorted(list(fields.items()), key=lambda n: n[0]):\n if field_class.document is True:\n content_field_name = field_class.index_fieldname\n\n if field_class.indexed is True:\n field_data = {\n 'field_name': field_class.index_fieldname,\n 'type': 'text',\n 'multi_valued': 'false',\n 'column': column,\n }\n\n if field_class.field_type == 'date':\n 
field_data['type'] = 'date'\n elif field_class.field_type == 'datetime':\n field_data['type'] = 'datetime'\n elif field_class.field_type == 'integer':\n field_data['type'] = 'integer'\n elif field_class.field_type == 'float':\n field_data['type'] = 'float'\n elif field_class.field_type == 'boolean':\n field_data['type'] = 'boolean'\n elif field_class.field_type == 'ngram':\n field_data['type'] = 'ngram'\n elif field_class.field_type == 'edge_ngram':\n field_data['type'] = 'edge_ngram'\n\n if field_class.is_multivalued:\n field_data['multi_valued'] = 'true'\n\n schema_fields.append(field_data)\n self._columns[field_data['field_name']] = column\n column += 1\n\n return content_field_name, schema_fields", "response": "Builds the schema from the fields in the index\n ."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nhighlight the query terms in content with html tag.", "response": "def _do_highlight(content, query, tag='em'):\n \"\"\"\n Highlight `query` terms in `content` with html `tag`.\n\n This method assumes that the input text (`content`) does not contain\n any special formatting. 
That is, it does not contain any html tags\n or similar markup that could be screwed up by the highlighting.\n\n Required arguments:\n `content` -- Content to search for instances of `text`\n `text` -- The text to be highlighted\n \"\"\"\n for term in query:\n term = term.decode('utf-8')\n for match in re.findall('[^A-Z]+', term): # Ignore field identifiers\n match_re = re.compile(match, re.I)\n content = match_re.sub('<%s>%s' % (tag, term, tag), content)\n\n return content"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _prepare_facet_field_spies(self, facets):\n spies = []\n for facet in facets:\n slot = self.column[facet]\n spy = xapian.ValueCountMatchSpy(slot)\n # add attribute \"slot\" to know which column this spy is targeting.\n spy.slot = slot\n spies.append(spy)\n return spies", "response": "Returns a list of spies based on the facets."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _process_facet_field_spies(self, spies):\n facet_dict = {}\n for spy in spies:\n field = self.schema[spy.slot]\n field_name, field_type = field['field_name'], field['type']\n\n facet_dict[field_name] = []\n for facet in list(spy.values()):\n if field_type == 'float':\n # the float term is a Xapian serialized object, which is\n # in bytes.\n term = facet.term\n else:\n term = facet.term.decode('utf-8')\n\n facet_dict[field_name].append((_from_xapian_value(term, field_type),\n facet.termfreq))\n return facet_dict", "response": "Returns a dict of facet names with lists of tuples of the form term term_frequency"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _do_multivalued_field_facets(self, results, field_facets):\n facet_dict = {}\n\n for field in field_facets:\n facet_list = {}\n if not self._multi_value_field(field):\n continue\n\n for result in results:\n field_value = 
getattr(result, field)\n for item in field_value: # Facet each item in a MultiValueField\n facet_list[item] = facet_list.get(item, 0) + 1\n\n facet_dict[field] = list(facet_list.items())\n return facet_dict", "response": "Implements a multivalued field facet on the results."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _do_date_facets(results, date_facets):\n def next_datetime(previous, gap_value, gap_type):\n year = previous.year\n month = previous.month\n\n if gap_type == 'year':\n next = previous.replace(year=year + gap_value)\n elif gap_type == 'month':\n if month + gap_value <= 12:\n next = previous.replace(month=month + gap_value)\n else:\n next = previous.replace(\n month=((month + gap_value) % 12),\n year=(year + (month + gap_value) // 12)\n )\n elif gap_type == 'day':\n next = previous + datetime.timedelta(days=gap_value)\n elif gap_type == 'hour':\n return previous + datetime.timedelta(hours=gap_value)\n elif gap_type == 'minute':\n next = previous + datetime.timedelta(minutes=gap_value)\n elif gap_type == 'second':\n next = previous + datetime.timedelta(seconds=gap_value)\n else:\n raise TypeError('\\'gap_by\\' must be '\n '{second, minute, day, month, year}')\n return next\n\n facet_dict = {}\n\n for date_facet, facet_params in list(date_facets.items()):\n gap_type = facet_params.get('gap_by')\n gap_value = facet_params.get('gap_amount', 1)\n date_range = facet_params['start_date']\n\n # construct the bins of the histogram\n facet_list = []\n while date_range < facet_params['end_date']:\n facet_list.append((date_range, 0))\n date_range = next_datetime(date_range, gap_value, gap_type)\n\n facet_list = sorted(facet_list, key=lambda x: x[0], reverse=True)\n\n for result in results:\n result_date = getattr(result, date_facet)\n\n # convert date to datetime\n if not isinstance(result_date, datetime.datetime):\n result_date = datetime.datetime(result_date.year,\n result_date.month,\n 
result_date.day)\n\n # ignore results outside the boundaries.\n if facet_list[0][0] < result_date < facet_list[-1][0]:\n continue\n\n # populate the histogram by putting the result on the right bin.\n for n, facet_date in enumerate(facet_list):\n if result_date > facet_date[0]:\n # equal to facet_list[n][1] += 1, but for a tuple\n facet_list[n] = (facet_list[n][0], (facet_list[n][1] + 1))\n break # bin found; go to next result\n\n facet_dict[date_facet] = facet_list\n\n return facet_dict", "response": "Private method that returns a dictionary containing faceted date fields and a count of documents that match the date range."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _do_spelling_suggestion(database, query, spelling_query):\n if spelling_query:\n if ' ' in spelling_query:\n return ' '.join([database.get_spelling_suggestion(term).decode('utf-8') for term in spelling_query.split()])\n else:\n return database.get_spelling_suggestion(spelling_query).decode('utf-8')\n\n term_set = set()\n for term in query:\n for match in re.findall('[^A-Z]+', term.decode('utf-8')): # Ignore field identifiers\n term_set.add(database.get_spelling_suggestion(match).decode('utf-8'))\n\n return ' '.join(term_set)", "response": "Private method that returns a single spelling suggestion based on spelling_query or query."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nwrapping for the Xapian. enquire. get_mset method that wraps the Xapian. database. get_mset method and catches any Xapian. 
DatabaseModifiedError and returns the result.", "response": "def _get_enquire_mset(database, enquire, start_offset, end_offset, checkatleast=DEFAULT_CHECK_AT_LEAST):\n \"\"\"\n A safer version of Xapian.enquire.get_mset\n\n Simply wraps the Xapian version and catches any `Xapian.DatabaseModifiedError`,\n attempting a `database.reopen` as needed.\n\n Required arguments:\n `database` -- The database to be read\n `enquire` -- An instance of an Xapian.enquire object\n `start_offset` -- The start offset to pass to `enquire.get_mset`\n `end_offset` -- The end offset to pass to `enquire.get_mset`\n \"\"\"\n try:\n return enquire.get_mset(start_offset, end_offset, checkatleast)\n except xapian.DatabaseModifiedError:\n database.reopen()\n return enquire.get_mset(start_offset, end_offset, checkatleast)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the estimated number of matches in a database and an enquire instance.", "response": "def _get_hit_count(self, database, enquire):\n \"\"\"\n Given a database and enquire instance, returns the estimated number\n of matches.\n\n Required arguments:\n `database` -- The database to be queried\n `enquire` -- The enquire instance\n \"\"\"\n return self._get_enquire_mset(\n database, enquire, 0, database.get_doccount()\n ).size()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a list of xapian. 
Query s.", "response": "def _query_from_term(self, term, field_name, filter_type, is_not):\n \"\"\"\n Uses arguments to construct a list of xapian.Query's.\n \"\"\"\n if field_name != 'content' and field_name not in self.backend.column:\n raise InvalidIndexError('field \"%s\" not indexed' % field_name)\n\n # It it is an AutoQuery, it has no filters\n # or others, thus we short-circuit the procedure.\n if isinstance(term, AutoQuery):\n if field_name != 'content':\n query = '%s:%s' % (field_name, term.prepare(self))\n else:\n query = term.prepare(self)\n return [self.backend.parse_query(query)]\n query_list = []\n\n # Handle `ValuesListQuerySet`.\n if hasattr(term, 'values_list'):\n term = list(term)\n\n if field_name == 'content':\n # content is the generic search:\n # force no field_name search\n # and the field_type to be 'text'.\n field_name = None\n field_type = 'text'\n\n # we don't know what is the type(term), so we parse it.\n # Ideally this would not be required, but\n # some filters currently depend on the term to make decisions.\n term = _to_xapian_term(term)\n\n query_list.append(self._filter_contains(term, field_name, field_type, is_not))\n # when filter has no filter_type, haystack uses\n # filter_type = 'content'. 
Here we remove it\n # since the above query is already doing this\n if filter_type == 'content':\n filter_type = None\n else:\n # get the field_type from the backend\n field_type = self.backend.schema[self.backend.column[field_name]]['type']\n\n # private fields don't accept 'contains' or 'startswith'\n # since they have no meaning.\n if filter_type in ('contains', 'startswith') and field_name in (ID, DJANGO_ID, DJANGO_CT):\n filter_type = 'exact'\n\n if field_type == 'text':\n # we don't know what type \"term\" is, but we know we are searching as text\n # so we parse it like that.\n # Ideally this would not be required since _term_query does it, but\n # some filters currently depend on the term to make decisions.\n if isinstance(term, list):\n term = [_to_xapian_term(term) for term in term]\n else:\n term = _to_xapian_term(term)\n\n # todo: we should check that the filter is valid for this field_type or raise InvalidIndexError\n if filter_type == 'contains':\n query_list.append(self._filter_contains(term, field_name, field_type, is_not))\n elif filter_type in ('content', 'exact'):\n query_list.append(self._filter_exact(term, field_name, field_type, is_not))\n elif filter_type == 'in':\n query_list.append(self._filter_in(term, field_name, field_type, is_not))\n elif filter_type == 'startswith':\n query_list.append(self._filter_startswith(term, field_name, field_type, is_not))\n elif filter_type == 'endswith':\n raise NotImplementedError(\"The Xapian search backend doesn't support endswith queries.\")\n elif filter_type == 'gt':\n query_list.append(self._filter_gt(term, field_name, field_type, is_not))\n elif filter_type == 'gte':\n query_list.append(self._filter_gte(term, field_name, field_type, is_not))\n elif filter_type == 'lt':\n query_list.append(self._filter_lt(term, field_name, field_type, is_not))\n elif filter_type == 'lte':\n query_list.append(self._filter_lte(term, field_name, field_type, is_not))\n elif filter_type == 'range':\n 
query_list.append(self._filter_range(term, field_name, field_type, is_not))\n return query_list"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a Query that returns a list of objects that contain the term.", "response": "def _filter_contains(self, term, field_name, field_type, is_not):\n \"\"\"\n Splits the sentence in terms and join them with OR,\n using stemmed and un-stemmed.\n\n Assumes term is not a list.\n \"\"\"\n if field_type == 'text':\n term_list = term.split()\n else:\n term_list = [term]\n\n query = self._or_query(term_list, field_name, field_type)\n if is_not:\n return xapian.Query(xapian.Query.OP_AND_NOT, self._all_query(), query)\n else:\n return query"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a query that matches exactly ANY term in term_list.", "response": "def _filter_in(self, term_list, field_name, field_type, is_not):\n \"\"\"\n Returns a query that matches exactly ANY term in term_list.\n\n Notice that:\n A in {B,C} <=> (A = B or A = C)\n ~(A in {B,C}) <=> ~(A = B or A = C)\n Because OP_AND_NOT(C, D) <=> (C and ~D), then D=(A in {B,C}) requires `is_not=False`.\n\n Assumes term is a list.\n \"\"\"\n query_list = [self._filter_exact(term, field_name, field_type, is_not=False)\n for term in term_list]\n\n if is_not:\n return xapian.Query(xapian.Query.OP_AND_NOT, self._all_query(),\n xapian.Query(xapian.Query.OP_OR, query_list))\n else:\n return xapian.Query(xapian.Query.OP_OR, query_list)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _filter_exact(self, term, field_name, field_type, is_not):\n if field_type == 'text' and field_name not in (DJANGO_CT,):\n term = '^ %s $' % term\n query = self._phrase_query(term.split(), field_name, field_type)\n else:\n query = self._term_query(term, field_name, field_type, stemmed=False)\n\n if is_not:\n return xapian.Query(xapian.Query.OP_AND_NOT, 
self._all_query(), query)\n else:\n return query", "response": "Returns a query that matches exactly the un - stemmed term term\n "} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _filter_startswith(self, term, field_name, field_type, is_not):\n if field_type == 'text':\n if len(term.split()) == 1:\n term = '^ %s*' % term\n query = self.backend.parse_query(term)\n else:\n term = '^ %s' % term\n query = self._phrase_query(term.split(), field_name, field_type)\n else:\n term = '^%s*' % term\n query = self.backend.parse_query(term)\n\n if is_not:\n return xapian.Query(xapian.Query.OP_AND_NOT, self._all_query(), query)\n return query", "response": "Returns a startswith query on the un - stemmed term."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _or_query(self, term_list, field, field_type):\n term_list = [self._term_query(term, field, field_type) for term in term_list]\n return xapian.Query(xapian.Query.OP_OR, term_list)", "response": "Returns an xapian. 
Query object that is an OR query."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _phrase_query(self, term_list, field_name, field_type):\n term_list = [self._term_query(term, field_name, field_type,\n stemmed=False) for term in term_list]\n\n query = xapian.Query(xapian.Query.OP_PHRASE, term_list)\n return query", "response": "Returns a query that matches exact terms with\n positional order"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nconstruct a query for a single term.", "response": "def _term_query(self, term, field_name, field_type, stemmed=True):\n \"\"\"\n Constructs a query of a single term.\n\n If `field_name` is not `None`, the term is search on that field only.\n If exact is `True`, the search is restricted to boolean matches.\n \"\"\"\n constructor = '{prefix}{term}'\n\n # construct the prefix to be used.\n prefix = ''\n if field_name:\n prefix = TERM_PREFIXES['field'] + field_name.upper()\n term = _to_xapian_term(term)\n\n if field_name in (ID, DJANGO_ID, DJANGO_CT):\n # to ensure the value is serialized correctly.\n if field_name == DJANGO_ID:\n term = int(term)\n term = _term_to_xapian_value(term, field_type)\n return xapian.Query('%s%s' % (TERM_PREFIXES[field_name], term))\n\n # we construct the query dates in a slightly different way\n if field_type == 'datetime':\n date, time = term.split()\n return xapian.Query(xapian.Query.OP_AND_MAYBE,\n constructor.format(prefix=prefix, term=date),\n constructor.format(prefix=prefix, term=time)\n )\n\n # only use stem if field is text or \"None\"\n if field_type not in ('text', None):\n stemmed = False\n\n unstemmed_term = constructor.format(prefix=prefix, term=term)\n if stemmed:\n stem = xapian.Stem(self.backend.language)\n stemmed_term = 'Z' + constructor.format(prefix=prefix, term=stem(term).decode('utf-8'))\n\n return xapian.Query(xapian.Query.OP_OR,\n xapian.Query(stemmed_term),\n xapian.Query(unstemmed_term)\n )\n else:\n 
return xapian.Query(unstemmed_term)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _filter_gte(self, term, field_name, field_type, is_not):\n vrp = XHValueRangeProcessor(self.backend)\n pos, begin, end = vrp('%s:%s' % (field_name, _term_to_xapian_value(term, field_type)), '*')\n if is_not:\n return xapian.Query(xapian.Query.OP_AND_NOT,\n self._all_query(),\n xapian.Query(xapian.Query.OP_VALUE_RANGE, pos, begin, end)\n )\n return xapian.Query(xapian.Query.OP_VALUE_RANGE, pos, begin, end)", "response": "Private method that returns a xapian. Query that searches for any term that is greater than term in a specified field."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get(company='', company_uri=''):\n if not company and not company_uri:\n raise Exception(\"glassdoor.gd.get(company='', company_uri=''): \"\\\n \" company or company_uri required\")\n payload = {}\n if not company_uri:\n payload.update({'clickSource': 'searchBtn',\n 'sc.keyword': company\n })\n uri = '%s/%s' % (GLASSDOOR_API, REVIEWS_URL)\n else:\n uri = '%s%s' % (GLASSDOOR_API, company_uri)\n r = requests.get(uri, params=payload)\n soup = BeautifulSoup(r.content)\n results = parse(soup)\n return results", "response": "Performs a HTTP GET for a glassdoor page and returns json"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef parse_suggestions(soup):\n selector_comps = {'class': 'companyData'}\n companies = soup.findAll('div', selector_comps)\n\n def is_exact_match(c):\n \"\"\"Determines if this company suggestion has an [exact match]\n html label or whether its name matches the company name the\n user searched for\n \"\"\"\n selector_exact = {'class' : 'chickletExactMatch chicklet'}\n searched_name = soup.findAll('input', {'name': 'sc.keyword'})[0]['value']\n actual_name = c.findAll('h3')[0].text\n names_match = 
searched_name.lower() == actual_name.lower()\n exact_tag = bool(c.findAll('i', selector_exact))\n return exact_tag or names_match\n\n def parse_suggestion(c):\n return {\n 'name': c.findAll('h3')[0].text,\n 'uri': c.findAll('a')[1]['href'],\n 'exact': is_exact_match(c)\n }\n \n suggestions = []\n for c in companies:\n try:\n suggestions.append(parse_suggestion(c))\n except IndexError as e:\n pass\n \n return suggestions", "response": "Parses the suggestions from the company and returns a list of dicts."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef parse(soup):\n\n if is_direct_match(soup):\n return {'satisfaction': parse_satisfaction(soup),\n 'ceo': parse_ceo(soup),\n 'meta': parse_meta(soup),\n 'salary': parse_salary(soup)\n }\n\n suggestions = parse_suggestions(soup)\n exact_match = next((s for s in suggestions if s['exact']), None)\n if exact_match:\n return get(company_uri=exact_match['uri'])\n return suggestions", "response": "Parses the results for a company search and returns the results\n if is_direct_match."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef init_app(self, app):\n # Save this so we can use it later in the extension\n if not hasattr(app, 'extensions'): # pragma: no cover\n app.extensions = {}\n app.extensions['flask-graphql-auth'] = self\n\n self._set_default__configuration_options(app)", "response": "Register this extension with the flask app."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets the default configuration options used by this extension.", "response": "def _set_default__configuration_options(app):\n \"\"\"\n Sets the default configuration options used by this extension\n \"\"\"\n app.config.setdefault('JWT_TOKEN_ARGUMENT_NAME', \"token\") # Name of token argument in GraphQL request resolver\n app.config.setdefault('JWT_REFRESH_TOKEN_ARGUMENT_NAME', \"refresh_token\")\n\n 
app.config.setdefault('JWT_ACCESS_TOKEN_EXPIRES', datetime.timedelta(minutes=15))\n app.config.setdefault('JWT_REFRESH_TOKEN_EXPIRES', datetime.timedelta(days=30))\n\n app.config.setdefault('JWT_SECRET_KEY', None)\n\n app.config.setdefault('JWT_IDENTITY_CLAIM', 'identity')\n app.config.setdefault('JWT_USER_CLAIMS', 'user_claims')"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ndecode an encoded JWT into a dictionary containing the contents of the JWT", "response": "def decode_jwt(encoded_token, secret, algorithm, identity_claim_key,\n user_claims_key):\n \"\"\"\n Decodes an encoded JWT\n\n :param encoded_token: The encoded JWT string to decode\n :param secret: Secret key used to encode the JWT\n :param algorithm: Algorithm used to encode the JWT\n :param identity_claim_key: expected key that contains the identity\n :param user_claims_key: expected key that contains the user claims\n :return: Dictionary containing contents of the JWT\n \"\"\"\n # This call verifies the ext, iat, and nbf claims\n data = jwt.decode(encoded_token, secret, algorithms=[algorithm])\n\n # Make sure that any custom claims we expect in the token are present\n if 'jti' not in data:\n raise JWTDecodeError(\"Missing claim: jti\")\n if identity_claim_key not in data:\n raise JWTDecodeError(\"Missing claim: {}\".format(identity_claim_key))\n if 'type' not in data or data['type'] not in ('refresh', 'access'):\n raise JWTDecodeError(\"Missing or invalid claim: type\")\n if user_claims_key not in data:\n data[user_claims_key] = {}\n\n return data"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_jwt_data(token, token_type):\n jwt_data = decode_jwt(\n encoded_token=token,\n secret=current_app.config['JWT_SECRET_KEY'],\n algorithm='HS256',\n identity_claim_key=current_app.config['JWT_IDENTITY_CLAIM'],\n user_claims_key=current_app.config['JWT_USER_CLAIMS']\n )\n\n # token type verification\n if jwt_data['type'] 
!= token_type:\n raise WrongTokenError('Only {} tokens are allowed'.format(token_type))\n\n return jwt_data", "response": "Decodes an encoded JWT string and validates it"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef query_jwt_required(fn):\n @wraps(fn)\n def wrapper(*args, **kwargs):\n print(args[0])\n token = kwargs.pop(current_app.config['JWT_TOKEN_ARGUMENT_NAME'])\n try:\n verify_jwt_in_argument(token)\n except Exception as e:\n return AuthInfoField(message=str(e))\n\n return fn(*args, **kwargs)\n return wrapper", "response": "Decorator to protect a query resolver."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef mutation_jwt_refresh_token_required(fn):\n @wraps(fn)\n def wrapper(cls, *args, **kwargs):\n token = kwargs.pop(current_app.config['JWT_REFRESH_TOKEN_ARGUMENT_NAME'])\n try:\n verify_refresh_jwt_in_argument(token)\n except Exception as e:\n return cls(AuthInfoField(message=str(e)))\n\n return fn(*args, **kwargs)\n return wrapper", "response": "Decorator to protect a mutation."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _hid_enumerate(vendor_id=0, product_id=0):\n start = hidapi.hid_enumerate(vendor_id, product_id)\n result = []\n cur = ffi.new(\"struct hid_device_info*\");\n cur = start\n\n # Copy everything into python list\n while cur != ffi.NULL:\n result.append(HIDDevice(cur))\n cur = cur.next\n\n # Free the C memory\n hidapi.hid_free_enumeration(start)\n\n return result", "response": "Enumerates all the hid devices for VID : PID. 
Returns a list of HIDDevice objects."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nopens the HID device for reading and writing.", "response": "def open(self):\n \"\"\"\n Open the HID device for reading and writing.\n \"\"\"\n if self._is_open:\n raise HIDException(\"Failed to open device: HIDDevice already open\")\n\n path = self.path.encode('utf-8')\n dev = hidapi.hid_open_path(path)\n\n if dev:\n self._is_open = True\n self._device = dev\n else:\n raise HIDException(\"Failed to open device\")"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef close(self):\n if self._is_open:\n self._is_open = False\n hidapi.hid_close(self._device)", "response": "Closes the hid device."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef write(self, data, report_id=0):\n\n if not self._is_open:\n raise HIDException(\"HIDDevice not open\")\n\n write_data = bytearray([report_id]) + bytearray(data)\n cdata = ffi.new(\"const unsigned char[]\", bytes(write_data))\n num_written = hidapi.hid_write(self._device, cdata, len(write_data))\n if num_written < 0:\n raise HIDException(\"Failed to write to HID device: \" + str(num_written))\n else:\n return num_written", "response": "Writes data to the HID device on its endpoint."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef read(self, size=64, timeout=None):\n\n if not self._is_open:\n raise HIDException(\"HIDDevice not open\")\n\n data = [0] * size\n cdata = ffi.new(\"unsigned char[]\", data)\n bytes_read = 0\n\n if timeout == None:\n bytes_read = hidapi.hid_read(self._device, cdata, len(cdata))\n else:\n bytes_read = hidapi.hid_read_timeout(self._device, cdata, len(cdata), timeout)\n\n\n if bytes_read < 0:\n raise HIDException(\"Failed to read from HID device: \" + str(bytes_read))\n elif bytes_read == 0:\n return []\n else:\n return 
bytearray(cdata)", "response": "Read from the HID device on its endpoint."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck if the USB device is still connected to the USB device.", "response": "def is_connected(self):\n \"\"\"\n Checks if the USB device is still connected\n \"\"\"\n if self._is_open:\n err = hidapi.hid_read_timeout(self._device, ffi.NULL, 0, 0)\n if err == -1:\n return False\n else:\n return True\n else:\n en = Enumeration(vid=self.vendor_id, pid=self.product_id).find(path=self.path)\n if len(en) == 0:\n return False\n else:\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef send_feature_report(self, data, report_id=0x00):\n if not self._is_open:\n raise HIDException(\"HIDDevice not open\")\n\n report = bytearray([report_id]) + bytearray(data)\n cdata = ffi.new(\"const unsigned char[]\", bytes(report))\n bytes_written = hidapi.hid_send_feature_report(self._device, cdata, len(report))\n\n if bytes_written == -1:\n raise HIDException(\"Failed to send feature report to HID device\")\n\n return bytes_written", "response": "Send a Feature report to a HID device."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_feature_report(self, size, report_id=0x00):\n data = [0] * (size+1)\n cdata = ffi.new(\"unsigned char[]\", bytes(data))\n cdata[0] = report_id\n\n bytes_read = hidapi.hid_get_feature_report(self._device, cdata, len(cdata))\n\n if bytes_read == -1:\n raise HIDException(\"Failed to get feature report from HID device\")\n\n return bytearray(cdata[1:size+1])", "response": "Get a feature report from a HID device."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets an error string from the device", "response": "def get_error(self):\n \"\"\"\n Get an error string from the device\n \"\"\"\n err_str = hidapi.hid_error(self._device)\n if err_str 
== ffi.NULL:\n return None\n else:\n return ffi.string(err_str)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_indexed_string(self, index):\n max_len = 128\n str_buf = ffi.new(\"wchar_t[]\", str(bytearray(max_len)))\n ret = hidapi.hid_get_indexed_string(self._device, index, str_buf, max_len)\n\n if ret < 0:\n raise HIDException(self._device.get_error())\n elif ret == 0:\n return None\n else:\n return ffi.string(str_buf).encode('utf-8')", "response": "Get the string with the given index from the device."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef description(self):\n return \\\n\"\"\"HIDDevice:\n {} | {:x}:{:x} | {} | {} | {}\n release_number: {}\n usage_page: {}\n usage: {}\n interface_number: {}\\\n\"\"\".format(self.path,\n self.vendor_id,\n self.product_id,\n self.manufacturer_string,\n self.product_string,\n self.serial_number,\n self.release_number,\n self.usage_page,\n self.usage,\n self.interface_number\n )", "response": "Get a string describing the HID device."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef find(self, vid=None, pid=None, serial=None, interface=None, \\\n path=None, release_number=None, manufacturer=None,\n product=None, usage=None, usage_page=None):\n \"\"\"\n Attempts to open a device in this `Enumeration` object. 
Optional\n arguments can be provided to filter the resulting list based on various\n parameters of the HID devices.\n\n Args:\n vid: filters by USB Vendor ID\n pid: filters by USB Product ID\n serial: filters by USB serial string (.iSerialNumber)\n interface: filters by interface number (bInterfaceNumber)\n release_number: filters by the USB release number (.bcdDevice)\n manufacturer: filters by USB manufacturer string (.iManufacturer)\n product: filters by USB product string (.iProduct)\n usage: filters by HID usage\n usage_page: filters by HID usage_page\n path: filters by HID API path.\n \"\"\"\n result = []\n\n for dev in self.device_list:\n if vid not in [0, None] and dev.vendor_id != vid:\n continue\n if pid not in [0, None] and dev.product_id != pid:\n continue\n if serial and dev.serial_number != serial:\n continue\n if path and dev.path != path:\n continue\n if manufacturer and dev.manufacturer_string != manufacturer:\n continue\n if product and dev.product_string != product:\n continue\n if release_number != None and dev.release_number != release_number:\n continue\n if interface != None and dev.interface_number != interface:\n continue\n if usage != None and dev.usage != usage:\n continue\n if usage_page != None and dev.usage_page != usage_page:\n continue\n result.append(dev)\n return result", "response": "Finds a specific HID device in this Enumeration object."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nretrieve all the tracks of the album", "response": "def get_tracks(self):\n \"\"\"\n Retrieves all the tracks of the album if they haven't been retrieved yet\n :return: List. 
Tracks of the current album\n \"\"\"\n if not self._track_list:\n tracks = itunespy.lookup(id=self.collection_id, entity=itunespy.entities['song'])[1:]\n for track in tracks:\n self._track_list.append(track)\n return self._track_list"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting all of the track s length and returns the sum of all the track s length and the sum of all the track s length", "response": "def get_album_time(self, round_number=2):\n \"\"\"\n Retrieves all of the track's length and returns the sum of all\n :param round_number: Int. Number of decimals to round the sum\n :return: Int. Sum of all the track's length\n \"\"\"\n if not self._track_list:\n self.get_tracks()\n if self._album_time is None:\n album_time = 0.0\n for track in self._track_list:\n album_time += track.get_track_time_minutes()\n self._album_time = round(album_time, round_number)\n return self._album_time"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_movies(self):\n return itunespy.lookup(id=self.artist_id, entity=itunespy.entities['movie'])[1:]", "response": "Retrieves all the movies published by the artist\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef solarcalcs(self):\n\n self.dir = self.forc.dir # Direct sunlight (perpendicular to the sun's ray)\n self.dif = self.forc.dif # Diffuse sunlight\n\n if self.dir + self.dif > 0.:\n\n self.logger.debug(\"{} Solar radiation > 0\".format(__name__))\n\n # calculate zenith tangent, and critOrient solar angles\n self.solarangles()\n\n self.horSol = max(math.cos(self.zenith)*self.dir, 0.0) # Direct horizontal radiation\n # Fractional terms for wall & road\n self.Kw_term = min(abs(1./self.UCM.canAspect*(0.5-self.critOrient/math.pi) \\\n + 1/math.pi*self.tanzen*(1-math.cos(self.critOrient))),1.)\n self.Kr_term = min(abs(2.*self.critOrient/math.pi \\\n - 
(2/math.pi*self.UCM.canAspect*self.tanzen)*(1-math.cos(self.critOrient))), 1-2*self.UCM.canAspect*self.Kw_term)\n\n\n # Direct and diffuse solar radiation\n self.bldSol = self.horSol*self.Kw_term + self.UCM.wallConf*self.dif # Assume trees are shorter than buildings\n self.roadSol = self.horSol*self.Kr_term + self.UCM.roadConf*self.dif\n\n # Solar reflections. Add diffuse radiation from vegetation to alb_road if in season\n if self.simTime.month < self.parameter.vegStart or self.simTime.month > self.parameter.vegEnd:\n alb_road = self.UCM.road.albedo\n else:\n alb_road = self.UCM.road.albedo*(1.-self.UCM.road.vegCoverage) + self.parameter.vegAlbedo*self.UCM.road.vegCoverage\n\n # First set of reflections\n rr = alb_road * self.roadSol\n rw = self.UCM.alb_wall * self.bldSol\n\n # bounces\n fr = (1. - (1. - 2.*self.UCM.wallConf) * self.UCM.alb_wall + (1. - self.UCM.roadConf) \\\n * self.UCM.wallConf * alb_road * self.UCM.alb_wall)\n\n # (1.0-self.UCM.roadConf) road to wall view\n self.mr = (rr + (1.0-self.UCM.roadConf) * alb_road * (rw + self.UCM.wallConf * self.UCM.alb_wall * rr)) / fr\n self.mw = (rw + self.UCM.wallConf * self.UCM.alb_wall * rr) / fr\n\n # Receiving solar, including bounces (W m-2)\n self.UCM.road.solRec = self.roadSol + (1 - self.UCM.roadConf)*self.mw\n\n for j in range(len(self.BEM)):\n self.BEM[j].roof.solRec = self.horSol + self.dif\n self.BEM[j].wall.solRec = self.bldSol + (1 - 2*self.UCM.wallConf) * self.mw + self.UCM.wallConf * self.mr\n\n self.rural.solRec = self.horSol + self.dif # Solar received by rural\n self.UCM.SolRecRoof = self.horSol + self.dif # Solar received by roof\n self.UCM.SolRecRoad = self.UCM.road.solRec # Solar received by road\n self.UCM.SolRecWall = self.bldSol+(1-2*self.UCM.wallConf)*self.UCM.road.albedo*self.roadSol # Solar received by wall\n\n # Vegetation heat (per m^2 of veg)\n self.UCM.treeSensHeat = (1-self.parameter.vegAlbedo)*(1-self.parameter.treeFLat)*self.UCM.SolRecRoad\n self.UCM.treeLatHeat = 
(1-self.parameter.vegAlbedo)*self.parameter.treeFLat*self.UCM.SolRecRoad\n\n else: # No Sun\n\n self.logger.debug(\"{} Solar radiation = 0\".format(__name__))\n\n self.UCM.road.solRec = 0.\n self.rural.solRec = 0.\n\n for j in range(len(self.BEM)):\n self.BEM[j].roof.solRec = 0.\n self.BEM[j].wall.solRec = 0.\n\n self.UCM.SolRecRoad = 0. # Solar received by road\n self.UCM.SolRecRoof = 0. # Solar received by roof\n self.UCM.SolRecWall = 0. # Solar received by wall\n self.UCM.treeSensHeat = 0.\n self.UCM.treeLatHeat = 0.\n\n return self.rural, self.UCM, self.BEM", "response": "Returns a list of solar calibrations for the current object."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncalculate solar angles for the current canyon and the solar declination angle and critical canyon angle for the current canyon and solar radiation.", "response": "def solarangles (self):\n \"\"\"\n Calculation based on NOAA. Solves for zenith angle, tangent of zenithal angle,\n and critical canyon angle based on following parameters:\n canAspect # aspect Ratio of canyon\n simTime # simulation parameters\n RSM.lon # longitude (deg)\n RSM.lat # latitude (deg)\n RSM.GMT # GMT hour correction\n\n Properties\n self.ut # elapsed hours on current day\n self.ad # fractional year in radians\n self.eqtime\n self.decsol # solar declination angle\n self.zenith # Angle between normal to earth's surface and sun position\n self.tanzen # tangente of solar zenithal angle\n self.critOrient # critical canyon angle for which solar radiation reaches the road\n \"\"\"\n\n ln = self.RSM.lon\n\n month = self.simTime.month\n day = self.simTime.day\n secDay = self.simTime.secDay # Total elapsed seconds in simulation\n inobis = self.simTime.inobis # total days for first of month\n # i.e [0,31,59,90,120,151,181,212,243,273,304,334]\n canAspect = self.UCM.canAspect\n lon = self.RSM.lon\n lat = self.RSM.lat\n GMT = self.RSM.GMT\n\n self.ut = (24. + (int(secDay)/3600.%24.)) % 24. 
# Get elapsed hours on current day\n ibis = list(range(len(inobis)))\n\n for JI in range(1, 12):\n ibis[JI] = inobis[JI]+1\n\n date = day + inobis[month-1]-1 # Julian day of the year\n # divide circle by 365 days, multiply by elapsed days + hours\n self.ad = 2.0 * math.pi/365. * (date-1 + (self.ut-(12/24.))) # Fractional year (radians)\n\n self.eqtime = 229.18 * (0.000075+0.001868*math.cos(self.ad)-0.032077*math.sin(self.ad) - \\\n 0.01461*math.cos(2*self.ad)-0.040849*math.sin(2*self.ad))\n\n # Declination angle (angle of sun with equatorial plane)\n self.decsol = 0.006918-0.399912*math.cos(self.ad)+0.070257*math.sin(self.ad) \\\n -0.006758*math.cos(2.*self.ad)+0.000907*math.sin(2.*self.ad) \\\n -0.002697*math.cos(3.*self.ad)+0.00148 *math.sin(3.*self.ad)\n\n time_offset = self.eqtime - 4. * lon + 60 * GMT\n tst = secDay + time_offset * 60\n\n ha = (tst/4./60.-180.) * math.pi/180.\n zlat = lat * (math.pi/180.) # change angle units to radians\n\n # Calculate zenith solar angle\n self.zenith = math.acos(math.sin(zlat)*math.sin(self.decsol) + math.cos(zlat)*math.cos(self.decsol)*math.cos(ha))\n\n # tangente of solar zenithal angle\n if abs(0.5*math.pi - self.zenith) < 1e-6:\n if 0.5*math.pi - self.zenith > 0.:\n self.tanzen = math.tan(0.5*math.pi-1e-6)\n\n elif 0.5*math.pi - self.zenith <= 0.:\n self.tanzen = math.tan(0.5*math.pi+1e-6)\n\n elif abs(self.zenith) < 1e-6:\n # lim x->0 tan(x) -> 0 which results in division by zero error\n # when calculating the critical canyon angle\n # so set tanzen to 1e-6 which will result in critical canyon angle = 90\n self.tanzen = 1e-6\n\n else:\n self.tanzen = math.tan(self.zenith)\n\n # critical canyon angle for which solar radiation reaches the road\n # has to do with street canyon orientation for given solar angle\n self.critOrient = math.asin(min(abs( 1./self.tanzen)/canAspect, 1. 
))"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nmodify version of Psychometrics by Tea Zakula MIT Building Technology Lab Input: Tdb_in, w_in, P Output: Tdb, w, phi, h, Tdp, v where: Tdb_in = [K] dry bulb temperature w_in = [kgv/kgda] Humidity Ratio P = [P] Atmospheric Station Pressure Tdb: [C] dry bulb temperature w: [kgv/kgda] Humidity Ratio phi: [Pw/Pws*100] relative humidity Tdp: [C] dew point temperature h: [J/kga] enthalpy v: [m3/kga] specific volume", "response": "def psychrometrics (Tdb_in, w_in, P):\n \"\"\"\n Modified version of Psychometrics by Tea Zakula\n MIT Building Technology Lab\n Input: Tdb_in, w_in, P\n Output: Tdb, w, phi, h, Tdp, v\n\n where:\n Tdb_in = [K] dry bulb temperature\n w_in = [kgv/kgda] Humidity Ratio\n P = [P] Atmospheric Station Pressure\n\n Tdb: [C] dry bulb temperature\n w: [kgv/kgda] Humidity Ratio\n phi: [Pw/Pws*100] relative humidity\n Tdp: [C] dew point temperature\n h: [J/kga] enthalpy\n v: [m3/kga] specific volume\n \"\"\"\n # Change units\n c_air = 1006. # [J/kg] air heat capacity, value from ASHRAE Fundamentals\n hlg = 2501000. # [J/kg] latent heat, value from ASHRAE Fundamentals\n cw = 1860. # [J/kg] value from ASHRAE Fundamentals\n P = P/1000. 
# convert from Pa to kPa\n\n Tdb = Tdb_in - 273.15\n w = w_in\n\n # phi (RH) calculation from Tdb and w\n Pw = (w*P)/(0.621945 + w) # partial pressure of water vapor\n Pws = saturation_pressure(Tdb) # Get saturation pressure for given Tdb\n phi = Pw/Pws*100.0\n\n # enthalpy calculation from Tdb and w\n h = c_air*Tdb + w*(hlg+cw*Tdb) # [J kga-1]\n\n # specific volume calculation from Tdb and w\n v = 0.287042 * (Tdb+273.15)*(1+1.607858*w)/P # ?\n\n # dew point calculation from w\n _pw = (w*P)/(0.621945 + w) # water vapor partial pressure in kPa\n alpha = log(_pw)\n\n Tdp = 6.54 + 14.526*alpha + pow(alpha,2)*0.7389 + pow(alpha,3)*0.09486 + pow(_pw,0.1984)*0.4569 # valid for Tdp between 0 C and 93 C\n\n return Tdb, w, phi, h, Tdp, v"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsearches for a term in an array of result_item objects.", "response": "def search(term, country='US', media='all', entity=None, attribute=None, limit=50):\n \"\"\"\n Returns the result of the search of the specified term in an array of result_item(s)\n :param term: String. The URL-encoded text string you want to search for. Example: Steven Wilson.\n The method will take care of spaces so you don't have to.\n :param country: String. The two-letter country code for the store you want to search.\n For a full list of the codes: http://en.wikipedia.org/wiki/%20ISO_3166-1_alpha-2\n :param media: String. The media type you want to search for. Example: music\n :param entity: String. The type of results you want returned, relative to the specified media type. Example: musicArtist.\n Full list: musicArtist, musicTrack, album, musicVideo, mix, song\n :param attribute: String. The attribute you want to search for in the stores, relative to the specified media type.\n :param limit: Integer. 
The number of search results you want the iTunes Store to return.\n :return: An array of result_item(s)\n \"\"\"\n search_url = _url_search_builder(term, country, media, entity, attribute, limit)\n r = requests.get(search_url)\n\n try:\n json = r.json()['results']\n result_count = r.json()['resultCount']\n except:\n raise ConnectionError(general_no_connection)\n\n if result_count == 0:\n raise LookupError(search_error + str(term))\n\n return _get_result_list(json)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef lookup(id=None, artist_amg_id=None, upc=None, country='US', media='all', entity=None, attribute=None, limit=50):\n # If none of the basic lookup arguments are provided, raise a ValueError\n if id is None and artist_amg_id is None and upc is None:\n raise ValueError(lookup_no_ids)\n\n lookup_url = _url_lookup_builder(id, artist_amg_id, upc, country, media, entity, attribute, limit)\n r = requests.get(lookup_url)\n\n try:\n json = r.json()['results']\n result_count = r.json()['resultCount']\n except:\n raise ConnectionError(general_no_connection)\n\n if result_count == 0:\n raise LookupError(lookup_error)\n\n return _get_result_list(json)", "response": "Returns the result of the lookup of the specified id artist_amg_id or upc in an array of result_item objects."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nanalyzing the provided JSON data and returns an array of result_item s based on its content", "response": "def _get_result_list(json):\n \"\"\"\n Analyzes the provided JSON data and returns an array of result_item(s) based on its content\n :param json: Raw JSON data to analyze\n :return: An array of result_item(s) from the provided JSON data\n \"\"\"\n result_list = []\n\n for item in json:\n if 'wrapperType' in item:\n # Music\n if item['wrapperType'] == 'artist' and item['artistType'] == 'Artist':\n music_artist_result = music_artist.MusicArtist(item)\n 
result_list.append(music_artist_result)\n elif item['wrapperType'] == 'collection' and item['collectionType'] == 'Album':\n music_album_result = music_album.MusicAlbum(item)\n result_list.append(music_album_result)\n elif item['wrapperType'] == 'track' and item['kind'] == 'song':\n music_track_result = track.Track(item)\n result_list.append(music_track_result)\n elif item['wrapperType'] == 'track' and item['kind'] == 'music-video':\n music_video_result = track.Track(item)\n result_list.append(music_video_result)\n # Movies\n elif item['wrapperType'] == 'artist' and item['artistType'] == 'Movie Artist':\n movie_artist_result = movie_artist.MovieArtist(item)\n result_list.append(movie_artist_result)\n elif item['wrapperType'] == 'track' and item['kind'] == 'feature-movie':\n movie_result = track.Track(item)\n result_list.append(movie_result)\n # Ebook Author\n elif item['wrapperType'] == 'artist' and item['artistType'] == 'Author':\n ebook_artist_result = ebook_artist.EbookArtist(item)\n result_list.append(ebook_artist_result)\n # Tv Shows\n elif item['wrapperType'] == 'collection' and item['collectionType'] == 'TV Season':\n tv_season_result = result_item.ResultItem(item)\n result_list.append(tv_season_result)\n elif item['wrapperType'] == 'track' and item['kind'] == 'tv-episode':\n tv_episode_result = track.Track(item)\n result_list.append(tv_episode_result)\n # Software\n elif item['wrapperType'] == 'software' and item['kind'] == 'software':\n software_result = result_item.ResultItem(item)\n result_list.append(software_result)\n elif item['wrapperType'] == 'software' and item['kind'] == 'mac-software':\n mac_software_result = result_item.ResultItem(item)\n result_list.append(mac_software_result)\n\n elif 'kind' in item and item['kind'] == 'ebook':\n ebook_result = result_item.ResultItem(item)\n result_list.append(ebook_result)\n else:\n unknown_result = result_item.ResultItem(item)\n result_list.append(unknown_result)\n\n return result_list"} {"SOURCE": 
"codesearchnet", "instruction": "Write a Python 3 function that can\nbuild the URL to perform the search based on the provided data.", "response": "def _url_search_builder(term, country='US', media='all', entity=None, attribute=None, limit=50):\n \"\"\"\n Builds the URL to perform the search based on the provided data\n :param term: String. The URL-encoded text string you want to search for. Example: Steven Wilson.\n The method will take care of spaces so you don't have to.\n :param country: String. The two-letter country code for the store you want to search.\n For a full list of the codes: http://en.wikipedia.org/wiki/%20ISO_3166-1_alpha-2\n :param media: String. The media type you want to search for. Example: music\n :param entity: String. The type of results you want returned, relative to the specified media type. Example: musicArtist.\n Full list: musicArtist, musicTrack, album, musicVideo, mix, song\n :param attribute: String. The attribute you want to search for in the stores, relative to the specified media type.\n :param limit: Integer. The number of search results you want the iTunes Store to return.\n :return: The built URL as a string\n \"\"\"\n built_url = base_search_url + _parse_query(term)\n built_url += ampersand + parameters[1] + country\n built_url += ampersand + parameters[2] + media\n\n if entity is not None:\n built_url += ampersand + parameters[3] + entity\n\n if attribute is not None:\n built_url += ampersand + parameters[4] + attribute\n\n built_url += ampersand + parameters[5] + str(limit)\n\n return built_url"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _url_lookup_builder(id=None, artist_amg_id=None, upc=None, country='US', media='music', entity=None, attribute=None,\n limit=50):\n \"\"\"\n Builds the URL to perform the lookup based on the provided data\n :param id: String. iTunes ID of the artist, album, track, ebook or software\n :param artist_amg_id: String. 
All Music Guide ID of the artist\n :param upc: String. UPCs/EANs\n :param country: String. The two-letter country code for the store you want to search.\n For a full list of the codes: http://en.wikipedia.org/wiki/%20ISO_3166-1_alpha-2\n :param media: String. The media type you want to search for. Example: music\n :param entity: String. The type of results you want returned, relative to the specified media type. Example: musicArtist.\n Full list: musicArtist, musicTrack, album, musicVideo, mix, song\n :param attribute: String. The attribute you want to search for in the stores, relative to the specified media type.\n :param limit: Integer. The number of search results you want the iTunes Store to return.\n :return: The built URL as a string\n \"\"\"\n built_url = base_lookup_url\n has_one_argument = False\n\n if id is not None:\n built_url += parameters[6] + str(id)\n has_one_argument = True\n\n if artist_amg_id is not None:\n if has_one_argument:\n built_url += ampersand + parameters[7] + artist_amg_id\n else:\n built_url += parameters[7] + str(artist_amg_id)\n has_one_argument = True\n\n if upc is not None:\n if has_one_argument:\n built_url += ampersand + parameters[8] + upc\n else:\n built_url += parameters[8] + str(upc)\n\n built_url += ampersand + parameters[1] + country\n built_url += ampersand + parameters[2] + media\n\n if entity is not None:\n built_url += ampersand + parameters[3] + entity\n\n if attribute is not None:\n built_url += ampersand + parameters[4] + attribute\n\n built_url += ampersand + parameters[5] + str(limit)\n\n return built_url", "response": "Builds the URL to perform the lookup based on the provided data."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nretrieves all the albums published by the artist", "response": "def get_albums(self):\n \"\"\"\n Retrieves all the albums by the artist\n :return: List. 
Albums published by the artist\n \"\"\"\n return itunespy.lookup(id=self.artist_id, entity=itunespy.entities['album'])[1:]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef str2fl(x):\n def helper_to_fl(s_):\n \"\"\" deals with odd string imports converts to float\"\"\"\n if s_ == \"\":\n return \"null\"\n elif \",\" in s_:\n s_ = s_.replace(\",\", \"\")\n\n try:\n return float(s_)\n except:\n return (s_)\n\n fl_lst = []\n if isinstance(x[0], str): # Check if list of strings, then sent to conversion\n for xi in range(len(x)):\n fl_lst.append(helper_to_fl(x[xi]))\n elif isinstance(x[0], list): # Check if list of lists, then recurse\n for xi in range(len(x)):\n fl_lst.append(str2fl(x[xi]))\n else:\n return False\n\n return fl_lst", "response": "Recurses through lists and converts lists of strings to float"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef procMat(materials, max_thickness, min_thickness):\r\n newmat = []\r\n newthickness = []\r\n k = materials.layerThermalCond\r\n Vhc = materials.layerVolHeat\r\n\r\n if len(materials.layerThickness) > 1:\r\n\r\n for j in range(len(materials.layerThickness)):\r\n # Break up each layer that's more than max thickness (0.05m)\r\n if materials.layerThickness[j] > max_thickness:\r\n nlayers = math.ceil(materials.layerThickness[j]/float(max_thickness))\r\n for i in range(int(nlayers)):\r\n newmat.append(Material(k[j], Vhc[j], name=materials._name))\r\n newthickness.append(materials.layerThickness[j]/float(nlayers))\r\n # Material that's less then min_thickness is not added.\r\n elif materials.layerThickness[j] < min_thickness:\r\n print(\"WARNING: Material '{}' layer found too thin (<{:.2f}cm), ignored.\").format(\r\n materials._name, min_thickness*100)\r\n else:\r\n newmat.append(Material(k[j], Vhc[j], name=materials._name))\r\n newthickness.append(materials.layerThickness[j])\r\n\r\n else:\r\n\r\n # Divide 
single layer into two (uwg assumes at least 2 layers)\r\n if materials.layerThickness[0] > max_thickness:\r\n nlayers = math.ceil(materials.layerThickness[0]/float(max_thickness))\r\n for i in range(int(nlayers)):\r\n newmat.append(Material(k[0], Vhc[0], name=materials._name))\r\n newthickness.append(materials.layerThickness[0]/float(nlayers))\r\n # Material should be at least 1cm thick, so if we're here,\r\n # should give warning and stop. Only warning given for now.\r\n elif materials.layerThickness[0] < min_thickness*2:\r\n newthickness = [min_thickness/2., min_thickness/2.]\r\n newmat = [Material(k[0], Vhc[0], name=materials._name),\r\n Material(k[0], Vhc[0], name=materials._name)]\r\n print(\"WARNING: a thin (<2cm) single material '{}' layer found. May cause error.\".format(\r\n materials._name))\r\n else:\r\n newthickness = [materials.layerThickness[0]/2., materials.layerThickness[0]/2.]\r\n newmat = [Material(k[0], Vhc[0], name=materials._name),\r\n Material(k[0], Vhc[0], name=materials._name)]\r\n return newmat, newthickness", "response": "Processes a material layer so that it is too narrowed and that it is too narrowed."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreading the UWG_Matlab input file and populate self. 
_init_param_dict with the values extracted from the UWG_Matlab parameter file.", "response": "def read_input(self):\r\n \"\"\"Section 3 - Read Input File (.m, file)\r\n Note: UWG_Matlab input files are xlsm, XML, .m, file.\r\n properties:\r\n self._init_param_dict # dictionary of simulation initialization parameters\r\n\r\n self.sensAnth # non-building sensible heat (W/m^2)\r\n self.SchTraffic # Traffice schedule\r\n\r\n self.BEM # list of BEMDef objects extracted from readDOE\r\n self.Sch # list of Schedule objects extracted from readDOE\r\n\r\n \"\"\"\r\n\r\n uwg_param_file_path = os.path.join(self.uwgParamDir, self.uwgParamFileName)\r\n\r\n if not os.path.exists(uwg_param_file_path):\r\n raise Exception(\"Param file: '{}' does not exist.\".format(uwg_param_file_path))\r\n\r\n # Open .uwg file and feed csv data to initializeDataFile\r\n try:\r\n uwg_param_data = utilities.read_csv(uwg_param_file_path)\r\n except Exception as e:\r\n raise Exception(\"Failed to read .uwg file! {}\".format(e.message))\r\n\r\n # The initialize.uwg is read with a dictionary so that users changing\r\n # line endings or line numbers doesn't make reading input incorrect\r\n self._init_param_dict = {}\r\n count = 0\r\n while count < len(uwg_param_data):\r\n row = uwg_param_data[count]\r\n row = [row[i].replace(\" \", \"\") for i in range(len(row))] # strip white spaces\r\n\r\n # Optional parameters might be empty so handle separately\r\n is_optional_parameter = (\r\n row != [] and\r\n (\r\n row[0] == \"albRoof\" or\r\n row[0] == \"vegRoof\" or\r\n row[0] == \"glzR\" or\r\n row[0] == \"hvac\" or\r\n row[0] == \"albWall\" or\r\n row[0] == \"SHGC\"\r\n )\r\n )\r\n try:\r\n if row == [] or \"#\" in row[0]:\r\n count += 1\r\n continue\r\n elif row[0] == \"SchTraffic\":\r\n # SchTraffic: 3 x 24 matrix\r\n trafficrows = uwg_param_data[count+1:count+4]\r\n self._init_param_dict[row[0]] = [utilities.str2fl(r[:24]) for r in trafficrows]\r\n count += 4\r\n elif row[0] == \"bld\":\r\n # bld: 17 x 3 
matrix\r\n bldrows = uwg_param_data[count+1:count+17]\r\n self._init_param_dict[row[0]] = [utilities.str2fl(r[:3]) for r in bldrows]\r\n count += 17\r\n elif is_optional_parameter:\r\n self._init_param_dict[row[0]] = float(row[1]) if row[1] != \"\" else None\r\n count += 1\r\n else:\r\n self._init_param_dict[row[0]] = float(row[1])\r\n count += 1\r\n except ValueError:\r\n print(\"Error while reading parameter at {} {}\".format(count, row))\r\n\r\n ipd = self._init_param_dict\r\n\r\n # Define Simulation and Weather parameters\r\n if self.Month is None: self.Month = ipd['Month']\r\n if self.Day is None: self.Day = ipd['Day']\r\n if self.nDay is None: self.nDay = ipd['nDay']\r\n if self.dtSim is None: self.dtSim = ipd['dtSim']\r\n if self.dtWeather is None: self.dtWeather = ipd['dtWeather']\r\n\r\n # HVAC system and internal laod\r\n if self.autosize is None: self.autosize = ipd['autosize']\r\n if self.sensOcc is None: self.sensOcc = ipd['sensOcc']\r\n if self.LatFOcc is None: self.LatFOcc = ipd['LatFOcc']\r\n if self.RadFOcc is None: self.RadFOcc = ipd['RadFOcc']\r\n if self.RadFEquip is None: self.RadFEquip = ipd['RadFEquip']\r\n if self.RadFLight is None: self.RadFLight = ipd['RadFLight']\r\n\r\n # Define Urban microclimate parameters\r\n if self.h_ubl1 is None: self.h_ubl1 = ipd['h_ubl1']\r\n if self.h_ubl2 is None: self.h_ubl2 = ipd['h_ubl2']\r\n if self.h_ref is None: self.h_ref = ipd['h_ref']\r\n if self.h_temp is None: self.h_temp = ipd['h_temp']\r\n if self.h_wind is None: self.h_wind = ipd['h_wind']\r\n if self.c_circ is None: self.c_circ = ipd['c_circ']\r\n if self.c_exch is None: self.c_exch = ipd['c_exch']\r\n if self.maxDay is None: self.maxDay = ipd['maxDay']\r\n if self.maxNight is None: self.maxNight = ipd['maxNight']\r\n if self.windMin is None: self.windMin = ipd['windMin']\r\n if self.h_obs is None: self.h_obs = ipd['h_obs']\r\n\r\n # Urban characteristics\r\n if self.bldHeight is None: self.bldHeight = ipd['bldHeight']\r\n if self.h_mix is None: 
self.h_mix = ipd['h_mix']\r\n if self.bldDensity is None: self.bldDensity = ipd['bldDensity']\r\n if self.verToHor is None: self.verToHor = ipd['verToHor']\r\n if self.charLength is None: self.charLength = ipd['charLength']\r\n if self.alb_road is None: self.alb_road = ipd['albRoad']\r\n if self.d_road is None: self.d_road = ipd['dRoad']\r\n if self.sensAnth is None: self.sensAnth = ipd['sensAnth']\r\n # if self.latAnth is None: self.latAnth = ipd['latAnth'] # Not used, taken out by JH.\r\n\r\n # climate Zone\r\n if self.zone is None: self.zone = ipd['zone']\r\n\r\n # Vegetation parameters\r\n if self.vegCover is None: self.vegCover = ipd['vegCover']\r\n if self.treeCoverage is None: self.treeCoverage = ipd['treeCoverage']\r\n if self.vegStart is None: self.vegStart = ipd['vegStart']\r\n if self.vegEnd is None: self.vegEnd = ipd['vegEnd']\r\n if self.albVeg is None: self.albVeg = ipd['albVeg']\r\n if self.rurVegCover is None: self.rurVegCover = ipd['rurVegCover']\r\n if self.latGrss is None: self.latGrss = ipd['latGrss']\r\n if self.latTree is None: self.latTree = ipd['latTree']\r\n\r\n # Define Traffic schedule\r\n if self.SchTraffic is None: self.SchTraffic = ipd['SchTraffic']\r\n\r\n # Define Road (Assume 0.5m of asphalt)\r\n if self.kRoad is None: self.kRoad = ipd['kRoad']\r\n if self.cRoad is None: self.cRoad = ipd['cRoad']\r\n\r\n # Building stock fraction\r\n if self.bld is None: self.bld = ipd['bld']\r\n\r\n # Optional parameters\r\n if self.albRoof is None: self.albRoof = ipd['albRoof']\r\n if self.vegRoof is None: self.vegRoof = ipd['vegRoof']\r\n if self.glzR is None: self.glzR = ipd['glzR']\r\n if self.albWall is None: self.albWall = ipd['albWall']\r\n if self.SHGC is None: self.SHGC = ipd['SHGC']"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsetting inputs from. 
uwg input file if not already defined.", "response": "def set_input(self):\r\n \"\"\" Set inputs from .uwg input file if not already defined, the check if all\r\n the required input parameters are there.\r\n \"\"\"\r\n\r\n # If a uwgParamFileName is set, then read inputs from .uwg file.\r\n # User-defined class properties will override the inputs from the .uwg file.\r\n if self.uwgParamFileName is not None:\r\n print(\"\\nReading uwg file input.\")\r\n self.read_input()\r\n else:\r\n print(\"\\nNo .uwg file input.\")\r\n\r\n self.check_required_inputs()\r\n\r\n # Modify zone to be used as python index\r\n self.zone = int(self.zone)-1"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef init_BEM_obj(self):\r\n\r\n if not os.path.exists(self.readDOE_file_path):\r\n raise Exception(\"readDOE.pkl file: '{}' does not exist.\".format(readDOE_file_path))\r\n\r\n readDOE_file = open(self.readDOE_file_path, 'rb') # open pickle file in binary form\r\n refDOE = pickle.load(readDOE_file)\r\n refBEM = pickle.load(readDOE_file)\r\n refSchedule = pickle.load(readDOE_file)\r\n readDOE_file.close()\r\n\r\n # Define building energy models\r\n k = 0\r\n self.r_glaze_total = 0. # Glazing ratio for total building stock\r\n self.SHGC_total = 0. # SHGC addition for total building stock\r\n self.alb_wall_total = 0. 
# albedo wall addition for total building stock\r\n h_floor = self.flr_h or 3.05 # average floor height\r\n\r\n total_urban_bld_area = math.pow(self.charLength, 2)*self.bldDensity * \\\r\n self.bldHeight/h_floor # total building floor area\r\n area_matrix = utilities.zeros(16, 3)\r\n\r\n self.BEM = [] # list of BEMDef objects\r\n self.Sch = [] # list of Schedule objects\r\n\r\n for i in range(16): # 16 building types\r\n for j in range(3): # 3 built eras\r\n if self.bld[i][j] > 0.:\r\n # Add to BEM list\r\n self.BEM.append(refBEM[i][j][self.zone])\r\n self.BEM[k].frac = self.bld[i][j]\r\n self.BEM[k].fl_area = self.bld[i][j] * total_urban_bld_area\r\n\r\n # Overwrite with optional parameters if provided\r\n if self.glzR:\r\n self.BEM[k].building.glazingRatio = self.glzR\r\n if self.albRoof:\r\n self.BEM[k].roof.albedo = self.albRoof\r\n if self.vegRoof:\r\n self.BEM[k].roof.vegCoverage = self.vegRoof\r\n if self.SHGC:\r\n self.BEM[k].building.shgc = self.SHGC\r\n if self.albWall:\r\n self.BEM[k].wall.albedo = self.albWall\r\n if self.flr_h:\r\n self.BEM[k].building.floorHeight = self.flr_h\r\n\r\n # Keep track of total urban r_glaze, SHGC, and alb_wall for UCM model\r\n self.r_glaze_total += self.BEM[k].frac * self.BEM[k].building.glazingRatio\r\n self.SHGC_total += self.BEM[k].frac * self.BEM[k].building.shgc\r\n self.alb_wall_total += self.BEM[k].frac * self.BEM[k].wall.albedo\r\n # Add to schedule list\r\n self.Sch.append(refSchedule[i][j][self.zone])\r\n k += 1", "response": "Initialize the instance variables for the BEM and Sch objects."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate uwg objects from input parameters and return the new object.", "response": "def init_input_obj(self):\r\n \"\"\"Section 4 - Create uwg objects from input parameters\r\n\r\n self.simTime # simulation time parameter obj\r\n self.weather # weather obj for simulation time period\r\n self.forcIP # Forcing obj\r\n self.forc # Empty forcing 
obj\r\n self.geoParam # geographic parameters obj\r\n self.RSM # Rural site & vertical diffusion model obj\r\n self.USM # Urban site & vertical diffusion model obj\r\n self.UCM # Urban canopy model obj\r\n self.UBL # Urban boundary layer model\r\n\r\n self.road # urban road element\r\n self.rural # rural road element\r\n\r\n self.soilindex1 # soil index for urban rsoad depth\r\n self.soilindex2 # soil index for rural road depth\r\n self.Sch # list of Schedule objects\r\n \"\"\"\r\n\r\n climate_file_path = os.path.join(self.epwDir, self.epwFileName)\r\n\r\n self.simTime = SimParam(self.dtSim, self.dtWeather, self.Month,\r\n self.Day, self.nDay) # simulation time parametrs\r\n # weather file data for simulation time period\r\n self.weather = Weather(climate_file_path, self.simTime.timeInitial, self.simTime.timeFinal)\r\n self.forcIP = Forcing(self.weather.staTemp, self.weather) # initialized Forcing class\r\n self.forc = Forcing() # empty forcing class\r\n\r\n # Initialize geographic Param and Urban Boundary Layer Objects\r\n nightStart = 18. # arbitrary values for begin/end hour for night setpoint\r\n nightEnd = 8.\r\n maxdx = 250. 
# max dx (m)\r\n\r\n self.geoParam = Param(self.h_ubl1, self.h_ubl2, self.h_ref, self.h_temp, self.h_wind, self.c_circ,\r\n self.maxDay, self.maxNight, self.latTree, self.latGrss, self.albVeg, self.vegStart, self.vegEnd,\r\n nightStart, nightEnd, self.windMin, self.WGMAX, self.c_exch, maxdx, self.G, self.CP, self.VK, self.R,\r\n self.RV, self.LV, math.pi, self.SIGMA, self.WATERDENS, self.LVTT, self.TT, self.ESTT, self.CL,\r\n self.CPV, self.B, self.CM, self.COLBURN)\r\n\r\n self.UBL = UBLDef(\r\n 'C', self.charLength, self.weather.staTemp[0], maxdx, self.geoParam.dayBLHeight, self.geoParam.nightBLHeight)\r\n\r\n # Defining road\r\n emis = 0.93\r\n asphalt = Material(self.kRoad, self.cRoad, 'asphalt')\r\n road_T_init = 293.\r\n road_horizontal = 1\r\n # fraction of surface vegetation coverage\r\n road_veg_coverage = min(self.vegCover/(1-self.bldDensity), 1.)\r\n\r\n # define road layers\r\n road_layer_num = int(math.ceil(self.d_road/0.05))\r\n # 0.5/0.05 ~ 10 x 1 matrix of 0.05 thickness\r\n thickness_vector = [0.05 for r in range(road_layer_num)]\r\n material_vector = [asphalt for r in range(road_layer_num)]\r\n\r\n self.road = Element(self.alb_road, emis, thickness_vector, material_vector, road_veg_coverage,\r\n road_T_init, road_horizontal, name=\"urban_road\")\r\n\r\n self.rural = copy.deepcopy(self.road)\r\n self.rural.vegCoverage = self.rurVegCover\r\n self.rural._name = \"rural_road\"\r\n\r\n # Reference site class (also include VDM)\r\n self.RSM = RSMDef(self.lat, self.lon, self.GMT, self.h_obs,\r\n self.weather.staTemp[0], self.weather.staPres[0], self.geoParam, self.z_meso_dir_path)\r\n self.USM = RSMDef(self.lat, self.lon, self.GMT, self.bldHeight/10.,\r\n self.weather.staTemp[0], self.weather.staPres[0], self.geoParam, self.z_meso_dir_path)\r\n\r\n T_init = self.weather.staTemp[0]\r\n H_init = self.weather.staHum[0]\r\n\r\n self.UCM = UCMDef(self.bldHeight, self.bldDensity, self.verToHor, self.treeCoverage, self.sensAnth, self.latAnth, T_init, 
H_init,\r\n self.weather.staUmod[0], self.geoParam, self.r_glaze_total, self.SHGC_total, self.alb_wall_total, self.road)\r\n self.UCM.h_mix = self.h_mix\r\n\r\n # Define Road Element & buffer to match ground temperature depth\r\n roadMat, newthickness = procMat(self.road, self.MAXTHICKNESS, self.MINTHICKNESS)\r\n\r\n for i in range(self.nSoil):\r\n # if soil depth is greater then the thickness of the road\r\n # we add new slices of soil at max thickness until road is greater or equal\r\n\r\n is_soildepth_equal = self.is_near_zero(self.depth_soil[i][0] - sum(newthickness), 1e-15)\r\n\r\n if is_soildepth_equal or (self.depth_soil[i][0] > sum(newthickness)):\r\n while self.depth_soil[i][0] > sum(newthickness):\r\n newthickness.append(self.MAXTHICKNESS)\r\n roadMat.append(self.SOIL)\r\n self.soilindex1 = i\r\n break\r\n\r\n self.road = Element(self.road.albedo, self.road.emissivity, newthickness, roadMat,\r\n self.road.vegCoverage, self.road.layerTemp[0], self.road.horizontal, self.road._name)\r\n\r\n # Define Rural Element\r\n ruralMat, newthickness = procMat(self.rural, self.MAXTHICKNESS, self.MINTHICKNESS)\r\n\r\n for i in range(self.nSoil):\r\n # if soil depth is greater then the thickness of the road\r\n # we add new slices of soil at max thickness until road is greater or equal\r\n\r\n is_soildepth_equal = self.is_near_zero(self.depth_soil[i][0] - sum(newthickness), 1e-15)\r\n\r\n if is_soildepth_equal or (self.depth_soil[i][0] > sum(newthickness)):\r\n while self.depth_soil[i][0] > sum(newthickness):\r\n newthickness.append(self.MAXTHICKNESS)\r\n ruralMat.append(self.SOIL)\r\n\r\n self.soilindex2 = i\r\n break\r\n\r\n self.rural = Element(self.rural.albedo, self.rural.emissivity, newthickness,\r\n ruralMat, self.rural.vegCoverage, self.rural.layerTemp[0], self.rural.horizontal, self.rural._name)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef hvac_autosize(self):\r\n\r\n for i in 
range(len(self.BEM)):\r\n if self.is_near_zero(self.autosize) == False:\r\n self.BEM[i].building.coolCap = 9999.\r\n self.BEM[i].building.heatCap = 9999.", "response": "A method to set the cooling and heating cap of all the related resources."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef simulate(self):\r\n\r\n self.N = int(self.simTime.days * 24) # total number of hours in simulation\r\n n = 0 # weather time step counter\r\n self.ph = self.simTime.dt/3600. # dt (simulation time step) in hours\r\n\r\n # Data dump variables\r\n time = range(self.N)\r\n\r\n self.WeatherData = [None for x in range(self.N)]\r\n self.UCMData = [None for x in range(self.N)]\r\n self.UBLData = [None for x in range(self.N)]\r\n self.RSMData = [None for x in range(self.N)]\r\n self.USMData = [None for x in range(self.N)]\r\n\r\n print('\\nSimulating new temperature and humidity values for {} days from {}/{}.\\n'.format(\r\n int(self.nDay), int(self.Month), int(self.Day)))\r\n self.logger.info(\"Start simulation\")\r\n\r\n for it in range(1, self.simTime.nt, 1): # for every simulation time-step (i.e 5 min) defined by uwg\r\n # Update water temperature (estimated)\r\n if self.nSoil < 3: # correction to original matlab code\r\n # for BUBBLE/CAPITOUL/Singapore only\r\n self.forc.deepTemp = sum(self.forcIP.temp)/float(len(self.forcIP.temp))\r\n self.forc.waterTemp = sum(\r\n self.forcIP.temp)/float(len(self.forcIP.temp)) - 10. 
# for BUBBLE/CAPITOUL/Singapore only\r\n else:\r\n # soil temperature by depth, by month\r\n self.forc.deepTemp = self.Tsoil[self.soilindex1][self.simTime.month-1]\r\n self.forc.waterTemp = self.Tsoil[2][self.simTime.month-1]\r\n\r\n # There's probably a better way to update the weather...\r\n self.simTime.UpdateDate()\r\n\r\n self.logger.info(\"\\n{0} m={1}, d={2}, h={3}, s={4}\".format(\r\n __name__, self.simTime.month, self.simTime.day, self.simTime.secDay/3600., self.simTime.secDay))\r\n\r\n # simulation time increment raised to weather time step\r\n self.ceil_time_step = int(math.ceil(it * self.ph))-1\r\n # minus one to be consistent with forcIP list index\r\n # Updating forcing instance\r\n # horizontal Infrared Radiation Intensity (W m-2)\r\n self.forc.infra = self.forcIP.infra[self.ceil_time_step]\r\n # wind speed (m s-1)\r\n self.forc.wind = max(self.forcIP.wind[self.ceil_time_step], self.geoParam.windMin)\r\n self.forc.uDir = self.forcIP.uDir[self.ceil_time_step] # wind direction\r\n # specific humidty (kg kg-1)\r\n self.forc.hum = self.forcIP.hum[self.ceil_time_step]\r\n self.forc.pres = self.forcIP.pres[self.ceil_time_step] # Pressure (Pa)\r\n self.forc.temp = self.forcIP.temp[self.ceil_time_step] # air temperature (C)\r\n self.forc.rHum = self.forcIP.rHum[self.ceil_time_step] # Relative humidity (%)\r\n self.forc.prec = self.forcIP.prec[self.ceil_time_step] # Precipitation (mm h-1)\r\n # horizontal solar diffuse radiation (W m-2)\r\n self.forc.dif = self.forcIP.dif[self.ceil_time_step]\r\n # normal solar direct radiation (W m-2)\r\n self.forc.dir = self.forcIP.dir[self.ceil_time_step]\r\n # Canyon humidity (absolute) same as rural\r\n self.UCM.canHum = copy.copy(self.forc.hum)\r\n\r\n # Update solar flux\r\n self.solar = SolarCalcs(self.UCM, self.BEM, self.simTime,\r\n self.RSM, self.forc, self.geoParam, self.rural)\r\n self.rural, self.UCM, self.BEM = self.solar.solarcalcs()\r\n\r\n # Update building & traffic schedule\r\n # Assign day type (1 = 
weekday, 2 = sat, 3 = sun/other)\r\n if self.is_near_zero(self.simTime.julian % 7):\r\n self.dayType = 3 # Sunday\r\n elif self.is_near_zero(self.simTime.julian % 7 - 6.):\r\n self.dayType = 2 # Saturday\r\n else:\r\n self.dayType = 1 # Weekday\r\n\r\n # Update anthropogenic heat load for each hour (building & UCM)\r\n self.UCM.sensAnthrop = self.sensAnth * (self.SchTraffic[self.dayType-1][self.simTime.hourDay])\r\n\r\n # Update the energy components for building types defined in initialize.uwg\r\n for i in range(len(self.BEM)):\r\n # Set temperature\r\n self.BEM[i].building.coolSetpointDay = self.Sch[i].Cool[self.dayType -\r\n 1][self.simTime.hourDay] + 273.15 # add from temperature schedule for cooling\r\n self.BEM[i].building.coolSetpointNight = self.BEM[i].building.coolSetpointDay\r\n self.BEM[i].building.heatSetpointDay = self.Sch[i].Heat[self.dayType -\r\n 1][self.simTime.hourDay] + 273.15 # add from temperature schedule for heating\r\n self.BEM[i].building.heatSetpointNight = self.BEM[i].building.heatSetpointDay\r\n\r\n # Internal Heat Load Schedule (W/m^2 of floor area for Q)\r\n self.BEM[i].Elec = self.Sch[i].Qelec * self.Sch[i].Elec[self.dayType -\r\n 1][self.simTime.hourDay] # Qelec x elec fraction for day\r\n self.BEM[i].Light = self.Sch[i].Qlight * self.Sch[i].Light[self.dayType -\r\n 1][self.simTime.hourDay] # Qlight x light fraction for day\r\n self.BEM[i].Nocc = self.Sch[i].Nocc * self.Sch[i].Occ[self.dayType -\r\n 1][self.simTime.hourDay] # Number of occupants x occ fraction for day\r\n # Sensible Q occupant * fraction occupant sensible Q * number of occupants\r\n self.BEM[i].Qocc = self.sensOcc * (1 - self.LatFOcc) * self.BEM[i].Nocc\r\n\r\n # SWH and ventilation schedule\r\n self.BEM[i].SWH = self.Sch[i].Vswh * self.Sch[i].SWH[self.dayType -\r\n 1][self.simTime.hourDay] # litres per hour x SWH fraction for day\r\n # m^3/s/m^2 of floor\r\n self.BEM[i].building.vent = self.Sch[i].Vent\r\n self.BEM[i].Gas = self.Sch[i].Qgas * 
self.Sch[i].Gas[self.dayType -\r\n 1][self.simTime.hourDay] # Gas Equip Schedule, per m^2 of floor\r\n\r\n # This is quite messy, should update\r\n # Update internal heat and corresponding fractional loads\r\n intHeat = self.BEM[i].Light + self.BEM[i].Elec + self.BEM[i].Qocc\r\n # W/m2 from light, electricity, occupants\r\n self.BEM[i].building.intHeatDay = intHeat\r\n self.BEM[i].building.intHeatNight = intHeat\r\n # fraction of radiant heat from light and equipment of whole internal heat\r\n self.BEM[i].building.intHeatFRad = (\r\n self.RadFLight * self.BEM[i].Light + self.RadFEquip * self.BEM[i].Elec) / intHeat\r\n # fraction of latent heat (from occupants) of whole internal heat\r\n self.BEM[i].building.intHeatFLat = self.LatFOcc * \\\r\n self.sensOcc * self.BEM[i].Nocc/intHeat\r\n\r\n # Update envelope temperature layers\r\n self.BEM[i].T_wallex = self.BEM[i].wall.layerTemp[0]\r\n self.BEM[i].T_wallin = self.BEM[i].wall.layerTemp[-1]\r\n self.BEM[i].T_roofex = self.BEM[i].roof.layerTemp[0]\r\n self.BEM[i].T_roofin = self.BEM[i].roof.layerTemp[-1]\r\n\r\n # Update rural heat fluxes & update vertical diffusion model (VDM)\r\n self.rural.infra = self.forc.infra - self.rural.emissivity * self.SIGMA * \\\r\n self.rural.layerTemp[0]**4. 
# Infrared radiation from rural road\r\n\r\n self.rural.SurfFlux(self.forc, self.geoParam, self.simTime,\r\n self.forc.hum, self.forc.temp, self.forc.wind, 2., 0.)\r\n self.RSM.VDM(self.forc, self.rural, self.geoParam, self.simTime)\r\n\r\n # Calculate urban heat fluxes, update UCM & UBL\r\n self.UCM, self.UBL, self.BEM = urbflux(\r\n self.UCM, self.UBL, self.BEM, self.forc, self.geoParam, self.simTime, self.RSM)\r\n self.UCM.UCModel(self.BEM, self.UBL.ublTemp, self.forc, self.geoParam)\r\n self.UBL.UBLModel(self.UCM, self.RSM, self.rural,\r\n self.forc, self.geoParam, self.simTime)\r\n\r\n \"\"\"\r\n # Experimental code to run diffusion model in the urban area\r\n # N.B Commented out in python uwg because computed wind speed in\r\n # urban VDM: y = =0.84*ln((2-x/20)/0.51) results in negative log\r\n # for building heights >= 40m.\r\n\r\n Uroad = copy.copy(self.UCM.road)\r\n Uroad.sens = copy.copy(self.UCM.sensHeat)\r\n Uforc = copy.copy(self.forc)\r\n Uforc.wind = copy.copy(self.UCM.canWind)\r\n Uforc.temp = copy.copy(self.UCM.canTemp)\r\n self.USM.VDM(Uforc,Uroad,self.geoParam,self.simTime)\r\n \"\"\"\r\n\r\n self.logger.info(\"dbT = {}\".format(self.UCM.canTemp-273.15))\r\n if n > 0:\r\n logging.info(\"dpT = {}\".format(self.UCM.Tdp))\r\n logging.info(\"RH = {}\".format(self.UCM.canRHum))\r\n\r\n if self.is_near_zero(self.simTime.secDay % self.simTime.timePrint) and n < self.N:\r\n\r\n self.logger.info(\"{0} ----sim time step = {1}----\\n\\n\".format(__name__, n))\r\n\r\n self.WeatherData[n] = copy.copy(self.forc)\r\n _Tdb, _w, self.UCM.canRHum, _h, self.UCM.Tdp, _v = psychrometrics(\r\n self.UCM.canTemp, self.UCM.canHum, self.forc.pres)\r\n\r\n self.UBLData[n] = copy.copy(self.UBL)\r\n self.UCMData[n] = copy.copy(self.UCM)\r\n self.RSMData[n] = copy.copy(self.RSM)\r\n\r\n self.logger.info(\"dbT = {}\".format(self.UCMData[n].canTemp-273.15))\r\n self.logger.info(\"dpT = {}\".format(self.UCMData[n].Tdp))\r\n self.logger.info(\"RH = 
{}\".format(self.UCMData[n].canRHum))\r\n\r\n n += 1", "response": "Create a new object instance with the given information."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nwrite the new EPW file.", "response": "def write_epw(self):\r\n \"\"\" Section 8 - Writing new EPW file\r\n \"\"\"\r\n epw_prec = self.epw_precision # precision of epw file input\r\n\r\n for iJ in range(len(self.UCMData)):\r\n # [iJ+self.simTime.timeInitial-8] = increments along every weather timestep in epw\r\n # [6 to 21] = column data of epw\r\n self.epwinput[iJ+self.simTime.timeInitial-8][6] = \"{0:.{1}f}\".format(\r\n self.UCMData[iJ].canTemp - 273.15, epw_prec) # dry bulb temperature [?C]\r\n # dew point temperature [?C]\r\n self.epwinput[iJ+self.simTime.timeInitial -\r\n 8][7] = \"{0:.{1}f}\".format(self.UCMData[iJ].Tdp, epw_prec)\r\n # relative humidity [%]\r\n self.epwinput[iJ+self.simTime.timeInitial -\r\n 8][8] = \"{0:.{1}f}\".format(self.UCMData[iJ].canRHum, epw_prec)\r\n self.epwinput[iJ+self.simTime.timeInitial-8][21] = \"{0:.{1}f}\".format(\r\n self.WeatherData[iJ].wind, epw_prec) # wind speed [m/s]\r\n\r\n # Writing new EPW file\r\n epw_new_id = open(self.newPathName, \"w\")\r\n\r\n for i in range(8):\r\n new_epw_line = '{}\\n'.format(reduce(lambda x, y: x+\",\"+y, self._header[i]))\r\n epw_new_id.write(new_epw_line)\r\n\r\n for i in range(len(self.epwinput)):\r\n printme = \"\"\r\n for ei in range(34):\r\n printme += \"{}\".format(self.epwinput[i][ei]) + ','\r\n printme = printme + \"{}\".format(self.epwinput[i][ei])\r\n new_epw_line = \"{0}\\n\".format(printme)\r\n epw_new_id.write(new_epw_line)\r\n\r\n epw_new_id.close()\r\n\r\n print(\"New climate file '{}' is generated at {}.\".format(\r\n self.destinationFileName, self.destinationDir))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncalculate net heat flux and updates element layer temperatures with new values.", "response": "def 
SurfFlux(self,forc,parameter,simTime,humRef,tempRef,windRef,boundCond,intFlux):\n \"\"\" Calculate net heat flux, and update element layer temperatures\n \"\"\"\n\n # Calculated per unit area (m^2)\n dens = forc.pres/(1000*0.287042*tempRef*(1.+1.607858*humRef)) # air density (kgd m-3)\n self.aeroCond = 5.8 + 3.7 * windRef # Convection coef (ref: uwg, eq. 12))\n\n if (self.horizontal): # For roof, mass, road\n # Evaporation (m s-1), Film water & soil latent heat\n if not self.is_near_zero(self.waterStorage) and self.waterStorage > 0.0:\n # N.B In the current uwg code, latent heat from evapotranspiration, stagnant water,\n # or anthropogenic sources is not modelled due to the difficulty of validation, and\n # lack of reliability of precipitation data from EPW files.Therefore this condition\n # is never run because all elements have had their waterStorage hardcoded to 0.\n qtsat = self.qsat([self.layerTemp[0]],[forc.pres],parameter)[0]\n eg = self.aeroCond*parameter.colburn*dens*(qtsat-humRef)/parameter.waterDens/parameter.cp\n self.waterStorage = min(self.waterStorage + simTime.dt*(forc.prec-eg),parameter.wgmax)\n self.waterStorage = max(self.waterStorage,0.) 
# (m)\n else:\n eg = 0.\n soilLat = eg*parameter.waterDens*parameter.lv\n\n # Winter, no veg\n if simTime.month < parameter.vegStart and simTime.month > parameter.vegEnd:\n self.solAbs = (1.-self.albedo)*self.solRec # (W m-2)\n vegLat = 0.\n vegSens = 0.\n else: # Summer, veg\n self.solAbs = ((1.-self.vegCoverage)*(1.-self.albedo)+self.vegCoverage*(1.-parameter.vegAlbedo))*self.solRec\n vegLat = self.vegCoverage*parameter.grassFLat*(1.-parameter.vegAlbedo)*self.solRec\n vegSens = self.vegCoverage*(1.-parameter.grassFLat)*(1.-parameter.vegAlbedo)*self.solRec\n\n self.lat = soilLat + vegLat\n # Sensible & net heat flux\n self.sens = vegSens + self.aeroCond*(self.layerTemp[0]-tempRef)\n self.flux = -self.sens + self.solAbs + self.infra - self.lat # (W m-2)\n\n else: # For vertical surfaces (wall)\n self.solAbs = (1.-self.albedo)*self.solRec\n self.lat = 0.\n\n # Sensible & net heat flux\n self.sens = self.aeroCond*(self.layerTemp[0]-tempRef)\n self.flux = -self.sens + self.solAbs + self.infra - self.lat # (W m-2)\n\n self.layerTemp = self.Conduction(simTime.dt, self.flux, boundCond, forc.deepTemp, intFlux)\n self.T_ext = self.layerTemp[0]\n self.T_int = self.layerTemp[-1]"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef Conduction(self, dt, flx1, bc, temp2, flx2):\n t = self.layerTemp # vector of layer temperatures (K)\n hc = self.layerVolHeat # vector of layer volumetric heat (J m-3 K-1)\n tc = self.layerThermalCond # vector of layer thermal conductivities (W m-1 K-1)\n d = self.layerThickness # vector of layer thicknesses (m)\n\n # flx1 : net heat flux on surface\n # bc : boundary condition parameter (1 or 2)\n # temp2 : deep soil temperature (avg of air temperature)\n # flx2 : surface flux (sum of absorbed, emitted, etc.)\n\n fimp = 0.5 # implicit coefficient\n fexp = 0.5 # explicit coefficient\n num = len(t) # number of layers\n\n # Mean thermal conductivity over distance between 2 layers (W/mK)\n 
tcp = [0 for x in range(num)]\n # Thermal capacity times layer depth (J/m2K)\n hcp = [0 for x in range(num)]\n # lower, main, and upper diagonals\n za = [[0 for y in range(3)] for x in range(num)]\n # RHS\n zy = [0 for x in range(num)]\n\n #--------------------------------------------------------------------------\n # Define the column vectors for heat capactiy and conductivity\n hcp[0] = hc[0] * d[0]\n for j in range(1,num):\n tcp[j] = 2. / (d[j-1] / tc[j-1] + d[j] / tc[j])\n hcp[j] = hc[j] * d[j]\n\n #--------------------------------------------------------------------------\n # Define the first row of za matrix, and RHS column vector\n za[0][0] = 0.\n za[0][1] = hcp[0]/dt + fimp*tcp[1]\n za[0][2] = -fimp*tcp[1]\n zy[0] = hcp[0]/dt*t[0] - fexp*tcp[1]*(t[0]-t[1]) + flx1\n\n #--------------------------------------------------------------------------\n # Define other rows\n for j in range(1,num-1):\n za[j][0] = fimp*(-tcp[j])\n za[j][1] = hcp[j]/dt + fimp*(tcp[j]+tcp[j+1])\n za[j][2] = fimp*(-tcp[j+1])\n zy[j] = hcp[j]/dt * t[j] + fexp * \\\n (tcp[j]*t[j-1] - tcp[j]*t[j] - tcp[j+1]*t[j] + tcp[j+1]*t[j+1])\n\n #--------------------------------------------------------------------------\n # Boundary conditions\n if self.is_near_zero(bc-1.): # heat flux\n za[num-1][0] = fimp * (-tcp[num-1])\n za[num-1][1] = hcp[num-1]/dt + fimp*tcp[num-1]\n za[num-1][2] = 0.\n zy[num-1] = hcp[num-1]/dt*t[num-1] + fexp*tcp[num-1]*(t[num-2]-t[num-1]) + flx2\n elif self.is_near_zero(bc-2.): # deep-temperature\n za[num-1][0] = 0.\n za[num-1][1] = 1.\n za[num-1][2] = 0.\n zy[num-1] = temp2\n else:\n raise Exception(self.CONDUCTION_INPUT_MSG)\n\n #--------------------------------------------------------------------------\n\n zx = self.invert(num,za,zy)\n #t(:) = zx(:);\n return zx", "response": "Solve the conductance of heat based on the element layers."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncalculates the QSAT vector of saturation humidity from 
a set of temperature and pressure.", "response": "def qsat(self,temp,pres,parameter):\n \"\"\"\n Calculate (qsat_lst) vector of saturation humidity from:\n temp = vector of element layer temperatures\n pres = pressure (at current timestep).\n \"\"\"\n gamw = (parameter.cl - parameter.cpv) / parameter.rv\n betaw = (parameter.lvtt/parameter.rv) + (gamw * parameter.tt)\n alpw = math.log(parameter.estt) + (betaw /parameter.tt) + (gamw * math.log(parameter.tt))\n work2 = parameter.r/parameter.rv\n foes_lst = [0 for i in range(len(temp))]\n work1_lst = [0 for i in range(len(temp))]\n qsat_lst = [0 for i in range(len(temp))]\n\n for i in range(len(temp)):\n # saturation vapor pressure\n foes_lst[i] = math.exp( alpw - betaw/temp[i] - gamw*math.log(temp[i]) )\n work1_lst[i] = foes_lst[i]/pres[i]\n # saturation humidity\n qsat_lst[i] = work2*work1_lst[i] / (1. + (work2-1.) * work1_lst[i])\n\n return qsat_lst"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef readDOE(serialize_output=True):\n\n #Nested, nested lists of Building, SchDef, BEMDef objects\n refDOE = [[[None]*16 for k_ in range(3)] for j_ in range(16)] #refDOE(16,3,16) = Building\n Schedule = [[[None]*16 for k_ in range(3)] for j_ in range(16)] #Schedule (16,3,16) = SchDef\n refBEM = [[[None]*16 for k_ in range(3)] for j_ in range(16)] #refBEM (16,3,16) = BEMDef\n\n #Purpose: Loop through every DOE reference csv and extract building data\n #Nested loop = 16 types, 3 era, 16 zones = time complexity O(n*m*k) = 768\n for i in range(16):\n\n #i = 16 types of buildings\n #print \"\\tType: {} @i={}\".format(BLDTYPE[i], i)\n\n # Read building summary (Sheet 1)\n file_doe_name_bld = os.path.join(\"{}\".format(DIR_DOE_PATH), \"BLD{}\".format(i+1),\"BLD{}_BuildingSummary.csv\".format(i+1))\n list_doe1 = read_csv(file_doe_name_bld)\n #listof(listof 3 era values)\n nFloor = str2fl(list_doe1[3][3:6]) # Number of Floors, this will be list of floats and str if \"basement\"\n glazing 
= str2fl(list_doe1[4][3:6]) # [?] Total\n hCeiling = str2fl(list_doe1[5][3:6]) # [m] Ceiling height\n ver2hor = str2fl(list_doe1[7][3:6]) # Wall to Skin Ratio\n AreaRoof = str2fl(list_doe1[8][3:6]) # [m2] Gross Dimensions - Total area\n\n # Read zone summary (Sheet 2)\n file_doe_name_zone = os.path.join(\"{}\".format(DIR_DOE_PATH), \"BLD{}\".format(i+1),\"BLD{}_ZoneSummary.csv\".format(i+1))\n list_doe2 = read_csv(file_doe_name_zone)\n #listof(listof 3 eras)\n AreaFloor = str2fl([list_doe2[2][5],list_doe2[3][5],list_doe2[4][5]]) # [m2]\n Volume = str2fl([list_doe2[2][6],list_doe2[3][6],list_doe2[4][6]]) # [m3]\n AreaWall = str2fl([list_doe2[2][8],list_doe2[3][8],list_doe2[4][8]]) # [m2]\n AreaWindow = str2fl([list_doe2[2][9],list_doe2[3][9],list_doe2[4][9]]) # [m2]\n Occupant = str2fl([list_doe2[2][11],list_doe2[3][11],list_doe2[4][11]]) # Number of People\n Light = str2fl([list_doe2[2][12],list_doe2[3][12],list_doe2[4][12]]) # [W/m2]\n Elec = str2fl([list_doe2[2][13],list_doe2[3][13],list_doe2[4][13]]) # [W/m2] Electric Plug and Process\n Gas = str2fl([list_doe2[2][14],list_doe2[3][14],list_doe2[4][14]]) # [W/m2] Gas Plug and Process\n SHW = str2fl([list_doe2[2][15],list_doe2[3][15],list_doe2[4][15]]) # [Litres/hr] Peak Service Hot Water\n Vent = str2fl([list_doe2[2][17],list_doe2[3][17],list_doe2[4][17]]) # [L/s/m2] Ventilation\n Infil = str2fl([list_doe2[2][20],list_doe2[3][20],list_doe2[4][20]]) # Air Changes Per Hour (ACH) Infiltration\n\n # Read location summary (Sheet 3)\n file_doe_name_location = os.path.join(\"{}\".format(DIR_DOE_PATH), \"BLD{}\".format(i+1),\"BLD{}_LocationSummary.csv\".format(i+1))\n list_doe3 = read_csv(file_doe_name_location)\n #(listof (listof 3 eras (listof 16 climate types)))\n TypeWall = [list_doe3[3][4:20],list_doe3[14][4:20],list_doe3[25][4:20]] # Construction type\n RvalWall = str2fl([list_doe3[4][4:20],list_doe3[15][4:20],list_doe3[26][4:20]]) # [m2*K/W] R-value\n TypeRoof = 
[list_doe3[5][4:20],list_doe3[16][4:20],list_doe3[27][4:20]] # Construction type\n RvalRoof = str2fl([list_doe3[6][4:20],list_doe3[17][4:20],list_doe3[28][4:20]]) # [m2*K/W] R-value\n Uwindow = str2fl([list_doe3[7][4:20],list_doe3[18][4:20],list_doe3[29][4:20]]) # [W/m2*K] U-factor\n SHGC = str2fl([list_doe3[8][4:20],list_doe3[19][4:20],list_doe3[30][4:20]]) # [-] coefficient\n HVAC = str2fl([list_doe3[9][4:20],list_doe3[20][4:20],list_doe3[31][4:20]]) # [kW] Air Conditioning\n HEAT = str2fl([list_doe3[10][4:20],list_doe3[21][4:20],list_doe3[32][4:20]]) # [kW] Heating\n COP = str2fl([list_doe3[11][4:20],list_doe3[22][4:20],list_doe3[33][4:20]]) # [-] Air Conditioning COP\n EffHeat = str2fl([list_doe3[12][4:20],list_doe3[23][4:20],list_doe3[34][4:20]]) # [%] Heating Efficiency\n FanFlow = str2fl([list_doe3[13][4:20],list_doe3[24][4:20],list_doe3[35][4:20]]) # [m3/s] Fan Max Flow Rate\n\n # Read Schedules (Sheet 4)\n file_doe_name_schedules = os.path.join(\"{}\".format(DIR_DOE_PATH), \"BLD{}\".format(i+1),\"BLD{}_Schedules.csv\".format(i+1))\n list_doe4 = read_csv(file_doe_name_schedules)\n\n #listof(listof weekday, sat, sun (list of 24 fractions)))\n SchEquip = str2fl([list_doe4[1][6:30],list_doe4[2][6:30],list_doe4[3][6:30]]) # Equipment Schedule 24 hrs\n SchLight = str2fl([list_doe4[4][6:30],list_doe4[5][6:30],list_doe4[6][6:30]]) # Light Schedule 24 hrs; Wkday=Sat=Sun=Hol\n SchOcc = str2fl([list_doe4[7][6:30],list_doe4[8][6:30],list_doe4[9][6:30]]) # Occupancy Schedule 24 hrs\n SetCool = str2fl([list_doe4[10][6:30],list_doe4[11][6:30],list_doe4[12][6:30]]) # Cooling Setpoint Schedule 24 hrs\n SetHeat = str2fl([list_doe4[13][6:30],list_doe4[14][6:30],list_doe4[15][6:30]]) # Heating Setpoint Schedule 24 hrs; summer design\n SchGas = str2fl([list_doe4[16][6:30],list_doe4[17][6:30],list_doe4[18][6:30]]) # Gas Equipment Schedule 24 hrs; wkday=sat\n SchSWH = str2fl([list_doe4[19][6:30],list_doe4[20][6:30],list_doe4[21][6:30]]) # Solar Water Heating Schedule 24 hrs; 
wkday=summerdesign, sat=winterdesgin\n\n\n for j in range(3):\n\n # j = 3 built eras\n #print\"\\tEra: {} @j={}\".format(BUILTERA[j], j)\n\n for k in range(16):\n\n # k = 16 climate zones\n #print \"\\tClimate zone: {} @k={}\".format(ZONETYPE[k], k)\n\n B = Building(\n hCeiling[j], # floorHeight by era\n 1, # intHeatNight\n 1, # intHeatDay\n 0.1, # intHeatFRad\n 0.1, # intHeatFLat\n Infil[j], # infil (ACH) by era\n Vent[j]/1000., # vent (m^3/s/m^2) by era, converted from liters\n glazing[j], # glazing ratio by era\n Uwindow[j][k], # uValue by era, by climate type\n SHGC[j][k], # SHGC, by era, by climate type\n 'AIR', # cooling condensation system type: AIR, WATER\n COP[j][k], # cop by era, climate type\n 297, # coolSetpointDay = 24 C\n 297, # coolSetpointNight\n 293, # heatSetpointDay = 20 C\n 293, # heatSetpointNight\n (HVAC[j][k]*1000.0)/AreaFloor[j], # coolCap converted to W/m2 by era, climate type\n EffHeat[j][k], # heatEff by era, climate type\n 293) # initialTemp at 20 C\n\n #Not defined in the constructor\n B.heatCap = (HEAT[j][k]*1000.0)/AreaFloor[j] # heating Capacity converted to W/m2 by era, climate type\n B.Type = BLDTYPE[i]\n B.Era = BUILTERA[j]\n B.Zone = ZONETYPE[k]\n refDOE[i][j][k] = B\n\n # Define wall, mass(floor), roof\n # Reference from E+ for conductivity, thickness (reference below)\n\n # Material: (thermalCond, volHeat = specific heat * density)\n Concrete = Material (1.311, 836.8 * 2240,\"Concrete\")\n Insulation = Material (0.049, 836.8 * 265.0, \"Insulation\")\n Gypsum = Material (0.16, 830.0 * 784.9, \"Gypsum\")\n Wood = Material (0.11, 1210.0 * 544.62, \"Wood\")\n Stucco = Material(0.6918, 837.0 * 1858.0, \"Stucco\")\n\n # Wall (1 in stucco, concrete, insulation, gypsum)\n # Check TypWall by era, by climate\n if TypeWall[j][k] == \"MassWall\":\n #Construct wall based on R value of Wall from refDOE and properties defined above\n # 1\" stucco, 8\" concrete, tbd insulation, 1/2\" gypsum\n Rbase = 0.271087 # R val based on stucco, concrete, 
gypsum\n Rins = RvalWall[j][k] - Rbase #find insulation value\n D_ins = Rins * Insulation.thermalCond # depth of ins from m2*K/W * W/m*K = m\n if D_ins > 0.01:\n thickness = [0.0254,0.0508,0.0508,0.0508,0.0508,D_ins,0.0127]\n layers = [Stucco,Concrete,Concrete,Concrete,Concrete,Insulation,Gypsum]\n else:\n #if it's less then 1 cm don't include in layers\n thickness = [0.0254,0.0508,0.0508,0.0508,0.0508,0.0127]\n layers = [Stucco,Concrete,Concrete,Concrete,Concrete,Gypsum]\n\n wall = Element(0.08,0.92,thickness,layers,0.,293.,0.,\"MassWall\")\n\n # If mass wall, assume mass floor (4\" concrete)\n # Mass (assume 4\" concrete);\n alb = 0.2\n emis = 0.9\n thickness = [0.054,0.054]\n concrete = Material (1.31, 2240.0*836.8)\n mass = Element(alb,emis,thickness,[concrete,concrete],0,293,1,\"MassFloor\")\n\n elif TypeWall[j][k] == \"WoodFrame\":\n # 0.01m wood siding, tbd insulation, 1/2\" gypsum\n Rbase = 0.170284091 # based on wood siding, gypsum\n Rins = RvalWall[j][k] - Rbase\n D_ins = Rins * Insulation.thermalCond #depth of insulatino\n\n if D_ins > 0.01:\n thickness = [0.01,D_ins,0.0127]\n layers = [Wood,Insulation,Gypsum]\n else:\n thickness = [0.01,0.0127]\n layers = [Wood,Gypsum]\n\n wall = Element(0.22,0.92,thickness,layers,0.,293.,0.,\"WoodFrameWall\")\n\n # If wood frame wall, assume wooden floor\n alb = 0.2\n emis = 0.9\n thickness = [0.05,0.05]\n wood = Material(1.31, 2240.0*836.8)\n mass = Element(alb,emis,thickness,[wood,wood],0.,293.,1.,\"WoodFloor\")\n\n elif TypeWall[j][k] == \"SteelFrame\":\n # 1\" stucco, 8\" concrete, tbd insulation, 1/2\" gypsum\n Rbase = 0.271087 # based on stucco, concrete, gypsum\n Rins = RvalWall[j][k] - Rbase\n D_ins = Rins * Insulation.thermalCond\n if D_ins > 0.01:\n thickness = [0.0254,0.0508,0.0508,0.0508,0.0508,D_ins,0.0127]\n layers = [Stucco,Concrete,Concrete,Concrete,Concrete,Insulation,Gypsum]\n else: # If insulation is too thin, assume no insulation\n thickness = [0.0254,0.0508,0.0508,0.0508,0.0508,0.0127]\n layers = 
[Stucco,Concrete,Concrete,Concrete,Concrete,Gypsum]\n wall = Element(0.15,0.92,thickness,layers,0.,293.,0.,\"SteelFrame\")\n\n # If mass wall, assume mass foor\n # Mass (assume 4\" concrete),\n alb = 0.2\n emis = 0.93\n thickness = [0.05,0.05]\n mass = Element(alb,emis,thickness,[Concrete,Concrete],0.,293.,1.,\"MassFloor\")\n\n elif TypeWall[j][k] == \"MetalWall\":\n # metal siding, insulation, 1/2\" gypsum\n alb = 0.2\n emis = 0.9\n D_ins = max((RvalWall[j][k] * Insulation.thermalCond)/2, 0.01) #use derived insul thickness or 0.01 based on max\n thickness = [D_ins,D_ins,0.0127]\n materials = [Insulation,Insulation,Gypsum]\n wall = Element(alb,emis,thickness,materials,0,293,0,\"MetalWall\")\n\n # Mass (assume 4\" concrete);\n alb = 0.2\n emis = 0.9\n thickness = [0.05, 0.05]\n concrete = Material(1.31, 2240.0*836.8)\n mass = Element(alb,emis,thickness,[concrete,concrete],0.,293.,1.,\"MassFloor\")\n\n # Roof\n if TypeRoof[j][k] == \"IEAD\": #Insulation Entirely Above Deck\n # IEAD-> membrane, insulation, decking\n alb = 0.2\n emis = 0.93\n D_ins = max(RvalRoof[j][k] * Insulation.thermalCond/2.,0.01);\n roof = Element(alb,emis,[D_ins,D_ins],[Insulation,Insulation],0.,293.,0.,\"IEAD\")\n\n elif TypeRoof[j][k] == \"Attic\":\n # IEAD-> membrane, insulation, decking\n alb = 0.2\n emis = 0.9\n D_ins = max(RvalRoof[j][k] * Insulation.thermalCond/2.,0.01)\n roof = Element(alb,emis,[D_ins,D_ins],[Insulation,Insulation],0.,293.,0.,\"Attic\")\n\n elif TypeRoof[j][k] == \"MetalRoof\":\n # IEAD-> membrane, insulation, decking\n alb = 0.2\n emis = 0.9\n D_ins = max(RvalRoof[j][k] * Insulation.thermalCond/2.,0.01)\n roof = Element(alb,emis,[D_ins,D_ins],[Insulation,Insulation],0.,293.,0.,\"MetalRoof\")\n\n # Define bulding energy model, set fraction of the urban floor space of this typology to zero\n refBEM[i][j][k] = BEMDef(B, mass, wall, roof, 0.0)\n refBEM[i][j][k].building.FanMax = FanFlow[j][k] # max fan flow rate (m^3/s) per DOE\n\n Schedule[i][j][k] = SchDef()\n\n 
Schedule[i][j][k].Elec = SchEquip # 3x24 matrix of schedule for fraction electricity (WD,Sat,Sun)\n Schedule[i][j][k].Light = SchLight # 3x24 matrix of schedule for fraction light (WD,Sat,Sun)\n Schedule[i][j][k].Gas = SchGas # 3x24 matrix of schedule for fraction gas (WD,Sat,Sun)\n Schedule[i][j][k].Occ = SchOcc # 3x24 matrix of schedule for fraction occupancy (WD,Sat,Sun)\n Schedule[i][j][k].Cool = SetCool # 3x24 matrix of schedule for fraction cooling temp (WD,Sat,Sun)\n Schedule[i][j][k].Heat = SetHeat # 3x24 matrix of schedule for fraction heating temp (WD,Sat,Sun)\n Schedule[i][j][k].SWH = SchSWH # 3x24 matrix of schedule for fraction SWH (WD,Sat,Sun\n\n Schedule[i][j][k].Qelec = Elec[j] # W/m^2 (max) for electrical plug process\n Schedule[i][j][k].Qlight = Light[j] # W/m^2 (max) for light\n Schedule[i][j][k].Nocc = Occupant[j]/AreaFloor[j] # Person/m^2\n Schedule[i][j][k].Qgas = Gas[j] # W/m^2 (max) for gas\n Schedule[i][j][k].Vent = Vent[j]/1000.0 # m^3/m^2 per person\n Schedule[i][j][k].Vswh = SHW[j]/AreaFloor[j] # litres per hour per m^2 of floor\n\n\n # if not test serialize refDOE,refBEM,Schedule and store in resources\n if serialize_output:\n\n # create a binary file for serialized obj\n pkl_file_path = os.path.join(DIR_CURR,'refdata','readDOE.pkl')\n pickle_readDOE = open(pkl_file_path, 'wb')\n\n # dump in ../resources\n # Pickle objects, protocol 1 b/c binary file\n pickle.dump(refDOE, pickle_readDOE,1)\n pickle.dump(refBEM, pickle_readDOE,1)\n pickle.dump(Schedule, pickle_readDOE,1)\n\n pickle_readDOE.close()\n\n return refDOE, refBEM, Schedule", "response": "Read all DOE data and return a list of dicts."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef readPlist(pathOrFile):\n didOpen = False\n result = None\n if isinstance(pathOrFile, (bytes, unicode)):\n pathOrFile = open(pathOrFile, 'rb')\n didOpen = True\n try:\n reader = PlistReader(pathOrFile)\n result = reader.parse()\n except 
NotBinaryPlistException as e:\n try:\n pathOrFile.seek(0)\n result = None\n if hasattr(plistlib, 'loads'):\n contents = None\n if isinstance(pathOrFile, (bytes, unicode)):\n with open(pathOrFile, 'rb') as f:\n contents = f.read()\n else:\n contents = pathOrFile.read()\n result = plistlib.loads(contents)\n else:\n result = plistlib.readPlist(pathOrFile)\n result = wrapDataObject(result, for_binary=True)\n except Exception as e:\n raise InvalidPlistException(e)\n finally:\n if didOpen:\n pathOrFile.close()\n return result", "response": "Reads a single item from a plist file."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef getSizedInteger(self, data, byteSize, as_number=False):\n result = 0\n if byteSize == 0:\n raise InvalidPlistException(\"Encountered integer with byte size of 0.\")\n # 1, 2, and 4 byte integers are unsigned\n elif byteSize == 1:\n result = unpack('>B', data)[0]\n elif byteSize == 2:\n result = unpack('>H', data)[0]\n elif byteSize == 4:\n result = unpack('>L', data)[0]\n elif byteSize == 8:\n if as_number:\n result = unpack('>q', data)[0]\n else:\n result = unpack('>Q', data)[0]\n elif byteSize <= 16:\n # Handle odd-sized or integers larger than 8 bytes\n # Don't naively go over 16 bytes, in order to prevent infinite loops.\n result = 0\n if hasattr(int, 'from_bytes'):\n result = int.from_bytes(data, 'big')\n else:\n for byte in data:\n if not isinstance(byte, int): # Python3.0-3.1.x return ints, 2.x return str\n byte = unpack_from('>B', byte)[0]\n result = (result << 8) | byte\n else:\n raise InvalidPlistException(\"Encountered integer longer than 16 bytes.\")\n return result", "response": "Returns the integer in bytes of size byteSize."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef writeRoot(self, root):\n output = self.header\n wrapped_root = self.wrapRoot(root)\n self.computeOffsets(wrapped_root, asReference=True, 
isRoot=True)\n self.trailer = self.trailer._replace(**{'objectRefSize':self.intSize(len(self.computedUniques))})\n self.writeObjectReference(wrapped_root, output)\n output = self.writeObject(wrapped_root, output, setReferencePosition=True)\n \n # output size at this point is an upper bound on how big the\n # object reference offsets need to be.\n self.trailer = self.trailer._replace(**{\n 'offsetSize':self.intSize(len(output)),\n 'offsetCount':len(self.computedUniques),\n 'offsetTableOffset':len(output),\n 'topLevelObjectNumber':0\n })\n \n output = self.writeOffsetTable(output)\n output += pack('!xxxxxxBBQQQ', *self.trailer)\n self.file.write(output)", "response": "Write the root object to the file."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef writeObjectReference(self, obj, output):\n position = self.positionOfObjectReference(obj)\n if position is None:\n self.writtenReferences[obj] = len(self.writtenReferences)\n output += self.binaryInt(len(self.writtenReferences) - 1, byteSize=self.trailer.objectRefSize)\n return (True, output)\n else:\n output += self.binaryInt(position, byteSize=self.trailer.objectRefSize)\n return (False, output)", "response": "Tries to write an object reference to the output. 
Returns a tuple of whether the object was a new reference or not."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef writeObject(self, obj, output, setReferencePosition=False):\n def proc_variable_length(format, length):\n result = b''\n if length > 0b1110:\n result += pack('!B', (format << 4) | 0b1111)\n result = self.writeObject(length, result)\n else:\n result += pack('!B', (format << 4) | length)\n return result\n \n def timedelta_total_seconds(td):\n # Shim for Python 2.6 compatibility, which doesn't have total_seconds.\n # Make one argument a float to ensure the right calculation.\n return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10.0**6) / 10.0**6\n \n if setReferencePosition:\n self.referencePositions[obj] = len(output)\n \n if obj is None:\n output += pack('!B', 0b00000000)\n elif isinstance(obj, BoolWrapper):\n if obj.value is False:\n output += pack('!B', 0b00001000)\n else:\n output += pack('!B', 0b00001001)\n elif isinstance(obj, Uid):\n size = self.intSize(obj.integer)\n output += pack('!B', (0b1000 << 4) | size - 1)\n output += self.binaryInt(obj.integer)\n elif isinstance(obj, (int, long)):\n byteSize = self.intSize(obj)\n root = math.log(byteSize, 2)\n output += pack('!B', (0b0001 << 4) | int(root))\n output += self.binaryInt(obj, as_number=True)\n elif isinstance(obj, FloatWrapper):\n # just use doubles\n output += pack('!B', (0b0010 << 4) | 3)\n output += self.binaryReal(obj)\n elif isinstance(obj, datetime.datetime):\n try:\n timestamp = (obj - apple_reference_date).total_seconds()\n except AttributeError:\n timestamp = timedelta_total_seconds(obj - apple_reference_date)\n output += pack('!B', 0b00110011)\n output += pack('!d', float(timestamp))\n elif isinstance(obj, Data):\n output += proc_variable_length(0b0100, len(obj))\n output += obj\n elif isinstance(obj, StringWrapper):\n output += proc_variable_length(obj.encodingMarker, len(obj))\n output += 
obj.encodedValue\n elif isinstance(obj, bytes):\n output += proc_variable_length(0b0101, len(obj))\n output += obj\n elif isinstance(obj, HashableWrapper):\n obj = obj.value\n if isinstance(obj, (set, list, tuple)):\n if isinstance(obj, set):\n output += proc_variable_length(0b1100, len(obj))\n else:\n output += proc_variable_length(0b1010, len(obj))\n \n objectsToWrite = []\n for objRef in sorted(obj) if isinstance(obj, set) else obj:\n (isNew, output) = self.writeObjectReference(objRef, output)\n if isNew:\n objectsToWrite.append(objRef)\n for objRef in objectsToWrite:\n output = self.writeObject(objRef, output, setReferencePosition=True)\n elif isinstance(obj, dict):\n output += proc_variable_length(0b1101, len(obj))\n keys = []\n values = []\n objectsToWrite = []\n for key, value in sorted(iteritems(obj)):\n keys.append(key)\n values.append(value)\n for key in keys:\n (isNew, output) = self.writeObjectReference(key, output)\n if isNew:\n objectsToWrite.append(key)\n for value in values:\n (isNew, output) = self.writeObjectReference(value, output)\n if isNew:\n objectsToWrite.append(value)\n for objRef in objectsToWrite:\n output = self.writeObject(objRef, output, setReferencePosition=True)\n return output", "response": "Serializes the given object to the output."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nwriting all of the object reference offsets.", "response": "def writeOffsetTable(self, output):\n \"\"\"Writes all of the object reference offsets.\"\"\"\n all_positions = []\n writtenReferences = list(self.writtenReferences.items())\n writtenReferences.sort(key=lambda x: x[1])\n for obj,order in writtenReferences:\n # Porting note: Elsewhere we deliberately replace empty unicdoe strings\n # with empty binary strings, but the empty unicode string\n # goes into writtenReferences. 
This isn't an issue in Py2\n # because u'' and b'' have the same hash; but it is in\n # Py3, where they don't.\n if bytes != str and obj == unicodeEmpty:\n obj = b''\n position = self.referencePositions.get(obj)\n if position is None:\n raise InvalidPlistException(\"Error while writing offsets table. Object not found. %s\" % obj)\n output += self.binaryInt(position, self.trailer.offsetSize)\n all_positions.append(position)\n return output"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the number of bytes needed to store the given integer.", "response": "def intSize(self, obj):\n \"\"\"Returns the number of bytes necessary to store the given integer.\"\"\"\n # SIGNED\n if obj < 0: # Signed integer, always 8 bytes\n return 8\n # UNSIGNED\n elif obj <= 0xFF: # 1 byte\n return 1\n elif obj <= 0xFFFF: # 2 bytes\n return 2\n elif obj <= 0xFFFFFFFF: # 4 bytes\n return 4\n # SIGNED\n # 0x7FFFFFFFFFFFFFFF is the max.\n elif obj <= 0x7FFFFFFFFFFFFFFF: # 8 bytes signed\n return 8\n elif obj <= 0xffffffffffffffff: # 8 bytes unsigned\n return 16\n else:\n raise InvalidPlistException(\"Core Foundation can't handle integers with size greater than 8 bytes.\")"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nopen the z_meso. 
txt file and return heights as list", "response": "def load_z_meso(self,z_meso_path):\n \"\"\" Open the z_meso.txt file and return heights as list \"\"\"\n\n self.z_meso = []\n z_meso_file_path = os.path.join(z_meso_path, self.Z_MESO_FILE_NAME)\n\n # Check if exists\n if not os.path.exists(z_meso_file_path):\n raise Exception(\"z_meso.txt file: '{}' does not exist.\".format(uwg_param_file))\n\n f = open(z_meso_file_path,'r')\n for txtline in f:\n z_ = float(\"\".join(txtline.split())) # Strip all white spaces and change to float\n self.z_meso.append(z_)\n f.close()"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef urbflux(UCM, UBL, BEM, forc, parameter, simTime, RSM):\n T_can = UCM.canTemp\n Cp = parameter.cp\n UCM.Q_roof = 0.\n sigma = 5.67e-8 # Stephan-Boltzman constant\n UCM.roofTemp = 0. # Average urban roof temperature\n UCM.wallTemp = 0. # Average urban wall temperature\n\n for j in range(len(BEM)):\n # Building energy model\n BEM[j].building.BEMCalc(UCM, BEM[j], forc, parameter, simTime)\n BEM[j].ElecTotal = BEM[j].building.ElecTotal * BEM[j].fl_area # W m-2\n\n # Update roof infra calc\n e_roof = BEM[j].roof.emissivity\n T_roof = BEM[j].roof.layerTemp[0]\n BEM[j].roof.infra = e_roof * (forc.infra - sigma * T_roof**4.)\n\n # update wall infra calc (road done later)\n e_wall = BEM[j].wall.emissivity\n T_wall = BEM[j].wall.layerTemp[0]\n # calculates the infrared radiation for wall, taking into account radiation exchange from road\n _infra_road_, BEM[j].wall.infra = infracalcs(UCM, forc, UCM.road.emissivity, e_wall, UCM.roadTemp, T_wall)\n\n # Update element temperatures\n BEM[j].mass.layerTemp = BEM[j].mass.Conduction(simTime.dt, BEM[j].building.fluxMass,1.,0.,BEM[j].building.fluxMass)\n BEM[j].roof.SurfFlux(forc,parameter,simTime,UCM.canHum,T_can,max(forc.wind,UCM.canWind),1.,BEM[j].building.fluxRoof)\n 
BEM[j].wall.SurfFlux(forc,parameter,simTime,UCM.canHum,T_can,UCM.canWind,1.,BEM[j].building.fluxWall)\n\n # Note the average wall & roof temperature\n UCM.wallTemp = UCM.wallTemp + BEM[j].frac*BEM[j].wall.layerTemp[0]\n UCM.roofTemp = UCM.roofTemp + BEM[j].frac*BEM[j].roof.layerTemp[0]\n\n # Update road infra calc (assume walls have similar emissivity, so use the last one)\n UCM.road.infra, _wall_infra = infracalcs(UCM,forc,UCM.road.emissivity,e_wall,UCM.roadTemp,UCM.wallTemp)\n UCM.road.SurfFlux(forc,parameter,simTime,UCM.canHum,T_can,UCM.canWind,2.,0.)\n UCM.roadTemp = UCM.road.layerTemp[0]\n\n # Sensible & latent heat flux (total)\n if UCM.latHeat != None:\n UCM.latHeat = UCM.latHeat + UCM.latAnthrop + UCM.treeLatHeat + UCM.road.lat*(1.-UCM.bldDensity)\n\n # ---------------------------------------------------------------------\n # Advective heat flux to UBL from VDM\n #\n # Note: UWG_Matlab code here is modified to compensate for rounding errors\n # that occur when recursively adding forDens, intAdv1, and intAdv2.\n # This causes issues in the UBL.advHeat calculatiuon when large (1e5)\n # numbers are subtracted to produce small numbers (1e-10) that can\n # differ from equivalent matlab calculations by a factor of 2.\n # Values this small are ~ 0, but for consistency's sake Kahan Summation\n # algorithm is applied to keep margin of difference from UWG_Matlab low.\n # ---------------------------------------------------------------------\n\n forDens = 0.0\n intAdv1 = 0.0\n intAdv2 = 0.0\n\n # c1 & c2 stores values truncated by floating point rounding for values < 10^-16\n c1 = 0.0\n c2 = 0.0\n c3 = 0.0\n\n for iz in range(RSM.nzfor):\n # At c loss of precision at at low order of magnitude, that we need in UBL.advHeat calc\n # Algebraically t is 0, but with floating pt numbers c will accumulate truncated values\n y = RSM.densityProfC[iz]*RSM.dz[iz]/(RSM.z[RSM.nzfor-1] + RSM.dz[RSM.nzfor-1]/2.)\n t = forDens + y\n c1 += (t - forDens) - y\n forDens = t\n\n y = 
RSM.windProf[iz]*RSM.tempProf[iz]*RSM.dz[iz]\n t = intAdv1 + y\n c2 += (t - intAdv1) - y\n intAdv1 = t\n\n y = RSM.windProf[iz]*RSM.dz[iz]\n t = intAdv2 + y\n c3 += (t - intAdv2) - y\n intAdv2 = t\n\n # Add the truncated values back\n forDens -= c1\n intAdv1 -= c2\n intAdv2 -= c3\n UBL.advHeat = UBL.paralLength*Cp*forDens*(intAdv1-(UBL.ublTemp*intAdv2))/UBL.urbArea\n\n # ---------------------------------------------------------------------\n # Convective heat flux to UBL from UCM (see Appendix - Bueno (2014))\n # ---------------------------------------------------------------------\n zrUrb = 2*UCM.bldHeight\n zref = RSM.z[RSM.nzref-1] # Reference height\n\n # Reference wind speed & canyon air density\n windUrb = forc.wind*log(zref/RSM.z0r)/log(parameter.windHeight/RSM.z0r)*\\\n log(zrUrb/UCM.z0u)/log(zref/UCM.z0u)\n dens = forc.pres/(1000*0.287042*T_can*(1.+1.607858*UCM.canHum))\n\n # Friction velocity\n UCM.ustar = parameter.vk*windUrb/log((zrUrb-UCM.l_disp)/UCM.z0u)\n\n # Convective scaling velocity\n wstar = (parameter.g*max(UCM.sensHeat,0.0)*zref/dens/Cp/T_can)**(1/3.)\n UCM.ustarMod = max(UCM.ustar,wstar) # Modified friction velocity\n UCM.uExch = parameter.exCoeff*UCM.ustarMod # Exchange velocity\n\n # Canyon wind speed, Eq. 27 Chp. 
3 Hanna and Britter, 2002\n # assuming CD = 1 and lambda_f = verToHor/4\n UCM.canWind = UCM.ustarMod*(UCM.verToHor/8.)**(-1/2.)\n\n # Canyon turbulent velocities\n UCM.turbU = 2.4*UCM.ustarMod\n UCM.turbV = 1.9*UCM.ustarMod\n UCM.turbW = 1.3*UCM.ustarMod\n\n # Urban wind profile\n for iz in range(RSM.nzref):\n UCM.windProf.append(UCM.ustar/parameter.vk*\\\n log((RSM.z[iz]+UCM.bldHeight-UCM.l_disp)/UCM.z0u))\n\n return UCM,UBL,BEM", "response": "Calculate the surface heat fluxes of the urban and the wall"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nretrieve all the books published by the artist", "response": "def get_books(self):\n \"\"\"\n Retrieves all the books published by the artist\n :return: List. Books published by the artist\n \"\"\"\n return itunespy.lookup(id=self.artist_id, entity=itunespy.entities['ebook'])[1:]"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets a list of tasks optionally filtered by task id.", "response": "def get_tasks(task_id='', completed=True):\n \"\"\" Get a list of tasks, optionally filtered by task id.\n The task_id can be the abbrevated. 
Example: If a task named 'sleep' is\n scaled to 3 in marathon, there will be be 3 tasks starting with 'sleep.'\n\n :param task_id: task ID\n :type task_id: str\n :param completed: include completed tasks?\n :type completed: bool\n\n :return: a list of tasks\n :rtype: []\n \"\"\"\n\n client = mesos.DCOSClient()\n master = mesos.Master(client.get_master_state())\n mesos_tasks = master.tasks(completed=completed, fltr=task_id)\n return [task.__dict__['_task'] for task in mesos_tasks]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_task(task_id, completed=True):\n tasks = get_tasks(task_id=task_id, completed=completed)\n\n if len(tasks) == 0:\n return None\n\n assert len(tasks) == 1, 'get_task should return at max 1 task for a task id'\n return tasks[0]", "response": "Get a task by task id where a task_id is required."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef task_completed(task_id):\n\n tasks = get_tasks(task_id=task_id)\n completed_states = ('TASK_FINISHED',\n 'TASK_FAILED',\n 'TASK_KILLED',\n 'TASK_LOST',\n 'TASK_ERROR')\n\n for task in tasks:\n if task['state'] in completed_states:\n return True\n\n return False", "response": "Check whether a task has completed."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef task_property_present_predicate(service, task, prop):\n try:\n response = get_service_task(service, task)\n except Exception as e:\n pass\n\n return (response is not None) and (prop in response)", "response": "True if the json_element passed in is present for the task specified."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nwait for a task to be launched", "response": "def wait_for_task(service, task, timeout_sec=120):\n \"\"\"Waits for a task which was launched to be launched\"\"\"\n return time_wait(lambda: task_predicate(service, task), 
timeout_seconds=timeout_sec)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef wait_for_task_property(service, task, prop, timeout_sec=120):\n return time_wait(lambda: task_property_present_predicate(service, task, prop), timeout_seconds=timeout_sec)", "response": "Waits for a task to have the specified property."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef copy_file(\n host,\n file_path,\n remote_path='.',\n username=None,\n key_path=None,\n action='put'\n):\n \"\"\" Copy a file via SCP, proxied through the mesos master\n\n :param host: host or IP of the machine to execute the command on\n :type host: str\n :param file_path: the local path to the file to be copied\n :type file_path: str\n :param remote_path: the remote path to copy the file to\n :type remote_path: str\n :param username: SSH username\n :type username: str\n :param key_path: path to the SSH private key to use for SSH authentication\n :type key_path: str\n\n :return: True if successful, False otherwise\n :rtype: bool\n \"\"\"\n\n if not username:\n username = shakedown.cli.ssh_user\n\n if not key_path:\n key_path = shakedown.cli.ssh_key_file\n\n key = validate_key(key_path)\n\n transport = get_transport(host, username, key)\n transport = start_transport(transport, username, key)\n\n if transport.is_authenticated():\n start = time.time()\n\n channel = scp.SCPClient(transport)\n\n if action == 'get':\n print(\"\\n{}scp {}:{} {}\\n\".format(shakedown.cli.helpers.fchr('>>'), host, remote_path, file_path))\n channel.get(remote_path, file_path)\n else:\n print(\"\\n{}scp {} {}:{}\\n\".format(shakedown.cli.helpers.fchr('>>'), file_path, host, remote_path))\n channel.put(file_path, remote_path)\n\n print(\"{} bytes copied in {} seconds.\".format(str(os.path.getsize(file_path)), str(round(time.time() - start, 2))))\n\n try_close(channel)\n 
try_close(transport)\n\n return True\n else:\n print(\"error: unable to authenticate {}@{} with key {}\".format(username, host, key_path))\n return False", "response": "Copy a file via SCP"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef copy_file_to_master(\n file_path,\n remote_path='.',\n username=None,\n key_path=None\n):\n \"\"\" Copy a file to the Mesos master\n \"\"\"\n\n return copy_file(shakedown.master_ip(), file_path, remote_path, username, key_path)", "response": "Copy a file to the master"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef copy_file_to_agent(\n host,\n file_path,\n remote_path='.',\n username=None,\n key_path=None\n):\n \"\"\" Copy a file to a Mesos agent, proxied through the master\n \"\"\"\n\n return copy_file(host, file_path, remote_path, username, key_path)", "response": "Copy a file to a Mesos agent"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef copy_file_from_master(\n remote_path,\n file_path='.',\n username=None,\n key_path=None\n):\n \"\"\" Copy a file to the Mesos master\n \"\"\"\n\n return copy_file(shakedown.master_ip(), file_path, remote_path, username, key_path, 'get')", "response": "Copy a file from the master to the Mesos master"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncopy a file from a Mesos agent to a master", "response": "def copy_file_from_agent(\n host,\n remote_path,\n file_path='.',\n username=None,\n key_path=None\n):\n \"\"\" Copy a file to a Mesos agent, proxied through the master\n \"\"\"\n\n return copy_file(host, file_path, remote_path, username, key_path, 'get')"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning json for specific cluster metadata.", "response": "def __metadata_helper(json_path):\n \"\"\" Returns json for specific cluster metadata. 
Important to realize that\n this was introduced in dcos-1.9. Clusters prior to 1.9 and missing metadata\n will return None\n \"\"\"\n url = shakedown.dcos_url_path('dcos-metadata/{}'.format(json_path))\n try:\n response = dcos.http.request('get', url)\n\n if response.status_code == 200:\n return response.json()\n except:\n pass\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget the list of resources from the state summary", "response": "def _get_resources(rtype='resources'):\n \"\"\" resource types from state summary include: resources, used_resources\n offered_resources, reserved_resources, unreserved_resources\n The default is resources.\n\n :param rtype: the type of resources to return\n :type rtype: str\n :param role: the name of the role if for reserved and if None all reserved\n :type rtype: str\n\n :return: resources(cpu,mem)\n :rtype: Resources\n \"\"\"\n cpus = 0\n mem = 0\n summary = DCOSClient().get_state_summary()\n\n if 'slaves' in summary:\n agents = summary.get('slaves')\n for agent in agents:\n if agent[rtype].get('cpus') is not None:\n cpus += agent[rtype].get('cpus')\n if agent[rtype].get('mem') is not None:\n mem += agent[rtype].get('mem')\n\n return Resources(cpus, mem)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the number of resources that are reserved and are reserved and the same as the current state", "response": "def get_reserved_resources(role=None):\n \"\"\" resource types from state summary include: reserved_resources\n\n :param role: the name of the role if for reserved and if None all reserved\n :type role: str\n\n :return: resources(cpu,mem)\n :rtype: Resources\n \"\"\"\n rtype = 'reserved_resources'\n cpus = 0.0\n mem = 0.0\n summary = DCOSClient().get_state_summary()\n\n if 'slaves' in summary:\n agents = summary.get('slaves')\n for agent in agents:\n resource_reservations = agent.get(rtype)\n reservations = []\n if role is None or '*' in 
role:\n reservations = resource_reservations.values()\n elif role in resource_reservations:\n reservations = [resource_reservations.get(role)]\n for reservation in reservations:\n if reservation.get('cpus') is not None:\n cpus += reservation.get('cpus')\n if reservation.get('mem') is not None:\n mem += reservation.get('mem')\n\n return Resources(cpus, mem)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef log_decl_method(func):\n from functools import wraps\n\n @wraps(func)\n def with_logging(*args, **kwargs):\n self = args[0]\n decl = args[2]\n log(DEBUG, u\" {}: {} {}\".format(\n self.state['current_step'], decl.name,\n serialize(decl.value).strip()).encode('utf-8'))\n return func(*args, **kwargs)\n return with_logging", "response": "Decorate do_declartion methods for debug logging."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconverting a css selector to an xpath supporting pseudo elements.", "response": "def css_to_func(css, flags, css_namespaces, lang):\n \"\"\"Convert a css selector to an xpath, supporting pseudo elements.\"\"\"\n from cssselect import parse, HTMLTranslator\n from cssselect.parser import FunctionalPseudoElement\n # FIXME HACK need lessc to support functional-pseudo-selectors instead\n # of marking as strings and stripping \" here.\n if not (css):\n return None\n\n sel = parse(css.strip('\" '))[0]\n xpath = HTMLTranslator().selector_to_xpath(sel)\n\n first_letter = False\n if sel.pseudo_element is not None:\n if type(sel.pseudo_element) == FunctionalPseudoElement:\n if sel.pseudo_element.name in ('attr', 'first-letter'):\n xpath += '/@' + sel.pseudo_element.arguments[0].value\n if sel.pseudo_element.name == 'first-letter':\n first_letter = True\n elif isinstance(sel.pseudo_element, type(u'')):\n if sel.pseudo_element == 'first-letter':\n first_letter = True\n\n xp = etree.XPath(xpath, namespaces=css_namespaces)\n\n def toupper(u):\n \"\"\"Use 
icu library for locale sensitive uppercasing (python2).\"\"\"\n loc = Locale(lang) if lang else Locale()\n return UnicodeString(u).toUpper(loc).encode('utf-8').decode('utf-8')\n\n def func(elem):\n res = xp(elem)\n if res:\n if etree.iselement(res[0]):\n res_str = etree.tostring(res[0], encoding='unicode',\n method=\"text\")\n else:\n res_str = res[0]\n if first_letter:\n if res_str:\n if flags and 'nocase' in flags:\n return toupper(res_str[0])\n else:\n return res_str[0]\n else:\n return res_str\n else:\n if flags and 'nocase' in flags:\n return toupper(res_str)\n else:\n return res_str\n\n return func"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef append_string(t, string):\n node = t.tree\n if string:\n if len(node) == 0:\n if node.text is not None:\n node.text += string\n else:\n node.text = string\n else: # Get last child\n child = list(node)[-1]\n if child.tail is not None:\n child.tail += string\n else:\n child.tail = string", "response": "Append a string to a node as text or tail of last child."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ninserts value into the target tree t with correct grouping.", "response": "def grouped_insert(t, value):\n \"\"\"Insert value into the target tree 't' with correct grouping.\"\"\"\n collator = Collator.createInstance(Locale(t.lang) if t.lang else Locale())\n if value.tail is not None:\n val_prev = value.getprevious()\n if val_prev is not None:\n val_prev.tail = (val_prev.tail or '') + value.tail\n else:\n val_parent = value.getparent()\n if val_parent is not None:\n val_parent.text = (val_parent.text or '') + value.tail\n value.tail = None\n if t.isgroup and t.sort(value) is not None:\n if t.groupby:\n for child in t.tree:\n if child.get('class') == 'group-by':\n # child[0] is the label span\n order = collator.compare(\n t.groupby(child[1]) or '', t.groupby(value) or '')\n if order == 0:\n c_target = Target(child, sort=t.sort, 
lang=t.lang)\n insert_group(value, c_target)\n break\n elif order > 0:\n group = create_group(t.groupby(value))\n group.append(value)\n child.addprevious(group)\n break\n else:\n group = create_group(t.groupby(value))\n group.append(value)\n t.tree.append(group)\n else:\n insert_group(value, t)\n\n elif t.sort and t.sort(value) is not None:\n insert_sort(value, t)\n\n elif t.location == 'inside':\n for child in t.tree:\n value.append(child)\n value.text = t.tree.text\n t.tree.text = None\n t.tree.append(value)\n\n elif t.location == 'outside':\n value.tail = t.tree.tail\n t.tree.tail = None\n target_parent_descendants = (\n [n.getparent() for n in t.parent.iterdescendants() if n == t.tree])\n try:\n parent = target_parent_descendants[0]\n parent.insert(parent.index(t.tree), value)\n value.append(t.tree)\n except IndexError as e:\n logger.error('Target of outside has been moved or deleted')\n raise e\n\n elif t.location == 'before':\n value.tail = t.tree.text\n t.tree.text = None\n t.tree.insert(0, value)\n else:\n t.tree.append(value)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef insert_sort(node, target):\n sort = target.sort\n lang = target.lang\n collator = Collator.createInstance(Locale(lang) if lang else Locale())\n for child in target.tree:\n if collator.compare(sort(child) or '', sort(node) or '') > 0:\n child.addprevious(node)\n break\n else:\n target.tree.append(node)", "response": "Insert node into sorted position in target tree."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninserting node into target tree in appropriate group.", "response": "def insert_group(node, target):\n \"\"\"Insert node into in target tree, in appropriate group.\n\n Uses group and lang from target function. 
This assumes the node and\n target share a structure of a first child that determines the grouping,\n and a second child that will be accumulated in the group.\n \"\"\"\n group = target.sort\n lang = target.lang\n\n collator = Collator.createInstance(Locale(lang) if lang else Locale())\n for child in target.tree:\n order = collator.compare(group(child) or '', group(node) or '')\n if order == 0:\n for nodechild in node[1:]:\n child.append(nodechild)\n break\n elif order > 0:\n child.addprevious(node)\n break\n else:\n target.tree.append(node)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating the group wrapper node.", "response": "def create_group(value):\n \"\"\"Create the group wrapper node.\"\"\"\n node = etree.Element('div', attrib={'class': 'group-by'})\n span = etree.Element('span', attrib={'class': 'group-label'})\n span.text = value\n node.append(span)\n return node"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn selector special pseudo class info.", "response": "def extract_selector_info(sel):\n \"\"\"Return selector special pseudo class info (steps and other).\"\"\"\n # walk the parsed_tree, looking for pseudoClass selectors, check names\n # add in steps and/or deferred extras\n steps, extras = _extract_sel_info(sel.parsed_tree)\n steps = sorted(set(steps))\n extras = sorted(set(extras))\n if len(steps) == 0:\n steps = ['default']\n return (steps, extras)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _to_roman(num):\n roman_numeral_map = (\n ('M', 1000),\n ('CM', 900),\n ('D', 500),\n ('CD', 400),\n ('C', 100),\n ('XC', 90),\n ('L', 50),\n ('XL', 40),\n ('X', 10),\n ('IX', 9),\n ('V', 5),\n ('IV', 4),\n ('I', 1)\n )\n if not (0 < num < 5000):\n log(WARN, 'Number out of range for roman (must be 1..4999)')\n return str(num)\n result = ''\n for numeral, integer in roman_numeral_map:\n while num >= integer:\n 
result += numeral\n num -= integer\n return result", "response": "Convert integer to roman numerals."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nmaking a deep copy of the provided tree with the provided id suffix.", "response": "def copy_w_id_suffix(elem, suffix=\"_copy\"):\n \"\"\"Make a deep copy of the provided tree, altering ids.\"\"\"\n mycopy = deepcopy(elem)\n for id_elem in mycopy.xpath('//*[@id]'):\n id_elem.set('id', id_elem.get('id') + suffix)\n return mycopy"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef generate_id(self):\n if self.use_repeatable_ids:\n self.repeatable_id_counter += 1\n return 'autobaked-{}'.format(self.repeatable_id_counter)\n else:\n return str(uuid4())", "response": "Generate a fresh id"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nclear the recipe state.", "response": "def clear_state(self):\n \"\"\"Clear the recipe state.\"\"\"\n self.state = {}\n self.state['steps'] = []\n self.state['current_step'] = None\n self.state['scope'] = []\n self.state['counters'] = {}\n self.state['strings'] = {}\n for step in self.matchers:\n self.state[step] = {}\n self.state[step]['pending'] = {}\n self.state[step]['actions'] = []\n self.state[step]['counters'] = {}\n self.state[step]['strings'] = {}\n # FIXME rather than boolean should ref HTML tree\n self.state[step]['recipe'] = False"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the internal state of the internal state with the CSS rules in css_in.", "response": "def update_css(self, css_in=None, clear_css=False):\n \"\"\"Add additional CSS rules, optionally replacing all.\"\"\"\n if clear_css:\n self.matchers = {}\n\n # CSS is changing, so clear processing state\n self.clear_state()\n\n if css_in is None:\n return\n try:\n with open(css_in, 'rb') as f: # is it a path/filename?\n css = f.read()\n except 
(IOError, TypeError):\n try:\n css = css_in.read() # Perhaps a file obj?\n except AttributeError:\n css = css_in # Treat it as a string\n\n # Namespaces defined in the CSS\n if clear_css:\n self.css_namespaces = {}\n\n rules, _ = tinycss2.parse_stylesheet_bytes(css, skip_whitespace=True)\n for rule in rules:\n # Check for any @namespace declarations\n if rule.type == 'at-rule':\n if rule.lower_at_keyword == 'namespace':\n # The 1 supported format is:\n #\n # default (unsupported): [,\n # ]\n # prefixed: [,\n # ,\n # ,\n # ]\n if len(rule.prelude) == 4 and \\\n [tok.type for tok in rule.prelude] == \\\n ['whitespace', 'ident', 'whitespace', 'string']:\n\n # Prefixed namespace\n ns_prefix = rule.prelude[1].value\n ns_url = rule.prelude[3].value\n self.css_namespaces[ns_prefix] = ns_url\n else:\n # etree.XPath does not support the default namespace\n # and XPath is used to implement `sort-by:`\n # http://www.goodmami.org/2015/11/04/\n # ... python-xpath-and-default-namespaces.html\n log(WARN, u'Unknown @namespace format at {}:{}'\n .format(rule.source_line, rule.source_column)\n .encode('utf-8'))\n\n elif rule.type == 'qualified-rule':\n\n selectors = parse(rule.prelude, namespaces=self.css_namespaces,\n extensions=extensions)\n decls = [d for d in\n parse_declaration_list(rule.content,\n skip_whitespace=True)\n if d.type == 'declaration'] # Could also be a comment\n for sel in selectors:\n try:\n csel = CompiledSelector(sel)\n except cssselect2.SelectorError as error:\n log(WARN, u'Invalid selector: {} {}'.format(\n serialize(rule.prelude),\n to_str(error.args)).encode('utf-8'))\n else:\n steps, extras = extract_selector_info(sel)\n label = csel.pseudo_element\n if len(extras) > 0:\n if label is not None:\n extras.insert(0, label)\n label = '_'.join(extras)\n\n for step in steps:\n if step not in self.matchers:\n self.matchers[step] = cssselect2.Matcher()\n self.record_coverage_zero(rule,\n sel.source_line_offset)\n self.matchers[step].add_selector(\n csel,\n 
((rule.source_line + sel.source_line_offset,\n serialize(rule.prelude).replace('\\n', ' ')),\n decls, label))\n elif rule.type == 'comment':\n pass\n elif rule.type == 'error':\n log(ERROR, u'Parse Error {}: {}'.format(\n rule.kind, rule.message).encode('utf-8'))\n # Phil does not know how to nicely exit with staus != 0\n raise ValueError(u'Parse Error {}: {}'.format(\n rule.kind, rule.message).encode('utf-8'))\n else:\n raise ValueError(u'BUG: Unknown ruletype={}'.format(rule.type))\n\n steps = sorted(self.matchers.keys())\n if len(steps) > 1:\n if 'default' in steps:\n steps.remove('default')\n try:\n steps.sort(key=int)\n steps.insert(0, '0')\n self.matchers['0'] = self.matchers.pop('default')\n except ValueError:\n steps.insert(0, 'default')\n else:\n try:\n steps.sort(key=int)\n except ValueError:\n pass # already sorted alpha\n\n self.clear_state()\n self.state['steps'] = steps\n log(DEBUG, 'Passes: {}'.format(to_str(steps)))"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\napply recipes to HTML tree. Will build recipes if needed.", "response": "def bake(self, element, last_step=None):\n \"\"\"Apply recipes to HTML tree. 
Will build recipes if needed.\"\"\"\n\n if last_step is not None:\n try:\n self.state['steps'] = [s for s in self.state['steps']\n if int(s) < int(last_step)]\n except ValueError:\n self.state['steps'] = [s for s in self.state['steps']\n if s < last_step]\n for step in self.state['steps']:\n self.state['current_step'] = step\n self.state['scope'].insert(0, step)\n # Need to wrap each loop, since tree may have changed\n wrapped_html_tree = ElementWrapper.from_html_root(element)\n\n if not self.state[step]['recipe']:\n recipe = self.build_recipe(wrapped_html_tree, step)\n else:\n recipe = self.state[step]\n\n log(DEBUG, u'Recipe {} length: {}'.format(\n step, len(recipe['actions'])).encode('utf-8'))\n target = None\n old_content = {}\n node_counts = {}\n for action, value in recipe['actions']:\n if action == 'target':\n target = value\n old_content = {}\n elif action == 'tag':\n target.tree.tag = value\n elif action == 'clear':\n old_content['text'] = target.tree.text\n target.tree.text = None\n old_content['children'] = []\n for child in target.tree:\n old_content['children'].append(child)\n target.tree.remove(child)\n elif action == 'content':\n if value is not None:\n append_string(target, value.text)\n for child in value:\n target.tree.append(child)\n elif old_content:\n append_string(target, old_content['text'])\n for child in old_content['children']:\n target.tree.append(child)\n elif action == 'attrib':\n attname, vals = value\n strval = u''.join([u'{}'.format(s) for s in vals])\n target.tree.set(attname, strval)\n elif action == 'string':\n strval = u''.join([u'{}'.format(s) for s in value])\n if target.location == 'before':\n prepend_string(target, strval)\n else:\n append_string(target, strval)\n elif action == 'move':\n grouped_insert(target, value)\n elif action == 'copy':\n mycopy = copy_w_id_suffix(value)\n mycopy.tail = None\n grouped_insert(target, mycopy)\n elif action == 'nodeset':\n node_counts[value] = node_counts.setdefault(value, 0) + 1\n suffix 
= u'_copy_{}'.format(node_counts[value])\n mycopy = copy_w_id_suffix(value, suffix)\n mycopy.tail = None\n grouped_insert(target, mycopy)\n else:\n log(WARN, u'Missing action {}'.format(\n action).encode('utf-8'))\n\n # Do numbering\n\n # Do label/link updates\n\n # Add an empty string to each element just to make sure the element\n # is closed. This is useful for browsers that parse the output\n # as HTML5 rather than as XHTML5.\n #\n # One use-case would be users that inject the content into an\n # existing HTML (not XHTML) document.\n walkAll = element.iter()\n for elt in walkAll:\n if elt.tag not in SELF_CLOSING_TAGS:\n if len(elt) == 0 and not elt.text:\n elt.text = ''"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef record_coverage_zero(self, rule, offset):\n self.coverage_lines.append('DA:{},0'.format(rule.source_line + offset))", "response": "Add entry to coverage_lines if this selector was parsed"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nadd entry to coverage saying this selector was matched", "response": "def record_coverage(self, rule):\n \"\"\"Add entry to coverage saying this selector was matched\"\"\"\n log(DEBUG, u'Rule ({}): {}'.format(*rule).encode('utf-8'))\n self.coverage_lines.append('DA:{},1'.format(rule[0]))"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef build_recipe(self, element, step, depth=0):\n element_id = element.etree_element.get('id')\n\n self.state['lang'] = element.lang\n\n matching_rules = {}\n # specificity, order, pseudo, payload = match\n # selector_rule, declaration_list, label = payload\n for _, _, pseudo, payload in self.matchers[step].match(element):\n rule, decs, label = payload\n matching_rules.setdefault(label, []).append((rule, decs))\n\n # Do non-pseudo\n if None in matching_rules:\n for rule, declarations in matching_rules.get(None):\n 
self.record_coverage(rule)\n self.push_target_elem(element)\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, None)\n\n # Store all variables (strings and counters) before children\n if element_id:\n temp_counters = {}\n temp_strings = {}\n for s_step in self.state['scope']:\n temp_counters[s_step] = {\n 'counters': deepcopy(self.state[s_step]['counters'])}\n temp_strings[s_step] = {\n 'strings': deepcopy(self.state[s_step]['strings'])}\n\n self.state['counters'][element_id] = temp_counters\n self.state['strings'][element_id] = temp_strings\n\n # Do before\n if 'before' in matching_rules:\n for rule, declarations in matching_rules.get('before'):\n self.record_coverage(rule)\n # pseudo element, create wrapper\n self.push_pending_elem(element, 'before')\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, 'before')\n # deal w/ pending_elements, per rule\n self.pop_pending_if_empty(element)\n\n # Recurse\n for el in element.iter_children():\n _state = self.build_recipe(el, step, depth=depth+1) # noqa\n\n # Do after\n if 'after' in matching_rules:\n for rule, declarations in matching_rules.get('after'):\n self.record_coverage(rule)\n # pseudo element, create wrapper\n self.push_pending_elem(element, 'after')\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, 'after')\n # deal w/ pending_elements, per rule\n self.pop_pending_if_empty(element)\n\n # Do outside\n if 'outside' in matching_rules:\n for rule, declarations in matching_rules.get('outside'):\n self.record_coverage(rule)\n self.push_pending_elem(element, 'outside')\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, 'outside')\n\n # Do inside\n if 'inside' in matching_rules:\n for rule, declarations in matching_rules.get('inside'):\n self.record_coverage(rule)\n self.push_pending_elem(element, 'inside')\n for decl in declarations:\n method = self.find_method(decl)\n 
method(element, decl, 'inside')\n\n # Do deferred\n if ('deferred' in matching_rules or\n 'after_deferred' in matching_rules or # NOQA\n 'before_deferred' in matching_rules or\n 'outside_deferred' in matching_rules or\n 'inside_deferred' in matching_rules):\n\n # store strings and counters, in case a deferred rule changes one\n if element_id:\n temp_counters = {}\n temp_strings = {}\n for s_step in self.state['scope']:\n temp_counters[s_step] = {\n 'counters': deepcopy(self.state[s_step]['counters'])}\n temp_strings[s_step] = {\n 'strings': deepcopy(self.state[s_step]['strings'])}\n\n # Do straight up deferred\n if 'deferred' in matching_rules:\n for rule, declarations in matching_rules.get('deferred'):\n self.record_coverage(rule)\n self.push_target_elem(element)\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, None)\n\n # Do before_deferred\n if 'before_deferred' in matching_rules:\n for rule, declarations in \\\n matching_rules.get('before_deferred'):\n self.record_coverage(rule)\n # pseudo element, create wrapper\n self.push_pending_elem(element, 'before')\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, 'before')\n # deal w/ pending_elements, per rule\n self.pop_pending_if_empty(element)\n\n # Do after_deferred\n if 'after_deferred' in matching_rules:\n for rule, declarations in matching_rules.get('after_deferred'):\n self.record_coverage(rule)\n # pseudo element, create wrapper\n self.push_pending_elem(element, 'after')\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, 'after')\n # deal w/ pending_elements, per rule\n self.pop_pending_if_empty(element)\n\n # Do outside_deferred\n if 'outside_deferred' in matching_rules:\n for rule, declarations in \\\n matching_rules.get('outside_deferred'):\n self.record_coverage(rule)\n self.push_pending_elem(element, 'outside')\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, 
'outside')\n\n # Do inside_deferred\n if 'inside_deferred' in matching_rules:\n for rule, declarations in \\\n matching_rules.get('inside_deferred'):\n self.record_coverage(rule)\n self.push_pending_elem(element, 'inside')\n for decl in declarations:\n method = self.find_method(decl)\n method(element, decl, 'inside')\n\n # Did a deferred rule change a stored variable?\n if element_id:\n for s_step in self.state['scope']:\n for counter, val in self.state[s_step]['counters'].items():\n if (counter not in temp_counters or\n temp_counters[counter] != val):\n self.state['counters'][element_id][s_step]['counters'][counter] = val # NOQA\n for string, val in self.state[s_step]['strings'].items():\n if (string not in temp_strings or\n temp_strings[string] != val):\n self.state['strings'][element_id][s_step]['strings'][string] = val # NOQA\n\n if depth == 0:\n self.state[step]['recipe'] = True # FIXME should ref HTML tree\n return self.state[step]", "response": "Build a set of steps to collate an HTML doc."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef push_target_elem(self, element, pseudo=None):\n actions = self.state[self.state['current_step']]['actions']\n if len(actions) > 0 and actions[-1][0] == 'target':\n actions.pop()\n actions.append(('target', Target(element.etree_element,\n pseudo, element.parent.etree_element\n )))", "response": "Place target element onto action stack."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef push_pending_elem(self, element, pseudo):\n self.push_target_elem(element, pseudo)\n elem = etree.Element('div')\n actions = self.state[self.state['current_step']]['actions']\n actions.append(('move', elem))\n actions.append(('target', Target(elem)))", "response": "Create and place pending target element onto stack."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef 
pop_pending_if_empty(self, element):\n actions = self.state[self.state['current_step']]['actions']\n elem = self.current_target().tree\n if actions[-1][0] == ('target') and actions[-1][1].tree == elem:\n actions.pop()\n actions.pop()\n actions.pop()", "response": "Remove empty wrapper element."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nfind method to call for declaration based on name.", "response": "def find_method(self, decl):\n \"\"\"Find class method to call for declaration based on name.\"\"\"\n name = decl.name\n method = None\n try:\n method = getattr(self, u'do_{}'.format(\n (name).replace('-', '_')))\n except AttributeError:\n if name.startswith('data-'):\n method = getattr(self, 'do_data_any')\n elif name.startswith('attr-'):\n method = getattr(self, 'do_attr_any')\n else:\n log(WARN, u'Missing method {}'.format(\n (name).replace('-', '_')).encode('utf-8'))\n if method:\n self.record_coverage_line(decl.source_line)\n return method\n else:\n return lambda x, y, z: None"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns value of vname from the variable store vtype.", "response": "def lookup(self, vtype, vname, target_id=None):\n \"\"\"Return value of vname from the variable store vtype.\n\n Valid vtypes are `strings` 'counters', and `pending`. If the value\n is not found in the current steps store, earlier steps will be\n checked. 
If not found, '', 0, or (None, None) is returned.\n \"\"\"\n nullvals = {'strings': '', 'counters': 0, 'pending': (None, None)}\n nullval = nullvals[vtype]\n vstyle = None\n\n if vtype == 'counters':\n if len(vname) > 1:\n vname, vstyle = vname\n else:\n vname = vname[0]\n\n if target_id is not None:\n try:\n state = self.state[vtype][target_id]\n steps = self.state[vtype][target_id].keys()\n except KeyError:\n log(WARN, u'Bad ID target lookup {}'.format(\n target_id).encode('utf-8'))\n return nullval\n\n else:\n state = self.state\n steps = self.state['scope']\n\n for step in steps:\n if vname in state[step][vtype]:\n if vtype == 'pending':\n return(state[step][vtype][vname], step)\n else:\n val = state[step][vtype][vname]\n if vstyle is not None:\n return self.counter_style(val, vstyle)\n return val\n else:\n return nullval"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef counter_style(self, val, style):\n if style == 'decimal-leading-zero':\n if val < 10:\n valstr = \"0{}\".format(val)\n else:\n valstr = str(val)\n elif style == 'lower-roman':\n valstr = _to_roman(val).lower()\n elif style == 'upper-roman':\n valstr = _to_roman(val)\n elif style == 'lower-latin' or style == 'lower-alpha':\n if 1 <= val <= 26:\n valstr = chr(val + 96)\n else:\n log(WARN, 'Counter out of range for latin (must be 1...26)')\n valstr = str(val)\n elif style == 'upper-latin' or style == 'upper-alpha':\n if 1 <= val <= 26:\n valstr = chr(val + 64)\n else:\n log(WARN, 'Counter out of range for latin (must be 1...26)')\n valstr = str(val)\n elif style == 'decimal':\n valstr = str(val)\n else:\n log(WARN, u\"ERROR: Counter numbering not supported for\"\n u\" list type {}. 
Using decimal.\".format(\n style).encode('utf-8'))\n valstr = str(val)\n return valstr", "response": "Return counter value in given style."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef eval_string_value(self, element, value):\n strval = ''\n vals = []\n\n for term in value:\n if type(term) is ast.WhitespaceToken:\n pass\n\n elif type(term) is ast.StringToken:\n strval += term.value\n\n elif type(term) is ast.IdentToken:\n log(DEBUG, u\"IdentToken as string: {}\".format(\n term.value).encode('utf-8'))\n strval += term.value\n\n elif type(term) is ast.LiteralToken:\n log(DEBUG, u\"LiteralToken as string: {}\".format(\n term.value).encode('utf-8'))\n strval += term.value\n\n elif type(term) is ast.FunctionBlock:\n if term.name == 'string':\n str_args = split(term.arguments, ',')\n str_name = self.eval_string_value(element,\n str_args[0])[0]\n val = self.lookup('strings', str_name)\n if val == '':\n if len(str_args) > 1:\n val = self.eval_string_value(element,\n str_args[1])[0]\n else:\n log(WARN, u\"{} blank string\"\n .format(str_name).encode('utf-8'))\n strval += val\n\n elif term.name == u'attr':\n att_args = split(term.arguments, ',')\n att_name = self.eval_string_value(element,\n att_args[0])[0]\n att_def = ''\n if len(att_args) > 1:\n att_def = self.eval_string_value(element,\n att_args[1])[0]\n if '|' in att_name:\n ns, att = att_name.split('|')\n try:\n ns = self.css_namespaces[ns]\n except KeyError:\n log(WARN, u\"Undefined namespace prefix {}\"\n .format(ns).encode('utf-8'))\n continue\n att_name = etree.QName(ns, att)\n strval += element.etree_element.get(att_name, att_def)\n\n elif term.name == u'uuid':\n strval += self.generate_id()\n\n elif term.name == u'content':\n strval += etree.tostring(element.etree_element,\n encoding='unicode',\n method='text',\n with_tail=False)\n\n elif term.name.startswith('target-'):\n if strval:\n vals.append(strval)\n strval = ''\n target_args = 
split(term.arguments, ',')\n vref = self.eval_string_value(element,\n target_args[0])[0]\n vname = self.eval_string_value(element,\n target_args[1])[0]\n\n vtype = term.name[7:]+'s'\n vals.append(TargetVal(self, vref[1:], vname, vtype))\n\n elif term.name == u'first-letter':\n tmpstr = self.eval_string_value(element, term.arguments)\n if tmpstr:\n if isinstance(tmpstr[0], basestring):\n strval += tmpstr[0][0]\n else:\n log(WARN, u\"Bad string value:\"\n u\" nested target-* not allowed. \"\n u\"{}\".format(\n serialize(value)).encode(\n 'utf-8'))\n\n # FIXME can we do delayed first-letter\n\n elif term.name == 'counter':\n counterargs = [serialize(t).strip(\" \\'\")\n for t in split(term.arguments, ',')]\n count = self.lookup('counters', counterargs)\n strval += str(count)\n\n elif term.name == u'pending':\n log(WARN, u\"Bad string value: pending() not allowed. \"\n u\"{}\".format(serialize(value)).encode(\n 'utf-8'))\n else:\n log(WARN, u\"Bad string value: unknown function: {}. \"\n u\"{}\".format(term.name, serialize(value)).encode(\n 'utf-8'))\n\n if strval:\n vals.append(strval)\n return vals", "response": "Evaluate parsed string.\n\n Returns a list of current and delayed values."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef do_string_set(self, element, decl, pseudo):\n args = serialize(decl.value)\n step = self.state[self.state['current_step']]\n\n strval = ''\n strname = None\n for term in decl.value:\n if type(term) is ast.WhitespaceToken:\n continue\n\n elif type(term) is ast.StringToken:\n if strname is not None:\n strval += term.value\n else:\n log(WARN, u\"Bad string-set: {}\".format(\n args).encode('utf-8'))\n\n elif type(term) is ast.IdentToken:\n if strname is not None:\n log(WARN, u\"Bad string-set: {}\".format(\n args).encode('utf-8'))\n else:\n strname = term.value\n\n elif type(term) is ast.LiteralToken:\n if strname is None:\n log(WARN, u\"Bad string-set: {}\".format(\n args).encode('utf-8'))\n 
else:\n step['strings'][strname] = strval\n strval = ''\n strname = None\n\n elif type(term) is ast.FunctionBlock:\n if term.name == 'string':\n str_args = split(term.arguments, ',')\n str_name = self.eval_string_value(element,\n str_args[0])[0]\n val = self.lookup('strings', str_name)\n if val == '':\n if len(str_args) > 1:\n val = self.eval_string_value(element,\n str_args[1])[0]\n else:\n log(WARN, u\"{} blank string\"\n .format(str_name).encode('utf-8'))\n\n if strname is not None:\n strval += val\n else:\n log(WARN, u\"Bad string-set: {}\".format(\n args).encode('utf-8'))\n\n elif term.name == 'counter':\n counterargs = [serialize(t).strip(\" \\'\")\n for t in split(term.arguments, ',')]\n count = self.lookup('counters', counterargs)\n strval += str(count)\n\n elif term.name == u'attr':\n if strname is not None:\n att_args = split(term.arguments, ',')\n att_name = self.eval_string_value(element,\n att_args[0])[0]\n att_def = ''\n if len(att_args) > 1:\n att_def = self.eval_string_value(element,\n att_args[1])[0]\n if '|' in att_name:\n ns, att = att_name.split('|')\n try:\n ns = self.css_namespaces[ns]\n except KeyError:\n log(WARN, u\"Undefined namespace prefix {}\"\n .format(ns).encode('utf-8'))\n continue\n att_name = etree.QName(ns, att)\n strval += element.etree_element.get(att_name, att_def)\n else:\n log(WARN, u\"Bad string-set: {}\".format(\n args).encode('utf-8'))\n\n elif term.name == u'content':\n if strname is not None:\n strval += etree.tostring(element.etree_element,\n encoding='unicode',\n method='text',\n with_tail=False)\n else:\n log(WARN, u\"Bad string-set: {}\".format(\n args).encode('utf-8'))\n\n elif term.name == u'first-letter':\n tmpstr = self.eval_string_value(element, term.arguments)\n if tmpstr:\n if isinstance(tmpstr[0], basestring):\n strval += tmpstr[0][0]\n else:\n log(WARN, u\"Bad string value:\"\n u\" nested target-* not allowed. 
\"\n u\"{}\".format(serialize(\n args)).encode('utf-8'))\n\n elif term.name == u'pending':\n log(WARN, u\"Bad string-set:pending() not allowed. {}\"\n .format(args).encode('utf-8'))\n\n if strname is not None:\n step['strings'][strname] = strval", "response": "Implement string - set declaration."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef do_node_set(self, element, decl, pseudo):\n target = serialize(decl.value).strip()\n step = self.state[self.state['current_step']]\n elem = self.current_target().tree\n _, valstep = self.lookup('pending', target)\n if not valstep:\n step['pending'][target] = [('nodeset', elem)]\n else:\n self.state[valstep]['pending'][target] = [('nodeset', elem)]", "response": "Implement node - set declaration."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nimplementing move - to declaration.", "response": "def do_move_to(self, element, decl, pseudo):\n \"\"\"Implement move-to declaration.\"\"\"\n target = serialize(decl.value).strip()\n step = self.state[self.state['current_step']]\n elem = self.current_target().tree\n\n # Find if the current node already has a move, and remove it.\n actions = step['actions']\n for pos, action in enumerate(reversed(actions)):\n if action[0] == 'move' and action[1] == elem:\n target_index = - pos - 1\n actions[target_index:] = actions[target_index+1:]\n break\n\n _, valstep = self.lookup('pending', target)\n if not valstep:\n step['pending'][target] = [('move', elem)]\n else:\n self.state[valstep]['pending'][target].append(('move', elem))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef do_container(self, element, decl, pseudo):\n value = serialize(decl.value).strip()\n\n if '|' in value:\n namespace, tag = value.split('|', 1)\n\n try:\n namespace = self.css_namespaces[namespace]\n except KeyError:\n log(WARN, u'undefined namespace prefix: {}'.format(\n 
namespace).encode('utf-8'))\n value = tag\n else:\n value = etree.QName(namespace, tag)\n\n step = self.state[self.state['current_step']]\n actions = step['actions']\n actions.append(('tag', value))", "response": "Implement setting tag for new wrapper element."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nimplementing class declaration - pre - match.", "response": "def do_class(self, element, decl, pseudo):\n \"\"\"Implement class declaration - pre-match.\"\"\"\n step = self.state[self.state['current_step']]\n actions = step['actions']\n strval = self.eval_string_value(element, decl.value)\n actions.append(('attrib', ('class', strval)))"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nimplement generic attribute setting.", "response": "def do_attr_any(self, element, decl, pseudo):\n \"\"\"Implement generic attribute setting.\"\"\"\n step = self.state[self.state['current_step']]\n actions = step['actions']\n strval = self.eval_string_value(element, decl.value)\n actions.append(('attrib', (decl.name[5:], strval)))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nimplementing group - by declaration - pre - match.", "response": "def do_group_by(self, element, decl, pseudo):\n \"\"\"Implement group-by declaration - pre-match.\"\"\"\n sort_css = groupby_css = flags = ''\n if ',' in decl.value:\n if decl.value.count(',') == 2:\n sort_css, groupby_css, flags = \\\n map(serialize, split(decl.value, ','))\n else:\n sort_css, groupby_css = map(serialize, split(decl.value, ','))\n else:\n sort_css = serialize(decl.value)\n if groupby_css.strip() == 'nocase':\n flags = groupby_css\n groupby_css = ''\n sort = css_to_func(sort_css, flags,\n self.css_namespaces, self.state['lang'])\n groupby = css_to_func(groupby_css, flags,\n self.css_namespaces, self.state['lang'])\n step = self.state[self.state['current_step']]\n\n target = self.current_target()\n target.sort = sort\n target.lang = 
self.state['lang']\n target.isgroup = True\n target.groupby = groupby\n # Find current target, set its sort/grouping as well\n for pos, action in \\\n enumerate(reversed(step['actions'])):\n if action[0] == 'target' and \\\n action[1].tree == element.etree_element:\n action[1].sort = sort\n action[1].isgroup = True\n action[1].groupby = groupby\n break"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef do_sort_by(self, element, decl, pseudo):\n if ',' in decl.value:\n css, flags = split(decl.value, ',')\n else:\n css = decl.value\n flags = None\n sort = css_to_func(serialize(css), serialize(flags or ''),\n self.css_namespaces, self.state['lang'])\n step = self.state[self.state['current_step']]\n\n target = self.current_target()\n target.sort = sort\n target.lang = self.state['lang']\n target.isgroup = False\n target.groupby = None\n # Find current target, set its sort as well\n for pos, action in \\\n enumerate(reversed(step['actions'])):\n if action[0] == 'target' and \\\n action[1].tree == element.etree_element:\n action[1].sort = sort\n action[1].isgroup = False\n action[1].groupby = None\n break", "response": "Implement sort - by declaration - pre - match."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn an authenticated SSH connection.", "response": "def _get_connection(host, username: str, key_path: str) \\\n -> paramiko.Transport or None:\n \"\"\"Return an authenticated SSH connection.\n\n :param host: host or IP of the machine\n :type host: str\n :param username: SSH username\n :type username: str\n :param key_path: path to the SSH private key for SSH auth\n :type key_path: str\n :return: SSH connection\n :rtype: paramiko.Transport or None\n \"\"\"\n if not username:\n username = shakedown.cli.ssh_user\n if not key_path:\n key_path = shakedown.cli.ssh_key_file\n key = validate_key(key_path)\n transport = get_transport(host, username, key)\n\n if transport:\n 
transport = start_transport(transport, username, key)\n if transport.is_authenticated():\n return transport\n else:\n print(\"error: unable to authenticate {}@{} with key {}\".format(username, host, key_path))\n else:\n print(\"error: unable to connect to {}\".format(host))\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nrunning a command on the master and returns the output of the command", "response": "def run_command(\n host,\n command,\n username=None,\n key_path=None,\n noisy=True\n):\n \"\"\" Run a command via SSH, proxied through the mesos master\n\n :param host: host or IP of the machine to execute the command on\n :type host: str\n :param command: the command to execute\n :type command: str\n :param username: SSH username\n :type username: str\n :param key_path: path to the SSH private key to use for SSH authentication\n :type key_path: str\n :return: True if successful, False otherwise\n :rtype: bool\n :return: Output of command\n :rtype: string\n \"\"\"\n \n with HostSession(host, username, key_path, noisy) as s:\n if noisy:\n print(\"\\n{}{} $ {}\\n\".format(shakedown.fchr('>>'), host, command))\n s.run(command)\n \n ec, output = s.get_result()\n return ec == 0, output"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nruns a command on the master node", "response": "def run_command_on_master(\n command,\n username=None,\n key_path=None,\n noisy=True\n):\n \"\"\" Run a command on the Mesos master\n \"\"\"\n\n return run_command(shakedown.master_ip(), command, username, key_path, noisy)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef run_command_on_leader(\n command,\n username=None,\n key_path=None,\n noisy=True\n):\n \"\"\" Run a command on the Mesos leader. 
Important for Multi-Master.\n \"\"\"\n\n return run_command(shakedown.master_leader_ip(), command, username, key_path, noisy)", "response": "Run a command on the Mesos leader."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nrunning a command on the Marathon leader", "response": "def run_command_on_marathon_leader(\n command,\n username=None,\n key_path=None,\n noisy=True\n):\n \"\"\" Run a command on the Marathon leader\n \"\"\"\n\n return run_command(shakedown.marathon_leader_ip(), command, username, key_path, noisy)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nrun a command on a Mesos agent", "response": "def run_command_on_agent(\n host,\n command,\n username=None,\n key_path=None,\n noisy=True\n):\n \"\"\" Run a command on a Mesos agent, proxied through the master\n \"\"\"\n\n return run_command(host, command, username, key_path, noisy)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nexecute a command via DC - OS and return the stdout and stderr of the command.", "response": "def run_dcos_command(command, raise_on_error=False, print_output=True):\n \"\"\" Run `dcos {command}` via DC/OS CLI\n\n :param command: the command to execute\n :type command: str\n :param raise_on_error: whether to raise a DCOSException if the return code is nonzero\n :type raise_on_error: bool\n :param print_output: whether to print the resulting stdout/stderr from running the command\n :type print_output: bool\n\n :return: (stdout, stderr, return_code)\n :rtype: tuple\n \"\"\"\n\n call = shlex.split(command)\n call.insert(0, 'dcos')\n\n print(\"\\n{}{}\\n\".format(shakedown.fchr('>>'), ' '.join(call)))\n\n proc = subprocess.Popen(call, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n output, error = proc.communicate()\n return_code = proc.wait()\n stdout = output.decode('utf-8')\n stderr = error.decode('utf-8')\n\n if print_output:\n print(stdout, stderr, return_code)\n\n if return_code != 0 
and raise_on_error:\n raise DCOSException(\n 'Got error code {} when running command \"dcos {}\":\\nstdout: \"{}\"\\nstderr: \"{}\"'.format(\n return_code, command, stdout, stderr))\n\n return stdout, stderr, return_code"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _wait_for_recv(self):\n while True:\n time.sleep(0.2)\n if self.session.recv_ready() or self.session.closed:\n return", "response": "Wait for the result of a command execution."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nattach to an already set - up cluster", "response": "def attach_cluster(url):\n \"\"\"Attach to an already set-up cluster\n :return: True if successful, else False\n \"\"\"\n with shakedown.stdchannel_redirected(sys.stderr, os.devnull):\n clusters = [c.dict() for c in dcos.cluster.get_clusters()]\n for c in clusters:\n if url == c['url']:\n try:\n dcos.cluster.set_attached(dcos.cluster.get_cluster(c['name']).get_cluster_path())\n return True\n except:\n return False\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns the version of the running cluster.", "response": "def dcos_version():\n \"\"\"Return the version of the running cluster.\n :return: DC/OS cluster version as a string\n \"\"\"\n url = _gen_url('dcos-metadata/dcos-version.json')\n response = dcos.http.request('get', url)\n\n if response.status_code == 200:\n return response.json()['version']\n else:\n return None"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nauthenticate with a DC / OS cluster and return an ACS token.", "response": "def authenticate(username, password):\n \"\"\"Authenticate with a DC/OS cluster and return an ACS token.\n return: ACS token\n \"\"\"\n url = _gen_url('acs/api/v1/auth/login')\n\n creds = {\n 'uid': username,\n 'password': password\n }\n\n response = dcos.http.request('post', url, 
json=creds)\n\n if response.status_code == 200:\n return response.json()['token']\n else:\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef authenticate_oauth(oauth_token):\n url = _gen_url('acs/api/v1/auth/login')\n\n creds = {\n 'token': oauth_token\n }\n\n response = dcos.http.request('post', url, json=creds)\n\n if response.status_code == 200:\n return response.json()['token']\n else:\n return None", "response": "Authenticate by checking for a valid OAuth token."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ngenerate an absolute URL by combining DC OS URL and url_path.", "response": "def _gen_url(url_path):\n \"\"\"Return an absolute URL by combining DC/OS URL and url_path.\n\n :param url_path: path to append to DC/OS URL\n :type url_path: str\n :return: absolute URL\n :rtype: str\n \"\"\"\n from six.moves import urllib\n return urllib.parse.urljoin(dcos_url(), url_path)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\npartition master's port alone. To keep DC/OS cluster running. :param incoming: Partition incoming traffic to master process. Default True. :param outgoing: Partition outgoing traffic from master process. Default True.", "response": "def partition_master(incoming=True, outgoing=True):\n \"\"\" Partition master's port alone. To keep DC/OS cluster running.\n\n :param incoming: Partition incoming traffic to master process. Default True.\n :param outgoing: Partition outgoing traffic from master process. Default True.\n \"\"\"\n\n echo('Partitioning master. 
Incoming:{} | Outgoing:{}'.format(incoming, outgoing))\n\n network.save_iptables(shakedown.master_ip())\n network.flush_all_rules(shakedown.master_ip())\n network.allow_all_traffic(shakedown.master_ip())\n\n if incoming and outgoing:\n network.run_iptables(shakedown.master_ip(), DISABLE_MASTER_INCOMING)\n network.run_iptables(shakedown.master_ip(), DISABLE_MASTER_OUTGOING)\n elif incoming:\n network.run_iptables(shakedown.master_ip(), DISABLE_MASTER_INCOMING)\n elif outgoing:\n network.run_iptables(shakedown.master_ip(), DISABLE_MASTER_OUTGOING)\n else:\n pass"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_all_masters():\n masters = []\n for master in __master_zk_nodes_keys():\n master_zk_str = get_zk_node_data(master)['str']\n masters.append(json.loads(master_zk_str))\n\n return masters", "response": "Returns the json object that represents each of the masters."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_public_agents_public_ip():\n public_ip_list = []\n agents = get_public_agents()\n for agent in agents:\n status, public_ip = shakedown.run_command_on_agent(agent, \"/opt/mesosphere/bin/detect_ip_public\")\n public_ip_list.append(public_ip)\n\n return public_ip_list", "response": "Provides a list public IPs for public agents in the cluster"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nprovides a list of hostnames / private IPs that are public agents in the cluster", "response": "def get_public_agents():\n \"\"\"Provides a list of hostnames / private IPs that are public agents in the cluster\"\"\"\n agent_list = []\n agents = __get_all_agents()\n for agent in agents:\n for reservation in agent[\"reserved_resources\"]:\n if \"slave_public\" in reservation:\n agent_list.append(agent[\"hostname\"])\n\n return agent_list"} {"SOURCE": "codesearchnet", "instruction": "Create a 
Python 3 function to\nprovide a list of hostnames / IPs that are private agents in the cluster", "response": "def get_private_agents():\n \"\"\"Provides a list of hostnames / IPs that are private agents in the cluster\"\"\"\n\n agent_list = []\n agents = __get_all_agents()\n for agent in agents:\n if(len(agent[\"reserved_resources\"]) == 0):\n agent_list.append(agent[\"hostname\"])\n else:\n private = True\n for reservation in agent[\"reserved_resources\"]:\n if(\"slave_public\" in reservation):\n private = False\n\n if(private):\n agent_list.append(agent[\"hostname\"])\n\n return agent_list"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_agents():\n\n agent_list = []\n agents = __get_all_agents()\n for agent in agents:\n agent_list.append(agent[\"hostname\"])\n\n return agent_list", "response": "Provides a list of hostnames / IPs of all agents in the cluster"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\npartition agent from all network traffic except for SSH and loopback.", "response": "def partition_agent(host):\n \"\"\" Partition a node from all network traffic except for SSH and loopback\n\n :param hostname: host or IP of the machine to partition from the cluster\n \"\"\"\n\n network.save_iptables(host)\n network.flush_all_rules(host)\n network.allow_all_traffic(host)\n network.run_iptables(host, ALLOW_SSH)\n network.run_iptables(host, ALLOW_PING)\n network.run_iptables(host, DISALLOW_MESOS)\n network.run_iptables(host, DISALLOW_INPUT)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nkills the process matching pattern at ip", "response": "def kill_process_on_host(\n hostname,\n pattern\n):\n \"\"\" Kill the process matching pattern at ip\n\n :param hostname: the hostname or ip address of the host on which the process will be killed\n :param pattern: a regular expression matching the name of the process to kill\n \"\"\"\n\n status, 
stdout = run_command_on_agent(hostname, \"ps aux | grep -v grep | grep '{}'\".format(pattern))\n pids = [p.strip().split()[1] for p in stdout.splitlines()]\n\n for pid in pids:\n status, stdout = run_command_on_agent(hostname, \"sudo kill -9 {}\".format(pid))\n if status:\n print(\"Killed pid: {}\".format(pid))\n else:\n print(\"Unable to killed pid: {}\".format(pid))"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nkill a process from a pid file on the host.", "response": "def kill_process_from_pid_file_on_host(hostname, pid_file='app.pid'):\n \"\"\" Retrieves the PID of a process from a pid file on host and kills it.\n\n :param hostname: the hostname or ip address of the host on which the process will be killed\n :param pid_file: pid file to use holding the pid number to kill\n \"\"\"\n status, pid = run_command_on_agent(hostname, 'cat {}'.format(pid_file))\n status, stdout = run_command_on_agent(hostname, \"sudo kill -9 {}\".format(pid))\n if status:\n print(\"Killed pid: {}\".format(pid))\n run_command_on_agent(hostname, 'rm {}'.format(pid_file))\n else:\n print(\"Unable to killed pid: {}\".format(pid))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nwaiting or spins for a predicate returning the result.", "response": "def wait_for(\n predicate,\n timeout_seconds=120,\n sleep_seconds=1,\n ignore_exceptions=True,\n inverse_predicate=False,\n noisy=False,\n required_consecutive_success_count=1):\n \"\"\" waits or spins for a predicate, returning the result.\n Predicate is a function that returns a truthy or falsy value.\n An exception in the function will be returned.\n A timeout will throw a TimeoutExpired Exception.\n\n \"\"\"\n count = 0\n start_time = time_module.time()\n timeout = Deadline.create_deadline(timeout_seconds)\n while True:\n try:\n result = predicate()\n except Exception as e:\n if ignore_exceptions:\n if noisy:\n logger.exception(\"Ignoring error during wait.\")\n else:\n 
count = 0\n raise # preserve original stack\n else:\n if (not inverse_predicate and result) or (inverse_predicate and not result):\n count = count + 1\n if count >= required_consecutive_success_count:\n return result\n\n if timeout.is_expired():\n funname = __stringify_predicate(predicate)\n raise TimeoutExpired(timeout_seconds, funname)\n if noisy:\n header = '{}[{}/{}]'.format(\n shakedown.cli.helpers.fchr('>>'),\n pretty_duration(time_module.time() - start_time),\n pretty_duration(timeout_seconds)\n )\n if required_consecutive_success_count > 1:\n header = '{} [{} of {} times]'.format(\n header,\n count,\n required_consecutive_success_count)\n print('{} spinning...'.format(header))\n time_module.sleep(sleep_seconds)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef __stringify_predicate(predicate):\n funname = getsource(predicate).strip().split(' ')[2].rstrip(',')\n params = 'None'\n\n # if args dig in the stack\n if '()' not in funname:\n stack = getouterframes(currentframe())\n for frame in range(0, len(stack)):\n if funname in str(stack[frame]):\n _, _, _, params = getargvalues(stack[frame][0])\n\n return \"function: {} params: {}\".format(funname, params)", "response": "Return a string representation of the predicate."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef time_wait(\n predicate,\n timeout_seconds=120,\n sleep_seconds=1,\n ignore_exceptions=True,\n inverse_predicate=False,\n noisy=True,\n required_consecutive_success_count=1):\n \"\"\" waits or spins for a predicate and returns the time of the wait.\n An exception in the function will be returned.\n A timeout will throw a TimeoutExpired Exception.\n\n \"\"\"\n start = time_module.time()\n wait_for(predicate, timeout_seconds, sleep_seconds, ignore_exceptions, inverse_predicate, noisy, required_consecutive_success_count)\n return elapse_time(start)", "response": 
"Time a predicate and returns the time of the wait."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nwait for a predicate ignoring exceptions and returns the result.", "response": "def wait_while_exceptions(\n predicate,\n timeout_seconds=120,\n sleep_seconds=1,\n noisy=False):\n \"\"\" waits for a predicate, ignoring exceptions, returning the result.\n Predicate is a function.\n Exceptions will trigger the sleep and retry; any non-exception result\n will be returned.\n A timeout will throw a TimeoutExpired Exception.\n \"\"\"\n start_time = time_module.time()\n timeout = Deadline.create_deadline(timeout_seconds)\n while True:\n try:\n result = predicate()\n return result\n except Exception as e:\n if noisy:\n logger.exception(\"Ignoring error during wait.\")\n\n if timeout.is_expired():\n funname = __stringify_predicate(predicate)\n raise TimeoutExpired(timeout_seconds, funname)\n if noisy:\n header = '{}[{}/{}]'.format(\n shakedown.cli.helpers.fchr('>>'),\n pretty_duration(time_module.time() - start_time),\n pretty_duration(timeout_seconds)\n )\n print('{} spinning...'.format(header))\n time_module.sleep(sleep_seconds)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a user - friendly representation of the provided duration in seconds.", "response": "def pretty_duration(seconds):\n \"\"\" Returns a user-friendly representation of the provided duration in seconds.\n For example: 62.8 => \"1m2.8s\", or 129837.8 => \"2d12h4m57.8s\"\n \"\"\"\n if seconds is None:\n return ''\n ret = ''\n if seconds >= 86400:\n ret += '{:.0f}d'.format(int(seconds / 86400))\n seconds = seconds % 86400\n if seconds >= 3600:\n ret += '{:.0f}h'.format(int(seconds / 3600))\n seconds = seconds % 3600\n if seconds >= 60:\n ret += '{:.0f}m'.format(int(seconds / 60))\n seconds = seconds % 60\n if seconds > 0:\n ret += '{:.1f}s'.format(seconds)\n return ret"} {"SOURCE": "codesearchnet", "instruction": 
"How would you explain what the following Python 3 function does\ndef read_config(args):\n\n configfile = os.path.expanduser('~/.shakedown')\n\n if os.path.isfile(configfile):\n with open(configfile, 'r') as f:\n config = toml.loads(f.read())\n\n for key in config:\n param = key.replace('-', '_')\n\n if not param in args or args[param] in [False, None]:\n args[param] = config[key]\n\n return args", "response": "Read configuration options from ~/.shakedown."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set_config_defaults(args):\n\n defaults = {\n 'fail': 'fast',\n 'stdout': 'fail'\n }\n\n for key in defaults:\n if not args[key]:\n args[key] = defaults[key]\n\n return args", "response": "Set configuration defaults for the available language."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ndisplay a delightful Mesosphere logo", "response": "def banner():\n \"\"\" Display a product banner\n\n :return: a delightful Mesosphere logo rendered in unicode\n :rtype: str\n \"\"\"\n\n banner_dict = {\n 'a0': click.style(chr(9601), fg='magenta'),\n 'a1': click.style(chr(9601), fg='magenta', bold=True),\n 'b0': click.style(chr(9616), fg='magenta'),\n 'c0': click.style(chr(9626), fg='magenta'),\n 'c1': click.style(chr(9626), fg='magenta', bold=True),\n 'd0': click.style(chr(9622), fg='magenta'),\n 'd1': click.style(chr(9622), fg='magenta', bold=True),\n 'e0': click.style(chr(9623), fg='magenta'),\n 'e1': click.style(chr(9623), fg='magenta', bold=True),\n 'f0': click.style(chr(9630), fg='magenta'),\n 'f1': click.style(chr(9630), fg='magenta', bold=True),\n 'g1': click.style(chr(9612), fg='magenta', bold=True),\n 'h0': click.style(chr(9624), fg='magenta'),\n 'h1': click.style(chr(9624), fg='magenta', bold=True),\n 'i0': click.style(chr(9629), fg='magenta'),\n 'i1': click.style(chr(9629), fg='magenta', bold=True),\n 'j0': click.style(fchr('>>'), fg='magenta'),\n 'k0': click.style(chr(9473), 
fg='magenta'),\n 'l0': click.style('_', fg='magenta'),\n 'l1': click.style('_', fg='magenta', bold=True),\n 'v0': click.style('mesosphere', fg='magenta'),\n 'x1': click.style('shakedown', fg='magenta', bold=True),\n 'y0': click.style('v' + shakedown.VERSION, fg='magenta'),\n 'z0': chr(32)\n }\n\n banner_map = [\n \" %(z0)s%(z0)s%(l0)s%(l0)s%(l1)s%(l0)s%(l1)s%(l1)s%(l1)s%(l1)s%(l1)s%(l1)s%(l1)s%(l1)s\",\n \" %(z0)s%(b0)s%(z0)s%(c0)s%(z0)s%(d0)s%(z0)s%(z0)s%(z0)s%(z0)s%(e1)s%(z0)s%(f1)s%(z0)s%(g1)s\",\n \" %(z0)s%(b0)s%(z0)s%(z0)s%(c0)s%(z0)s%(h0)s%(e0)s%(d1)s%(i1)s%(z0)s%(f1)s%(z0)s%(z0)s%(g1)s%(z0)s%(j0)s%(v0)s %(x1)s %(y0)s\",\n \" %(z0)s%(b0)s%(z0)s%(z0)s%(f0)s%(c0)s%(i0)s%(z0)s%(z0)s%(h1)s%(f1)s%(c1)s%(z0)s%(z0)s%(g1)s%(z0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(k0)s%(z0)s%(k0)s%(k0)s%(z0)s%(z0)s%(k0)s\",\n \" %(z0)s%(i0)s%(f0)s%(h0)s%(z0)s%(z0)s%(c0)s%(z0)s%(z0)s%(f0)s%(z0)s%(z0)s%(i1)s%(c1)s%(h1)s\",\n \" %(z0)s%(z0)s%(z0)s%(z0)s%(z0)s%(z0)s%(z0)s%(c0)s%(f0)s\",\n ]\n\n if 'TERM' in os.environ and os.environ['TERM'] in ('velocity', 'xterm', 'xterm-256color', 'xterm-color'):\n return echo(\"\\n\".join(banner_map) % banner_dict)\n else:\n return echo(fchr('>>') + 'mesosphere shakedown v' + shakedown.VERSION, b=True)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef decorate(text, style):\n\n return {\n 'step-maj': click.style(\"\\n\" + '> ' + text, fg='yellow', bold=True),\n 'step-min': click.style(' - ' + text + ' ', bold=True),\n 'item-maj': click.style(' - ' + text + ' '),\n 'item-min': click.style(' - ' + text + ' '),\n 'quote-head-fail': click.style(\"\\n\" + chr(9485) + (chr(9480)*2) + ' ' + text, fg='red'),\n 'quote-head-pass': click.style(\"\\n\" + chr(9485) + (chr(9480)*2) + ' ' + text, fg='green'),\n 'quote-head-skip': click.style(\"\\n\" + chr(9485) + (chr(9480)*2) + ' ' + 
text, fg='yellow'),\n 'quote-fail': re.sub('^', click.style(chr(9482) + ' ', fg='red'), text, flags=re.M),\n 'quote-pass': re.sub('^', click.style(chr(9482) + ' ', fg='green'), text, flags=re.M),\n 'quote-skip': re.sub('^', click.style(chr(9482) + ' ', fg='yellow'), text, flags=re.M),\n 'fail': click.style(text + ' ', fg='red'),\n 'pass': click.style(text + ' ', fg='green'),\n 'skip': click.style(text + ' ', fg='yellow')\n }.get(style, '')", "response": "Console decoration style definitions for the base class"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nprints the results to the console", "response": "def echo(text, **kwargs):\n \"\"\" Print results to the console\n\n :param text: the text string to print\n :type text: str\n\n :return: a string\n :rtype: str\n \"\"\"\n\n if shakedown.cli.quiet:\n return\n\n if not 'n' in kwargs:\n kwargs['n'] = True\n\n if 'd' in kwargs:\n text = decorate(text, kwargs['d'])\n\n if 'TERM' in os.environ and os.environ['TERM'] == 'velocity':\n if text:\n print(text, end=\"\", flush=True)\n if kwargs.get('n'):\n print()\n else:\n click.echo(text, nl=kwargs.get('n'))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_user(uid, password, desc=None):\n try:\n desc = uid if desc is None else desc\n user_object = {\"description\": desc, \"password\": password}\n acl_url = urljoin(_acl_url(), 'users/{}'.format(uid))\n r = http.put(acl_url, json=user_object)\n assert r.status_code == 201\n except DCOSHTTPException as e:\n # already exists\n if e.response.status_code != 409:\n raise", "response": "Adds a user to the DCOS Enterprise."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_user(uid):\n try:\n acl_url = urljoin(_acl_url(), 'users/{}'.format(uid))\n r = http.get(acl_url)\n return r.json()\n # assert r.status_code == 201\n except DCOSHTTPException as e:\n if 
e.response.status_code == 400:\n return None\n else:\n raise", "response": "Returns a user from the DCOS Enterprise."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef remove_user(uid):\n try:\n acl_url = urljoin(_acl_url(), 'users/{}'.format(uid))\n r = http.delete(acl_url)\n assert r.status_code == 204\n except DCOSHTTPException as e:\n # doesn't exist\n if e.response.status_code != 400:\n raise", "response": "Removes a user from the DCOS Enterprise."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef ensure_resource(rid):\n try:\n acl_url = urljoin(_acl_url(), 'acls/{}'.format(rid))\n r = http.put(acl_url, json={'description': 'jope'})\n assert r.status_code == 201\n except DCOSHTTPException as e:\n if e.response.status_code != 409:\n raise", "response": "Ensures that a resource is added into the DCOS Enterprise System."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset the user permission on a resource.", "response": "def set_user_permission(rid, uid, action='full'):\n \"\"\" Sets users permission on a given resource. The resource will be created\n if it doesn't exist. 
Actions are: read, write, update, delete, full.\n\n :param uid: user id\n :type uid: str\n :param rid: resource ID\n :type rid: str\n :param action: read, write, update, delete or full\n :type action: str\n \"\"\"\n rid = rid.replace('/', '%252F')\n # Create ACL if it does not yet exist.\n ensure_resource(rid)\n\n # Set the permission triplet.\n try:\n acl_url = urljoin(_acl_url(), 'acls/{}/users/{}/{}'.format(rid, uid, action))\n r = http.put(acl_url)\n assert r.status_code == 204\n except DCOSHTTPException as e:\n if e.response.status_code != 409:\n raise"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef remove_user_permission(rid, uid, action='full'):\n rid = rid.replace('/', '%252F')\n\n try:\n acl_url = urljoin(_acl_url(), 'acls/{}/users/{}/{}'.format(rid, uid, action))\n r = http.delete(acl_url)\n assert r.status_code == 204\n except DCOSHTTPException as e:\n if e.response.status_code != 400:\n raise", "response": "Removes a user permission on a resource."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nprovides a context with no logged in user.", "response": "def no_user():\n \"\"\" Provides a context with no logged in user.\n \"\"\"\n o_token = dcos_acs_token()\n dcos.config.set_val('core.dcos_acs_token', '')\n yield\n dcos.config.set_val('core.dcos_acs_token', o_token)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nprovide a context with a newly created user.", "response": "def new_dcos_user(user_id, password):\n \"\"\" Provides a context with a newly created user.\n \"\"\"\n o_token = dcos_acs_token()\n shakedown.add_user(user_id, password, user_id)\n\n token = shakedown.authenticate(user_id, password)\n dcos.config.set_val('core.dcos_acs_token', token)\n yield\n dcos.config.set_val('core.dcos_acs_token', o_token)\n shakedown.remove_user(user_id)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function 
that\nprovides a context with user other than super", "response": "def dcos_user(user_id, password):\n \"\"\" Provides a context with user otherthan super\n \"\"\"\n\n o_token = dcos_acs_token()\n\n token = shakedown.authenticate(user_id, password)\n dcos.config.set_val('core.dcos_acs_token', token)\n yield\n dcos.config.set_val('core.dcos_acs_token', o_token)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nadd a group to the Enterprise.", "response": "def add_group(id, description=None):\n \"\"\" Adds group to the DCOS Enterprise. If not description\n is provided the id will be used for the description.\n\n :param id: group id\n :type id: str\n :param desc: description of user\n :type desc: str\n \"\"\"\n\n if not description:\n description = id\n data = {\n 'description': description\n }\n acl_url = urljoin(_acl_url(), 'groups/{}'.format(id))\n try:\n r = http.put(acl_url, json=data)\n assert r.status_code == 201\n except DCOSHTTPException as e:\n if e.response.status_code != 409:\n raise"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a group from the DCOS Enterprise.", "response": "def get_group(id):\n \"\"\" Returns a group from the DCOS Enterprise. It returns None if none exists.\n\n :param id: group id\n :type id: str\n :return: Group\n :rtype: dict\n \"\"\"\n acl_url = urljoin(_acl_url(), 'groups/{}'.format(id))\n try:\n r = http.get(acl_url)\n return r.json()\n except DCOSHTTPException as e:\n if e.response.status_code != 400:\n raise"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nremoves a group from the DCOS Enterprise.", "response": "def remove_group(id):\n \"\"\" Removes a group from the DCOS Enterprise. 
The group is\n removed regardless of associated users.\n\n :param id: group id\n :type id: str\n \"\"\"\n acl_url = urljoin(_acl_url(), 'groups/{}'.format(id))\n try:\n r = http.delete(acl_url)\n print(r.status_code)\n except DCOSHTTPException as e:\n if e.response.status_code != 400:\n raise"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding a user to a group.", "response": "def add_user_to_group(uid, gid, exist_ok=True):\n \"\"\" Adds a user to a group within DCOS Enterprise. The group and\n user must exist.\n\n :param uid: user id\n :type uid: str\n :param gid: group id\n :type gid: str\n :param exist_ok: True if it is ok for the relationship to pre-exist.\n :type exist_ok: bool\n \"\"\"\n acl_url = urljoin(_acl_url(), 'groups/{}/users/{}'.format(gid, uid))\n try:\n r = http.put(acl_url)\n assert r.status_code == 204\n except DCOSHTTPException as e:\n if e.response.status_code == 409 and exist_ok:\n pass\n else:\n raise"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef remove_user_from_group(uid, gid):\n acl_url = urljoin(_acl_url(), 'groups/{}/users/{}'.format(gid, uid))\n try:\n r = http.delete(acl_url)\n assert r.status_code == 204\n except dcos.errors.DCOSBadRequest:\n pass", "response": "Removes a user from a group within DCOS Enterprise."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nprocessing the given HTML file stream with the css stream.", "response": "def easybake(css_in, html_in=sys.stdin, html_out=sys.stdout, last_step=None,\n coverage_file=None, use_repeatable_ids=False):\n \"\"\"Process the given HTML file stream with the css stream.\"\"\"\n html_doc = etree.parse(html_in)\n oven = Oven(css_in, use_repeatable_ids)\n oven.bake(html_doc, last_step)\n\n # serialize out HTML\n print(etree.tostring(html_doc, method=\"xml\").decode('utf-8'),\n file=html_out)\n\n # generate CSS coverage_file file\n if coverage_file:\n 
print('SF:{}'.format(css_in.name), file=coverage_file)\n print(oven.get_coverage_report(), file=coverage_file)\n print('end_of_record', file=coverage_file)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef main(argv=None):\n parser = argparse.ArgumentParser(description=\"Process raw HTML to baked\"\n \" (embedded numbering and\"\n \" collation)\")\n parser.add_argument('-v', '--version', action=\"version\",\n version=__version__, help='Report the library version')\n parser.add_argument(\"css_rules\",\n type=argparse.FileType('rb'),\n help=\"CSS3 ruleset stylesheet recipe\")\n parser.add_argument(\"html_in\", nargs=\"?\",\n type=argparse.FileType('r'),\n help=\"raw HTML file to bake (default stdin)\",\n default=sys.stdin)\n parser.add_argument(\"html_out\", nargs=\"?\",\n type=argparse.FileType('w'),\n help=\"baked HTML file output (default stdout)\",\n default=sys.stdout)\n parser.add_argument('-s', '--stop-at', action='store', metavar='',\n help='Stop baking just before given pass name')\n parser.add_argument('-d', '--debug', action='store_true',\n help='Send debugging info to stderr')\n parser.add_argument('-q', '--quiet', action='store_true',\n help=\"Quiet all on stderr except errors\")\n parser.add_argument('-c', '--coverage-file', metavar='coverage.lcov',\n type=FileTypeExt('w'),\n help=\"output coverage file (lcov format). 
If \"\n \"filename starts with '+', append coverage info.\")\n parser.add_argument('--use-repeatable-ids', action='store_true',\n help=\"use repeatable id attributes instead of uuids \"\n \"which is useful for diffing\")\n args = parser.parse_args(argv)\n\n formatter = logging.Formatter('%(name)s %(levelname)s %(message)s')\n handler = logging.StreamHandler(sys.stderr)\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n\n use_quiet_log = (args.quiet and logging.ERROR)\n use_debug_log = (args.debug and logging.DEBUG)\n\n # Debug option takes higher priority than quiet warnings option\n logger.setLevel(use_debug_log or use_quiet_log or logging.WARNING)\n\n try:\n easybake(args.css_rules, args.html_in, args.html_out, args.stop_at,\n args.coverage_file, args.use_repeatable_ids)\n finally:\n if args.css_rules:\n args.css_rules.close()\n if args.html_in:\n args.html_in.close()\n if args.html_out:\n args.html_out.close()\n if args.coverage_file:\n args.coverage_file.close()", "response": "Commandline script wrapping Baker."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef mom_version(name='marathon-user'):\n if service_available_predicate(name):\n with marathon_on_marathon(name):\n return marathon_version()\n else:\n # We can either skip the corresponding test by returning False\n # or raise an exception.\n print('WARN: {} MoM not found. Version is None'.format(name))\n return None", "response": "Returns the version of marathon on marathon."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef mom_version_less_than(version, name='marathon-user'):\n if service_available_predicate(name):\n return mom_version() < LooseVersion(version)\n else:\n # We can either skip the corresponding test by returning False\n # or raise an exception.\n print('WARN: {} MoM not found. 
mom_version_less_than({}) is False'.format(name, version))\n return False", "response": "Returns True if the given MoM has a version less than the given version."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate a transport object for the given host username and key.", "response": "def get_transport(host, username, key):\n \"\"\" Create a transport object\n\n :param host: the hostname to connect to\n :type host: str\n :param username: SSH username\n :type username: str\n :param key: key object used for authentication\n :type key: paramiko.RSAKey\n\n :return: a transport object\n :rtype: paramiko.Transport\n \"\"\"\n\n if host == shakedown.master_ip():\n transport = paramiko.Transport(host)\n else:\n transport_master = paramiko.Transport(shakedown.master_ip())\n transport_master = start_transport(transport_master, username, key)\n\n if not transport_master.is_authenticated():\n print(\"error: unable to authenticate {}@{} with key {}\".format(username, shakedown.master_ip(), key))\n return False\n\n try:\n channel = transport_master.open_channel('direct-tcpip', (host, 22), ('127.0.0.1', 0))\n except paramiko.SSHException:\n print(\"error: unable to connect to {}\".format(host))\n return False\n\n transport = paramiko.Transport(channel)\n\n return transport"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef start_transport(transport, username, key):\n\n transport.start_client()\n\n agent = paramiko.agent.Agent()\n keys = itertools.chain((key,) if key else (), agent.get_keys())\n for test_key in keys:\n try:\n transport.auth_publickey(username, test_key)\n break\n except paramiko.AuthenticationException as e:\n pass\n else:\n raise ValueError('No valid key supplied')\n\n return transport", "response": "Begin a transport client and authenticate it with the given username and key."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nvalidate a key in a key_path and 
return a paramiko. RSAKey object.", "response": "def validate_key(key_path):\n \"\"\" Validate a key\n\n :param key_path: path to a key to use for authentication\n :type key_path: str\n\n :return: key object used for authentication\n :rtype: paramiko.RSAKey\n \"\"\"\n\n key_path = os.path.expanduser(key_path)\n\n if not os.path.isfile(key_path):\n return False\n\n return paramiko.RSAKey.from_private_key_file(key_path)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_service(\n service_name,\n inactive=False,\n completed=False\n):\n \"\"\" Get a dictionary describing a service\n :param service_name: the service name\n :type service_name: str\n :param inactive: whether to include inactive services\n :type inactive: bool\n :param completed: whether to include completed services\n :type completed: bool\n\n :return: a dict describing a service\n :rtype: dict, or None\n \"\"\"\n\n services = mesos.get_master().frameworks(inactive=inactive, completed=completed)\n\n for service in services:\n if service['name'] == service_name:\n return service\n\n return None", "response": "Get a dictionary describing a service"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget the framework ID for a service", "response": "def get_service_framework_id(\n service_name,\n inactive=False,\n completed=False\n):\n \"\"\" Get the framework ID for a service\n :param service_name: the service name\n :type service_name: str\n :param inactive: whether to include inactive services\n :type inactive: bool\n :param completed: whether to include completed services\n :type completed: bool\n\n :return: a framework id\n :rtype: str, or None\n \"\"\"\n\n service = get_service(service_name, inactive, completed)\n\n if service is not None and service['id']:\n return service['id']\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget a list of tasks associated with a 
service", "response": "def get_service_tasks(\n service_name,\n inactive=False,\n completed=False\n):\n \"\"\" Get a list of tasks associated with a service\n :param service_name: the service name\n :type service_name: str\n :param inactive: whether to include inactive services\n :type inactive: bool\n :param completed: whether to include completed services\n :type completed: bool\n\n :return: a list of task objects\n :rtye: [dict], or None\n \"\"\"\n\n service = get_service(service_name, inactive, completed)\n\n if service is not None and service['tasks']:\n return service['tasks']\n\n return []"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_service_task_ids(\n service_name,\n task_predicate=None,\n inactive=False,\n completed=False\n):\n \"\"\" Get a list of task IDs associated with a service\n :param service_name: the service name\n :type service_name: str\n :param task_predicate: filter function which accepts a task object and returns a boolean\n :type task_predicate: function, or None\n :param inactive: whether to include inactive services\n :type inactive: bool\n :param completed: whether to include completed services\n :type completed: bool\n\n :return: a list of task ids\n :rtye: [str], or None\n \"\"\"\n tasks = get_service_tasks(service_name, inactive, completed)\n if task_predicate:\n return [t['id'] for t in tasks if task_predicate(t)]\n else:\n return [t['id'] for t in tasks]", "response": "Get a list of task IDs associated with a service."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting a dictionary describing a service task", "response": "def get_service_task(\n service_name,\n task_name,\n inactive=False,\n completed=False\n):\n \"\"\" Get a dictionary describing a service task, or None\n :param service_name: the service name\n :type service_name: str\n :param task_name: the task name\n :type task_name: str\n :param inactive: whether to include 
inactive services\n :type inactive: bool\n :param completed: whether to include completed services\n :type completed: bool\n\n :return: a dictionary describing the service\n :rtye: dict, or None\n \"\"\"\n\n service = get_service_tasks(service_name, inactive, completed)\n\n if service is not None:\n for task in service:\n if task['name'] == task_name:\n return task\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget a dictionary describing a named marathon task", "response": "def get_marathon_task(\n task_name,\n inactive=False,\n completed=False\n):\n \"\"\" Get a dictionary describing a named marathon task\n \"\"\"\n\n return get_service_task('marathon', task_name, inactive, completed)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets a mesos task with a specific task name", "response": "def get_mesos_task(task_name):\n \"\"\" Get a mesos task with a specific task name\n \"\"\"\n tasks = get_mesos_tasks()\n\n if tasks is not None:\n for task in tasks:\n if task['name'] == task_name:\n return task\n return None"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_service_ips(\n service_name,\n task_name=None,\n inactive=False,\n completed=False\n):\n \"\"\" Get a set of the IPs associated with a service\n :param service_name: the service name\n :type service_name: str\n :param task_name: the task name\n :type task_name: str\n :param inactive: wehther to include inactive services\n :type inactive: bool\n :param completed: whether to include completed services\n :type completed: bool\n\n :return: a list of IP addresses\n :rtype: [str]\n \"\"\"\n\n service_tasks = get_service_tasks(service_name, inactive, completed)\n\n ips = set([])\n\n for task in service_tasks:\n if task_name is None or task['name'] == task_name:\n for status in task['statuses']:\n # Only the TASK_RUNNING status will have correct IP 
information.\n if status[\"state\"] != \"TASK_RUNNING\":\n continue\n\n for ip in status['container_status']['network_infos'][0]['ip_addresses']:\n ips.add(ip['ip_address'])\n\n return ips", "response": "Get a list of the IPs associated with a service and task."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef service_healthy(service_name, app_id=None):\n\n marathon_client = marathon.create_client()\n apps = marathon_client.get_apps_for_framework(service_name)\n\n if apps:\n for app in apps:\n if (app_id is not None) and (app['id'] != \"/{}\".format(str(app_id))):\n continue\n\n if (app['tasksHealthy']) \\\n and (app['tasksRunning']) \\\n and (not app['tasksStaged']) \\\n and (not app['tasksUnhealthy']):\n return True\n\n return False", "response": "Check if a service is healthy."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndelete any persistent data associated with the specified role and zk node.", "response": "def delete_persistent_data(role, zk_node):\n \"\"\" Deletes any persistent data associated with the specified role, and zk node.\n\n :param role: the mesos role to delete, or None to omit this\n :type role: str\n :param zk_node: the zookeeper node to be deleted, or None to skip this deletion\n :type zk_node: str\n \"\"\"\n if role:\n destroy_volumes(role)\n unreserve_resources(role)\n\n if zk_node:\n delete_zk_node(zk_node)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ndestroy all volumes on all the slaves in the cluster for the given role.", "response": "def destroy_volumes(role):\n \"\"\" Destroys all volumes on all the slaves in the cluster for the role.\n \"\"\"\n state = dcos_agents_state()\n if not state or 'slaves' not in state.keys():\n return False\n all_success = True\n for agent in state['slaves']:\n if not destroy_volume(agent, role):\n all_success = False\n return all_success"} {"SOURCE": "codesearchnet", "instruction": "Can you tell 
what is the following Python 3 function doing\ndef destroy_volume(agent, role):\n volumes = []\n agent_id = agent['id']\n\n reserved_resources_full = agent.get('reserved_resources_full', None)\n if not reserved_resources_full:\n # doesn't exist\n return True\n\n reserved_resources = reserved_resources_full.get(role, None)\n if not reserved_resources:\n # doesn't exist\n return True\n\n for reserved_resource in reserved_resources:\n name = reserved_resource.get('name', None)\n disk = reserved_resource.get('disk', None)\n\n if name == 'disk' and disk is not None and 'persistence' in disk:\n volumes.append(reserved_resource)\n\n req_url = urljoin(master_url(), 'destroy-volumes')\n data = {\n 'slaveId': agent_id,\n 'volumes': json.dumps(volumes)\n }\n\n success = False\n try:\n response = http.post(req_url, data=data)\n success = 200 <= response.status_code < 300\n if response.status_code == 409:\n # thoughts on what to do here? throw exception\n # i would rather not print\n print('''###\\nIs a framework using these resources still installed?\\n###''')\n except DCOSHTTPException as e:\n print(\"HTTP {}: Unabled to delete volume based on: {}\".format(\n e.response.status_code,\n e.response.text))\n\n return success", "response": "Delete the volumes on the specific agent for the specific role."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef unreserve_resources(role):\n state = dcos_agents_state()\n if not state or 'slaves' not in state.keys():\n return False\n all_success = True\n for agent in state['slaves']:\n if not unreserve_resource(agent, role):\n all_success = False\n return all_success", "response": "Unreserves all the resources for all the slaves for the role."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef wait_for_service_endpoint(service_name, timeout_sec=120):\n\n master_count = len(get_all_masters())\n return time_wait(lambda: 
service_available_predicate(service_name),\n timeout_seconds=timeout_sec,\n required_consecutive_success_count=master_count)", "response": "Checks the service url if available it returns true on expiration\n it returns false on timeout_sec"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef task_states_predicate(service_name, expected_task_count, expected_task_states):\n try:\n tasks = get_service_tasks(service_name)\n except (DCOSConnectionError, DCOSHTTPException):\n tasks = []\n matching_tasks = []\n other_tasks = []\n for t in tasks:\n name = t.get('name', 'UNKNOWN_NAME')\n state = t.get('state', None)\n if state and state in expected_task_states:\n matching_tasks.append(name)\n else:\n other_tasks.append('{}={}'.format(name, state))\n print('expected {} tasks in {}:\\n- {} in expected {}: {}\\n- {} in other states: {}'.format(\n expected_task_count, ', '.join(expected_task_states),\n len(matching_tasks), ', '.join(expected_task_states), ', '.join(matching_tasks),\n len(other_tasks), ', '.join(other_tasks)))\n return len(matching_tasks) >= expected_task_count", "response": "Returns True if the provided service_names s tasks have at least expected_task_count tasks which are in any of expected_task_states False otherwise."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nwaiting until the service has at least N tasks in one of the specified state", "response": "def wait_for_service_tasks_state(\n service_name,\n expected_task_count,\n expected_task_states,\n timeout_sec=120\n):\n \"\"\" Returns once the service has at least N tasks in one of the specified state(s)\n\n :param service_name: the service name\n :type service_name: str\n :param expected_task_count: the expected number of tasks in the specified state(s)\n :type expected_task_count: int\n :param expected_task_states: the expected state(s) for tasks to be in, e.g. 
'TASK_RUNNING'\n :type expected_task_states: [str]\n :param timeout_sec: duration to wait\n :type timeout_sec: int\n\n :return: the duration waited in seconds\n :rtype: int\n \"\"\"\n return time_wait(\n lambda: task_states_predicate(service_name, expected_task_count, expected_task_states),\n timeout_seconds=timeout_sec)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn True if all of old_task_ids have been replaced with new tasks", "response": "def tasks_all_replaced_predicate(\n service_name,\n old_task_ids,\n task_predicate=None\n):\n \"\"\" Returns whether ALL of old_task_ids have been replaced with new tasks\n\n :param service_name: the service name\n :type service_name: str\n :param old_task_ids: list of original task ids as returned by get_service_task_ids\n :type old_task_ids: [str]\n :param task_predicate: filter to use when searching for tasks\n :type task_predicate: func\n\n :return: True if none of old_task_ids are still present in the service\n :rtype: bool\n \"\"\"\n try:\n task_ids = get_service_task_ids(service_name, task_predicate)\n except DCOSHTTPException:\n print('failed to get task ids for service {}'.format(service_name))\n task_ids = []\n\n print('waiting for all task ids in \"{}\" to change:\\n- old tasks: {}\\n- current tasks: {}'.format(\n service_name, old_task_ids, task_ids))\n for id in task_ids:\n if id in old_task_ids:\n return False # old task still present\n if len(task_ids) < len(old_task_ids): # new tasks haven't fully replaced old tasks\n return False\n return True"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn whether any of old_task_ids are no longer present in the service_name.", "response": "def tasks_missing_predicate(\n service_name,\n old_task_ids,\n task_predicate=None\n):\n \"\"\" Returns whether any of old_task_ids are no longer present\n\n :param service_name: the service name\n :type service_name: str\n :param old_task_ids: list of 
original task ids as returned by get_service_task_ids\n :type old_task_ids: [str]\n :param task_predicate: filter to use when searching for tasks\n :type task_predicate: func\n\n :return: True if any of old_task_ids are no longer present in the service\n :rtype: bool\n \"\"\"\n try:\n task_ids = get_service_task_ids(service_name, task_predicate)\n except DCOSHTTPException:\n print('failed to get task ids for service {}'.format(service_name))\n task_ids = []\n\n print('checking whether old tasks in \"{}\" are missing:\\n- old tasks: {}\\n- current tasks: {}'.format(\n service_name, old_task_ids, task_ids))\n for id in old_task_ids:\n if id not in task_ids:\n return True # an old task was not present\n return False"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef wait_for_service_tasks_all_changed(\n service_name,\n old_task_ids,\n task_predicate=None,\n timeout_sec=120\n):\n \"\"\" Returns once ALL of old_task_ids have been replaced with new tasks\n\n :param service_name: the service name\n :type service_name: str\n :param old_task_ids: list of original task ids as returned by get_service_task_ids\n :type old_task_ids: [str]\n :param task_predicate: filter to use when searching for tasks\n :type task_predicate: func\n :param timeout_sec: duration to wait\n :type timeout_sec: int\n\n :return: the duration waited in seconds\n :rtype: int\n \"\"\"\n return time_wait(\n lambda: tasks_all_replaced_predicate(service_name, old_task_ids, task_predicate),\n timeout_seconds=timeout_sec)", "response": "Waits until ALL of old_task_ids have been replaced with new tasks\n "} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nwaiting until all of the old_task_ids of the service has been removed or replaced from the service.", "response": "def wait_for_service_tasks_all_unchanged(\n service_name,\n old_task_ids,\n task_predicate=None,\n timeout_sec=30\n):\n \"\"\" Returns after 
verifying that NONE of old_task_ids have been removed or replaced from the service\n\n :param service_name: the service name\n :type service_name: str\n :param old_task_ids: list of original task ids as returned by get_service_task_ids\n :type old_task_ids: [str]\n :param task_predicate: filter to use when searching for tasks\n :type task_predicate: func\n :param timeout_sec: duration to wait until assuming tasks are unchanged\n :type timeout_sec: int\n\n :return: the duration waited in seconds (the timeout value)\n :rtype: int\n \"\"\"\n try:\n time_wait(\n lambda: tasks_missing_predicate(service_name, old_task_ids, task_predicate),\n timeout_seconds=timeout_sec)\n # shouldn't have exited successfully: raise below\n except TimeoutExpired:\n return timeout_sec # no changes occurred within timeout, as expected\n raise DCOSException(\"One or more of the following tasks were no longer found: {}\".format(old_task_ids))"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the version of Docker [ Server ]", "response": "def docker_version(host=None, component='server'):\n \"\"\" Return the version of Docker [Server]\n\n :param host: host or IP of the machine Docker is running on\n :type host: str\n :param component: Docker component\n :type component: str\n :return: Docker version\n :rtype: str\n \"\"\"\n\n if component.lower() == 'client':\n component = 'Client'\n else:\n component = 'Server'\n\n # sudo is required for non-coreOS installs\n command = 'sudo docker version -f {{.{}.Version}}'.format(component)\n\n if host is None:\n success, output = shakedown.run_command_on_master(command, None, None, False)\n else:\n success, output = shakedown.run_command_on_host(host, command, None, None, False)\n\n if success:\n return output\n else:\n return 'unknown'"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating a docker credentials file.", "response": "def create_docker_credentials_file(\n 
username,\n password,\n file_name='docker.tar.gz'):\n \"\"\" Create a docker credentials file.\n Docker username and password are used to create a `{file_name}`\n with `.docker/config.json` containing the credentials.\n\n :param username: docker username\n :type username: str\n :param password: docker password\n :type password: str\n :param file_name: credentials file name `docker.tar.gz` by default\n :type file_name: str\n \"\"\"\n\n import base64\n auth_hash = base64.b64encode(\n '{}:{}'.format(username, password).encode()).decode()\n\n config_json = {\n \"auths\": {\n \"https://index.docker.io/v1/\": {\"auth\": auth_hash}\n }\n }\n\n config_json_filename = 'config.json'\n # Write config.json to file\n with open(config_json_filename, 'w') as f:\n json.dump(config_json, f, indent=4)\n\n try:\n # Create a docker.tar.gz\n import tarfile\n with tarfile.open(file_name, 'w:gz') as tar:\n tar.add(config_json_filename, arcname='.docker/config.json')\n tar.close()\n except Exception as e:\n print('Failed to create a docker credentils file {}'.format(e))\n raise e\n finally:\n os.remove(config_json_filename)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef distribute_docker_credentials_to_private_agents(\n username,\n password,\n file_name='docker.tar.gz'):\n \"\"\" Create and distributes a docker credentials file to all private agents\n\n :param username: docker username\n :type username: str\n :param password: docker password\n :type password: str\n :param file_name: credentials file name `docker.tar.gz` by default\n :type file_name: str\n \"\"\"\n\n create_docker_credentials_file(username, password, file_name)\n\n try:\n __distribute_docker_credentials_file()\n finally:\n os.remove(file_name)", "response": "Create and distribute a docker credentials file to all private agents."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef 
prefetch_docker_image_on_private_agents(\n image,\n timeout=timedelta(minutes=5).total_seconds()):\n \"\"\" Given a docker image. An app with the image is scale across the private\n agents to ensure that the image is prefetched to all nodes.\n\n :param image: docker image name\n :type image: str\n :param timeout: timeout for deployment wait in secs (default: 5m)\n :type password: int\n \"\"\"\n agents = len(shakedown.get_private_agents())\n app = {\n \"id\": \"/prefetch\",\n \"instances\": agents,\n \"container\": {\n \"type\": \"DOCKER\",\n \"docker\": {\"image\": image}\n },\n \"cpus\": 0.1,\n \"mem\": 128\n }\n\n client = marathon.create_client()\n client.add_app(app)\n\n shakedown.deployment_wait(timeout)\n\n shakedown.delete_all_apps()\n shakedown.deployment_wait(timeout)", "response": "Given a docker image create a prefetch app on all private agents."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nread in options_file as JSON.", "response": "def _get_options(options_file=None):\n \"\"\" Read in options_file as JSON.\n\n :param options_file: filename to return\n :type options_file: str\n\n :return: options as dictionary\n :rtype: dict\n \"\"\"\n\n if options_file is not None:\n with open(options_file, 'r') as opt_file:\n options = json.loads(opt_file.read())\n else:\n options = {}\n return options"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninstalling a marathon package via the DC / OS library.", "response": "def install_package(\n package_name,\n package_version=None,\n service_name=None,\n options_file=None,\n options_json=None,\n wait_for_completion=False,\n timeout_sec=600,\n expected_running_tasks=0\n):\n \"\"\" Install a package via the DC/OS library\n\n :param package_name: name of the package\n :type package_name: str\n :param package_version: version of the package (defaults to latest)\n :type package_version: str\n :param service_name: unique service name for the package\n :type 
service_name: str\n :param options_file: filename that has options to use and is JSON format\n :type options_file: str\n :param options_json: dict that has options to use and is JSON format\n :type options_json: dict\n :param wait_for_completion: whether or not to wait for the app's deployment to complete\n :type wait_for_completion: bool\n :param timeout_sec: number of seconds to wait for task completion\n :type timeout_sec: int\n :param expected_running_tasks: number of service tasks to check for, or zero to disable\n :type expected_task_count: int\n\n :return: True if installation was successful, False otherwise\n :rtype: bool\n \"\"\"\n\n start = time.time()\n\n if options_file:\n options = _get_options(options_file)\n elif options_json:\n options = options_json\n else:\n options = {}\n\n package_manager = _get_package_manager()\n pkg = package_manager.get_package_version(package_name, package_version)\n\n if package_version is None:\n # Get the resolved version for logging below\n package_version = 'auto:{}'.format(pkg.version())\n\n if service_name is None:\n # Get the service name from the marathon template\n try:\n labels = pkg.marathon_json(options).get('labels')\n if 'DCOS_SERVICE_NAME' in labels:\n service_name = labels['DCOS_SERVICE_NAME']\n except errors.DCOSException as e:\n pass\n\n print('\\n{}installing {} with service={} version={} options={}'.format(\n shakedown.cli.helpers.fchr('>>'), package_name, service_name, package_version, options))\n\n try:\n # Print pre-install notes to console log\n pre_install_notes = pkg.package_json().get('preInstallNotes')\n if pre_install_notes:\n print(pre_install_notes)\n\n package_manager.install_app(pkg, options, service_name)\n\n # Print post-install notes to console log\n post_install_notes = pkg.package_json().get('postInstallNotes')\n if post_install_notes:\n print(post_install_notes)\n\n # Optionally wait for the app's deployment to finish\n if wait_for_completion:\n print(\"\\n{}waiting for {} deployment 
to complete...\".format(\n shakedown.cli.helpers.fchr('>>'), service_name))\n if expected_running_tasks > 0 and service_name is not None:\n wait_for_service_tasks_running(service_name, expected_running_tasks, timeout_sec)\n\n app_id = pkg.marathon_json(options).get('id')\n shakedown.deployment_wait(timeout_sec, app_id)\n print('\\n{}install completed after {}\\n'.format(\n shakedown.cli.helpers.fchr('>>'), pretty_duration(time.time() - start)))\n else:\n print('\\n{}install started after {}\\n'.format(\n shakedown.cli.helpers.fchr('>>'), pretty_duration(time.time() - start)))\n except errors.DCOSException as e:\n print('\\n{}{}'.format(\n shakedown.cli.helpers.fchr('>>'), e))\n\n # Install subcommands (if defined)\n if pkg.cli_definition():\n print(\"{}installing CLI commands for package '{}'\".format(\n shakedown.cli.helpers.fchr('>>'), package_name))\n subcommand.install(pkg)\n\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ninstalls a package via DC / OS library and wait for completion.", "response": "def install_package_and_wait(\n package_name,\n package_version=None,\n service_name=None,\n options_file=None,\n options_json=None,\n wait_for_completion=True,\n timeout_sec=600,\n expected_running_tasks=0\n):\n \"\"\" Install a package via the DC/OS library and wait for completion\n \"\"\"\n\n return install_package(\n package_name,\n package_version,\n service_name,\n options_file,\n options_json,\n wait_for_completion,\n timeout_sec,\n expected_running_tasks\n )"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef package_installed(package_name, service_name=None):\n\n package_manager = _get_package_manager()\n\n app_installed = len(package_manager.installed_apps(package_name, service_name)) > 0\n\n subcommand_installed = False\n for subcmd in package.installed_subcommands():\n package_json = subcmd.package_json()\n if package_json['name'] == 
package_name:\n subcommand_installed = True\n\n return (app_installed or subcommand_installed)", "response": "Check whether the given package is currently installed."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef uninstall_package(\n package_name,\n service_name=None,\n all_instances=False,\n wait_for_completion=False,\n timeout_sec=600\n):\n \"\"\" Uninstall a package using the DC/OS library.\n\n :param package_name: name of the package\n :type package_name: str\n :param service_name: unique service name for the package\n :type service_name: str\n :param all_instances: uninstall all instances of package\n :type all_instances: bool\n :param wait_for_completion: whether or not to wait for task completion before returning\n :type wait_for_completion: bool\n :param timeout_sec: number of seconds to wait for task completion\n :type timeout_sec: int\n\n :return: True if uninstall was successful, False otherwise\n :rtype: bool\n \"\"\"\n\n package_manager = _get_package_manager()\n pkg = package_manager.get_package_version(package_name, None)\n\n try:\n if service_name is None:\n service_name = _get_service_name(package_name, pkg)\n\n print(\"{}uninstalling package '{}' with service name '{}'\\n\".format(\n shakedown.cli.helpers.fchr('>>'), package_name, service_name))\n\n package_manager.uninstall_app(package_name, all_instances, service_name)\n\n # Optionally wait for the service to unregister as a framework\n if wait_for_completion:\n wait_for_mesos_task_removal(service_name, timeout_sec=timeout_sec)\n except errors.DCOSException as e:\n print('\\n{}{}'.format(\n shakedown.cli.helpers.fchr('>>'), e))\n\n # Uninstall subcommands (if defined)\n if pkg.cli_definition():\n print(\"{}uninstalling CLI commands for package '{}'\".format(\n shakedown.cli.helpers.fchr('>>'), package_name))\n subcommand.uninstall(package_name)\n\n return True", "response": "Uninstalls a package using the DC / OS library."} 
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef uninstall_package_and_wait(\n package_name,\n service_name=None,\n all_instances=False,\n wait_for_completion=True,\n timeout_sec=600\n):\n \"\"\" Uninstall a package via the DC/OS library and wait for completion\n\n :param package_name: name of the package\n :type package_name: str\n :param service_name: unique service name for the package\n :type service_name: str\n :param all_instances: uninstall all instances of package\n :type all_instances: bool\n :param wait_for_completion: whether or not to wait for task completion before returning\n :type wait_for_completion: bool\n :param timeout_sec: number of seconds to wait for task completion\n :type timeout_sec: int\n\n :return: True if uninstall was successful, False otherwise\n :rtype: bool\n \"\"\"\n\n return uninstall_package(\n package_name,\n service_name,\n all_instances,\n wait_for_completion,\n timeout_sec\n )", "response": "Uninstall a package via DC / OS library and wait for completion."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_package_repo(\n repo_name,\n repo_url,\n index=None,\n wait_for_package=None,\n expect_prev_version=None):\n \"\"\" Add a repository to the list of package sources\n\n :param repo_name: name of the repository to add\n :type repo_name: str\n :param repo_url: location of the repository to add\n :type repo_url: str\n :param index: index (precedence) for this repository\n :type index: int\n :param wait_for_package: the package whose version should change after the repo is added\n :type wait_for_package: str, or None\n\n :return: True if successful, False otherwise\n :rtype: bool\n \"\"\"\n\n package_manager = _get_package_manager()\n if wait_for_package:\n prev_version = package_manager.get_package_version(wait_for_package, None)\n if not package_manager.add_repo(repo_name, repo_url, 
index):\n return False\n if wait_for_package:\n try:\n spinner.time_wait(lambda: package_version_changed_predicate(package_manager, wait_for_package, prev_version))\n except TimeoutExpired:\n return False\n return True", "response": "Adds a repository to the list of package sources that can be used to create a new version of the package."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nremoves a repository from the list of package sources.", "response": "def remove_package_repo(repo_name, wait_for_package=None):\n \"\"\" Remove a repository from the list of package sources\n\n :param repo_name: name of the repository to remove\n :type repo_name: str\n :param wait_for_package: the package whose version should change after the repo is removed\n :type wait_for_package: str, or None\n\n :returns: True if successful, False otherwise\n :rtype: bool\n \"\"\"\n\n package_manager = _get_package_manager()\n if wait_for_package:\n prev_version = package_manager.get_package_version(wait_for_package, None)\n if not package_manager.remove_repo(repo_name):\n return False\n if wait_for_package:\n try:\n spinner.time_wait(lambda: package_version_changed_predicate(package_manager, wait_for_package, prev_version))\n except TimeoutExpired:\n return False\n return True"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the list of versions of a given package", "response": "def get_package_versions(package_name):\n \"\"\" Returns the list of versions of a given package\n :param package_name: name of the package\n :type package_name: str\n \"\"\"\n package_manager = _get_package_manager()\n pkg = package_manager.get_package_version(package_name, None)\n return pkg.package_versions()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef cli(**args):\n import shakedown\n\n # Read configuration options from ~/.shakedown (if exists)\n args = 
read_config(args)\n\n # Set configuration defaults\n args = set_config_defaults(args)\n\n if args['quiet']:\n shakedown.cli.quiet = True\n\n if not args['dcos_url']:\n try:\n args['dcos_url'] = dcos_url()\n except:\n click.secho('error: cluster URL not set, use --dcos-url or see --help for more information.', fg='red', bold=True)\n sys.exit(1)\n\n if not args['dcos_url']:\n click.secho('error: --dcos-url is a required option; see --help for more information.', fg='red', bold=True)\n sys.exit(1)\n\n if args['ssh_key_file']:\n shakedown.cli.ssh_key_file = args['ssh_key_file']\n\n if args['ssh_user']:\n shakedown.cli.ssh_user = args['ssh_user']\n\n if not args['no_banner']:\n echo(banner(), n=False)\n\n echo('Running pre-flight checks...', d='step-maj')\n\n # required modules and their 'version' method\n imported = {}\n requirements = {\n 'pytest': '__version__',\n 'dcos': 'version'\n }\n\n for req in requirements:\n ver = requirements[req]\n\n echo(\"Checking for {} library...\".format(req), d='step-min', n=False)\n try:\n imported[req] = importlib.import_module(req, package=None)\n except ImportError:\n click.secho(\"error: {p} is not installed; run 'pip install {p}'.\".format(p=req), fg='red', bold=True)\n sys.exit(1)\n\n echo(getattr(imported[req], requirements[req]))\n\n if shakedown.attach_cluster(args['dcos_url']):\n echo('Checking DC/OS cluster version...', d='step-min', n=False)\n echo(shakedown.dcos_version())\n else:\n with imported['dcos'].cluster.setup_directory() as temp_path:\n imported['dcos'].cluster.set_attached(temp_path)\n\n imported['dcos'].config.set_val('core.dcos_url', args['dcos_url'])\n if args['ssl_no_verify']:\n imported['dcos'].config.set_val('core.ssl_verify', 'False')\n\n try:\n imported['dcos'].cluster.setup_cluster_config(args['dcos_url'], temp_path, False)\n except:\n echo('Authenticating with DC/OS cluster...', d='step-min')\n authenticated = False\n token = imported['dcos'].config.get_config_val(\"core.dcos_acs_token\")\n if token 
is not None:\n echo('trying existing ACS token...', d='step-min', n=False)\n try:\n shakedown.dcos_leader()\n\n authenticated = True\n echo(fchr('PP'), d='pass')\n except imported['dcos'].errors.DCOSException:\n echo(fchr('FF'), d='fail')\n if not authenticated and args['oauth_token']:\n try:\n echo('trying OAuth token...', d='item-maj', n=False)\n token = shakedown.authenticate_oauth(args['oauth_token'])\n\n with stdchannel_redirected(sys.stderr, os.devnull):\n imported['dcos'].config.set_val('core.dcos_acs_token', token)\n\n authenticated = True\n echo(fchr('PP'), d='pass')\n except:\n echo(fchr('FF'), d='fail')\n if not authenticated and args['username'] and args['password']:\n try:\n echo('trying username and password...', d='item-maj', n=False)\n token = shakedown.authenticate(args['username'], args['password'])\n\n with stdchannel_redirected(sys.stderr, os.devnull):\n imported['dcos'].config.set_val('core.dcos_acs_token', token)\n\n authenticated = True\n echo(fchr('PP'), d='pass')\n except:\n echo(fchr('FF'), d='fail')\n\n if authenticated:\n imported['dcos'].cluster.setup_cluster_config(args['dcos_url'], temp_path, False)\n\n echo('Checking DC/OS cluster version...', d='step-min', n=False)\n echo(shakedown.dcos_version())\n else:\n click.secho(\"error: no authentication credentials or token found.\", fg='red', bold=True)\n sys.exit(1)\n\n class shakedown:\n \"\"\" This encapsulates a PyTest wrapper plugin\n \"\"\"\n\n state = {}\n\n stdout = []\n\n tests = {\n 'file': {},\n 'test': {}\n }\n\n report_stats = {\n 'passed':[],\n 'skipped':[],\n 'failed':[],\n 'total_passed':0,\n 'total_skipped':0,\n 'total_failed':0,\n }\n\n\n def output(title, state, text, status=True):\n \"\"\" Capture and display stdout/stderr output\n\n :param title: the title of the output box (eg. 
test name)\n :type title: str\n :param state: state of the result (pass, fail)\n :type state: str\n :param text: the stdout/stderr output\n :type text: str\n :param status: whether to output a status marker\n :type status: bool\n \"\"\"\n if state == 'fail':\n schr = fchr('FF')\n elif state == 'pass':\n schr = fchr('PP')\n elif state == 'skip':\n schr = fchr('SK')\n else:\n schr = ''\n\n if status:\n if not args['stdout_inline']:\n if state == 'fail':\n echo(schr, d='fail')\n elif state == 'pass':\n echo(schr, d='pass')\n else:\n if not text:\n if state == 'fail':\n echo(schr, d='fail')\n elif state == 'pass':\n if '::' in title:\n echo(title.split('::')[-1], d='item-min', n=False)\n echo(schr, d='pass')\n\n if text and args['stdout'] in [state, 'all']:\n o = decorate(schr + ': ', 'quote-head-' + state)\n o += click.style(decorate(title, style=state), bold=True) + \"\\n\"\n o += decorate(str(text).strip(), style='quote-' + state)\n\n if args['stdout_inline']:\n echo(o)\n else:\n shakedown.stdout.append(o)\n\n\n def pytest_collectreport(self, report):\n \"\"\" Collect and validate individual test files\n \"\"\"\n\n if not 'collect' in shakedown.state:\n shakedown.state['collect'] = 1\n echo('Collecting and validating test files...', d='step-min')\n\n if report.nodeid:\n echo(report.nodeid, d='item-maj', n=False)\n\n state = None\n\n if report.failed:\n state = 'fail'\n if report.passed:\n state = 'pass'\n if report.skipped:\n state = 'skip'\n\n if state:\n if report.longrepr:\n shakedown.output(report.nodeid, state, report.longrepr)\n else:\n shakedown.output(report.nodeid, state, None)\n\n\n def pytest_sessionstart(self):\n \"\"\" Tests have been collected, begin running them...\n \"\"\"\n\n echo('Initiating testing phase...', d='step-maj')\n\n\n def pytest_report_teststatus(self, report):\n \"\"\" Print report results to the console as they are run\n \"\"\"\n\n try:\n report_file, report_test = report.nodeid.split('::', 1)\n except ValueError:\n return\n\n if not 
'test' in shakedown.state:\n shakedown.state['test'] = 1\n echo('Running individual tests...', d='step-min')\n\n if not report_file in shakedown.tests['file']:\n shakedown.tests['file'][report_file] = 1\n echo(report_file, d='item-maj')\n if not report.nodeid in shakedown.tests['test']:\n shakedown.tests['test'][report.nodeid] = {}\n if args['stdout_inline']:\n echo('')\n echo(report_test + ':', d='item-min')\n else:\n echo(report_test, d='item-min', n=False)\n\n if report.failed:\n shakedown.tests['test'][report.nodeid]['fail'] = True\n\n if report.when == 'teardown' and not 'tested' in shakedown.tests['test'][report.nodeid]:\n shakedown.output(report.nodeid, 'pass', None)\n\n # Suppress excess terminal output\n return report.outcome, None, None\n\n\n def pytest_runtest_logreport(self, report):\n \"\"\" Log the [stdout, stderr] results of tests if desired\n \"\"\"\n\n state = None\n\n for secname, content in report.sections:\n if report.failed:\n state = 'fail'\n if report.passed:\n state = 'pass'\n if report.skipped:\n state = 'skip'\n\n if state and secname != 'Captured stdout call':\n module = report.nodeid.split('::', 1)[0]\n cap_type = secname.split(' ')[-1]\n\n if not 'setup' in shakedown.tests['test'][report.nodeid]:\n shakedown.tests['test'][report.nodeid]['setup'] = True\n shakedown.output(module + ' ' + cap_type, state, content, False)\n elif cap_type == 'teardown':\n shakedown.output(module + ' ' + cap_type, state, content, False)\n elif state and report.when == 'call':\n if 'tested' in shakedown.tests['test'][report.nodeid]:\n shakedown.output(report.nodeid, state, content, False)\n else:\n shakedown.tests['test'][report.nodeid]['tested'] = True\n shakedown.output(report.nodeid, state, content)\n\n # Capture execution crashes\n if hasattr(report.longrepr, 'reprcrash'):\n longreport = report.longrepr\n\n if 'tested' in shakedown.tests['test'][report.nodeid]:\n shakedown.output(report.nodeid, 'fail', 'error: ' + str(longreport.reprcrash), False)\n 
else:\n shakedown.tests['test'][report.nodeid]['tested'] = True\n shakedown.output(report.nodeid, 'fail', 'error: ' + str(longreport.reprcrash))\n\n\n def pytest_sessionfinish(self, session, exitstatus):\n \"\"\" Testing phase is complete; print extra reports (stdout/stderr, JSON) as requested\n \"\"\"\n\n echo('Test phase completed.', d='step-maj')\n\n if ('stdout' in args and args['stdout']) and shakedown.stdout:\n for output in shakedown.stdout:\n echo(output)\n\n opts = ['-q', '--tb=no', \"--timeout={}\".format(args['timeout'])]\n\n if args['fail'] == 'fast':\n opts.append('-x')\n\n if args['pytest_option']:\n for opt in args['pytest_option']:\n opts.append(opt)\n\n if args['stdout_inline']:\n opts.append('-s')\n\n if args['tests']:\n tests_to_run = []\n for test in args['tests']:\n tests_to_run.extend(test.split())\n for test in tests_to_run:\n opts.append(test)\n\n exitstatus = imported['pytest'].main(opts, plugins=[shakedown()])\n\n sys.exit(exitstatus)", "response": "This is the main CLI for the DCOS test - harness tool."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nfind the bounding box of the tile that is available at a given zoom level.", "response": "def find_coverage(self, zoom):\n \"\"\"\n Returns the bounding box (minx, miny, maxx, maxy) of an adjacent\n group of tiles at this zoom level.\n \"\"\"\n # Find a group of adjacent available tiles at this zoom level\n rows = self._query('''SELECT tile_column, tile_row FROM tiles\n WHERE zoom_level=?\n ORDER BY tile_column, tile_row;''', (zoom,))\n t = rows.fetchone()\n xmin, ymin = t\n previous = t\n while t and t[0] - previous[0] <= 1:\n # adjacent, go on\n previous = t\n t = rows.fetchone()\n xmax, ymax = previous\n # Transform (xmin, ymin) (xmax, ymax) to pixels\n S = self.tilesize\n bottomleft = (xmin * S, (ymax + 1) * S)\n topright = ((xmax + 1) * S, ymin * S)\n # Convert center to (lon, lat)\n proj = GoogleProjection(S, [zoom]) # WGS84\n return 
proj.unproject_pixels(bottomleft, zoom) + proj.unproject_pixels(topright, zoom)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef tile(self, z, x, y):\n logger.debug(_(\"Download tile %s\") % ((z, x, y),))\n # Render each keyword in URL ({s}, {x}, {y}, {z}, {size} ... )\n size = self.tilesize\n s = self.tiles_subdomains[(x + y) % len(self.tiles_subdomains)];\n try:\n url = self.tiles_url.format(**locals())\n except KeyError as e:\n raise DownloadError(_(\"Unknown keyword %s in URL\") % e)\n\n logger.debug(_(\"Retrieve tile at %s\") % url)\n r = DOWNLOAD_RETRIES\n sleeptime = 1\n while r > 0:\n try:\n request = requests.get(url, headers=self.headers)\n if request.status_code == 200:\n return request.content\n raise DownloadError(_(\"Status code : %s, url : %s\") % (request.status_code, url))\n except requests.exceptions.ConnectionError as e:\n logger.debug(_(\"Download error, retry (%s left). (%s)\") % (r, e))\n r -= 1\n time.sleep(sleeptime)\n # progressivly sleep longer to wait for this tile\n if (sleeptime <= 10) and (r % 2 == 0):\n sleeptime += 1 # increase wait\n raise DownloadError(_(\"Cannot download URL %s\") % url)", "response": "Download the specified tile from the specified URL."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nrendering the specified tile with Mapnik", "response": "def tile(self, z, x, y):\n \"\"\"\n Render the specified tile with Mapnik\n \"\"\"\n logger.debug(_(\"Render tile %s\") % ((z, x, y),))\n proj = GoogleProjection(self.tilesize, [z])\n return self.render(proj.tile_bbox((z, x, y)))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef render(self, bbox, width=None, height=None):\n width = width or self.tilesize\n height = height or self.tilesize\n self._prepare_rendering(bbox, width=width, height=height)\n\n # Render image with default Agg renderer\n tmpfile = 
NamedTemporaryFile(delete=False)\n im = mapnik.Image(width, height)\n mapnik.render(self._mapnik, im)\n im.save(tmpfile.name, 'png256') # TODO: mapnik output only to file?\n tmpfile.close()\n content = open(tmpfile.name, 'rb').read()\n os.unlink(tmpfile.name)\n return content", "response": "Render the specified tile with Mapnik\n "} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef grid(self, z, x, y, fields, layer):\n logger.debug(_(\"Render grid %s\") % ((z, x, y),))\n proj = GoogleProjection(self.tilesize, [z])\n return self.render_grid(proj.tile_bbox((z, x, y)), fields, layer)", "response": "Render a grid with Mapnik s tile bounding box."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef render_grid(self, bbox, grid_fields, layer, width=None, height=None):\n width = width or self.tilesize\n height = height or self.tilesize\n self._prepare_rendering(bbox, width=width, height=height)\n\n grid = mapnik.Grid(width, height)\n mapnik.render_layer(self._mapnik, grid, layer=layer, fields=grid_fields)\n grid = grid.encode()\n return json.dumps(grid)", "response": "Render the specified grid with Mapnik"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a tuple of z x y coordinates for the tile at the given zoom level and position.", "response": "def tile_at(self, zoom, position):\n \"\"\"\n Returns a tuple of (z, x, y)\n \"\"\"\n x, y = self.project_pixels(position, zoom)\n return (zoom, int(x/self.tilesize), int(y/self.tilesize))"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the coordinates in meters from WGS84", "response": "def project(self, lng_lat):\n \"\"\"\n Returns the coordinates in meters from WGS84\n \"\"\"\n (lng, lat) = lng_lat\n x = lng * DEG_TO_RAD\n lat = max(min(MAX_LATITUDE, lat), -MAX_LATITUDE)\n y = lat * DEG_TO_RAD\n y = log(tan((pi / 4) + (y / 2)))\n 
return (x*EARTH_RADIUS, y*EARTH_RADIUS)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nconverting a color string to a color tuple.", "response": "def string2rgba(cls, colorstring):\n \"\"\" Convert #RRGGBBAA to an (R, G, B, A) tuple \"\"\"\n colorstring = colorstring.strip()\n if colorstring[0] == '#':\n colorstring = colorstring[1:]\n if len(colorstring) < 6:\n raise ValueError(\"input #%s is not in #RRGGBB format\" % colorstring)\n r, g, b = colorstring[:2], colorstring[2:4], colorstring[4:6]\n a = 'ff'\n if len(colorstring) > 6:\n a = colorstring[6:8]\n r, g, b, a = [int(n, 16) for n in (r, g, b, a)]\n return (r, g, b, a)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef tileslist(self, bbox, zoomlevels):\n proj = GoogleProjection(self.tile_size, zoomlevels, self.tile_scheme)\n return proj.tileslist(bbox)", "response": "Build the tiles list within the bottom - left or top - right bounding box at the specified zoom levels."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_layer(self, tilemanager, opacity=1.0):\n assert has_pil, _(\"Cannot blend layers without python PIL\")\n assert self.tile_size == tilemanager.tile_size, _(\"Cannot blend layers whose tile size differs\")\n assert 0 <= opacity <= 1, _(\"Opacity should be between 0.0 (transparent) and 1.0 (opaque)\")\n self.cache.basename += '%s%.1f' % (tilemanager.cache.basename, opacity)\n self._layers.append((tilemanager, opacity))", "response": "Add a layer to be blended on top of the tile."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding an image filter for post - processing.", "response": "def add_filter(self, filter_):\n \"\"\" Add an image filter for post-processing \"\"\"\n assert has_pil, _(\"Cannot add filters without python PIL\")\n self.cache.basename += filter_.basename\n self._filters.append(filter_)"} {"SOURCE": 
"codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef tile(self, z_x_y):\n (z, x, y) = z_x_y\n logger.debug(_(\"tile method called with %s\") % ([z, x, y]))\n\n output = self.cache.read((z, x, y))\n if output is None:\n output = self.reader.tile(z, x, y)\n # Blend layers\n if len(self._layers) > 0:\n logger.debug(_(\"Will blend %s layer(s)\") % len(self._layers))\n output = self._blend_layers(output, (z, x, y))\n # Apply filters\n for f in self._filters:\n image = f.process(self._tile_image(output))\n output = self._image_tile(image)\n # Save result to cache\n self.cache.save(output, (z, x, y))\n\n self.rendered += 1\n return output", "response": "Return the binary content of the tile and seed the cache."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef grid(self, z_x_y):\n # sources.py -> MapnikRenderer -> grid\n (z, x, y) = z_x_y\n content = self.reader.grid(z, x, y, self.grid_fields, self.grid_layer)\n return content", "response": "Return the UTFGrid content for the given z - x - y."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _blend_layers(self, imagecontent, z_x_y):\n (z, x, y) = z_x_y\n result = self._tile_image(imagecontent)\n # Paste each layer\n for (layer, opacity) in self._layers:\n try:\n # Prepare tile of overlay, if available\n overlay = self._tile_image(layer.tile((z, x, y)))\n except (IOError, DownloadError, ExtractionError)as e:\n logger.warn(e)\n continue\n # Extract alpha mask\n overlay = overlay.convert(\"RGBA\")\n r, g, b, a = overlay.split()\n overlay = Image.merge(\"RGB\", (r, g, b))\n a = ImageEnhance.Brightness(a).enhance(opacity)\n overlay.putalpha(a)\n mask = Image.merge(\"L\", (a,))\n result.paste(overlay, (0, 0), mask)\n # Read result\n return self._image_tile(result)", "response": "Blends all layers into a single tile."} {"SOURCE": "codesearchnet", 
"instruction": "Implement a function in Python 3 to\ntile binary content as PIL Image.", "response": "def _tile_image(self, data):\n \"\"\"\n Tile binary content as PIL Image.\n \"\"\"\n image = Image.open(BytesIO(data))\n return image.convert('RGBA')"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef zoomlevels(self):\n zooms = set()\n for coverage in self._bboxes:\n for zoom in coverage[1]:\n zooms.add(zoom)\n return sorted(zooms)", "response": "Return the list of covered zoom levels in ascending order"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nbuilds a MBTile file and add coverage to the metadata.", "response": "def run(self, force=False):\n \"\"\"\n Build a MBTile file.\n\n force -- overwrite if MBTiles file already exists.\n \"\"\"\n if os.path.exists(self.filepath):\n if force:\n logger.warn(_(\"%s already exists. Overwrite.\") % self.filepath)\n os.remove(self.filepath)\n else:\n # Already built, do not do anything.\n logger.info(_(\"%s already exists. 
Nothing to do.\") % self.filepath)\n return\n\n # Clean previous runs\n self._clean_gather()\n\n # If no coverage added, use bottom layer metadata\n if len(self._bboxes) == 0 and len(self._layers) > 0:\n bottomlayer = self._layers[0]\n metadata = bottomlayer.reader.metadata()\n if 'bounds' in metadata:\n logger.debug(_(\"Use bounds of bottom layer %s\") % bottomlayer)\n bbox = map(float, metadata.get('bounds', '').split(','))\n zoomlevels = range(int(metadata.get('minzoom', 0)), int(metadata.get('maxzoom', 0)))\n self.add_coverage(bbox=bbox, zoomlevels=zoomlevels)\n\n # Compute list of tiles\n tileslist = set()\n for bbox, levels in self._bboxes:\n logger.debug(_(\"Compute list of tiles for bbox %s on zooms %s.\") % (bbox, levels))\n bboxlist = self.tileslist(bbox, levels)\n logger.debug(_(\"Add %s tiles.\") % len(bboxlist))\n tileslist = tileslist.union(bboxlist)\n logger.debug(_(\"%s tiles in total.\") % len(tileslist))\n self.nbtiles = len(tileslist)\n if not self.nbtiles:\n raise EmptyCoverageError(_(\"No tiles are covered by bounding boxes : %s\") % self._bboxes)\n logger.debug(_(\"%s tiles to be packaged.\") % self.nbtiles)\n\n # Go through whole list of tiles and gather them in tmp_dir\n self.rendered = 0\n for (z, x, y) in tileslist:\n try:\n self._gather((z, x, y))\n except Exception as e:\n logger.warn(e)\n if not self.ignore_errors:\n raise\n\n logger.debug(_(\"%s tiles were missing.\") % self.rendered)\n\n # Some metadata\n middlezoom = self.zoomlevels[len(self.zoomlevels) // 2]\n lat = self.bounds[1] + (self.bounds[3] - self.bounds[1])/2\n lon = self.bounds[0] + (self.bounds[2] - self.bounds[0])/2\n metadata = {}\n metadata['name'] = str(uuid.uuid4())\n metadata['format'] = self._tile_extension[1:]\n metadata['minzoom'] = self.zoomlevels[0]\n metadata['maxzoom'] = self.zoomlevels[-1]\n metadata['bounds'] = '%s,%s,%s,%s' % tuple(self.bounds)\n metadata['center'] = '%s,%s,%s' % (lon, lat, middlezoom)\n #display informations from the grids on hover\n 
content_to_display = ''\n for field_name in self.grid_fields:\n content_to_display += \"{{{ %s }}}
\" % field_name\n metadata['template'] = '{{#__location__}}{{/__location__}} {{#__teaser__}} \\\n %s {{/__teaser__}}{{#__full__}}{{/__full__}}' % content_to_display\n metadatafile = os.path.join(self.tmp_dir, 'metadata.json')\n with open(metadatafile, 'w') as output:\n json.dump(metadata, output)\n\n # TODO: add UTF-Grid of last layer, if any\n\n # Package it!\n logger.info(_(\"Build MBTiles file '%s'.\") % self.filepath)\n extension = self.tile_format.split(\"image/\")[-1]\n disk_to_mbtiles(\n self.tmp_dir,\n self.filepath,\n format=extension,\n scheme=self.cache.scheme\n )\n\n try:\n os.remove(\"%s-journal\" % self.filepath) # created by mbutil\n except OSError as e:\n pass\n self._clean_gather()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a grid of x y tuples representing the juxtaposition of tiles on the specified bounding box at the specified zoomlevel.", "response": "def grid_tiles(self, bbox, zoomlevel):\n \"\"\"\n Return a grid of (x, y) tuples representing the juxtaposition\n of tiles on the specified ``bbox`` at the specified ``zoomlevel``.\n \"\"\"\n tiles = self.tileslist(bbox, [zoomlevel])\n grid = {}\n for (z, x, y) in tiles:\n if not grid.get(y):\n grid[y] = []\n grid[y].append(x)\n sortedgrid = []\n for y in sorted(grid.keys(), reverse=self.tile_scheme == 'tms'):\n sortedgrid.append([(x, y) for x in sorted(grid[y])])\n return sortedgrid"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef export_image(self, bbox, zoomlevel, imagepath):\n assert has_pil, _(\"Cannot export image without python PIL\")\n grid = self.grid_tiles(bbox, zoomlevel)\n width = len(grid[0])\n height = len(grid)\n widthpix = width * self.tile_size\n heightpix = height * self.tile_size\n\n result = Image.new(\"RGBA\", (widthpix, heightpix))\n offset = (0, 0)\n for i, row in enumerate(grid):\n for j, (x, y) in enumerate(row):\n offset = (j * self.tile_size, i * self.tile_size)\n img = 
self._tile_image(self.tile((zoomlevel, x, y)))\n result.paste(img, offset)\n logger.info(_(\"Save resulting image to '%s'\") % imagepath)\n result.save(imagepath)", "response": "Exports the image for the specified bounding box and zoomlevel."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrun therough GPOS and make an alphabetically ordered list of scripts.", "response": "def _makeScriptOrder(gpos):\n \"\"\"\n Run therough GPOS and make an alphabetically\n ordered list of scripts. If DFLT is in the list,\n move it to the front.\n \"\"\"\n scripts = []\n for scriptRecord in gpos.ScriptList.ScriptRecord:\n scripts.append(scriptRecord.ScriptTag)\n if \"DFLT\" in scripts:\n scripts.remove(\"DFLT\")\n scripts.insert(0, \"DFLT\")\n return sorted(scripts)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ngather kerning and classes from the applicable lookups and return them in script order.", "response": "def _gatherDataFromLookups(gpos, scriptOrder):\n \"\"\"\n Gather kerning and classes from the applicable lookups\n and return them in script order.\n \"\"\"\n lookupIndexes = _gatherLookupIndexes(gpos)\n seenLookups = set()\n kerningDictionaries = []\n leftClassDictionaries = []\n rightClassDictionaries = []\n for script in scriptOrder:\n kerning = []\n leftClasses = []\n rightClasses = []\n for lookupIndex in lookupIndexes[script]:\n if lookupIndex in seenLookups:\n continue\n seenLookups.add(lookupIndex)\n result = _gatherKerningForLookup(gpos, lookupIndex)\n if result is None:\n continue\n k, lG, rG = result\n kerning.append(k)\n leftClasses.append(lG)\n rightClasses.append(rG)\n if kerning:\n kerningDictionaries.append(kerning)\n leftClassDictionaries.append(leftClasses)\n rightClassDictionaries.append(rightClasses)\n return kerningDictionaries, leftClassDictionaries, rightClassDictionaries"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngathers a 
mapping of script to lookup indexes for each kern feature.", "response": "def _gatherLookupIndexes(gpos):\n \"\"\"\n Gather a mapping of script to lookup indexes\n referenced by the kern feature for each script.\n Returns a dictionary of this structure:\n {\n \"latn\" : [0],\n \"DFLT\" : [0]\n }\n \"\"\"\n # gather the indexes of the kern features\n kernFeatureIndexes = [index for index, featureRecord in enumerate(gpos.FeatureList.FeatureRecord) if featureRecord.FeatureTag == \"kern\"]\n # find scripts and languages that have kern features\n scriptKernFeatureIndexes = {}\n for scriptRecord in gpos.ScriptList.ScriptRecord:\n script = scriptRecord.ScriptTag\n thisScriptKernFeatureIndexes = []\n defaultLangSysRecord = scriptRecord.Script.DefaultLangSys\n if defaultLangSysRecord is not None:\n f = []\n for featureIndex in defaultLangSysRecord.FeatureIndex:\n if featureIndex not in kernFeatureIndexes:\n continue\n f.append(featureIndex)\n if f:\n thisScriptKernFeatureIndexes.append((None, f))\n if scriptRecord.Script.LangSysRecord is not None:\n for langSysRecord in scriptRecord.Script.LangSysRecord:\n langSys = langSysRecord.LangSysTag\n f = []\n for featureIndex in langSysRecord.LangSys.FeatureIndex:\n if featureIndex not in kernFeatureIndexes:\n continue\n f.append(featureIndex)\n if f:\n thisScriptKernFeatureIndexes.append((langSys, f))\n scriptKernFeatureIndexes[script] = thisScriptKernFeatureIndexes\n # convert the feature indexes to lookup indexes\n scriptLookupIndexes = {}\n for script, featureDefinitions in scriptKernFeatureIndexes.items():\n lookupIndexes = scriptLookupIndexes[script] = []\n for language, featureIndexes in featureDefinitions:\n for featureIndex in featureIndexes:\n featureRecord = gpos.FeatureList.FeatureRecord[featureIndex]\n for lookupIndex in featureRecord.Feature.LookupListIndex:\n if lookupIndex not in lookupIndexes:\n lookupIndexes.append(lookupIndex)\n # done\n return scriptLookupIndexes"} {"SOURCE": "codesearchnet", "instruction": 
"Implement a Python 3 function for\ngathering the kerning and left clases and right classes for a particular lookup.", "response": "def _gatherKerningForLookup(gpos, lookupIndex):\n \"\"\"\n Gather the kerning and class data for a particular lookup.\n Returns kerning, left clases, right classes.\n The kerning dictionary is of this structure:\n {\n (\"a\", \"a\") : 10,\n ((1, 1, 3), \"a\") : -20\n }\n The class dictionaries have this structure:\n {\n (1, 1, 3) : [\"x\", \"y\", \"z\"]\n }\n Where the tuple means this:\n (lookup index, subtable index, class index)\n \"\"\"\n allKerning = {}\n allLeftClasses = {}\n allRightClasses = {}\n lookup = gpos.LookupList.Lookup[lookupIndex]\n # only handle pair positioning and extension\n if lookup.LookupType not in (2, 9):\n return\n for subtableIndex, subtable in enumerate(lookup.SubTable):\n if lookup.LookupType == 2:\n format = subtable.Format\n lookupType = subtable.LookupType\n if (lookupType, format) == (2, 1):\n kerning = _handleLookupType2Format1(subtable)\n allKerning.update(kerning)\n elif (lookupType, format) == (2, 2):\n kerning, leftClasses, rightClasses = _handleLookupType2Format2(subtable, lookupIndex, subtableIndex)\n allKerning.update(kerning)\n allLeftClasses.update(leftClasses)\n allRightClasses.update(rightClasses)\n elif lookup.LookupType == 9:\n extSubtable = subtable.ExtSubTable\n format = extSubtable.Format\n lookupType = extSubtable.LookupType\n if (lookupType, format) == (2, 1):\n kerning = _handleLookupType2Format1(extSubtable)\n allKerning.update(kerning)\n elif (lookupType, format) == (2, 2):\n kerning, leftClasses, rightClasses = _handleLookupType2Format2(extSubtable, lookupIndex, subtableIndex)\n allKerning.update(kerning)\n allLeftClasses.update(leftClasses)\n allRightClasses.update(rightClasses)\n # done\n return allKerning, allLeftClasses, allRightClasses"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nhandles a Lookup Type 2 Format 1 
subtable.", "response": "def _handleLookupType2Format1(subtable):\n \"\"\"\n Extract a kerning dictionary from a Lookup Type 2 Format 1.\n \"\"\"\n kerning = {}\n coverage = subtable.Coverage.glyphs\n valueFormat1 = subtable.ValueFormat1\n pairSets = subtable.PairSet\n for index, leftGlyphName in enumerate(coverage):\n pairSet = pairSets[index]\n for pairValueRecord in pairSet.PairValueRecord:\n rightGlyphName = pairValueRecord.SecondGlyph\n if valueFormat1:\n value = pairValueRecord.Value1\n else:\n value = pairValueRecord.Value2\n if hasattr(value, \"XAdvance\"):\n value = value.XAdvance\n kerning[leftGlyphName, rightGlyphName] = value\n return kerning"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nhandle a Lookup Type 2 Format 2.", "response": "def _handleLookupType2Format2(subtable, lookupIndex, subtableIndex):\n \"\"\"\n Extract kerning, left class and right class dictionaries from a Lookup Type 2 Format 2.\n \"\"\"\n # extract the classes\n leftClasses = _extractFeatureClasses(lookupIndex=lookupIndex, subtableIndex=subtableIndex, classDefs=subtable.ClassDef1.classDefs, coverage=subtable.Coverage.glyphs)\n rightClasses = _extractFeatureClasses(lookupIndex=lookupIndex, subtableIndex=subtableIndex, classDefs=subtable.ClassDef2.classDefs)\n # extract the pairs\n kerning = {}\n for class1RecordIndex, class1Record in enumerate(subtable.Class1Record):\n for class2RecordIndex, class2Record in enumerate(class1Record.Class2Record):\n leftClass = (lookupIndex, subtableIndex, class1RecordIndex)\n rightClass = (lookupIndex, subtableIndex, class2RecordIndex)\n valueFormat1 = subtable.ValueFormat1\n if valueFormat1:\n value = class2Record.Value1\n else:\n value = class2Record.Value2\n if hasattr(value, \"XAdvance\") and value.XAdvance != 0:\n value = value.XAdvance\n kerning[leftClass, rightClass] = value\n return kerning, leftClasses, rightClasses"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it 
does\ndef _mergeKerningDictionaries(kerningDictionaries):\n # work through the dictionaries backwards since\n # this uses an update to load the kerning. this\n # will ensure that the script order is honored.\n kerning = {}\n for dictionaryGroup in reversed(kerningDictionaries):\n for dictionary in dictionaryGroup:\n kerning.update(dictionary)\n # done.\n return kerning", "response": "Merge all of the kerning dictionaries found into one flat dictionary."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _findSingleMemberGroups(classDictionaries):\n toRemove = {}\n for classDictionaryGroup in classDictionaries:\n for classDictionary in classDictionaryGroup:\n for name, members in list(classDictionary.items()):\n if len(members) == 1:\n toRemove[name] = list(members)[0]\n del classDictionary[name]\n return toRemove", "response": "Find all classes that have only one member."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ntranslate group names into glyph names in pairs", "response": "def _removeSingleMemberGroupReferences(kerning, leftGroups, rightGroups):\n \"\"\"\n Translate group names into glyph names in pairs\n if the group only contains one glyph.\n \"\"\"\n new = {}\n for (left, right), value in kerning.items():\n left = leftGroups.get(left, left)\n right = rightGroups.get(right, right)\n new[left, right] = value\n return new"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _mergeClasses(classDictionaries):\n # build a mapping of members to names\n memberTree = {}\n for classDictionaryGroup in classDictionaries:\n for classDictionary in classDictionaryGroup:\n for name, members in classDictionary.items():\n if members not in memberTree:\n memberTree[members] = set()\n memberTree[members].add(name)\n # find members that have more than one name\n classes = {}\n rename = {}\n for members, names in memberTree.items():\n 
name = names.pop()\n if len(names) > 0:\n for otherName in names:\n rename[otherName] = name\n classes[name] = members\n return classes, rename", "response": "This function merges the classes that have the exact same list\n of members and flag them for removal."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets the final names into the groups.", "response": "def _setGroupNames(classes, classRename):\n \"\"\"\n Set the final names into the groups.\n \"\"\"\n groups = {}\n for groupName, glyphList in classes.items():\n groupName = classRename.get(groupName, groupName)\n # if the glyph list has only one member,\n # the glyph name will be used in the pairs.\n # no group is needed.\n if len(glyphList) == 1:\n continue\n groups[groupName] = glyphList\n return groups"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _validateClasses(classes):\n glyphToClass = {}\n for className, glyphList in classes.items():\n for glyphName in glyphList:\n if glyphName not in glyphToClass:\n glyphToClass[glyphName] = set()\n glyphToClass[glyphName].add(className)\n for glyphName, groupList in glyphToClass.items():\n if len(groupList) > 1:\n raise ExtractorError(\"Kerning classes are in an conflicting state.\")", "response": "Validate that a glyph is part of more than one class."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreplacing the renamed pair members in the kerning with the new ones.", "response": "def _replaceRenamedPairMembers(kerning, leftRename, rightRename):\n \"\"\"\n Populate the renamed pair members into the kerning.\n \"\"\"\n renamedKerning = {}\n for (left, right), value in kerning.items():\n left = leftRename.get(left, left)\n right = rightRename.get(right, right)\n renamedKerning[left, right] = value\n return renamedKerning"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef 
_renameClasses(classes, prefix):\n renameMap = {}\n for classID, glyphList in classes.items():\n if len(glyphList) == 0:\n groupName = \"%s_empty_lu.%d_st.%d_cl.%d\" % (prefix, classID[0], classID[1], classID[2])\n elif len(glyphList) == 1:\n groupName = list(glyphList)[0]\n else:\n glyphList = list(sorted(glyphList))\n groupName = prefix + glyphList[0]\n renameMap[classID] = groupName\n return renameMap", "response": "Returns a dictionary mapping class IDs to nice strings."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _extractFeatureClasses(lookupIndex, subtableIndex, classDefs, coverage=None):\n # gather the class members\n classDict = {}\n for glyphName, classIndex in classDefs.items():\n if classIndex not in classDict:\n classDict[classIndex] = set()\n classDict[classIndex].add(glyphName)\n # specially handle class index 0\n revisedClass0 = set()\n if coverage is not None and 0 in classDict:\n for glyphName in classDict[0]:\n if glyphName in coverage:\n revisedClass0.add(glyphName)\n elif coverage is not None and 0 not in classDict:\n revisedClass0 = set(coverage)\n for glyphList in classDict.values():\n revisedClass0 = revisedClass0 - glyphList\n classDict[0] = revisedClass0\n # flip the class map around\n classes = {}\n for classIndex, glyphList in classDict.items():\n classes[lookupIndex, subtableIndex, classIndex] = frozenset(glyphList)\n return classes", "response": "Extract classes for a specific lookup in a specific subtable."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the voters from the server.", "response": "def _get_voters(cls, session, owner_id, poll_id, answer_id):\n \"\"\"\n https://vk.com/dev/polls.getVoters\n \"\"\"\n from .users import User\n\n return session.fetch_items(\"polls.getVoters\", User._get_users, count=100, owner_id=owner_id, poll_id=poll_id, answer_ids=answer_id)"} {"SOURCE": "codesearchnet", "instruction": "Write a 
Python 3 function that can\nadd a module as an included module.", "response": "def add_include(self, name, module_spec):\n \"\"\"Adds a module as an included module.\n\n :param name:\n Name under which the included module should be exposed in the\n current module.\n :param module_spec:\n ModuleSpec of the included module.\n \"\"\"\n assert name, 'name is required'\n assert self.can_include\n\n if name in self.includes:\n raise ThriftCompilerError(\n 'Cannot include module \"%s\" as \"%s\" in \"%s\". '\n 'The name is already taken.'\n % (module_spec.name, name, self.path)\n )\n\n self.includes[name] = module_spec\n self.scope.add_include(name, module_spec.scope, module_spec.surface)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef link(self):\n if self.linked:\n return self\n\n self.linked = True\n\n included_modules = []\n\n # Link includes\n for include in self.includes.values():\n included_modules.append(include.link().surface)\n\n self.scope.add_surface('__includes__', tuple(included_modules))\n self.scope.add_surface('__thrift_source__', self.thrift_source)\n\n # Link self\n for linker in LINKERS:\n linker(self.scope).link()\n\n self.scope.add_surface('loads', Deserializer(self.protocol))\n self.scope.add_surface('dumps', Serializer(self.protocol))\n\n return self", "response": "Link all the types in this module and all included modules."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncompile the given Thrift document into a Python module.", "response": "def compile(self, name, contents, path=None):\n \"\"\"Compile the given Thrift document into a Python module.\n\n The generated module contains,\n\n .. py:attribute:: __services__\n\n A collection of generated classes for all services defined in the\n thrift file.\n\n .. versionchanged:: 1.0\n\n Renamed from ``services`` to ``__services__``.\n\n .. 
py:attribute:: __types__\n\n A collection of generated types for all types defined in the\n thrift file.\n\n .. versionchanged:: 1.0\n\n Renamed from ``types`` to ``__types__``.\n\n .. py:attribute:: __includes__\n\n A collection of modules included by this module.\n\n .. versionadded:: 1.0\n\n .. py:attribute:: __constants__\n\n A mapping of constant name to value for all constants defined in\n the thrift file.\n\n .. versionchanged:: 1.0\n\n Renamed from ``constants`` to ``__constants__``.\n\n .. py:attribute:: __thrift_source__\n\n Contents of the .thrift file from which this module was compiled.\n\n .. versionadded:: 1.1\n\n .. py:function:: dumps(obj)\n\n Serializes the given object using the protocol the compiler was\n instantiated with.\n\n .. py:function:: loads(cls, payload)\n\n Deserializes an object of type ``cls`` from ``payload`` using the\n protocol the compiler was instantiated with.\n\n .. py:function:: dumps.message(obj, seqid=0)\n\n Serializes the given request or response into a\n :py:class:`~thriftrw.wire.Message` using the protocol that the\n compiler was instantiated with.\n\n See :ref:`calling-apache-thrift`.\n\n .. versionadded:: 1.0\n\n .. py:function:: loads.message(service, payload)\n\n Deserializes a :py:class:`~thriftrw.wire.Message` from\n ``payload`` using the protocol the compiler was instantiated with.\n A request or response of a method defined in the given service is\n parsed in the message body.\n\n See :ref:`calling-apache-thrift`.\n\n .. versionadded:: 1.0\n\n And one class each for every struct, union, exception, enum, and\n service defined in the IDL.\n\n Service classes have references to\n :py:class:`thriftrw.spec.ServiceFunction` objects for each method\n defined in the service.\n\n :param str name:\n Name of the Thrift document. This will be the name of the\n generated module.\n :param str contents:\n Thrift document to compile\n :param str path:\n Path to the Thrift file being compiled. 
If not specified, imports\n from within the Thrift file will be disallowed.\n :returns:\n ModuleSpec of the generated module.\n \"\"\"\n assert name\n\n if path:\n path = os.path.abspath(path)\n if path in self._module_specs:\n return self._module_specs[path]\n\n module_spec = ModuleSpec(name, self.protocol, path, contents)\n if path:\n self._module_specs[path] = module_spec\n\n program = self.parser.parse(contents)\n\n header_processor = HeaderProcessor(self, module_spec, self.include_as)\n for header in program.headers:\n header.apply(header_processor)\n\n generator = Generator(module_spec.scope, strict=self.strict)\n for definition in program.definitions:\n generator.process(definition)\n\n return module_spec"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef t_LITERAL(self, t):\n r'(\\\"([^\\\\\\n]|(\\\\.))*?\\\")|\\'([^\\\\\\n]|(\\\\.))*?\\''\n s = t.value[1:-1]\n maps = {\n 't': '\\t',\n 'r': '\\r',\n 'n': '\\n',\n '\\\\': '\\\\',\n '\\'': '\\'',\n '\"': '\\\"'\n }\n i = 0\n length = len(s)\n val = ''\n while i < length:\n if s[i] == '\\\\':\n i += 1\n if s[i] in maps:\n val += maps[s[i]]\n else:\n msg = 'Cannot escape character: %s' % s[i]\n raise ThriftParserError(msg)\n else:\n val += s[i]\n i += 1\n\n t.value = val\n return t", "response": "t_LITERAL is a parser for the ISO - 8601 tag."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef t_IDENTIFIER(self, t):\n r'[a-zA-Z_](\\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*'\n\n if t.value in THRIFT_KEYWORDS:\n # Not an identifier after all.\n t.type = t.value.upper()\n\n return t", "response": "t_IDENTIFIER is a identifier"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef input(self, data):\n # input(..) doesn't reset the lineno. 
We have to do that manually.\n self._lexer.lineno = 1\n return self._lexer.input(data)", "response": "Reset the lexer and feed in new input."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a photo object from a JSON response.", "response": "def from_json(cls, session, photo_json):\n \"\"\"\n https://vk.com/dev/objects/photo\n \"\"\"\n photo = cls()\n photo.id = photo_json.get('id')\n photo.album_id = photo_json.get('album_id')\n photo.owner_id = photo_json.get('owner_id')\n photo.user_id = photo_json.get('user_id')\n photo.text = photo_json.get('text')\n photo.type = \"photo\"\n photo.date = photo_json.get('date')\n photo.photo_75 = photo_json.get('photo_75')\n photo.photo_130 = photo_json.get('photo_130')\n photo.photo_604 = photo_json.get('photo_604')\n photo.photo_807 = photo_json.get('photo_807')\n photo.photo_1280 = photo_json.get('photo_1280')\n photo.photo_2560 = photo_json.get('photo_2560')\n photo._session = session\n return photo"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting all photos for a user or group.", "response": "def _get_photos(session, user_or_group_id):\n \"\"\"\n https://vk.com/dev/photos.getAll\n \"\"\"\n response = session.fetch_items(\"photos.getAll\", Photo.from_json, count=200, owner_id=user_or_group_id)\n return response"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _get_owner_cover_photo_upload_server(session, group_id, crop_x=0, crop_y=0, crop_x2=795, crop_y2=200):\n group_id = abs(group_id)\n response = session.fetch(\"photos.getOwnerCoverPhotoUploadServer\", group_id=group_id, crop_x=crop_x, crop_y=crop_y, crop_x2=crop_x2, crop_y2=crop_y2)\n return response['upload_url']", "response": "Get owner cover photo upload server."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _save_owner_cover_photo(session, hash, photo):\n response 
= session.fetch('photos.saveOwnerCoverPhoto', hash=hash, photo=photo)\n return response", "response": "Save a photo to the owner cover."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _get_save_wall_photo(session, photo, server, hash, user_id=None, group_id=None):\n if group_id < 0:\n group_id = abs(group_id)\n\n response = session.fetch(\"photos.saveWallPhoto\", photo=photo, server=server, hash=hash, user_id=user_id, group_id=group_id)[0]\n return response['id'], response['owner_id']", "response": "Get the ID and owner ID of a save wall photo."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting the ID and owner ID of a save messages photo.", "response": "def _get_save_messages_photo(session, photo, server, hash):\n \"\"\"\n https://vk.com/dev/photos.saveMessagesPhoto\n \"\"\"\n response = session.fetch(\"photos.saveMessagesPhoto\", photo=photo, server=server, hash=hash)[0]\n return response['id'], response['owner_id']"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_city(self):\n response = self._session.fetch(\"users.get\", user_ids=self.id, fields=\"city\")[0]\n if response.get('city'):\n return City.from_json(self._session, response.get('city'))", "response": "Returns the city of the user if it exists else None."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_country(self):\n response = self._session.fetch(\"users.get\", user_ids=self.id, fields=\"country\")[0]\n if response.get('country'):\n return Country.from_json(self._session, response.get('country'))", "response": "Get the country of the user."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_followers(self):\n response = self._session.fetch_items(\"users.getFollowers\", self.from_json, self._session, 
count=1000, user_id=self.id, fields=self.USER_FIELDS)\n return response", "response": "Get a list of followers for this user."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets a User object from the session.", "response": "def _get_user(session, slug_or_user_id):\n \"\"\"\n :param slug_or_user_id: str or int\n :return: User\n \"\"\"\n user_json_items = session.fetch('users.get', user_ids=slug_or_user_id, fields=User.USER_FIELDS)\n return User.from_json(session, user_json_items[0])"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets the data from the", "response": "def get_data():\n \"\"\"\n Currently pretends to talk to an instrument and get back the magnitud\n and phase of the measurement.\n \"\"\"\n\n # pretend we're measuring a noisy resonance at zero\n y = 1.0 / (1.0 + 1j*(n_x.get_value()-0.002)*1000) + _n.random.rand()*0.1\n\n # and that it takes time to do so\n _t.sleep(0.1)\n\n # return mag phase\n return abs(y), _n.angle(y, True)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nlist the keys and values.", "response": "def List(self):\n \"\"\"\n Lists the keys and values.\n \"\"\"\n print()\n for key in list(self.keys()):\n print(key,'=',self[key])\n print()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the key - value pair and dumps to the preferences file.", "response": "def Set(self, key, value):\n \"\"\"\n Sets the key-value pair and dumps to the preferences file.\n \"\"\"\n if not value == None: self.prefs[key] = value\n else: self.prefs.pop(key)\n\n self.Dump()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a directory of the specified path in the spinmob directory.", "response": "def MakeDir(self, path=\"temp\"):\n \"\"\"\n Creates a directory of the specified path in the .spinmob directory.\n \"\"\"\n full_path = 
_os.path.join(self.path_home, path)\n\n # only make it if it doesn't exist!\n if not _os.path.exists(full_path): _os.makedirs(full_path)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nlisting the directory of the user s log files.", "response": "def ListDir(self, path=\"temp\"):\n \"\"\"\n Returns a list of files in the specified path (directory), or an\n empty list if the directory doesn't exist.\n \"\"\"\n full_path = _os.path.join(self.path_home, path)\n\n # only if the path exists!\n if _os.path.exists(full_path) and _os.path.isdir(full_path):\n return _os.listdir(full_path)\n else:\n return []"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nconvert list to string", "response": "def _convert_list2str(self, fields):\n \"\"\"\n :param fields: ('bdate', 'domain')\n :return: 'bdate,domain'\n \"\"\"\n if isinstance(fields, tuple) or isinstance(fields, list):\n return ','.join(fields)\n return fields"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef p_start(self, p):\n '''start : header definition'''\n p[0] = ast.Program(headers=p[1], definitions=p[2])", "response": "start : header definition"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef p_include(self, p):\n '''include : INCLUDE IDENTIFIER LITERAL\n | INCLUDE LITERAL'''\n if len(p) == 4:\n p[0] = ast.Include(name=p[2], path=p[3], lineno=p.lineno(1))\n else:\n p[0] = ast.Include(name=None, path=p[2], lineno=p.lineno(1))", "response": "include : INCLUDE IDENTIFIER LITERAL\n | INCLUDE LITERAL"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef p_namespace(self, p):\n '''namespace : NAMESPACE namespace_scope IDENTIFIER'''\n p[0] = ast.Namespace(scope=p[2], name=p[3], lineno=p.lineno(1))", "response": "parse a NAMESPACE node"} {"SOURCE": "codesearchnet", 
"instruction": "Make a summary of the following Python 3 code\ndef p_const(self, p):\n '''const : CONST field_type IDENTIFIER '=' const_value\n | CONST field_type IDENTIFIER '=' const_value sep'''\n p[0] = ast.Const(\n name=p[3],\n value_type=p[2],\n value=p[5],\n lineno=p.lineno(3),\n )", "response": "parse a CONST expression"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef p_const_value_primitive(self, p):\n '''const_value_primitive : INTCONSTANT\n | DUBCONSTANT\n | LITERAL\n | const_bool'''\n p[0] = ast.ConstPrimitiveValue(p[1], lineno=p.lineno(1))", "response": "Parse the CONST_VALUE_PRIMITIVE instruction."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef p_const_list(self, p):\n '''const_list : '[' const_list_seq ']' '''\n p[0] = ast.ConstList(list(p[2]), p.lineno(1))", "response": "const_list : '[' const_list_seq ']'"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nparses a CONSTANT_REF node", "response": "def p_const_ref(self, p):\n '''const_ref : IDENTIFIER'''\n p[0] = ast.ConstReference(p[1], lineno=p.lineno(1))"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nparsing TYPEDEF field_type IDENTIFIER annotations", "response": "def p_typedef(self, p):\n '''typedef : TYPEDEF field_type IDENTIFIER annotations'''\n p[0] = ast.Typedef(\n name=p[3], target_type=p[2], annotations=p[4], lineno=p.lineno(3)\n )"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef p_enum(self, p): # noqa\n '''enum : ENUM IDENTIFIER '{' enum_seq '}' annotations'''\n p[0] = ast.Enum(\n name=p[2], items=p[4], annotations=p[6], lineno=p.lineno(2)\n )", "response": "Parse an enum statement."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef p_enum_item(self, p):\n 
'''enum_item : IDENTIFIER '=' INTCONSTANT annotations\n | IDENTIFIER annotations'''\n if len(p) == 5:\n p[0] = ast.EnumItem(\n name=p[1], value=p[3], annotations=p[4], lineno=p.lineno(1)\n )\n else:\n p[0] = ast.EnumItem(\n name=p[1], value=None, annotations=p[2], lineno=p.lineno(1)\n )", "response": "Parse an enum item."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef p_struct(self, p):\n '''struct : STRUCT IDENTIFIER '{' field_seq '}' annotations'''\n p[0] = ast.Struct(\n name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2)\n )", "response": "parse a STRUCT object"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef p_exception(self, p):\n '''exception : EXCEPTION IDENTIFIER '{' field_seq '}' annotations'''\n p[0] = ast.Exc(\n name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2)\n )", "response": "parse an exception expression"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef p_service(self, p):\n '''service : SERVICE IDENTIFIER '{' function_seq '}' annotations\n | SERVICE IDENTIFIER EXTENDS IDENTIFIER \\\n '{' function_seq '}' annotations\n '''\n\n if len(p) == 7:\n p[0] = ast.Service(\n name=p[2],\n functions=p[4],\n parent=None,\n annotations=p[6],\n lineno=p.lineno(2),\n )\n else:\n p[0] = ast.Service(\n name=p[2],\n functions=p[6],\n parent=ast.ServiceReference(p[4], p.lineno(4)),\n annotations=p[8],\n lineno=p.lineno(2),\n )", "response": "parse a service statement"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nparse a function expression", "response": "def p_function(self, p):\n '''function : oneway function_type IDENTIFIER '(' field_seq ')' \\\n throws annotations '''\n p[0] = ast.Function(\n name=p[3],\n parameters=p[5],\n return_type=p[2],\n exceptions=p[7],\n oneway=p[1],\n annotations=p[8],\n 
lineno=p.lineno(3),\n )"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nparses a field declaration", "response": "def p_field(self, p):\n '''field : field_id field_req field_type IDENTIFIER annotations\n | field_id field_req field_type IDENTIFIER '=' const_value \\\n annotations'''\n\n if len(p) == 8:\n default = p[6]\n annotations = p[7]\n else:\n default = None\n annotations = p[5]\n\n p[0] = ast.Field(\n id=p[1],\n name=p[4],\n field_type=p[3],\n requiredness=p[2],\n default=default,\n annotations=annotations,\n lineno=p.lineno(4),\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nparses the field ID.", "response": "def p_field_id(self, p):\n '''field_id : INTCONSTANT ':'\n | '''\n if len(p) == 3:\n\n if p[1] == 0:\n # Prevent users from ever using field ID 0. It's reserved for\n # internal use only.\n raise ThriftParserError(\n 'Line %d: Field ID 0 is reserved for internal use.'\n % p.lineno(1)\n )\n\n p[0] = p[1]\n else:\n p[0] = None"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nparses a ref_type expression", "response": "def p_ref_type(self, p):\n '''ref_type : IDENTIFIER'''\n p[0] = ast.DefinedType(p[1], lineno=p.lineno(1))"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nparses the base type of the object.", "response": "def p_base_type(self, p): # noqa\n '''base_type : BOOL annotations\n | BYTE annotations\n | I8 annotations\n | I16 annotations\n | I32 annotations\n | I64 annotations\n | DOUBLE annotations\n | STRING annotations\n | BINARY annotations'''\n\n name = p[1]\n if name == 'i8':\n name = 'byte'\n\n p[0] = ast.PrimitiveType(name, p[2])"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef p_map_type(self, p):\n '''map_type : MAP '<' field_type ',' field_type '>' annotations'''\n p[0] = ast.MapType(key_type=p[3], 
value_type=p[5], annotations=p[7])", "response": "parse a MAP type"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef p_list_type(self, p):\n '''list_type : LIST '<' field_type '>' annotations'''\n p[0] = ast.ListType(value_type=p[3], annotations=p[5])", "response": "parse LIST < field_type > annotations"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset < field_type > annotations", "response": "def p_set_type(self, p):\n '''set_type : SET '<' field_type '>' annotations'''\n p[0] = ast.SetType(value_type=p[3], annotations=p[5])"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _parse_seq(self, p):\n # This basically says:\n #\n # - When you reach the end of the list, construct and return an empty\n # deque.\n # - Otherwise, prepend to start of what you got from the parser.\n #\n # So this ends up constructing an in-order list.\n if len(p) == 4:\n p[3].appendleft(p[1])\n p[0] = p[3]\n elif len(p) == 3:\n p[2].appendleft(p[1])\n p[0] = p[2]\n elif len(p) == 1:\n p[0] = deque()\n else:\n raise ThriftParserError(\n 'Wrong number of tokens received for expression at line %d'\n % p.lineno(1)\n )", "response": "Helper function to parse a sequence rule."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nparse the given input.", "response": "def parse(self, input, **kwargs):\n \"\"\"Parse the given input.\n\n :param input:\n String containing the text to be parsed.\n :raises thriftrw.errors.ThriftParserError:\n For parsing errors.\n \"\"\"\n return self._parser.parse(input, lexer=self._lexer, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget comments from the VK.", "response": "def _get_comments(session, group_or_user_id, wall_id):\n \"\"\"\n https://vk.com/dev/wall.getComments\n \"\"\"\n return 
session.fetch_items(\"wall.getComments\", Comment.from_json, count=100, owner_id=group_or_user_id, post_id=wall_id, need_likes=1)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _get_comments_count(session, group_or_user_id, wall_id):\n response = session.fetch(\"wall.getComments\", count=100, owner_id=group_or_user_id, post_id=wall_id)\n return response.get('count')", "response": "Get the number of comments for a given user in a given group."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_likes(self):\n from .users import User\n return self._session.fetch_items('likes.getList', User._get_user, count=100, type='post', owner_id=self.from_id, item_id=self.id)", "response": "Get a list of likes."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the number of likes for this item.", "response": "def get_likes_count(self):\n \"\"\"\n https://vk.com/dev/likes.getList\n \"\"\"\n response = self._session.fetch('likes.getList', count=1, type='post', owner_id=self.from_id, item_id=self.id)\n likes_count = response.get('count')\n return likes_count"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets all reposts for this post.", "response": "def get_reposts(self):\n \"\"\"\n https://vk.com/dev/wall.getReposts\n \"\"\"\n return self._session.fetch_items('wall.getReposts', self.from_json, count=1000, owner_id=self.from_id, post_id=self.id)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _wall_post(session, owner_id, message=None, attachments=None, from_group=True):\n response = session.fetch(\"wall.post\", owner_id=owner_id, message=message, attachments=attachments, from_group=from_group)\n return response", "response": "Post a new entry to the Wall."} {"SOURCE": "codesearchnet", 
"instruction": "Implement a function in Python 3 to\nget the number of friends for a user", "response": "def _get_friends_count(session, user_id):\n \"\"\"\n https://vk.com/dev/friends.get\n \"\"\"\n response = session.fetch('friends.get', user_id=user_id, count=1)\n return response[\"count\"]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef acquire_fake_data(number_of_points=1000):\n # time array\n t = _n.linspace(0,10,number_of_points)\n\n return(t, [_n.cos(t)*(1.0+0.2*_n.random.random(number_of_points)),\n _n.sin(t +0.5*_n.random.random(number_of_points))])", "response": "This function generates some fake data and returns two channels of data in the form of\n time_array [ channel1 channel2... ]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef load(path=None, first_data_line='auto', filters='*.*', text='Select a file, FACEHEAD.', default_directory='default_directory', quiet=True, header_only=False, transpose=False, **kwargs):\n d = databox(**kwargs)\n d.load_file(path=path, first_data_line=first_data_line,\n filters=filters, text=text, default_directory=default_directory,\n header_only=header_only)\n\n if not quiet: print(\"\\nloaded\", d.path, \"\\n\")\n\n if transpose: return d.transpose()\n return d", "response": "Load a data file into the databox data class."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef load_multiple(paths=None, first_data_line=\"auto\", filters=\"*.*\", text=\"Select some files, FACEHEAD.\", default_directory=\"default_directory\", quiet=True, header_only=False, transpose=False, **kwargs):\n if paths == None: paths = _s.dialogs.load_multiple(filters, text, default_directory)\n if paths is None : return\n\n datas = []\n for path in paths:\n if _os.path.isfile(path): datas.append(load(path=path, first_data_line=first_data_line,\n filters=filters, text=text, 
default_directory=default_directory,\n header_only=header_only, transpose=transpose, **kwargs))\n\n return datas", "response": "Load a list of data files into a list of databox data objects."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef more_info(self):\n print(\"\\nDatabox Instance\", self.path)\n print(\"\\nHeader\")\n for h in self.hkeys: print(\" \"+h+\":\", self.h(h))\n s = \"\\nColumns (\"+str(len(self.ckeys))+\"): \"\n for c in self.ckeys: s = s+c+\", \"\n print(s[:-2])", "response": "Prints out more information about the databox."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _globals(self):\n\n # start with numpy\n globbies = dict(_n.__dict__)\n globbies.update(_special.__dict__)\n\n # update with required stuff\n globbies.update({'h':self.h, 'c':self.c, 'd':self, 'self':self})\n\n # update with user stuff\n globbies.update(self.extra_globals)\n\n return globbies", "response": "Returns the globals needed for eval statements."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nload a file and store the header info in self. headers and self. columns.", "response": "def load_file(self, path=None, first_data_line='auto', filters='*.*', text='Select a file, FACEPANTS.', default_directory=None, header_only=False, quiet=False):\n \"\"\"\n This will clear the databox, load a file, storing the header info in \n self.headers, and the data in self.columns\n\n If first_data_line=\"auto\", then the first data line is assumed to be \n the first line where all the elements are numbers.\n\n If you specify a first_data_line (index, starting at 0), the columns \n need not be numbers. 
Everything above will be considered header \n information and below will be data columns.\n\n In both cases, the line used to label the columns will always be the \n last header line with the same (or more) number of elements as the \n first data line.\n \n Parameters\n ----------\n path=None\n Path to the file. Using None will bring up a dialog.\n filters='*.*' \n Filter for the file dialog (if path isn't specified)\n text='Select a file, FACEPANTS.' \n Prompt on file dialog\n default_directory=None\n Which spinmob.settings key to use for the dialog's default \n directory. Will create one if it doesn't already exist.\n header_only=False \n Only load the header\n quiet=False \n Don't print anything while loading.\n \"\"\"\n \n # Set the default directory\n if default_directory is None: default_directory = self.directory\n\n # Ask user for a file to open\n if path == None:\n path = _s.dialogs.load(filters=filters,\n default_directory=self.directory,\n text=text)\n self.path = path\n\n if path is None:\n return None\n\n # make sure the file exists!\n if not _os.path.exists(path):\n if not quiet: print(\"ERROR: \"+repr(path)+\" does not exist.\")\n return None\n\n # clear all the existing data\n self.clear()\n\n\n\n # First check if the file is SPINMOB_BINARY format!\n f = open(path, 'rb')\n\n # If this file is in SPINMOB_BINARY mode!\n if f.read(14).decode('utf-8') == 'SPINMOB_BINARY':\n \n # Next character is the delimiter\n self.delimiter = f.read(1).decode('utf-8')\n \n # Find the newline and get the data type\n s = ' '\n while not s[-1] == '\\n': s = s+f.read(1).decode('utf-8')\n \n # Rest of the line is the binary dtype\n self.h(SPINMOB_BINARY = s.strip())\n\n # Now manually assemble the header lines to use in the analysis \n # below. 
If I try readline() on the binary file, it will crash.\n lines = ['\\n']\n \n # The end of the header is specified by 'SPINMOB_BINARY' on its own line.\n while not lines[-1] == 'SPINMOB_BINARY':\n \n # Get the next line, one character at a time.\n s = ' '\n while not s[-1] == '\\n': s = s+f.read(1).decode('utf-8')\n \n # Okay we have it\n lines.append(s.strip())\n \n # Pop that last line, which should be 'SPINMOB_BINARY'\n lines.pop(-1)\n \n # We've reached the end of the header.\n \n # Close the binary read.\n f.close()\n \n\n\n # If we're not in binary mode, we can read all the lines and find\n # the delimiter as usual. (In binary mode, the delimiter is specified)\n if not 'SPINMOB_BINARY' in self.hkeys:\n\n # For non-binary files, we always read all the lines.\n f = open(path, 'r')\n lines = f.readlines()\n f.close()\n \n # Determine the delimiter\n if self.delimiter is None:\n \n # loop from the end of the file until we get something other than white space\n for n in range(len(lines)):\n \n # strip away the white space\n s = lines[-n-1].strip()\n \n # if this line has any content\n if len(s) > 0:\n \n # try the different delimiter schemes until we find one\n # that produces a number. 
Otherwise it's ambiguous.\n if _s.fun.is_a_number(s.split(None)[0]): self.delimiter = None\n elif _s.fun.is_a_number(s.split(',') [0]): self.delimiter = ','\n elif _s.fun.is_a_number(s.split(';') [0]): self.delimiter = ';'\n \n # quit the loop!\n break\n \n # Done reading lines and auto-determining delimiter.\n \n\n\n ##### Pares the header from lines\n self.header_lines = []\n\n for n in range(len(lines)):\n\n # split the line by the delimiter\n s = lines[n].strip().split(self.delimiter)\n\n # remove a trailing whitespace entry if it exists.\n if len(s) and s[-1].strip() == '': s.pop(-1)\n\n # first check and see if this is a data line (all elements are numbers)\n if first_data_line == \"auto\" and _s.fun.elements_are_numbers(s):\n\n # we've reached the first data line\n first_data_line = n\n\n # quit the header loop\n break;\n\n ### after that check, we know it's a header line\n\n # save the lines for the avid user.\n self.header_lines.append(lines[n].strip())\n\n # store the hkey and the rest of it\n if len(s):\n hkey = s[0]\n if self.delimiter is None: remainder = ' '.join(s[1:])\n else: remainder = self.delimiter.join(s[1:])\n\n # first thing to try is simply evaluating the remaining string\n try: self.insert_header(hkey, eval(remainder, self._globals()))\n\n # otherwise store the string\n except: self.insert_header(hkey, remainder)\n\n # now we have a valid set of column ckeys one way or another, and we know first_data_line.\n if header_only: return self\n\n \n \n \n \n \n \n # Deal with the binary mode\n if 'SPINMOB_BINARY' in self.hkeys:\n \n # Read the binary file\n f = open(path, 'rb')\n s = f.read()\n f.close()\n \n # Get the delimiter for easier coding\n delimiter = self.delimiter.encode('utf-8')\n \n # Get the binary mode, e.g., 'float32'\n binary = self.h('SPINMOB_BINARY')\n \n # Number of bytes per element\n size = eval('_n.'+binary+'().itemsize', dict(_n=_n))\n \n # Location of first ckey\n start = s.find(b'SPINMOB_BINARY',14) + 15\n \n # Continue 
until we reach the last character.\n while not start >= len(s):\n \n # Get the location of the end of the ckey\n stop = s.find(delimiter, start)\n \n # Woa, Nelly! We're at the end of the file.\n if stop == -1: break\n ckey = s[start:stop].decode('utf-8').strip()\n \n # Get the array length\n start = stop+1\n stop = s.find(b'\\n', start)\n length = int(s[start:stop].strip())\n \n # Get the data!\n start = stop+1\n stop = start+size*length\n self[ckey] = _n.fromstring(s[start:stop], binary)\n \n # Go to next ckey\n start = stop+1 \n \n \n \n # Otherwise we have a text file to load.\n else:\n # Make sure first_data_line isn't still 'auto'\n # which happens if there's no data, or if it's a binary file\n if first_data_line == \"auto\" and not 'SPINMOB_BINARY' in self.hkeys:\n if not quiet: print(\"\\ndatabox.load_file(): Could not find a line of pure data! Perhaps check the delimiter?\")\n return self\n \n \n ##### at this point we've found the first_data_line,\n \n # look for the ckeys\n \n # special case: no header\n if first_data_line == 0: ckeys = []\n \n # start by assuming it's the previous line\n else: ckeys = lines[first_data_line-1].strip().split(self.delimiter)\n \n # count the number of actual data columns for comparison\n column_count = len(lines[first_data_line].strip().split(self.delimiter))\n \n # check to see if ckeys is equal in length to the\n # number of data columns. If it isn't, it's a false ckeys line\n if len(ckeys) >= column_count:\n # it is close enough\n # if we have too many column keys, mention it\n while len(ckeys) > column_count:\n extra = ckeys.pop(-1)\n if not quiet: print(\"Extra ckey: \"+extra)\n \n else:\n # it is an invalid ckeys line. 
Generate our own!\n ckeys = []\n for m in range(0, column_count): ckeys.append(\"c\"+str(m))\n \n # last step with ckeys: make sure they're all different!\n self.ckeys = []\n while len(ckeys):\n \n # remove the key\n ckey = ckeys.pop(0)\n \n # if there is a duplicate\n if (ckey in ckeys) or (ckey in self.ckeys):\n # increase the label index until it's unique\n n=0\n while (ckey+\"_\"+str(n) in ckeys) or (ckey+\"_\"+str(n) in self.ckeys): n+=1\n ckey = ckey+\"_\"+str(n)\n self.ckeys.append(ckey)\n \n # initialize the columns arrays\n # I did benchmarks and there's not much improvement by using numpy-arrays here.\n for label in self.ckeys: self.columns[label] = []\n \n \n \n \n \n \n \n \n # Python 2 format\n #if _sys.version_info[0] == 2:\n try:\n def fix(x): return str(x.replace('i','j'))\n \n # loop over the remaining data lines, converting to numbers\n z = _n.genfromtxt((fix(x) for x in lines[first_data_line:]),\n delimiter=self.delimiter,\n dtype=_n.complex)\n \n # Python 3 format\n except:\n def fix(x): return bytearray(x.replace('i','j'), encoding='utf-8') \n \n # loop over the remaining data lines, converting to numbers\n z = _n.genfromtxt((fix(x) for x in lines[first_data_line:]),\n delimiter=self.delimiter,\n dtype=_n.complex)\n \n # genfromtxt returns a 1D array if there is only one data line.\n # highly confusing behavior, numpy!\n if len(_n.shape(z)) == 1:\n # check to make sure the data file contains only 1 column of data\n rows_of_data = len(lines) - first_data_line\n if rows_of_data == 1: z = _n.array([z])\n else: z = _n.array(z)\n \n # fix for different behavior of genfromtxt on single columns\n if len(z.shape) == 2: z = z.transpose()\n else: z = [z]\n \n # Add all the columns\n for n in range(len(self.ckeys)):\n \n # if any of the imaginary components are non-zero, use complex\n if _n.any(_n.imag(z[n])): self[n] = z[n]\n else: self[n] = _n.real(z[n])\n\n # Done with loading in the columns of data\n\n \n \n # now, as an added bonus, rename some of 
the obnoxious headers\n for k in self.obnoxious_ckeys:\n if k in self.columns:\n self.columns[self.obnoxious_ckeys[k]] = self.columns[k]\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef save_file(self, path=None, filters='*.dat', force_extension=None, force_overwrite=False, header_only=False, delimiter='use current', binary=None):\n \n # Make sure there isn't a problem later with no-column databoxes\n if len(self)==0: header_only=True\n\n # This is the final path. We now write to a temporary file in the user\n # directory, then move it to the destination. This (hopefully) fixes\n # problems with sync programs.\n if path in [None]: path = _s.dialogs.save(filters, default_directory=self.directory)\n if path in [\"\", None]:\n print(\"Aborted.\")\n return False\n \n # Force the extension (we do this here redundantly, because the user may have also\n # specified a path explicitly)\n if not force_extension == None:\n \n # In case the user put \"*.txt\" instead of just \"txt\"\n force_extension = force_extension.replace('*','').replace('.','')\n \n # If the file doesn't end with the extension, add it\n if not _os.path.splitext(path)[-1][1:] == force_extension:\n path = path + '.' 
+ force_extension\n\n # Save the path for future reference\n self.path=path\n\n # if the path exists, make a backup\n if _os.path.exists(path) and not force_overwrite:\n _os.rename(path,path+\".backup\")\n\n # get the delimiter\n if delimiter == \"use current\":\n if self.delimiter is None: delimiter = \"\\t\"\n else: delimiter = self.delimiter\n\n # figure out the temporary path\n temporary_path = _os.path.join(_s.settings.path_home, \"temp-\"+str(int(1e3*_time.time()))+'-'+str(int(1e9*_n.random.rand(1))))\n\n # open the temporary file\n f = open(temporary_path, 'w')\n \n \n \n # Override any existing binary if we're supposed to\n if binary in [False, 'text', 'Text', 'ASCII', 'csv', 'CSV']:\n self.pop_header('SPINMOB_BINARY', True)\n binary = None\n \n # If the binary flag is any kind of binary format, add the key\n if not binary in [None, False, 'text', 'Text', 'ASCII', 'csv', 'CSV']: \n self.h(SPINMOB_BINARY=binary)\n \n # Now use the header element to determine the binary mode\n if 'SPINMOB_BINARY' in self.hkeys:\n \n # Get the binary mode (we'll use this later)\n binary = self.pop_header('SPINMOB_BINARY')\n \n # If it's \"True\", default to float32\n if binary in ['True', True, 1]: binary = 'float32'\n \n # Write the special first key.\n f.write('SPINMOB_BINARY' + delimiter + binary + '\\n')\n \n # Write the usual header\n for k in self.hkeys: f.write(k + delimiter + repr(self.headers[k]) + \"\\n\")\n f.write('\\n')\n\n # if we're not just supposed to write the header\n if not header_only: \n \n # Normal ascii saving mode.\n if binary in [None, 'None', False, 'False']:\n \n # write the ckeys\n elements = []\n for ckey in self.ckeys: \n elements.append(str(ckey).replace(delimiter,'_'))\n f.write(delimiter.join(elements) + \"\\n\")\n \n # now loop over the data\n for n in range(0, len(self[0])):\n # loop over each column\n elements = []\n for m in range(0, len(self.ckeys)):\n # write the data if there is any, otherwise, placeholder\n if n < len(self[m]):\n 
elements.append(str(self[m][n]))\n else:\n elements.append('_')\n f.write(delimiter.join(elements) + \"\\n\")\n\n # Binary mode\n else:\n # Announce that we're done with the header. It's binary time\n f.write('SPINMOB_BINARY\\n')\n \n # Loop over the ckeys\n for n in range(len(self.ckeys)):\n \n # Get the binary data string\n data_string = _n.array(self[n]).astype(binary).tostring()\n \n # Write the column\n # ckey + delimiter + count + \\n + datastring + \\n\n f.write(str(self.ckeys[n]).replace(delimiter,'_') \n + delimiter + str(len(self[n])) + '\\n')\n f.close()\n f = open(temporary_path, 'ab')\n f.write(data_string)\n f.close()\n f = open(temporary_path, 'a')\n f.write('\\n')\n\n f.close()\n\n # now move it\n _shutil.move(temporary_path, path)\n\n return self", "response": "Save the data to an ascii file."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn then'th data point from all columns.", "response": "def get_data_point(self, n):\n \"\"\"\n Returns the n'th data point (starting at 0) from all columns.\n\n Parameters\n ----------\n n \n Index of data point to return.\n \"\"\"\n # loop over the columns and pop the data\n point = []\n for k in self.ckeys: point.append(self[k][n])\n return point"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ninserting a data point at index.", "response": "def insert_data_point(self, new_data, index=None):\n \"\"\"\n Inserts a data point at index n.\n \n Parameters\n ----------\n new_data \n A list or array of new data points, one for each column.\n index \n Where to insert the point(s) in each column. 
None => append.\n \"\"\"\n\n if not len(new_data) == len(self.columns) and not len(self.columns)==0:\n print(\"ERROR: new_data must have as many elements as there are columns.\")\n return\n\n # otherwise, we just auto-add this data point as new columns\n elif len(self.columns)==0:\n for i in range(len(new_data)): self[i] = [new_data[i]]\n\n # otherwise it matches length so just insert it.\n else:\n for i in range(len(new_data)):\n\n # get the array and turn it into a list\n data = list(self[i])\n\n # append or insert\n if index is None: data.append( new_data[i])\n else: data.insert(index, new_data[i])\n\n # reconvert to an array\n self[i] = _n.array(data)\n \n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nexecutes a script and returns the result.", "response": "def execute_script(self, script, g=None):\n \"\"\"\n Runs a script, returning the result.\n\n Parameters\n ----------\n script\n String script to be evaluated (see below).\n g=None\n Optional dictionary of additional globals for the script evaluation.\n These will automatically be inserted into self.extra_globals.\n\n Usage\n -----\n Scripts are of the form:\n\n \"3.0 + x/y - d[0] where x=3.0*c('my_column')+h('setting'); y=d[1]\"\n\n By default, \"d\" refers to the databox object itself, giving access to \n everything and enabling complete control over the universe. Meanwhile, \n c() and h() give quick reference to d.c() and d.h() to get columns and \n header lines. Additionally, these scripts can see all of the numpy \n functions like sin, cos, sqrt, etc.\n\n If you would like access to additional globals in a script,\n there are a few options in addition to specifying the g parametres. \n You can set self.extra_globals to the appropriate globals dictionary \n or add globals using self.insert_global(). 
Setting g=globals() will \n automatically insert all of your current globals into this databox \n instance.\n\n There are a few shorthand scripts available as well. You can simply type\n a column name such as 'my_column' or a column number like 2. However, I\n only added this functionality as a shortcut, and something like\n \"2.0*a where a=my_column\" will not work unless 'my_column is otherwise\n defined. I figure since you're already writing a complicated script in \n that case, you don't want to accidentally shortcut your way into using \n a column instead of a constant! Use \"2.0*a where a=c('my_column')\" \n instead.\n \"\"\"\n\n # add any extra user-supplied global variables for the eventual eval() call.\n if not g==None: self.extra_globals.update(g)\n\n # If the script is not a list of scripts, return the script value.\n # This is the termination of a recursive call.\n if not _s.fun.is_iterable(script):\n\n # special case\n if script is None: return None\n\n # get the expression and variables dictionary\n [expression, v] = self._parse_script(script)\n\n # if there was a problem parsing the script\n if v is None:\n print(\"ERROR: Could not parse '\"+script+\"'\")\n return None\n\n # get all the numpy stuff too\n g = self._globals()\n g.update(v)\n\n # otherwise, evaluate the script using python's eval command\n return eval(expression, g)\n\n # Otherwise, this is a list of (lists of) scripts. 
Make the recursive call.\n output = []\n for s in script: output.append(self.execute_script(s))\n\n return output"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _parse_script(self, script, n=0):\n\n if n > 1000:\n print(\"This script ran recursively 1000 times!\")\n a = input(\" or (q)uit: \")\n if a.strip().lower() in ['q', 'quit']:\n script = None\n\n if script is None: return [None, None]\n\n # check if the script is simply an integer\n if type(script) in [int,int]:\n if script<0: script = script+len(self.ckeys)\n return [\"___\"+str(script), {\"___\"+str(script):self[script]}]\n\n # the scripts would like to use calls like \"h('this')/3.0*c('that')\",\n # so to make eval() work we should add these functions to a local list\n\n # first split up by \"where\"\n split_script = script.split(\" where \")\n\n\n ########################################\n # Scripts without a \"where\" statement:\n ########################################\n\n # if it's a simple script, like \"column0\" or \"c(3)/2.0\"\n if len(split_script) is 1:\n if self.debug: print(\"script of length 1\")\n\n # try to evaluate the script\n\n # first try to evaluate it as a simple column label\n if n==0 and script in self.ckeys:\n # only try this on the zero'th attempt\n # if this is a recursive call, there can be ambiguities if the\n # column names are number strings\n return ['___', {'___':self[script]}]\n\n\n # Otherwise, evaluate it.\n try:\n b = eval(script, self._globals())\n return ['___', {'___':b}]\n except:\n print()\n print(\"ERROR: Could not evaluate '\"+str(script)+\"'\")\n return [None, None]\n\n\n #######################################\n # Full-on fancy scripts\n #######################################\n\n # otherwise it's a complicated script like \"c(1)-a/2 where a=h('this')\"\n\n # tidy up the expression\n expression = split_script[0].strip()\n\n # now split the variables list up by ,\n varsplit = 
split_script[1].split(';')\n\n # loop over the entries in the list of variables, storing the results\n # of evaluation in the \"stuff\" dictionary\n stuff = dict()\n for var in varsplit:\n\n # split each entry by the \"=\" sign\n s = var.split(\"=\")\n if len(s) == 1:\n print(s, \"has no '=' in it\")\n return [None, None]\n\n # tidy up into \"variable\" and \"column label\"\n v = s[0].strip()\n c = s[1].strip()\n\n # now try to evaluate c, given our current globbies\n\n # recursively call this sub-script. At the end of all this mess\n # we want the final return value to be the first expression\n # and a full dictionary of variables to fill it\n [x,y] = self._parse_script(c, n+1)\n\n # if it's not working, just quit out.\n if y is None: return [None, None]\n\n stuff[v] = y[x]\n\n # at this point we've found or generated the list\n return [expression, stuff]", "response": "This function parses a script and returns a list of the appropriate column names."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncopy the headers from the source databox to this databoxes", "response": "def copy_headers(self, source_databox):\n \"\"\"\n Loops over the hkeys of the source_databox, updating this databoxes' header.\n \"\"\"\n for k in source_databox.hkeys: self.insert_header(k, source_databox.h(k))\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef copy_columns(self, source_databox):\n for k in source_databox.ckeys: self.insert_column(source_databox[k], k)\n\n return self", "response": "Copy the columns from the source databox to this databoxes"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef copy_all(self, source_databox):\n self.copy_headers(source_databox)\n self.copy_columns(source_databox)\n return self", "response": "Copies the header and columns from source_databox to this databox."} {"SOURCE": "codesearchnet", "instruction": 
"Given the following Python 3 function, write the documentation\ndef insert_globals(self, *args, **kwargs):\n for a in args: kwargs[a.__name__] = a\n self.extra_globals.update(kwargs)", "response": "Adds or overwrites the supplied object in the self. extra_globals dictionary."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef insert_header(self, hkey, value, index=None):\n #if hkey is '': return\n\n # if it's an integer, use the hkey from the list\n if type(hkey) in [int, int]: hkey = self.hkeys[hkey]\n\n # set the data\n self.headers[str(hkey)] = value\n if not hkey in self.hkeys:\n if index is None: self.hkeys.append(str(hkey))\n else: self.hkeys.insert(index, str(hkey))\n return self", "response": "This will insert a value into the header and hkeys."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef is_same_as(self, other_databox, headers=True, columns=True, header_order=True, column_order=True, ckeys=True):\n d = other_databox\n \n if not hasattr(other_databox, '_is_spinmob_databox'): return False\n \n # Proceed by testing things one at a time, returning false if one fails\n if headers:\n \n # Same number of elements\n if not len(self.hkeys) == len(d.hkeys): return False\n \n # Elements\n if header_order and not self.hkeys == d.hkeys: return False\n \n # Each value\n for k in self.hkeys:\n # Make sure the key exists\n if not k in d.hkeys: return False\n \n # Make sure it's the same.\n if not self.h(k) == d.h(k): return False\n \n if columns:\n \n # Same number of columns\n if not len(self.ckeys) == len(d.ckeys): return False\n \n # If we're checking columns by ckeys\n if ckeys:\n \n # Columns\n if column_order and not self.ckeys == d.ckeys: return False\n \n # Each value of each array\n for k in self.ckeys:\n # Make sure the key exists\n if not k in d.ckeys: return False\n \n # Check the values\n if not (_n.array(self[k]) == 
_n.array(d[k])).all(): return False\n \n # Otherwise we're ignoring ckeys\n else:\n for n in range(len(self.ckeys)):\n if not (_n.array(self[n]) == _n.array(d[n])).all(): return False\n \n # Passes all tests\n return True", "response": "Tests that the information in this databox is the same as that of the other databox."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef pop_header(self, hkey, ignore_error=False):\n\n # try the integer approach first to allow negative values\n if type(hkey) is not str:\n try: \n return self.headers.pop(self.hkeys.pop(hkey))\n except: \n if not ignore_error:\n print(\"ERROR: pop_header() could not find hkey \"+str(hkey))\n return None\n else:\n \n try:\n # find the key integer and pop it\n hkey = self.hkeys.index(hkey)\n \n # pop it!\n return self.headers.pop(self.hkeys.pop(hkey))\n\n except:\n if not ignore_error:\n print(\"ERROR: pop_header() could not find hkey \"+str(hkey))\n return", "response": "This will remove and return the specified header value from the list of headers."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef rename_column(self, column, new_name):\n if type(column) is not str: column = self.ckeys[column]\n self.ckeys[self.ckeys.index(column)] = new_name\n self.columns[new_name] = self.columns.pop(column)\n return self", "response": "This will rename the column."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nremoves data points not satisfying the supplied conditions. Conditions can be truth arrays (having the same length as the columns!) or scripted strings. 
Example Workflow ---------------- d1 = spinmob.data.load() d2 = d1.trim( (2len(self): stop = len(self)\n if step == None: step = 1\n \n # Return what was asked for\n return self[range(start, stop, step)]\n \n # Otherwise assume it's an integer\n return self.columns[self.ckeys[n]]", "response": "Takes a single argument or keyword argument and returns the specified \n column."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef h(self, *args, **kwargs):\n # If not arguments, print everything\n if len(args) + len(kwargs) == 0:\n print(\"Headers\")\n for n in range(len(self.hkeys)):\n print(' '+str(n)+': '+str(self.hkeys[n])+' = '+repr(self.h(n)))\n return\n\n # first loop over kwargs if there are any to set header elements\n for k in list(kwargs.keys()):\n self.insert_header(k, kwargs[k])\n\n # Meow search for a key if specified\n if len(args):\n # this can be shortened. Eventually, it'd be nice to get a tuple back!\n hkey = args[0]\n\n # if this is an index\n if type(hkey) in [int, int]: return self.headers[self.hkeys[hkey]]\n\n # if this is an exact match\n elif hkey in self.hkeys: return self.headers[hkey]\n\n # Look for a fragment.\n else:\n for k in self.hkeys:\n if k.find(hkey) >= 0:\n return self.headers[k]\n print()\n print(\"ERROR: Couldn't find '\"+str(hkey) + \"' in header.\")\n print(\"Possible values:\")\n for k in self.hkeys: print(k)\n print()\n return None", "response": "This function searches through the header keys and returns the value of the specified key in the header."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set(self, **kwargs):\n if len(kwargs)==0: return self\n \n # Set settings\n for k in list(kwargs.keys()): self[k] = kwargs[k]\n \n # Plot if we're supposed to.\n if self['autoplot'] and not self._initializing: self.plot()\n\n return self", "response": "Sets the value of the parameter in the current object."} 
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef print_fit_parameters(self):\n s = ''\n if self.results and self.results[1] is not None:\n s = s + \"\\n# FIT RESULTS (reduced chi squared = {:s})\\n\".format(str(self.reduced_chi_squareds()))\n for n in range(len(self._pnames)):\n s = s + \"{:10s} = {:G}\\n\".format(self._pnames[n], self.results[0][n])\n\n elif self.results and self.results[1] is None:\n s = s + \"\\n# FIT DID NOT CONVERGE\\n\"\n for n in range(len(self._pnames)):\n s = s + \"{:10s} = {:G}\\n\".format(self._pnames[n], self.results[0][n])\n\n else: s = s + \"\\n# NO FIT RESULTS\\n\"\n\n print(s)", "response": "Print the fit parameters in a way that can be copied into python."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset the functions used to describe the data.", "response": "def set_functions(self, f='a*x*cos(b*x)+c', p='a=-0.2, b, c=3', c=None, bg=None, **kwargs):\n \"\"\"\n Sets the function(s) used to describe the data.\n\n Parameters\n ----------\n f=['a*x*cos(b*x)+c', 'a*x+c'] \n This can be a string function, a defined function\n my_function(x,a,b), or a list of some combination\n of these two types of objects. The length of such\n a list must be equal to the number of data sets\n supplied to the fit routine.\n p='a=1.5, b' \n This must be a comma-separated string list of\n parameters used to fit. If an initial guess value is\n not specified, 1.0 will be used.\n If a function object is supplied, it is assumed that\n this string lists the parameter names in order.\n c=None \n Fit _constants; like p, but won't be allowed to float\n during the fit. 
This can also be None.\n bg=None \n Can be functions in the same format as f describing a\n background (which can be subtracted during fits, etc)\n \n \n Additional keyword arguments are added to the globals used when\n evaluating the functions.\n \"\"\"\n\n # initialize everything\n self._pnames = []\n self._cnames = []\n self._pguess = []\n self._constants = []\n \n # Update the globals\n self._globals.update(kwargs)\n\n # store these for later\n self._f_raw = f\n self._bg_raw = bg\n\n # break up the constant names and initial values.\n if c:\n for s in c.split(','):\n\n # split by '=' and see if there is an initial value\n s = s.split('=')\n\n # add the name to the list\n self._cnames.append(s[0].strip())\n\n # if there is a guess value, add this (or 1.0)\n if len(s) > 1: self._constants.append(float(s[1]))\n else: self._constants.append(1.0)\n\n\n # break up the parameter names and initial values.\n for s in p.split(','):\n\n # split by '=' and see if there is an initial value\n s = s.split('=')\n\n # add the name to the list\n self._pnames.append(s[0].strip())\n\n # if there is a guess value, add this (or 1.0)\n if len(s) > 1: self._pguess.append(float(s[1]))\n else: self._pguess.append(1.0)\n\n\n # use the internal settings we just set to create the functions\n self._update_functions()\n \n if self['autoplot']: self.plot()\n \n return self"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _update_functions(self):\n\n self.f = []\n self.bg = []\n self._fnames = []\n self._bgnames = []\n self._odr_models = [] # Like f, but different parameters, for use in ODR\n\n\n f = self._f_raw\n bg = self._bg_raw\n\n # make sure f and bg are lists of matching length\n if not _s.fun.is_iterable(f) : f = [f]\n if not _s.fun.is_iterable(bg): bg = [bg]\n while len(bg) < len(f): bg.append(None)\n\n # get a comma-delimited string list of parameter names for the \"normal\" function\n pstring = 'x, ' + ', 
'.join(self._pnames)\n \n # get the comma-delimited string for the ODR function\n pstring_odr = 'x, '\n for n in range(len(self._pnames)): pstring_odr = pstring_odr+'p['+str(n)+'], '\n\n # update the globals for the functions\n # the way this is done, we must redefine the functions\n # every time we change a constant\n for cname in self._cnames: self._globals[cname] = self[cname]\n\n # loop over all the functions and create the master list\n for n in range(len(f)):\n\n # if f[n] is a string, define a function on the fly.\n if isinstance(f[n], str):\n \n # \"Normal\" least squares function (y-error bars only)\n self.f.append( eval('lambda ' + pstring + ': ' + f[n], self._globals))\n self._fnames.append(f[n])\n \n # \"ODR\" compatible function (for x-error bars), based on self.f\n self._odr_models.append( _odr.Model(eval('lambda p,x: self.f[n]('+pstring_odr+')', dict(self=self, n=n))))\n \n # Otherwise, just append it.\n else:\n self.f.append(f[n])\n self._fnames.append(f[n].__name__)\n\n # if bg[n] is a string, define a function on the fly.\n if isinstance(bg[n], str):\n self.bg.append(eval('lambda ' + pstring + ': ' + bg[n], self._globals))\n self._bgnames.append(bg[n])\n else:\n self.bg.append(bg[n])\n if bg[n] is None: self._bgnames.append(\"None\")\n else: self._bgnames.append(bg[n].__name__)\n\n # update the format of all the settings\n for k in list(self._settings.keys()): self[k] = self[k]\n\n # make sure we don't think our fit results are valid!\n self.clear_results()", "response": "Updates the internal functions for the master list of the current ODR entry."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef set_data(self, xdata=[1,2,3,4,5], ydata=[1.7,2,3,4,3], eydata=None, **kwargs):\n # SET UP DATA SETS TO MATCH EACH OTHER AND NUMBER OF FUNCTIONS\n \n # At this stage:\n # xdata, ydata 'script', [1,2,3], [[1,2,3],'script'], ['script', [1,2,3]]\n # eydata, exdata 'script', [1,1,1], 
[[1,1,1],'script'], ['script', [1,1,1]], 3, [3,[1,2,3]], None\n\n # if xdata, ydata, or eydata are bare scripts, make them into lists\n if type(xdata) is str: xdata = [xdata]\n if type(ydata) is str: ydata = [ydata]\n if type(eydata) is str or _s.fun.is_a_number(eydata) or eydata is None: eydata = [eydata]\n #if type(exdata) is str or _s.fun.is_a_number(exdata) or exdata is None: exdata = [exdata]\n\n # xdata and ydata ['script'], [1,2,3], [[1,2,3],'script'], ['script', [1,2,3]]\n # eydata ['script'], [1,1,1], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None]\n\n # if the first element of data is a number, then this is a normal array\n if _s.fun.is_a_number(xdata[0]): xdata = [xdata]\n if _s.fun.is_a_number(ydata[0]): ydata = [ydata]\n\n # xdata and ydata ['script'], [[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]]\n # eydata ['script'], [1,1,1], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None]\n\n # if the first element of eydata is a number, this could also just be an error bar value\n # Note: there is some ambiguity here, if the number of data sets equals the number of data points!\n if _s.fun.is_a_number(eydata[0]) and len(eydata) == len(ydata[0]): eydata = [eydata]\n #if _s.fun.is_a_number(exdata[0]) and len(exdata) == len(xdata[0]): exdata = [exdata]\n \n # xdata and ydata ['script'], [[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]]\n # eydata ['script'], [[1,1,1]], [[1,1,1],'script'], ['script', [1,1,1]], [3], [3,[1,2,3]], [None]\n\n # Inflate the x, ex, and ey data sets to match the ydata sets\n while len(xdata) < len(ydata): xdata .append( xdata[0])\n while len(ydata) < len(xdata): ydata .append( ydata[0])\n #while len(exdata) < len(xdata): exdata.append(exdata[0])\n while len(eydata) < len(ydata): eydata.append(eydata[0])\n\n\n # make sure these lists are the same length as the number of functions\n while len(ydata) < len(self.f): ydata.append(ydata[0])\n while len(xdata) < len(self.f): xdata.append(xdata[0])\n while 
len(eydata) < len(self.f): eydata.append(eydata[0])\n #while len(exdata) < len(self.f): exdata.append(exdata[0])\n\n # xdata and ydata ['script','script'], [[1,2,3],[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]]\n # eydata ['script','script'], [[1,1,1],[1,1,1]], [[1,1,1],'script'], ['script', [1,1,1]], [3,3], [3,[1,2,3]], [None,None]\n\n # Clean up exdata. If any element isn't None, the other None elements need\n # to be set to 0 so that ODR works.\n# if not exdata.count(None) == len(exdata):\n# # Search for and replace all None's with 0\n# for n in range(len(exdata)):\n# if exdata[n] == None: exdata[n] = 0\n# \n \n # store the data, script, or whatever it is!\n self._set_xdata = xdata\n self._set_ydata = ydata\n self._set_eydata = eydata\n #self._set_exdata = exdata\n self._set_data_globals.update(kwargs)\n\n # set the eyscale to 1 for each data set\n self['scale_eydata'] = [1.0]*len(self._set_xdata)\n #self['scale_exdata'] = [1.0]*len(self._set_xdata)\n \n # Update the settings so they match the number of data sets.\n for k in self._settings.keys(): self[k] = self[k]\n \n # Plot if necessary\n if self['autoplot']: self.plot()\n \n return self", "response": "This function will set the data of a specific entry in the log file."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef evaluate_script(self, script, **kwargs):\n self._set_data_globals.update(kwargs)\n return eval(script, self._set_data_globals)", "response": "Evaluates the supplied script (python-executable string).\n Useful for testing your scripts!\n\n globals already include all of numpy objects plus\n\n self = self\n f = self.f\n bg = self.bg\n\n and all the current guess parameters and constants\n\n kwargs are added to globals for script evaluation."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_data(self):\n # make sure we've done a \"set data\" call\n if 
len(self._set_xdata)==0 or len(self._set_ydata)==0: return [[],[],[]]\n\n # update the globals with the current fit parameter guess values\n for n in range(len(self._pnames)): self._set_data_globals[self._pnames[n]] = self._pguess[n]\n for n in range(len(self._cnames)): self._set_data_globals[self._cnames[n]] = self._constants[n]\n\n # update the globals with x, y, and ey, and the functions\n self._set_data_globals['f'] = self.f\n self._set_data_globals['bg'] = self.bg\n self._set_data_globals['self'] = self\n\n # possibilities after calling set_data():\n # xdata and ydata ['script','script'], [[1,2,3],[1,2,3]], [[1,2,3],'script'], ['script', [1,2,3]]\n # eydata ['script','script'], [[1,1,1],[1,1,1]], [[1,1,1],'script'], ['script', [1,1,1]], [3,3], [3,[1,2,3]], [None,None]\n\n # make a copy\n xdata = list(self._set_xdata)\n ydata = list(self._set_ydata)\n eydata = list(self._set_eydata)\n #exdata = list(self._set_exdata)\n\n # make sure they're all lists of numpy arrays\n for n in range(len(xdata)):\n\n # For xdata, handle scripts or arrays\n if type(xdata[n]) is str: xdata[n] = self.evaluate_script(xdata[n], **self._set_data_globals)\n else: xdata[n] = _n.array(xdata[n])*1.0\n\n # update the globals\n self._set_data_globals['x'] = xdata\n\n # make sure they're all lists of numpy arrays\n for n in range(len(ydata)):\n\n # For ydata, handle scripts or arrays\n if type(ydata[n]) is str: ydata[n] = self.evaluate_script(ydata[n], **self._set_data_globals)\n else: ydata[n] = _n.array(ydata[n])*1.0\n\n # update the globals\n self._set_data_globals['y'] = ydata\n\n # make sure they're all lists of numpy arrays\n self._guessed_eydata = False\n for n in range(len(eydata)):\n\n # handle scripts\n if type(eydata[n]) is str:\n eydata[n] = self.evaluate_script(eydata[n], **self._set_data_globals)\n\n # handle None (possibly returned by script): take a visually-appealing guess at the error\n if eydata[n] is None:\n eydata[n] = _n.ones(len(xdata[n])) * 
(max(ydata[n])-min(ydata[n]))*0.05\n self._guessed_eydata = True\n\n # handle constant error bars (possibly returned by script)\n if _s.fun.is_a_number(eydata[n]):\n eydata[n] = _n.ones(len(xdata[n])) * eydata[n]\n\n # make it an array\n eydata[n] = _n.array(eydata[n]) * self[\"scale_eydata\"][n]\n\n# # make sure they're all lists of numpy arrays\n# for n in range(len(exdata)):\n#\n# # handle scripts\n# if type(exdata[n]) is str:\n# exdata[n] = self.evaluate_script(exdata[n], **self._set_data_globals)\n#\n# # None is okay for exdata\n#\n# # handle constant error bars (possibly returned by script)\n# if _s.fun.is_a_number(exdata[n]):\n# exdata[n] = _n.ones(len(xdata[n])) * exdata[n]\n#\n# # make it an array\n# if not exdata[n] == None:\n# exdata[n] = _n.array(exdata[n]) * self[\"scale_exdata\"][n]\n\n # return it\n return xdata, ydata, eydata", "response": "Returns the current xdata ydata eydata after set_data() \n has been run."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef set_guess_to_fit_result(self):\n if self.results is None:\n print(\"No fit results to use! Run fit() first.\")\n return\n\n # loop over the results and set the guess values\n for n in range(len(self._pguess)): self._pguess[n] = self.results[0][n]\n\n if self['autoplot']: self.plot()\n\n return self", "response": "Set guess parameters to fit result."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn the data set of the current entry in the current object.", "response": "def get_processed_data(self, do_coarsen=True, do_trim=True):\n \"\"\"\n This will coarsen and then trim the data sets according to settings.\n \n Returns processed xdata, ydata, eydata.\n \n Parameters\n ----------\n do_coarsen=True\n Whether we should coarsen the data\n do_trim=True\n Whether we should trim the data\n \n Settings\n --------\n xmin, xmax, ymin, ymax\n Limits on x and y data points for trimming. 
\n coarsen\n Break the data set(s) into this many groups of points, and average\n each group into one point, propagating errors.\n \"\"\"\n\n # get the data\n xdatas, ydatas, eydatas = self.get_data()\n\n # get the trim limits (trimits)\n xmins = self['xmin']\n xmaxs = self['xmax']\n ymins = self['ymin']\n ymaxs = self['ymax']\n coarsen = self['coarsen'] \n\n # make sure we have one limit for each data set\n if type(xmins) is not list: xmins = [xmins] *len(xdatas)\n if type(xmaxs) is not list: xmaxs = [xmaxs] *len(xdatas)\n if type(ymins) is not list: ymins = [ymins] *len(xdatas)\n if type(ymaxs) is not list: ymaxs = [ymaxs] *len(xdatas)\n if type(coarsen) is not list: coarsen = [coarsen]*len(xdatas)\n\n # this should cover all the data sets (dimensions should match!)\n xdata_massaged = []\n ydata_massaged = []\n eydata_massaged = []\n #exdata_massaged = []\n for n in range(len(xdatas)):\n \n x = xdatas[n]\n y = ydatas[n]\n ey = eydatas[n]\n #ex = exdatas[n]\n \n # coarsen the data\n if do_coarsen:\n x = _s.fun.coarsen_array(x, self['coarsen'][n], 'mean')\n y = _s.fun.coarsen_array(y, self['coarsen'][n], 'mean')\n ey = _n.sqrt(_s.fun.coarsen_array(ey**2, self['coarsen'][n], 'mean')/self['coarsen'][n])\n# if not ex == None:\n# ex = _n.sqrt(_s.fun.coarsen_array(ex**2, self['coarsen'][n], 'mean')/self['coarsen'][n])\n \n if do_trim:\n # Create local mins and maxes\n xmin = xmins[n]\n ymin = ymins[n]\n xmax = xmaxs[n]\n ymax = ymaxs[n]\n \n # handle \"None\" limits\n if xmin is None: xmin = min(x)\n if xmax is None: xmax = max(x)\n if ymin is None: ymin = min(y)\n if ymax is None: ymax = max(y)\n \n # trim the data\n [xt, yt, eyt] = _s.fun.trim_data_uber([x, y, ey],\n [x>=xmin, x<=xmax,\n y>=ymin, y<=ymax])\n\n # Catch the over-trimmed case\n if(len(xt)==0): \n self._error(\"\\nDATA SET \"+str(n)+\": OOPS! OOPS! Specified limits (xmin, xmax, ymin, ymax) eliminate all data! 
Ignoring.\")\n else:\n x = xt\n y = yt\n ey = eyt\n #ex = ext\n \n # store the result\n xdata_massaged.append(x)\n ydata_massaged.append(y)\n eydata_massaged.append(ey)\n #exdata_massaged.append(ex)\n \n return xdata_massaged, ydata_massaged, eydata_massaged"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _massage_data(self):\n self._xdata_massaged, self._ydata_massaged, self._eydata_massaged = self.get_processed_data()\n \n# # Create the odr data.\n# self._odr_datas = []\n# for n in range(len(self._xdata_massaged)):\n# # Only exdata can be None; make sure it's zeros at least.\n# ex = self._exdata_massaged[n]\n# if ex == None: ex = _n.zeros(len(self._eydata_massaged[n]))\n# self._odr_datas.append(_odr.RealData(self._xdata_massaged[n],\n# self._ydata_massaged[n], \n# sx=ex, \n# sy=self._eydata_massaged[n]))\n return self", "response": "Processes the data and stores it."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nfits the data to the data and returns the results.", "response": "def fit(self, **kwargs):\n \"\"\"\n This will try to determine fit parameters using scipy.optimize.leastsq\n algorithm. This function relies on a previous call of set_data() and \n set_functions().\n\n Notes\n -----\n results of the fit algorithm are stored in self.results. \n See scipy.optimize.leastsq for more information.\n\n Optional keyword arguments are sent to self.set() prior to\n fitting. \n \"\"\"\n if len(self._set_xdata)==0 or len(self._set_ydata)==0:\n return self._error(\"No data. Please use set_data() prior to fitting.\")\n if self._f_raw is None:\n return self._error(\"No functions. 
Please use set_functions() prior to fitting.\")\n\n # Do the processing once, to increase efficiency\n self._massage_data()\n \n # Send the keyword arguments to the settings\n self.set(**kwargs)\n\n # do the actual optimization\n self.results = _opt.leastsq(self._studentized_residuals_concatenated, self._pguess, full_output=1)\n\n # plot if necessary\n if self['autoplot']: self.plot()\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nturns parameters to constants. As arguments, parameters must be strings. As keyword arguments, they can be set at the same time. Note this will NOT work when specifying a non-string fit function, because there is no flexibility in the number of arguments. To get around this, suppose you've defined a function stuff(x,a,b). Instead of sending the stuff object to self.set_functions() directly, make it a string function, e.g.: self.set_functions('stuff(x,a,b)', 'a,b', stuff=stuff)", "response": "def fix(self, *args, **kwargs):\n \"\"\"\n Turns parameters to constants. As arguments, parameters must be strings.\n As keyword arguments, they can be set at the same time.\n \n Note this will NOT work when specifying a non-string fit function, \n because there is no flexibility in the number of arguments. To get\n around this, suppose you've defined a function stuff(x,a,b). Instead\n of sending the stuff object to self.set_functions() directly, make it\n a string function, e.g.:\n \n self.set_functions('stuff(x,a,b)', 'a,b', stuff=stuff)\n \"\"\"\n\n # first set all the keyword argument values\n self.set(**kwargs)\n\n # get everything into one big list\n pnames = list(args) + list(kwargs.keys())\n\n # move each pname to the constants\n for pname in pnames:\n if not pname in self._pnames:\n self._error(\"Naughty. 
'\"+pname+\"' is not a valid fit parameter name.\")\n\n else:\n n = self._pnames.index(pname)\n\n # use the fit result if it exists\n if self.results: value = self.results[0][n]\n\n # otherwise use the guess value\n else: value = self._pguess[n]\n\n # make the switcheroo\n if type(self._pnames) is not list: self._pnames = list(self._pnames)\n if type(self._pguess) is not list: self._pguess = list(self._pguess)\n if type(self._cnames) is not list: self._cnames = list(self._cnames)\n if type(self._constants) is not list: self._constants = list(self._constants)\n self._pnames.pop(n)\n self._pguess.pop(n)\n self._cnames.append(pname)\n self._constants.append(value)\n\n # update\n self._update_functions()\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nturning a constant into a parameter. As arguments, parameters must be strings. As keyword arguments, they can be set at the same time.", "response": "def free(self, *args, **kwargs):\n \"\"\"\n Turns a constant into a parameter. As arguments, parameters must be strings.\n As keyword arguments, they can be set at the same time.\n \"\"\"\n # first set all the keyword argument values\n self.set(**kwargs)\n\n # get everything into one big list\n cnames = list(args) + list(kwargs.keys())\n\n # move each pname to the constants\n for cname in cnames:\n\n if not cname in self._cnames:\n self._error(\"Naughty. 
'\"+cname+\"' is not a valid constant name.\")\n\n else:\n n = self._cnames.index(cname)\n\n # make the switcheroo\n if type(self._pnames) is not list: self._pnames = list(self._pnames)\n if type(self._pguess) is not list: self._pguess = list(self._pguess)\n if type(self._cnames) is not list: self._cnames = list(self._cnames)\n if type(self._constants) is not list: self._constants = list(self._constants)\n self._pnames.append(self._cnames.pop(n))\n self._pguess.append(self._constants.pop(n))\n\n # update\n self._update_functions()\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _evaluate_all_functions(self, xdata, p=None):\n if p is None: p = self.results[0]\n\n output = []\n for n in range(len(self.f)):\n output.append(self._evaluate_f(n, self._xdata_massaged[n], p) )\n\n return output", "response": "This function evaluates all functions and returns a list of function outputs given the stored data sets."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _evaluate_f(self, n, xdata, p=None):\n # by default, use the fit values, otherwise, use the guess values.\n if p is None and self.results is not None: p = self.results[0]\n elif p is None and self.results is None: p = self._pguess\n\n # assemble the arguments for the function\n args = (xdata,) + tuple(p)\n\n # evaluate this function.\n return self.f[n](*args)", "response": "Evaluates a single function n for arbitrary xdata and p tuple."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nevaluate a single background function n for arbitrary xdata and p tuple.", "response": "def _evaluate_bg(self, n, xdata, p=None):\n \"\"\"\n Evaluates a single background function n for arbitrary xdata and p tuple.\n\n p=None means use the fit results\n \"\"\"\n # by default, use the fit values, otherwise, use the guess values.\n if p is None and self.results is not 
None: p = self.results[0]\n elif p is None and self.results is None: p = self._pguess\n\n # return None if there is no background function\n if self.bg[n] is None: return None\n\n # assemble the arguments for the function\n args = (xdata,) + tuple(p)\n\n # evaluate the function\n return self.bg[n](*args)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nformatting the value of a key - value pair as a string.", "response": "def _format_value_error(self, v, e, pm=\" +/- \"):\n \"\"\"\n Returns a string v +/- e with the right number of sig figs.\n \"\"\"\n # If we have weird stuff\n if not _s.fun.is_a_number(v) or not _s.fun.is_a_number(e) \\\n or v in [_n.inf, _n.nan, _n.NAN] or e in [_n.inf, _n.nan, _n.NAN]: \n return str(v)+pm+str(e)\n \n # Normal values.\n try:\n sig_figs = -int(_n.floor(_n.log10(abs(e))))+1 \n return str(_n.round(v, sig_figs)) + pm + str(_n.round(e, sig_figs))\n\n except:\n return str(v)+pm+str(e)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _studentized_residuals_fast(self, p=None):\n if len(self._set_xdata)==0 or len(self._set_ydata)==0: return\n \n if p is None:\n if self.results is None: p = self._pguess\n else: p = self.results[0]\n\n # evaluate the function for all the data, returns a list!\n f = self._evaluate_all_functions(self._xdata_massaged, p)\n\n # get the full residuals list\n residuals = []\n for n in range(len(f)):\n numerator = self._ydata_massaged[n]-f[n]\n denominator = _n.absolute(self._eydata_massaged[n])\n residuals.append(numerator/denominator)\n return residuals", "response": "This function returns a list of studentized residuals for all the data."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef chi_squareds(self, p=None):\n if len(self._set_xdata)==0 or len(self._set_ydata)==0: return None\n \n if p is None: p = self.results[0]\n\n # get the residuals\n rs = 
self.studentized_residuals(p)\n\n # Handle the none case\n if rs == None: return None\n\n # square em and sum em.\n cs = []\n for r in rs: cs.append(sum(r**2))\n return cs", "response": "Returns a list of chi squared for each data set. Also uses ydata_massaged."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef chi_squared(self, p=None):\n chi2s = self.chi_squareds(p)\n if chi2s == None: return None\n \n return sum(self.chi_squareds(p))", "response": "Returns the total chi squared of all massaged data sets."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef degrees_of_freedom(self):\n if len(self._set_xdata)==0 or len(self._set_ydata)==0: return None\n \n # Temporary hack: get the studentized residuals, which uses the massaged data\n # This should later be changed to get_massaged_data()\n r = self.studentized_residuals()\n \n # Happens if data / functions not defined\n if r == None: return\n \n # calculate the number of points\n N = 0.0\n for i in range(len(r)): N += len(r[i])\n \n return N-len(self._pnames)", "response": "Returns the number of degrees of freedom."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef reduced_chi_squareds(self, p=None):\n if len(self._set_xdata)==0 or len(self._set_ydata)==0: return None\n\n if p is None: p = self.results[0]\n r = self.studentized_residuals(p)\n \n # In case it's not possible to calculate\n if r is None: return\n\n # calculate the number of points\n N = 0\n for i in range(len(r)): N += len(r[i])\n\n # degrees of freedom\n dof_per_point = self.degrees_of_freedom()/N\n\n for n in range(len(r)):\n r[n] = sum(r[n]**2)/(len(r[n])*dof_per_point)\n\n return r", "response": "Returns the reduced chi squared for each massaged data set."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef 
reduced_chi_squared(self, p=None):\n \n if len(self._set_xdata)==0 or len(self._set_ydata)==0: return None\n if p is None: p = self.results[0]\n \n chi2 = self.chi_squared(p)\n dof = self.degrees_of_freedom()\n if not _s.fun.is_a_number(chi2) or not _s.fun.is_a_number(dof):\n return None\n \n return _n.divide(self.chi_squared(p), self.degrees_of_freedom())", "response": "Returns the reduced chi squared for all massaged data sets."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef plot(self, **kwargs):\n \n # Make sure there is data to plot.\n if len(self._set_xdata)==0 or len(self._set_ydata)==0: return self\n \n # Make sure the figures is a list\n if not self.figures == None and not type(self.figures) == list:\n self.figures = [self.figures]\n \n \n # Get the trimmed and full processed data\n xts, yts, eyts = self.get_processed_data()\n xas, yas, eyas = self.get_processed_data(do_trim=False)\n \n # update settings\n for k in kwargs: self[k] = kwargs[k]\n\n # Calculate all studentized residuals\n if len(self.f) > 0: rt = self.studentized_residuals()\n \n # make a new figure for each data set\n for n in range(len(self._set_xdata)):\n xt = xts[n]\n xa = xas[n]\n yt = yts[n]\n ya = yas[n]\n eyt = eyts[n]\n eya = eyas[n]\n #ext = exts[n]\n eyt = eyts[n]\n \n # get the next figure\n if self.figures == None: fig = _p.figure(self['first_figure']+n)\n else: fig = self.figures[n]\n \n # turn off interactive mode and clear the figure\n _p.ioff()\n fig.clear()\n \n # set up two axes. 
One for data and one for residuals.\n a1 = fig.add_subplot(211) # Residuals\n a2 = fig.add_subplot(212, sharex=a1) # Data\n a1.set_position([0.15, 0.72, 0.75, 0.15])\n a2.set_position([0.15, 0.10, 0.75, 0.60])\n\n # set the scales\n a1.set_xscale(self['xscale'][n])\n a2.set_xscale(self['xscale'][n])\n a2.set_yscale(self['yscale'][n])\n\n # Get the function xdata\n fxa = self._get_xdata_for_function(n,xa)\n fxt = self._get_xdata_for_function(n,xt)\n \n # get the values to subtract from ydata if subtracting the background\n if self['subtract_bg'][n] and not self.bg[n] is None:\n\n # if we have a fit, use that for the background\n if self.results: p = self.results[0]\n else: p = self._pguess\n \n # Get the background data\n d_ya = self._evaluate_bg(n, xa, p)\n d_fya = self._evaluate_bg(n, fxa, p)\n d_yt = self._evaluate_bg(n, xt, p)\n d_fyt = self._evaluate_bg(n, fxt, p)\n\n # Otherwise just make some zero arrays\n else:\n d_ya = 0*xa\n d_fya = 0*fxa\n d_yt = 0*xt\n d_fyt = 0*fxt\n\n \n \n # PLOT DATA FIRST\n\n # If we're supposed to, add the \"all\" data and function\n if self['plot_all_data'][n]:\n \n # Make it faint.\n style_data = dict(self['style_data' ][n]); style_data ['alpha'] = 0.3\n \n if self['plot_errors'][n]: a2.errorbar(xa, ya-d_ya, eya, zorder=5, **style_data)\n else: a2.plot (xa, ya-d_ya, zorder=5, **style_data)\n \n # add the trimmed data\n if self['plot_errors'][n]: a2.errorbar(xt, yt-d_yt, eyt, zorder=7, **self['style_data'][n])\n else: a2.plot( xt, yt-d_yt, zorder=7, **self['style_data'][n])\n \n # Zoom on just the data for now\n _s.tweaks.auto_zoom(axes=a2, draw=False)\n \n \n \n # PLOT FUNCTIONS\n \n if n < len(self.f): # If there are any functions to plot\n \n \n # Plot the GUESS under the fit\n if self['plot_guess'][n]:\n \n # FULL GUESS\n if self['plot_all_data'][n]:\n \n # Make it faint.\n style_guess = dict(self['style_guess'][n]); style_guess['alpha'] = 0.3\n \n # FULL background GUESS\n if self['plot_bg'][n] and self.bg[n] is not None:\n 
bg_gya = self._evaluate_bg(n, fxa, self._pguess)\n a2.plot(fxa, bg_gya-d_fya, zorder=9, **style_guess)\n \n # FULL guess\n gya = self._evaluate_f (n, fxa, self._pguess)\n a2.plot(fxa, gya-d_fya, zorder=9, **style_guess)\n \n # Update the trimmed datas so that the points line up\n [fxt] = _s.fun.trim_data_uber([fxt], [fxt>=min(xt), fxt<=max(xt)])\n\n # TRIMMED GUESS BACKGROUND\n \n # TRIMMED guess background curve\n if self['plot_bg'][n] and self.bg[n] is not None:\n bg_gyt = self._evaluate_bg(n, fxt, self._pguess)\n a2.plot(fxt, bg_gyt-d_fyt, zorder=9, **self['style_guess'][n])\n\n # TRIMMED main guess curve\n gyt = self._evaluate_f (n, fxt, self._pguess)\n a2.plot(fxt, gyt-d_fyt, zorder=9, **self['style_guess'][n])\n \n \n \n # Plot the FIT if there is one\n if not self.results == None:\n \n # FULL FIT\n if self['plot_all_data'][n]:\n \n # Make it faint.\n style_fit = dict(self['style_fit'][n]); style_fit['alpha'] = 0.3\n \n # FULL background fit\n if self['plot_bg'][n] and self.bg[n] is not None:\n bg_fya = self._evaluate_bg(n, fxa, self.results[0])\n a2.plot(fxa, bg_fya-d_fya, zorder=10, **style_fit)\n \n # FULL fit\n fya = self._evaluate_f (n, fxa, self.results[0])\n a2.plot(fxa, fya-d_fya, zorder=10, **style_fit)\n \n # Update the trimmed datas so that the points line up\n [fxt] = _s.fun.trim_data_uber([fxt], [fxt>=min(xt), fxt<=max(xt)])\n \n # TRIMMED FIT BACKGROUND\n if self['plot_bg'][n] and self.bg[n] is not None: \n bg_fyt = self._evaluate_bg(n, fxt, self.results[0]) \n a2.plot(fxt, bg_fyt-d_fyt, zorder=10, **self['style_fit'][n])\n \n # TRIMMED main curve\n fyt = self._evaluate_f(n, fxt, self.results[0])\n a2.plot(fxt, fyt-d_fyt, zorder=10, **self['style_fit'][n])\n\n if self['plot_guess_zoom'][n]: _s.tweaks.auto_zoom(axes=a2, draw=False)\n \n \n # plot the residuals only if there are functions defined\n if len(self.f):\n \n # If we're supposed to also plot all the data, we have to \n # Manually calculate the residuals. 
Clunky, I know.\n if self['plot_all_data'][n]:\n \n # Figure out what guy to use for the residuals\n if self.results is None: \n p = self._pguess\n style = style_guess\n else: \n p = self.results[0]\n style = style_fit\n \n # Calculate them\n ra = (ya-self._evaluate_f(n, xa, p))/eya\n \n # style_data, style_guess, and style_fit should already be faint\n a1.errorbar(xa, ra, _n.ones(len(ra)), **style_data)\n \n # Put the line on top\n a1.plot([min(xa), max(xa)], [0,0], **style)\n \n # Figure out what style to use for the line\n if self.results is None: style = self['style_guess'][n]\n else: style = self['style_fit' ][n]\n\n # Main residuals plot\n a1.errorbar (xt, rt[n], _n.ones(len(xt)), **self['style_data'][n])\n a1.plot([min(xt), max(xt)], [0,0], **style)\n \n \n # Tidy up\n yticklabels = a1.get_yticklabels()\n for m in range(2,len(yticklabels)-2): yticklabels[m].set_visible(False)\n for m in a1.get_xticklabels(): m.set_visible(False)\n \n # Add labels to the axes\n if self['xlabel'][n] is None: a2.set_xlabel('xdata['+str(n)+']')\n else: a2.set_xlabel(self['xlabel'][n])\n if self['ylabel'][n] is None:\n ylabel='ydata['+str(n)+']'\n if self['subtract_bg'][n] and self.bg[n] is not None:\n ylabel=ylabel+' - bg['+str(n)+']'\n a2.set_ylabel(ylabel)\n else: a2.set_ylabel(self['ylabel'][n])\n a1.set_ylabel('Studentized\\nResiduals')\n\n \n # Assemble the title\n wrap = 80\n indent = ' '\n \n # Include the function names if available\n if n < len(self.f):\n t = _textwrap.fill('Function ('+str(n)+'/'+str(len(self._fnames)-1)+'): y = '+self._fnames[n], wrap, subsequent_indent=indent)\n else:\n t = \"No functions defined. 
Use set_functions().\"\n\n if len(self._cnames):\n t1 = \"Constants: \"\n for i in range(len(self._cnames)):\n t1 = t1 + self._cnames[i] + \"={:G}, \".format(self._constants[i])\n t = t + '\\n' + _textwrap.fill(t1, wrap, subsequent_indent=indent)\n\n if self.results and not self.results[1] is None:\n t1 = \"Fit: \"\n for i in range(len(self._pnames)):\n t1 = t1 + self._pnames[i] + \"={:s}, \".format(self._format_value_error(self.results[0][i], _n.sqrt(self.results[1][i][i]), '$\\pm$'))\n t1 = t1 + '$\\chi^2_r$={} ({} DOF)'.format(\n self._format_value_error(self.reduced_chi_squared(), _n.sqrt(_n.divide(2.0,self.degrees_of_freedom())), '$\\pm$'), \n int(self.degrees_of_freedom()))\n \n t = t + '\\n' + _textwrap.fill(t1, wrap, subsequent_indent=indent)\n\n elif self.results:\n t1 = \"Fit did not converge: \"\n for i in range(len(self._pnames)):\n t1 = t1 + self._pnames[i] + \"={:8G}$, \"\n t = t + '\\n' + _textwrap.fill(t1, wrap, subsequent_indent=indent)\n\n a1.set_title(t, fontsize=10, ha='left', position=(0,1))\n\n \n # turn back to interactive and show the plots.\n _p.ion()\n if self.figures == None:\n _p.draw()\n _p.show()\n \n # End of new figure for each data set loop\n return self", "response": "Plots the data for the current set of items."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngenerates the xdata for plotting the function n.", "response": "def _get_xdata_for_function(self, n, xdata):\n \"\"\"\n Generates the x-data for plotting the function.\n \n Parameters\n ----------\n n\n Which data set we're using\n xdata\n Data set upon which to base this\n\n Returns\n -------\n float\n \"\"\"\n \n # Use the xdata itself for the function\n if self['fpoints'][n] in [None, 0]: return _n.array(xdata)\n \n # Otherwise, generate xdata with the number of fpoints\n \n # do exponential ranging if xscale is log\n if self['xscale'][n] == 'log':\n return _n.logspace(_n.log10(min(xdata)), _n.log10(max(xdata)),\n self['fpoints'][n], 
True, 10.0)\n \n # otherwise do linear spacing\n else:\n return _n.linspace(min(xdata), max(xdata), self['fpoints'][n])"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nremoving xmin xmax ymin ymax.", "response": "def untrim(self, n='all'):\n \"\"\"\n Removes xmin, xmax, ymin, and ymax. \n \n Parameters\n ----------\n n='all'\n Which data set to perform this action upon. 'all' means all data\n sets, or you can specify a list.\n \"\"\"\n if len(self._set_xdata)==0 or len(self._set_ydata)==0:\n self._error(\"No data. Please use set_data() and plot() prior to zooming.\")\n return\n\n if _s.fun.is_a_number(n): n = [n]\n elif isinstance(n,str): n = list(range(len(self._set_xdata)))\n\n # loop over the specified plots\n for i in n:\n self['xmin'][i] = None\n self['xmax'][i] = None\n self['ymin'][i] = None\n self['ymax'][i] = None\n \n # now show the update.\n self.clear_results()\n if self['autoplot']: self.plot()\n return self"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef zoom(self, n='all', xfactor=2.0, yfactor=2.0):\n if len(self._set_xdata)==0 or len(self._set_ydata)==0:\n self._error(\"No data. 
Please use set_data() and plot() prior to zooming.\")\n return\n\n # get the data\n xdata, ydata, eydata = self.get_data()\n\n if _s.fun.is_a_number(n): n = [n]\n elif isinstance(n,str): n = list(range(len(xdata)))\n\n # loop over the specified plots\n for i in n:\n fig = self['first_figure']+i\n try:\n xmin, xmax = _p.figure(fig).axes[1].get_xlim()\n xc = 0.5*(xmin+xmax)\n xs = 0.5*abs(xmax-xmin)\n self['xmin'][i] = xc - xfactor*xs\n self['xmax'][i] = xc + xfactor*xs\n\n ymin, ymax = _p.figure(fig).axes[1].get_ylim()\n yc = 0.5*(ymin+ymax)\n ys = 0.5*abs(ymax-ymin)\n self['ymin'][i] = yc - yfactor*ys\n self['ymax'][i] = yc + yfactor*ys\n except:\n self._error(\"Data \"+str(fig)+\" is not currently plotted.\")\n\n # now show the update.\n self.clear_results()\n if self['autoplot']: self.plot()\n\n return self", "response": "This will scale the chosen data set s plot range by the specified xfactor and yfactor respectively and set the trim limits xmin xmax ymin ymax accordingly"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef ginput(self, data_set=0, **kwargs):\n # this will temporarily fix the deprecation warning\n import warnings\n import matplotlib.cbook\n warnings.filterwarnings(\"ignore\",category=matplotlib.cbook.mplDeprecation)\n\n _s.tweaks.raise_figure_window(data_set+self['first_figure'])\n return _p.ginput(**kwargs)", "response": "Returns the value of ginput."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nmatches data sets x and y.", "response": "def _match_data_sets(x,y):\n \"\"\"\n Makes sure everything is the same shape. 
\"Intelligently\".\n \"\"\"\n # Handle the None for x or y\n if x is None: \n # If x is none, y can be either [1,2] or [[1,2],[1,2]]\n if _fun.is_iterable(y[0]):\n # make an array of arrays to match\n x = []\n for n in range(len(y)):\n x.append(list(range(len(y[n]))))\n else: x = list(range(len(y)))\n \n if y is None: \n # If x is none, y can be either [1,2] or [[1,2],[1,2]]\n if _fun.is_iterable(x[0]):\n # make an array of arrays to match\n y = []\n for n in range(len(x)):\n y.append(list(range(len(x[n]))))\n else: y = list(range(len(x)))\n \n # At this point they should be matched, but may still be 1D\n # Default behavior: if all elements are numbers in both, assume they match\n if _fun.elements_are_numbers(x) and _fun.elements_are_numbers(y):\n x = [x]\n y = [y]\n \n # Second default behavior: shared array [1,2,3], [[1,2,1],[1,2,1]] or vis versa\n if _fun.elements_are_numbers(x) and not _fun.elements_are_numbers(y): x = [x]*len(y)\n if _fun.elements_are_numbers(y) and not _fun.elements_are_numbers(x): y = [y]*len(x)\n\n # Clean up any remaining Nones\n for n in range(len(x)):\n if x[n] is None: x[n] = list(range(len(y[n])))\n if y[n] is None: y[n] = list(range(len(x[n])))\n \n return x, y"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ninflating ex to match the dimensionality of x intelligently.", "response": "def _match_error_to_data_set(x, ex):\n \"\"\"\n Inflates ex to match the dimensionality of x, \"intelligently\". 
\n x is assumed to be a 2D array.\n \"\"\"\n # Simplest case, ex is None or a number\n if not _fun.is_iterable(ex):\n \n # Just make a matched list of Nones\n if ex is None: ex = [ex]*len(x)\n \n # Make arrays of numbers\n if _fun.is_a_number(ex): \n value = ex # temporary storage\n ex = []\n for n in range(len(x)): \n ex.append([value]*len(x[n]))\n \n # Otherwise, ex is iterable\n \n # Default behavior: If the elements are all numbers and the length matches\n # that of the first x-array, assume this is meant to match all the x\n # data sets\n if _fun.elements_are_numbers(ex) and len(ex) == len(x[0]): ex = [ex]*len(x)\n\n # The user may specify a list of some iterable and some not. Assume\n # in this case that at least the lists are the same length \n for n in range(len(x)):\n # do nothing to the None's\n # Inflate single numbers to match\n if _fun.is_a_number(ex[n]): ex[n] = [ex[n]]*len(x[n])\n \n return ex"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef complex_data(data, edata=None, draw=True, **kwargs):\n _pylab.ioff()\n\n # generate the data the easy way\n try:\n rdata = _n.real(data)\n idata = _n.imag(data)\n if edata is None:\n erdata = None\n eidata = None\n else:\n erdata = _n.real(edata)\n eidata = _n.imag(edata)\n\n # generate the data the hard way.\n except:\n rdata = []\n idata = []\n if edata is None:\n erdata = None\n eidata = None\n else:\n erdata = []\n eidata = []\n\n for n in range(len(data)):\n rdata.append(_n.real(data[n]))\n idata.append(_n.imag(data[n]))\n\n if not edata is None:\n erdata.append(_n.real(edata[n]))\n eidata.append(_n.imag(edata[n]))\n\n if 'xlabel' not in kwargs: kwargs['xlabel'] = 'Real'\n if 'ylabel' not in kwargs: kwargs['ylabel'] = 'Imaginary'\n\n xy_data(rdata, idata, eidata, erdata, draw=False, **kwargs)\n\n if draw:\n _pylab.ion()\n _pylab.draw()\n _pylab.show()", "response": "Generate the imaginary vs real complex data for the internal class."} 
{"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ngenerate data and send to a single databox", "response": "def complex_databoxes(ds, script='d[1]+1j*d[2]', escript=None, **kwargs):\n \"\"\"\n Uses databoxes and specified script to generate data and send to \n spinmob.plot.complex_data()\n\n\n Parameters\n ----------\n ds \n List of databoxes\n script='d[1]+1j*d[2]' \n Complex-valued script for data array.\n escript=None \n Complex-valued script for error bars\n\n See spinmob.plot.complex.data() for additional optional keyword arguments.\n See spinmob.data.databox.execute_script() for more information about scripts.\n \"\"\"\n datas = []\n labels = []\n if escript is None: errors = None\n else: errors = []\n\n for d in ds:\n datas.append(d(script))\n labels.append(_os.path.split(d.path)[-1])\n if not escript is None: errors.append(d(escript))\n\n complex_data(datas, errors, label=labels, **kwargs)\n\n if \"draw\" in kwargs and not kwargs[\"draw\"]: return\n\n _pylab.ion()\n _pylab.draw()\n _pylab.show()\n \n return ds"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef complex_files(script='d[1]+1j*d[2]', escript=None, paths=None, **kwargs):\n ds = _data.load_multiple(paths=paths)\n\n if len(ds) == 0: return\n\n if 'title' not in kwargs: kwargs['title'] = _os.path.split(ds[0].path)[0]\n\n return complex_databoxes(ds, script=script, **kwargs)", "response": "Load a set of complex files and plots them in the real - imaginary plane."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a new object that can be used to plot a complex function over a specified range.", "response": "def complex_function(f='1.0/(1+1j*x)', xmin=-1, xmax=1, steps=200, p='x', g=None, erange=False, **kwargs):\n \"\"\"\n Plots function(s) in the complex plane over the specified range.\n\n Parameters\n ----------\n f='1.0/(1+1j*x)' \n Complex-valued function 
or list of functions to plot. \n These can be string functions or single-argument python functions;\n additional globals can be supplied by g.\n xmin=-1, xmax=1, steps=200 \n Range over which to plot and how many points to plot\n p='x'\n If using strings for functions, p is the independent parameter name.\n g=None \n Optional dictionary of extra globals. Try g=globals()!\n erange=False \n Use exponential spacing of the x data?\n\n See spinmob.plot.xy.data() for additional optional keyword arguments.\n \"\"\"\n kwargs2 = dict(xlabel='Real', ylabel='Imaginary')\n kwargs2.update(kwargs)\n function(f, xmin, xmax, steps, p, g, erange, plotter=xy_data, complex_plane=True, draw=True, **kwargs2)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nplots the magnitude and phase of complex ydata vs xdata. Parameters ---------- xdata Real-valued x-axis data ydata Complex-valued y-axis data eydata=None Complex-valued y-error exdata=None Real-valued x-error xscale='linear' 'log' or 'linear' scale of the x axis mscale='linear' 'log' or 'linear' scale of the magnitude axis pscale='linear' 'log' or 'linear' scale of the phase axis mlabel='Magnitude' y-axis label for magnitude plot plabel='Phase' y-axis label for phase plot phase='degrees' 'degrees' or 'radians' for the phase axis figure='gcf' Plot on the specified figure instance or 'gcf' for current figure. clear=1 Clear the figure? draw=True Draw the figure when complete? 
See spinmob.plot.xy.data() for additional optional keyword arguments.", "response": "def magphase_data(xdata, ydata, eydata=None, exdata=None, xscale='linear', mscale='linear', pscale='linear', mlabel='Magnitude', plabel='Phase', phase='degrees', figure='gcf', clear=1, draw=True, **kwargs):\n \"\"\"\n Plots the magnitude and phase of complex ydata vs xdata.\n\n Parameters\n ----------\n xdata \n Real-valued x-axis data\n ydata \n Complex-valued y-axis data\n eydata=None \n Complex-valued y-error\n exdata=None \n Real-valued x-error\n xscale='linear' \n 'log' or 'linear' scale of the x axis\n mscale='linear' \n 'log' or 'linear' scale of the magnitude axis\n pscale='linear' \n 'log' or 'linear' scale of the phase axis\n mlabel='Magnitude' \n y-axis label for magnitude plot\n plabel='Phase' \n y-axis label for phase plot\n phase='degrees' \n 'degrees' or 'radians' for the phase axis\n figure='gcf' \n Plot on the specified figure instance or 'gcf' for current figure.\n clear=1 \n Clear the figure?\n draw=True\n Draw the figure when complete?\n\n See spinmob.plot.xy.data() for additional optional keyword arguments.\n \"\"\"\n _pylab.ioff()\n \n # set up the figure and axes\n if figure == 'gcf': f = _pylab.gcf()\n if clear: f.clear()\n axes1 = _pylab.subplot(211)\n axes2 = _pylab.subplot(212,sharex=axes1)\n\n # Make sure the dimensionality of the data sets matches\n xdata, ydata = _match_data_sets(xdata, ydata)\n exdata = _match_error_to_data_set(xdata, exdata)\n eydata = _match_error_to_data_set(ydata, eydata)\n\n # convert to magnitude and phase\n m = []\n p = []\n em = []\n ep = []\n \n # Note this is a loop over data sets, not points.\n for l in range(len(ydata)):\n m.append(_n.abs(ydata[l]))\n p.append(_n.angle(ydata[l]))\n # get the mag - phase errors\n if eydata[l] is None:\n em.append(None)\n ep.append(None)\n else:\n er = _n.real(eydata[l])\n ei = _n.imag(eydata[l])\n em.append(0.5*((er+ei) + (er-ei)*_n.cos(p[l])) )\n ep.append(0.5*((er+ei) - 
(er-ei)*_n.cos(p[l]))/m[l] )\n\n # convert to degrees\n if phase=='degrees': \n p[-1] = p[-1]*180.0/_n.pi\n if not ep[l] is None: \n ep[l] = ep[l]*180.0/_n.pi\n\n\n if phase=='degrees': plabel = plabel + \" (degrees)\"\n else: plabel = plabel + \" (radians)\"\n if 'xlabel' in kwargs: xlabel=kwargs.pop('xlabel')\n else: xlabel=''\n if 'ylabel' in kwargs: kwargs.pop('ylabel')\n\n if 'autoformat' not in kwargs: kwargs['autoformat'] = True\n\n autoformat = kwargs['autoformat']\n kwargs['autoformat'] = False\n kwargs['xlabel'] = ''\n xy_data(xdata, m, em, exdata, ylabel=mlabel, axes=axes1, clear=0, xscale=xscale, yscale=mscale, draw=False, **kwargs)\n\n kwargs['autoformat'] = autoformat\n kwargs['xlabel'] = xlabel\n xy_data(xdata, p, ep, exdata, ylabel=plabel, axes=axes2, clear=0, xscale=xscale, yscale=pscale, draw=False, **kwargs)\n\n axes2.set_title('')\n\n if draw:\n _pylab.ion()\n _pylab.draw()\n _pylab.show()"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef magphase_databoxes(ds, xscript=0, yscript='d[1]+1j*d[2]', eyscript=None, exscript=None, g=None, **kwargs):\n databoxes(ds, xscript, yscript, eyscript, exscript, plotter=magphase_data, g=g, **kwargs)", "response": "Generate data and plot the complex magnitude and phase databoxes."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef magphase_files(xscript=0, yscript='d[1]+1j*d[2]', eyscript=None, exscript=None, paths=None, g=None, **kwargs):\n return files(xscript, yscript, eyscript, exscript, plotter=magphase_databoxes, paths=paths, g=g, **kwargs)", "response": "Returns a bunch of files that will be plotted by the specified scripts."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef magphase_function(f='1.0/(1+1j*x)', xmin=-1, xmax=1, steps=200, p='x', g=None, erange=False, **kwargs):\n function(f, xmin, xmax, steps, p, g, erange, 
plotter=magphase_data, **kwargs)", "response": "Returns a new object that provides the magnitude and phase of a single - argument function."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef realimag_data(xdata, ydata, eydata=None, exdata=None, xscale='linear', rscale='linear', iscale='linear', rlabel='Real', ilabel='Imaginary', figure='gcf', clear=1, draw=True, **kwargs):\n _pylab.ioff()\n\n # Make sure the dimensionality of the data sets matches\n xdata, ydata = _match_data_sets(xdata, ydata)\n exdata = _match_error_to_data_set(xdata, exdata)\n eydata = _match_error_to_data_set(ydata, eydata)\n \n # convert to real imag, and get error bars\n rdata = []\n idata = []\n erdata = []\n eidata = [] \n for l in range(len(ydata)):\n rdata.append(_n.real(ydata[l]))\n idata.append(_n.imag(ydata[l]))\n\n if eydata[l] is None:\n erdata.append(None)\n eidata.append(None)\n else:\n erdata.append(_n.real(eydata[l]))\n eidata.append(_n.imag(eydata[l]))\n\n # set up the figure and axes\n if figure == 'gcf': f = _pylab.gcf()\n if clear: f.clear()\n axes1 = _pylab.subplot(211)\n axes2 = _pylab.subplot(212,sharex=axes1)\n\n if 'xlabel' in kwargs : xlabel=kwargs.pop('xlabel')\n else: xlabel=''\n if 'ylabel' in kwargs : kwargs.pop('ylabel')\n\n if 'tall' not in kwargs: kwargs['tall'] = False\n if 'autoformat' not in kwargs: kwargs['autoformat'] = True\n\n autoformat = kwargs['autoformat']\n kwargs['autoformat'] = False\n kwargs['xlabel'] = ''\n xy_data(xdata, rdata, eydata=erdata, exdata=exdata, ylabel=rlabel, axes=axes1, clear=0, xscale=xscale, yscale=rscale, draw=False, **kwargs)\n\n kwargs['autoformat'] = autoformat\n kwargs['xlabel'] = xlabel\n xy_data(xdata, idata, eydata=eidata, exdata=exdata, ylabel=ilabel, axes=axes2, clear=0, xscale=xscale, yscale=iscale, draw=False, **kwargs)\n\n axes2.set_title('')\n\n if draw:\n _pylab.ion()\n _pylab.draw()\n _pylab.show()", "response": "Returns a list of real and imaginary parts of a complex 
data set."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef realimag_databoxes(ds, xscript=0, yscript=\"d[1]+1j*d[2]\", eyscript=None, exscript=None, g=None, **kwargs):\n databoxes(ds, xscript, yscript, eyscript, exscript, plotter=realimag_data, g=g, **kwargs)", "response": "Generate real and imaginary xdata versus xdata."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef realimag_files(xscript=0, yscript=\"d[1]+1j*d[2]\", eyscript=None, exscript=None, paths=None, g=None, **kwargs):\n return files(xscript, yscript, eyscript, exscript, plotter=realimag_databoxes, paths=paths, g=g, **kwargs)", "response": "This function will load a bunch of data files generate data based on the supplied scripts and then plot the xdata s real and imaginary parts versus xdata."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a new object that provides real and imaginary parts of a spinmob.", "response": "def realimag_function(f='1.0/(1+1j*x)', xmin=-1, xmax=1, steps=200, p='x', g=None, erange=False, **kwargs):\n \"\"\"\n Plots function(s) real and imaginary parts over the specified range.\n\n Parameters\n ----------\n f='1.0/(1+1j*x)' \n Complex-valued function or list of functions to plot. \n These can be string functions or single-argument python functions;\n additional globals can be supplied by g.\n xmin=-1, xmax=1, steps=200 \n Range over which to plot and how many points to plot\n p='x'\n If using strings for functions, p is the independent parameter name.\n g=None \n Optional dictionary of extra globals. 
Try g=globals()!\n erange=False \n Use exponential spacing of the x data?\n\n See spinmob.plot.xy.data() for additional optional keyword arguments.\n \"\"\"\n function(f, xmin, xmax, steps, p, g, erange, plotter=realimag_data, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef xy_data(xdata, ydata, eydata=None, exdata=None, label=None, xlabel='', ylabel='', \\\n title='', shell_history=0, xshift=0, yshift=0, xshift_every=1, yshift_every=1, \\\n coarsen=0, style=None, clear=True, axes=None, xscale='linear', yscale='linear', grid=False, \\\n legend='best', legend_max=20, autoformat=True, autoformat_window=True, tall=False, draw=True, **kwargs):\n \"\"\"\n Plots specified data.\n\n Parameters\n ----------\n xdata, ydata \n Arrays (or arrays of arrays) of data to plot\n eydata=None, exdata=None \n Arrays of x and y errorbar values\n label=None \n String or array of strings for the line labels\n xlabel='' \n Label for the x-axis\n ylabel='' \n Label for the y-axis\n title='' \n Title for the axes; set to None to have nothing.\n shell_history=0 \n How many commands from the pyshell history to include with the title\n xshift=0, yshift=0 \n Progressive shifts on the data, to make waterfall plots\n xshift_every=1 \n Perform the progressive shift every 1 or n'th line.\n yshift_every=1 \n perform the progressive shift every 1 or n'th line.\n style=None \n style cycle object.\n clear=True \n If no axes are specified (see below), clear the figure, otherwise clear just the axes.\n axes=None \n Which matplotlib axes to use, or \"gca\" for the current axes\n xscale='linear', yscale='linear' \n 'linear' or 'log' x and y axis scales.\n grid=False \n Should we draw a grid on the axes?\n legend='best' \n Where to place the legend (see pylab.legend() for options)\n Set this to None to ignore the legend.\n legend_max=20\n Number of legend entries before it's truncated with '...'\n autoformat=True \n Should we format the figure for 
printing?\n autoformat_window=True\n Should we resize and reposition the window when autoformatting?\n tall=False \n Should the format be tall?\n draw=True \n Whether or not to draw the plot after plotting.\n\n See matplotlib's errorbar() function for additional optional keyword arguments.\n \"\"\"\n _pylab.ioff()\n \n # Make sure the dimensionality of the data sets matches\n xdata, ydata = _match_data_sets(xdata, ydata)\n exdata = _match_error_to_data_set(xdata, exdata)\n eydata = _match_error_to_data_set(ydata, eydata)\n \n # check that the labels is a list of strings of the same length\n if not _fun.is_iterable(label): label = [label]*len(xdata)\n while len(label) < len(ydata): label.append(label[0])\n \n # concatenate if necessary\n if len(label) > legend_max:\n label[legend_max-2] = '...'\n for n in range(legend_max-1,len(label)-1): label[n] = \"_nolegend_\"\n\n # clear the figure?\n if clear and not axes: _pylab.gcf().clear() # axes cleared later\n\n # setup axes\n if axes==\"gca\" or axes is None: axes = _pylab.gca()\n\n # if we're clearing the axes\n if clear: axes.clear()\n\n # set the current axes\n _pylab.axes(axes)\n\n # now loop over the list of data in xdata and ydata\n for n in range(0,len(xdata)):\n # get the label\n if label[n]=='_nolegend_':\n l = '_nolegend_'\n else:\n l = str(n)+\": \"+str(label[n])\n \n # calculate the x an y progressive shifts\n dx = xshift*(n/xshift_every)\n dy = yshift*(n/yshift_every)\n \n # if we're supposed to coarsen the data, do so.\n x = _fun.coarsen_array(xdata[n], coarsen)\n y = _fun.coarsen_array(ydata[n], coarsen)\n ey = _fun.coarsen_array(eydata[n], coarsen, 'quadrature')\n ex = _fun.coarsen_array(exdata[n], coarsen, 'quadrature')\n\n # update the style\n if not style is None: kwargs.update(next(style))\n axes.errorbar(x+dx, y+dy, label=l, yerr=ey, xerr=ex, **kwargs)\n\n _pylab.xscale(xscale)\n _pylab.yscale(yscale)\n if legend: axes.legend(loc=legend)\n axes.set_xlabel(xlabel)\n axes.set_ylabel(ylabel)\n\n # for 
some arguments there should be no title.\n if title in [None, False, 0]:\n axes.set_title('')\n\n # add the commands to the title\n else:\n title = str(title)\n history = _fun.get_shell_history()\n for n in range(0, min(shell_history, len(history))):\n title = title + \"\\n\" + history[n].split('\\n')[0].strip()\n\n title = title + '\\nPlot created ' + _time.asctime()\n axes.set_title(title)\n\n if grid: _pylab.grid(True)\n\n if autoformat:\n _pt.format_figure(draw=False, modify_geometry=autoformat_window)\n _pt.auto_zoom(axes=axes, draw=False)\n\n # update the canvas\n if draw:\n _pylab.ion()\n _pylab.draw()\n _pylab.show()\n\n return axes", "response": "Plots the data of xdata ydata and eydata."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngenerates and plot ydata versus xdata.", "response": "def xy_databoxes(ds, xscript=0, yscript='d[1]', eyscript=None, exscript=None, g=None, **kwargs):\n \"\"\"\n Use databoxes and scripts to generate and plot ydata versus xdata.\n\n Parameters\n ----------\n ds \n List of databoxes\n xscript=0 \n Script for x data\n yscript='d[1]' \n Script for y data\n eyscript=None \n Script for y error\n exscript=None\n Script for x error\n g=None\n Optional dictionary of globals for the scripts\n\n See spinmob.plot.xy.data() for additional optional keyword arguments.\n See spinmob.data.databox.execute_script() for more information about scripts.\n \n \"\"\"\n databoxes(ds, xscript, yscript, eyscript, exscript, plotter=xy_data, g=g, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef xy_function(f='sin(x)', xmin=-1, xmax=1, steps=200, p='x', g=None, erange=False, **kwargs):\n function(f, xmin, xmax, steps, p, g, erange, plotter=xy_data, **kwargs)", "response": "Plots a function over the specified range of points."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef databoxes(ds, xscript=0, 
yscript=1, eyscript=None, exscript=None, g=None, plotter=xy_data, transpose=False, **kwargs):\n if not _fun.is_iterable(ds): ds = [ds]\n\n if 'xlabel' not in kwargs: kwargs['xlabel'] = str(xscript)\n if 'ylabel' not in kwargs: kwargs['ylabel'] = str(yscript)\n\n # First make sure everything is a list of scripts (or None's)\n if not _fun.is_iterable(xscript): xscript = [xscript]\n if not _fun.is_iterable(yscript): yscript = [yscript]\n if not _fun.is_iterable(exscript): exscript = [exscript]\n if not _fun.is_iterable(eyscript): eyscript = [eyscript]\n\n # make sure exscript matches shape with xscript (and the same for y)\n if len(exscript) < len(xscript):\n for n in range(len(xscript)-1): exscript.append(exscript[0])\n if len(eyscript) < len(yscript):\n for n in range(len(yscript)-1): eyscript.append(eyscript[0])\n\n # Make xscript and exscript match in shape with yscript and eyscript\n if len(xscript) < len(yscript):\n for n in range(len(yscript)-1):\n xscript.append(xscript[0])\n exscript.append(exscript[0])\n\n # check for the reverse possibility\n if len(yscript) < len(xscript):\n for n in range(len(xscript)-1):\n yscript.append(yscript[0])\n eyscript.append(eyscript[0])\n\n # now check for None's (counting scripts)\n for n in range(len(xscript)):\n if xscript[n] is None and yscript[n] is None:\n print(\"Two None scripts? 
But why?\")\n return\n if xscript[n] is None:\n if type(yscript[n])==str: xscript[n] = 'range(len('+yscript[n]+'))'\n else: xscript[n] = 'range(len(c('+str(yscript[n])+')))'\n if yscript[n] is None:\n if type(xscript[n])==str: yscript[n] = 'range(len('+xscript[n]+'))'\n else: yscript[n] = 'range(len(c('+str(xscript[n])+')))'\n\n xdatas = []\n ydatas = []\n exdatas = []\n eydatas = []\n labels = []\n\n\n # Loop over all the data boxes\n for i in range(len(ds)):\n \n # Reset the default globals\n all_globals = dict(n=i,m=len(ds)-1-i)\n \n # Update them with the user-specified globals\n if not g==None: all_globals.update(g)\n \n # For ease of coding\n d = ds[i]\n \n # Take the transpose if necessary\n if transpose: d = d.transpose()\n \n # Generate the x-data; returns a list of outputs, one for each xscript\n xdata = d(xscript, all_globals)\n \n # Loop over each xdata, appending to the master list, and generating a label\n for n in range(len(xdata)):\n xdatas.append(xdata[n])\n if len(xdata)>1: labels.append(str(n)+\": \"+_os.path.split(d.path)[-1])\n else: labels.append(_os.path.split(d.path)[-1])\n\n # Append the other data sets to their master lists\n for y in d( yscript, all_globals): ydatas.append(y)\n for x in d(exscript, all_globals): exdatas.append(x)\n for y in d(eyscript, all_globals): eydatas.append(y)\n\n if \"label\" in kwargs: labels = kwargs.pop(\"label\")\n\n plotter(xdatas, ydatas, eydatas, exdatas, label=labels, **kwargs)", "response": "Returns a list of databoxes with the specified scripts."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef image_data(Z, X=[0,1.0], Y=[0,1.0], aspect=1.0, zmin=None, zmax=None, clear=1, clabel='z', autoformat=True, colormap=\"Last Used\", shell_history=0, **kwargs):\n global _colormap\n \n # Set interpolation to something more relevant for every day science\n if not 'interpolation' in kwargs.keys(): kwargs['interpolation'] = 'nearest'\n\n 
_pylab.ioff()\n\n fig = _pylab.gcf()\n if clear:\n fig.clear()\n _pylab.axes()\n\n # generate the 3d axes\n X = _n.array(X)\n Y = _n.array(Y)\n Z = _n.array(Z)\n\n # assume X and Y are the bin centers and figure out the bin widths\n x_width = abs(float(X[-1] - X[0])/(len(Z[0])-1))\n y_width = abs(float(Y[-1] - Y[0])/(len(Z)-1))\n\n # reverse the Z's\n # Transpose and reverse\n Z = Z.transpose()\n Z = Z[-1::-1]\n \n \n # get rid of the label and title kwargs\n xlabel=''\n ylabel=''\n title =''\n if 'xlabel' in kwargs: xlabel = kwargs.pop('xlabel')\n if 'ylabel' in kwargs: ylabel = kwargs.pop('ylabel')\n if 'title' in kwargs: title = kwargs.pop('title')\n\n _pylab.imshow(Z, extent=[X[0]-x_width/2.0, X[-1]+x_width/2.0,\n Y[0]-y_width/2.0, Y[-1]+y_width/2.0], **kwargs)\n cb = _pylab.colorbar()\n _pt.image_set_clim(zmin,zmax)\n _pt.image_set_aspect(aspect)\n cb.set_label(clabel)\n\n a = _pylab.gca()\n a.set_xlabel(xlabel)\n a.set_ylabel(ylabel)\n\n #_pt.close_sliders()\n #_pt.image_sliders()\n\n # title\n history = _fun.get_shell_history()\n for n in range(0, min(shell_history, len(history))):\n title = title + \"\\n\" + history[n].split('\\n')[0].strip()\n\n title = title + '\\nPlot created ' + _time.asctime()\n a.set_title(title.strip())\n\n if autoformat: _pt.image_format_figure(fig)\n\n _pylab.ion()\n _pylab.show()\n #_pt.raise_figure_window()\n #_pt.raise_pyshell()\n _pylab.draw()\n\n # add the color sliders\n if colormap:\n if _colormap: _colormap.close()\n _colormap = _pt.image_colormap(colormap, image=a.images[0])", "response": "Generates an image plot for the given data."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngenerating a 2 - d function over the specified range and return one value.", "response": "def image_function(f='sin(5*x)*cos(5*y)', xmin=-1, xmax=1, ymin=-1, ymax=1, xsteps=100, ysteps=100, p='x,y', g=None, **kwargs):\n \"\"\"\n Plots a 2-d function over the specified range\n\n Parameters\n ----------\n 
f='sin(5*x)*cos(5*y)' \n Takes two inputs and returns one value. Can also\n be a string function such as sin(x*y)\n xmin=-1, xmax=1, ymin=-1, ymax=1 \n Range over which to generate/plot the data\n xsteps=100, ysteps=100\n How many points to plot on the specified range\n p='x,y'\n If using strings for functions, this is a string of parameters.\n g=None\n Optional additional globals. Try g=globals()!\n \n See spinmob.plot.image.data() for additional optional keyword arguments.\n \"\"\"\n\n default_kwargs = dict(clabel=str(f), xlabel='x', ylabel='y')\n default_kwargs.update(kwargs)\n\n # aggregate globals\n if not g: g = {}\n for k in list(globals().keys()):\n if k not in g: g[k] = globals()[k]\n\n if type(f) == str:\n f = eval('lambda ' + p + ': ' + f, g)\n\n\n # generate the grid x and y coordinates\n xones = _n.linspace(1,1,ysteps)\n x = _n.linspace(xmin, xmax, xsteps)\n xgrid = _n.outer(xones, x)\n\n yones = _n.linspace(1,1,xsteps)\n y = _n.linspace(ymin, ymax, ysteps)\n ygrid = _n.outer(y, yones)\n\n # now get the z-grid\n try:\n # try it the fast numpy way. Add 0 to assure dimensions\n zgrid = f(xgrid, ygrid) + xgrid*0.0\n except:\n print(\"Notice: function is not rocking hardcore. Generating grid the slow way...\")\n # manually loop over the data to generate the z-grid\n zgrid = []\n for ny in range(0, len(y)):\n zgrid.append([])\n for nx in range(0, len(x)):\n zgrid[ny].append(f(x[nx], y[ny]))\n\n zgrid = _n.array(zgrid)\n\n # now plot!\n image_data(zgrid.transpose(), x, y, **default_kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nloading an image file and plot it with color.", "response": "def image_file(path=None, zscript='self[1:]', xscript='[0,1]', yscript='d[0]', g=None, **kwargs):\n \"\"\"\n Loads an data file and plots it with color. 
Data file must have columns of the\n same length!\n\n Parameters\n ----------\n path=None\n Path to data file.\n zscript='self[1:]' \n Determines how to get data from the columns\n xscript='[0,1]', yscript='d[0]' \n Determine the x and y arrays used for setting the axes bounds\n g=None \n Optional dictionary of globals for the scripts\n\n See spinmob.plot.image.data() for additional optional keyword arguments.\n See spinmob.data.databox.execute_script() for more information about scripts.\n \"\"\"\n if 'delimiter' in kwargs: delimiter = kwargs.pop('delimiter')\n else: delimiter = None\n\n d = _data.load(paths=path, delimiter = delimiter)\n if d is None or len(d) == 0: return\n\n # allows the user to overwrite the defaults\n default_kwargs = dict(xlabel = str(xscript),\n ylabel = str(yscript),\n title = d.path,\n clabel = str(zscript))\n default_kwargs.update(kwargs)\n\n\n # get the data\n X = d(xscript, g)\n Y = d(yscript, g)\n Z = _n.array(d(zscript, g))\n# Z = Z.transpose()\n\n # plot!\n image_data(Z, X, Y, **default_kwargs)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nfunction that returns a parameterized version of the parametric function at a specified time range.", "response": "def parametric_function(fx='sin(t)', fy='cos(t)', tmin=-1, tmax=1, steps=200, p='t', g=None, erange=False, **kwargs):\n \"\"\"\n Plots the parametric function over the specified range\n\n Parameters\n ----------\n fx='sin(t)', fy='cos(t)' \n Functions or (matching) lists of functions to plot; \n can be string functions or python functions taking one argument\n tmin=-1, tmax=1, steps=200 \n Range over which to plot, and how many points to plot\n p='t'\n If using strings for functions, p is the parameter name\n g=None\n Optional dictionary of extra globals. 
Try g=globals()!\n erange=False\n Use exponential spacing of the t data?\n\n See spinmob.plot.xy.data() for additional optional keyword arguments.\n \"\"\"\n\n if not g: g = {}\n for k in list(globals().keys()):\n if k not in g: g[k] = globals()[k]\n\n # if the x-axis is a log scale, use erange\n if erange: r = _fun.erange(tmin, tmax, steps)\n else: r = _n.linspace(tmin, tmax, steps)\n\n # make sure it's a list so we can loop over it\n if not type(fy) in [type([]), type(())]: fy = [fy]\n if not type(fx) in [type([]), type(())]: fx = [fx]\n\n # loop over the list of functions\n xdatas = []\n ydatas = []\n labels = []\n for fs in fx:\n if type(fs) == str:\n a = eval('lambda ' + p + ': ' + fs, g)\n a.__name__ = fs\n else:\n a = fs\n\n x = []\n for z in r: x.append(a(z))\n\n xdatas.append(x)\n labels.append(a.__name__)\n\n for n in range(len(fy)):\n fs = fy[n]\n if type(fs) == str:\n a = eval('lambda ' + p + ': ' + fs, g)\n a.__name__ = fs\n else:\n a = fs\n\n y = []\n for z in r: y.append(a(z))\n\n ydatas.append(y)\n labels[n] = labels[n]+', '+a.__name__\n\n\n # plot!\n xy_data(xdatas, ydatas, label=labels, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef reset(self):\n for key in list(self.keys()): self.iterators[key] = _itertools.cycle(self[key])\n return self", "response": "Reset the style cycle."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nadding text to the specified position.", "response": "def add_text(text, x=0.01, y=0.01, axes=\"gca\", draw=True, **kwargs):\n \"\"\"\n Adds text to the axes at the specified position.\n\n **kwargs go to the axes.text() function.\n \"\"\"\n if axes==\"gca\": axes = _pylab.gca()\n axes.text(x, y, text, transform=axes.transAxes, **kwargs)\n if draw: _pylab.draw()"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nlooks at the bounds of the plotted data and zooms accordingly, 
leaving some space around the data.", "response": "def auto_zoom(zoomx=True, zoomy=True, axes=\"gca\", x_space=0.04, y_space=0.04, draw=True):\n \"\"\"\n Looks at the bounds of the plotted data and zooms accordingly, leaving some\n space around the data.\n \"\"\"\n\n # Disable auto-updating by default.\n _pylab.ioff()\n\n if axes==\"gca\": axes = _pylab.gca()\n\n # get the current bounds\n x10, x20 = axes.get_xlim()\n y10, y20 = axes.get_ylim()\n\n # Autoscale using pylab's technique (catches the error bars!)\n axes.autoscale(enable=True, tight=True)\n\n # Add padding\n if axes.get_xscale() == 'linear':\n x1, x2 = axes.get_xlim()\n xc = 0.5*(x1+x2)\n xs = 0.5*(1+x_space)*(x2-x1)\n axes.set_xlim(xc-xs, xc+xs)\n \n if axes.get_yscale() == 'linear':\n y1, y2 = axes.get_ylim()\n yc = 0.5*(y1+y2)\n ys = 0.5*(1+y_space)*(y2-y1)\n axes.set_ylim(yc-ys, yc+ys)\n \n # If we weren't supposed to zoom x or y, reset them\n if not zoomx: axes.set_xlim(x10, x20)\n if not zoomy: axes.set_ylim(y10, y20)\n \n if draw: \n _pylab.ion()\n _pylab.draw()"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nestimates the slope of two clicks.", "response": "def click_estimate_slope():\n \"\"\"\n Takes two clicks and returns the slope.\n\n Right-click aborts.\n \"\"\"\n\n c1 = _pylab.ginput()\n if len(c1)==0:\n return None\n\n c2 = _pylab.ginput()\n if len(c2)==0:\n return None\n\n return (c1[0][1]-c2[0][1])/(c1[0][0]-c2[0][0])"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef click_estimate_curvature():\n\n c1 = _pylab.ginput()\n if len(c1)==0:\n return None\n\n c2 = _pylab.ginput()\n if len(c2)==0:\n return None\n\n return 2*(c2[0][1]-c1[0][1])/(c2[0][0]-c1[0][0])**2", "response": "Takes two clicks and returns the curvature assuming the first click was the minimum of a parabola and the second was some other point. 
Right - click aborts."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef click_estimate_difference():\n\n c1 = _pylab.ginput()\n if len(c1)==0:\n return None\n\n c2 = _pylab.ginput()\n if len(c2)==0:\n return None\n\n return [c2[0][0]-c1[0][0], c2[0][1]-c1[0][1]]", "response": "Takes two clicks and returns the difference vector [ dx dy )"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef copy_figure_to_clipboard(figure='gcf'):\n try:\n import pyqtgraph as _p\n \n # Get the current figure if necessary \n if figure is 'gcf': figure = _s.pylab.gcf() \n \n # Store the figure as an image\n path = _os.path.join(_s.settings.path_home, \"clipboard.png\")\n figure.savefig(path)\n \n # Set the clipboard. I know, it's weird to use pyqtgraph, but \n # This covers both Qt4 and Qt5 with their Qt4 wrapper!\n _p.QtGui.QApplication.instance().clipboard().setImage(_p.QtGui.QImage(path))\n \n except: \n print(\"This function currently requires pyqtgraph to be installed.\")", "response": "Copies the specified figure to the system clipboard. 
Specifying gcf will use the current figure."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ndifferentiating the data visible on the specified axes using manipulate_shown_data.", "response": "def differentiate_shown_data(neighbors=1, fyname=1, **kwargs):\n \"\"\"\n Differentiates the data visible on the specified axes using\n fun.derivative_fit() (if neighbors > 0), and derivative() otherwise.\n Modifies the visible data using manipulate_shown_data(**kwargs)\n \"\"\"\n\n if neighbors:\n def D(x,y): return _fun.derivative_fit(x,y,neighbors)\n else:\n def D(x,y): return _fun.derivative(x,y)\n\n if fyname==1: fyname = '$\\\\partial_{x(\\\\pm'+str(neighbors)+')}$'\n\n manipulate_shown_data(D, fxname=None, fyname=fyname, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef fit_shown_data(f=\"a*x+b\", p=\"a=1, b=2\", axes=\"gca\", verbose=True, **kwargs):\n\n # get the axes\n if axes==\"gca\": axes = _pylab.gca()\n\n # get the range for trimming\n _pylab.sca(axes)\n xmin,xmax = axes.get_xlim()\n ymin,ymax = axes.get_ylim()\n\n # update the kwargs\n if 'first_figure' not in kwargs: kwargs['first_figure'] = axes.figure.number+1\n\n # loop over the lines\n fitters = []\n for l in axes.lines:\n\n # get the trimmed data\n x,y = l.get_data()\n x,y = _s.fun.trim_data_uber([x,y],[xmin\")\n if not axes.lines[-1] == l: fitters[-1].ginput(timeout=0)\n\n return fitters", "response": "Fits the data of the specified function on the lines of the specified axes and returns a list of fitters that can be used to fit the data."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nformatting a figure in a compact way with enough useful useful information for large data sets.", "response": "def format_figure(figure=None, tall=False, draw=True, modify_geometry=True):\n \"\"\"\n This formats the figure in a compact way with (hopefully) enough useful\n information for printing 
large data sets. Used mostly for line and scatter\n plots with long, information-filled titles.\n\n Chances are somewhat slim this will be ideal for you but it very well might\n and is at least a good starting point.\n\n figure=None specify a figure object. None will use gcf()\n\n \"\"\"\n _pylab.ioff()\n\n if figure == None: figure = _pylab.gcf()\n\n if modify_geometry:\n if tall: set_figure_window_geometry(figure, (0,0), (550,700))\n else: set_figure_window_geometry(figure, (0,0), (550,400))\n\n legend_position=1.01\n\n # first, find overall bounds of all axes.\n ymin = 1.0\n ymax = 0.0\n xmin = 1.0\n xmax = 0.0\n for axes in figure.get_axes():\n (x,y,dx,dy) = axes.get_position().bounds\n if y < ymin: ymin = y\n if y+dy > ymax: ymax = y+dy\n if x < xmin: xmin = x\n if x+dx > xmax: xmax = x+dx\n\n # Fraction of the figure's width and height to use for all the plots.\n w = 0.55\n h = 0.75\n\n # buffers on left and bottom edges\n bb = 0.15\n bl = 0.15\n\n xscale = w / (xmax-xmin)\n yscale = h / (ymax-ymin)\n\n # save this for resetting\n current_axes = _pylab.gca()\n\n # loop over the axes\n for axes in figure.get_axes():\n\n # Get the axes bounds\n (x,y,dx,dy) = axes.get_position().bounds\n y = bb + (y-ymin)*yscale\n dy = dy * yscale\n\n x = bl + (x-xmin)*xscale\n dx = dx * xscale\n\n axes.set_position([x,y,dx,dy])\n\n # set the position of the legend\n _pylab.axes(axes) # set the current axes\n if len(axes.lines)>0:\n _pylab.legend(loc=[legend_position, 0], borderpad=0.02, prop=_FontProperties(size=7))\n\n # set the label spacing in the legend\n if axes.get_legend():\n axes.get_legend().labelsep = 0.01\n axes.get_legend().set_visible(1)\n\n # set up the title label\n axes.title.set_horizontalalignment('right')\n axes.title.set_size(8)\n axes.title.set_position([1.5,1.02])\n axes.title.set_visible(1)\n #axes.yaxis.label.set_horizontalalignment('center')\n #axes.xaxis.label.set_horizontalalignment('center')\n\n _pylab.axes(current_axes)\n\n if draw:\n _pylab.ion()\n 
_pylab.draw()"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_figure_window_geometry(fig='gcf'):\n\n if type(fig)==str: fig = _pylab.gcf()\n elif _fun.is_a_number(fig): fig = _pylab.figure(fig)\n\n # Qt4Agg backend. Probably would work for other Qt stuff\n if _pylab.get_backend().find('Qt') >= 0:\n size = fig.canvas.window().size()\n pos = fig.canvas.window().pos()\n return [[pos.x(),pos.y()], [size.width(),size.height()]]\n\n else:\n print(\"get_figure_window_geometry() only implemented for QtAgg backend.\")\n return None", "response": "Returns the geometry of the figure window."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef impose_legend_limit(limit=30, axes=\"gca\", **kwargs):\n if axes==\"gca\": axes = _pylab.gca()\n\n # make these axes current\n _pylab.axes(axes)\n\n # loop over all the lines_pylab.\n for n in range(0,len(axes.lines)):\n if n > limit-1 and not n==len(axes.lines)-1: axes.lines[n].set_label(\"_nolegend_\")\n if n == limit-1 and not n==len(axes.lines)-1: axes.lines[n].set_label(\"...\")\n\n _pylab.legend(**kwargs)", "response": "Impose the legend to a maximum of 30 entries."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef image_neighbor_smooth(xlevel=0.2, ylevel=0.2, image=\"auto\"):\n if image == \"auto\": image = _pylab.gca().images[0]\n\n Z = _n.array(image.get_array())\n\n # store this image in the undo list\n global image_undo_list\n image_undo_list.append([image, Z])\n if len(image_undo_list) > 10: image_undo_list.pop(0)\n\n # get the diagonal smoothing level (eliptical, and scaled down by distance)\n dlevel = ((xlevel**2+ylevel**2)/2.0)**(0.5)\n\n # don't touch the first column\n new_Z = [Z[0]*1.0]\n\n for m in range(1,len(Z)-1):\n new_Z.append(Z[m]*1.0)\n for n in range(1,len(Z[0])-1):\n new_Z[-1][n] = (Z[m,n] + xlevel*(Z[m+1,n]+Z[m-1,n]) + 
ylevel*(Z[m,n+1]+Z[m,n-1]) \\\n + dlevel*(Z[m+1,n+1]+Z[m-1,n+1]+Z[m+1,n-1]+Z[m-1,n-1]) ) \\\n / (1.0+xlevel*2+ylevel*2 + dlevel*4)\n\n # don't touch the last column\n new_Z.append(Z[-1]*1.0)\n\n # images have transposed data\n image.set_array(_n.array(new_Z))\n\n # update the plot\n _pylab.draw()", "response": "This function will bleed nearest neighbor pixels into each other with the specified weight factors."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef image_undo():\n if len(image_undo_list) <= 0:\n print(\"no undos in memory\")\n return\n\n [image, Z] = image_undo_list.pop(-1)\n image.set_array(Z)\n _pylab.draw()", "response": "Undoes the last coarsen or smooth command."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef image_set_aspect(aspect=1.0, axes=\"gca\"):\n if axes is \"gca\": axes = _pylab.gca()\n\n e = axes.get_images()[0].get_extent()\n axes.set_aspect(abs((e[1]-e[0])/(e[3]-e[2]))/aspect)", "response": "Sets the aspect ratio of the current zoom level of the imshow image"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef image_set_extent(x=None, y=None, axes=\"gca\"):\n if axes == \"gca\": axes = _pylab.gca()\n\n # get the current plot limits\n xlim = axes.get_xlim()\n ylim = axes.get_ylim()\n\n # get the old extent\n extent = axes.images[0].get_extent()\n\n # calculate the fractional extents\n x0 = extent[0]\n y0 = extent[2]\n xwidth = extent[1]-x0\n ywidth = extent[3]-y0\n frac_x1 = (xlim[0]-x0)/xwidth\n frac_x2 = (xlim[1]-x0)/xwidth\n frac_y1 = (ylim[0]-y0)/ywidth\n frac_y2 = (ylim[1]-y0)/ywidth\n\n\n # set the new\n if not x == None:\n extent[0] = x[0]\n extent[1] = x[1]\n if not y == None:\n extent[2] = y[0]\n extent[3] = y[1]\n\n\n # get the new zoom window\n x0 = extent[0]\n y0 = extent[2]\n xwidth = extent[1]-x0\n ywidth = extent[3]-y0\n\n x1 = x0 + 
xwidth*frac_x1\n x2 = x0 + xwidth*frac_x2\n y1 = y0 + ywidth*frac_y1\n y2 = y0 + ywidth*frac_y2\n\n # set the extent\n axes.images[0].set_extent(extent)\n\n # rezoom us\n axes.set_xlim(x1,x2)\n axes.set_ylim(y1,y2)\n\n # draw\n image_set_aspect(1.0)", "response": "Set the extent of the first image."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef image_scale(xscale=1.0, yscale=1.0, axes=\"gca\"):\n if axes == \"gca\": axes = _pylab.gca()\n\n e = axes.images[0].get_extent()\n x1 = e[0]*xscale\n x2 = e[1]*xscale\n y1 = e[2]*yscale\n y2 = e[3]*yscale\n\n image_set_extent([x1,x2],[y1,y2], axes)", "response": "Scales the image extent."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nshift the image y by this amount", "response": "def image_click_xshift(axes = \"gca\"):\n \"\"\"\n Takes a starting and ending point, then shifts the image y by this amount\n \"\"\"\n if axes == \"gca\": axes = _pylab.gca()\n\n try:\n p1 = _pylab.ginput()\n p2 = _pylab.ginput()\n\n xshift = p2[0][0]-p1[0][0]\n\n e = axes.images[0].get_extent()\n\n e[0] = e[0] + xshift\n e[1] = e[1] + xshift\n\n axes.images[0].set_extent(e)\n\n _pylab.draw()\n except:\n print(\"whoops\")"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nshift the image y by this amount", "response": "def image_click_yshift(axes = \"gca\"):\n \"\"\"\n Takes a starting and ending point, then shifts the image y by this amount\n \"\"\"\n if axes == \"gca\": axes = _pylab.gca()\n\n try:\n p1 = _pylab.ginput()\n p2 = _pylab.ginput()\n\n yshift = p2[0][1]-p1[0][1]\n\n e = axes.images[0].get_extent()\n\n e[2] = e[2] + yshift\n e[3] = e[3] + yshift\n\n axes.images[0].set_extent(e)\n\n _pylab.draw()\n except:\n print(\"whoops\")"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef image_shift(xshift=0, yshift=0, 
axes=\"gca\"):\n\n if axes==\"gca\": axes = _pylab.gca()\n\n e = axes.images[0].get_extent()\n\n e[0] = e[0] + xshift\n e[1] = e[1] + xshift\n e[2] = e[2] + yshift\n e[3] = e[3] + yshift\n\n axes.images[0].set_extent(e)\n\n _pylab.draw()", "response": "This will shift an image to a new location on x and y."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef image_set_clim(zmin=None, zmax=None, axes=\"gca\"):\n if axes==\"gca\": axes=_pylab.gca()\n\n image = axes.images[0]\n\n if zmin=='auto': zmin = _n.min(image.get_array())\n if zmax=='auto': zmax = _n.max(image.get_array())\n\n if zmin==None: zmin = image.get_clim()[0]\n if zmax==None: zmax = image.get_clim()[1]\n\n image.set_clim(zmin, zmax)\n\n _pylab.draw()", "response": "This function will set the clim of the colorbar."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef manipulate_shown_data(f, input_axes=\"gca\", output_axes=None, fxname=1, fyname=1, clear=1, pause=False, **kwargs):\n\n # get the axes\n if input_axes == \"gca\": a1 = _pylab.gca()\n else: a1 = input_axes\n\n\n\n # get the xlimits\n xmin, xmax = a1.get_xlim()\n\n # get the name to stick on the x and y labels\n if fxname==1: fxname = f.__name__\n if fyname==1: fyname = f.__name__\n\n # get the output axes\n if output_axes == None:\n _pylab.figure(a1.figure.number+1)\n a2 = _pylab.axes()\n else:\n a2 = output_axes\n\n if clear: a2.clear()\n\n # loop over the data\n for line in a1.get_lines():\n\n # if it's a line, do the manipulation\n if isinstance(line, _mpl.lines.Line2D):\n\n # get the data\n x, y = line.get_data()\n\n # trim the data according to the current zoom level\n x, y = _fun.trim_data(xmin, xmax, x, y)\n\n # do the manipulation\n new_x, new_y = f(x,y)\n\n # plot the new\n _s.plot.xy.data(new_x, new_y, clear=0, label=line.get_label().replace(\"_\", \"-\"), axes=a2, **kwargs)\n\n # pause after each curve if 
we're supposed to\n if pause:\n _pylab.draw()\n input(\" \")\n\n # set the labels and title.\n if fxname in [0,None]: a2.set_xlabel(a1.get_xlabel())\n else: a2.set_xlabel(fxname+\"(\"+a1.get_xlabel()+\")\")\n\n if fyname in [0,None]: a2.set_ylabel(a1.get_ylabel())\n else: a2.set_ylabel(fyname+\"(\"+a1.get_ylabel()+\")\")\n\n _pylab.draw()", "response": "manipulates the shown data on the specified axes and saves the manipulated data on the specified axes."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef manipulate_shown_ydata(fy, fyname=1, **kwargs):\n def f(x,y): return x, fy(y)\n f.__name__ = fy.__name__\n manipulate_shown_data(f, fxname=None, fyname=fyname, **kwargs)", "response": "This function is used to manipulate the xdata and fy data."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _print_figures(figures, arguments='', file_format='pdf', target_width=8.5, target_height=11.0, target_pad=0.5):\n\n for fig in figures:\n\n # get the temp path\n temp_path = _os.path.join(_settings.path_home, \"temp\")\n\n # make the temp folder\n _settings.MakeDir(temp_path)\n\n # output the figure to postscript\n path = _os.path.join(temp_path, \"graph.\"+file_format)\n\n # get the dimensions of the figure in inches\n w=fig.get_figwidth()\n h=fig.get_figheight()\n\n # we're printing to 8.5 x 11, so aim for 7.5 x 10\n target_height = target_height-2*target_pad\n target_width = target_width -2*target_pad\n\n # depending on the aspect we scale by the vertical or horizontal value\n if 1.0*h/w > target_height/target_width:\n # scale down according to the vertical dimension\n new_h = target_height\n new_w = w*target_height/h\n else:\n # scale down according to the hozo dimension\n new_w = target_width\n new_h = h*target_width/w\n\n fig.set_figwidth(new_w)\n fig.set_figheight(new_h)\n\n # save it\n fig.savefig(path, bbox_inches=_pylab.matplotlib.transforms.Bbox(\n [[-target_pad, 
new_h-target_height-target_pad],\n [target_width-target_pad, target_height-target_pad]]))\n\n # set it back\n fig.set_figheight(h)\n fig.set_figwidth(w)\n\n\n if not arguments == '':\n c = _settings['instaprint'] + ' ' + arguments + ' \"' + path + '\"'\n else:\n c = _settings['instaprint'] + ' \"' + path + '\"'\n\n print(c)\n _os.system(c)", "response": "Print the given figures to a postscript file."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef instaprint(figure='gcf', arguments='', threaded=False, file_format='pdf'):\n\n global _settings\n\n if 'instaprint' not in _settings.keys():\n print(\"No print command setup. Set the user variable settings['instaprint'].\")\n return\n\n if figure=='gcf': figure=[_pylab.gcf().number]\n elif figure=='all': figure=_pylab.get_fignums()\n if not getattr(figure,'__iter__',False): figure = [figure]\n\n print(\"figure numbers in queue:\", figure)\n\n figures=[]\n for n in figure: figures.append(_pylab.figure(n))\n\n # now run the ps printing command\n if threaded:\n # store the canvas type of the last figure\n canvas_type = type(figures[-1].canvas)\n\n # launch the aforementioned function as a separate thread\n _thread.start_new_thread(_print_figures, (figures,arguments,file_format,))\n\n # wait until the thread is running\n _time.sleep(0.25)\n\n # wait until the canvas type has returned to normal\n t0 = _time.time()\n while not canvas_type == type(figures[-1].canvas) and _time.time()-t0 < 5.0:\n _time.sleep(0.1)\n if _time.time()-t0 >= 5.0:\n print(\"WARNING: Timed out waiting for canvas to return to original state!\")\n\n # bring back the figure and command line\n _pylab.draw()\n\n else:\n _print_figures(figures, arguments, file_format)\n _pylab.draw()", "response": "Quick function that saves the specified figure as a postscript and then calls the print command."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the 
documentation\ndef raise_figure_window(f=0):\n if _fun.is_a_number(f): f = _pylab.figure(f)\n f.canvas.manager.window.raise_()", "response": "Raises the supplied figure window."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets figure window geometry.", "response": "def set_figure_window_geometry(fig='gcf', position=None, size=None):\n \"\"\"\n This will currently only work for Qt4Agg and WXAgg backends.\n\n postion = [x, y]\n size = [width, height]\n\n fig can be 'gcf', a number, or a figure object.\n \"\"\"\n\n if type(fig)==str: fig = _pylab.gcf()\n elif _fun.is_a_number(fig): fig = _pylab.figure(fig)\n\n # Qt4Agg backend. Probably would work for other Qt stuff\n if _pylab.get_backend().find('Qt') >= 0:\n w = fig.canvas.window()\n\n if not size == None:\n w.resize(size[0],size[1])\n\n if not position == None:\n w.move(position[0], position[1])\n\n # WXAgg backend. Probably would work for other Qt stuff.\n elif _pylab.get_backend().find('WX') >= 0:\n w = fig.canvas.Parent\n\n if not size == None:\n w.SetSize(size)\n\n if not position == None:\n w.SetPosition(position)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef set_yticks(start, step, axes=\"gca\"):\n if axes==\"gca\": axes = _pylab.gca()\n\n # first get one of the tick label locations\n xposition = axes.yaxis.get_ticklabels()[0].get_position()[0]\n\n # get the bounds\n ymin, ymax = axes.get_ylim()\n\n # get the starting tick\n nstart = int(_pylab.floor((ymin-start)/step))\n nstop = int(_pylab.ceil((ymax-start)/step))\n ticks = []\n for n in range(nstart,nstop+1): ticks.append(start+n*step)\n\n axes.set_yticks(ticks)\n\n # set the x-position\n for t in axes.yaxis.get_ticklabels():\n x, y = t.get_position()\n t.set_position((xposition, y))\n\n _pylab.draw()", "response": "This will generate a tick array and apply said array to the axis\n "} {"SOURCE": "codesearchnet", "instruction": "How would you 
code a function in Python 3 to\ncoarsen the specified line.", "response": "def coarsen_line(line, level=2, exponential=False, draw=True):\n \"\"\"\n Coarsens the specified line (see spinmob.coarsen_data() for more information).\n \n Parameters\n ----------\n line\n Matplotlib line instance.\n level=2\n How strongly to coarsen.\n exponential=False\n If True, use the exponential method (great for log-x plots).\n draw=True\n Redraw when complete.\n \"\"\"\n\n # get the actual data values\n xdata = line.get_xdata()\n ydata = line.get_ydata()\n\n xdata,ydata = _fun.coarsen_data(xdata, ydata, level=level, exponential=exponential)\n \n # don't do anything if we don't have any data left\n if len(ydata) == 0: print(\"There's nothing left in \"+str(line)+\"!\")\n\n # otherwise set the data with the new arrays\n else: line.set_data(xdata, ydata)\n\n # we refresh in real time for giggles\n if draw: _pylab.draw()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\napplies function fx to all xdata and fy to all ydata.", "response": "def line_math(fx=None, fy=None, axes='gca'):\n \"\"\"\n applies function fx to all xdata and fy to all ydata.\n \"\"\"\n\n if axes=='gca': axes = _pylab.gca()\n\n lines = axes.get_lines()\n\n for line in lines:\n if isinstance(line, _mpl.lines.Line2D):\n xdata, ydata = line.get_data()\n if not fx==None: xdata = fx(xdata)\n if not fy==None: ydata = fy(ydata)\n line.set_data(xdata,ydata)\n\n _pylab.draw()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nexport the current figure to a file.", "response": "def export_figure(dpi=200, figure=\"gcf\", path=None):\n \"\"\"\n Saves the actual postscript data for the figure.\n \"\"\"\n if figure==\"gcf\": figure = _pylab.gcf()\n\n if path==None: path = _s.dialogs.Save(\"*.*\", default_directory=\"save_plot_default_directory\")\n\n if path==\"\":\n print(\"aborted.\")\n return\n\n figure.savefig(path, dpi=dpi)"} {"SOURCE": "codesearchnet", 
"instruction": "Can you implement a function in Python 3 that\nsaves the figure in my own ascii format", "response": "def save_plot(axes=\"gca\", path=None):\n \"\"\"\n Saves the figure in my own ascii format\n \"\"\"\n\n global line_attributes\n\n # choose a path to save to\n if path==None: path = _s.dialogs.Save(\"*.plot\", default_directory=\"save_plot_default_directory\")\n\n if path==\"\":\n print(\"aborted.\")\n return\n\n if not path.split(\".\")[-1] == \"plot\": path = path+\".plot\"\n\n f = file(path, \"w\")\n\n # if no argument was given, get the current axes\n if axes==\"gca\": axes=_pylab.gca()\n\n # now loop over the available lines\n f.write(\"title=\" +axes.title.get_text().replace('\\n', '\\\\n')+'\\n')\n f.write(\"xlabel=\"+axes.xaxis.label.get_text().replace('\\n','\\\\n')+'\\n')\n f.write(\"ylabel=\"+axes.yaxis.label.get_text().replace('\\n','\\\\n')+'\\n')\n\n for l in axes.lines:\n # write the data header\n f.write(\"trace=new\\n\")\n f.write(\"legend=\"+l.get_label().replace('\\n', '\\\\n')+\"\\n\")\n\n for a in line_attributes: f.write(a+\"=\"+str(_pylab.getp(l, a)).replace('\\n','')+\"\\n\")\n\n # get the data\n x = l.get_xdata()\n y = l.get_ydata()\n\n # loop over the data\n for n in range(0, len(x)): f.write(str(float(x[n])) + \" \" + str(float(y[n])) + \"\\n\")\n\n f.close()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nsave the raw data of the current figure to a file.", "response": "def save_figure_raw_data(figure=\"gcf\", **kwargs):\n \"\"\"\n This will just output an ascii file for each of the traces in the shown figure.\n\n **kwargs are sent to dialogs.Save()\n \"\"\"\n\n # choose a path to save to\n path = _s.dialogs.Save(**kwargs)\n if path==\"\": return \"aborted.\"\n\n # if no argument was given, get the current axes\n if figure==\"gcf\": figure = _pylab.gcf()\n\n for n in range(len(figure.axes)):\n a = figure.axes[n]\n\n for m in range(len(a.lines)):\n l = a.lines[m]\n\n x = l.get_xdata()\n y 
= l.get_ydata()\n\n p = _os.path.split(path)\n p = _os.path.join(p[0], \"axes\" + str(n) + \" line\" + str(m) + \" \" + p[1])\n print(p)\n\n # loop over the data\n f = open(p, 'w')\n for j in range(0, len(x)):\n f.write(str(x[j]) + \"\\t\" + str(y[j]) + \"\\n\")\n f.close()"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_line_color(self, increment=1):\n\n i = self.line_colors_index\n\n self.line_colors_index += increment\n if self.line_colors_index >= len(self.line_colors):\n self.line_colors_index = self.line_colors_index-len(self.line_colors)\n if self.line_colors_index >= len(self.line_colors): self.line_colors_index=0 # to be safe\n\n return self.line_colors[i]", "response": "Returns the current color in the line of the log."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_marker(self, increment=1):\n\n i = self.markers_index\n\n self.markers_index += increment\n if self.markers_index >= len(self.markers):\n self.markers_index = self.markers_index-len(self.markers)\n if self.markers_index >= len(self.markers): self.markers_index=0 # to be safe\n\n return self.markers[i]", "response": "Returns the current marker in the internal list increments the marker by increment"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the current marker in the current marker then increments the marker by increment.", "response": "def get_linestyle(self, increment=1):\n \"\"\"\n Returns the current marker, then increments the marker by what's specified\n \"\"\"\n\n i = self.linestyles_index\n\n self.linestyles_index += increment\n if self.linestyles_index >= len(self.linestyles):\n self.linestyles_index = self.linestyles_index-len(self.linestyles)\n if self.linestyles_index >= len(self.linestyles): self.linestyles_index=0 # to be safe\n\n return self.linestyles[i]"} {"SOURCE": "codesearchnet", 
"instruction": "Write a Python 3 script for\nreturning the current face color by increment", "response": "def get_face_color(self, increment=1):\n \"\"\"\n Returns the current face, then increments the face by what's specified\n \"\"\"\n\n i = self.face_colors_index\n\n self.face_colors_index += increment\n if self.face_colors_index >= len(self.face_colors):\n self.face_colors_index = self.face_colors_index-len(self.face_colors)\n if self.face_colors_index >= len(self.face_colors): self.face_colors_index=0 # to be safe\n\n return self.face_colors[i]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_edge_color(self, increment=1):\n\n i = self.edge_colors_index\n\n self.edge_colors_index += increment\n if self.edge_colors_index >= len(self.edge_colors):\n self.edge_colors_index = self.edge_colors_index-len(self.edge_colors)\n if self.edge_colors_index >= len(self.edge_colors): self.edge_colors_index=0 # to be safe\n\n return self.edge_colors[i]", "response": "Returns the edge color for the current face then increments the face by increment"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\napplies the style cycle to the lines in the specified axes", "response": "def apply(self, axes=\"gca\"):\n \"\"\"\n Applies the style cycle to the lines in the axes specified\n \"\"\"\n\n if axes == \"gca\": axes = _pylab.gca()\n self.reset()\n lines = axes.get_lines()\n\n for l in lines:\n l.set_color(self.get_line_color(1))\n l.set_mfc(self.get_face_color(1))\n l.set_marker(self.get_marker(1))\n l.set_mec(self.get_edge_color(1))\n l.set_linestyle(self.get_linestyle(1))\n\n _pylab.draw()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nsend a message to a user.", "response": "def _send_message(session, user_id, message=None, image_files=None):\n \"\"\"\n https://vk.com/dev/messages.send\n \"\"\"\n assert any([message, image_files])\n\n attachment_items = 
None\n if image_files:\n attachment_items = Photo._upload_messages_photos_for_group(session, user_id, image_files)\n\n message_id = session.fetch(\"messages.send\", user_id=user_id, message=message, attachment=attachment_items, random_id=random.randint(1, 10**6))\n return message_id"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget all the dialogs in the VK.", "response": "def get_dialog(session, unread=False, important=False, unanswered=False):\n \"\"\"\n https://vk.com/dev/messages.getDialogs\n \"\"\"\n response = session.fetch(\"messages.getDialogs\", unread=unread, important=important, unanswered=unanswered)\n dialog_json_items = response[\"items\"]\n return (Message.from_json(session, dialog_json[\"message\"]) for dialog_json in dialog_json_items)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets members of this group.", "response": "def get_members(self, sort='id_asc'):\n \"\"\"\n :param: sort {id_asc, id_desc, time_asc, time_desc} string\n Docs: https://vk.com/dev/groups.getMembers\n \"\"\"\n return self._session.fetch_items(\"groups.getMembers\", User.from_json, 1000, group_id=self.id, sort=sort, fields=User.__slots__ + User.USER_FIELDS)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting user s groups.", "response": "def _get_user_groups(session, user_id, filter):\n \"\"\"\n https://vk.com/dev/groups.get\n\n :param filter: {admin, editor, moder, groups, publics, events}\n :yield: Groups\n \"\"\"\n return session.fetch_items('groups.get', Group.from_json, count=1000, user_id=user_id, filter=filter, extended=1, fields=\",\".join(Group.GROUP_FIELDS))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef copy_spline_array(a):\n\n b = spline_array()\n\n b.x_splines = a.x_splines\n b.y_splines = a.y_splines\n b.max_y_splines = a.max_y_splines\n b.xmin = a.xmin \n b.xmax = a.xmax\n 
b.ymin = a.ymin\n b.ymax = a.ymax\n b.xlabel = a.xlabel\n b.ylabel = a.ylabel\n b.zlabel = a.zlabel\n b.simple = a.simple\n \n\n b.generate_y_values() \n\n return b", "response": "This returns an instance of a new spline_array with all the fixins and the data from a."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nfunction to plot a single spline.", "response": "def splot(axes=\"gca\", smoothing=5000, degree=5, presmoothing=0, plot=True, spline_class=spline_single, interactive=True, show_derivative=1):\n \"\"\"\n \n gets the data from the plot and feeds it into splint\n returns an instance of spline_single\n\n axes=\"gca\" which axes to get the data from.\n smoothing=5000 spline_single smoothing parameter\n presmoothing=0 spline_single data presmoothing factor (nearest neighbor)\n plot=True should we plot the result?\n spline_class=spline_single which data class to use?\n interactive=False should we spline fit interactively or just make a spline_single?\n \n \"\"\"\n\n if axes==\"gca\": axes = _pylab.gca()\n\n xlabel = axes.xaxis.label.get_text()\n ylabel = axes.yaxis.label.get_text()\n \n xdata = axes.get_lines()[0].get_xdata()\n ydata = axes.get_lines()[0].get_ydata()\n\n if interactive:\n return splinteractive(xdata, ydata, smoothing, degree, presmoothing, spline_class, xlabel, ylabel)\n else:\n return spline_class(xdata, ydata, smoothing, degree, presmoothing, plot, xlabel, ylabel)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nevaluates the spline data at the given time series x.", "response": "def evaluate(self, x, derivative=0, smooth=0, simple='auto'):\n \"\"\"\n smooth=0 is how much to smooth the spline data\n simple='auto' is whether we should just use straight interpolation\n you may want smooth > 0 for this, when derivative=1\n \"\"\"\n if simple=='auto': simple = self.simple\n \n # make it into an array if it isn't one, and remember that we did\n is_array = True\n if not type(x) 
== type(_pylab.array([])):\n x = _pylab.array([x]) \n is_array = False\n\n if simple:\n # loop over all supplied x data, and come up with a y for each\n y = []\n for n in range(0, len(x)):\n # get a window of data around x\n if smooth:\n [xtemp, ytemp, etemp] = _fun.trim_data(self.xdata, self.ydata, None, [x[n]-smooth, x[n]+smooth])\n else:\n i1 = _fun.index_nearest(x[n], self.xdata)\n\n # if the nearest data point is lower than x, use the next point to interpolate \n if self.xdata[i1] <= x[n] or i1 <= 0: i2 = i1+1\n else: i2 = i1-1\n\n # if we're at the max, extrapolate\n if i2 >= len(self.xdata):\n print(x[n], \"is out of range. extrapolating\")\n i2 = i1-1\n\n x1 = self.xdata[i1]\n y1 = self.ydata[i1]\n x2 = self.xdata[i2]\n y2 = self.ydata[i2]\n slope = (y2-y1)/(x2-x1)\n \n xtemp = _numpy.array([x[n]])\n ytemp = _numpy.array([y1 + (x[n]-x1)*slope])\n \n # calculate the slope based on xtemp and ytemp (if smoothing)\n # or just use the raw slope if smoothing=0\n if derivative == 1:\n if smooth:\n y.append((_numpy.average(xtemp*ytemp)-_numpy.average(xtemp)*_numpy.average(ytemp)) /\n (_numpy.average(xtemp*xtemp)-_numpy.average(xtemp)**2))\n else:\n y.append(slope)\n \n # otherwise just average (even with one element)\n elif derivative==0:\n y.append(_numpy.average(ytemp))\n \n if is_array: return _numpy.array(y)\n else: return y[0]\n \n \n if smooth:\n \n y = []\n for n in range(0, len(x)):\n # take 20 data points from x+/-smooth\n xlow = max(self.xmin,x[n]-smooth)\n xhi = min(self.xmax,x[n]+smooth)\n xdata = _pylab.linspace(xlow, xhi, 20)\n ydata = _interpolate.splev(xdata, self.pfit, derivative)\n y.append(_numpy.average(ydata))\n\n if is_array: return _numpy.array(y)\n else: return y[0]\n \n else: \n return _interpolate.splev(x, self.pfit, derivative)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef evaluate(self, x, y, x_derivative=0, smooth=0, simple='auto'):\n\n if simple=='auto': simple = 
self.simple\n \n # find which values y is in between\n for n in range(0, len(self.y_values)-1):\n # if it's in between, interpolate!\n if self.y_values[n] <= y and self.y_values[n+1] >= y:\n y1 = self.y_values[n]\n y2 = self.y_values[n+1]\n z1 = self.x_splines[y1].evaluate(x, x_derivative, smooth, simple)\n z2 = self.x_splines[y2].evaluate(x, x_derivative, smooth, simple)\n return z1 + (y-y1)*(z2-z1)/(y2-y1)\n\n print(\"YARG! The y value \"+str(y)+\" is out of interpolation range!\") \n if y >= self.y_values[-1]: return self.x_splines[self.y_values[-1]].evaluate(x, x_derivative, smooth, simple)\n else : return self.x_splines[self.y_values[0]].evaluate(x, x_derivative, smooth, simple)", "response": "Evaluate the 2 - d spline by doing linear interpolation of the curves."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nplot the data at fixed x - value so z vs x - value", "response": "def plot_fixed_x(self, x_values, x_derivative=0, steps=1000, smooth=0, simple='auto', ymin=\"auto\", ymax=\"auto\", format=True, clear=1):\n \"\"\"\n plots the data at fixed x-value, so z vs x\n \"\"\"\n if simple=='auto': simple=self.simple\n \n # get the min and max\n if ymin==\"auto\": ymin = self.ymin\n if ymax==\"auto\": ymax = self.ymax\n if clear: _pylab.gca().clear()\n\n if not type(x_values) in [type([]), type(_pylab.array([]))]: x_values = [x_values]\n\n for x in x_values:\n \n # define a new simple function to plot, then plot it\n def f(y): return self.evaluate(x, y, x_derivative, smooth, simple)\n _pylab_help.plot_function(f, ymin, ymax, steps, 0, False)\n\n # label it\n a = _pylab.gca()\n a.set_xlabel(self.ylabel)\n if x_derivative: a.set_ylabel(str(x_derivative)+\" \"+str(self.xlabel)+\" derivative of \"+self.zlabel)\n else: a.set_ylabel(self.zlabel)\n a.set_title(self._path+\"\\nSpline array plot at fixed x = \"+self.xlabel)\n a.get_lines()[-1].set_label(\"x (\"+self.xlabel+\") = \"+str(x))\n\n if format: _s.format_figure()\n return a"} 
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef plot_fixed_y(self, y_values, x_derivative=0, steps=1000, smooth=0, simple='auto', xmin=\"auto\", xmax=\"auto\", format=True, clear=1):\n if simple=='auto': simple=self.simple\n\n\n # get the min and max\n if xmin==\"auto\": xmin = self.xmin\n if xmax==\"auto\": xmax = self.xmax\n if clear: _pylab.gca().clear()\n\n if not type(y_values) in [type([]), type(_pylab.array([]))]: y_values = [y_values]\n\n for y in y_values:\n # define a new simple function to plot, then plot it\n def f(x): return self.evaluate(x, y, x_derivative, smooth, simple)\n _pylab_help.plot_function(f, xmin, xmax, steps, 0, True)\n\n # label it\n a = _pylab.gca()\n th = \"th\"\n if x_derivative == 1: th = \"st\"\n if x_derivative == 2: th = \"nd\"\n if x_derivative == 3: th = \"rd\"\n if x_derivative: a.set_ylabel(str(x_derivative)+th+\" \"+self.xlabel+\" derivative of \"+self.zlabel+\" spline\")\n else: a.set_ylabel(self.zlabel)\n a.set_xlabel(self.xlabel)\n a.set_title(self._path+\"\\nSpline array plot at fixed y \"+self.ylabel)\n a.get_lines()[-1].set_label(\"y (\"+self.ylabel+\") = \"+str(y))\n if format: _s.format_figure()\n return a", "response": "Plots the data at a fixed y - value so z vs y - values."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn the object s parent window. Returns None if no parent window is found.", "response": "def get_window(self): \n \"\"\"\n Returns the object's parent window. 
Returns None if no window found.\n \"\"\"\n x = self\n while not x._parent == None and \\\n not isinstance(x._parent, Window): \n x = x._parent\n return x._parent"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef sleep(self, seconds=0.05, dt=0.01):\n t0 = _t.time()\n while _t.time()-t0 < seconds:\n \n # Pause a bit to avoid heavy CPU\n _t.sleep(dt)\n \n # process events\n self.process_events()", "response": "A smooth version of time. sleep that will pause for the specified amount of time."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nprevent the widget from sending events.", "response": "def block_events(self):\n \"\"\"\n Prevents the widget from sending signals.\n \"\"\"\n self._widget.blockSignals(True)\n self._widget.setUpdatesEnabled(False)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nunblocking the events from being processed.", "response": "def unblock_events(self):\n \"\"\"\n Allows the widget to send signals.\n \"\"\"\n self._widget.blockSignals(False)\n self._widget.setUpdatesEnabled(True)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nprint a message to the log.", "response": "def print_message(self, message=\"heya!\"):\n \"\"\"\n If self.log is defined to be an instance of TextLog, it print the\n message there. 
Otherwise use the usual \"print\" to command line.\n \"\"\"\n if self.log == None: print(message)\n else: self.log.append_text(message)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef save_gui_settings(self, *a):\n \n # only if we're supposed to!\n if self._autosettings_path:\n\n # Get the gui settings directory\n gui_settings_dir = _os.path.join(_cwd, 'egg_settings')\n\n # make sure the directory exists\n if not _os.path.exists(gui_settings_dir): _os.mkdir(gui_settings_dir)\n\n # make a path with a sub-directory\n path = _os.path.join(gui_settings_dir, self._autosettings_path)\n \n # for saving header info\n d = _d.databox()\n\n # add all the controls settings\n for x in self._autosettings_controls: self._store_gui_setting(d, x)\n\n # save the file\n d.save_file(path, force_overwrite=True)", "response": "Save the current configuration of the controls and header info of the current header of the controls."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nloading the settings from the main gui and updates the value of the current object.", "response": "def load_gui_settings(self, lazy_only=False):\n \"\"\"\n Loads the settings (if we're supposed to).\n \n Parameters\n ----------\n lazy_only=False\n If True, it will only load the settings into self._lazy_load, and\n not try to update the value. 
Presumably the widget will apply these\n settings later, e.g., when enough tabs exist to set the current\n tab in TabArea.\n \"\"\"\n\n # only do this if we're supposed to\n if self._autosettings_path is not None:\n\n # Get the gui settings directory\n gui_settings_dir = _os.path.join(_cwd, 'egg_settings')\n\n # assemble the path with a sub-directory\n path = _os.path.join(gui_settings_dir, self._autosettings_path)\n\n # databox just for loading a cfg file\n d = _d.databox()\n\n # if the settings path exists\n if _os.path.exists(path):\n \n # Load the settings\n d.load_file(path, header_only=True, quiet=True)\n\n # Store the settings in the lazy dictionary\n self._lazy_load.update(d.headers)\n \n # Loop over the settings we're supposed to change\n # Note without specifying d, this will try to pop\n # the value off of self._lazy_load. If \n #\n if not lazy_only:\n for x in self._autosettings_controls: \n self._load_gui_setting(x)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _load_gui_setting(self, key, d=None):\n # Whether we should pop the value from the dictionary when we set it.\n pop_value = False\n \n # If d is None, assume we're using the lazy load settings.\n if d == None: \n d = self._lazy_load\n pop_value = True\n \n # If we have a databox, take the header dictionary\n if not type(d) == dict: d = d.headers\n \n # only do this if the key exists\n if key in d:\n \n try:\n # Try to set the value\n eval(key).set_value(d[key])\n\n # If this fails, perhaps the element does not yet exist\n # For example, TabArea may not have all the tabs created\n # and cannot set the active tab until later.\n # If it's here, it worked, so pop if necessary\n if pop_value: d.pop(key)\n \n except: \n print(\"ERROR: Could not load gui setting \"+repr(key))", "response": "Loads the appropriate control - related settings from the GUI."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it 
does\ndef _store_gui_setting(self, databox, name):\n try: databox.insert_header(name, eval(name + \".get_value()\"))\n except: print(\"ERROR: Could not store gui setting \"+repr(name))", "response": "Store the gui setting in the header of the databox"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef place_object(self, object, column=None, row=None, column_span=1, row_span=1, alignment=1):\n\n # pick a column\n if column==None:\n column = self._auto_column\n self._auto_column += 1\n\n # pick a row\n if row==None: row = self._auto_row\n\n # create the object\n self.objects.append(object)\n\n # add the widget to the layout\n try:\n object._widget\n widget = object._widget\n\n # allows the user to specify a standard widget\n except: widget = object\n\n self._layout.addWidget(widget, row, column,\n row_span, column_span,\n _g.Qt.QtCore.Qt.Alignment(alignment))\n\n # try to store the parent object (self) in the placed object\n try: object.set_parent(self)\n except: None\n\n return object", "response": "Adds an object to the grid at the specified position."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nremoves the supplied object from the grid.", "response": "def remove_object(self, object=0, delete=True):\n \"\"\"\n Removes the supplied object from the grid. 
If object is an integer,\n it removes the n'th object.\n \"\"\"\n if type(object) in [int, int]: n = object\n else: n = self.objects.index(object)\n\n # pop it from the list\n object = self.objects.pop(n)\n\n # remove it from the GUI and delete it\n if hasattr_safe(object, '_widget'):\n self._layout.removeWidget(object._widget)\n object._widget.hide()\n if delete: object._widget.deleteLater()\n else:\n self._layout.removeWidget(object)\n object.hide()\n if delete: object.deleteLater()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef new_autorow(self, row=None):\n if row==None: self._auto_row += 1\n else: self._auto_row = row\n\n self._auto_column=0\n return self", "response": "Sets the auto - add row and column."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef place_docker(self, docker, area='top'):\n # map of options\n m = dict(top = _g.QtCore.Qt.TopDockWidgetArea,\n bottom = _g.QtCore.Qt.BottomDockWidgetArea,\n left = _g.QtCore.Qt.LeftDockWidgetArea,\n right = _g.QtCore.Qt.RightDockWidgetArea)\n \n \n # set the parent\n docker.set_parent(self)\n\n # events\n docker._window.resizeEvent = self._event_resize\n docker._window.moveEvent = self._event_move \n\n # Keep it in the window \n docker._window.setFeatures(docker._window.DockWidgetMovable) \n \n # set it\n self._window.addDockWidget(m[area], docker._window)\n \n return docker", "response": "Places a Docker instance at the specified area."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _save_settings(self):\n if self._autosettings_path == None: return\n \n # Get the gui settings directory\n gui_settings_dir = _os.path.join(_cwd, 'egg_settings')\n\n # make sure the directory exists\n if not _os.path.exists(gui_settings_dir): _os.mkdir(gui_settings_dir)\n\n # make a path with a sub-directory\n path = 
_os.path.join(gui_settings_dir, self._autosettings_path)\n \n # Create a Qt settings object\n settings = _g.QtCore.QSettings(path, _g.QtCore.QSettings.IniFormat)\n settings.clear()\n \n # Save values\n if hasattr_safe(self._window, \"saveState\"):\n settings.setValue('State',self._window.saveState())\n settings.setValue('Geometry', self._window.saveGeometry())", "response": "Saves all the parameters to a text file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _load_settings(self):\n if self._autosettings_path == None: return\n \n # Get the gui settings directory\n gui_settings_dir = _os.path.join(_cwd, 'egg_settings')\n\n # make a path with a sub-directory\n path = _os.path.join(gui_settings_dir, self._autosettings_path)\n \n # make sure the directory exists\n if not _os.path.exists(path): return\n \n # Create a Qt settings object\n settings = _g.QtCore.QSettings(path, _g.QtCore.QSettings.IniFormat)\n \n # Load it up! (Extra steps required for windows command line execution)\n if settings.contains('State') and hasattr_safe(self._window, \"restoreState\"): \n x = settings.value('State')\n if hasattr(x, \"toByteArray\"): x = x.toByteArray() \n self._window.restoreState(x)\n \n if settings.contains('Geometry'): \n x = settings.value('Geometry')\n if hasattr(x, \"toByteArray\"): x = x.toByteArray()\n self._window.restoreGeometry(x)", "response": "Loads all the parameters from a databox text file. If path = None loads all the parameters from self. _autosettings_path. If path = None loads all the parameters from self. 
_autosettings_path."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef show(self, block_command_line=False, block_timing=0.05):\n\n self._is_open = True\n self._window.show()\n self._window.raise_()\n\n # start the blocking loop\n if block_command_line:\n\n # stop when the window closes\n while self._is_open:\n _a.processEvents()\n _t.sleep(block_timing)\n\n # wait a moment in case there is some shutdown stuff.\n _t.sleep(0.5)\n\n return self", "response": "Shows the window and raises it."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set_checked(self, value=True, block_events=False):\n if block_events: self._widget.blockSignals(True)\n self._widget.setChecked(value)\n if block_events: self._widget.blockSignals(False)\n\n return self", "response": "Sets the button is checked."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef click(self):\n if self.is_checkable(): \n if self.is_checked(): self.set_checked(False)\n else: self.set_checked(True)\n self.signal_clicked.emit(self.is_checked())\n else:\n self.signal_clicked.emit(True)\n return self", "response": "Sends the signal that the user clicked it sending the signal and everything."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets the style of the label for the given object.", "response": "def set_style(self, *args, **kwargs):\n \"\"\"\n Provides access to any number of style sheet settings via \n self._widget.SetStyleSheet()\n \n *args can be any element between semicolons, e.g.\n self.set_stylesheet('text-align: right; padding-left: 5px')\n\n **kwargs can be any key-value pair, replacing '-' with '_' in the key, e.g.\n self.set_stylesheet(text_align='right', padding_left='5px')\n \n See QLabel.SetStyleSheet() documentation.\n \"\"\"\n # assemble the string\n s = \"QLabel {\"\n for a in args: s = 
s+a+\"; \"\n for k in list(kwargs.keys()): s = s+k.replace(\"_\",\"-\")+\": \"+kwargs[k]+\"; \"\n s = s+\"}\"\n \n # set it!\n self._widget.setStyleSheet(s)\n return self"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef set_value(self, value, block_events=False):\n if block_events: self.block_events()\n self._widget.setValue(value)\n if block_events: self.unblock_events()", "response": "Sets the current value of the number box."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsetting the step of the number box.", "response": "def set_step(self, value, block_events=False):\n \"\"\"\n Sets the step of the number box.\n\n Setting block_events=True will temporarily block the widget from\n sending any signals when setting the value.\n \"\"\"\n if block_events: self.block_events()\n self._widget.setSingleStep(value)\n if block_events: self.unblock_events()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget the text from a given index.", "response": "def get_text(self, index=None):\n \"\"\"\n Gets text from a given index. 
If index=None, returns the current value\n self.get_text(self.get_value())\n \"\"\"\n if index==None:\n return self.get_text(self.get_value())\n else:\n return str(self._widget.itemText(index))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_all_items(self):\n return [self._widget.itemText(k) for k in range(self._widget.count())]", "response": "Returns all items in the combobox dictionary."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a tab to the area and creates the layout for this tab.", "response": "def add_tab(self, title=\"Yeah!\", block_events=True):\n \"\"\"\n Adds a tab to the area, and creates the layout for this tab.\n \"\"\"\n self._widget.blockSignals(block_events)\n\n # create a widget to go in the tab\n tab = GridLayout()\n self.tabs.append(tab)\n tab.set_parent(self)\n\n # create and append the tab to the list\n self._widget.addTab(tab._widget, title)\n\n # try to lazy set the current tab\n if 'self' in self._lazy_load and self.get_tab_count() > self._lazy_load['self']:\n v = self._lazy_load.pop('self')\n self.set_current_tab(v)\n\n self._widget.blockSignals(False)\n\n return tab"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef remove_tab(self, tab=0):\n\n # pop it from the list\n t = self.tabs.pop(tab)\n\n # remove it from the gui\n self._widget.removeTab(tab)\n\n # return it in case someone cares\n return t", "response": "Removes the tab by index."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a the value at column row.", "response": "def get_value(self, column=0, row=0):\n \"\"\"\n Returns a the value at column, row.\n \"\"\"\n x = self._widget.item(row, column)\n\n if x==None: return x\n else: return str(self._widget.item(row,column).text())"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting the value at column row.", 
"response": "def set_value(self, column=0, row=0, value='', block_events=False):\n \"\"\"\n Sets the value at column, row. This will create elements dynamically,\n and in a totally stupid while-looping way.\n\n Setting block_events=True will temporarily block the widget from\n sending any signals when setting the value.\n \"\"\"\n\n if block_events: self.block_events()\n\n # dynamically resize\n while column > self._widget.columnCount()-1: self._widget.insertColumn(self._widget.columnCount())\n while row > self._widget.rowCount() -1: self._widget.insertRow( self._widget.rowCount())\n\n # set the value\n self._widget.setItem(row, column, _g.Qt.QtGui.QTableWidgetItem(str(value)))\n\n if block_events: self.unblock_events()\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets then'th column width in pixels.", "response": "def set_column_width(self, n=0, width=120):\n \"\"\"\n Sets the n'th column width in pixels.\n \"\"\"\n self._widget.setColumnWidth(n, width)\n return self"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_header_visibility(self, column=False, row=False):\n if row: self._widget.verticalHeader().show()\n else: self._widget.verticalHeader().hide()\n\n if column: self._widget.horizontalHeader().show()\n else: self._widget.horizontalHeader().hide()\n\n return self", "response": "Sets whether we can see the column and row headers."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset then'th row height in pixels.", "response": "def set_row_height(self, n=0, height=18):\n \"\"\"\n Sets the n'th row height in pixels.\n \"\"\"\n self._widget.setRowHeight(n, height)\n return self"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _clean_up_key(self, key):\n for n in self.naughty: key = key.replace(n, '_')\n return key", "response": "Clean up the key string with no naughty 
characters."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncalls when a cell is changed. Updates the dictionary.", "response": "def _cell_changed(self, *a):\n \"\"\"\n Called whenever a cell is changed. Updates the dictionary.\n \"\"\"\n\n # block all signals during the update (to avoid re-calling this)\n self.block_events()\n\n # loop over the rows\n for n in range(self.get_row_count()):\n\n # get the keys and values (as text)\n key = self.get_value(0,n)\n value = self.get_value(1,n)\n\n # get rid of the None's\n if key == None:\n key = ''\n self.set_value(0,n, '')\n if value == None:\n value = ''\n self.set_value(1,n, '')\n\n # if it's not an empty entry make sure it's valid\n if not key == '':\n\n # clean up the key\n key = self._clean_up_key(key)\n\n # now make sure the value is python-able\n try:\n eval(value)\n self._widget.item(n,1).setData(_g.QtCore.Qt.BackgroundRole, _g.Qt.QtGui.QColor('white'))\n except:\n self._widget.item(n,1).setData(_g.QtCore.Qt.BackgroundRole, _g.Qt.QtGui.QColor('pink'))\n\n # unblock all signals\n self.unblock_events()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_item(self, key):\n keys = list(self.keys())\n\n # make sure it exists\n if not key in keys:\n self.print_message(\"ERROR: '\"+str(key)+\"' not found.\")\n return None\n\n try:\n x = eval(self.get_value(1,keys.index(key)))\n return x\n\n except:\n self.print_message(\"ERROR: '\"+str(self.get_value(1,keys.index(key)))+\"' cannot be evaluated.\")\n return None", "response": "Returns the value associated with the key."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef keys(self):\n keys = list()\n for n in range(len(self)):\n\n # only append the valid keys\n key = self.get_value()\n if not key in ['', None]: keys.append(key)\n\n return keys", "response": "Returns a sorted list of keys"} {"SOURCE": "codesearchnet", 
"instruction": "How would you explain what the following Python 3 function does\ndef set_item(self, key, value):\n keys = list(self.keys())\n\n # if it exists, update\n if key in keys:\n self.set_value(1,keys.index(key),str(value))\n\n # otherwise we have to add an element\n else:\n self.set_value(0,len(self), str(key))\n self.set_value(1,len(self)-1, str(value))", "response": "Sets the item by key and refills the table sorted."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the dictionary with the values from kwargs.", "response": "def update(self, dictionary=None, **kwargs):\n \"\"\"\n Adds/overwrites all the keys and values from the dictionary.\n \"\"\"\n if not dictionary == None: kwargs.update(dictionary)\n for k in list(kwargs.keys()): self[k] = kwargs[k]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_text(self):\n if self._multiline: return str(self._widget.toPlainText())\n else: return str(self._widget.text())", "response": "Returns the current text."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set_text(self, text=\"YEAH.\"):\n # Make sure the text is actually different, so as not to \n # trigger a value changed signal\n s = str(text)\n if not s == self.get_text(): self._widget.setText(str(text))\n return self", "response": "Sets the text of the text box."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nset the colors of the text area.", "response": "def set_colors(self, text='black', background='white'):\n \"\"\"\n Sets the colors of the text area.\n \"\"\"\n if self._multiline: self._widget.setStyleSheet(\"QTextEdit {background-color: \"+str(background)+\"; color: \"+str(text)+\"}\")\n else: self._widget.setStyleSheet(\"QLineEdit {background-color: \"+str(background)+\"; color: \"+str(text)+\"}\")"} {"SOURCE": "codesearchnet", "instruction": "Here you 
have a function in Python 3, explain what it does\ndef block_events(self):\n # block events in the usual way\n BaseObject.block_events(self)\n \n # loop over all top level parameters\n for i in range(self._widget.topLevelItemCount()):\n self._widget.topLevelItem(i).param.blockSignals(True)\n \n return self", "response": "Block all tree elements and parameters in the tree."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nunblock all events in the tree.", "response": "def unblock_events(self):\n \"\"\"\n Special version of unblock_events that loops over all tree elements as well.\n \"\"\"\n # unblock events in the usual way\n BaseObject.unblock_events(self)\n \n # loop over all top level parameters\n for i in range(self._widget.topLevelItemCount()):\n self._widget.topLevelItem(i).param.blockSignals(False)\n \n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef connect_any_signal_changed(self, function):\n \n # loop over all top level parameters\n for i in range(self._widget.topLevelItemCount()):\n\n # make sure there is only one connection!\n try:\n self._widget.topLevelItem(i).param.sigTreeStateChanged.connect(\n function, type=_g.QtCore.Qt.UniqueConnection)\n except:\n pass\n \n return self", "response": "Connects the anything changed signal to all of the tree items."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nconnects a changed signal from the parameters of the specified name to the supplied function.", "response": "def connect_signal_changed(self, name, function):\n \"\"\"\n Connects a changed signal from the parameters of the specified name\n to the supplied function.\n \"\"\"\n x = self._find_parameter(name.split(\"/\"))\n\n # if it pooped.\n if x==None: return None\n\n # connect it\n x.sigValueChanged.connect(function)\n \n # Keep track of the functions\n if name in self._connection_lists: 
self._connection_lists[name].append(function)\n else: self._connection_lists[name] = [function]\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef block_user_signals(self, name, ignore_error=False):\n x = self._find_parameter(name.split(\"/\"), quiet=ignore_error)\n\n # if it pooped.\n if x==None: return None\n \n # disconnect it from all its functions\n if name in self._connection_lists:\n for f in self._connection_lists[name]: x.sigValueChanged.disconnect(f)\n \n return self", "response": "Disconnects all user - defined signals for the specified parameter name."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef unblock_user_signals(self, name, ignore_error=False):\n x = self._find_parameter(name.split(\"/\"), quiet=ignore_error)\n\n # if it pooped.\n if x==None: return None\n \n # reconnect it to all its functions\n if name in self._connection_lists:\n for f in self._connection_lists[name]: x.sigValueChanged.connect(f)\n \n return self", "response": "Unblock the user - defined signals for the specified \n parameter name."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _find_parameter(self, name_list, create_missing=False, quiet=False):\n # make a copy so this isn't destructive to the supplied list\n s = list(name_list)\n\n # if the length is zero, return the root widget\n if len(s)==0: return self._widget\n\n # the first name must be treated differently because it is\n # the main widget, not a branch\n r = self._clean_up_name(s.pop(0))\n\n # search for the root name\n result = self._widget.findItems(r, _g.QtCore.Qt.MatchCaseSensitive | _g.QtCore.Qt.MatchFixedString)\n\n # if it pooped and we're not supposed to create it, quit\n if len(result) == 0 and not create_missing:\n if not quiet: self.print_message(\"ERROR: Could not find '\"+r+\"'\")\n return None\n\n # otherwise use the 
first value\n elif len(result): x = result[0].param\n\n # otherwise, if there are more names in the list,\n # create the branch and keep going\n else:\n x = _g.parametertree.Parameter.create(name=r, type='group', children=[])\n self._widget.addParameters(x)\n\n # loop over the remaining names, and use a different search method\n for n in s:\n\n # first clean up\n n = self._clean_up_name(n)\n\n # try to search for the name\n try: x = x.param(n)\n\n # name doesn't exist\n except:\n\n # if we're supposed to, create the new branch\n if create_missing: x = x.addChild(_g.parametertree.Parameter.create(name=n, type='group', children=[]))\n\n # otherwise poop out\n else:\n if not quiet: self.print_message(\"ERROR: Could not find '\"+n+\"' in '\"+x.name()+\"'\")\n return None\n\n # return the last one we found / created.\n return x", "response": "Tries to find and return the parameter of the specified name."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _clean_up_name(self, name):\n for n in self.naughty: name = name.replace(n, '_')\n return name", "response": "Cleans up the name according to the rules specified in this exact\n function."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_button(self, name, checkable=False, checked=False):\n\n # first clean up the name\n name = self._clean_up_name(name)\n\n # split into (presumably existing) branches and parameter\n s = name.split('/')\n\n # make sure it doesn't already exist\n if not self._find_parameter(s, quiet=True) == None:\n self.print_message(\"Error: '\"+name+\"' already exists.\")\n return None\n\n # get the leaf name off the list.\n p = s.pop(-1)\n\n # create / get the branch on which to add the leaf\n b = self._find_parameter(s, create_missing=True)\n\n # quit out if it pooped\n if b == None: return None\n\n # create the leaf object\n ap = 
_g.parametertree.Parameter.create(name=p, type='action')\n\n # add it to the tree (different methods for root)\n if b == self._widget: b.addParameters(ap)\n else: b.addChild(ap)\n\n # modify the existing class to fit our conventions\n ap.signal_clicked = ap.sigActivated\n\n # Now set the default value if any\n if name in self._lazy_load:\n v = self._lazy_load.pop(name)\n self._set_value_safe(name, v, True, True)\n \n # Connect it to autosave (will only create unique connections)\n self.connect_any_signal_changed(self.autosave)\n \n return Button(name, checkable, checked, list(ap.items.keys())[0].button)", "response": "Adds a button at the specified location."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nadds a parameter to the tree.", "response": "def add_parameter(self, name='test', value=42.0, **kwargs):\n \"\"\"\n Adds a parameter \"leaf\" to the tree. \n \n Parameters\n ----------\n name='test'\n The name of the leaf. It should be a string of the form\n \"branch1/branch2/parametername\" and will be nested as indicated.\n value=42.0\n Value of the leaf.\n \n Common Keyword Arguments\n ------------------------\n type=None\n If set to None, type will be automatically set to type(value).__name__.\n This will not work for all data types, but is \n a nice shortcut for floats, ints, strings, etc. \n If it doesn't work, just specify the type manually (see below).\n values\n Not used by default. Only relevant for 'list' type, and should then\n be a list of possible values.\n step=1 \n Step size of incrementing numbers\n dec=False\n Set to True to enable decade increments.\n limits\n Not used by default. Should be a 2-element tuple or list used to \n bound numerical values.\n default \n Not used by default. Used to specify the default numerical value\n siPrefix=False\n Set to True to display units on numbers\n suffix\n Not used by default. 
Used to add unit labels to elements.\n \n\n See pyqtgraph ParameterTree for more options. Particularly useful is the\n tip='insert your text' option, which supplies a tooltip!\n \"\"\"\n\n # update the default kwargs\n other_kwargs = dict(type=None)\n other_kwargs.update(kwargs)\n \n # Auto typing\n if other_kwargs['type'] == None: other_kwargs['type'] = type(value).__name__\n \n # Fix 'values' for list objects to be only strings\n if other_kwargs['type'] == 'list':\n for n in range(len(other_kwargs['values'])):\n other_kwargs['values'][n] = str(other_kwargs['values'][n])\n\n # split into (presumably existing) branches and parameter\n s = name.split('/')\n\n # make sure it doesn't already exist\n if not self._find_parameter(s, quiet=True) == None:\n self.print_message(\"Error: '\"+name+\"' already exists.\")\n return self\n\n # get the leaf name off the list.\n p = s.pop(-1)\n\n # create / get the branch on which to add the leaf\n b = self._find_parameter(s, create_missing=True)\n\n # quit out if it pooped\n if b == None: return self\n\n # create the leaf object\n leaf = _g.parametertree.Parameter.create(name=p, value=value, **other_kwargs)\n \n # add it to the tree (different methods for root)\n if b == self._widget: b.addParameters(leaf)\n else: b.addChild(leaf)\n \n # Now set the default value if any\n if name in self._lazy_load:\n v = self._lazy_load.pop(name)\n self._set_value_safe(name, v, True, True)\n \n # Connect it to autosave (will only create unique connections)\n self.connect_any_signal_changed(self.autosave)\n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _get_parameter_dictionary(self, base_name, dictionary, sorted_keys, parameter):\n\n # assemble the key for this parameter\n k = base_name + \"/\" + parameter.name()\n\n # first add this parameter (if it has a value)\n if not parameter.value()==None:\n sorted_keys.append(k[1:])\n dictionary[sorted_keys[-1]] = 
parameter.value()\n\n # now loop over the children\n for p in parameter.children():\n self._get_parameter_dictionary(k, dictionary, sorted_keys, p)", "response": "Recursively loops over the parameter s children adding the base_name and values to the supplied dictionary."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef send_to_databox_header(self, destination_databox):\n k, d = self.get_dictionary()\n destination_databox.update_headers(d,k)", "response": "Sends all the information currently in the tree to the supplied databox s header in alphabetical order."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the list of parameters and a dictionary of values", "response": "def get_dictionary(self):\n \"\"\"\n Returns the list of parameters and a dictionary of values\n (good for writing to a databox header!)\n\n Return format is sorted_keys, dictionary\n \"\"\"\n\n # output\n k = list()\n d = dict()\n\n # loop over the root items\n for i in range(self._widget.topLevelItemCount()):\n\n # grab the parameter item, and start building the name\n x = self._widget.topLevelItem(i).param\n\n # now start the recursive loop\n self._get_parameter_dictionary('', d, k, x)\n\n return k, d"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns the value of the parameter with the specified name.", "response": "def get_value(self, name):\n \"\"\"\n Returns the value of the parameter with the specified name.\n \"\"\"\n # first clean up the name\n name = self._clean_up_name(name)\n\n # now get the parameter object\n x = self._find_parameter(name.split('/'))\n\n # quit if it pooped.\n if x == None: return None\n\n # get the value and test the bounds\n value = x.value()\n \n # handles the two versions of pyqtgraph\n bounds = None\n \n # For lists, just make sure it's a valid value\n if x.opts['type'] == 'list':\n\n # If it's not one from the master list, 
choose\n # and return the default value. \n if not value in x.opts['values']:\n \n # Only choose a default if there exists one \n if len(x.opts('values')): \n self.set_value(name, x.opts['values'][0]) \n return x.opts['values'][0]\n \n # Otherwise, just return None and do nothing \n else: return None\n\n # For strings, make sure the returned value is always a string.\n elif x.opts['type'] in ['str']: return str(value)\n \n # Otherwise assume it is a value with bounds or limits (depending on \n # the version of pyqtgraph) \n else: \n if 'limits' in x.opts: bounds = x.opts['limits']\n elif 'bounds' in x.opts: bounds = x.opts['bounds']\n if not bounds == None:\n if not bounds[1]==None and value > bounds[1]: value = bounds[1]\n if not bounds[0]==None and value < bounds[0]: value = bounds[0]\n \n # return it \n return value"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_list_values(self, name):\n # Make sure it's a list\n if not self.get_type(name) in ['list']: \n self.print_message('ERROR: \"'+name+'\" is not a list.')\n return\n \n # Return a copy of the list values\n return list(self.get_widget(name).opts['values'])", "response": "Returns the list values for a list item of the specified name."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef set_value(self, name, value, ignore_error=False, block_user_signals=False):\n # first clean up the name\n name = self._clean_up_name(name)\n\n # If we're supposed to, block the user signals for this parameter\n if block_user_signals: self.block_user_signals(name, ignore_error) \n\n # now get the parameter object\n x = self._find_parameter(name.split('/'), quiet=ignore_error)\n\n # quit if it pooped.\n if x == None: return None\n \n # for lists, make sure the value exists!!\n if x.type() in ['list']:\n \n # Make sure it exists before trying to set it\n if str(value) in list(x.forward.keys()): 
x.setValue(str(value))\n \n # Otherwise default to the first key\n else: x.setValue(list(x.forward.keys())[0])\n \n # Bail to a hopeful set method for other types\n else: x.setValue(eval(x.opts['type'])(value))\n\n # If we're supposed to unblock the user signals for this parameter\n if block_user_signals: self.unblock_user_signals(name, ignore_error)\n\n return self", "response": "Sets the value of the supplied parameter name to the supplied value."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef save(self, path=None):\n if path==None: \n \n if self._autosettings_path == None: return self\n \n # Get the gui settings directory\n gui_settings_dir = _os.path.join(_cwd, 'egg_settings')\n\n # make sure the directory exists\n if not _os.path.exists(gui_settings_dir): _os.mkdir(gui_settings_dir) \n \n # Assemble the path\n path = _os.path.join(gui_settings_dir, self._autosettings_path)\n \n # make the databox object\n d = _d.databox()\n\n # get the keys and dictionary\n keys, dictionary = self.get_dictionary()\n\n # loop over the keys and add them to the databox header\n for k in keys: \n d.insert_header(k, dictionary[k])\n \n # save it\n try:\n d.save_file(path, force_overwrite=True, header_only=True)\n except:\n print('Warning: could not save '+path.__repr__()+' once. 
Could be that this is being called too rapidly.')\n \n return self", "response": "Saves all the parameters to a text file using the databox\n functionality."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _set_value_safe(self, k, v, ignore_errors=False, block_user_signals=False):\n # for safety: by default assume it's a repr() with python code\n try:\n self.set_value(k, v, ignore_error = ignore_errors, \n block_user_signals = block_user_signals)\n\n except:\n print(\"TreeDictionary ERROR: Could not set '\"+k+\"' to '\"+v+\"'\")", "response": "Set the value of a key to a value."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncall when the button is clicked.", "response": "def _button_autosave_clicked(self, checked):\n \"\"\"\n Called whenever the button is clicked.\n \"\"\"\n if checked:\n # get the path from the user\n path = _spinmob.dialogs.save(filters=self.file_type)\n\n # abort if necessary\n if not path:\n self.button_autosave.set_checked(False)\n return\n\n # otherwise, save the info!\n self._autosave_directory, filename = _os.path.split(path)\n self._label_path.set_text(filename)\n\n self.save_gui_settings()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef save_file(self, path=None, force_overwrite=False, just_settings=False, **kwargs):\n # Update the binary mode\n if not 'binary' in kwargs: kwargs['binary'] = self.combo_binary.get_text()\n \n # if it's just the settings file, make a new databox\n if just_settings: d = _d.databox()\n\n # otherwise use the internal databox\n else: d = self\n\n # add all the controls settings to the header\n for x in self._autosettings_controls: self._store_gui_setting(d, x)\n\n # save the file using the skeleton function, so as not to recursively \n # call this one again!\n _d.databox.save_file(d, path, self.file_type, self.file_type, force_overwrite, **kwargs)", 
"response": "Save the data in the databox to a file."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef load_file(self, path=None, just_settings=False):\n # if it's just the settings file, make a new databox\n if just_settings:\n d = _d.databox()\n header_only = True\n\n # otherwise use the internal databox\n else:\n d = self\n header_only = False\n\n # import the settings if they exist in the header\n if not None == _d.databox.load_file(d, path, filters=self.file_type, header_only=header_only, quiet=just_settings):\n \n # loop over the autosettings and update the gui\n for x in self._autosettings_controls: self._load_gui_setting(x,d)\n\n # always sync the internal data\n self._synchronize_controls()\n\n # plot the data if this isn't just a settings load\n if not just_settings:\n self.plot()\n self.after_load_file()", "response": "Loads a data file and updates the internal data and the GUI."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngenerating a python script for plotting the current object.", "response": "def _autoscript(self):\n \"\"\"\n Automatically generates a python script for plotting. 
\n \"\"\"\n # This should never happen unless I screwed up.\n if self.combo_autoscript.get_index() == 0: return \"ERROR: Ask Jack.\"\n \n # if there is no data, leave it blank\n if len(self)==0: return \"x = []; y = []; xlabels=[]; ylabels=[]\"\n\n # if there is one column, make up a one-column script\n elif len(self)==1: return \"x = [None]\\ny = [ d[0] ]\\n\\nxlabels=[ 'Data Point' ]\\nylabels=[ 'd[0]' ]\"\n \n # Shared x-axis (column 0)\n elif self.combo_autoscript.get_index() == 1:\n \n # hard code the first columns\n sx = \"x = [ d[0]\"\n sy = \"y = [ d[1]\"\n\n # hard code the first labels\n sxlabels = \"xlabels = '\" +self.ckeys[0]+\"'\"\n sylabels = \"ylabels = [ '\"+self.ckeys[1]+\"'\"\n\n # loop over any remaining columns and append.\n for n in range(2,len(self)):\n sy += \", d[\"+str(n)+\"]\"\n sylabels += \", '\"+self.ckeys[n]+\"'\"\n\n return sx+\" ]\\n\"+sy+\" ]\\n\\n\"+sxlabels+\"\\n\"+sylabels+\" ]\\n\"\n \n \n # Column pairs\n elif self.combo_autoscript.get_index() == 2:\n \n # hard code the first columns\n sx = \"x = [ d[0]\"\n sy = \"y = [ d[1]\"\n \n # hard code the first labels\n sxlabels = \"xlabels = [ '\"+self.ckeys[0]+\"'\"\n sylabels = \"ylabels = [ '\"+self.ckeys[1]+\"'\"\n \n # Loop over the remaining columns and append\n for n in range(1,int(len(self)/2)):\n sx += \", d[\"+str(2*n )+\"]\"\n sy += \", d[\"+str(2*n+1)+\"]\"\n sxlabels += \", '\"+self.ckeys[2*n ]+\"'\"\n sylabels += \", '\"+self.ckeys[2*n+1]+\"'\"\n \n return sx+\" ]\\n\"+sy+\" ]\\n\\n\"+sxlabels+\" ]\\n\"+sylabels+\" ]\\n\"\n print(\"test\")\n \n # Column triples\n elif self.combo_autoscript.get_index() == 3:\n \n # hard code the first columns\n sx = \"x = [ d[0], d[0]\"\n sy = \"y = [ d[1], d[2]\"\n\n # hard code the first labels\n sxlabels = \"xlabels = [ '\"+self.ckeys[0]+\"', '\"+self.ckeys[0]+\"'\"\n sylabels = \"ylabels = [ '\"+self.ckeys[1]+\"', '\"+self.ckeys[2]+\"'\"\n\n # Loop over the remaining columns and append\n for n in range(1,int(len(self)/3)):\n sx += 
\", d[\"+str(3*n )+\"], d[\"+str(3*n )+\"]\"\n sy += \", d[\"+str(3*n+1)+\"], d[\"+str(3*n+2)+\"]\"\n \n sxlabels += \", '\"+self.ckeys[3*n ]+\"', '\"+self.ckeys[3*n ]+\"'\"\n sylabels += \", '\"+self.ckeys[3*n+1]+\"', '\"+self.ckeys[3*n+2]+\"'\"\n\n return sx+\" ]\\n\"+sy+\" ]\\n\\n\"+sxlabels+\" ]\\n\"+sylabels+\" ]\\n\"\n \n else: return self.autoscript_custom()"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nplots the internal databox.", "response": "def plot(self):\n \"\"\"\n Sets the internal databox to the supplied value and plots it.\n If databox=None, this will plot the internal databox.\n \"\"\"\n\n # if we're disabled or have no data columns, clear everything!\n if not self.button_enabled.is_checked() or len(self) == 0:\n self._set_number_of_plots(0)\n return self\n\n # if there is no script, create a default\n if not self.combo_autoscript.get_index()==0:\n self.script.set_text(self._autoscript())\n\n ##### Try the script and make the curves / plots match\n\n try:\n # get globals for sin, cos etc\n g = _n.__dict__\n g.update(dict(d=self))\n g.update(dict(xlabels='x', ylabels='y'))\n\n # run the script.\n exec(self.script.get_text(), g)\n\n # x & y should now be data arrays, lists of data arrays or Nones\n x = g['x']\n y = g['y']\n\n # make it the right shape\n if x == None: x = [None]\n if y == None: y = [None]\n if not _spinmob.fun.is_iterable(x[0]) and not x[0] == None: x = [x]\n if not _spinmob.fun.is_iterable(y[0]) and not y[0] == None: y = [y]\n if len(x) == 1 and not len(y) == 1: x = x*len(y)\n if len(y) == 1 and not len(x) == 1: y = y*len(x)\n\n\n # xlabels and ylabels should be strings or lists of strings\n xlabels = g['xlabels']\n ylabels = g['ylabels']\n\n # make sure we have exactly the right number of plots\n self._set_number_of_plots(len(x))\n self._update_linked_axes()\n\n # return if there is nothing.\n if len(x) == 0: return\n\n # now plot everything\n for n in 
range(max(len(x),len(y))-1,-1,-1):\n\n # Create data for \"None\" cases.\n if x[n] is None: x[n] = list(range(len(y[n])))\n if y[n] is None: y[n] = list(range(len(x[n])))\n self._curves[n].setData(x[n],y[n])\n\n # get the labels for the curves\n\n # if it's a string, use the same label for all axes\n if type(xlabels) in [str,type(None)]: xlabel = xlabels\n elif n < len(xlabels): xlabel = xlabels[n]\n else: xlabel = ''\n\n if type(ylabels) in [str,type(None)]: ylabel = ylabels\n elif n < len(ylabels): ylabel = ylabels[n]\n else: ylabel = ''\n\n # set the labels\n i = min(n, len(self.plot_widgets)-1)\n self.plot_widgets[i].setLabel('left', ylabel)\n self.plot_widgets[i].setLabel('bottom', xlabel)\n\n # special case: hide if None\n if xlabel == None: self.plot_widgets[i].getAxis('bottom').showLabel(False)\n if ylabel == None: self.plot_widgets[i].getAxis('left') .showLabel(False)\n\n # unpink the script, since it seems to have worked\n self.script.set_colors('black','white')\n\n # otherwise, look angry and don't autosave\n except: self.script.set_colors('black','pink')\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef autosave(self):\n # make sure we're suppoed to\n if self.button_autosave.is_checked():\n\n # save the file\n self.save_file(_os.path.join(self._autosave_directory, \"%04d \" % (self.number_file.get_value()) + self._label_path.get_text()))\n\n # increment the counter\n self.number_file.increment()", "response": "Autosaves the currently stored data but only if autosave is checked!"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nsynchronize the control based on button configs.", "response": "def _synchronize_controls(self):\n \"\"\"\n Updates the gui based on button configs.\n \"\"\"\n\n # whether the script is visible\n self.grid_script._widget.setVisible(self.button_script.get_value())\n\n # whether we should be able to edit it.\n if not 
self.combo_autoscript.get_index()==0: self.script.disable()\n else: self.script.enable()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadjusts the number of plots and curves to the desired value the gui.", "response": "def _set_number_of_plots(self, n):\n \"\"\"\n Adjusts number of plots & curves to the desired value the gui.\n \"\"\"\n\n # multi plot, right number of plots and curves = great!\n if self.button_multi.is_checked() \\\n and len(self._curves) == len(self.plot_widgets) \\\n and len(self._curves) == n: return\n\n # single plot, right number of curves = great!\n if not self.button_multi.is_checked() \\\n and len(self.plot_widgets) == 1 \\\n and len(self._curves) == n: return\n\n # time to rebuild!\n\n # don't show the plots as they are built\n self.grid_plot.block_events()\n\n # make sure the number of curves is on target\n while len(self._curves) > n: self._curves.pop(-1)\n while len(self._curves) < n: self._curves.append(_g.PlotCurveItem(pen = (len(self._curves), n)))\n\n # figure out the target number of plots\n if self.button_multi.is_checked(): n_plots = n\n else: n_plots = min(n,1)\n\n # clear the plots\n while len(self.plot_widgets):\n\n # pop the last plot widget and remove all items\n p = self.plot_widgets.pop(-1)\n p.clear()\n\n # remove it from the grid\n self.grid_plot.remove_object(p)\n\n # add new plots\n for i in range(n_plots):\n self.plot_widgets.append(self.grid_plot.place_object(_g.PlotWidget(), 0, i, alignment=0))\n\n # loop over the curves and add them to the plots\n for i in range(n):\n self.plot_widgets[min(i,len(self.plot_widgets)-1)].addItem(self._curves[i])\n\n # loop over the ROI's and add them\n if self.ROIs is not None:\n\n for i in range(len(self.ROIs)):\n\n # get the ROIs for this plot\n ROIs = self.ROIs[i]\n\n if not _spinmob.fun.is_iterable(ROIs): ROIs = [ROIs]\n\n # loop over the ROIs for this plot\n for ROI in ROIs:\n\n # determine which plot to add the ROI to\n m = min(i, 
len(self.plot_widgets)-1)\n\n # add the ROI to the appropriate plot\n if m>=0: self.plot_widgets[m].addItem(ROI)\n\n # show the plots\n self.grid_plot.unblock_events()"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nupdate the linked axes of the current plot item.", "response": "def _update_linked_axes(self):\n \"\"\"\n Loops over the axes and links / unlinks them.\n \"\"\"\n # no axes to link!\n if len(self.plot_widgets) <= 1: return\n\n # get the first plotItem\n a = self.plot_widgets[0].plotItem.getViewBox()\n \n # now loop through all the axes and link / unlink the axes\n for n in range(1,len(self.plot_widgets)):\n \n # Get one of the others\n b = self.plot_widgets[n].plotItem.getViewBox() \n \n # link the axis, but only if it isn't already\n if self.button_link_x.is_checked() and b.linkedView(b.XAxis) == None:\n b.linkView(b.XAxis, a)\n \n # Otherwise, unlink the guy, but only if it's linked to begin with\n elif not self.button_link_x.is_checked() and not b.linkedView(b.XAxis) == None:\n b.linkView(b.XAxis, None)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfind and links the ConstSpec with the given name.", "response": "def resolve_const_spec(self, name, lineno):\n \"\"\"Finds and links the ConstSpec with the given name.\"\"\"\n\n if name in self.const_specs:\n return self.const_specs[name].link(self)\n\n if '.' in name:\n include_name, component = name.split('.', 1)\n if include_name in self.included_scopes:\n return self.included_scopes[include_name].resolve_const_spec(\n component, lineno\n )\n\n raise ThriftCompilerError(\n 'Unknown constant \"%s\" referenced at line %d%s' % (\n name, lineno, self.__in_path()\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef resolve_type_spec(self, name, lineno):\n\n if name in self.type_specs:\n return self.type_specs[name].link(self)\n\n if '.' 
in name:\n include_name, component = name.split('.', 1)\n if include_name in self.included_scopes:\n return self.included_scopes[include_name].resolve_type_spec(\n component, lineno\n )\n\n raise ThriftCompilerError(\n 'Unknown type \"%s\" referenced at line %d%s' % (\n name, lineno, self.__in_path()\n )\n )", "response": "Finds and links the TypeSpec with the given name."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nfinds and links the ServiceSpec with the given name.", "response": "def resolve_service_spec(self, name, lineno):\n \"\"\"Finds and links the ServiceSpec with the given name.\"\"\"\n\n if name in self.service_specs:\n return self.service_specs[name].link(self)\n\n if '.' in name:\n include_name, component = name.split('.', 2)\n if include_name in self.included_scopes:\n return self.included_scopes[\n include_name\n ].resolve_service_spec(component, lineno)\n\n raise ThriftCompilerError(\n 'Unknown service \"%s\" referenced at line %d%s' % (\n name, lineno, self.__in_path()\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_include(self, name, included_scope, module):\n # The compiler already ensures this. If we still get here with a\n # conflict, that's a bug.\n assert name not in self.included_scopes\n\n self.included_scopes[name] = included_scope\n self.add_surface(name, module)", "response": "Register an imported module into this scope."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nadd the given ServiceSpec into the scope.", "response": "def add_service_spec(self, service_spec):\n \"\"\"Registers the given ``ServiceSpec`` into the scope.\n\n Raises ``ThriftCompilerError`` if the name has already been used.\n \"\"\"\n assert service_spec is not None\n\n if service_spec.name in self.service_specs:\n raise ThriftCompilerError(\n 'Cannot define service \"%s\". 
That name is already taken.'\n % service_spec.name\n )\n self.service_specs[service_spec.name] = service_spec"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_const_spec(self, const_spec):\n if const_spec.name in self.const_specs:\n raise ThriftCompilerError(\n 'Cannot define constant \"%s\". That name is already taken.'\n % const_spec.name\n )\n self.const_specs[const_spec.name] = const_spec", "response": "Adds a ConstSpec to the compliation scope."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a top - level attribute with the given name to the module.", "response": "def add_surface(self, name, surface):\n \"\"\"Adds a top-level attribute with the given name to the module.\"\"\"\n assert surface is not None\n\n if hasattr(self.module, name):\n raise ThriftCompilerError(\n 'Cannot define \"%s\". The name has already been used.' % name\n )\n\n setattr(self.module, name, surface)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds the given type to the scope.", "response": "def add_type_spec(self, name, spec, lineno):\n \"\"\"Adds the given type to the scope.\n\n :param str name:\n Name of the new type\n :param spec:\n ``TypeSpec`` object containing information on the type, or a\n ``TypeReference`` if this is meant to be resolved during the\n ``link`` stage.\n :param lineno:\n Line number on which this type is defined.\n \"\"\"\n assert type is not None\n\n if name in self.type_specs:\n raise ThriftCompilerError(\n 'Cannot define type \"%s\" at line %d. 
'\n 'Another type with that name already exists.'\n % (name, lineno)\n )\n\n self.type_specs[name] = spec"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef coarsen_array(a, level=2, method='mean'):\n if a is None: return None \n \n # make sure it's a numpy array\n a = _n.array(a)\n \n # quickest option\n if level in [0,1,False]: return a\n\n # otherwise assemble the python code to execute\n code = 'a.reshape(-1, level).'+method+'(axis=1)'\n\n # execute, making sure the array can be reshaped!\n try: return eval(code, dict(a=a[0:int(len(a)/level)*level], level=level))\n except:\n print(\"ERROR: Could not coarsen array with method \"+repr(method))\n return a", "response": "Coarsen an array of data."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncoarsens the supplied data set and returns a list of arrays along with the specified quadrature - coarsened arrays of x y uncertainties and ex uncertainties.", "response": "def coarsen_data(x, y, ey=None, ex=None, level=2, exponential=False):\n \"\"\"\n Coarsens the supplied data set. Returns coarsened arrays of x, y, along with\n quadrature-coarsened arrays of ey and ex if specified.\n \n Parameters\n ----------\n x, y\n Data arrays. Can be lists (will convert to numpy arrays).\n These are coarsened by taking an average.\n ey=None, ex=None\n y and x uncertainties. Accepts arrays, lists, or numbers. \n These are coarsened by averaging in quadrature.\n level=2\n For linear coarsening (default, see below), every n=level points will\n be averaged together (in quadrature for errors). For exponential\n coarsening, bins will be spaced by the specified scaling=level factor;\n for example, level=1.4 will group points within 40% of each other's x\n values. This is a great option for log-x plots, as the outcome will be\n evenly spaced.\n exponential=False\n If False, coarsen using linear spacing. 
If True, the bins will be \n exponentially spaced by the specified level. \n \"\"\"\n \n # Normal coarsening\n if not exponential:\n \n # Coarsen the data\n xc = coarsen_array(x, level, 'mean')\n yc = coarsen_array(y, level, 'mean')\n \n # Coarsen the y error in quadrature\n if not ey is None:\n if not is_iterable(ey): ey = [ey]*len(y)\n eyc = _n.sqrt(coarsen_array(_n.power(ey,2)/level, level, 'mean'))\n \n # Coarsen the x error in quadrature\n if not ex is None:\n if not is_iterable(ey): ex = [ex]*len(x)\n exc = _n.sqrt(coarsen_array(_n.power(ex,2)/level, level, 'mean'))\n \n # Exponential coarsen \n else:\n # Make sure the data are arrays\n x = _n.array(x)\n y = _n.array(y)\n \n # Create the new arrays to fill\n xc = []\n yc = []\n if not ey is None: \n if not is_iterable(ey): ey = _n.array([ey]*len(y))\n eyc = []\n if not ex is None:\n if not is_iterable(ex): ex = _n.array([ex]*len(x))\n exc = []\n \n # Find the first element that is greater than zero \n x0 = x[x>0][0]\n \n # Now loop over the exponential bins\n n = 0\n while x0*level**n < x[-1]:\n \n # Get all the points between x[n] and x[n]*r\n mask = _n.logical_and(x0*level**n <= x, x < x0*level**(n+1))\n \n # Only do something if points exist from this range!\n if len(x[mask]):\n \n # Take the average x value\n xc.append(_n.average(x[mask]))\n yc.append(_n.average(y[mask]))\n \n # do the errors in quadrature\n if not ey is None: eyc.append(_n.sqrt(_n.average((ey**2)[mask])/len(ey[mask])))\n if not ex is None: exc.append(_n.sqrt(_n.average((ex**2)[mask])/len(ex[mask])))\n \n # Increment the counter\n n += 1\n \n # Done exponential loop\n\n # Done coarsening \n # Return depending on situation\n if ey is None and ex is None: return _n.array(xc), _n.array(yc)\n elif ex is None : return _n.array(xc), _n.array(yc), _n.array(eyc)\n elif ey is None : return _n.array(xc), _n.array(yc), _n.array(exc)\n else : return _n.array(xc), _n.array(yc), _n.array(eyc), _n.array(exc)"} {"SOURCE": "codesearchnet", "instruction": 
"Make a summary of the following Python 3 code\ndef coarsen_matrix(Z, xlevel=0, ylevel=0, method='average'):\n\n # coarsen x\n if not ylevel:\n Z_coarsened = Z\n else:\n temp = []\n for z in Z: temp.append(coarsen_array(z, ylevel, method))\n Z_coarsened = _n.array(temp)\n\n # coarsen y\n if xlevel:\n Z_coarsened = Z_coarsened.transpose()\n temp = []\n for z in Z_coarsened: temp.append(coarsen_array(z, xlevel, method))\n Z_coarsened = _n.array(temp).transpose()\n\n return Z_coarsened\n\n\n\n # first coarsen the columns (if necessary)\n if ylevel:\n Z_ycoarsened = []\n for c in Z: Z_ycoarsened.append(coarsen_array(c, ylevel, method))\n Z_ycoarsened = _n.array(Z_ycoarsened)\n\n # now coarsen the rows\n if xlevel: return coarsen_array(Z_ycoarsened, xlevel, method)\n else: return _n.array(Z_ycoarsened)", "response": "This returns a numpy matrix that coarsenes the columns and rows of the tree Z."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a numpy array over the specified range taking geometric steps.", "response": "def erange(start, end, steps):\n \"\"\"\n Returns a numpy array over the specified range taking geometric steps.\n\n See also numpy.logspace()\n \"\"\"\n if start == 0:\n print(\"Nothing you multiply zero by gives you anything but zero. Try picking something small.\")\n return None\n if end == 0:\n print(\"It takes an infinite number of steps to get to zero. 
Try a small number?\")\n return None\n\n # figure out our multiplication scale\n x = (1.0*end/start)**(1.0/(steps-1))\n\n # now generate the array\n ns = _n.array(list(range(0,steps)))\n a = start*_n.power(x,ns)\n\n # tidy up the last element (there's often roundoff error)\n a[-1] = end\n\n return a"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning True if the object s value is a number False otherwise.", "response": "def is_a_number(s):\n \"\"\"\n This takes an object and determines whether it's a number or a string\n representing a number.\n \"\"\"\n if _s.fun.is_iterable(s) and not type(s) == str: return False\n\n try:\n float(s)\n return 1\n except:\n try: \n complex(s)\n return 2\n except: \n try: \n complex(s.replace('(','').replace(')','').replace('i','j'))\n return 2\n except: \n return False"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nassembling the covariance matrix from an error vector and a correlation matrix.", "response": "def assemble_covariance(error, correlation):\n \"\"\"\n This takes an error vector and a correlation matrix and assembles the covariance\n \"\"\"\n\n covariance = []\n for n in range(0, len(error)):\n covariance.append([])\n for m in range(0, len(error)):\n covariance[n].append(correlation[n][m]*error[n]*error[m])\n return _n.array(covariance)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the combined dictionary a and b.", "response": "def combine_dictionaries(a, b):\n \"\"\"\n returns the combined dictionary. 
a's values preferentially chosen\n \"\"\"\n\n c = {}\n for key in list(b.keys()): c[key]=b[key]\n for key in list(a.keys()): c[key]=a[key]\n return c"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef decompose_covariance(c):\n\n # make it a kickass copy of the original\n c = _n.array(c)\n\n # first get the error vector\n e = []\n for n in range(0, len(c[0])): e.append(_n.sqrt(c[n][n]))\n\n # now cycle through the matrix, dividing by e[1]*e[2]\n for n in range(0, len(c[0])):\n for m in range(0, len(c[0])):\n c[n][m] = c[n][m] / (e[n]*e[m])\n\n return [_n.array(e), _n.array(c)]", "response": "This decomposes a covariance matrix into an error vector and a correlation matrix."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn the derivative of the data with nearest - neighbor slopes.", "response": "def derivative(xdata, ydata):\n \"\"\"\n performs d(ydata)/d(xdata) with nearest-neighbor slopes\n must be well-ordered, returns new arrays [xdata, dydx_data]\n\n neighbors:\n \"\"\"\n D_ydata = []\n D_xdata = []\n for n in range(1, len(xdata)-1):\n D_xdata.append(xdata[n])\n D_ydata.append((ydata[n+1]-ydata[n-1])/(xdata[n+1]-xdata[n-1]))\n\n return [D_xdata, D_ydata]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nperforming a least - squares linear fit of the data points at each point. Returns an array of x - values and slopes.", "response": "def derivative_fit(xdata, ydata, neighbors=1):\n \"\"\"\n loops over the data points, performing a least-squares linear fit of the\n nearest neighbors at each point. 
Returns an array of x-values and slopes.\n\n xdata should probably be well-ordered.\n\n neighbors How many data point on the left and right to include.\n \"\"\"\n\n x = []\n dydx = []\n nmax = len(xdata)-1\n\n for n in range(nmax+1):\n # get the indices of the data to fit\n i1 = max(0, n-neighbors)\n i2 = min(nmax, n+neighbors)\n\n # get the sub data to fit\n xmini = _n.array(xdata[i1:i2+1])\n ymini = _n.array(ydata[i1:i2+1])\n\n slope, intercept = fit_linear(xmini, ymini)\n\n # make x the average of the xmini\n x.append(float(sum(xmini))/len(xmini))\n dydx.append(slope)\n\n return _n.array(x), _n.array(dydx)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\napplying a distortion (remapping) to the matrix Z (and x-values X) using function f. returns new_Z, new_X f is an INVERSE function old_x(new_x) Z is a matrix. X is an array where X[n] is the x-value associated with the array Z[n]. new_xmin, new_xmax is the possible range of the distorted x-variable for generating Z points is how many elements the stretched Z should have. \"auto\" means use the same number of bins", "response": "def distort_matrix_X(Z, X, f, new_xmin, new_xmax, subsample=3):\n \"\"\"\n Applies a distortion (remapping) to the matrix Z (and x-values X) using function f.\n returns new_Z, new_X\n\n f is an INVERSE function old_x(new_x)\n\n Z is a matrix. X is an array where X[n] is the x-value associated with the array Z[n].\n\n new_xmin, new_xmax is the possible range of the distorted x-variable for generating Z\n\n points is how many elements the stretched Z should have. 
\"auto\" means use the same number of bins\n \"\"\"\n\n Z = _n.array(Z)\n X = _n.array(X)\n points = len(Z)*subsample\n\n\n # define a function for searching\n def zero_me(new_x): return f(new_x)-target_old_x\n\n # do a simple search to find the new_x that gives old_x = min(X)\n target_old_x = min(X)\n new_xmin = find_zero_bisect(zero_me, new_xmin, new_xmax, _n.abs(new_xmax-new_xmin)*0.0001)\n target_old_x = max(X)\n new_xmax = find_zero_bisect(zero_me, new_xmin, new_xmax, _n.abs(new_xmax-new_xmin)*0.0001)\n\n # now loop over all the new x values\n new_X = []\n new_Z = []\n bin_width = float(new_xmax-new_xmin)/(points)\n for new_x in frange(new_xmin, new_xmax, bin_width):\n\n # make sure we're in the range of X\n if f(new_x) <= max(X) and f(new_x) >= min(X):\n\n # add this guy to the array\n new_X.append(new_x)\n\n # get the interpolated column\n new_Z.append( interpolate(X,Z,f(new_x)) )\n\n return _n.array(new_Z), _n.array(new_X)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef distort_matrix_Y(Z, Y, f, new_ymin, new_ymax, subsample=3):\n\n # just use the same methodology as before by transposing, distorting X, then\n # transposing back\n new_Z, new_Y = distort_matrix_X(Z.transpose(), Y, f, new_ymin, new_ymax, subsample)\n return new_Z.transpose(), new_Y", "response": "Applies a distortion to the matrix Z and y - values Y using function f."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef dumbguy_minimize(f, xmin, xmax, xstep):\n\n prev = f(xmin)\n this = f(xmin+xstep)\n for x in frange(xmin+xstep,xmax,xstep):\n next = f(x+xstep)\n\n # see if we're on top\n if this < prev and this < next: return x, this\n\n prev = this\n this = next\n\n return x, this", "response": "This just steps x and looks for a peak\n returns x and f"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function 
does\ndef elements_are_numbers(array):\n\n # empty case\n if len(array) == 0: return 0\n\n output_value = 1\n for x in array:\n\n # test it and die if it's not a number\n test = is_a_number(x)\n if not test: return False\n\n # mention if it's complex\n output_value = max(output_value,test)\n \n return output_value", "response": "Tests whether the elements of the supplied array are numbers."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef equalize_list_lengths(a,b):\n if not _s.fun.is_iterable(a): a = [a]\n a = list(a)\n while len(a)>len(b): a.pop(-1)\n while len(a)>>\")\n a.reverse()\n return a\n except:\n pass\n\n return ['shell history not available']", "response": "Returns a list of history items for the current shell."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef index(value, array):\n i = array.searchsorted(value)\n if i == len(array): return -1\n else: return i", "response": "Returns the index of the value in the array."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the index of the nearest value in an array", "response": "def index_nearest(value, array):\n \"\"\"\n expects a _n.array\n returns the global minimum of (value-array)^2\n \"\"\"\n\n a = (array-value)**2\n return index(a.min(), a)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef index_next_crossing(value, array, starting_index=0, direction=1):\n\n for n in range(starting_index, len(array)-1):\n if (value-array[n] )*direction >= 0 \\\n and (value-array[n+1])*direction < 0: return n\n\n # no crossing found\n return -1", "response": "Returns the index of the next crossing point with value in the array."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ninvert the increasing function f by f0.", "response": "def 
invert_increasing_function(f, f0, xmin, xmax, tolerance, max_iterations=100):\n \"\"\"\n This will try try to qickly find a point on the f(x) curve between xmin and xmax that is\n equal to f0 within tolerance.\n \"\"\"\n\n for n in range(max_iterations):\n # start at the middle\n x = 0.5*(xmin+xmax)\n\n df = f(x)-f0\n if _n.fabs(df) < tolerance: return x\n\n # if we're high, set xmin to x etc...\n if df > 0: xmin=x\n else: xmax=x\n\n print(\"Couldn't find value!\")\n return 0.5*(xmin+xmax)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a single - sided power spectral density of the signal at time t and signal y.", "response": "def psd(t, y, pow2=False, window=None, rescale=False):\n \"\"\"\n Single-sided power spectral density, assuming real valued inputs. This goes \n through the numpy fourier transform process, assembling and returning\n (frequencies, psd) given time and signal data y. \n \n Note it is defined such that sum(psd)*df, where df is the frequency\n spacing, is the variance of the original signal for any range of frequencies.\n This includes the DC and Nyquist components: \n \n sqrt(psd[0]*df) = average value of original time trace\n \n sqrt(psd[-1]*df) = amplitude of Nyquist component (for even # points)\n \n Parameters\n ----------\n t,y\n Time (t) and signal (y) arrays with which to perform the PSD. Note the t\n array is assumed to be evenly spaced.\n\n pow2 = False\n Set this to true if you only want to keep the first 2^n data\n points (speeds up the FFT substantially)\n\n window = None\n can be set to any of the windowing functions in numpy,\n e.g. window='hanning'. 
\n \n rescale = False\n If True, the FFT will be rescaled by the square root of the ratio of \n variances before and after windowing, such that the integral \n sum(PSD)*df is the variance of the *original* time-domain data.\n\n returns frequencies, psd (y^2/Hz)\n \"\"\"\n\n # do the actual fft\n f, Y = fft(t,y,pow2,window,rescale)\n \n # take twice the negative frequency branch, because it contains the \n # extra frequency point when the number of points is odd.\n f = _n.abs(f[int(len(f)/2)::-1])\n P = _n.abs(Y[int(len(Y)/2)::-1])**2 / (f[1]-f[0])\n\n # Since this is the same as the positive frequency branch, double the\n # appropriate frequencies. For even number of points, there is one\n # extra negative frequency to avoid doubling. For odd, you only need to\n # avoid the DC value.\n \n # For the even\n if len(t)%2 == 0: P[1:len(P)-1] = P[1:len(P)-1]*2\n else: P[1:] = P[1:]*2\n\n return f, P"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef replace_in_files(search, replace, depth=0, paths=None, confirm=True):\n\n # have the user select some files\n if paths==None:\n paths = _s.dialogs.MultipleFiles('DIS AND DAT|*.*')\n if paths == []: return\n\n for path in paths:\n lines = read_lines(path)\n\n if depth: N=min(len(lines),depth)\n else: N=len(lines)\n\n for n in range(0,N):\n if lines[n].find(search) >= 0:\n lines[n] = lines[n].replace(search,replace)\n print(path.split(_os.path.pathsep)[-1]+ ': \"'+lines[n]+'\"')\n\n # only write if we're not confirming\n if not confirm:\n _os.rename(path, path+\".backup\")\n write_to_file(path, join(lines, ''))\n\n if confirm:\n if input(\"yes? 
\")==\"yes\":\n replace_in_files(search,replace,depth,paths,False)\n\n return", "response": "Replaces the search string in the specified list of files."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nfind lines containing the search string and replaces the whole line with replacement_line.", "response": "def replace_lines_in_files(search_string, replacement_line):\n \"\"\"\n Finds lines containing the search string and replaces the whole line with\n the specified replacement string.\n \"\"\"\n\n\n # have the user select some files\n paths = _s.dialogs.MultipleFiles('DIS AND DAT|*.*')\n if paths == []: return\n\n for path in paths:\n _shutil.copy(path, path+\".backup\")\n lines = read_lines(path)\n for n in range(0,len(lines)):\n if lines[n].find(search_string) >= 0:\n print(lines[n])\n lines[n] = replacement_line.strip() + \"\\n\"\n write_to_file(path, join(lines, ''))\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef reverse(array):\n l = list(array)\n l.reverse()\n return _n.array(l)", "response": "returns a reversed numpy array"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef round_sigfigs(x, n=2):\n iterable = is_iterable(x)\n if not iterable: x = [x]\n\n # make a copy to be safe\n x = _n.array(x)\n \n # loop over the elements\n for i in range(len(x)):\n \n # Handle the weird cases\n if not x[i] in [None, _n.inf, _n.nan]:\n sig_figs = -int(_n.floor(_n.log10(abs(x[i]))))+n-1\n x[i] = _n.round(x[i], sig_figs)\n \n if iterable: return x\n else: return x[0]", "response": "Rounds the number to the specified significant figures."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef shift_feature_to_x0(xdata, ydata, x0=0, feature=imax):\n i = feature(ydata)\n return xdata-xdata[i]+x0, ydata", "response": "Shifts xdata ydata by x0"} {"SOURCE": "codesearchnet", 
"instruction": "How would you code a function in Python 3 to\nreturn smoothed [xdata, ydata, yerror]. Does not destroy the input arrays.", "response": "def smooth_data(xdata, ydata, yerror, amount=1):\n \"\"\"\n Returns smoothed [xdata, ydata, yerror]. Does not destroy the input arrays.\n \"\"\"\n\n new_xdata = smooth_array(_n.array(xdata), amount)\n new_ydata = smooth_array(_n.array(ydata), amount)\n if yerror is None: new_yerror = None\n else: new_yerror = smooth_array(_n.array(yerror), amount)\n\n return [new_xdata, new_ydata, new_yerror]"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsorting a matrix by n elements.", "response": "def sort_matrix(a,n=0):\n \"\"\"\n This will rearrange the array a[n] from lowest to highest, and\n rearrange the rest of a[i]'s in the same way. It is dumb and slow.\n\n Returns a numpy array.\n \"\"\"\n a = _n.array(a)\n return a[:,a[n,:].argsort()]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef submatrix(matrix,i1,i2,j1,j2):\n\n new = []\n for i in range(i1,i2+1):\n new.append(matrix[i][j1:j2+1])\n return _n.array(new)", "response": "Returns the submatrix of the given matrix."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nremoves all the data except that xdata is between xmin and xmax.", "response": "def trim_data(xmin, xmax, xdata, *args):\n \"\"\"\n Removes all the data except that in which xdata is between xmin and xmax. 
\n This does not mutilate the input arrays, and additional arrays\n can be supplied via args (provided they match xdata in shape)\n\n xmin and xmax can be None\n \"\"\"\n\n # make sure it's a numpy array\n if not isinstance(xdata, _n.ndarray): xdata = _n.array(xdata)\n\n # make sure xmin and xmax are numbers\n if xmin is None: xmin = min(xdata)\n if xmax is None: xmax = max(xdata)\n\n # get all the indices satisfying the trim condition\n ns = _n.argwhere((xdata >= xmin) & (xdata <= xmax)).transpose()[0]\n\n # trim the xdata\n output = []\n output.append(xdata[ns])\n\n # trim the rest\n for a in args:\n # make sure it's a numpy array\n if not isinstance(a, _n.ndarray): a = _n.array(a)\n output.append(a[ns])\n\n return output"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nfetches a response from the Geocoding API.", "response": "def _fetch(self, searchtype, fields, **kwargs):\n '''Fetch a response from the Geocoding API.'''\n fields['vintage'] = self.vintage\n fields['benchmark'] = self.benchmark\n fields['format'] = 'json'\n\n if 'layers' in kwargs:\n fields['layers'] = kwargs['layers']\n\n returntype = kwargs.get('returntype', 'geographies')\n url = self._geturl(searchtype, returntype)\n\n try:\n with requests.get(url, params=fields, timeout=kwargs.get('timeout')) as r:\n content = r.json()\n if \"addressMatches\" in content.get('result', {}):\n return AddressResult(content)\n\n if \"geographies\" in content.get('result', {}):\n return GeographyResult(content)\n\n raise ValueError()\n\n except (ValueError, KeyError):\n raise ValueError(\"Unable to parse response from Census\")\n\n except RequestException as e:\n raise e"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef coordinates(self, x, y, **kwargs):\n '''Geocode a (lon, lat) coordinate.'''\n kwargs['returntype'] = 'geographies'\n fields = {\n 'x': x,\n 'y': y\n }\n\n return self._fetch('coordinates', 
fields, **kwargs)", "response": "Geocode a ( lon lat ) coordinate."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef onelineaddress(self, address, **kwargs):\n '''Geocode an an address passed as one string.\n e.g. \"4600 Silver Hill Rd, Suitland, MD 20746\"\n '''\n fields = {\n 'address': address,\n }\n\n return self._fetch('onelineaddress', fields, **kwargs)", "response": "Geocode an address passed as one string."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsends either a CSV file or a list of dicts with the above fields.", "response": "def addressbatch(self, data, **kwargs):\n '''\n Send either a CSV file or data to the addressbatch API.\n According to the Census, \"there is currently an upper limit of 1000 records per batch file.\"\n If a file, must have no header and fields id,street,city,state,zip\n If data, should be a list of dicts with the above fields (although ID is optional)\n '''\n # Does data quack like a file handle?\n if hasattr(data, 'read'):\n return self._post_batch(f=data, **kwargs)\n\n # Check if it's a string file\n elif isinstance(data, string_types):\n with open(data, 'rb') as f:\n return self._post_batch(f=f, **kwargs)\n\n else:\n # Otherwise, assume a list of dicts\n return self._post_batch(data=data, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nloads a colormap from the supplied name.", "response": "def load_colormap(self, name=None):\n \"\"\"\n Loads a colormap of the supplied name. None means used the internal\n name. 
(See self.get_name())\n \"\"\"\n\n if name == None: name = self.get_name()\n if name == \"\" or not type(name)==str: return \"Error: Bad name.\"\n\n # assemble the path to the colormap\n path = _os.path.join(_settings.path_home, \"colormaps\", name+\".cmap\")\n\n # make sure the file exists\n if not _os.path.exists(path):\n print(\"load_colormap(): Colormap '\"+name+\"' does not exist. Creating.\")\n self.save_colormap(name)\n return\n\n # open the file and get the lines\n f = open(path, 'r')\n x = f.read()\n f.close()\n\n try:\n self._colorpoint_list = eval(x)\n except:\n print(\"Invalid colormap. Overwriting.\")\n self.save_colormap()\n\n # update the image\n self.update_image()\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef save_colormap(self, name=None):\n if name == None: name = self.get_name()\n if name == \"\" or not type(name)==str: return \"Error: invalid name.\"\n\n # get the colormaps directory\n colormaps = _os.path.join(_settings.path_home, 'colormaps')\n\n # make sure we have the colormaps directory\n _settings.MakeDir(colormaps)\n\n # assemble the path to the colormap\n path = _os.path.join(_settings.path_home, 'colormaps', name+\".cmap\")\n\n # open the file and overwrite\n f = open(path, 'w')\n f.write(str(self._colorpoint_list))\n f.close()\n\n return self", "response": "Saves the colormap with the specified name."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef delete_colormap(self, name=None):\n if name == None: name = self.get_name()\n if name == \"\" or not type(name)==str: return \"Error: invalid name.\"\n\n # assemble the path to the colormap\n path = _os.path.join(_settings.path_home, 'colormaps', name+\".cmap\")\n _os.unlink(path)\n\n return self", "response": "Deletes the colormap with the specified name."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef 
set_name(self, name=\"My Colormap\"):\n if not type(name)==str:\n print(\"set_name(): Name must be a string.\")\n return\n self._name = name\n return self", "response": "Sets the name of the object."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets which pylab image to tweak.", "response": "def set_image(self, image='auto'):\n \"\"\"\n Set which pylab image to tweak.\n \"\"\"\n if image==\"auto\": image = _pylab.gca().images[0]\n self._image=image\n self.update_image()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef update_image(self):\n if self._image:\n self._image.set_cmap(self.get_cmap())\n _pylab.draw()", "response": "Update the image s cmap."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nremove and returns the specified colorpoint. Will always leave two behind.", "response": "def pop_colorpoint(self, n=0):\n \"\"\"\n Removes and returns the specified colorpoint. 
Will always leave two behind.\n \"\"\"\n\n # make sure we have more than 2; otherwise don't pop it, just return it\n if len(self._colorpoint_list) > 2:\n\n # do the popping\n x = self._colorpoint_list.pop(n)\n\n # make sure the endpoints are 0 and 1\n self._colorpoint_list[0][0] = 0.0\n self._colorpoint_list[-1][0] = 1.0\n\n # update the image\n self.update_image()\n\n return x\n\n # otherwise just return the indexed item\n else: return self[n]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef insert_colorpoint(self, position=0.5, color1=[1.0,1.0,0.0], color2=[1.0,1.0,0.0]):\n L = self._colorpoint_list\n\n # if position = 0 or 1, push the end points inward\n if position <= 0.0:\n L.insert(0,[0.0,color1,color2])\n\n elif position >= 1.0:\n L.append([1.0,color1,color2])\n\n # otherwise, find the position where it belongs\n else:\n\n # loop over all the points\n for n in range(len(self._colorpoint_list)):\n # check if it's less than the next one\n if position <= L[n+1][0]:\n # found the place to insert it\n L.insert(n+1,[position,color1,color2])\n break\n\n # update the image with the new cmap\n self.update_image()\n\n return self", "response": "Insert a colorpoint into the list of colorpoints."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nmodify the colorpoint n in the image.", "response": "def modify_colorpoint(self, n, position=0.5, color1=[1.0,1.0,1.0], color2=[1.0,1.0,1.0]):\n \"\"\"\n Changes the values of an existing colorpoint, then updates the colormap.\n \"\"\"\n if n==0.0 : position = 0.0\n elif n==len(self._colorpoint_list)-1: position = 1.0\n else: position = max(self._colorpoint_list[n-1][0], position)\n\n self._colorpoint_list[n] = [position, color1, color2]\n self.update_image()\n self.save_colormap(\"Last Used\")"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ngenerate a pylab colormap object from the 
colorpoint data.", "response": "def get_cmap(self):\n \"\"\"\n Generates a pylab cmap object from the colorpoint data.\n \"\"\"\n\n # now generate the colormap from the ordered list\n r = []\n g = []\n b = []\n for p in self._colorpoint_list:\n r.append((p[0], p[1][0]*1.0, p[2][0]*1.0))\n g.append((p[0], p[1][1]*1.0, p[2][1]*1.0))\n b.append((p[0], p[1][2]*1.0, p[2][2]*1.0))\n\n # store the formatted dictionary\n c = {'red':r, 'green':g, 'blue':b}\n\n # now set the dang thing\n return _mpl.colors.LinearSegmentedColormap('custom', c)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nbuilding the gui for the current locale.", "response": "def _build_gui(self):\n \"\"\"\n Removes all existing sliders and rebuilds them based on the colormap.\n \"\"\"\n # remove all widgets (should destroy all children too)\n self._central_widget.deleteLater()\n\n # remove all references to other controls\n self._sliders = []\n self._buttons_top_color = []\n self._buttons_bottom_color = []\n self._checkboxes = []\n self._buttons_plus = []\n self._buttons_minus = []\n self._color_dialogs_top = []\n self._color_dialogs_bottom = []\n\n # create the new central widget\n self._central_widget = _qtw.QWidget()\n self._window.setCentralWidget(self._central_widget)\n\n # layout for main widget\n self._layout = _qtw.QGridLayout(self._central_widget)\n self._central_widget.setLayout(self._layout)\n\n # add the list of cmaps\n self._combobox_cmaps = _qtw.QComboBox(self._central_widget)\n self._combobox_cmaps.setEditable(True)\n self._load_cmap_list()\n\n # add the save and delete buttons\n self._button_save = _qtw.QPushButton(\"Save\", self._central_widget)\n self._button_delete = _qtw.QPushButton(\"Delete\", self._central_widget)\n self._button_save.setFixedWidth(70)\n self._button_delete.setFixedWidth(70)\n\n # layouts\n self._layout.addWidget(self._combobox_cmaps, 1,1, 1,3, _qtcore.Qt.Alignment(0))\n self._layout.addWidget(self._button_save, 1,5, 1,1, 
_qtcore.Qt.Alignment(1))\n self._layout.addWidget(self._button_delete, 1,6, 1,2, _qtcore.Qt.Alignment(1))\n\n # actions\n self._combobox_cmaps.currentIndexChanged.connect(self._signal_load)\n self._button_save .clicked.connect(self._button_save_clicked)\n self._button_delete.clicked.connect(self._button_delete_clicked)\n\n # ensmallen the window\n self._window.resize(10,10)\n\n # now create a control set for each color point\n for n in range(len(self._colorpoint_list)):\n\n c1 = self._colorpoint_list[n][1]\n c2 = self._colorpoint_list[n][2]\n\n # create a top-color button\n self._buttons_top_color.append(_qtw.QPushButton(self._central_widget))\n self._buttons_top_color[-1].setStyleSheet(\"background-color: rgb(\"+str(int(c2[0]*255))+\",\"+str(int(c2[1]*255))+\",\"+str(int(c2[2]*255))+\"); border-radius: 3px;\")\n\n # create a bottom-color button\n self._buttons_bottom_color.append(_qtw.QPushButton(self._central_widget))\n self._buttons_bottom_color[-1].setStyleSheet(\"background-color: rgb(\"+str(int(c1[0]*255))+\",\"+str(int(c1[1]*255))+\",\"+str(int(c1[2]*255))+\"); border-radius: 3px;\")\n\n # create color dialogs\n self._color_dialogs_top.append(_qtw.QColorDialog(self._central_widget))\n self._color_dialogs_top[-1].setCurrentColor(self._buttons_top_color[-1].palette().color(1))\n\n self._color_dialogs_bottom.append(_qtw.QColorDialog(self._central_widget))\n self._color_dialogs_bottom[-1].setCurrentColor(self._buttons_top_color[-1].palette().color(1))\n\n # create link checkboxes\n self._checkboxes.append(_qtw.QCheckBox(self._central_widget))\n self._checkboxes[-1].setChecked(c1==c2)\n\n # create a slider\n self._sliders.append(_qtw.QSlider(self._central_widget))\n self._sliders[-1].setOrientation(_qtcore.Qt.Horizontal)\n self._sliders[-1].setMaximum(1000)\n self._sliders[-1].setValue(int(self._colorpoint_list[n][0]*1000))\n self._sliders[-1].setFixedWidth(250)\n\n # create + and - buttons\n self._buttons_plus.append(_qtw.QPushButton(self._central_widget))\n 
self._buttons_plus[-1].setText(\"+\")\n self._buttons_plus[-1].setFixedWidth(25)\n\n self._buttons_minus.append(_qtw.QPushButton(self._central_widget))\n self._buttons_minus[-1].setText(\"-\")\n self._buttons_minus[-1].setFixedWidth(25)\n\n # layout\n self._layout.addWidget(self._buttons_bottom_color[-1], n+3,1, _qtcore.Qt.AlignCenter)\n self._layout.addWidget(self._checkboxes[-1], n+3,2, 1,1, _qtcore.Qt.AlignCenter)\n self._layout.addWidget(self._buttons_top_color[-1], n+3,3, _qtcore.Qt.AlignCenter)\n self._layout.addWidget(self._sliders[-1], n+3,4, 1,2, _qtcore.Qt.AlignCenter)\n self._layout.setColumnStretch(5,100)\n self._layout.addWidget(self._buttons_minus[-1], n+3,7, _qtcore.Qt.AlignCenter)\n self._layout.addWidget(self._buttons_plus[-1], n+3,6, _qtcore.Qt.AlignCenter)\n\n # connect the buttons and slider actions to the calls\n self._buttons_bottom_color[-1] .clicked.connect(_partial(self._color_button_clicked, n, 0))\n self._buttons_top_color[-1] .clicked.connect(_partial(self._color_button_clicked, n, 1))\n self._color_dialogs_bottom[-1].currentColorChanged.connect(_partial(self._color_dialog_changed, n, 0))\n self._color_dialogs_top[-1] .currentColorChanged.connect(_partial(self._color_dialog_changed, n, 1))\n\n self._buttons_plus[-1] .clicked.connect(_partial(self._button_plus_clicked, n))\n self._buttons_minus[-1] .clicked.connect(_partial(self._button_minus_clicked, n))\n self._sliders[-1] .valueChanged.connect(_partial(self._slider_changed, n))\n\n\n\n # disable the appropriate sliders\n self._sliders[0] .setDisabled(True)\n self._sliders[-1].setDisabled(True)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nload the selected cmap.", "response": "def _signal_load(self):\n \"\"\"\n Load the selected cmap.\n \"\"\"\n\n # set our name\n self.set_name(str(self._combobox_cmaps.currentText()))\n\n # load the colormap\n self.load_colormap()\n\n # rebuild the interface\n self._build_gui()\n\n 
self._button_save.setEnabled(False)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nsave the selected cmap.", "response": "def _button_save_clicked(self):\n \"\"\"\n Save the selected cmap.\n \"\"\"\n self.set_name(str(self._combobox_cmaps.currentText()))\n self.save_colormap()\n self._button_save.setEnabled(False)\n self._load_cmap_list()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _button_delete_clicked(self):\n name = str(self._combobox_cmaps.currentText())\n self.delete_colormap(name)\n self._combobox_cmaps.setEditText(\"\")\n self._load_cmap_list()", "response": "Delete the selected cmap."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the color of the slider.", "response": "def _color_dialog_changed(self, n, top, c):\n \"\"\"\n Updates the color of the slider.\n \"\"\"\n self._button_save.setEnabled(True)\n\n cp = self._colorpoint_list[n]\n\n # if they're linked, set both\n if self._checkboxes[n].isChecked():\n self.modify_colorpoint(n, cp[0], [c.red()/255.0, c.green()/255.0, c.blue()/255.0],\n [c.red()/255.0, c.green()/255.0, c.blue()/255.0])\n self._buttons_top_color [n].setStyleSheet(\"background-color: rgb(\"+str(c.red())+\",\"+str(c.green())+\",\"+str(c.green())+\"); border-radius: 3px;\")\n self._buttons_bottom_color[n].setStyleSheet(\"background-color: rgb(\"+str(c.red())+\",\"+str(c.green())+\",\"+str(c.green())+\"); border-radius: 3px;\")\n\n\n elif top:\n self.modify_colorpoint(n, cp[0], cp[1], [c.red()/255.0, c.green()/255.0, c.blue()/255.0])\n self._buttons_top_color [n].setStyleSheet(\"background-color: rgb(\"+str(c.red())+\",\"+str(c.green())+\",\"+str(c.green())+\"); border-radius: 3px;\")\n\n else:\n self.modify_colorpoint(n, cp[0], [c.red()/255.0, c.green()/255.0, c.blue()/255.0], cp[2])\n self._buttons_bottom_color[n].setStyleSheet(\"background-color: 
rgb(\"+str(c.red())+\",\"+str(c.green())+\",\"+str(c.green())+\"); border-radius: 3px;\")"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate a new colorpoint and insert it into the GUI", "response": "def _button_plus_clicked(self, n):\n \"\"\"\n Create a new colorpoint.\n \"\"\"\n self._button_save.setEnabled(True)\n\n self.insert_colorpoint(self._colorpoint_list[n][0],\n self._colorpoint_list[n][1],\n self._colorpoint_list[n][2])\n self._build_gui()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nremoving a new colorpoint.", "response": "def _button_minus_clicked(self, n):\n \"\"\"\n Remove a new colorpoint.\n \"\"\"\n self._button_save.setEnabled(True)\n\n self.pop_colorpoint(n)\n self._build_gui()"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _slider_changed(self, n):\n self._button_save.setEnabled(True)\n\n self.modify_colorpoint(n, self._sliders[n].value()*0.001, self._colorpoint_list[n][1], self._colorpoint_list[n][2])", "response": "Updates the colormap and plot\n "} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _load_cmap_list(self):\n # store the current name\n name = self.get_name()\n\n # clear the list\n self._combobox_cmaps.blockSignals(True)\n self._combobox_cmaps.clear()\n\n # list the existing contents\n paths = _settings.ListDir('colormaps')\n\n # loop over the paths and add the names to the list\n for path in paths:\n self._combobox_cmaps.addItem(_os.path.splitext(path)[0])\n\n # try to select the current name\n self._combobox_cmaps.setCurrentIndex(self._combobox_cmaps.findText(name))\n self._combobox_cmaps.blockSignals(False)", "response": "Searches the colormaps directory for all files and populates the list."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ndisplays a save dialog and returns the path 
of the selected file.", "response": "def save(filters='*.*', text='Save THIS, facehead!', default_directory='default_directory', force_extension=None):\n \"\"\"\n Pops up a save dialog and returns the string path of the selected file.\n \n Parameters\n ----------\n filters='*.*'\n Which file types should appear in the dialog.\n text='Save THIS, facehead!'\n Title text for the dialog.\n default_directory='default_directory'\n Key for the spinmob.settings default directory. If you use a name, e.g.\n 'my_defaultypoo', for one call of this function, the next time you use\n the same name, it will start in the last dialog's directory by default.\n force_extension=None\n Setting this to a string, e.g. 'txt', will enforce that the filename \n will have this extension. \n \"\"\"\n # make sure the filters contains \"*.*\" as an option!\n if not '*' in filters.split(';'): filters = filters + \";;All files (*)\"\n \n # if this type of pref doesn't exist, we need to make a new one\n if default_directory in _settings.keys(): default = _settings[default_directory]\n else: default = \"\"\n \n # pop up the dialog\n result = _qtw.QFileDialog.getSaveFileName(None,text,default,filters)\n \n # If Qt5, take the zeroth element\n if _s._qt.VERSION_INFO[0:5] == \"PyQt5\": result = result[0] \n \n # Make sure it's a string\n result = str(result)\n \n # Enforce the extension if necessary\n if not force_extension == None:\n \n # In case the user put \"*.txt\" instead of just \"txt\"\n force_extension = force_extension.replace('*','').replace('.','')\n\n # If it doesn't end with the right extension, add this.\n if not _os.path.splitext(result)[-1][1:] == force_extension:\n result = result + '.' 
+ force_extension\n\n if result == '': return None\n else:\n _settings[default_directory] = _os.path.split(result)[0]\n return result"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef load(filters=\"*.*\", text='Select a file, FACEFACE!', default_directory='default_directory'):\n # make sure the filters contains \"*.*\" as an option!\n if not '*' in filters.split(';'): filters = filters + \";;All files (*)\"\n \n # if this type of pref doesn't exist, we need to make a new one\n if default_directory in _settings.keys(): default = _settings[default_directory]\n else: default = \"\"\n \n # pop up the dialog\n result = _qtw.QFileDialog.getOpenFileName(None,text,default,filters)\n \n # If Qt5, take the zeroth element\n if _s._qt.VERSION_INFO[0:5] == \"PyQt5\": result = result[0]\n \n # Make sure it's a string\n result = str(result) \n \n if result == '': return None\n else:\n _settings[default_directory] = _os.path.split(result)[0]\n return result", "response": "Open a single file in a directory. 
Returns a string path or None."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef load_multiple(filters=\"*.*\", text='Select some files, FACEFACE!', default_directory='default_directory'):\n # make sure the filters contains \"*.*\" as an option!\n if not '*' in filters.split(';'): filters = filters + \";;All files (*)\"\n \n # if this type of pref doesn't exist, we need to make a new one\n if default_directory in _settings.keys(): default = _settings[default_directory]\n else: default = \"\"\n \n # pop up the dialog\n results = _qtw.QFileDialog.getOpenFileNames(None,text,default,filters)\n \n # If Qt5, take the zeroth element\n if _s._qt.VERSION_INFO[0:5] == \"PyQt5\": results = results[0]\n \n # Make sure it's a string\n result = []\n for r in results: result.append(str(r)) \n \n if len(result)==0: return\n else:\n _settings[default_directory] = _os.path.split(result[0])[0]\n return result", "response": "Open multiple files. Returns a list of string paths or None."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef Dump(self):\n prefs_file = open(self.prefs_path, 'w')\n for n in range(0,len(self.prefs)):\n if len(list(self.prefs.items())[n]) > 1:\n prefs_file.write(str(list(self.prefs.items())[n][0]) + ' = ' +\n str(list(self.prefs.items())[n][1]) + '\\n')\n prefs_file.close()", "response": "Dumps the current preferences to the preferences. 
txt file."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef link(self):\n\n type_specs = {}\n types = []\n\n for name, type_spec in self.scope.type_specs.items():\n type_spec = type_spec.link(self.scope)\n type_specs[name] = type_spec\n\n if type_spec.surface is not None:\n self.scope.add_surface(name, type_spec.surface)\n types.append(type_spec.surface)\n\n self.scope.type_specs = type_specs\n self.scope.add_surface('__types__', tuple(types))", "response": "Resolve and link all types in the scope."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef install(path, name=None):\n if name is None:\n name = os.path.splitext(os.path.basename(path))[0]\n\n callermod = inspect.getmodule(inspect.stack()[1][0])\n name = '%s.%s' % (callermod.__name__, name)\n\n if name in sys.modules:\n return sys.modules[name]\n\n if not os.path.isabs(path):\n callerfile = callermod.__file__\n path = os.path.normpath(\n os.path.join(os.path.dirname(callerfile), path)\n )\n\n sys.modules[name] = mod = load(path, name=name)\n return mod", "response": "Compiles a Thrift file and installs it as a submodule of the caller."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef loads(self, name, document):\n return self.compiler.compile(name, document).link().surface", "response": "Parse and compile the given Thrift IDL into a surface IDL."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef load(self, path, name=None):\n if name is None:\n name = os.path.splitext(os.path.basename(path))[0]\n # TODO do we care if the file extension is .thrift?\n\n with open(path, 'r') as f:\n document = f.read()\n\n return self.compiler.compile(name, document, path).link().surface", "response": "Load and compile the given Thrift file."} {"SOURCE": "codesearchnet", 
"instruction": "Implement a Python 3 function for\ngetting the URL for implicit flow user.", "response": "def get_url_implicit_flow_user(client_id, scope,\n redirect_uri='https://oauth.vk.com/blank.html', display='page',\n response_type='token', version=None, state=None, revoke=1):\n \"\"\"\n https://vk.com/dev/implicit_flow_user\n\n :return: url\n \"\"\"\n url = \"https://oauth.vk.com/authorize\"\n params = {\n \"client_id\": client_id,\n \"scope\": scope,\n \"redirect_uri\": redirect_uri,\n \"display\": display,\n \"response_type\": response_type,\n \"version\": version,\n \"state\": state,\n \"revoke\": revoke\n }\n\n params = {key: value for key, value in params.items() if value is not None}\n return u\"{url}?{params}\".format(url=url, params=urlencode(params))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_url_authcode_flow_user(client_id, redirect_uri, display=\"page\", scope=None, state=None):\n url = \"https://oauth.vk.com/authorize\"\n params = {\n \"client_id\": client_id,\n \"redirect_uri\": redirect_uri,\n \"display\": display,\n \"response_type\": \"code\"\n }\n\n if scope:\n params['scope'] = scope\n\n if state:\n params['state'] = state\n\n return u\"{url}?{params}\".format(url=url, params=urlencode(params))", "response": "This function returns a URL that can be used to get the authorization code for a user access token."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncall when someone presses the fire button.", "response": "def get_fake_data(*a):\n \"\"\"\n Called whenever someone presses the \"fire\" button.\n \"\"\"\n # add columns of data to the databox\n d['x'] = _n.linspace(0,10,100)\n d['y'] = _n.cos(d['x']) + 0.1*_n.random.rand(100)\n \n # update the curve \n c.setData(d['x'], d['y'])"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets the behavior of the SpinBox.", "response": "def setOpts(self, 
**opts):\n \"\"\"\n Changes the behavior of the SpinBox. Accepts most of the arguments \n allowed in :func:`__init__ `.\n \n \"\"\"\n #print opts\n for k in opts:\n if k == 'bounds':\n #print opts[k]\n self.setMinimum(opts[k][0], update=False)\n self.setMaximum(opts[k][1], update=False)\n #for i in [0,1]:\n #if opts[k][i] is None:\n #self.opts[k][i] = None\n #else:\n #self.opts[k][i] = D(unicode(opts[k][i]))\n elif k in ['step', 'minStep']:\n self.opts[k] = D(asUnicode(opts[k]))\n elif k == 'value':\n pass ## don't set value until bounds have been set\n else:\n self.opts[k] = opts[k]\n if 'value' in opts:\n self.setValue(opts['value'])\n \n ## If bounds have changed, update value to match\n if 'bounds' in opts and 'value' not in opts:\n self.setValue() \n \n ## sanity checks:\n if self.opts['int']:\n if 'step' in opts:\n step = opts['step']\n ## not necessary..\n #if int(step) != step:\n #raise Exception('Integer SpinBox must have integer step size.')\n else:\n self.opts['step'] = int(self.opts['step'])\n \n if 'minStep' in opts:\n step = opts['minStep']\n if int(step) != step:\n raise Exception('Integer SpinBox must have integer minStep size.')\n else:\n ms = int(self.opts.get('minStep', 1))\n if ms < 1:\n ms = 1\n self.opts['minStep'] = ms\n \n if 'delay' in opts:\n self.proxy.setDelay(opts['delay'])\n \n self.updateText()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nset the maximum allowed value for this option", "response": "def setMaximum(self, m, update=True):\n \"\"\"Set the maximum allowed value (or None for no limit)\"\"\"\n if m is not None:\n m = D(asUnicode(m))\n self.opts['bounds'][1] = m\n if update:\n self.setValue()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef selectNumber(self):\n le = self.lineEdit()\n text = asUnicode(le.text())\n if self.opts['suffix'] == '':\n le.setSelection(0, len(text))\n else:\n try:\n index = text.index(' ')\n except ValueError:\n 
return\n le.setSelection(0, index)", "response": "Select the numerical portion of the text to allow quick editing by the user."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the value of this SpinBox.", "response": "def value(self):\n \"\"\"\n Return the value of this SpinBox.\n \n \"\"\"\n if self.opts['int']:\n return int(self.val)\n else:\n return float(self.val)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsetting the value of the current spin.", "response": "def setValue(self, value=None, update=True, delaySignal=False):\n \"\"\"\n Set the value of this spin. \n If the value is out of bounds, it will be clipped to the nearest boundary.\n If the spin is integer type, the value will be coerced to int.\n Returns the actual value set.\n \n If value is None, then the current value is used (this is for resetting\n the value after bounds, etc. have changed)\n \"\"\"\n \n if value is None:\n value = self.value()\n \n bounds = self.opts['bounds']\n if bounds[0] is not None and value < bounds[0]:\n value = bounds[0]\n if bounds[1] is not None and value > bounds[1]:\n value = bounds[1]\n\n if self.opts['int']:\n value = int(value)\n\n value = D(asUnicode(value))\n if value == self.val:\n return\n prev = self.val\n \n self.val = value\n if update:\n self.updateText(prev=prev)\n \n self.sigValueChanging.emit(self, float(self.val)) ## change will be emitted in 300ms if there are no subsequent changes.\n if not delaySignal:\n self.emitChanged()\n \n return value"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns value of text. Return False if text is invalid raise exception if text is intermediate", "response": "def interpret(self):\n \"\"\"Return value of text. 
Return False if text is invalid, raise exception if text is intermediate\"\"\"\n strn = self.lineEdit().text()\n suf = self.opts['suffix']\n if len(suf) > 0:\n if strn[-len(suf):] != suf:\n return False\n #raise Exception(\"Units are invalid.\")\n strn = strn[:-len(suf)]\n try:\n val = fn.siEval(strn)\n except:\n #sys.excepthook(*sys.exc_info())\n #print \"invalid\"\n return False\n #print val\n return val"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef editingFinishedEvent(self):\n #print \"Edit finished.\"\n if asUnicode(self.lineEdit().text()) == self.lastText:\n #print \"no text change.\"\n return\n try:\n val = self.interpret()\n except:\n return\n \n if val is False:\n #print \"value invalid:\", str(self.lineEdit().text())\n return\n if val == self.val:\n #print \"no value change:\", val, self.val\n return\n self.setValue(val, delaySignal=False)", "response": "Edit has finished ; set value."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a factory for the given class.", "response": "def make_factory(self, cls, count):\n \"\"\" Get the generators from the Scaffolding class within the model.\n \"\"\"\n field_names = cls._meta.get_all_field_names()\n fields = {}\n text = []\n finalizer = None\n scaffold = scaffolding.scaffold_for_model(cls)\n\n for field_name in field_names:\n generator = getattr(scaffold, field_name, None)\n if generator:\n if hasattr(generator, 'set_up'):\n generator.set_up(cls, count)\n fields[field_name] = generator\n text.append(u'%s: %s; ' % (field_name, fields[field_name]))\n try:\n self.stdout.write(u'Generator for %s: %s\\n' % (cls, u''.join(text)))\n except models.ObjectDoesNotExist:\n self.stdout.write(u'Generator for %s\\n' % u''.join(text))\n\n if hasattr(scaffold, 'finalize') and hasattr(scaffold.finalize, '__call__'):\n finalizer = scaffold.finalize\n\n return fields, finalizer"} {"SOURCE": 
"codesearchnet", "instruction": "Write a Python 3 script to\ncompute the indel length accounting for wether it is an insertion or deletion.", "response": "def compute_indel_length(fs_df):\n \"\"\"Computes the indel length accounting for wether it is an insertion or\n deletion.\n\n Parameters\n ----------\n fs_df : pd.DataFrame\n mutation input as dataframe only containing indel mutations\n\n Returns\n -------\n indel_len : pd.Series\n length of indels\n \"\"\"\n indel_len = pd.Series(index=fs_df.index)\n indel_len[fs_df['Reference_Allele']=='-'] = fs_df['Tumor_Allele'][fs_df['Reference_Allele']=='-'].str.len()\n indel_len[fs_df['Tumor_Allele']=='-'] = fs_df['Reference_Allele'][fs_df['Tumor_Allele']=='-'].str.len()\n indel_len = indel_len.fillna(0).astype(int)\n return indel_len"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef keep_indels(mut_df,\n indel_len_col=True,\n indel_type_col=True):\n \"\"\"Filters out all mutations that are not indels.\n\n Requires that one of the alleles have '-' indicating either an insertion\n or deletion depending if found in reference allele or somatic allele\n columns, respectively.\n\n Parameters\n ----------\n mut_df : pd.DataFrame\n mutation input file as a dataframe in standard format\n indel_len_col : bool\n whether or not to add a column indicating the length of the indel\n\n Returns\n -------\n mut_df : pd.DataFrame\n mutations with only frameshift mutations kept\n \"\"\"\n # keep only frameshifts\n mut_df = mut_df[is_indel_annotation(mut_df)]\n\n if indel_len_col:\n # calculate length\n mut_df.loc[:, 'indel len'] = compute_indel_length(mut_df)\n\n if indel_type_col:\n is_ins = mut_df['Reference_Allele']=='-'\n is_del = mut_df['Tumor_Allele']=='-'\n mut_df['indel type'] = ''\n mut_df.loc[is_ins, 'indel type'] = 'INS'\n mut_df.loc[is_del, 'indel type'] = 'DEL'\n\n return mut_df", "response": "Filters out all mutations that are not indels."} {"SOURCE": "codesearchnet", 
"instruction": "Here you have a function in Python 3, explain what it does\ndef keep_frameshifts(mut_df,\n indel_len_col=True):\n \"\"\"Filters out all mutations that are not frameshift indels.\n\n Requires that one of the alleles have '-' indicating either an insertion\n or deletion depending if found in reference allele or somatic allele\n columns, respectively.\n\n Parameters\n ----------\n mut_df : pd.DataFrame\n mutation input file as a dataframe in standard format\n indel_len_col : bool\n whether or not to add a column indicating the length of the frameshift\n\n Returns\n -------\n mut_df : pd.DataFrame\n mutations with only frameshift mutations kept\n \"\"\"\n # keep only frameshifts\n mut_df = mut_df[is_frameshift_annotation(mut_df)]\n\n if indel_len_col:\n # calculate length\n mut_df.loc[:, 'indel len'] = compute_indel_length(mut_df)\n return mut_df", "response": "Filters out all mutations that are not frameshift indels."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_frameshift_lengths(num_bins):\n fs_len = []\n i = 1\n tmp_bins = 0\n while(tmp_bins 1:\n self.pos2context[0] = gene_seq.exon_seq[0] * 2\n self.context2pos.setdefault(gene_seq.exon_seq[0]*2, [])\n self.context2pos[gene_seq.exon_seq[0]*2].append(0)\n elif gene_seq.nuc_context in [1.5, 3]:\n # use the nucleotide context from chasm if nuc\n # context is 1.5 otherwise always use a three\n # nucleotide context\n ncontext = gene_seq.nuc_context\n for i in range(1, len(gene_seq.exon_seq)-1):\n nucs = gene_seq.exon_seq[i-1:i+2]\n if ncontext == 1.5:\n context = prob2020.python.mutation_context.get_chasm_context(nucs)\n else:\n context = nucs\n self.context2pos.setdefault(context, [])\n self.context2pos[context].append(i)\n self.pos2context[i] = context\n\n # sequence context for five prime splice site\n for i, five_ss in enumerate(gene_seq.five_prime_seq):\n first_nucs = five_ss[:3]\n second_nucs = five_ss[1:4]\n first_pos = 2*i + 
gene_len\n second_pos = 2*i + gene_len + 1\n if ncontext == 1.5:\n first_context = prob2020.python.mutation_context.get_chasm_context(first_nucs)\n second_context = prob2020.python.mutation_context.get_chasm_context(second_nucs)\n else:\n first_context = first_nucs\n second_context = second_nucs\n self.context2pos.setdefault(first_context, [])\n self.context2pos[first_context].append(first_pos)\n self.context2pos.setdefault(second_context, [])\n self.context2pos[second_context].append(second_pos)\n self.pos2context[first_pos] = first_context\n self.pos2context[second_pos] = second_context\n # sequence context for three prime splice site\n for i, three_ss in enumerate(gene_seq.three_prime_seq):\n first_nucs = three_ss[:3]\n second_nucs = three_ss[1:4]\n first_pos = 2*i + gene_len + five_ss_len\n second_pos = 2*i + gene_len + five_ss_len + 1\n if ncontext == 1.5:\n first_context = prob2020.python.mutation_context.get_chasm_context(first_nucs)\n second_context = prob2020.python.mutation_context.get_chasm_context(second_nucs)\n else:\n first_context = first_nucs\n second_context = second_nucs\n self.context2pos.setdefault(first_context, [])\n self.context2pos[first_context].append(first_pos)\n self.context2pos.setdefault(second_context, [])\n self.context2pos[second_context].append(second_pos)\n self.pos2context[first_pos] = first_context\n self.pos2context[second_pos] = second_context\n\n # hack solution for context for first nuc\n if gene_seq.exon_seq:\n first_nuc = gene_seq.exon_seq[0] + gene_seq.exon_seq[:2]\n if ncontext == 1.5:\n first_context = prob2020.python.mutation_context.get_chasm_context(first_nuc)\n else:\n first_context = first_nuc\n self.pos2context[0] = first_context\n self.context2pos.setdefault(first_context, [])\n self.context2pos[first_context].append(0)\n last_nuc = gene_seq.exon_seq[-2:] + gene_seq.exon_seq[-1]\n if ncontext == 1.5:\n last_context = prob2020.python.mutation_context.get_chasm_context(last_nuc)\n else:\n last_context = last_nuc\n 
last_pos = len(gene_seq.exon_seq) - 1\n self.pos2context[last_pos] = first_context\n self.context2pos.setdefault(last_context, [])\n self.context2pos[last_context].append(last_pos)\n else:\n # case where there is no context,\n # mutations occur with uniform probability at each\n # position\n for i in range(gene_len + five_ss_len + three_ss_len):\n self.pos2context[i] = 'None'\n self.context2pos['None'] = range(gene_len + five_ss_len + three_ss_len)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef random_context_pos(self, num, num_permutations, context):\n # make sure provide context is valid\n if not self.is_valid_context(context):\n error_msg = 'Context ({0}) was never seen in sequence.'.format(context)\n raise ValueError(error_msg)\n\n # make sure sampling is a positive integer\n if num < 1:\n error_msg = ('There must be at least one sample (specified {0}) '\n 'for a context'.format(num))\n raise ValueError(error_msg)\n\n # randomly select from available positions that fit the specified context\n available_pos = self.context2pos[context]\n random_pos = self.prng_dict[context].choice(available_pos, (num_permutations, num))\n return random_pos", "response": "Samples with replacement available positions matching the individual\n sequence context."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a list of random positions w/ replacement which match sequence context.", "response": "def random_pos(self, context_iterable, num_permutations):\n \"\"\"Obtains random positions w/ replacement which match sequence context.\n\n Parameters\n ----------\n context_iterable: iterable containing two element tuple\n Records number of mutations in each context. 
context_iterable\n should be something like [('AA', 5), ...].\n num_permutations : int\n Number of permutations used in the permutation test.\n\n Returns\n -------\n position_list : list\n Contains context string and the randomly chosen positions\n for that context.\n \"\"\"\n position_list = []\n for contxt, n in context_iterable:\n pos_array = self.random_context_pos(n, num_permutations, contxt)\n position_list.append([contxt, pos_array])\n return position_list"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nhandle parallelization of permutations by splitting work by chromosome.", "response": "def multiprocess_permutation(bed_dict, mut_df, opts, indel_df=None):\n \"\"\"Handles parallelization of permutations by splitting work\n by chromosome.\n \"\"\"\n chroms = sorted(bed_dict.keys(), key=lambda x: len(bed_dict[x]), reverse=True)\n multiprocess_flag = opts['processes']>0\n if multiprocess_flag:\n num_processes = opts['processes']\n else:\n num_processes = 1\n #file_handle = open(opts['output'], 'w')\n file_handle = opts['handle']\n mywriter = csv.writer(file_handle, delimiter='\\t', lineterminator='\\n')\n if opts['maf'] and opts['num_iterations']:\n header = ['Gene', 'strand', 'Chromosome', 'Start_Position',\n 'End_Position', 'Reference_Allele', 'Tumor_Allele',\n 'Context', 'DNA_Change', 'Protein_Change', 'Variant_Classification']\n elif opts['maf']:\n header = ['Gene', 'strand', 'Chromosome', 'Start_Position',\n 'End_Position', 'Reference_Allele', 'Tumor_Allele',\n 'DNA_Change', 'Protein_Change', 'Variant_Classification',\n 'Tumor_Sample', 'Tumor_Type']\n else:\n header = ['Gene', 'ID', 'gene length', 'non-silent snv', 'silent snv', 'nonsense', 'lost stop',\n 'splice site', 'lost start', 'missense', 'recurrent missense',\n 'normalized missense position entropy',]\n # add column header for scores, is user provided one\n if opts['score_dir']:\n header += ['Total Missense MGAEntropy', 'Total Missense VEST Score']\n # 
add indel columns\n header += ['frameshift indel', 'inframe indel', 'normalized mutation entropy']\n mywriter.writerow(header)\n num_iterations = opts['num_iterations']\n\n # simulate indel counts\n if opts['summary'] and num_iterations:\n fs_cts, inframe_cts, gene_names = indel.simulate_indel_counts(indel_df,\n bed_dict,\n num_iterations,\n opts['seed'])\n name2ix = {gene_names[z]: z for z in range(len(gene_names))}\n # just count observed indels\n elif opts['summary']:\n # get gene names\n gene_names = [mybed.gene_name\n for chrom in bed_dict\n for mybed in bed_dict[chrom]]\n name2ix = {gene_names[z]: z for z in range(len(gene_names))}\n\n # initiate count vectors\n inframe_cts = np.zeros((1, len(gene_names)))\n fs_cts = np.zeros((1, len(gene_names)))\n\n # populate observed counts\n indel_cts_dict = indel_df['Gene'].value_counts().to_dict()\n fs_cts_dict = indel_df[indel.is_frameshift_annotation(indel_df)]['Gene'].value_counts().to_dict()\n for mygene in indel_cts_dict:\n if mygene in name2ix:\n # gene should be found in BED file annotation\n ix = name2ix[mygene]\n fs_cts[0, ix] = 0 if mygene not in fs_cts_dict else fs_cts_dict[mygene]\n inframe_cts[0, ix] = indel_cts_dict[mygene] - fs_cts[0, ix]\n\n # simulate snvs\n obs_result = []\n for i in range(0, len(chroms), num_processes):\n if multiprocess_flag:\n pool = Pool(processes=num_processes)\n tmp_num_proc = len(chroms) - i if i + num_processes > len(chroms) else num_processes\n info_repeat = ((bed_dict[chroms[tmp_ix]], mut_df, opts)\n for tmp_ix in range(i, i+tmp_num_proc))\n process_results = pool.imap(singleprocess_permutation, info_repeat)\n process_results.next = utils.keyboard_exit_wrapper(process_results.next)\n try:\n # iterate through each chromosome result\n for chrom_result in process_results:\n # add columns for indels\n if opts['summary']:\n tmp_chrom_result = []\n for gname, grp in it.groupby(chrom_result, lambda x: x[0]):\n for l, row in enumerate(grp):\n gene_ix = name2ix[gname]\n fs_count = 
fs_cts[l, gene_ix]\n inframe_count = inframe_cts[l, gene_ix]\n missense_pos_ct = list(row.pop(-1).values()) # missense codon counts\n silent_pos_ct = [1 for l in range(row[4])]\n inactivating_ct = sum(row[5:9]) + fs_count\n tmp_count_list = missense_pos_ct + silent_pos_ct + [inactivating_ct, inframe_count]\n norm_ent = math.normalized_mutation_entropy(tmp_count_list)\n tmp_chrom_result.append(row+[fs_count, inframe_count, norm_ent])\n chrom_result = tmp_chrom_result\n\n # write output to file\n mywriter.writerows(chrom_result)\n except KeyboardInterrupt:\n pool.close()\n pool.join()\n logger.info('Exited by user. ctrl-c')\n sys.exit(0)\n pool.close()\n pool.join()\n else:\n # perform simulation\n info = (bed_dict[chroms[i]], mut_df, opts)\n chrom_results = singleprocess_permutation(info)\n\n # add indel columns\n if opts['summary']:\n tmp_chrom_result = []\n for gname, grp in it.groupby(chrom_results, lambda x: x[0]):\n for l, row in enumerate(grp):\n gene_ix = name2ix[gname]\n fs_count = fs_cts[l, gene_ix]\n inframe_count = inframe_cts[l, gene_ix]\n missense_pos_ct = list(row.pop(-1).values()) # missense codon counts\n silent_pos_ct = [1 for l in range(row[4])]\n inactivating_ct = sum(row[5:9]) + fs_count\n tmp_count_list = missense_pos_ct + silent_pos_ct + [inactivating_ct, inframe_count]\n norm_ent = math.normalized_mutation_entropy(tmp_count_list)\n tmp_chrom_result.append(row+[fs_count, inframe_count, norm_ent])\n chrom_results = tmp_chrom_result\n\n # write to file\n mywriter.writerows(chrom_results)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef multiprocess_permutation(bed_dict, mut_df, opts):\n chroms = sorted(bed_dict.keys())\n multiprocess_flag = opts['processes']>0\n if multiprocess_flag:\n num_processes = opts['processes']\n else:\n num_processes = 1\n num_permutations = opts['num_permutations']\n if not opts['by_sample']:\n obs_result = []\n else:\n uniq_samp = mut_df['Tumor_Sample'].unique()\n 
obs_result = pd.DataFrame(np.zeros((len(uniq_samp), len(cols))),\n index=uniq_samp, columns=cols)\n\n # initialize list containing output\n if not opts['score_dir']:\n result_list = [[0, 0, 0, 0, 0, 0, 0] for k in range(num_permutations)]\n else:\n result_list = [[0, 0, 0, 0, 0, 0, 0, 0, 0] for k in range(num_permutations)]\n\n # iterate over each chromosome\n for i in range(0, len(chroms), num_processes):\n if multiprocess_flag:\n pool = Pool(processes=num_processes)\n tmp_num_proc = len(chroms) - i if i + num_processes > len(chroms) else num_processes\n info_repeat = ((bed_dict[chroms[tmp_ix]], mut_df, opts)\n for tmp_ix in range(i, i+tmp_num_proc))\n process_results = pool.imap(singleprocess_permutation, info_repeat)\n process_results.next = utils.keyboard_exit_wrapper(process_results.next)\n try:\n for chrom_result, obs_mutations in process_results:\n for j in range(num_permutations):\n result_list[j][0] += chrom_result[j][0]\n result_list[j][1] += chrom_result[j][1]\n result_list[j][2] += chrom_result[j][2]\n result_list[j][3] += chrom_result[j][3]\n result_list[j][4] += chrom_result[j][4]\n result_list[j][5] += chrom_result[j][5]\n result_list[j][6] += chrom_result[j][6]\n if opts['score_dir']:\n result_list[j][7] += chrom_result[j][7]\n result_list[j][8] += chrom_result[j][8]\n\n if not opts['by_sample']:\n obs_result.append(obs_mutations)\n else:\n obs_result = obs_result + obs_mutations\n except KeyboardInterrupt:\n pool.close()\n pool.join()\n logger.info('Exited by user. 
ctrl-c')\n sys.exit(0)\n pool.close()\n pool.join()\n else:\n info = (bed_dict[chroms[i]], mut_df, opts)\n chrom_result, obs_mutations = singleprocess_permutation(info)\n for j in range(num_permutations):\n result_list[j][0] += chrom_result[j][0]\n result_list[j][1] += chrom_result[j][1]\n result_list[j][2] += chrom_result[j][2]\n result_list[j][3] += chrom_result[j][3]\n result_list[j][4] += chrom_result[j][4]\n result_list[j][5] += chrom_result[j][5]\n result_list[j][6] += chrom_result[j][6]\n if opts['score_dir']:\n result_list[j][7] += chrom_result[j][7]\n result_list[j][8] += chrom_result[j][8]\n if not opts['by_sample']:\n obs_result.append(obs_mutations)\n else:\n obs_result = obs_result + obs_mutations\n\n return result_list, obs_result", "response": "Performs a single - process permutation on a set of work items."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef retrieve_scores(gname, sdir,\n codon_pos, germ_aa, somatic_aa,\n default_mga=5., default_vest=0,\n no_file_flag=-1):\n \"\"\"Retrieves scores from pickle files.\n\n Used by summary script.\n\n \"\"\"\n # get variant types\n #var_class = cutils.get_variant_classification(germ_aa, somatic_aa, codon_pos)\n\n # get information about MGA entropy\n mga_path = os.path.join(sdir, gname+\".mgaentropy.pickle\")\n if os.path.exists(mga_path):\n if sys.version_info < (3,):\n # python 2.7 way\n with open(mga_path) as handle:\n mga_ent = pickle.load(handle)\n else:\n # python 3.X way\n with open(mga_path, 'rb') as handle:\n mga_ent = pickle.load(handle, encoding='latin-1')\n else:\n mga_ent = None\n missense_pos = [p for i, p in enumerate(codon_pos)\n if (germ_aa[i]!=somatic_aa[i]) and\n (germ_aa[i] not in ['-', '*', 'Splice_Site']) and\n (somatic_aa[i] not in ['-', '*', 'Splice_Site'])]\n total_mga_ent = compute_mga_entropy_stat(mga_ent, missense_pos, sum, default_mga)\n\n #mga_ent_ixs = [codon_pos[i] for i in range(len(var_class))\n #if 
var_class[i] == 'Missense_Mutation']\n #len_mga_ent = len(mga_ent)\n #mga_ent_scores = [mga_ent[ix] for ix in mga_ent_ixs if ix < len_mga_ent]\n #if mga_ent_scores:\n #total_mga_ent = sum(mga_ent_scores)\n #else:\n #total_mga_ent = default_mga\n #else:\n #total_mga_ent = no_file_flag\n\n # get information about VEST scores\n vest_path = os.path.join(sdir, gname+\".vest.pickle\")\n if os.path.exists(vest_path):\n if sys.version_info < (3,):\n # python 2.7 way\n with open(vest_path) as handle:\n vest_score = pickle.load(handle)\n else:\n # python 3.X way\n with open(vest_path, 'rb') as handle:\n vest_score = pickle.load(handle, encoding='latin-1')\n else:\n vest_score = None\n total_vest = compute_vest_stat(vest_score,\n germ_aa, somatic_aa, codon_pos,\n stat_func=sum, default_val=default_vest)\n #vest_scores = [vest_score.get(codon_pos[i]+1, {}).get(germ_aa[i], {}).get(somatic_aa[i], default_vest)\n #for i in range(len(var_class))\n #if var_class[i] == 'Missense_Mutation']\n #total_vest = sum(vest_scores)\n #else:\n #total_vest = no_file_flag\n return total_mga_ent, total_vest", "response": "Retrieves scores from pickle files."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef read_vest_pickle(gname, score_dir):\n vest_path = os.path.join(score_dir, gname+\".vest.pickle\")\n if os.path.exists(vest_path):\n if sys.version_info < (3,):\n with open(vest_path) as handle:\n gene_vest = pickle.load(handle)\n else:\n with open(vest_path, 'rb') as handle:\n gene_vest = pickle.load(handle, encoding='latin-1')\n return gene_vest\n else:\n return None", "response": "Reads in VEST scores for given gene."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncompute missense VEST score statistic for a single mutation list.", "response": "def compute_vest_stat(vest_dict, ref_aa, somatic_aa, codon_pos,\n stat_func=np.mean,\n default_val=0.0):\n \"\"\"Compute missense VEST 
score statistic.\n\n Note: non-missense mutations are intentially not filtered out and will take\n a default value of zero.\n\n Parameters\n ----------\n vest_dict : dict\n dictionary containing vest scores across the gene of interest\n ref_aa: list of str\n list of reference amino acids\n somatic_aa: list of str\n somatic mutation aa\n codon_pos : list of int\n position of codon in protein sequence\n stat_func : function, default=np.mean\n function that calculates a statistic\n default_val : float\n default value to return if there are no mutations\n\n Returns\n -------\n score_stat : float\n vest score statistic for provided mutation list\n \"\"\"\n # return default value if VEST scores are missing\n if vest_dict is None:\n return default_val\n\n # fetch scores\n myscores = fetch_vest_scores(vest_dict, ref_aa, somatic_aa, codon_pos)\n\n # calculate mean score\n if myscores:\n score_stat = stat_func(myscores)\n else:\n score_stat = default_val\n\n return score_stat"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef compute_mga_entropy_stat(mga_vec, codon_pos,\n stat_func=np.mean,\n default_val=0.0):\n \"\"\"Compute MGA entropy conservation statistic\n\n Parameters\n ----------\n mga_vec : np.array\n numpy vector containing MGA Entropy conservation scores for residues\n codon_pos : list of int\n position of codon in protein sequence\n stat_func : function, default=np.mean\n function that calculates a statistic\n default_val : float\n default value to return if there are no mutations\n\n Returns\n -------\n score_stat : float\n MGA entropy score statistic for provided mutation list\n \"\"\"\n # return default value if VEST scores are missing\n if mga_vec is None:\n return default_val\n\n # fetch scores\n myscores = fetch_mga_scores(mga_vec, codon_pos)\n\n # calculate mean score\n if myscores is not None and len(myscores):\n score_stat = stat_func(myscores)\n else:\n score_stat = default_val\n\n 
return score_stat", "response": "Compute the MGA Entropy statistic for a given codon."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting VEST scores for a given mutation.", "response": "def fetch_vest_scores(vest_dict,\n ref_aa, somatic_aa, codon_pos,\n default_vest=0.0):\n \"\"\"Get VEST scores from pre-computed scores in dictionary.\n\n Note: either all mutations should be missense or non-missense intended\n to have value equal to default.\n\n Parameters\n ----------\n vest_dict : dict\n dictionary containing vest scores across the gene of interest\n ref_aa: list of str\n list of reference amino acids\n somatic_aa: list of str\n somatic mutation aa\n codon_pos: list of int\n position of codon in protein sequence\n default_vest: float, default=0.0\n value to use if VEST score not available for a given mutation\n\n Returns\n -------\n vest_score_list: list\n score results for mutations\n \"\"\"\n vest_score_list = []\n for i in range(len(somatic_aa)):\n # make sure position is valid\n if codon_pos[i] is not None:\n tmp_score = vest_dict.get(codon_pos[i]+1, {}).get(ref_aa[i], {}).get(somatic_aa[i], default_vest)\n else:\n tmp_score = 0.0\n vest_score_list.append(tmp_score)\n return vest_score_list"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef fetch_mga_scores(mga_vec,\n codon_pos,\n default_mga=None):\n \"\"\"Get MGAEntropy scores from pre-computed scores in array.\n\n Parameters\n ----------\n mga_vec : np.array\n numpy vector containing MGA Entropy conservation scores for residues\n codon_pos: list of int\n position of codon in protein sequence\n default_mga: float or None, default=None\n value to use if MGA entropy score not available for a given mutation.\n Drop mutations if no default specified.\n\n Returns\n -------\n mga_ent_scores : np.array\n score results for MGA entropy conservation\n \"\"\"\n # keep only positions in range of MGAEntropy scores\n len_mga = 
len(mga_vec)\n good_codon_pos = [p for p in codon_pos if p < len_mga]\n\n # get MGAEntropy scores\n if good_codon_pos:\n mga_ent_scores = mga_vec[good_codon_pos]\n else:\n mga_ent_scores = None\n\n return mga_ent_scores", "response": "Get MGA Entropy conservation scores for a given codon."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef read_neighbor_graph_pickle(gname, graph_dir):\n graph_path = os.path.join(graph_dir, gname+\".pickle\")\n if os.path.exists(graph_path):\n with open(graph_path) as handle:\n gene_graph = pickle.load(handle)\n return gene_graph\n else:\n return None", "response": "Reads in neighbor graph for given gene."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef compute_ng_stat(gene_graph, pos_ct, alpha=.5):\n # skip if there are no missense mutations\n if not len(pos_ct):\n return 1.0, 0\n\n max_pos = max(gene_graph)\n codon_vals = np.zeros(max_pos+1)\n\n # smooth out mutation counts\n for pos in pos_ct:\n mut_count = pos_ct[pos]\n\n # update neighbor values\n neighbors = list(gene_graph[pos])\n num_neighbors = len(neighbors)\n codon_vals[neighbors] += alpha*mut_count\n\n # update self-value\n codon_vals[pos] += (1-alpha)*mut_count\n\n # compute the normalized entropy\n #total_cts = float(np.count_nonzero(codon_vals))\n #graph_score = mymath.normalized_mutation_entropy(codon_vals, total_cts=total_cts)\n\n # compute regular entropy\n p = codon_vals / np.sum(codon_vals)\n graph_score = mymath.shannon_entropy(p)\n\n # get coverage\n coverage = np.count_nonzero(p)\n\n return graph_score, coverage", "response": "Compute the clustering score for a gene on its neighbor graph."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncounts the total number of frameshifts in a single mutation.", "response": "def count_frameshift_total(mut_df,\n bed_path,\n use_unmapped=False,\n to_zero_based=False):\n \"\"\"Count 
frameshifts for each gene.\n\n Parameters\n ----------\n mut_df : pd.DataFrame\n mutation input\n bed_path : str\n path to BED file containing reference tx for genes\n use_unmapped : Bool\n flag indicating whether to include frameshifts not mapping\n to reference tx\n to_zero_based : Bool\n whether to convert end-coordinate to zero based for analysis\n\n Returns\n -------\n fs_cts_df : pd.DataFrame\n contains both total frameshift counts and frameshift counts\n not mappable to the reference transcript.\n \"\"\"\n if to_zero_based:\n mut_df['Start_Position'] = mut_df['Start_Position'] - 1\n\n fs_cts = {} # frameshift count information for each gene\n fs_df = indel.keep_frameshifts(mut_df)\n\n for bed in utils.bed_generator(bed_path):\n gene_df = fs_df[fs_df['Gene']==bed.gene_name]\n\n # find it frameshift actually is on gene annotation\n fs_pos = []\n for ix, row in gene_df.iterrows():\n indel_pos = [row['Start_Position'], row['End_Position']]\n coding_pos = bed.query_position(bed.strand, row['Chromosome'], indel_pos)\n fs_pos.append(coding_pos)\n\n # mark frameshifts that could not be mapped to reference tx\n gene_df['unmapped'] = [(1 if x is None else 0) for x in fs_pos]\n total_fs = len(gene_df)\n unmapped_fs = len(gene_df[gene_df['unmapped']==1])\n\n # filter out frameshifts that did not match reference tx\n if not use_unmapped:\n gene_df = gene_df[gene_df['unmapped']==0]\n total_fs -= unmapped_fs\n\n info = [total_fs, unmapped_fs,]\n fs_cts[bed.gene_name] = info\n\n # prepare counts into a dataframe\n fs_cts_df = pd.DataFrame.from_dict(fs_cts, orient='index')\n cols = ['total', 'unmapped',]\n fs_cts_df.columns = cols\n\n return fs_cts_df"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _fetch_3ss_fasta(fasta, gene_name, exon_num,\n chrom, strand, start, end):\n \"\"\"Retreives the 3' SS sequence flanking the specified exon.\n\n Returns a string in fasta format with the first line containing\n a \">\" 
and the second line contains the two base pairs of 3' SS.\n\n Parameters\n ----------\n fasta : pysam.Fastafile\n fasta object from pysam\n gene_name : str\n gene name used for fasta seq id\n exon_num : int\n the `exon_num` exon, used for seq id\n chrom : str\n chromsome\n strand : str\n strand, {'+', '-'}\n start : int\n 0-based start position\n end : int\n 0-based end position\n\n Returns\n -------\n ss_fasta : str\n string in fasta format with first line being seq id\n \"\"\"\n\n if strand == '-':\n ss_seq = fasta.fetch(reference=chrom,\n start=end-1,\n end=end+3)\n ss_seq = utils.rev_comp(ss_seq)\n elif strand == '+':\n ss_seq = fasta.fetch(reference=chrom,\n start=start-3,\n end=start+1)\n ss_fasta = '>{0};exon{1};3SS\\n{2}\\n'.format(gene_name,\n exon_num,\n ss_seq.upper())\n return ss_fasta", "response": "Retreives the 3 SS sequence flanking the specified exon."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef fetch_gene_fasta(gene_bed, fasta_obj):\n gene_fasta = ''\n strand = gene_bed.strand\n exons = gene_bed.get_exons()\n if strand == '-':\n exons.reverse() # order exons 5' to 3', so reverse if '-' strand\n\n # iterate over exons\n for i, exon in enumerate(exons):\n exon_seq = fasta_obj.fetch(reference=gene_bed.chrom,\n start=exon[0],\n end=exon[1]).upper()\n if strand == '-':\n exon_seq = utils.rev_comp(exon_seq)\n exon_fasta = '>{0};exon{1}\\n{2}\\n'.format(gene_bed.gene_name,\n i, exon_seq)\n\n # get splice site sequence\n if len(exons) == 1:\n # splice sites don't matter if there is no splicing\n ss_fasta = ''\n elif i == 0:\n # first exon only, get 3' SS\n ss_fasta = _fetch_5ss_fasta(fasta_obj, gene_bed.gene_name, i,\n gene_bed.chrom, strand, exon[0], exon[1])\n elif i == (len(exons) - 1):\n # last exon only, get 5' SS\n ss_fasta = _fetch_3ss_fasta(fasta_obj, gene_bed.gene_name, i,\n gene_bed.chrom, strand, exon[0], exon[1])\n else:\n # middle exon, get bot 5' and 3' SS\n fasta_3ss 
= _fetch_3ss_fasta(fasta_obj, gene_bed.gene_name, i,\n gene_bed.chrom, strand, exon[0], exon[1])\n fasta_5ss = _fetch_5ss_fasta(fasta_obj, gene_bed.gene_name, i,\n gene_bed.chrom, strand, exon[0], exon[1])\n ss_fasta = fasta_5ss + fasta_3ss\n\n gene_fasta += exon_fasta + ss_fasta\n\n return gene_fasta", "response": "Retreive a single gene in FASTA format."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _reset_seq(self):\n exon_seq_list, five_ss_seq_list, three_ss_seq_list = self._fetch_seq()\n self.exon_seq = ''.join(exon_seq_list)\n self.three_prime_seq = three_ss_seq_list\n self.five_prime_seq = five_ss_seq_list\n self._to_upper()", "response": "Updates the attributes of the object to the default values."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_germline_variants(self, germline_nucs, coding_pos):\n if len(germline_nucs) != len(coding_pos):\n raise ValueError('Each germline nucleotide should have a coding position')\n\n es = list(self.exon_seq)\n for i in range(len(germline_nucs)):\n gl_nuc, cpos = germline_nucs[i].upper(), coding_pos[i]\n if not utils.is_valid_nuc(gl_nuc):\n raise ValueError('{0} is not a valid nucleotide'.format(gl_nuc))\n if cpos >= 0:\n es[cpos] = gl_nuc\n self.exon_seq = ''.join(es)", "response": "This method adds potential germline variants into the gene sequence."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconverting sequences to upper case.", "response": "def _to_upper(self):\n \"\"\"Convert sequences to upper case.\"\"\"\n self.exon_seq = self.exon_seq.upper()\n self.three_prime_seq = [s.upper() for s in self.three_prime_seq]\n self.five_prime_seq = [s.upper() for s in self.five_prime_seq]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _fetch_seq(self):\n exons = []\n three_prime_ss = []\n five_prime_ss = []\n 
num_exons = self.bed.get_num_exons()\n for i in range(num_exons):\n # add exon sequence\n tmp_id = '{0};exon{1}'.format(self.bed.gene_name, i)\n tmp_exon = self.fasta.fetch(reference=tmp_id)\n exons.append(tmp_exon)\n\n # add splice site sequence\n tmp_id_3ss = '{0};3SS'.format(tmp_id)\n tmp_id_5ss = '{0};5SS'.format(tmp_id)\n if num_exons == 1:\n pass\n elif i == 0:\n tmp_5ss = self.fasta.fetch(tmp_id_5ss)\n five_prime_ss.append(tmp_5ss)\n elif i == (num_exons - 1):\n tmp_3ss = self.fasta.fetch(tmp_id_3ss)\n three_prime_ss.append(tmp_3ss)\n else:\n tmp_3ss = self.fasta.fetch(tmp_id_3ss)\n tmp_5ss = self.fasta.fetch(tmp_id_5ss)\n three_prime_ss.append(tmp_3ss)\n five_prime_ss.append(tmp_5ss)\n return exons, five_prime_ss, three_prime_ss", "response": "Fetches gene sequence from PySAM fasta object."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef correct_chrom_names(chroms):\n chrom_list = []\n for chrom in chroms:\n # fix chrom numbering\n chrom = str(chrom)\n chrom = chrom.replace('23', 'X')\n chrom = chrom.replace('24', 'Y')\n chrom = chrom.replace('25', 'Mt')\n if not chrom.startswith('chr'):\n chrom = 'chr' + chrom\n chrom_list.append(chrom)\n return chrom_list", "response": "Make sure chromosome names follow UCSC chr convention."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef cummin(x):\n for i in range(1, len(x)):\n if x[i-1] < x[i]:\n x[i] = x[i-1]\n return x", "response": "A python implementation of the cummin function in R"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncalculating the p - value for the number of inactivating SNV mutations.", "response": "def calc_deleterious_p_value(mut_info,\n unmapped_mut_info,\n sc,\n gs,\n bed,\n num_permutations,\n stop_thresh,\n del_threshold,\n pseudo_count,\n seed=None):\n \"\"\"Calculates the p-value for the number of inactivating SNV mutations.\n\n Calculates p-value 
based on how many simulations exceed the observed value.\n\n Parameters\n ----------\n mut_info : dict\n contains codon and amino acid residue information for mutations mappable\n to provided reference tx.\n unmapped_mut_info : dict\n contains codon/amino acid residue info for mutations that are NOT mappable\n to provided reference tx.\n fs_ct : int\n number of frameshifts for gene\n prob_inactive : float\n proportion of inactivating mutations out of total over all genes\n sc : SequenceContext\n object contains the nucleotide contexts for a gene such that new random\n positions can be obtained while respecting nucleotide context.\n gs : GeneSequence\n contains gene sequence\n bed : BedLine\n just used to return gene name\n num_permutations : int\n number of permutations to perform to estimate p-value. more permutations\n means more precision on the p-value.\n seed : int (Default: None)\n seed number to random number generator (None to be randomly set)\n \"\"\"\n #prng = np.random.RandomState(seed)\n if len(mut_info) > 0:\n mut_info['Coding Position'] = mut_info['Coding Position'].astype(int)\n mut_info['Context'] = mut_info['Coding Position'].apply(lambda x: sc.pos2context[x])\n\n # group mutations by context\n cols = ['Context', 'Tumor_Allele']\n unmapped_mut_df = pd.DataFrame(unmapped_mut_info)\n tmp_df = pd.concat([mut_info[cols], unmapped_mut_df[cols]])\n context_cts = tmp_df['Context'].value_counts()\n context_to_mutations = dict((name, group['Tumor_Allele'])\n for name, group in tmp_df.groupby('Context'))\n\n # get deleterious info for actual mutations\n aa_mut_info = mc.get_aa_mut_info(mut_info['Coding Position'],\n mut_info['Tumor_Allele'].tolist(),\n gs)\n ref_aa = aa_mut_info['Reference AA'] + unmapped_mut_info['Reference AA']\n somatic_aa = aa_mut_info['Somatic AA'] + unmapped_mut_info['Somatic AA']\n codon_pos = aa_mut_info['Codon Pos'] + unmapped_mut_info['Codon Pos']\n num_del = cutils.calc_deleterious_info(ref_aa, somatic_aa, codon_pos)\n #num_del = 
fs_ct + num_snv_del\n\n # skip permutation test if number of deleterious mutations is not at\n # least meet some user-specified threshold\n if num_del >= del_threshold:\n # perform permutations\n del_p_value = pm.deleterious_permutation(num_del,\n context_cts,\n context_to_mutations,\n sc, # sequence context obj\n gs, # gene sequence obj\n num_permutations,\n stop_thresh,\n pseudo_count)\n else:\n del_p_value = None\n else:\n num_del = 0\n del_p_value = None\n\n result = [bed.gene_name, num_del, del_p_value]\n return result"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef calc_protein_p_value(mut_info,\n unmapped_mut_info,\n sc,\n gs,\n bed,\n graph_dir,\n num_permutations,\n stop_thresh,\n min_recurrent,\n min_fraction):\n \"\"\"Computes the p-value for clustering on a neighbor graph composed\n of codons connected with edges if they are spatially near in 3D protein\n structure.\n\n Parameters\n ----------\n\n\n Returns\n -------\n\n \"\"\"\n if len(mut_info) > 0:\n mut_info['Coding Position'] = mut_info['Coding Position'].astype(int)\n mut_info['Context'] = mut_info['Coding Position'].apply(lambda x: sc.pos2context[x])\n\n # group mutations by context\n cols = ['Context', 'Tumor_Allele']\n unmapped_mut_df = pd.DataFrame(unmapped_mut_info)\n tmp_df = pd.concat([mut_info[cols], unmapped_mut_df[cols]])\n context_cts = tmp_df['Context'].value_counts()\n context_to_mutations = dict((name, group['Tumor_Allele'])\n for name, group in tmp_df.groupby('Context'))\n\n # get vest scores for gene if directory provided\n if graph_dir:\n gene_graph = scores.read_neighbor_graph_pickle(bed.gene_name, graph_dir)\n if gene_graph is None:\n logger.warning('Could not find neighbor graph for {0}, skipping . . 
.'.format(bed.gene_name))\n else:\n gene_graph = None\n\n # get recurrent info for actual mutations\n aa_mut_info = mc.get_aa_mut_info(mut_info['Coding Position'],\n mut_info['Tumor_Allele'].tolist(),\n gs)\n codon_pos = aa_mut_info['Codon Pos'] + unmapped_mut_info['Codon Pos']\n ref_aa = aa_mut_info['Reference AA'] + unmapped_mut_info['Reference AA']\n somatic_aa = aa_mut_info['Somatic AA'] + unmapped_mut_info['Somatic AA']\n num_recurrent, pos_ent, delta_pos_ent, pos_ct = cutils.calc_pos_info(codon_pos,\n ref_aa,\n somatic_aa,\n min_frac=min_fraction,\n min_recur=min_recurrent)\n try:\n # get vest score for actual mutations\n graph_score, coverage = scores.compute_ng_stat(gene_graph, pos_ct)\n\n # perform simulations to get p-value\n protein_p_value, norm_graph_score = pm.protein_permutation(\n graph_score, len(pos_ct), context_cts,\n context_to_mutations,\n sc, # sequence context obj\n gs, # gene sequence obj\n gene_graph, num_permutations, stop_thresh\n )\n except Exception as err:\n exc_info = sys.exc_info()\n norm_graph_score = 0.0\n protein_p_value = 1.0\n logger.warning('Codon numbering problem with '+bed.gene_name)\n\n else:\n norm_graph_score = 0.0\n protein_p_value = 1.0\n num_recurrent = 0\n\n result = [bed.gene_name, num_recurrent, norm_graph_score, protein_p_value]\n return result", "response": "Calculates the p - value for clustering on a neighbor graph composed of codons connected with edges."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef shannon_entropy(p):\n return -np.sum(np.where(p!=0, p * np.log2(p), 0))", "response": "Calculates the shannon entropy in bits."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncalculating the normalized mutation entropy based on a list of mutation counts.", "response": "def normalized_mutation_entropy(counts, total_cts=None):\n \"\"\"Calculate the normalized mutation entropy based on a list/array\n of mutation counts.\n\n 
Note: Any grouping of mutation counts together should be done before hand\n\n Parameters\n ----------\n counts : np.array_like\n array/list of mutation counts\n\n Returns\n -------\n norm_ent : float\n normalized entropy of mutation count distribution.\n \"\"\"\n cts = np.asarray(counts, dtype=float)\n if total_cts is None:\n total_cts = np.sum(cts)\n if total_cts > 1:\n p = cts / total_cts\n ent = shannon_entropy(p)\n max_ent = max_shannon_entropy(total_cts)\n norm_ent = ent / max_ent\n else:\n norm_ent = 1.0\n return norm_ent"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncompute the Kullback - Leibler divergence for discrete distributions.", "response": "def kl_divergence(p, q):\n \"\"\"Compute the Kullback-Leibler (KL) divergence for discrete distributions.\n\n Parameters\n ----------\n p : np.array\n \"Ideal\"/\"true\" Probability distribution\n q : np.array\n Approximation of probability distribution p\n\n Returns\n -------\n kl : float\n KL divergence of approximating p with the distribution q\n \"\"\"\n # make sure numpy arrays are floats\n p = p.astype(float)\n q = q.astype(float)\n\n # compute kl divergence\n kl = np.sum(np.where(p!=0, p*np.log2(p/q), 0))\n return kl"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef js_divergence(p, q):\n m = .5 * (p+q)\n js_div = .5*kl_divergence(p, m) + .5*kl_divergence(q, m)\n return js_div", "response": "Compute the Jensen - Shannon Divergence between two discrete distributions."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef js_distance(p, q):\n js_dist = np.sqrt(js_divergence(p, q))\n return js_dist", "response": "Compute the Jensen - Shannon distance between two discrete distributions."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nfilters out UTR regions from the exon list.", "response": "def 
_filter_utr(self, ex):\n \"\"\"Filter out UTR regions from the exon list (ie retain only coding regions).\n\n Coding regions are defined by the thickStart and thickEnd attributes.\n\n Parameters\n ----------\n ex : list of tuples\n list of exon positions, [(ex1_start, ex1_end), ...]\n\n Returns\n -------\n filtered_exons : list of tuples\n exons with UTR regions \"chopped\" out\n \"\"\"\n # define coding region\n coding_start = int(self.bed_tuple.thickStart)\n coding_end = int(self.bed_tuple.thickEnd)\n if (coding_end - coding_start) < 3:\n # coding regions should have at least one codon, otherwise the\n # region is invalid and does not indicate an actually coding region\n logger.debug('{0} has an invalid coding region specified by thickStart '\n 'and thickEnd (only {1} bps long). This gene is possibly either '\n 'a non-coding transcript or a pseudo gene.'.format(self.gene_name,\n coding_end-coding_start))\n return []\n\n filtered_exons = []\n for exon in ex:\n if exon[0] > coding_end and exon[1] > coding_end:\n # exon has no coding region\n pass\n elif exon[0] < coding_start and exon[1] < coding_start:\n # exon has no coding region\n pass\n elif exon[0] <= coding_start and exon[1] >= coding_end:\n # coding region entirely contained within one exon\n filtered_exons.append((coding_start, coding_end))\n elif exon[0] <= coding_start and exon[1] < coding_end:\n # only beginning of exon contains UTR\n filtered_exons.append((coding_start, exon[1]))\n elif exon[0] > coding_start and exon[1] >= coding_end:\n # only end part of exon contains UTR\n filtered_exons.append((exon[0], coding_end))\n elif exon[0] > coding_start and exon[1] < coding_end:\n # entire exon is coding\n filtered_exons.append(exon)\n else:\n # exon is only a UTR\n pass\n return filtered_exons"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _init_exons(self):\n exon_starts = [self.chrom_start + int(s)\n for s in 
self.bed_tuple.blockStarts.strip(',').split(',')]\n exon_sizes = list(map(int, self.bed_tuple.blockSizes.strip(',').split(',')))\n\n # get chromosome intervals\n exons = [(exon_starts[i], exon_starts[i] + exon_sizes[i])\n for i in range(len(exon_starts))]\n no_utr_exons = self._filter_utr(exons)\n self.exons = no_utr_exons\n self.exon_lens = [e[1] - e[0] for e in self.exons]\n self.num_exons = len(self.exons)\n self.cds_len = sum(self.exon_lens)\n self.five_ss_len = 2*(self.num_exons-1)\n self.three_ss_len = 2*(self.num_exons-1)\n self._init_splice_site_pos()", "response": "Sets a list of position intervals for each exon."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef init_genome_coordinates(self) :\n self.seqpos2genome = {}\n\n # record genome positions for each sequence position\n seq_pos = 0\n for estart, eend in self.exons:\n for genome_pos in range(estart, eend):\n if self.strand == '+':\n self.seqpos2genome[seq_pos] = genome_pos\n elif self.strand == '-':\n tmp = self.cds_len - seq_pos - 1\n self.seqpos2genome[tmp] = genome_pos\n seq_pos += 1\n\n # recode 5' splice site locations\n for i in range(0, self.five_ss_len):\n seq_pos = self.cds_len + i\n ss_ix = i // 2 # the ss_ix'th 5'ss starting from upstream tx\n pos_in_ss = i % 2 # whether first/second nuc in splice site\n\n # determine genome coordinates for 5' splice site\n if self.strand == '+':\n self.seqpos2genome[seq_pos] = self.exons[ss_ix][1] + pos_in_ss\n else:\n exon_pos = -1 - ss_ix\n self.seqpos2genome[seq_pos] = self.exons[exon_pos][0] - pos_in_ss - 1\n\n # recode 3' splice site locations\n for i in range(0, self.three_ss_len):\n seq_pos = self.cds_len + self.five_ss_len + i\n ss_ix = i // 2 # the ss_ix'th 3'ss starting from upstream tx\n pos_in_ss = i % 2 # whether first/second nuc in splice site\n\n # determine genome coordinates for 3' splice site\n if self.strand == '+':\n self.seqpos2genome[seq_pos] = self.exons[ss_ix+1][0] - 2 + pos_in_ss\n 
else:\n exon_pos = -1 - ss_ix\n self.seqpos2genome[seq_pos] = self.exons[exon_pos-1][1] + 1 - pos_in_ss", "response": "Creates the self. seqpos2genome dictionary that converts positions relative to the sequence to genome coordinates."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the relative position on the coding sequence for a given genomic position.", "response": "def query_position(self, strand, chr, genome_coord):\n \"\"\"Provides the relative position on the coding sequence for a given\n genomic position.\n\n Parameters\n ----------\n chr : str\n chromosome, provided to check validity of query\n genome_coord : int\n 0-based position for mutation, actually used to get relative coding pos\n\n Returns\n -------\n pos : int or None\n position of mutation in coding sequence, returns None if mutation\n does not match region found in self.exons\n \"\"\"\n # first check if valid\n pos = None # initialize to invalid pos\n if chr != self.chrom:\n #logger.debug('Wrong chromosome queried. 
You provided {0} but gene is '\n #'on {1}.'.format(chr, self.chrom))\n # return pos\n pass\n\n if type(genome_coord) is list:\n # handle case for indels\n pos_left = self.query_position(strand, chr, genome_coord[0])\n pos_right = self.query_position(strand, chr, genome_coord[1])\n if pos_left is not None or pos_right is not None:\n return [pos_left, pos_right]\n else:\n return None\n\n # return position if contained within coding region or splice site\n for i, (estart, eend) in enumerate(self.exons):\n # in coding region\n if estart <= genome_coord < eend:\n if strand == '+':\n prev_lens = sum(self.exon_lens[:i]) # previous exon lengths\n pos = prev_lens + (genome_coord - estart)\n elif strand == '-':\n prev_lens = sum(self.exon_lens[:i]) # previous exon lengths\n pos = prev_lens + (genome_coord - estart)\n pos = self.cds_len - pos - 1 # flip coords because neg strand\n return pos\n # in splice site\n elif (eend <= genome_coord < eend + 2) and i != self.num_exons-1:\n if strand == '+':\n pos = self.cds_len + 2*i + (genome_coord - eend)\n elif strand == '-':\n pos = self.cds_len + self.five_ss_len + 2*(self.num_exons-(i+2)) + (genome_coord - eend)\n return pos\n # in splice site\n elif (estart - 2 <= genome_coord < estart) and i != 0:\n if strand == '-':\n pos = self.cds_len + 2*(self.num_exons-(i+2)) + (genome_coord - (estart - 2))\n elif strand == '+':\n pos = self.cds_len + self.five_ss_len + 2*(i-1) + (genome_coord - (estart - 2))\n return pos\n\n return pos"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef start_logging(log_file='', log_level='INFO', verbose=False):\n if not log_file:\n # create log directory if it doesn't exist\n log_dir = os.path.abspath('log') + '/'\n if not os.path.isdir(log_dir):\n os.mkdir(log_dir)\n\n # path to new log file\n log_file = log_dir + 'log.run.' 
+ str(datetime.datetime.now()).replace(':', '.') + '.txt'\n\n # logger options\n lvl = logging.DEBUG if log_level.upper() == 'DEBUG' else logging.INFO\n\n # ignore warnings if not in debug\n if log_level.upper() != 'DEBUG':\n warnings.filterwarnings('ignore')\n\n # define logging format\n if verbose:\n myformat = '%(asctime)s - %(name)s - %(levelname)s \\n>>> %(message)s'\n else:\n myformat = '%(message)s'\n\n # create logger\n if not log_file == 'stdout':\n # normal logging to a regular file\n logging.basicConfig(level=lvl,\n format=myformat,\n filename=log_file,\n filemode='w')\n else:\n # logging to stdout\n root = logging.getLogger()\n root.setLevel(lvl)\n stdout_stream = logging.StreamHandler(sys.stdout)\n stdout_stream.setLevel(lvl)\n formatter = logging.Formatter(myformat)\n stdout_stream.setFormatter(formatter)\n root.addHandler(stdout_stream)\n root.propagate = True", "response": "Start logging information into the log directory."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nwriting exception to log file if occured in decorated function. 
This decorator wrapper is needed for multiprocess logging since otherwise the python multiprocessing module will obscure the actual line of the error.", "response": "def log_error_decorator(f):\n \"\"\"Writes exception to log file if occured in decorated function.\n\n This decorator wrapper is needed for multiprocess logging since otherwise\n the python multiprocessing module will obscure the actual line of the error.\n \"\"\"\n @wraps(f)\n def wrapper(*args, **kwds):\n try:\n result = f(*args, **kwds)\n return result\n except KeyboardInterrupt:\n logger.info('Ctrl-C stopped a process.')\n except Exception as e:\n logger.exception(e)\n raise\n return wrapper"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef filter_list(mylist, bad_ixs):\n # indices need to be in reverse order for filtering\n # to prevent .pop() from yielding eroneous results\n bad_ixs = sorted(bad_ixs, reverse=True)\n for i in bad_ixs:\n mylist.pop(i)\n return mylist", "response": "Removes indices from a list."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets reverse complement of sequence.", "response": "def rev_comp(seq):\n \"\"\"Get reverse complement of sequence.\n\n rev_comp will maintain the case of the sequence.\n\n Parameters\n ----------\n seq : str\n nucleotide sequence. 
valid {a, c, t, g, n}\n\n Returns\n -------\n rev_comp_seq : str\n reverse complement of sequence\n \"\"\"\n rev_seq = seq[::-1]\n rev_comp_seq = ''.join([base_pairing[s] for s in rev_seq])\n return rev_comp_seq"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\niterates through a BED file yielding parsed BED lines.", "response": "def bed_generator(bed_path):\n \"\"\"Iterates through a BED file yielding parsed BED lines.\n\n Parameters\n ----------\n bed_path : str\n path to BED file\n\n Yields\n ------\n BedLine(line) : BedLine\n A BedLine object which has parsed the individual line in\n a BED file.\n \"\"\"\n with open(bed_path) as handle:\n bed_reader = csv.reader(handle, delimiter='\\t')\n for line in bed_reader:\n yield BedLine(line)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nread a BED file and populates a dictionary separating genes by chromosome.", "response": "def read_bed(file_path, restricted_genes=None):\n \"\"\"Reads BED file and populates a dictionary separating genes\n by chromosome.\n\n Parameters\n ----------\n file_path : str\n path to BED file\n filtered_genes: list\n list of gene names to not use\n\n Returns\n -------\n bed_dict: dict\n dictionary mapping chromosome keys to a list of BED lines\n \"\"\"\n # read in entire bed file into a dict with keys as chromsomes\n bed_dict = OrderedDict()\n for bed_row in bed_generator(file_path):\n is_restrict_flag = restricted_genes is None or bed_row.gene_name in restricted_genes\n if is_restrict_flag:\n bed_dict.setdefault(bed_row.chrom, [])\n bed_dict[bed_row.chrom].append(bed_row)\n sort_chroms = sorted(bed_dict.keys(), key=lambda x: len(bed_dict[x]), reverse=True)\n bed_dict = OrderedDict((chrom, bed_dict[chrom]) for chrom in sort_chroms)\n return bed_dict"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nremove invalid and correct mutations for 1 - based coordinates.", "response": "def 
_fix_mutation_df(mutation_df, only_unique=False):\n \"\"\"Drops invalid mutations and corrects for 1-based coordinates.\n\n TODO: Be smarter about what coordinate system is put in the provided\n mutations.\n\n Parameters\n ----------\n mutation_df : pd.DataFrame\n user provided mutations\n only_unique : bool\n flag indicating whether only unique mutations for each tumor sample\n should be kept. This avoids issues when the same mutation has\n duplicate reportings.\n\n Returns\n -------\n mutation_df : pd.DataFrame\n mutations filtered for being valid and correct mutation type. Also\n converted 1-base coordinates to 0-based.\n\n \"\"\"\n # only keep allowed mutation types\n orig_len = len(mutation_df) # number of mutations before filtering\n mutation_df = mutation_df[mutation_df.Variant_Classification.isin(variant_snv)] # only keep SNV\n type_len = len(mutation_df) # number of mutations after filtering based on mut type\n\n # log the number of dropped mutations\n log_msg = ('Dropped {num_dropped} mutations after only keeping '\n '{mut_types}. 
Indels are processed separately.'.format(num_dropped=orig_len-type_len,\n mut_types=', '.join(variant_snv)))\n logger.info(log_msg)\n\n # check if mutations are valid SNVs\n valid_nuc_flag = (mutation_df['Reference_Allele'].apply(is_valid_nuc) & \\\n mutation_df['Tumor_Allele'].apply(is_valid_nuc))\n mutation_df = mutation_df[valid_nuc_flag] # filter bad lines\n mutation_df = mutation_df[mutation_df['Tumor_Allele'].apply(lambda x: len(x)==1)]\n mutation_df = mutation_df[mutation_df['Reference_Allele'].apply(lambda x: len(x)==1)]\n valid_len = len(mutation_df)\n\n # log the number of dropped mutations\n log_msg = ('Dropped {num_dropped} mutations after only keeping '\n 'valid SNVs'.format(num_dropped=type_len-valid_len))\n logger.info(log_msg)\n\n # drop duplicate mutations\n if only_unique:\n dup_cols = ['Tumor_Sample', 'Chromosome', 'Start_Position',\n 'End_Position', 'Reference_Allele', 'Tumor_Allele']\n mutation_df = mutation_df.drop_duplicates(subset=dup_cols)\n\n # log results of de-duplication\n dedup_len = len(mutation_df)\n log_msg = ('Dropped {num_dropped} mutations when removing '\n 'duplicates'.format(num_dropped=valid_len-dedup_len))\n logger.info(log_msg)\n\n # add dummy Protein_Change or Tumor_Type columns if not provided\n # in file\n if 'Tumor_Type' not in mutation_df.columns:\n mutation_df['Tumor_Type'] = ''\n if 'Protein_Change' not in mutation_df.columns:\n mutation_df['Protein_Change'] = ''\n\n # correct for 1-based coordinates\n mutation_df['Start_Position'] = mutation_df['Start_Position'].astype(int) - 1\n return mutation_df"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncalculates the sum of mutations within a particular window around a particular mutated amino acid.", "response": "def calc_windowed_sum(aa_mut_pos,\n germ_aa,\n somatic_aa,\n window=[3]):\n \"\"\"Calculate the sum of mutations within a window around a particular mutated\n codon.\n\n Parameters\n ----------\n aa_mut_pos : list\n 
list of mutated amino acid positions\n germ_aa : list\n Reference amino acid\n somatic_aa : list\n Somatic amino acid (if missense)\n window : list\n List of windows to calculate for\n\n Returns\n -------\n pos_ctr : dict\n dictionary of mutated positions (key) with associated counts (value)\n pos_sum : dict of dict\n Window size as first key points to dictionary of mutated positions (key)\n with associated mutation count within the window size (value)\n \"\"\"\n pos_ctr, pos_sum = {}, {w: {} for w in window}\n num_pos = len(aa_mut_pos)\n # figure out the missense mutations\n for i in range(num_pos):\n pos = aa_mut_pos[i]\n # make sure mutation is missense\n if germ_aa[i] and somatic_aa[i] and germ_aa[i] != '*' and \\\n somatic_aa[i] != '*' and germ_aa[i] != somatic_aa[i]:\n # should have a position, but if not skip it\n if pos is not None:\n pos_ctr.setdefault(pos, 0)\n pos_ctr[pos] += 1\n\n # calculate windowed sum\n pos_list = sorted(pos_ctr.keys())\n max_window = max(window)\n for ix, pos in enumerate(pos_list):\n tmp_sum = {w: 0 for w in window}\n # go through the same and lower positions\n for k in reversed(range(ix+1)):\n pos2 = pos_list[k]\n if pos2 < pos-max_window: break\n for w in window:\n if pos-w <= pos2:\n tmp_sum[w] += pos_ctr[pos2]\n # go though the higher positions\n for l in range(ix+1, len(pos_list)):\n pos2 = pos_list[l]\n if pos2 > pos+max_window: break\n for w in window:\n if pos2 <= pos+w:\n tmp_sum[w] += pos_ctr[pos2]\n # iterate through all other positions\n #for pos2 in pos_list:\n #for w in window:\n #if pos-w <= pos2 <= pos+w:\n #tmp_sum[w] += pos_ctr[pos2]\n # update windowed counts\n for w in window:\n pos_sum[w][pos] = tmp_sum[w]\n\n return pos_ctr, pos_sum"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_all_context_names(context_num):\n if context_num == 0:\n return ['None']\n elif context_num == 1:\n return ['A', 'C', 'T', 'G']\n elif context_num == 1.5:\n return ['C*pG', 'CpG*', 
'TpC*', 'G*pA',\n 'A', 'C', 'T', 'G']\n elif context_num == 2:\n dinucs = list(set(\n [d1+d2\n for d1 in 'ACTG'\n for d2 in 'ACTG']\n ))\n return dinucs\n elif context_num == 3:\n trinucs = list(set(\n [t1+t2+t3\n for t1 in 'ACTG'\n for t2 in 'ACTG'\n for t3 in 'ACTG']\n ))\n return trinucs", "response": "Returns a list of all base context names for a given context number."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_chasm_context(tri_nuc):\n # check if string is correct length\n if len(tri_nuc) != 3:\n raise ValueError('Chasm context requires a three nucleotide string '\n '(Provided: \"{0}\")'.format(tri_nuc))\n\n # try dinuc context if found\n if tri_nuc[1:] == 'CG':\n return 'C*pG'\n elif tri_nuc[:2] == 'CG':\n return 'CpG*'\n elif tri_nuc[:2] == 'TC':\n return 'TpC*'\n elif tri_nuc[1:] == 'GA':\n return 'G*pA'\n else:\n # just return single nuc context\n return tri_nuc[1]", "response": "Returns the ChASM s mutation context acording to CHASM s NUC string."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nretrieves relevant information about the effect of a somatic mutation effect on the amino acid of a gene.", "response": "def get_aa_mut_info(coding_pos, somatic_base, gene_seq):\n \"\"\"Retrieves relevant information about the effect of a somatic\n SNV on the amino acid of a gene.\n\n Information includes the germline codon, somatic codon, codon\n position, germline AA, and somatic AA.\n\n Parameters\n ----------\n coding_pos : iterable of ints\n Contains the base position (0-based) of the mutations\n somatic_base : list of str\n Contains the somatic nucleotide for the mutations\n gene_seq : GeneSequence\n gene sequence\n\n Returns\n -------\n aa_info : dict\n information about the somatic mutation effect on AA's\n \"\"\"\n # if no mutations return empty result\n if not somatic_base:\n aa_info = {'Reference Codon': [],\n 'Somatic Codon': [],\n 
'Codon Pos': [],\n 'Reference Nuc': [],\n 'Reference AA': [],\n 'Somatic AA': []}\n return aa_info\n\n # get codon information into three lists\n ref_codon, codon_pos, pos_in_codon, ref_nuc = zip(*[cutils.pos_to_codon(gene_seq, p)\n for p in coding_pos])\n ref_codon, codon_pos, pos_in_codon, ref_nuc = list(ref_codon), list(codon_pos), list(pos_in_codon), list(ref_nuc)\n\n # construct codons for mutations\n mut_codon = [(list(x) if x != 'Splice_Site' else []) for x in ref_codon]\n for i in range(len(mut_codon)):\n # splice site mutations are not in a codon, so skip such mutations to\n # prevent an error\n if pos_in_codon[i] is not None:\n pc = pos_in_codon[i]\n mut_codon[i][pc] = somatic_base[i]\n mut_codon = [(''.join(x) if x else 'Splice_Site') for x in mut_codon]\n\n # output resulting info\n aa_info = {'Reference Codon': ref_codon,\n 'Somatic Codon': mut_codon,\n 'Codon Pos': codon_pos,\n 'Reference Nuc': ref_nuc,\n 'Reference AA': [(utils.codon_table[r] if (r in utils.codon_table) else None)\n for r in ref_codon],\n 'Somatic AA': [(utils.codon_table[s] if (s in utils.codon_table) else None)\n for s in mut_codon]}\n\n return aa_info"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nhandling result from TSG process and converts to dataframe.", "response": "def handle_tsg_results(permutation_result):\n \"\"\"Handles result from TSG results.\n\n Takes in output from multiprocess_permutation function and converts to\n a better formatted dataframe.\n\n Parameters\n ----------\n permutation_result : list\n output from multiprocess_permutation\n\n Returns\n -------\n permutation_df : pd.DataFrame\n formatted output suitable to save\n \"\"\"\n permutation_df = pd.DataFrame(sorted(permutation_result, key=lambda x: x[2] if x[2] is not None else 1.1),\n columns=['gene', 'inactivating count', 'inactivating p-value',\n 'Total SNV Mutations', 'SNVs Unmapped to Ref Tx'])\n permutation_df['inactivating p-value'] = permutation_df['inactivating 
p-value'].astype('float')\n tmp_df = permutation_df[permutation_df['inactivating p-value'].notnull()]\n\n # get benjamani hochberg adjusted p-values\n permutation_df['inactivating BH q-value'] = np.nan\n permutation_df.loc[tmp_df.index, 'inactivating BH q-value'] = mypval.bh_fdr(tmp_df['inactivating p-value'])\n\n # sort output by p-value. due to no option to specify NaN order in\n # sort, the df needs to sorted descendingly and then flipped\n permutation_df = permutation_df.sort_values(by='inactivating p-value', ascending=False)\n permutation_df = permutation_df.reindex(index=permutation_df.index[::-1])\n\n # order result\n permutation_df = permutation_df.set_index('gene', drop=False)\n col_order = ['gene', 'Total SNV Mutations', 'SNVs Unmapped to Ref Tx',\n #'Total Frameshift Mutations', 'Frameshifts Unmapped to Ref Tx',\n 'inactivating count', 'inactivating p-value',\n 'inactivating BH q-value']\n return permutation_df[col_order]"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef handle_oncogene_results(permutation_result, num_permutations):\n mycols = ['gene', 'num recurrent', 'position entropy',\n 'mean vest score', 'entropy p-value',\n 'vest p-value', 'Total Mutations', 'Unmapped to Ref Tx']\n permutation_df = pd.DataFrame(permutation_result, columns=mycols)\n\n # get benjamani hochberg adjusted p-values\n permutation_df['entropy BH q-value'] = mypval.bh_fdr(permutation_df['entropy p-value'])\n permutation_df['vest BH q-value'] = mypval.bh_fdr(permutation_df['vest p-value'])\n\n # combine p-values\n permutation_df['tmp entropy p-value'] = permutation_df['entropy p-value']\n permutation_df['tmp vest p-value'] = permutation_df['vest p-value']\n permutation_df.loc[permutation_df['entropy p-value']==0, 'tmp entropy p-value'] = 1. / num_permutations\n permutation_df.loc[permutation_df['vest p-value']==0, 'tmp vest p-value'] = 1. 
/ num_permutations\n permutation_df['combined p-value'] = permutation_df[['entropy p-value', 'vest p-value']].apply(mypval.fishers_method, axis=1)\n permutation_df['combined BH q-value'] = mypval.bh_fdr(permutation_df['combined p-value'])\n del permutation_df['tmp vest p-value']\n del permutation_df['tmp entropy p-value']\n\n # order output\n permutation_df = permutation_df.set_index('gene', drop=False) # make sure genes are indices\n permutation_df['num recurrent'] = permutation_df['num recurrent'].fillna(-1).astype(int) # fix dtype isssue\n col_order = ['gene', 'Total Mutations', 'Unmapped to Ref Tx',\n 'num recurrent', 'position entropy',\n 'mean vest score', 'entropy p-value',\n 'vest p-value', 'combined p-value', 'entropy BH q-value',\n 'vest BH q-value', 'combined BH q-value']\n permutation_df = permutation_df.sort_values(by=['combined p-value'])\n return permutation_df[col_order]", "response": "Takes in output from multiprocess_permutation function and converts to\n a better formatted dataframe."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ntake in output from multiprocess_permutation function and converts to a better formatted dataframe.", "response": "def handle_hotmaps_results(permutation_result):\n \"\"\"Takes in output from multiprocess_permutation function and converts to\n a better formatted dataframe.\n\n Parameters\n ----------\n permutation_result : list\n output from multiprocess_permutation\n\n Returns\n -------\n permutation_df : pd.DataFrame\n formatted output suitable to save\n \"\"\"\n if len(permutation_result[0]) == 6:\n mycols = ['gene', 'window length', 'codon position', 'mutation count',\n 'windowed sum', 'p-value']\n else:\n mycols = ['gene', 'window length', 'codon position', 'index', 'mutation count',\n 'windowed sum', 'p-value']\n\n permutation_df = pd.DataFrame(permutation_result, columns=mycols)\n\n # get benjamani hochberg adjusted p-values\n permutation_df['q-value'] = 1\n for w in 
permutation_df['window length'].unique():\n is_window = permutation_df['window length'] == w\n permutation_df.loc[is_window, 'q-value'] = mypval.bh_fdr(permutation_df.loc[is_window, 'p-value'])\n #permutation_df['q-value'] = mypval.bh_fdr(permutation_df['p-value'])\n\n # order output\n #permutation_df = permutation_df.set_index('gene', drop=False) # make sure genes are indices\n col_order = mycols + ['q-value']\n permutation_df = permutation_df.sort_values(by=['window length', 'p-value'])\n return permutation_df[col_order]"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef handle_protein_results(permutation_result):\n mycols = ['gene', 'num recurrent', 'normalized graph-smoothed position entropy',\n 'normalized graph-smoothed position entropy p-value',\n 'Total Mutations', 'Unmapped to Ref Tx']\n permutation_df = pd.DataFrame(permutation_result, columns=mycols)\n\n # get benjamani hochberg adjusted p-values\n permutation_df['normalized graph-smoothed position entropy BH q-value'] = mypval.bh_fdr(permutation_df['normalized graph-smoothed position entropy p-value'])\n\n # order output\n permutation_df = permutation_df.set_index('gene', drop=False) # make sure genes are indices\n col_order = ['gene', 'Total Mutations', 'Unmapped to Ref Tx',\n 'num recurrent',\n 'normalized graph-smoothed position entropy',\n 'normalized graph-smoothed position entropy p-value',\n 'normalized graph-smoothed position entropy BH q-value']\n permutation_df = permutation_df.sort_values(by=['normalized graph-smoothed position entropy p-value'])\n return permutation_df[col_order]", "response": "Takes in output from multiprocess_permutation function and converts to\n a better formatted dataframe."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef handle_effect_results(permutation_result):\n mycols = ['gene', 'num recurrent', 'num inactivating', 
'entropy-on-effect',\n 'entropy-on-effect p-value',\n 'Total Mutations', 'Unmapped to Ref Tx']\n permutation_df = pd.DataFrame(sorted(permutation_result, key=lambda x: x[4] if x[4] is not None else 1.1),\n columns=mycols)\n\n # get benjamani hochberg adjusted p-values\n permutation_df['entropy-on-effect BH q-value'] = mypval.bh_fdr(permutation_df['entropy-on-effect p-value'])\n\n # order output\n permutation_df = permutation_df.set_index('gene', drop=False) # make sure genes are indices\n permutation_df['num recurrent'] = permutation_df['num recurrent'].fillna(-1).astype(int) # fix dtype isssue\n col_order = ['gene', 'Total Mutations', 'Unmapped to Ref Tx',\n 'num recurrent', 'num inactivating', 'entropy-on-effect',\n 'entropy-on-effect p-value', 'entropy-on-effect BH q-value']\n return permutation_df[col_order]", "response": "Takes in output from multiprocess_permutation function and converts to\n a better formatted dataframe."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_frameshift_info(fs_df, bins):\n fs_df = compute_indel_length(fs_df)\n\n # count the number INDELs with length non-dividable by 3\n num_indels = []\n indel_len = []\n num_categories = 0\n i = 1\n while(num_categories=i) & ((fs_df['indel len']%3)>0)])\n num_indels.append(tmp_num)\n indel_len.append(i)\n num_categories += 1\n i += 1\n return indel_len, num_indels", "response": "Counts the number of frameshifts stratified by a given length."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef set_mutation_type(self, mut_type=''):\n if mut_type:\n # user specifies a mutation type\n self.mutation_type = mut_type\n else:\n # mutation type is taken from object attributes\n if not self.is_valid:\n # does not correctly fall into a category\n self.mutation_type = 'not valid'\n elif self.unknown_effect:\n self.mutation_type = 'unknown effect'\n elif 
self.is_no_protein:\n self.mutation_type = 'no protein'\n elif self.is_missing_info:\n # mutation has a ?\n self.mutation_type = 'missing'\n else:\n # valid mutation type to be counted\n if self.is_lost_stop:\n self.mutation_type = 'Nonstop_Mutation'\n elif self.is_lost_start:\n self.mutation_type = 'Translation_Start_Site'\n elif self.is_synonymous:\n # synonymous must go before missense since mutations\n # can be categorized as synonymous and missense. Although\n # in reality such cases are actually synonymous and not\n # missense mutations.\n self.mutation_type = 'Silent'\n elif self.is_missense:\n self.mutation_type = 'Missense_Mutation'\n elif self.is_indel:\n self.mutation_type = 'In_Frame_Indel'\n elif self.is_nonsense_mutation:\n self.mutation_type = 'Nonsense_Mutation'\n elif self.is_frame_shift:\n self.mutation_type = 'Frame_Shift_Indel'", "response": "Sets the mutation type attribute to a single label based on the attribute flags."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets amino acid change and position.", "response": "def set_amino_acid(self, aa):\n \"\"\"Set amino acid change and position.\"\"\"\n aa = aa.upper() # make sure it is upper case\n aa = aa[2:] if aa.startswith('P.') else aa # strip \"p.\"\n self.__set_mutation_status() # set flags detailing the type of mutation\n self.__parse_hgvs_syntax(aa)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef __set_mutation_type(self, hgvs_string):\n self.__set_lost_stop_status(hgvs_string)\n self.__set_lost_start_status(hgvs_string)\n self.__set_missense_status(hgvs_string) # missense mutations\n self.__set_indel_status() # indel mutations\n self.__set_frame_shift_status() # check for fs\n self.__set_premature_stop_codon_status(hgvs_string)", "response": "Interpret the mutation type and set appropriate flags."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in 
Python 3 that\nsets the self. is_missense flag.", "response": "def __set_missense_status(self, hgvs_string):\n \"\"\"Sets the self.is_missense flag.\"\"\"\n # set missense status\n if re.search('^[A-Z?]\\d+[A-Z?]$', hgvs_string):\n self.is_missense = True\n self.is_non_silent = True\n else:\n self.is_missense = False"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef __set_lost_start_status(self, hgvs_string):\n # set is lost start status\n mymatch = re.search('^([A-Z?])(\\d+)([A-Z?])$', hgvs_string)\n if mymatch:\n grps = mymatch.groups()\n if int(grps[1]) == 1 and grps[0] != grps[2]:\n self.is_lost_start = True\n self.is_non_silent = True\n else:\n self.is_lost_start = False\n else:\n self.is_lost_start = False", "response": "Sets the self. is_lost_start flag."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef __set_frame_shift_status(self):\n if 'fs' in self.hgvs_original:\n self.is_frame_shift = True\n self.is_non_silent = True\n elif re.search('[A-Z]\\d+[A-Z]+\\*', self.hgvs_original):\n # it looks like some mutations dont follow the convention\n # of using 'fs' to indicate frame shift\n self.is_frame_shift = True\n self.is_non_silent = True\n else:\n self.is_frame_shift = False", "response": "Check for frame shift and set the self. 
is_frame_shift flag."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck if the stop codon was mutated to something other than a stop codon.", "response": "def __set_lost_stop_status(self, hgvs_string):\n \"\"\"Check if the stop codon was mutated to something other than\n a stop codon.\"\"\"\n lost_stop_pattern = '^\\*\\d+[A-Z?]+\\*?$'\n if re.search(lost_stop_pattern, hgvs_string):\n self.is_lost_stop = True\n self.is_non_silent = True\n else:\n self.is_lost_stop = False"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef __set_premature_stop_codon_status(self, hgvs_string):\n if re.search('.+\\*(\\d+)?$', hgvs_string):\n self.is_premature_stop_codon = True\n self.is_non_silent = True\n\n # check if it is also a nonsense mutation\n if hgvs_string.endswith('*'):\n self.is_nonsense_mutation = True\n else:\n self.is_nonsense_mutation = False\n else:\n self.is_premature_stop_codon = False\n self.is_nonsense_mutation = False", "response": "Set whether there is a premature stop codon."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef __set_indel_status(self):\n # set indel status\n if \"ins\" in self.hgvs_original:\n # mutation is insertion\n self.is_insertion = True\n self.is_deletion = False\n self.is_indel = True\n self.is_non_silent = True\n elif \"del\" in self.hgvs_original:\n # mutation is deletion\n self.is_deletion = True\n self.is_insertion = False\n self.is_indel = True\n self.is_non_silent = True\n else:\n # not an indel\n self.is_deletion = False\n self.is_insertion = False\n self.is_indel = False", "response": "Sets the status of the indel in the current state of the mutation being an indel."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsetting a flag for unkown effect according to HGVS syntax.", "response": "def __set_unkown_effect(self, hgvs_string):\n \"\"\"Sets a flag for 
unkown effect according to HGVS syntax. The\n COSMIC database also uses unconventional questionmarks to denote\n missing information.\n\n Args:\n hgvs_string (str): hgvs syntax with \"p.\" removed\n \"\"\"\n # Standard use by HGVS of indicating unknown effect.\n unknown_effect_list = ['?', '(=)', '='] # unknown effect symbols\n if hgvs_string in unknown_effect_list:\n self.unknown_effect = True\n elif \"(\" in hgvs_string:\n # parethesis in HGVS indicate expected outcomes\n self.unknown_effect = True\n else:\n self.unknown_effect = False\n\n # detect if there are missing information. commonly COSMIC will\n # have insertions with p.?_?ins? or deleteions with ?del indicating\n # missing information.\n if \"?\" in hgvs_string:\n self.is_missing_info = True\n else:\n self.is_missing_info = False"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef __set_no_protein(self, hgvs_string):\n no_protein_list = ['0', '0?'] # no protein symbols\n if hgvs_string in no_protein_list:\n self.is_no_protein = True\n self.is_non_silent = True\n else:\n self.is_no_protein = False", "response": "Set a flag for no protein expected."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nconverts HGVS syntax into attributes.", "response": "def __parse_hgvs_syntax(self, aa_hgvs):\n \"\"\"Convert HGVS syntax for amino acid change into attributes.\n\n Specific details of the mutation are stored in attributes like\n self.intial (prior to mutation), sel.pos (mutation position),\n self.mutated (mutation), and self.stop_pos (position of stop codon,\n if any).\n\n Args:\n aa_hgvs (str): amino acid string following HGVS syntax\n \"\"\"\n self.is_valid = True # assume initially the syntax is legitimate\n self.is_synonymous = False # assume not synonymous until proven\n if self.unknown_effect or self.is_no_protein:\n # unknown effect from mutation. 
usually denoted as p.?\n self.pos = None\n pass\n elif self.is_lost_stop:\n self.initial = aa_hgvs[0]\n self.mutated = re.findall('([A-Z?*]+)$', aa_hgvs)[0]\n self.pos = int(re.findall('^\\*(\\d+)', aa_hgvs)[0])\n self.stop_pos = None\n elif self.is_lost_start:\n self.initial = aa_hgvs[0]\n self.mutated = aa_hgvs[-1]\n self.pos = int(aa_hgvs[1:-1])\n elif self.is_missense:\n self.initial = aa_hgvs[0]\n self.mutated = aa_hgvs[-1]\n self.pos = int(aa_hgvs[1:-1])\n self.stop_pos = None # not a nonsense mutation\n if self.initial == self.mutated:\n self.is_synonymous = True\n self.is_non_silent = False\n elif self.mutated == '*':\n self.is_nonsense_mutation = True\n elif self.is_indel:\n if self.is_insertion:\n if not self.is_missing_info:\n self.initial = re.findall('([A-Z])\\d+', aa_hgvs)[:2] # first two\n self.pos = tuple(map(int, re.findall('[A-Z](\\d+)', aa_hgvs)[:2])) # first two\n self.mutated = re.findall('(?<=INS)[A-Z0-9?*]+', aa_hgvs)[0]\n self.mutated = self.mutated.strip('?') # remove the missing info '?'\n else:\n self.initial = ''\n self.pos = tuple()\n self.mutated = ''\n elif self.is_deletion:\n if not self.is_missing_info:\n self.initial = re.findall('([A-Z])\\d+', aa_hgvs)\n self.pos = tuple(map(int, re.findall('[A-Z](\\d+)', aa_hgvs)))\n self.mutated = re.findall('(?<=DEL)[A-Z]*', aa_hgvs)[0]\n else:\n self.initial = ''\n self.pos = tuple()\n self.mutated = ''\n elif self.is_frame_shift:\n self.initial = aa_hgvs[0]\n self.mutated = ''\n try:\n self.pos = int(re.findall('[A-Z*](\\d+)', aa_hgvs)[0])\n if self.is_premature_stop_codon:\n self.stop_pos = int(re.findall('\\*>?(\\d+)$', aa_hgvs)[0])\n else:\n self.stop_pos = None\n except IndexError:\n # unconventional usage of indicating frameshifts will cause\n # index errors. 
For example, in some cases 'fs' is not used.\n # In other cases, either amino acids were not included or\n # just designated as a '?'\n self.logger.debug('(Parsing-Problem) frame shift hgvs string: \"%s\"' % aa_hgvs)\n self.pos = None\n self.stop_pos = None\n self.is_missing_info = True\n elif self.is_nonsense_mutation:\n self.initial = aa_hgvs[0]\n self.mutated = '*' # there is actually a stop codon\n self.stop_pos = 0 # indicates same position is stop codon\n try:\n self.pos = int(aa_hgvs[1:-1])\n except ValueError:\n # wierd error of p.E217>D*\n self.is_valid = False\n self.pos = None\n self.logger.debug('(Parsing-Problem) Invalid HGVS Amino Acid '\n 'syntax: ' + aa_hgvs)\n if self.initial == self.mutated:\n # classify nonsense-to-nonsense mutations as synonymous\n self.is_synonymous = True\n self.is_non_silent = False\n else:\n self.is_valid = False # did not match any of the possible cases\n self.logger.debug('(Parsing-Problem) Invalid HGVS Amino Acid '\n 'syntax: ' + aa_hgvs)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nperform null - permutations for the given set of null mutations for a single gene.", "response": "def deleterious_permutation(obs_del,\n context_counts,\n context_to_mut,\n seq_context,\n gene_seq,\n num_permutations=10000,\n stop_criteria=100,\n pseudo_count=0,\n max_batch=25000):\n \"\"\"Performs null-permutations for deleterious mutation statistics\n in a single gene.\n\n Parameters\n ----------\n context_counts : pd.Series\n number of mutations for each context\n context_to_mut : dict\n dictionary mapping nucleotide context to a list of observed\n somatic base changes.\n seq_context : SequenceContext\n Sequence context for the entire gene sequence (regardless\n of where mutations occur). 
The nucleotide contexts are\n identified at positions along the gene.\n gene_seq : GeneSequence\n Sequence of gene of interest\n num_permutations : int, default: 10000\n number of permutations to create for null\n pseudo_count : int, default: 0\n Pseudo-count for number of deleterious mutations for each\n permutation of the null distribution. Increasing pseudo_count\n makes the statistical test more stringent.\n\n Returns\n -------\n del_count_list : list\n list of deleterious mutation counts under the null\n \"\"\"\n mycontexts = context_counts.index.tolist()\n somatic_base = [base\n for one_context in mycontexts\n for base in context_to_mut[one_context]]\n\n # calculate the # of batches for simulations\n max_batch = min(num_permutations, max_batch)\n num_batches = num_permutations // max_batch\n remainder = num_permutations % max_batch\n batch_sizes = [max_batch] * num_batches\n if remainder:\n batch_sizes += [remainder]\n\n num_sim = 0\n null_del_ct = 0\n for j, batch_size in enumerate(batch_sizes):\n # stop iterations if reached sufficient precision\n if null_del_ct >= stop_criteria:\n #j = j - 1\n break\n\n # get random positions determined by sequence context\n tmp_contxt_pos = seq_context.random_pos(context_counts.iteritems(),\n batch_size)\n tmp_mut_pos = np.hstack(pos_array for base, pos_array in tmp_contxt_pos)\n\n # determine result of random positions\n for i, row in enumerate(tmp_mut_pos):\n # get info about mutations\n tmp_mut_info = mc.get_aa_mut_info(row,\n somatic_base,\n gene_seq)\n\n # calc deleterious mutation info\n tmp_del_count = cutils.calc_deleterious_info(tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n tmp_mut_info['Codon Pos'])\n\n # update empricial null distribution\n if tmp_del_count >= obs_del: null_del_ct += 1\n\n # stop if reach sufficient precision on p-value\n if null_del_ct >= stop_criteria:\n break\n # update number of simulations\n num_sim += i + 1\n\n #num_sim = j*max_batch + i+1\n del_pval = float(null_del_ct) / 
(num_sim)\n\n return del_pval"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nperforming null - permutations for position - based mutation statistics in a single gene.", "response": "def position_permutation(obs_stat,\n context_counts,\n context_to_mut,\n seq_context,\n gene_seq,\n gene_vest=None,\n num_permutations=10000,\n stop_criteria=100,\n pseudo_count=0,\n max_batch=25000):\n \"\"\"Performs null-permutations for position-based mutation statistics\n in a single gene.\n\n Parameters\n ----------\n obs_stat : tuple, (recur ct, entropy, delta entropy, mean vest)\n tuple containing the observed statistics\n context_counts : pd.Series\n number of mutations for each context\n context_to_mut : dict\n dictionary mapping nucleotide context to a list of observed\n somatic base changes.\n seq_context : SequenceContext\n Sequence context for the entire gene sequence (regardless\n of where mutations occur). The nucleotide contexts are\n identified at positions along the gene.\n gene_seq : GeneSequence\n Sequence of gene of interest\n num_permutations : int, default: 10000\n number of permutations to create for null\n stop_criteria : int\n stop after stop_criteria iterations are more significant\n then the observed statistic.\n pseudo_count : int, default: 0\n Pseudo-count for number of recurrent missense mutations for each\n permutation for the null distribution. 
Increasing pseudo_count\n makes the statistical test more stringent.\n\n Returns\n -------\n num_recur_list : list\n list of recurrent mutation counts under the null\n entropy_list : list\n list of position entropy values under the null\n \"\"\"\n # get contexts and somatic base\n mycontexts = context_counts.index.tolist()\n somatic_base = [base\n for one_context in mycontexts\n for base in context_to_mut[one_context]]\n\n # calculate the # of batches for simulations\n max_batch = min(num_permutations, max_batch)\n num_batches = num_permutations // max_batch\n remainder = num_permutations % max_batch\n batch_sizes = [max_batch] * num_batches\n if remainder:\n batch_sizes += [remainder]\n\n obs_recur, obs_ent, obs_delta_ent, obs_vest = obs_stat\n num_sim = 0 # number of simulations\n null_num_recur_ct, null_entropy_ct, null_delta_entropy_ct, null_vest_ct = 0, 0, 0, 0\n for j, batch_size in enumerate(batch_sizes):\n # stop iterations if reached sufficient precision\n if null_vest_ct >= stop_criteria and null_entropy_ct >= stop_criteria:\n break\n\n # get random positions determined by sequence context\n tmp_contxt_pos = seq_context.random_pos(context_counts.iteritems(),\n batch_size)\n tmp_mut_pos = np.hstack(pos_array for base, pos_array in tmp_contxt_pos)\n\n # calculate position-based statistics as a result of random positions\n for i, row in enumerate(tmp_mut_pos):\n # get info about mutations\n tmp_mut_info = mc.get_aa_mut_info(row,\n somatic_base,\n gene_seq)\n\n # calculate position info\n tmp_recur_ct, tmp_entropy, tmp_delta_entropy, _ = cutils.calc_pos_info(tmp_mut_info['Codon Pos'],\n tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n pseudo_count=pseudo_count,\n is_obs=0)\n # get vest scores\n if gene_vest:\n tmp_vest = scores.compute_vest_stat(gene_vest,\n tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n tmp_mut_info['Codon Pos'])\n else:\n tmp_vest = 0.0\n\n # update empirical null distribution counts\n if 
tmp_entropy-utils.epsilon <= obs_ent: null_entropy_ct += 1\n if tmp_vest+utils.epsilon >= obs_vest: null_vest_ct += 1\n\n # stop iterations if reached sufficient precision\n if null_vest_ct >= stop_criteria and null_entropy_ct >= stop_criteria:\n break\n # update the number of simulations\n num_sim += i+1\n\n # calculate p-value from empirical null-distribution\n ent_pval = float(null_entropy_ct) / (num_sim)\n vest_pval = float(null_vest_ct) / (num_sim)\n\n return ent_pval, vest_pval"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nperforms null - permutations for position - based mutation statistics in a single gene.", "response": "def hotmaps_permutation(obs_stat,\n context_counts,\n context_to_mut,\n seq_context,\n gene_seq,\n window,\n num_permutations=10000,\n stop_criteria=100,\n max_batch=25000,\n null_save_path=None):\n \"\"\"Performs null-permutations for position-based mutation statistics\n in a single gene.\n\n Parameters\n ----------\n obs_stat : dict\n dictionary mapping codons to the sum of mutations in a window\n context_counts : pd.Series\n number of mutations for each context\n context_to_mut : dict\n dictionary mapping nucleotide context to a list of observed\n somatic base changes.\n seq_context : SequenceContext\n Sequence context for the entire gene sequence (regardless\n of where mutations occur). 
The nucleotide contexts are\n identified at positions along the gene.\n gene_seq : GeneSequence\n Sequence of gene of interest\n window : int\n Number of codons to the left/right of a mutated position to consider\n in the window\n num_permutations : int, default: 10000\n number of permutations to create for null\n stop_criteria : int\n stop after stop_criteria iterations are more significant\n then the observed statistic.\n max_batch : int\n maximum number of whole gene simulations to do at once.\n For large number of simulations holding a matrix of M x N,\n where M is the number of mutations and N is the number of simulations,\n can get quite large.\n null_save_path : str or None\n File path to save null distribution. If None, don't save it.\n\n Returns\n -------\n pvals : dict\n Maps mutated codon position to the calculated p-value\n \"\"\"\n # get contexts and somatic base\n mycontexts = context_counts.index.tolist()\n somatic_base = [base\n for one_context in mycontexts\n for base in context_to_mut[one_context]]\n\n # calculate the # of batches for simulations\n max_batch = min(num_permutations, max_batch)\n num_batches = num_permutations // max_batch\n remainder = num_permutations % max_batch\n batch_sizes = [max_batch] * num_batches\n if remainder:\n batch_sizes += [remainder]\n\n # figure out which position has highest value\n max_key = {w: max(obs_stat[w], key=(lambda key: obs_stat[w][key]))\n for w in window}\n\n # setup null dist counts\n null_cts = {w: {k: 0 for k in obs_stat[w]}\n for w in window }\n\n # empirical null distribution (saved if file path provided)\n empirical_null = {w: {} for w in window}\n\n num_sim = 0 # number of simulations\n for j, batch_size in enumerate(batch_sizes):\n # stop iterations if reached sufficient precision\n # stop iterations if reached sufficient precision\n stop_flag = [(null_cts[w][max_key[w]]>=stop_criteria)\n for w in window]\n if all(stop_flag):\n break\n #if null_cts[max_key] >= stop_criteria:\n #break\n\n # get 
random positions determined by sequence context\n tmp_contxt_pos = seq_context.random_pos(context_counts.iteritems(),\n batch_size)\n tmp_mut_pos = np.hstack(pos_array for base, pos_array in tmp_contxt_pos)\n\n # calculate position-based statistics as a result of random positions\n for i, row in enumerate(tmp_mut_pos):\n # get info about mutations\n tmp_mut_info = mc.get_aa_mut_info(row,\n somatic_base,\n gene_seq)\n\n # calculate position info\n tmp_pos, tmp_sim = utils.calc_windowed_sum(tmp_mut_info['Codon Pos'],\n tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n window)\n\n # update the counts when the empirical null passes the observed\n for tmp_w in tmp_sim:\n for tmp_key in tmp_sim[tmp_w]:\n # get mutation count for simulation\n val = tmp_sim[tmp_w][tmp_key]\n\n # add to empirical null distribution\n empirical_null[tmp_w].setdefault(val, 0)\n empirical_null[tmp_w][val] += 1\n\n # update counts used for p-value\n for key in null_cts[tmp_w]:\n if val >= obs_stat[tmp_w][key]:\n null_cts[tmp_w][key] += 1\n\n # update the number of simulations\n num_sim += len(tmp_pos)\n\n # stop iterations if reached sufficient precision\n stop_flag = [(null_cts[w][max_key[w]]>=stop_criteria)\n for w in window]\n if all(stop_flag):\n break\n\n # calculate p-value from empirical null-distribution\n pvals = {w: {k: float(null_cts[w][k]) / (num_sim) for k in obs_stat[w]}\n for w in window}\n\n # save empirical distribution\n if null_save_path:\n for w in window:\n # create null distribution\n output = [['mutation_count', 'p-value']]\n sorted_cts = sorted(empirical_null[w].keys())\n tmp_sum = 0\n for i in range(len(sorted_cts)):\n tmp_sum += empirical_null[w][sorted_cts[-(i+1)]]\n tmp_pval = tmp_sum / float(num_sim)\n output.append([sorted_cts[-(i+1)], tmp_pval])\n # save output\n with open(null_save_path.format(w), 'w') as handle:\n mywriter = csv.writer(handle, delimiter='\\t', lineterminator='\\n')\n mywriter.writerows(output)\n\n return pvals"} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef protein_permutation(graph_score,\n num_codons_obs,\n context_counts,\n context_to_mut,\n seq_context,\n gene_seq,\n gene_graph,\n num_permutations=10000,\n stop_criteria=100,\n pseudo_count=0):\n \"\"\"Performs null-simulations for position-based mutation statistics\n in a single gene.\n\n Parameters\n ----------\n graph_score : float\n clustering score for observed data\n num_codons_obs : int\n number of codons with missense mutation in observed data\n context_counts : pd.Series\n number of mutations for each context\n context_to_mut : dict\n dictionary mapping nucleotide context to a list of observed\n somatic base changes.\n seq_context : SequenceContext\n Sequence context for the entire gene sequence (regardless\n of where mutations occur). The nucleotide contexts are\n identified at positions along the gene.\n gene_seq : GeneSequence\n Sequence of gene of interest\n num_permutations : int, default: 10000\n number of permutations to create for null\n stop_criteria : int\n stop after stop_criteria iterations are more significant\n then the observed statistic.\n\n Returns\n -------\n protein_pval : float\n p-value for clustering in neighbor graph constructure from protein\n structures\n \"\"\"\n # get contexts and somatic base\n mycontexts = context_counts.index.tolist()\n somatic_base = [base\n for one_context in mycontexts\n for base in context_to_mut[one_context]]\n\n # get random positions determined by sequence context\n tmp_contxt_pos = seq_context.random_pos(context_counts.iteritems(),\n num_permutations)\n tmp_mut_pos = np.hstack(pos_array for base, pos_array in tmp_contxt_pos)\n\n # calculate position-based statistics as a result of random positions\n null_graph_entropy_ct = 0\n coverage_list = []\n num_mut_list = []\n graph_entropy_list = []\n for i, row in enumerate(tmp_mut_pos):\n # calculate the expected value of the relative increase in 
coverage\n if i == stop_criteria-1:\n rel_inc = [coverage_list[k] / float(num_mut_list[k])\n for k in range(stop_criteria-1)\n if coverage_list[k]]\n exp_rel_inc = np.mean(rel_inc)\n\n # calculate observed statistic\n if num_codons_obs:\n obs_stat = graph_score / np.log2(exp_rel_inc*num_codons_obs)\n else:\n obs_stat = 1.0\n\n # calculate statistics for simulated data\n sim_stat_list = [ent / np.log2(exp_rel_inc*num_mut_list[l])\n for l, ent in enumerate(graph_entropy_list)]\n null_graph_entropy_ct = len([s for s in sim_stat_list\n if s-utils.epsilon <= obs_stat])\n\n # get info about mutations\n tmp_mut_info = mc.get_aa_mut_info(row,\n somatic_base,\n gene_seq)\n\n # calculate position info\n tmp_tuple = cutils.calc_pos_info(tmp_mut_info['Codon Pos'],\n tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n pseudo_count=pseudo_count,\n is_obs=0)\n _, _, _, tmp_pos_ct = tmp_tuple\n\n # record num of mut codons\n if i < stop_criteria-1:\n tmp_num_mut_codons = len(tmp_pos_ct)\n num_mut_list.append(tmp_num_mut_codons)\n\n # get entropy on graph-smoothed probability distribution\n tmp_graph_entropy, tmp_coverage = scores.compute_ng_stat(gene_graph, tmp_pos_ct)\n\n # record the \"coverage\" in the graph\n if i < stop_criteria-1:\n coverage_list.append(tmp_coverage)\n graph_entropy_list.append(tmp_graph_entropy)\n\n # update empirical null distribution counts\n if i >= stop_criteria:\n #if tmp_graph_entropy-utils.epsilon <= graph_score:\n if tmp_num_mut_codons:\n sim_stat = tmp_graph_entropy / np.log2(exp_rel_inc*tmp_num_mut_codons)\n else:\n sim_stat = 1.0\n\n # add count\n if sim_stat-utils.epsilon <= obs_stat:\n null_graph_entropy_ct += 1\n\n # stop iterations if reached sufficient precision\n if null_graph_entropy_ct >= stop_criteria:\n break\n\n # calculate p-value from empirical null-distribution\n protein_pval = float(null_graph_entropy_ct) / (i+1)\n\n return protein_pval, obs_stat", "response": "Performs null - simulations for position - based mutation 
statistics in a single gene."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef effect_permutation(context_counts,\n context_to_mut,\n seq_context,\n gene_seq,\n num_permutations=10000,\n pseudo_count=0):\n \"\"\"Performs null-permutations for effect-based mutation statistics\n in a single gene.\n\n Parameters\n ----------\n context_counts : pd.Series\n number of mutations for each context\n context_to_mut : dict\n dictionary mapping nucleotide context to a list of observed\n somatic base changes.\n seq_context : SequenceContext\n Sequence context for the entire gene sequence (regardless\n of where mutations occur). The nucleotide contexts are\n identified at positions along the gene.\n gene_seq : GeneSequence\n Sequence of gene of interest\n num_permutations : int, default: 10000\n number of permutations to create for null\n pseudo_count : int, default: 0\n Pseudo-count for number of recurrent missense mutations for each\n permutation for the null distribution. 
Increasing pseudo_count\n makes the statistical test more stringent.\n\n Returns\n -------\n effect_entropy_list : list\n list of entropy of effect values under the null\n recur_list : list\n number of recurrent missense mutations\n inactivating_list : list\n number of inactivating mutations\n \"\"\"\n mycontexts = context_counts.index.tolist()\n somatic_base = [base\n for one_context in mycontexts\n for base in context_to_mut[one_context]]\n\n # get random positions determined by sequence context\n tmp_contxt_pos = seq_context.random_pos(context_counts.iteritems(),\n num_permutations)\n tmp_mut_pos = np.hstack(pos_array for base, pos_array in tmp_contxt_pos)\n\n # calculate position-based statistics as a result of random positions\n effect_entropy_list, recur_list, inactivating_list = [], [], []\n for row in tmp_mut_pos:\n # get info about mutations\n tmp_mut_info = mc.get_aa_mut_info(row,\n somatic_base,\n gene_seq)\n\n # calculate position info\n tmp_entropy, tmp_recur, tmp_inactivating = cutils.calc_effect_info(tmp_mut_info['Codon Pos'],\n tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n pseudo_count=pseudo_count,\n is_obs=0)\n effect_entropy_list.append(tmp_entropy)\n recur_list.append(tmp_recur)\n inactivating_list.append(tmp_inactivating)\n\n return effect_entropy_list, recur_list, inactivating_list", "response": "Performs null - permutations for effect - based mutation statistics in a single gene."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef non_silent_ratio_permutation(context_counts,\n context_to_mut,\n seq_context,\n gene_seq,\n num_permutations=10000):\n \"\"\"Performs null-permutations for non-silent ratio across all genes.\n\n Parameters\n ----------\n context_counts : pd.Series\n number of mutations for each context\n context_to_mut : dict\n dictionary mapping nucleotide context to a list of observed\n somatic base changes.\n seq_context : SequenceContext\n Sequence context 
for the entire gene sequence (regardless\n of where mutations occur). The nucleotide contexts are\n identified at positions along the gene.\n gene_seq : GeneSequence\n Sequence of gene of interest\n num_permutations : int, default: 10000\n number of permutations to create for null\n\n Returns\n -------\n non_silent_count_list : list of tuples\n list of non-silent and silent mutation counts under the null\n \"\"\"\n mycontexts = context_counts.index.tolist()\n somatic_base = [base\n for one_context in mycontexts\n for base in context_to_mut[one_context]]\n\n # get random positions determined by sequence context\n tmp_contxt_pos = seq_context.random_pos(context_counts.iteritems(),\n num_permutations)\n tmp_mut_pos = np.hstack(pos_array for base, pos_array in tmp_contxt_pos)\n\n # determine result of random positions\n non_silent_count_list = []\n for row in tmp_mut_pos:\n # get info about mutations\n tmp_mut_info = mc.get_aa_mut_info(row,\n somatic_base,\n gene_seq)\n\n # calc deleterious mutation info\n tmp_non_silent = cutils.calc_non_silent_info(tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n tmp_mut_info['Codon Pos'])\n non_silent_count_list.append(tmp_non_silent)\n return non_silent_count_list", "response": "Performs null - permutations for non - silent ratio across all genes."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef summary_permutation(context_counts,\n context_to_mut,\n seq_context,\n gene_seq,\n score_dir,\n num_permutations=10000,\n min_frac=0.0,\n min_recur=2,\n drop_silent=False):\n \"\"\"Performs null-permutations and summarizes the results as features over\n the gene.\n\n Parameters\n ----------\n context_counts : pd.Series\n number of mutations for each context\n context_to_mut : dict\n dictionary mapping nucleotide context to a list of observed\n somatic base changes.\n seq_context : SequenceContext\n Sequence context for the entire gene sequence 
(regardless\n of where mutations occur). The nucleotide contexts are\n identified at positions along the gene.\n gene_seq : GeneSequence\n Sequence of gene of interest\n num_permutations : int, default: 10000\n number of permutations to create for null\n drop_silent : bool, default=False\n Flage on whether to drop all silent mutations. Some data sources\n do not report silent mutations, and the simulations should match this.\n\n Returns\n -------\n summary_info_list : list of lists\n list of non-silent and silent mutation counts under the null along\n with information on recurrent missense counts and missense positional\n entropy.\n \"\"\"\n mycontexts = context_counts.index.tolist()\n somatic_base = [base\n for one_context in mycontexts\n for base in context_to_mut[one_context]]\n\n # get random positions determined by sequence context\n tmp_contxt_pos = seq_context.random_pos(context_counts.iteritems(),\n num_permutations)\n tmp_mut_pos = np.hstack(pos_array for base, pos_array in tmp_contxt_pos)\n\n # determine result of random positions\n gene_name = gene_seq.bed.gene_name\n gene_len = gene_seq.bed.cds_len\n summary_info_list = []\n for i, row in enumerate(tmp_mut_pos):\n # get info about mutations\n tmp_mut_info = mc.get_aa_mut_info(row,\n somatic_base,\n gene_seq)\n\n # Get all metrics summarizing each gene\n tmp_summary = cutils.calc_summary_info(tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n tmp_mut_info['Codon Pos'],\n gene_name,\n score_dir,\n min_frac=min_frac,\n min_recur=min_recur)\n\n # drop silent if needed\n if drop_silent:\n # silent mutation count is index 1\n tmp_summary[1] = 0\n\n # limit the precision of floats\n #pos_ent = tmp_summary[-1]\n #tmp_summary[-1] = '{0:.5f}'.format(pos_ent)\n\n summary_info_list.append([gene_name, i+1, gene_len]+tmp_summary)\n return summary_info_list", "response": "Performs null - permutations and summarizes the results as features over the gene."} {"SOURCE": "codesearchnet", "instruction": "Can you 
generate the documentation for the following Python 3 function\ndef maf_permutation(context_counts,\n context_to_mut,\n seq_context,\n gene_seq,\n num_permutations=10000,\n drop_silent=False):\n \"\"\"Performs null-permutations across all genes and records the results in\n a format like a MAF file. This could be useful for examining the null\n permutations because the alternative approaches always summarize the results.\n With the simulated null-permutations, novel metrics can be applied to create\n an empirical null-distribution.\n\n Parameters\n ----------\n context_counts : pd.Series\n number of mutations for each context\n context_to_mut : dict\n dictionary mapping nucleotide context to a list of observed\n somatic base changes.\n seq_context : SequenceContext\n Sequence context for the entire gene sequence (regardless\n of where mutations occur). The nucleotide contexts are\n identified at positions along the gene.\n gene_seq : GeneSequence\n Sequence of gene of interest\n num_permutations : int, default: 10000\n number of permutations to create for null\n drop_silent : bool, default=False\n Flage on whether to drop all silent mutations. 
Some data sources\n do not report silent mutations, and the simulations should match this.\n\n Returns\n -------\n maf_list : list of tuples\n list of null mutations with mutation info in a MAF like format\n \"\"\"\n mycontexts = context_counts.index.tolist()\n somatic_base, base_context = zip(*[(base, one_context)\n for one_context in mycontexts\n for base in context_to_mut[one_context]])\n\n # get random positions determined by sequence context\n tmp_contxt_pos = seq_context.random_pos(context_counts.iteritems(),\n num_permutations)\n tmp_mut_pos = np.hstack(pos_array for base, pos_array in tmp_contxt_pos)\n\n # info about gene\n gene_name = gene_seq.bed.gene_name\n strand = gene_seq.bed.strand\n chrom = gene_seq.bed.chrom\n gene_seq.bed.init_genome_coordinates() # map seq pos to genome\n\n # determine result of random positions\n maf_list = []\n for row in tmp_mut_pos:\n # get genome coordinate\n pos2genome = np.vectorize(lambda x: gene_seq.bed.seqpos2genome[x]+1)\n genome_coord = pos2genome(row)\n\n # get info about mutations\n tmp_mut_info = mc.get_aa_mut_info(row,\n somatic_base,\n gene_seq)\n\n # get string describing variant\n var_class = cutils.get_variant_classification(tmp_mut_info['Reference AA'],\n tmp_mut_info['Somatic AA'],\n tmp_mut_info['Codon Pos'])\n\n # prepare output\n for k, mysomatic_base in enumerate(somatic_base):\n # format DNA change\n ref_nuc = tmp_mut_info['Reference Nuc'][k]\n nuc_pos = row[k]\n dna_change = 'c.{0}{1}>{2}'.format(ref_nuc, nuc_pos, mysomatic_base)\n\n # format protein change\n ref_aa = tmp_mut_info['Reference AA'][k]\n somatic_aa = tmp_mut_info['Somatic AA'][k]\n codon_pos = tmp_mut_info['Codon Pos'][k]\n protein_change = 'p.{0}{1}{2}'.format(ref_aa, codon_pos, somatic_aa)\n\n # reverse complement if on negative strand\n if strand == '-':\n ref_nuc = utils.rev_comp(ref_nuc)\n mysomatic_base = utils.rev_comp(mysomatic_base)\n\n # append results\n if drop_silent and var_class[k].decode() == 'Silent': continue\n maf_line = 
[gene_name, strand, chrom, genome_coord[k], genome_coord[k],\n ref_nuc, mysomatic_base, base_context[k], dna_change,\n protein_change, var_class[k].decode()]\n maf_list.append(maf_line)\n\n return maf_list", "response": "Performs null - permutations across all genes and records the results in a MAF file."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef markdown(value, extensions=settings.MARKDOWN_EXTENSIONS,\n extension_configs=settings.MARKDOWN_EXTENSION_CONFIGS,\n safe=False):\n \"\"\" Render markdown over a given value, optionally using varios extensions.\n\n Default extensions could be defined which MARKDOWN_EXTENSIONS option.\n\n :returns: A rendered markdown\n\n \"\"\"\n return mark_safe(markdown_module.markdown(\n force_text(value), extensions=extensions,\n extension_configs=extension_configs, safe_mode=safe))", "response": "Render a given value optionally using varios extensions."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn script tag with initialization code.", "response": "def editor_js_initialization(selector, **extra_settings):\n \"\"\" Return script tag with initialization code. 
\"\"\"\n\n init_template = loader.get_template(\n settings.MARKDOWN_EDITOR_INIT_TEMPLATE)\n\n options = dict(\n previewParserPath=reverse('django_markdown_preview'),\n **settings.MARKDOWN_EDITOR_SETTINGS)\n options.update(extra_settings)\n ctx = dict(\n selector=selector,\n extra_settings=simplejson.dumps(options)\n )\n return init_template.render(ctx)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef preview(request):\n if settings.MARKDOWN_PROTECT_PREVIEW:\n user = getattr(request, 'user', None)\n if not user or not user.is_staff:\n from django.contrib.auth.views import redirect_to_login\n return redirect_to_login(request.get_full_path())\n\n return render(\n request, settings.MARKDOWN_PREVIEW_TEMPLATE, dict(\n content=request.POST.get('data', 'No content posted'),\n css=settings.MARKDOWN_STYLE\n ))", "response": "Render preview page.\n\n :returns: A rendered preview"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef register():\n\n admin.site.unregister(FlatPage)\n admin.site.register(FlatPage, LocalFlatPageAdmin)", "response": "Register markdown for flatpages."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef markdown(value, arg=None):\n extensions = (arg and arg.split(',')) or settings.MARKDOWN_EXTENSIONS\n return _markdown(value, extensions=extensions, safe=False)", "response": "Render a given value using the MARKDOWN_EXTENSIONS setting."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nrenders a given value optionally using MARKDOWN_EXTENSIONS.", "response": "def markdown_safe(value, arg=None):\n \"\"\" Render markdown over a given value, optionally using varios extensions.\n\n Default extensions could be defined which MARKDOWN_EXTENSIONS option.\n\n Enables safe mode, which strips raw HTML and only returns HTML 
generated\n by markdown.\n\n :returns: A rendered markdown.\n\n \"\"\"\n extensions = (arg and arg.split(',')) or settings.MARKDOWN_EXTENSIONS\n return _markdown(value, extensions=extensions, safe=True)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef markdown_editor(selector):\n return dict(\n selector=selector,\n extra_settings=mark_safe(simplejson.dumps(\n dict(previewParserPath=reverse('django_markdown_preview')))))", "response": "Enable markdown editor for given textarea."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef markdown_media_css():\n return dict(\n CSS_SET=posixpath.join(\n settings.MARKDOWN_SET_PATH, settings.MARKDOWN_SET_NAME, 'style.css'\n ),\n CSS_SKIN=posixpath.join(\n 'django_markdown', 'skins', settings.MARKDOWN_EDITOR_SKIN,\n 'style.css'\n )\n )", "response": "Add css requirements to HTML."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconverting given source from format to another.", "response": "def convert(source, to, format=None, extra_args=(), encoding='utf-8'):\n \"\"\"Convert given `source` from `format` `to` another.\n\n `source` may be either a file path or a string to be converted.\n It's possible to pass `extra_args` if needed. In case `format` is not\n provided, it will try to invert the format based on given `source`.\n\n Raises OSError if pandoc is not found! 
Make sure it has been installed and\n is available at path.\n\n \"\"\"\n return _convert(\n _read_file, _process_file,\n source, to,\n format, extra_args,\n encoding=encoding)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_pandoc_formats():\n try:\n p = subprocess.Popen(\n ['pandoc', '-h'],\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE)\n except OSError:\n raise OSError(\"You probably do not have pandoc installed.\")\n\n help_text = p.communicate()[0].decode().splitlines(False)\n txt = ' '.join(help_text[1:help_text.index('Options:')])\n\n aux = txt.split('Output formats: ')\n in_ = aux[0].split('Input formats: ')[1].split(',')\n out = aux[1].split(',')\n\n return [f.strip() for f in in_], [f.strip() for f in out]", "response": "Dynamic preprocessor for Pandoc formats.\n Return 2 lists. from_formats and to_formats."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef render(self, name, value, attrs=None, renderer=None):\n html = super(MarkdownWidget, self).render(name, value, attrs, renderer)\n attrs = self.build_attrs(attrs)\n html += editor_js_initialization(\"#%s\" % attrs['id'])\n return mark_safe(html)", "response": "Render the tag with the necessary initialization."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef extendMarkdown(self, md, md_globals):\n md.registerExtension(self)\n\n md.preprocessors.add('graphviz_block',\n InlineGraphvizPreprocessor(md),\n \"_begin\")", "response": "Add InlineGraphvizPreprocessor to the Markdown instance."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef run(self, lines):\n\n text = \"\\n\".join(lines)\n while 1:\n m = BLOCK_RE.search(text)\n if m:\n command = m.group('command')\n # Whitelist command, prevent command injection.\n if command not in SUPPORTED_COMMAMDS:\n raise 
Exception('Command not supported: %s' % command)\n filename = m.group('filename')\n content = m.group('content')\n filetype = filename[filename.rfind('.')+1:]\n\n args = [command, '-T'+filetype]\n try:\n proc = subprocess.Popen(\n args,\n stdin=subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdout=subprocess.PIPE)\n proc.stdin.write(content.encode('utf-8'))\n\n output, err = proc.communicate()\n\n if filetype == 'svg':\n data_url_filetype = 'svg+xml'\n encoding = 'utf-8'\n img = output.decode(encoding)\n\n if filetype == 'png':\n data_url_filetype = 'png'\n encoding = 'base64'\n output = base64.b64encode(output)\n data_path = \"data:image/%s;%s,%s\" % (\n data_url_filetype,\n encoding,\n output)\n img = \"![\" + filename + \"](\" + data_path + \")\"\n\n text = '%s\\n%s\\n%s' % (\n text[:m.start()], img, text[m.end():])\n\n except Exception as e:\n err = str(e) + ' : ' + str(args)\n return (\n '
Error : ' + err + '
'\n '
' + content + '
').split('\\n')\n\n else:\n break\n return text.split(\"\\n\")", "response": "Match and generate dot code blocks."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nposts data to API.", "response": "def post(self, command, data=None):\n \"\"\"Post data to API.\"\"\"\n now = calendar.timegm(datetime.datetime.now().timetuple())\n if now > self.expiration:\n auth = self.__open(\"/oauth/token\", data=self.oauth)\n self.__sethead(auth['access_token'])\n return self.__open(\"%s%s\" % (self.api, command),\n headers=self.head, data=data)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef __open(self, url, headers=None, data=None, baseurl=\"\"):\n headers = headers or {}\n if not baseurl:\n baseurl = self.baseurl\n req = Request(\"%s%s\" % (baseurl, url), headers=headers)\n _LOGGER.debug(url)\n\n try:\n req.data = urlencode(data).encode('utf-8')\n except TypeError:\n pass\n opener = build_opener()\n\n try:\n resp = opener.open(req)\n charset = resp.info().get('charset', 'utf-8')\n data = json.loads(resp.read().decode(charset))\n opener.close()\n _LOGGER.debug(json.dumps(data))\n return data\n except HTTPError as exception_:\n if exception_.code == 408:\n _LOGGER.debug(\"%s\", exception_)\n return False\n raise TeslaException(exception_.code)", "response": "Use raw urlopen command."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates the parking brake sensor.", "response": "def update(self):\n \"\"\"Update the parking brake sensor.\"\"\"\n self._controller.update(self._id, wake_if_asleep=False)\n data = self._controller.get_drive_params(self._id)\n if data:\n if not data['shift_state'] or data['shift_state'] == 'P':\n self.__state = True\n else:\n self.__state = False"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates the HVAC state.", "response": "def update(self):\n 
\"\"\"Update the HVAC state.\"\"\"\n self._controller.update(self._id, wake_if_asleep=False)\n\n data = self._controller.get_climate_params(self._id)\n if data:\n if time.time() - self.__manual_update_time > 60:\n self.__is_auto_conditioning_on = (data\n ['is_auto_conditioning_on'])\n self.__is_climate_on = data['is_climate_on']\n self.__driver_temp_setting = (data['driver_temp_setting']\n if data['driver_temp_setting']\n else self.__driver_temp_setting)\n self.__passenger_temp_setting = (data['passenger_temp_setting']\n if\n data['passenger_temp_setting']\n else\n self.__passenger_temp_setting)\n self.__inside_temp = (data['inside_temp'] if data['inside_temp']\n else self.__inside_temp)\n self.__outside_temp = (data['outside_temp'] if data['outside_temp']\n else self.__outside_temp)\n self.__fan_status = data['fan_status']"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nset both the driver and passenger temperature to temp.", "response": "def set_temperature(self, temp):\n \"\"\"Set both the driver and passenger temperature to temp.\"\"\"\n temp = round(temp, 1)\n self.__manual_update_time = time.time()\n data = self._controller.command(self._id, 'set_temps',\n {\"driver_temp\": temp,\n \"passenger_temp\": temp},\n wake_if_asleep=True)\n if data['response']['result']:\n self.__driver_temp_setting = temp\n self.__passenger_temp_setting = temp"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nenable or disable the HVAC.", "response": "def set_status(self, enabled):\n \"\"\"Enable or disable the HVAC.\"\"\"\n self.__manual_update_time = time.time()\n if enabled:\n data = self._controller.command(self._id,\n 'auto_conditioning_start',\n wake_if_asleep=True)\n if data['response']['result']:\n self.__is_auto_conditioning_on = True\n self.__is_climate_on = True\n else:\n data = self._controller.command(self._id,\n 'auto_conditioning_stop',\n wake_if_asleep=True)\n if data['response']['result']:\n 
self.__is_auto_conditioning_on = False\n self.__is_climate_on = False\n self.update()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates the charging state of the Tesla Vehicle.", "response": "def update(self):\n \"\"\"Update the charging state of the Tesla Vehicle.\"\"\"\n self._controller.update(self._id, wake_if_asleep=False)\n data = self._controller.get_charging_params(self._id)\n if data and (time.time() - self.__manual_update_time > 60):\n if data['charging_state'] != \"Charging\":\n self.__charger_state = False\n else:\n self.__charger_state = True"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef start_charge(self):\n if not self.__charger_state:\n data = self._controller.command(self._id, 'charge_start',\n wake_if_asleep=True)\n if data and data['response']['result']:\n self.__charger_state = True\n self.__manual_update_time = time.time()", "response": "Start charging the Tesla Vehicle."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nstopping charging the Tesla Vehicle.", "response": "def stop_charge(self):\n \"\"\"Stop charging the Tesla Vehicle.\"\"\"\n if self.__charger_state:\n data = self._controller.command(self._id, 'charge_stop',\n wake_if_asleep=True)\n if data and data['response']['result']:\n self.__charger_state = False\n self.__manual_update_time = time.time()"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nupdating the status of the range setting.", "response": "def update(self):\n \"\"\"Update the status of the range setting.\"\"\"\n self._controller.update(self._id, wake_if_asleep=False)\n data = self._controller.get_charging_params(self._id)\n if data and (time.time() - self.__manual_update_time > 60):\n self.__maxrange_state = data['charge_to_max_range']"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef 
set_max(self):\n if not self.__maxrange_state:\n data = self._controller.command(self._id, 'charge_max_range',\n wake_if_asleep=True)\n if data['response']['result']:\n self.__maxrange_state = True\n self.__manual_update_time = time.time()", "response": "Set the charger to max range for trips."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_standard(self):\n if self.__maxrange_state:\n data = self._controller.command(self._id, 'charge_standard',\n wake_if_asleep=True)\n if data and data['response']['result']:\n self.__maxrange_state = False\n self.__manual_update_time = time.time()", "response": "Set the charger to standard range for daily commute."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nunlock the doors and extend handles where applicable.", "response": "def unlock(self):\n \"\"\"Unlock the doors and extend handles where applicable.\"\"\"\n if self.__lock_state:\n data = self._controller.command(self._id, 'door_unlock',\n wake_if_asleep=True)\n if data['response']['result']:\n self.__lock_state = False\n self.__manual_update_time = time.time()"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncloses the charger door.", "response": "def lock(self):\n \"\"\"Close the charger door.\"\"\"\n if not self.__lock_state:\n data = self._controller.command(self._id, 'charge_port_door_close',\n wake_if_asleep=True)\n if data['response']['result']:\n self.__lock_state = True\n self.__manual_update_time = time.time()"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nwraps a API f so it will attempt to wake the vehicle if asleep. The command f is run once if the vehicle_id was last reported online. Assuming f returns None and wake_if_asleep is True, 5 attempts will be made to wake the vehicle to reissue the command. 
In addition, if there is a `could_not_wake_buses` error, it will retry the command Args: inst (Controller): The instance of a controller vehicle_id (string): The vehicle to attempt to wake. TODO: This currently requires a vehicle_id, but update() does not; This should also be updated to allow that case wake_if_asleep (bool): Keyword arg to force a vehicle awake. Must be set in the wrapped function f Throws: RetryLimitError", "response": "def wake_up(func):\n # pylint: disable=no-self-argument\n # issue is use of wraps on classmethods which should be replaced:\n # https://hynek.me/articles/decorators/\n \"\"\"Wrap a API f so it will attempt to wake the vehicle if asleep.\n\n The command f is run once if the vehicle_id was last reported\n online. Assuming f returns None and wake_if_asleep is True, 5 attempts\n will be made to wake the vehicle to reissue the command. In addition,\n if there is a `could_not_wake_buses` error, it will retry the command\n\n Args:\n inst (Controller): The instance of a controller\n vehicle_id (string): The vehicle to attempt to wake.\n TODO: This currently requires a vehicle_id, but update() does not; This\n should also be updated to allow that case\n wake_if_asleep (bool): Keyword arg to force a vehicle awake. 
Must be\n set in the wrapped function f\n Throws:\n RetryLimitError\n \"\"\"\n @wraps(func)\n def wrapped(*args, **kwargs):\n # pylint: disable=too-many-branches,protected-access, not-callable\n def valid_result(result):\n \"\"\"Check if TeslaAPI result succesful.\n\n Parameters\n ----------\n result : tesla API result\n This is the result of a Tesla Rest API call.\n\n Returns\n -------\n bool\n Tesla API failure can be checked in a dict with a bool in\n ['response']['result'], a bool, or None or\n ['response']['reason'] == 'could_not_wake_buses'\n Returns true when a failure state not detected.\n\n \"\"\"\n try:\n return (result is not None and result is not False and\n (result is True or\n (isinstance(result, dict) and\n isinstance(result['response'], dict) and\n ('result' in result['response'] and\n result['response']['result'] is True) or\n ('reason' in result['response'] and\n result['response']['reason'] !=\n 'could_not_wake_buses') or\n ('result' not in result['response']))))\n except TypeError as exception:\n _LOGGER.error(\"Result: %s, %s\", result, exception)\n retries = 0\n sleep_delay = 2\n inst = args[0]\n vehicle_id = args[1]\n result = None\n if (vehicle_id is not None and vehicle_id in inst.car_online and\n inst.car_online[vehicle_id]):\n try:\n result = func(*args, **kwargs)\n except TeslaException:\n pass\n if valid_result(result):\n return result\n _LOGGER.debug(\"wake_up needed for %s -> %s \\n\"\n \"Info: args:%s, kwargs:%s, \"\n \"vehicle_id:%s, car_online:%s\",\n func.__name__, # pylint: disable=no-member\n result, args, kwargs, vehicle_id,\n inst.car_online)\n inst.car_online[vehicle_id] = False\n while ('wake_if_asleep' in kwargs and kwargs['wake_if_asleep']\n and\n # Check online state\n (vehicle_id is None or\n (vehicle_id is not None and\n vehicle_id in inst.car_online and\n not inst.car_online[vehicle_id]))):\n result = inst._wake_up(vehicle_id)\n _LOGGER.debug(\"%s(%s): Wake Attempt(%s): %s\",\n func.__name__, # pylint: 
disable=no-member,\n vehicle_id,\n retries, result)\n if not result:\n if retries < 5:\n time.sleep(sleep_delay**(retries+2))\n retries += 1\n continue\n else:\n inst.car_online[vehicle_id] = False\n raise RetryLimitError\n else:\n break\n # try function five more times\n retries = 0\n while True:\n try:\n result = func(*args, **kwargs)\n _LOGGER.debug(\"%s(%s): Retry Attempt(%s): %s\",\n func.__name__, # pylint: disable=no-member,\n vehicle_id,\n retries, result)\n except TeslaException:\n pass\n finally:\n retries += 1\n time.sleep(sleep_delay**(retries+1))\n if valid_result(result):\n return result\n if retries >= 5:\n raise RetryLimitError\n return wrapped"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsends a command to the vehicle_id.", "response": "def post(self, vehicle_id, command, data=None, wake_if_asleep=True):\n # pylint: disable=unused-argument\n \"\"\"Send post command to the vehicle_id.\n\n This is a wrapped function by wake_up.\n\n Parameters\n ----------\n vehicle_id : string\n Identifier for the car on the owner-api endpoint. Confusingly it\n is not the vehicle_id field for identifying the car across\n different endpoints.\n https://tesla-api.timdorr.com/api-basics/vehicles#vehicle_id-vs-id\n command : string\n Tesla API command. 
https://tesla-api.timdorr.com/vehicle/commands\n data : dict\n Optional parameters.\n wake_if_asleep : bool\n Function for wake_up decorator indicating whether a failed response\n should wake up the vehicle or retry.\n\n Returns\n -------\n dict\n Tesla json object.\n\n \"\"\"\n data = data or {}\n return self.__connection.post('vehicles/%i/%s' %\n (vehicle_id, command), data)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get(self, vehicle_id, command, wake_if_asleep=False):\n # pylint: disable=unused-argument\n \"\"\"Send get command to the vehicle_id.\n\n This is a wrapped function by wake_up.\n\n Parameters\n ----------\n vehicle_id : string\n Identifier for the car on the owner-api endpoint. Confusingly it\n is not the vehicle_id field for identifying the car across\n different endpoints.\n https://tesla-api.timdorr.com/api-basics/vehicles#vehicle_id-vs-id\n command : string\n Tesla API command. https://tesla-api.timdorr.com/vehicle/commands\n wake_if_asleep : bool\n Function for wake_up decorator indicating whether a failed response\n should wake up the vehicle or retry.\n\n Returns\n -------\n dict\n Tesla json object.\n\n \"\"\"\n return self.__connection.get('vehicles/%i/%s' % (vehicle_id, command))", "response": "Send get command to the vehicle_id."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget requested data from vehicle_id.", "response": "def data_request(self, vehicle_id, name, wake_if_asleep=False):\n \"\"\"Get requested data from vehicle_id.\n\n Parameters\n ----------\n vehicle_id : string\n Identifier for the car on the owner-api endpoint. 
Confusingly it\n is not the vehicle_id field for identifying the car across\n different endpoints.\n https://tesla-api.timdorr.com/api-basics/vehicles#vehicle_id-vs-id\n name: string\n Name of data to be requested from the data_request endpoint which\n rolls ups all data plus vehicle configuration.\n https://tesla-api.timdorr.com/vehicle/state/data\n wake_if_asleep : bool\n Function for underlying api call for whether a failed response\n should wake up the vehicle or retry.\n\n Returns\n -------\n dict\n Tesla json object.\n\n \"\"\"\n return self.get(vehicle_id, 'vehicle_data/%s' % name,\n wake_if_asleep=wake_if_asleep)['response']"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef command(self, vehicle_id, name, data=None, wake_if_asleep=True):\n data = data or {}\n return self.post(vehicle_id, 'command/%s' % name, data,\n wake_if_asleep=wake_if_asleep)", "response": "Post a command to the vehicle_id."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update(self, car_id=None, wake_if_asleep=False, force=False):\n cur_time = time.time()\n with self.__lock:\n # Update the online cars using get_vehicles()\n last_update = self._last_attempted_update_time\n if (force or cur_time - last_update > self.update_interval):\n cars = self.get_vehicles()\n for car in cars:\n self.car_online[car['id']] = (car['state'] == 'online')\n self._last_attempted_update_time = cur_time\n # Only update online vehicles that haven't been updated recently\n # The throttling is per car's last succesful update\n # Note: This separate check is because there may be individual cars\n # to update.\n update_succeeded = False\n for id_, value in self.car_online.items():\n # If specific car_id provided, only update match\n if (car_id is not None and car_id != id_):\n continue\n if (value and # pylint: disable=too-many-boolean-expressions\n (id_ in self.__update and 
self.__update[id_]) and\n (force or id_ not in self._last_update_time or\n ((cur_time - self._last_update_time[id_]) >\n self.update_interval))):\n # Only update cars with update flag on\n try:\n data = self.get(id_, 'data', wake_if_asleep)\n except TeslaException:\n data = None\n if data and data['response']:\n response = data['response']\n self.__climate[car_id] = response['climate_state']\n self.__charging[car_id] = response['charge_state']\n self.__state[car_id] = response['vehicle_state']\n self.__driving[car_id] = response['drive_state']\n self.__gui[car_id] = response['gui_settings']\n self.car_online[car_id] = (response['state']\n == 'online')\n self._last_update_time[car_id] = time.time()\n update_succeeded = True\n return update_succeeded", "response": "This command updates all the vehicle attributes in the cache."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nupdating the battery state.", "response": "def update(self):\n \"\"\"Update the battery state.\"\"\"\n self._controller.update(self._id, wake_if_asleep=False)\n data = self._controller.get_charging_params(self._id)\n if data:\n self.__battery_level = data['battery_level']\n self.__charging_state = data['charging_state']"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nupdates the battery range state.", "response": "def update(self):\n \"\"\"Update the battery range state.\"\"\"\n self._controller.update(self._id, wake_if_asleep=False)\n data = self._controller.get_charging_params(self._id)\n if data:\n self.__battery_range = data['battery_range']\n self.__est_battery_range = data['est_battery_range']\n self.__ideal_battery_range = data['ideal_battery_range']\n data = self._controller.get_gui_params(self._id)\n if data:\n if data['gui_distance_units'] == \"mi/hr\":\n self.measurement = 'LENGTH_MILES'\n else:\n self.measurement = 'LENGTH_KILOMETERS'\n self.__rated = (data['gui_range_display'] == \"Rated\")"} {"SOURCE": 
"codesearchnet", "instruction": "Write a Python 3 script to\nreturn whether the data is from an online vehicle.", "response": "def assumed_state(self):\n # pylint: disable=protected-access\n \"\"\"Return whether the data is from an online vehicle.\"\"\"\n return (not self._controller.car_online[self.id()] and\n (self._controller._last_update_time[self.id()] -\n self._controller._last_wake_up_time[self.id()] >\n self._controller.update_interval))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the current GPS location.", "response": "def update(self):\n \"\"\"Update the current GPS location.\"\"\"\n self._controller.update(self._id, wake_if_asleep=False)\n data = self._controller.get_drive_params(self._id)\n if data:\n self.__longitude = data['longitude']\n self.__latitude = data['latitude']\n self.__heading = data['heading']\n if self.__longitude and self.__latitude and self.__heading:\n self.__location = {'longitude': self.__longitude,\n 'latitude': self.__latitude,\n 'heading': self.__heading}"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef update(self):\n self._controller.update(self._id, wake_if_asleep=False)\n data = self._controller.get_state_params(self._id)\n if data:\n self.__odometer = data['odometer']\n data = self._controller.get_gui_params(self._id)\n if data:\n if data['gui_distance_units'] == \"mi/hr\":\n self.measurement = 'LENGTH_MILES'\n else:\n self.measurement = 'LENGTH_KILOMETERS'\n self.__rated = (data['gui_range_display'] == \"Rated\")", "response": "Update the odometer and the unit of measurement based on GUI."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a parameter set for a new light curve dataset.", "response": "def lc(**kwargs):\n \"\"\"\n Create parameters for a new light curve dataset.\n\n Generally, this will be used as an input to the kind argument in\n 
:meth:`phoebe.frontend.bundle.Bundle.add_dataset`\n\n :parameter **kwargs: defaults for the values of any of the parameters\n :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly\n created :class:`phoebe.parameters.parameters.Parameter`s\n \"\"\"\n\n obs_params = []\n\n syn_params, constraints = lc_syn(syn=False, **kwargs)\n obs_params += syn_params.to_list()\n #obs_params += lc_dep(**kwargs).to_list()\n\n #~ obs_params += [FloatArrayParameter(qualifier='flag', value=kwargs.get('flag', []), default_unit=None, description='Signal flag')]\n #~ obs_params += [FloatArrayParameter(qualifier='weight', value=kwargs.get('weight', []), default_unit=None, description='Signal weight')]\n\n #~ obs_params += [FloatParameter(qualifier='timeoffset', value=kwargs.get('timeoffset', 0.0), default_unit=u.d, description='Zeropoint date offset for observations')]\n #~ obs_params += [FloatParameter(qualifier='statweight', value=kwargs.get('statweight', 0.0), default_unit=None, description='Statistical weight in overall fitting')]\n\n return ParameterSet(obs_params), constraints"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates parameters for a new radial velocity dataset.", "response": "def rv(**kwargs):\n \"\"\"\n Create parameters for a new radial velocity dataset.\n\n Generally, this will be used as an input to the kind argument in\n :meth:`phoebe.frontend.bundle.Bundle.add_dataset`\n\n :parameter **kwargs: defaults for the values of any of the parameters\n :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly\n created :class:`phoebe.parameters.parameters.Parameter`s\n \"\"\"\n\n obs_params = []\n\n #obs_params += [FloatParameter(qualifier='statweight', value = kwargs.get('statweight', 1.0), default_unit=u.dimensionless_unscaled, description='Statistical weight in overall fitting')]\n syn_params, constraints = rv_syn(syn=False, **kwargs)\n obs_params += syn_params.to_list()\n #obs_params += 
rv_dep(**kwargs).to_list()\n\n return ParameterSet(obs_params), constraints"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef lp(**kwargs):\n\n obs_params = []\n\n #obs_params += [FloatParameter(qualifier='statweight', value = kwargs.get('statweight', 1.0), default_unit=u.dimensionless_unscaled, description='Statistical weight in overall fitting')]\n syn_params, constraints = lp_syn(syn=False, **kwargs)\n obs_params += syn_params.to_list()\n #obs_params += rv_dep(**kwargs).to_list()\n\n return ParameterSet(obs_params), constraints", "response": "Create parameters for a new line profile dataset."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate a ParameterSet for a new eclipse timing variations dataset.", "response": "def etv(**kwargs):\n \"\"\"\n Create parameters for a new eclipse timing variations dataset.\n\n Generally, this will be used as an input to the kind argument in\n :meth:`phoebe.frontend.bundle.Bundle.add_dataset`\n\n :parameter **kwargs: default for the values of any of the ParameterSet\n :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly\n created :class:`phoebe.parameters.parameters.Parameter`s\n \"\"\"\n if not conf.devel:\n raise NotImplementedError(\"'etv' dataset not officially supported for this release. 
Enable developer mode to test.\")\n\n obs_params = []\n\n syn_params, constraints = etv_syn(syn=False, **kwargs)\n obs_params += syn_params.to_list()\n #obs_params += etv_dep(**kwargs).to_list()\n\n return ParameterSet(obs_params), constraints"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate a new orbit dataset.", "response": "def orb(**kwargs):\n \"\"\"\n Create parameters for a new orbit dataset.\n\n Generally, this will be used as an input to the kind argument in\n :meth:`phoebe.frontend.bundle.Bundle.add_dataset`\n\n :parameter **kwargs: defaults for the values of any of the parameters\n :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly\n created :class:`phoebe.parameters.parameters.Parameter`s\n \"\"\"\n\n obs_params = []\n\n #~ obs_params += [FloatArrayParameter(qualifier='exptime', value=kwargs.get('exptime', []), default_unit=u.s, description='Signal exposure time')]\n #~ obs_params += [FloatArrayParameter(qualifier='flag', value=kwargs.get('flag', []), default_unit=None, description='Signal flag')]\n #~ obs_params += [FloatArrayParameter(qualifier='weight', value=kwargs.get('weight', []), default_unit=None, description='Signal weight')]\n\n #~ obs_params += [FloatParameter(qualifier='timeoffset', value=kwargs.get('timeoffset', 0.0), default_unit=u.d, description='Zeropoint date offset for observations')]\n #~ obs_params += [FloatParameter(qualifier='statweight', value=kwargs.get('statweight', 0.0), default_unit=None, description='Statistical weight in overall fitting')]\n\n syn_params, constraints = orb_syn(syn=False, **kwargs)\n obs_params += syn_params.to_list()\n #obs_params += orb_dep(**kwargs).to_list()\n\n return ParameterSet(obs_params), []"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef mesh(**kwargs):\n\n obs_params = []\n\n syn_params, constraints = mesh_syn(syn=False, **kwargs)\n obs_params += syn_params.to_list()\n\n obs_params 
+= [SelectParameter(qualifier='include_times', value=kwargs.get('include_times', []), description='append to times from the following datasets/time standards', choices=['t0@system'])]\n\n obs_params += [SelectParameter(qualifier='columns', value=kwargs.get('columns', []), description='columns to expose within the mesh', choices=_mesh_columns)]\n #obs_params += mesh_dep(**kwargs).to_list()\n\n return ParameterSet(obs_params), constraints", "response": "Creates a parameter set for a new mesh dataset."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a new set of parameters for using the PHOEBE 2. 0 backend.", "response": "def phoebe(**kwargs):\n \"\"\"\n Compute options for using the PHOEBE 2.0 backend.\n\n Generally, this will be used as an input to the kind argument in\n :meth:`phoebe.frontend.bundle.Bundle.add_compute`\n\n Please see :func:`phoebe.backend.backends.phoebe` for a list of sources to\n cite when using this backend.\n\n :parameter **kwargs: defaults for the values of any of the parameters\n :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly\n created :class:`phoebe.parameters.parameters.Parameter`s\n \"\"\"\n params = []\n\n params += [BoolParameter(qualifier='enabled', copy_for={'context': 'dataset', 'dataset': '*'}, dataset='_default', value=kwargs.get('enabled', True), description='Whether to create synthetics in compute/fitting run')]\n\n # DYNAMICS\n params += [ChoiceParameter(qualifier='dynamics_method', value=kwargs.get('dynamics_method', 'keplerian'), choices=['keplerian', 'nbody', 'rebound', 'bs'] if conf.devel else ['keplerian'], description='Which method to use to determine the dynamics of components')]\n params += [BoolParameter(qualifier='ltte', value=kwargs.get('ltte', False), description='Correct for light travel time effects')]\n\n if conf.devel:\n params += [BoolParameter(visible_if='dynamics_method:nbody', qualifier='gr', value=kwargs.get('gr', False), 
description='Whether to account for general relativity effects')]\n params += [FloatParameter(visible_if='dynamics_method:nbody', qualifier='stepsize', value=kwargs.get('stepsize', 0.01), default_unit=None, description='stepsize for the N-body integrator')] # TODO: improve description (and units??)\n params += [ChoiceParameter(visible_if='dynamics_method:nbody', qualifier='integrator', value=kwargs.get('integrator', 'ias15'), choices=['ias15', 'whfast', 'sei', 'leapfrog', 'hermes'], description='Which integrator to use within rebound')]\n\n # params += [FloatParameter(visible_if='dynamics_method:bs', qualifier='stepsize', value=kwargs.get('stepsize', 0.01), default_unit=None, description='stepsize for the N-body integrator')] # TODO: improve description (and units??)\n # params += [FloatParameter(visible_if='dynamics_method:bs', qualifier='orbiterror', value=kwargs.get('orbiterror', 1e-20), default_unit=None, description='orbiterror for the N-body integrator')] # TODO: improve description (and units??)\n\n\n # PHYSICS\n # TODO: should either of these be per-dataset... if so: copy_for={'kind': ['rv_dep', 'lc_dep'], 'dataset': '*'}, dataset='_default' and then edit universe.py to pull for the correct dataset (will need to become dataset-dependent dictionary a la ld_func)\n params += [ChoiceParameter(qualifier='irrad_method', value=kwargs.get('irrad_method', 'wilson'), choices=['none', 'wilson', 'horvat'], description='Which method to use to handle all irradiation effects (reflection, redistribution)')]\n params += [ChoiceParameter(qualifier='boosting_method', value=kwargs.get('boosting_method', 'none'), choices=['none', 'linear'], description='Type of boosting method')]\n\n # TODO: include scattering here? 
(used to be in lcdep)\n\n #params += [ChoiceParameter(qualifier='irradiation_alg', value=kwargs.get('irradiation_alg', 'point_source'), choices=['full', 'point_source'], description='Type of irradiation algorithm')]\n\n # MESH\n # -- these parameters all need to exist per-component --\n # copy_for = {'kind': ['star', 'disk', 'custombody'], 'component': '*'}\n # means that this should exist for each component (since that has a wildcard) which\n # has a kind in [star, disk, custombody]\n # params += [BoolParameter(qualifier='horizon', value=kwargs.get('horizon', False), description='Store horizon for all meshes (except protomeshes)')]\n params += [ChoiceParameter(copy_for={'kind': ['star', 'envelope'], 'component': '*'}, component='_default', qualifier='mesh_method', value=kwargs.get('mesh_method', 'marching'), choices=['marching', 'wd'] if conf.devel else ['marching'], description='Which method to use for discretizing the surface')]\n params += [IntParameter(visible_if='mesh_method:marching', copy_for={'kind': ['star', 'envelope'], 'component': '*'}, component='_default', qualifier='ntriangles', value=kwargs.get('ntriangles', 1500), limits=(100,None), default_unit=u.dimensionless_unscaled, description='Requested number of triangles (won\\'t be exact).')]\n params += [ChoiceParameter(visible_if='mesh_method:marching', copy_for={'kind': ['star'], 'component': '*'}, component='_default', qualifier='distortion_method', value=kwargs.get('distortion_method', 'roche'), choices=['roche', 'rotstar', 'sphere'], description='Method to use for distorting stars')]\n\n\n if conf.devel:\n # TODO: can we have this computed from ntriangles? 
- and then do the same for the legacy compute options?\n # NOTE: if removing from developer mode - also need to remove if conf.devel in io.py line ~800\n params += [IntParameter(visible_if='mesh_method:wd', copy_for={'kind': ['star', 'envelope'], 'component': '*'}, component='_default', qualifier='gridsize', value=kwargs.get('gridsize', 60), limits=(10,None), default_unit=u.dimensionless_unscaled, description='Number of meshpoints for WD method')]\n # ------------------------------------------------------\n\n #params += [ChoiceParameter(qualifier='subdiv_alg', value=kwargs.get('subdiv_alg', 'edge'), choices=['edge'], description='Subdivision algorithm')]\n # params += [IntParameter(qualifier='subdiv_num', value=kwargs.get('subdiv_num', 3), limits=(0,None), description='Number of subdivisions')]\n\n\n\n if conf.devel:\n params += [BoolParameter(qualifier='mesh_offset', value=kwargs.get('mesh_offset', True), description='Whether to adjust the mesh to have the correct surface area (TESTING)')]\n params += [FloatParameter(visible_if='mesh_method:marching', qualifier='mesh_init_phi', value=kwargs.get('mesh_init_phi', 0.0), default_unit=u.rad, limits=(0,2*np.pi), description='Initial rotation offset for mesh (TESTING)')]\n\n # DISTORTION\n\n\n # ECLIPSE DETECTION\n params += [ChoiceParameter(qualifier='eclipse_method', value=kwargs.get('eclipse_method', 'native'), choices=['only_horizon', 'graham', 'none', 'visible_partial', 'native', 'wd_horizon'] if conf.devel else ['native'], description='Type of eclipse algorithm')]\n params += [ChoiceParameter(visible_if='eclipse_method:native', qualifier='horizon_method', value=kwargs.get('horizon_method', 'boolean'), choices=['boolean', 'linear'] if conf.devel else ['boolean'], description='Type of horizon method')]\n\n\n\n # PER-COMPONENT\n params += [ChoiceParameter(copy_for = {'kind': ['star'], 'component': '*'}, component='_default', qualifier='atm', value=kwargs.get('atm', 'ck2004'), choices=_atm_choices, 
description='Atmosphere table')]\n\n # PER-DATASET\n\n # -- these parameters all need to exist per-rvobs or lcobs --\n # copy_for = {'kind': ['rv_dep'], 'component': '*', 'dataset': '*'}\n # means that this should exist for each component/dataset pair with the\n # rv_dep kind\n params += [ChoiceParameter(qualifier='lc_method', copy_for = {'kind': ['lc'], 'dataset': '*'}, dataset='_default', value=kwargs.get('lc_method', 'numerical'), choices=['numerical', 'analytical'] if conf.devel else ['numerical'], description='Method to use for computing LC fluxes')]\n params += [ChoiceParameter(qualifier='fti_method', copy_for = {'kind': ['lc'], 'dataset': '*'}, dataset='_default', value=kwargs.get('fti_method', 'none'), choices=['none', 'oversample'], description='How to handle finite-time integration (when non-zero exptime)')]\n params += [IntParameter(visible_if='fti_method:oversample', qualifier='fti_oversample', copy_for={'kind': ['lc'], 'dataset': '*'}, dataset='_default', value=kwargs.get('fti_oversample', 5), default_unit=u.dimensionless_unscaled, description='Number of times to sample per-datapoint for finite-time integration')]\n params += [ChoiceParameter(qualifier='rv_method', copy_for = {'kind': ['rv'], 'component': '*', 'dataset': '*'}, component='_default', dataset='_default', value=kwargs.get('rv_method', 'flux-weighted'), choices=['flux-weighted', 'dynamical'], description='Method to use for computing RVs (must be flux-weighted for Rossiter-McLaughlin effects)')]\n params += [BoolParameter(visible_if='rv_method:flux-weighted', qualifier='rv_grav', copy_for = {'kind': ['rv'], 'component': '*', 'dataset': '*'}, component='_default', dataset='_default', value=kwargs.get('rv_grav', False), description='Whether gravitational redshift effects are enabled for RVs')]\n\n if conf.devel:\n params += [ChoiceParameter(qualifier='etv_method', copy_for = {'kind': ['etv'], 'component': '*', 'dataset': '*'}, component='_default', dataset='_default', 
value=kwargs.get('etv_method', 'crossing'), choices=['crossing'], description='Method to use for computing ETVs')]\n params += [FloatParameter(visible_if='etv_method:crossing', qualifier='etv_tol', copy_for = {'kind': ['etv'], 'component': '*', 'dataset': '*'}, component='_default', dataset='_default', value=kwargs.get('etv_tol', 1e-4), default_unit=u.d, description='Precision with which to determine eclipse timings')]\n # -----------------------------------------------------------\n\n\n\n return ParameterSet(params)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngenerates a new set of parameters for using the PHOEBE 1. 0 legacy backend.", "response": "def legacy(**kwargs):\n \"\"\"\n Compute options for using the PHOEBE 1.0 legacy backend (must be\n installed).\n\n Generally, this will be used as an input to the kind argument in\n :meth:`phoebe.frontend.bundle.Bundle.add_compute`\n\n Please see :func:`phoebe.backend.backends.legacy` for a list of sources to\n cite when using this backend.\n\n :parameter **kwargs: defaults for the values of any of the parameters\n :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly\n created :class:`phoebe.parameters.parameters.Parameter`s\n \"\"\"\n params = []\n\n params += [BoolParameter(qualifier='enabled', copy_for={'context': 'dataset', 'kind': ['lc', 'rv', 'mesh'], 'dataset': '*'}, dataset='_default', value=kwargs.get('enabled', True), description='Whether to create synthetics in compute/fitting run')]\n\n # TODO: the kwargs need to match the qualifier names!\n # TODO: include MORE meshing options\n params += [ChoiceParameter(copy_for = {'kind': ['star'], 'component': '*'}, component='_default', qualifier='atm', value=kwargs.get('atm', 'extern_atmx'), choices=['extern_atmx', 'extern_planckint'], description='Atmosphere table')]\n# params += [ChoiceParameter(copy_for = {'kind': ['star'], 'component': '*'}, component='_default', qualifier='atm', 
value=kwargs.get('atm', 'kurucz'), choices=['kurucz', 'blackbody'], description='Atmosphere table')]\n# params += [ChoiceParameter(qualifier='morphology', value=kwargs.get('morphology','Detached binary'), choices=['Unconstrained binary system', 'Detached binary', 'Overcontact binary of the W UMa type', 'Overcontact binary not in thermal contact'], description='System type constraint')]\n# params += [BoolParameter(qualifier='cindex', value=kwargs.get('cindex', False), description='Color index constraint')]\n# params += [IntParameter(visible_if='cindex_switch:True', qualifier='cindex', value=kwargs.get('cindex', np.array([1.0])), description='Number of reflections')]\n# params += [BoolParameter(qualifier='heating', value=kwargs.get('heating', True), description='Allow irradiators to heat other components')]\n params += [IntParameter(copy_for={'kind': ['star'], 'component': '*'}, component='_default', qualifier='gridsize', value=kwargs.get('gridsize', 60), limits=(10,None), description='Number of meshpoints for WD')]\n\n params += [ChoiceParameter(qualifier='irrad_method', value=kwargs.get('irrad_method', 'wilson'), choices=['none', 'wilson'], description='Which method to use to handle irradiation/reflection effects')]\n params += [IntParameter(visible_if='irrad_method:wilson', qualifier='refl_num', value=kwargs.get('refl_num', 1), limits=(0,None), description='Number of reflections')]\n\n# params += [BoolParameter(qualifier='msc1', value=kwargs.get('msc1', False), description='Mainsequence Constraint for star 1')]\n# params += [BoolParameter(qualifier='msc2', value=kwargs.get('msc2', False), description='Mainsequence Constraint for star 2')]\n\n\n # TODO: can we come up with a better qualifier for reddening (and be consistent when we enable in phoebe2)\n params += [BoolParameter(qualifier='ie', value=kwargs.get('ie', False), description='Should data be de-reddened')]\n\n # TODO: can we change this to rv_method = ['flux_weighted', 'dynamical'] to be consistent with 
phoebe2?\n # TODO: can proximity_rv (rv_method) be copied for each dataset (see how this is done for phoebe2)? This would probably mean that the wrapper would need to loop and make separate calls since PHOEBE1 can't handle different settings per-RV dataset\n params += [ChoiceParameter(qualifier='rv_method', copy_for = {'kind': ['rv'], 'component': '*', 'dataset': '*'}, component='_default', dataset='_default',\n value=kwargs.get('rv_method', 'flux-weighted'), choices=['flux-weighted', 'dynamical'], description='Method to use for computing RVs (must be flux-weighted for Rossiter-McLaughlin)')]\n\n return ParameterSet(params)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a ParameterSet that can be used to create a new a for use with photodynam.", "response": "def photodynam(**kwargs):\n \"\"\"\n Compute options for using Josh Carter's 'photodynam' code as a\n backend (must be installed).\n\n Generally, this will be used as an input to the kind argument in\n :meth:`phoebe.frontend.bundle.Bundle.add_compute`\n\n Please see :func:`phoebe.backend.backends.photodynam` for a list of sources to\n cite when using this backend.\n\n :parameter **kwargs: defaults for the values of any of the parameters\n :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly\n created :class:`phoebe.parameters.parameters.Parameter`s\n \"\"\"\n if not conf.devel:\n raise NotImplementedError(\"'photodynam' backend not officially supported for this release. 
Enable developer mode to test.\")\n\n params = []\n\n params += [BoolParameter(qualifier='enabled', copy_for={'context': 'dataset', 'kind': ['lc', 'rv', 'orb'], 'dataset': '*'}, dataset='_default', value=kwargs.get('enabled', True), description='Whether to create synthetics in compute/fitting run')]\n\n params += [FloatParameter(qualifier='stepsize', value=kwargs.get('stepsize', 0.01), default_unit=None, description='blah')]\n params += [FloatParameter(qualifier='orbiterror', value=kwargs.get('orbiterror', 1e-20), default_unit=None, description='blah')]\n\n # TODO: remove this option and instead use time0@system\n #params += [FloatParameter(qualifier='time0', value=kwargs.get('time0', 0.0), default_unit=u.d, description='Time to start the integration')]\n\n return ParameterSet(params)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef jktebop(**kwargs):\n if not conf.devel:\n raise NotImplementedError(\"'jktebop' backend not officially supported for this release. 
Enable developer mode to test.\")\n\n params = []\n\n params += [BoolParameter(qualifier='enabled', copy_for={'context': 'dataset', 'kind': ['lc'], 'dataset': '*'}, dataset='_default', value=kwargs.get('enabled', True), description='Whether to create synthetics in compute/fitting run')]\n\n params += [FloatParameter(qualifier='ringsize', value=kwargs.get('ringsize', 5), default_unit=u.deg, description='Integ Ring Size')]\n\n return ParameterSet(params)", "response": "Returns a ParameterSet that can be used to create synthetics in a new John Southworth dataset."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef team_required(func=None):\n def decorator(view_func):\n @functools.wraps(view_func, assigned=available_attrs(view_func))\n def _wrapped_view(request, *args, **kwargs):\n slug = kwargs.pop(\"slug\", None)\n if not getattr(request, \"team\", None):\n request.team = get_object_or_404(Team, slug=slug)\n return view_func(request, *args, **kwargs)\n return _wrapped_view\n if func:\n return decorator(func)\n return decorator", "response": "Decorator for views that require a team be supplied wither via a slug in the the\n url pattern or already set on the request object."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef manager_required(func=None):\n def decorator(view_func):\n @team_required\n @login_required\n @functools.wraps(view_func, assigned=available_attrs(view_func))\n def _wrapped_view(request, *args, **kwargs):\n role = request.team.role_for(request.user)\n if role not in [Membership.ROLE_MANAGER, Membership.ROLE_OWNER]:\n raise Http404()\n return view_func(request, *args, **kwargs)\n return _wrapped_view\n if func:\n return decorator(func)\n return decorator", "response": "Decorator for views that require only a team but also that a user be\n logged in and be the manager or owner of that team."} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef complete(self, text, state):\n if self.use_main_ns:\n self.namespace = __main__.__dict__\n if state == 0:\n self.matches = self.attr_matches(text)\n try:\n return self.matches[state]\n except IndexError:\n return None", "response": "Return the next possible completion for text."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncomputing matches when the given text contains a dot.", "response": "def attr_matches(self, text):\n \"\"\"Compute matches when text contains a dot.\n\n Assuming the text is of the form NAME.NAME....[NAME], and is\n evaluable in self.namespace, it will be evaluated and its attributes\n (as revealed by dir()) are used as possible completions. (For class\n instances, class members are also considered.)\n\n WARNING: this can still invoke arbitrary C code, if an object\n with a __getattr__ hook is evaluated.\n\n \"\"\"\n \n def _method_or_attr(thisobject, item):\n # decide whether to append a '(' to the end of the attr based\n # on whether its callable\n if hasattr(getattr(thisobject, item), '__call__'):\n return item + '('\n else:\n return item\n \n tb_compl_commands = {\n '.': {},\n '[': {},\n '.get(': {},\n '.set(': {},\n '.filter(': {},\n '.filter_or_get(': {},\n '.get_parameter(': {},\n '.remove_parameter(': {},\n '.remove_parameters_all(': {},\n '.get_value(': {},\n '.set_value(': {},\n '.set_value_all(': {},\n # TODO: default_unit, adjust, prior, posterior, enabled?\n '.get_history(': {'context': 'history'},\n '.remove_history(': {'context': 'history'},\n '.get_component(': {'context': 'system'},\n '.remove_component(': {'context': 'system'},\n '.get_mesh(': {'context': 'mesh'},\n '.remove_mesh(': {'context': 'mesh'},\n '.get_constraint(': {'context': 'constraint'},\n '.remove_constraint(': {'context': 'constraint'},\n '.flip_constraint(': {'context': 'constraint'},\n '.run_constraint(': {'context': 
'constraint'},\n '.get_compute(': {'context': 'compute'},\n '.remove_compute(': {'context': 'compute'},\n '.run_compute(': {'context': 'compute'},\n '.get_prior(': {'context': 'prior'}, # TODO: remove_prior, run_prior, enable_prior, disable_prior\n '.get_fitting(': {'context': 'fitting'},\n '.remove_fitting(': {'context': 'fitting'},\n '.run_fitting(': {'context': 'fitting'},\n '.get_posterior(': {'context': 'posterior'}, # TODO: remove_posterior, draw_from_posterior\n '.get_feedback(': {'context': 'feedback'},\n '.remove_feedback(': {'context': 'feedback'},\n # TODO: plots, plugins\n }\n \n expr = None\n for cmd,filter_kwargs in tb_compl_commands.items():\n if cmd in text:\n expr, attr = text.rsplit(cmd, 1)\n #~ if len(attr)==0:\n #~ return []\n if attr[0] not in [\"'\",'\"'] and cmd != '.':\n return []\n else:\n if cmd == '.':\n # then we're just looking for attributes and don't\n # need to offset for the ' or \"\n stringchar = ''\n attr = attr\n else:\n # then we're the first argument of some method\n # and need to account for the starting ' or \"\n stringchar = attr[0]\n attr = attr[1:]\n break\n \n if expr is None:\n # then we haven't found a match\n return []\n \n try:\n thisobject = eval(expr, self.namespace)\n except Exception:\n return []\n \n if cmd == '.':\n # then we're looking for attributes of thisobject (PS or bundle) that start with attr\n words = [_method_or_attr(thisobject, item) for item in dir(thisobject) if item[:len(attr)] == attr]\n else:\n # then we're looking to autocomplete the twig attr for thisobject (PS or bundle)\n words = thisobject.filter_or_get(attr, autocomplete=True, **filter_kwargs)\n \n matches = []\n n = len(attr)\n for word in words:\n matches.append('{}{}{}{}'.format(expr,cmd,stringchar,word))\n return matches"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nextract a list of sorted times and the datasets that need to be computed at each of those times. 
Any backend can then loop through these times and see what quantities are needed for that time step. Empty copies of synthetics for each applicable dataset are then created and returned so that they can be filled by the given backend. Setting of other meta-data should be handled by the bundle once the backend returns the filled synthetics. :parameter b: the :class:`phoebe.frontend.bundle.Bundle` :return: times (list of floats), infos (list of lists of dictionaries), new_syns (ParameterSet containing all new parameters) :raises NotImplementedError: if for some reason there is a problem getting a unique match to a dataset (shouldn't ever happen unless the user overrides a label)", "response": "def _extract_from_bundle(b, compute, times=None, allow_oversample=False,\n by_time=True, **kwargs):\n \"\"\"\n Extract a list of sorted times and the datasets that need to be\n computed at each of those times. Any backend can then loop through\n these times and see what quantities are needed for that time step.\n\n Empty copies of synthetics for each applicable dataset are then\n created and returned so that they can be filled by the given backend.\n Setting of other meta-data should be handled by the bundle once\n the backend returns the filled synthetics.\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :return: times (list of floats), infos (list of lists of dictionaries),\n new_syns (ParameterSet containing all new parameters)\n :raises NotImplementedError: if for some reason there is a problem getting\n a unique match to a dataset (shouldn't ever happen unless\n the user overrides a label)\n \"\"\"\n provided_times = times\n times = []\n infolists = []\n needed_syns = []\n\n # The general format of the datastructures used within PHOEBE are as follows:\n # if by_time:\n # - times (list within _extract_from_bundle_by_time but then casted to np.array)\n # - infolists (list of , same shape and order as times)\n # - infolist (list of )\n # - info (dict containing 
information for a given dataset-component computation at the given time)\n #\n # else:\n # - infolist (same as by_time, one per-dataset, called infolists within this function only)\n # - info (same as by_time)\n #\n # The workers then return a similar format:\n # - packetlist (list of )\n # - packet (dict ready to be sent to new_syns.set_value(**packet))\n # where packet has a similar structure to info (but with value and possibly time keys)\n # but packetlist may be longer than infolist (since mesh passband-columns allow\n # now have their own entries.)\n\n for dataset in b.filter(qualifier='enabled', compute=compute, value=True).datasets:\n dataset_ps = b.filter(context='dataset', dataset=dataset)\n dataset_compute_ps = b.filter(context='compute', dataset=dataset, compute=compute, check_visible=False)\n dataset_kind = dataset_ps.exclude(kind='*_dep').kind\n time_qualifier = _timequalifier_by_kind(dataset_kind)\n if dataset_kind in ['lc']:\n # then the Parameters in the model only exist at the system-level\n # and are not tagged by component\n dataset_components = [None]\n elif dataset_kind in ['lp']:\n # TODO: eventually spectra and RVs as well (maybe even LCs and ORBs)\n dataset_components = b.hierarchy.get_stars() + b.hierarchy.get_orbits()\n else:\n dataset_components = b.hierarchy.get_stars()\n\n for component in dataset_components:\n if provided_times:\n this_times = provided_times\n elif dataset_kind == 'mesh':\n this_times = _expand_mesh_times(b, dataset_ps, component)\n elif dataset_kind in ['lp']:\n # then we have Parameters tagged by times, this will probably\n # also apply to spectra.\n this_times = [float(t) for t in dataset_ps.times]\n else:\n timequalifier = _timequalifier_by_kind(dataset_kind)\n timecomponent = component if dataset_kind not in ['mesh', 'lc'] else None\n # print \"*****\", dataset_kind, dataset_ps.kinds, timequalifier, timecomponent\n this_times = dataset_ps.get_value(qualifier=timequalifier, component=timecomponent, unit=u.d)\n\n # 
we may also need to compute at other times if requested by a\n # mesh with this dataset in datasets@mesh\n # for mesh_datasets_parameter in mesh_datasets_parameters:\n # if dataset in mesh_datasets_parameter.get_value():\n # mesh_obs_ps = b.filter(context='dataset', dataset=mesh_datasets_parameter.dataset, component=None).exclude(kind='*_dep')\n # TODO: not sure about the component=None on the next line... what will this do for rvs with different times per-component?\n # mesh_times = _expand_mesh_times(b, mesh_obs_ps, component=None)\n # this_times = np.unique(np.append(this_times, mesh_times))\n\n if allow_oversample and \\\n dataset_kind in ['lc'] and \\\n b.get_value(qualifier='exptime', dataset=dataset, check_visible=False) > 0 and \\\n dataset_compute_ps.get_value(qualifier='fti_method', **kwargs)=='oversample':\n\n # Then we need to override the times retrieved from the dataset\n # with the oversampled times. Later we'll do an average over\n # the exposure.\n # NOTE: here we assume that the dataset times are at mid-exposure,\n # if we want to allow more flexibility, we'll need a parameter\n # that gives this option and different logic for each case.\n exptime = dataset_ps.get_value(qualifier='exptime', unit=u.d)\n fti_oversample = dataset_compute_ps.get_value(qualifier='fti_oversample', check_visible=False, **kwargs)\n # NOTE: if changing this, also change in bundle.run_compute\n this_times = np.array([np.linspace(t-exptime/2., t+exptime/2., fti_oversample) for t in this_times]).flatten()\n\n if dataset_kind in ['lp']:\n # for line profiles and spectra, we only need to compute synthetic\n # model if there are defined wavelengths\n this_wavelengths = dataset_ps.get_value(qualifier='wavelengths', component=component)\n else:\n this_wavelengths = None\n\n if len(this_times) and (this_wavelengths is None or len(this_wavelengths)):\n info = {'dataset': dataset,\n 'component': component,\n 'kind': dataset_kind,\n 'needs_mesh': _needs_mesh(b, dataset, dataset_kind, 
component, compute),\n }\n\n if dataset_kind == 'mesh':\n # then we may be requesting passband-dependent columns be\n # copied to the mesh from other datasets based on the values\n # of columns@mesh. Let's store the needed information here,\n # where mesh_datasets and mesh_kinds correspond to each\n # other (but mesh_columns does not).\n info['mesh_columns'] = dataset_ps.get_value('columns', expand=True)\n info['mesh_datasets'] = list(set([c.split('@')[1] for c in info['mesh_columns'] if len(c.split('@'))>1]))\n info['mesh_kinds'] = [b.filter(dataset=ds, context='dataset').exclude(kind='*_dep').kind for ds in info['mesh_datasets']]\n\n if by_time:\n for time_ in this_times:\n # TODO: handle some deltatime allowance here?\n if time_ in times:\n ind = times.index(time_)\n infolists[ind].append(info)\n else:\n times.append(time_)\n infolists.append([info])\n else:\n # TODO: this doesn't appear to be different than needed_syns,\n # unless we change the structure to be per-dataset.\n info['times'] = this_times\n infolists.append(info)\n\n # we need the times for _create_syns but not within the infolists,\n # so we'll do this last and make a copy so times aren't passed\n # to everybody...\n needed_syn_info = info.copy()\n needed_syn_info['times'] = this_times\n needed_syns.append(needed_syn_info)\n\n if by_time and len(times):\n ti = zip(times, infolists)\n ti.sort()\n times, infolists = zip(*ti)\n\n if by_time:\n # print \"*** _extract_from_bundle return(times, infolists, syns)\", times, infolists, needed_syns\n return np.asarray(times), infolists, _create_syns(b, needed_syns)\n else:\n # print \"*** _extract_from_bundle return(infolists, syns)\", infolists, needed_syns\n return infolists, _create_syns(b, needed_syns)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate empty synthetics for all the needed_syns", "response": "def _create_syns(b, needed_syns):\n \"\"\"\n Create empty synthetics\n\n :parameter b: the 
:class:`phoebe.frontend.bundle.Bundle`\n :parameter list needed_syns: list of dictionaries containing kwargs to access\n the dataset (dataset, component, kind)\n :return: :class:`phoebe.parameters.parameters.ParameterSet` of all new parameters\n \"\"\"\n\n # needs_mesh = {info['dataset']: info['kind'] for info in needed_syns if info['needs_mesh']}\n\n params = []\n for needed_syn in needed_syns:\n # print \"*** _create_syns needed_syn\", needed_syn\n # used to be {}_syn\n syn_kind = '{}'.format(needed_syn['kind'])\n # if needed_syn['kind']=='mesh':\n # parameters.dataset.mesh will handle creating the necessary columns\n # needed_syn['dataset_fields'] = needs_mesh\n\n # needed_syn['columns'] = b.get_value(qualifier='columns', dataset=needed_syn['dataset'], context='dataset')\n # datasets = b.get_value(qualifier='datasets', dataset=needed_syn['dataset'], context='dataset')\n # needed_syn['datasets'] = {ds: b.filter(datset=ds, context='dataset').exclude(kind='*_dep').kind for ds in datasets}\n\n # phoebe will compute everything sorted - even if the input times array\n # is out of order, so let's make sure the exposed times array is in\n # the correct (sorted) order\n if 'times' in needed_syn.keys():\n needed_syn['times'].sort()\n\n needed_syn['empty_arrays_len'] = len(needed_syn['times'])\n\n these_params, these_constraints = getattr(_dataset, \"{}_syn\".format(syn_kind.lower()))(**needed_syn)\n # TODO: do we need to handle constraints?\n these_params = these_params.to_list()\n for param in these_params:\n if param._dataset is None:\n # dataset may be set for mesh columns\n param._dataset = needed_syn['dataset']\n\n param._kind = syn_kind\n param._component = needed_syn['component']\n # reset copy_for... 
model Parameters should never copy\n param._copy_for = {}\n\n # context, model, etc will be handle by the bundle once these are returned\n\n params += these_params\n\n return ParameterSet(params)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _make_packet(qualifier, value, time, info, **kwargs):\n packet = {'dataset': kwargs.get('dataset', info['dataset']),\n 'component': kwargs.get('component', info['component']),\n 'kind': kwargs.get('kind', info['kind']),\n 'qualifier': qualifier,\n 'value': value,\n 'time': time\n }\n\n return packet", "response": "Make a packet from kwargs"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrun any sanity checks for the given backend", "response": "def run_checks(self, b, compute, times=[], **kwargs):\n \"\"\"\n run any sanity checks to make sure the parameters and options are legal\n for this backend. If they are not, raise an error here to avoid errors\n within the workers.\n\n Any physics-checks that are backend-independent should be in\n Bundle.run_checks, and don't need to be repeated here.\n\n This should be subclassed by all backends, otherwise will throw a\n NotImplementedError\n \"\"\"\n raise NotImplementedError(\"run_checks is not implemented by the {} backend\".format(self.__class__.__name__))"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _fill_syns(self, new_syns, rpacketlists_per_worker):\n # TODO: move to BaseBackendByDataset or BaseBackend?\n logger.debug(\"rank:{}/{} {}._fill_syns\".format(mpi.myrank, mpi.nprocs, self.__class__.__name__))\n\n for packetlists in rpacketlists_per_worker:\n # single worker\n for packetlist in packetlists:\n # single time/dataset\n for packet in packetlist:\n # single parameter\n new_syns.set_value(**packet)\n\n return new_syns", "response": "fills synsets with new ones"} {"SOURCE": "codesearchnet", "instruction": "Can you 
generate the documentation for the following Python 3 function\ndef run(self, b, compute, times=[], **kwargs):\n self.run_checks(b, compute, times, **kwargs)\n\n logger.debug(\"rank:{}/{} calling get_packet_and_syns\".format(mpi.myrank, mpi.nprocs))\n packet, new_syns = self.get_packet_and_syns(b, compute, times, **kwargs)\n\n if mpi.enabled:\n # broadcast the packet to ALL workers\n mpi.comm.bcast(packet, root=0)\n\n # now even the master can become a worker and take on a chunk\n packet['b'] = b\n rpacketlists = self._run_chunk(**packet)\n\n # now receive all packetlists\n rpacketlists_per_worker = mpi.comm.gather(rpacketlists, root=0)\n\n else:\n rpacketlists_per_worker = [self._run_chunk(**packet)]\n\n return self._fill_syns(new_syns, rpacketlists_per_worker)", "response": "run the packet and return a list of synapses"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncomputes the numerical volume of a convex mesh", "response": "def compute_volume(sizes, centers, normals):\n \"\"\"\n Compute the numerical volume of a convex mesh\n\n :parameter array sizes: array of sizes of triangles\n :parameter array centers: array of centers of triangles (x,y,z)\n :parameter array normals: array of normals of triangles (will normalize if not already)\n :return: the volume (float)\n \"\"\"\n # the volume of a slanted triangular cone is A_triangle * (r_vec dot norm_vec) / 3.\n\n # TODO: implement normalizing normals into meshing routines (or at least have them supply normal_mags to the mesh)\n\n\n # TODO: remove this function - should now be returned by the meshing algorithm itself\n # although wd method may currently use this\n normal_mags = np.linalg.norm(normals, axis=1) #np.sqrt((normals**2).sum(axis=1))\n return np.sum(sizes*((centers*normals).sum(axis=1)/normal_mags)/3)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the transformation matrix R to translate a mesh according to the euler 
angles.", "response": "def euler_trans_matrix(etheta, elongan, eincl):\n \"\"\"\n Get the transformation matrix R to translate/rotate a mesh according to\n euler angles.\n\n The matrix is\n\n R(long,incl,theta) =\n Rz(pi).Rz(long).Rx(incl).Rz(theta)\n Rz(long).Rx(-incl).Rz(theta).Rz(pi)\n\n where\n\n Rx(u) = 1, 0, 0\n 0, cos(u), -sin(u)\n 0, sin(u), cos(u)\n\n Ry(u) = cos(u), 0, sin(u)\n 0, 1, 0\n -sin(u), 0, cos(u)\n\n Rz(u) = cos(u), -sin(u), 0\n sin(u), cos(u), 0\n 0, 0, 1\n\n Rz(pi) = reflection across z-axis\n\n Note:\n\n R(0,0,0) = -1, 0, 0\n 0, -1, 0\n 0, 0, 1\n\n :parameter float etheta: euler theta angle\n :parameter float elongan: euler long of asc node angle\n :parameter float eincl: euler inclination angle\n :return: matrix with size 3x3\n \"\"\"\n\n s1 = sin(eincl);\n c1 = cos(eincl);\n s2 = sin(elongan);\n c2 = cos(elongan);\n s3 = sin(etheta);\n c3 = cos(etheta);\n c1s3 = c1*s3;\n c1c3 = c1*c3;\n\n return np.array([\n [-c2*c3+s2*c1s3, c2*s3+s2*c1c3, -s2*s1],\n [-s2*c3-c2*c1s3, s2*s3-c2*c1c3, c2*s1],\n [s1*s3, s1*c3, c1]\n ])"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the spin in the plane of a sky of a resource in the system.", "response": "def spin_in_system(incl, long_an):\n \"\"\"\n Spin in the plane of sky of a star given its inclination and \"long_an\"\n\n incl - inclination of the star in the plane of sky\n long_an - longitude of ascending node (equator) of the star in the plane of sky\n\n Return:\n spin - in plane of sky\n \"\"\"\n #print \"*** spin_in_system\", incl, long_an, np.dot(Rz(long_an), np.dot(Rx(-incl), np.array([0,0,1])))\n # Rz(long_an) Rx(incl) [0, 0, 1]\n return np.dot(Rz(long_an), np.dot(Rx(-incl), np.array([0.,0.,1.])))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ntransform the spin s of a star on Kerpler orbit with the specified anomaly elongan and inclination elongan and eincl.", "response": "def spin_in_roche(s, etheta, elongan, eincl):\n 
\"\"\"\n Transform the spin s of a star on Kerpler orbit with\n\n etheta - true anomaly\n elongan - longitude of ascending node\n eincl - inclination\n\n from in the plane of sky reference frame into\n the Roche reference frame.\n \"\"\"\n # m = Rz(long).Rx(-incl).Rz(theta).Rz(pi)\n m = euler_trans_matrix(etheta, elongan, eincl)\n\n return np.dot(m.T, s)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef transform_position_array(array, pos, euler, is_normal, reverse=False):\n trans_matrix = euler_trans_matrix(*euler)\n\n if not reverse:\n trans_matrix = trans_matrix.T\n\n if isinstance(array, ComputedColumn):\n array = array.for_computations\n\n if is_normal:\n # then we don't do an offset by the position\n return np.dot(np.asarray(array), trans_matrix)\n else:\n return np.dot(np.asarray(array), trans_matrix) + np.asarray(pos)", "response": "Transform any Nx3 position array by translating to a center - of - mass position vector and applying an euler transformation to the positions of the n - mass entry."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef transform_velocity_array(array, pos_array, vel, euler, rotation_vel=(0,0,0)):\n\n trans_matrix = euler_trans_matrix(*euler)\n\n # v_{rot,i} = omega x r_i with omega = rotation_vel\n rotation_component = np.cross(rotation_vel, pos_array, axisb=1)\n orbital_component = np.asarray(vel)\n\n if isinstance(array, ComputedColumn):\n array = array.for_computations\n\n new_vel = np.dot(np.asarray(array)+rotation_component, trans_matrix.T) + orbital_component\n\n return new_vel", "response": "Transform any Nx3 velocity vector array by adding the center - of - mass vel and applying an euler transformation."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef wd_grid_to_mesh_dict(the_grid, q, F, d):\n\n # WD returns a list of triangles with 9 
coordinates (v1x, v1y, v1z, v2x, ...)\n triangles_9N = the_grid[:,4:13]\n\n new_mesh = {}\n # force the mesh to be computed at centers rather than the PHOEBE default\n # of computing at vertices and averaging for the centers. This will\n # propogate to all ComputedColumns, which means we'll fill those quanities\n # (ie normgrads, velocities) per-triangle.\n new_mesh['compute_at_vertices'] = False\n # PHOEBE's mesh structure stores vertices in an Nx3 array\n new_mesh['vertices'] = triangles_9N.reshape(-1,3)\n # and triangles as indices pointing to each of the 3 vertices (Nx3)\n new_mesh['triangles'] = np.arange(len(triangles_9N)*3).reshape(-1,3)\n new_mesh['centers'] = the_grid[:,0:3]\n\n new_mesh['tnormals'] = the_grid[:,13:16]\n norms = np.linalg.norm(new_mesh['tnormals'], axis=1)\n new_mesh['normgrads'] = norms\n # TODO: do this the right way by dividing along axis=1 (or using np.newaxis as done for multiplying in ComputedColumns)\n new_mesh['tnormals'] = np.array([tn/n for tn,n in zip(new_mesh['tnormals'], norms)])\n\n # NOTE: there are no vnormals in wd-style mesh\n\n new_mesh['areas'] = the_grid[:,3]\n new_mesh['tareas'] = the_grid[:,18]\n\n # TESTING ONLY - remove this eventually ??? (currently being used\n # to test WD-style eclipse detection by using theta and phi (lat and long)\n # to determine which triangles are in the same \"strip\")\n new_mesh['thetas'] = the_grid[:,16]\n new_mesh['phis'] = the_grid[:,17]\n\n # TODO: get rid of this list comprehension\n # grads = np.array([libphoebe.roche_gradOmega_only(q, F, d, c) for c in new_mesh['centers']])\n\n # new_mesh['normgrads'] = np.sqrt(grads[:,0]**2+grads[:,1]**2+grads[:,2]**2)\n # new_mesh['normgrads'] = norms #np.linalg.norm(grads, axis=1)\n\n # TODO: actually compute the numerical volume (find old code)\n new_mesh['volume'] = compute_volume(new_mesh['areas'], new_mesh['centers'], new_mesh['tnormals'])\n # new_mesh['area'] # TODO: compute surface area??? 
(not sure if needed)\n new_mesh['velocities'] = np.zeros(new_mesh['centers'].shape)\n\n return new_mesh", "response": "Transform a wd - style mesh to a dictionary of meshes."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the average of the values at each triangle.", "response": "def averages(self):\n \"\"\"\n Access to the average of the values at the vertices for each triangle.\n If the quantities are defined at centers instead of vertices, this\n will return None. Also see :method:`centers`.\n\n :return: numpy array or None\n \"\"\"\n if not self.mesh._compute_at_vertices:\n return None\n\n return np.mean(self.vertices_per_triangle, axis=1)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn the weighted averages of the values at each vertex in the mesh.", "response": "def weighted_averages(self):\n \"\"\"\n Access to the weighted averages of the values at the vertices for each\n triangle based on the weights provided by mesh.weights. 
This is most\n useful for partially visible triangles when using libphoebe's\n eclipse detection that returns weights for each vertex.\n\n Note that weights by default are set to 1/3 for each vertex, meaning\n that this will provide the same values as :meth:`averages` unless\n the weights are overridden within the mesh.\n\n If the quantities are defined at centers instead of vertices, this will\n return None.\n\n :return: numpy array or None\n \"\"\"\n if not self.mesh._compute_at_vertices:\n return None\n\n vertices_per_triangle = self.vertices_per_triangle\n if vertices_per_triangle.ndim==2:\n # return np.dot(self.vertices_per_triangle, self.mesh.weights)\n return np.sum(vertices_per_triangle*self.mesh.weights, axis=1)\n elif vertices_per_triangle.ndim==3:\n return np.sum(vertices_per_triangle*self.mesh.weights[:,np.newaxis], axis=1)\n else:\n raise NotImplementedError"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the quantities at which the user has requested the compute.", "response": "def set_for_computations(self, value):\n \"\"\"\n Set the quantities, either at the vertices or centers depending on the\n settings of the mesh (mesh._compute_at_vertices)\n \"\"\"\n if self.mesh._compute_at_vertices:\n self._vertices = value\n else:\n self._centers = value"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef update_columns_dict(self, kwargs):\n # make sure to do the geometric things that are needed for some of the\n # ComputedColumns first\n for key in ('triangles', 'vertices', 'centers', 'vnormals', 'tnormals'):\n if key in kwargs.keys():\n self.__setitem__(key, kwargs.pop(key))\n\n for k, v in kwargs.items():\n if isinstance(v, float) and k not in self._scalar_fields:\n # Then let's make an array with the correct length full of this\n # scalar\n\n # NOTE: this won't work for Nx3's, but that\n # really shouldn't ever happen since they should be set\n # within the init.\n # v = 
np.ones(self.Ntriangles)*v\n if self._compute_at_vertices:\n v = np.full(self.Nvertices, v)\n else:\n v = np.full(self.Ntriangles, v)\n\n self.__setitem__(k, v)\n\n if isinstance(v, ComputedColumn):\n # then let's update the mesh instance to correctly handle\n # inheritance\n self.__getitem__(k)._mesh = self", "response": "Update the value of a column or multiple columns by passing as a dict."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef coords_for_observations(self):\n if self._compute_at_vertices:\n return self.vertices - self._pos_center\n else:\n return self.centers - self._pos_center", "response": "Returns the coordinates of the center of the star for each element in the species."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef coords_for_computations(self):\n\n # TODO: need to subtract the position offset if a Mesh (in orbit)\n if self._compute_at_vertices:\n if self.pvertices is not None:\n return self.pvertices - self._pos_center\n else:\n return self.vertices - self._pos_center\n else:\n return self.centers - self._pos_center", "response": "Return the coordinates of the center of the star for each element in the species."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the radius of each element in the mesh", "response": "def rs(self):\n \"\"\"\n Return the radius of each element (either vertices or centers\n depending on the setting in the mesh) with respect to the center of\n the star.\n\n NOTE: unscaled\n\n (ComputedColumn)\n \"\"\"\n rs = np.linalg.norm(self.coords_for_computations, axis=1)\n return ComputedColumn(self, rs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the projected radius of each element in the star with respect to the center of the star.", "response": "def rprojs(self):\n \"\"\"\n Return the projected (in xy/uv plane) radius of each 
element (either\n vertices or centers depending on the setting in the mesh) with respect\n to the center of the star.\n\n NOTE: unscaled\n\n (ComputedColumn)\n \"\"\"\n # TODO: should this be moved to Mesh? Even though its surely possible\n # to compute without being placed in orbit, projecting in x,y doesn't\n # make much sense without LOS orientation.\n rprojs = np.linalg.norm(self.coords_for_computations[:,:2], axis=1)\n return ComputedColumn(self, rprojs)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef cosbetas(self):\n coords = self.coords_for_computations\n norms = self.normals_for_computations\n\n # TODO: ditch the list comprehension... I know I figured out how to do\n # this (ie along an axis) with np.dot somewhere else\n # cosbetas = np.array([np.dot(c,n) / (np.linalg.norm(c)*np.linalg.norm(n)) for c,n in zip(coords, norms)])\n\n cosbetas = libphoebe.scalproj_cosangle(\n np.ascontiguousarray(coords),\n np.ascontiguousarray(norms)\n )\n\n return ComputedColumn(self, cosbetas)", "response": "Returns the cosbetas of the current column."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the area in si of the assessment.", "response": "def areas_si(self):\n \"\"\"\n TODO: add documentation\n \"\"\"\n if self._areas is not None:\n return (self.areas*u.solRad**2).to(u.m**2).value\n else:\n return None"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef from_proto(cls, proto_mesh, scale):\n\n mesh = cls(**proto_mesh.items())\n mesh._copy_roche_values()\n mesh._scale_mesh(scale=scale)\n\n return mesh", "response": "Create a new instance of this class from a protobuf mesh."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nscale the mesh of the current object", "response": "def _scale_mesh(self, scale):\n \"\"\"\n TODO: add documentation\n \"\"\"\n pos_ks = ['vertices', 'centers']\n\n # TODO: scale 
velocities???\n\n # handle scale\n self.update_columns_dict({k: self[k]*scale for k in pos_ks})\n\n self.update_columns(areas=self.areas*(scale**2))\n self._volume *= scale**3\n if self._area is not None:\n # self._area is None for wd meshes\n self._area += scale**2"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef from_proto(cls, proto_mesh, scale,\n pos, vel, euler, euler_vel,\n rotation_vel=(0,0,0),\n component_com_x=None):\n \"\"\"\n Turn a ProtoMesh into a Mesh scaled and placed in orbit.\n\n Update all geometry fields from the proto reference frame, to the\n current system reference frame, given the current position, velocitiy,\n euler angles, and rotational velocity of THIS mesh.\n\n :parameter list pos: current position (x, y, z)\n :parameter list vel: current velocity (vx, vy, vz)\n :parameter list euler: current euler angles (etheta, elongan, eincl)\n :parameter list rotation_vel: rotation velocity vector (polar_dir*freq_rot)\n \"\"\"\n\n mesh = cls(**proto_mesh.items())\n\n mesh._copy_roche_values()\n mesh._scale_mesh(scale=scale)\n mesh._place_in_orbit(pos, vel, euler, euler_vel, rotation_vel, component_com_x)\n\n return mesh", "response": "Turn a ProtoMesh into a Mesh scaled and placed in orbit."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a new mesh from a scaledproto mesh.", "response": "def from_scaledproto(cls, scaledproto_mesh,\n pos, vel, euler, euler_vel,\n rotation_vel=(0,0,0),\n component_com_x=None):\n \"\"\"\n TODO: add documentation\n \"\"\"\n\n mesh = cls(**scaledproto_mesh.items())\n\n # roche coordinates have already been copied\n # so do NOT call mesh._copy_roche_values() here\n mesh._place_in_orbit(pos, vel, euler, euler_vel, rotation_vel, component_com_x)\n\n return mesh"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef visibilities(self):\n if self._visibilities is not 
None:\n return self._visibilities\n else:\n return np.ones(self.Ntriangles)", "response": "Return the array of visibilities for the current locale."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a numpy array of the 3 - dimensional weights of the current instance of the class.", "response": "def weights(self):\n \"\"\"\n TODO: add documentation\n\n (Nx3)\n \"\"\"\n if self._weights is not None and len(self._weights):\n return self._weights\n else:\n return np.full((self.Ntriangles, 3), 1./3)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nupdating the columns of the object with the values from kwargs.", "response": "def update_columns_dict(self, kwargs):\n \"\"\"\n TODO: add documentation\n \"\"\"\n super(Mesh, self).update_columns_dict(kwargs)\n\n # if kwargs.get('vnormals', None) is not None or kwargs.get('tnormals', None) is not None:\n # self._compute_mus()\n if kwargs.get('triangles', None) is not None:\n # reset visibilities and velocities so that they are reset\n # when next queried\n self.update_columns(visibilities=None, velocities=None)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update_columns(self, field, value_dict, inds=None, computed_type=None):\n if not isinstance(value_dict, dict):\n value_dict = {comp_no: value_dict for comp_no in self._dict.keys()}\n\n\n for comp, value in value_dict.items():\n if computed_type is not None:\n # then create the ComputedColumn now to override the default value of compute_at_vertices\n self._dict[comp]._observables[field] = ComputedColumn(self._dict[comp], compute_at_vertices=computed_type=='vertices')\n\n #print \"***\", comp, field, inds, value\n if inds:\n raise NotImplementedError('setting column with indices not yet ported to new meshing')\n # self._dict[comp][field][inds] = value\n else:\n if comp in self._dict.keys():\n self._dict[comp][field] = value\n else:\n meshes = 
self._dict[self._parent_envelope_of[comp]]\n meshes[comp][field] = value", "response": "update the columns of all components in the current mesh"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_column(self, field, components=None, computed_type='for_observations'):\n def get_field(c, field, computed_type):\n\n if c not in self._dict.keys() and self._parent_envelope_of[c] in self._dict.keys():\n mesh = self._dict[self._parent_envelope_of[c]]\n return mesh.get_column_flat(field, components, computed_type)\n\n mesh = self._dict[c]\n if isinstance(mesh, Meshes):\n # then do this recursively for all components in the Meshes object\n # but don't allow nesting in the dictionary, instead combine\n # all subcomponents into one entry with the current component\n return mesh.get_column_flat(field, mesh._components, computed_type)\n\n f = mesh[field]\n if isinstance(f, ComputedColumn):\n col = getattr(f, computed_type)\n else:\n col = f\n\n return col\n\n if components:\n if isinstance(components, str):\n components = [components]\n else:\n components = self.keys()\n\n return {c: get_field(c, field, computed_type) for c in components}", "response": "get_column returns a dictionary of the components of the current object"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_column_flat(self, field, components=None, computed_type='for_observations'):\n return self.pack_column_flat(self.get_column(field, components, computed_type),\n components,\n offset=field=='triangles')", "response": "get_column_flat - get a single merged value from all meshes"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef pack_column_flat(self, value, components=None, offset=False):\n if components:\n if isinstance(components, str):\n components = [components]\n elif isinstance(components, list):\n components = components\n else:\n raise TypeError(\"components 
should be list or string, not {}\".format(type(components)))\n elif isinstance(value, dict):\n components = value.keys()\n elif isinstance(value, list):\n components = self._dict.keys()\n value = {c: v for c,v in zip(components, value)}\n\n if offset:\n values = []\n offsetN = 0\n for c in components:\n values.append(value[c]+offsetN)\n offsetN += len(self[c]['vertices'])\n\n else:\n values = [value[c] for c in components]\n\n if len(value[components[0]].shape) > 1:\n return np.vstack(values)\n else:\n return np.hstack(values)", "response": "Pack a column into a flat array."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nunpacking the flat array of values into a dictionary.", "response": "def unpack_column_flat(self, value, components=None, offset=False, computed_type=None):\n \"\"\"\n TODO: add documentation\n TODO: needs testing\n \"\"\"\n if components:\n if isinstance(components, str):\n components = [components]\n else:\n components = self._dict.keys()\n\n # TODO: add this\n\n # we need to split the flat array by the lengths of each mesh\n N_lower = 0\n N_upper = 0\n offsetN = 0.0\n value_dict = {}\n for comp in components:\n if isinstance(self[comp], Meshes):\n # then we need to recursively extract to the underlying meshes\n # pass\n meshes = self[comp]._dict\n else:\n meshes = {comp: self[comp]}\n\n for c, mesh in meshes.items():\n if computed_type=='vertices' or (computed_type is None and mesh._compute_at_vertices):\n N = mesh.Nvertices\n else:\n N = mesh.Ntriangles\n N_upper += N\n value_dict[c] = value[N_lower:N_upper] - offsetN\n if offset:\n offsetN += N\n N_lower += N\n\n return value_dict"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef set_column_flat(self, field, value, components=None, computed_type=None):\n value_dict = self.unpack_column_flat(value, components, computed_type=computed_type)\n self.update_columns(field, value_dict, 
computed_type=computed_type)", "response": "Set a flat column of the log record."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef replace_elements(self, inds, new_submesh, component):\n self._dict[component] = np.hstack([self._dict[component][~inds], new_submesh])", "response": "Replace elements in the specified component with new_submesh."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nparses parameters in the bundle and call dynamics.", "response": "def dynamics_from_bundle(b, times, compute=None, return_euler=False, **kwargs):\n \"\"\"\n Parse parameters in the bundle and call :func:`dynamics`.\n\n See :func:`dynamics` for more detailed information.\n\n NOTE: you must either provide compute (the label) OR all relevant options\n as kwargs (ltte)\n\n Args:\n b: (Bundle) the bundle with a set hierarchy\n times: (list or array) times at which to run the dynamics\n return_euler: (bool, default=False) whether to include euler angles\n in the return\n\n Returns:\n t, xs, ys, zs, vxs, vys, vzs [, theta, longan, incl].\n t is a numpy array of all times,\n the remaining are a list of numpy arrays (a numpy array per\n star - in order given by b.hierarchy.get_stars()) for the cartesian\n positions and velocities of each star at those same times.\n Euler angles (theta, longan, incl) are only returned if return_euler is\n set to True.\n\n \"\"\"\n\n b.run_delayed_constraints()\n\n computeps = b.get_compute(compute, check_visible=False, force_ps=True)\n ltte = computeps.get_value('ltte', check_visible=False, **kwargs)\n\n # make sure times is an array and not a list\n times = np.array(times)\n\n vgamma = b.get_value('vgamma', context='system', unit=u.solRad/u.d)\n t0 = b.get_value('t0', context='system', unit=u.d)\n\n hier = b.hierarchy\n starrefs = hier.get_stars()\n orbitrefs = hier.get_orbits()\n s = b.filter(context='component')\n\n periods, eccs, smas, t0_perpasses, per0s, 
long_ans, incls, dpdts, \\\n deccdts, dperdts, components = [],[],[],[],[],[],[],[],[],[],[]\n\n\n for component in starrefs:\n\n # we need to build a list of all orbitlabels underwhich this component\n # belongs. For a simple binary this is just the parent, but for hierarchical\n # systems we need to get the labels of the outer-orbits as well\n ancestororbits = []\n comp = component\n while hier.get_parent_of(comp) in orbitrefs:\n comp = hier.get_parent_of(comp)\n ancestororbits.append(comp)\n\n #print \"***\", component, ancestororbits\n\n periods.append([s.get_value('period', u.d, component=orbit) for orbit in ancestororbits])\n eccs.append([s.get_value('ecc', component=orbit) for orbit in ancestororbits])\n t0_perpasses.append([s.get_value('t0_perpass', u.d, component=orbit) for orbit in ancestororbits])\n per0s.append([s.get_value('per0', u.rad, component=orbit) for orbit in ancestororbits])\n long_ans.append([s.get_value('long_an', u.rad, component=orbit) for orbit in ancestororbits])\n incls.append([s.get_value('incl', u.rad, component=orbit) for orbit in ancestororbits])\n dpdts.append([s.get_value('dpdt', u.d/u.d, component=orbit) for orbit in ancestororbits])\n if conf.devel:\n deccdts.append([s.get_value('deccdt', u.dimensionless_unscaled/u.d, component=orbit) for orbit in ancestororbits])\n else:\n deccdts.append([0.0 for orbit in ancestororbits])\n dperdts.append([s.get_value('dperdt', u.rad/u.d, component=orbit) for orbit in ancestororbits])\n\n # sma needs to be the COMPONENT sma. 
This is stored in the bundle for stars, but is NOT\n # for orbits in orbits, so we'll need to recompute those from the mass-ratio and sma of\n # the parent orbit.\n\n smas_this = []\n for comp in [component]+ancestororbits[:-1]:\n if comp in starrefs:\n smas_this.append(s.get_value('sma', u.solRad, component=comp))\n else:\n q = s.get_value('q', component=hier.get_parent_of(comp))\n comp_comp = hier.get_primary_or_secondary(comp)\n\n # NOTE: similar logic is also in constraints.comp_sma\n # If changing any of the logic here, it should be changed there as well.\n if comp_comp == 'primary':\n qthing = (1. + 1./q)\n else:\n qthing = (1. + q)\n\n smas_this.append(s.get_value('sma', u.solRad, component=hier.get_parent_of(comp)) / qthing)\n\n smas.append(smas_this)\n\n # components is whether an entry is the primary or secondary in its parent orbit, so here we want\n # to start with component and end one level short of the top-level orbit\n components.append([hier.get_primary_or_secondary(component=comp) for comp in [component]+ancestororbits[:-1]])\n\n\n return dynamics(times, periods, eccs, smas, t0_perpasses, per0s, \\\n long_ans, incls, dpdts, deccdts, dperdts, \\\n components, t0, vgamma, \\\n mass_conservation=True, ltte=ltte, return_euler=return_euler)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncompute the positions and velocites of each star in their nested Keplerian orbits at a given list of times. See :func:`dynamics_from_bundle` for a wrapper around this function which automatically handles passing everything in the correct order and in the correct units. 
Args: times: (iterable) times at which to compute positions and velocities for each star periods: (iterable) period of the parent orbit for each star [days] eccs: (iterable) eccentricity of the parent orbit for each star smas: (iterable) semi-major axis of the parent orbit for each star [solRad] t0_perpasses: (iterable) t0_perpass of the parent orbit for each star [days] per0s: (iterable) longitudes of periastron of the parent orbit for each star [rad] long_ans: (iterable) longitudes of the ascending node of the parent orbit for each star [rad] incls: (iterable) inclination of the parent orbit for each star [rad] dpdts: (iterable) change in period with respect to time of the parent orbit for each star [days/day] deccdts: (iterable) change in eccentricity with respect to time of the parent orbit for each star [1/day] dperdts: (iterable) change in periastron with respect to time of the parent orbit for each star [rad/d] components: (iterable) component ('primary' or 'secondary') of each star within its parent orbit [string] t0: (float, default=0) time at which all initial values (ie period, per0) are given [days] mass_conservation: (bool, optional) whether to require mass conservation if any of the derivatives (dpdt, dperdt, etc) are non-zero [default: True] return_euler: (bool, default=False) whether to include euler angles in the return Returns: t, xs, ys, zs, vxs, vys, vzs [, theta, longan, incl]. t is a numpy array of all times, the remaining are a list of numpy arrays (a numpy array per star - in order given by b.hierarchy.get_stars()) for the cartesian positions and velocities of each star at those same times. 
Euler angles (theta, longan, incl) are only returned if return_euler is set to True.", "response": "def dynamics(times, periods, eccs, smas, t0_perpasses, per0s, long_ans, incls,\n dpdts, deccdts, dperdts, components, t0=0.0, vgamma=0.0,\n mass_conservation=True, ltte=False, return_euler=False):\n \"\"\"\n Compute the positions and velocites of each star in their nested\n Keplerian orbits at a given list of times.\n\n See :func:`dynamics_from_bundle` for a wrapper around this function\n which automatically handles passing everything in the correct order\n and in the correct units.\n\n Args:\n times: (iterable) times at which to compute positions and\n velocities for each star\n periods: (iterable) period of the parent orbit for each star\n [days]\n eccs: (iterable) eccentricity of the parent orbit for each star\n smas: (iterable) semi-major axis of the parent orbit for each\n star [solRad]\n t0_perpasses: (iterable) t0_perpass of the parent orbit for each\n star [days]\n per0s: (iterable) longitudes of periastron of the parent orbit\n for each star [rad]\n long_ans: (iterable) longitudes of the ascending node of the\n parent orbit for each star [rad]\n incls: (iterable) inclination of the parent orbit for each\n star [rad]\n dpdts: (iterable) change in period with respect to time of the\n parent orbit for each star [days/day]\n deccdts: (iterable) change in eccentricity with respect to time\n of the parent orbit for each star [1/day]\n dperdts: (iterable) change in periastron with respect to time\n of the parent orbit for each star [rad/d]\n components: (iterable) component ('primary' or 'secondary') of\n each star within its parent orbit [string]\n t0: (float, default=0) time at which all initial values (ie period, per0)\n are given [days]\n mass_conservation: (bool, optional) whether to require mass\n conservation if any of the derivatives (dpdt, dperdt, etc)\n are non-zero [default: True]\n return_euler: (bool, default=False) whether to include euler angles\n in 
the return\n\n Returns:\n t, xs, ys, zs, vxs, vys, vzs [, theta, longan, incl].\n t is a numpy array of all times,\n the remaining are a list of numpy arrays (a numpy array per\n star - in order given by b.hierarchy.get_stars()) for the cartesian\n positions and velocities of each star at those same times.\n Euler angles (theta, longan, incl) are only returned if return_euler is\n set to True.\n \"\"\"\n # TODO: NOTE: smas must be per-component, not per-orbit\n # TODO: steal some documentation from 2.0a:keplerorbit.py:get_orbit\n # TODO: deal with component number more smartly\n\n def binary_dynamics(times, period, ecc, sma, t0_perpass, per0, long_an,\n incl, dpdt, deccdt, dperdt, component='primary',\n t0=0.0, vgamma=0.0, mass_conservation=True,\n com_pos=(0.,0.,0.), com_vel=(0.,0.,0.), com_euler=(0.,0.,0.)):\n \"\"\"\n \"\"\"\n # TODO: steal some documentation from 2.0a:keplerorbit.py:get_orbit\n\n\n #-- if dpdt is non-zero, the period is actually an array, and the semi-\n # major axis changes to match Kepler's third law (unless\n # `mass_conservation` is set to False)\n if dpdt!=0:\n period_ = period\n period = dpdt*(times-t0) + period_\n if mass_conservation and not np.isscalar(period):\n sma = sma/period[0]**2*period**2\n elif mass_conservation:\n sma = sma/period_**2*period**2\n\n #-- if dperdt is non-zero, the argument of periastron is actually an\n # array\n if dperdt!=0.:\n per0 = dperdt*(times-t0) + per0\n #-- if deccdt is non-zero, the eccentricity is actually an array\n if deccdt!=0:\n ecc = deccdt*(times-t0) + ecc\n\n\n #-- compute orbit\n n = 2*pi/period\n ma = n*(times-t0_perpass)\n E,theta = _true_anomaly(ma,ecc)\n r = sma*(1-ecc*cos(E))\n PR = r*sin(theta)\n #-- compute rdot and thetadot\n l = r*(1+ecc*cos(theta))#-omega))\n L = 2*pi*sma**2/period*sqrt(1-ecc**2)\n rdot = L/l*ecc*sin(theta)#-omega)\n thetadot = L/r**2\n #-- the secondary is half an orbit further than the primary\n if 'sec' in component.lower():\n theta += pi\n #-- take care of Euler 
angles\n theta_ = theta+per0\n #-- convert to the right coordinate frame\n #-- create some shortcuts\n sin_theta_ = sin(theta_)\n cos_theta_ = cos(theta_)\n sin_longan = sin(long_an)\n cos_longan = cos(long_an)\n #-- spherical coordinates to cartesian coordinates. Note that we actually\n # set incl=-incl (but of course it doesn't show up in the cosine). We\n # do this to match the convention that superior conjunction (primary\n # eclipsed) happens at periastron passage when per0=90 deg.\n x = r*(cos_longan*cos_theta_ - sin_longan*sin_theta_*cos(incl))\n y = r*(sin_longan*cos_theta_ + cos_longan*sin_theta_*cos(incl))\n z = r*(sin_theta_*sin(-incl))\n #-- spherical vectors to cartesian vectors, and then rotated for\n # the Euler angles Omega and i.\n vx_ = cos_theta_*rdot - sin_theta_*r*thetadot\n vy_ = sin_theta_*rdot + cos_theta_*r*thetadot\n vx = cos_longan*vx_ - sin_longan*vy_*cos(incl)\n vy = sin_longan*vx_ + cos_longan*vy_*cos(incl)\n vz = sin(-incl)*vy_\n #-- that's it!\n\n # correct by vgamma (only z-direction)\n # NOTE: vgamma is in the direction of positive RV or negative vz\n vz -= vgamma\n z -= vgamma * (times-t0)\n\n return (x+com_pos[0],y+com_pos[1],z+com_pos[2]),\\\n (vx+com_vel[0],vy+com_vel[1],vz+com_vel[2]),\\\n (theta_,long_an,incl)\n # (theta_+com_euler[0],long_an+com_euler[1],incl+com_euler[2])\n\n def binary_dynamics_nested(times, periods, eccs, smas, \\\n t0_perpasses, per0s, long_ans, incls, dpdts, deccdts, \\\n dperdts, components, t0, vgamma, \\\n mass_conservation):\n\n \"\"\"\n compute the (possibly nested) positions of a single component (ltte should be\n handle externally)\n \"\"\"\n\n\n if not hasattr(periods, '__iter__'):\n # then we don't have to worry about nesting, and each item should\n # be a single value ready to pass to binary_dynamics\n pos, vel, euler = binary_dynamics(times, periods, eccs, smas, t0_perpasses, \\\n per0s, long_ans, incls, dpdts, deccdts, dperdts, components, \\\n t0, vgamma, mass_conservation)\n\n else:\n # 
Handle nesting - if this star is not in the top-level orbit, then\n # all values should actually be lists. We want to sort by period to handle\n # the outer orbits first and then apply those offsets to the inner-orbit(s)\n\n # let's convert to arrays so we can use argsort easily\n periods = np.array(periods)\n eccs = np.array(eccs)\n smas = np.array(smas)\n t0_perpasses = np.array(t0_perpasses)\n per0s = np.array(per0s)\n long_ans = np.array(long_ans)\n incls = np.array(incls)\n dpdts = np.array(dpdts)\n deccdts = np.array(deccdts)\n dperdts = np.array(dperdts)\n components = np.array(components)\n\n si = periods.argsort()[::-1]\n\n #print \"***\", periods, si\n\n pos = (0.0, 0.0, 0.0)\n vel = (0.0, 0.0, 0.0)\n euler = (0.0, 0.0, 0.0)\n\n for period, ecc, sma, t0_perpass, per0, long_an, incl, dpdt, \\\n deccdt, dperdt, component in zip(periods[si], eccs[si], \\\n smas[si], t0_perpasses[si], per0s[si], long_ans[si], \\\n incls[si], dpdts[si], deccdts[si], dperdts[si], components[si]):\n\n pos, vel, euler = binary_dynamics(times, period, ecc, sma, t0_perpass, \\\n per0, long_an, incl, dpdt, deccdt, dperdt, component, \\\n t0, vgamma, mass_conservation,\n com_pos=pos, com_vel=vel, com_euler=euler)\n\n\n\n return pos, vel, euler\n\n\n\n\n xs, ys, zs = [], [], []\n vxs, vys, vzs = [], [], []\n\n if return_euler:\n ethetas, elongans, eincls = [], [], []\n\n for period, ecc, sma, t0_perpass, per0, long_an, incl, dpdt, deccdt, \\\n dperdt, component in zip(periods, eccs, smas, t0_perpasses, per0s, long_ans, \\\n incls, dpdts, deccdts, dperdts, components):\n\n # We now have the orbital parameters for a single star/component.\n\n if ltte:\n #scale_factor = 1.0/c.c.value * c.R_sun.value/(24*3600.)\n scale_factor = (c.R_sun/c.c).to(u.d).value\n\n def propertime_barytime_residual(t):\n pos, vel, euler = binary_dynamics_nested(time, period, ecc, sma, \\\n t0_perpass, per0, long_an, incl, dpdt, deccdt, \\\n dperdt, components=component, t0=t0, vgamma=vgamma, \\\n 
mass_conservation=mass_conservation)\n z = pos[2]\n return t - z*scale_factor - time\n\n # Finding that right time is easy with a Newton optimizer:\n propertimes = [newton(propertime_barytime_residual, time) for \\\n time in times]\n propertimes = np.array(propertimes).ravel()\n\n pos, vel, euler = binary_dynamics_nested(propertimes, period, ecc, sma, \\\n t0_perpass, per0, long_an, incl, dpdt, deccdt, \\\n dperdt, components=component, t0=t0, vgamma=vgamma, \\\n mass_conservation=mass_conservation)\n\n\n else:\n pos, vel, euler = binary_dynamics_nested(times, period, ecc, sma, \\\n t0_perpass, per0, long_an, incl, dpdt, deccdt, \\\n dperdt, components=component, t0=t0, vgamma=vgamma, \\\n mass_conservation=mass_conservation)\n\n\n xs.append(pos[0])\n ys.append(pos[1])\n zs.append(pos[2])\n vxs.append(vel[0])\n vys.append(vel[1])\n vzs.append(vel[2])\n if return_euler:\n ethetas.append(euler[0])\n elongans.append([euler[1]]*len(euler[0]))\n eincls.append([euler[2]]*len(euler[0]))\n\n\n # if return_euler:\n # return times, \\\n # xs*u.solRad, ys*u.solRad, zs*u.solRad, \\\n # vxs*u.solRad/u.d, vys*u.solRad/u.d, vzs*u.solRad/u.d, \\\n # ethetas*u.rad, elongans*u.rad, eincls*u.rad\n # else:\n # return times, \\\n # xs*u.solRad, ys*u.solRad, zs*u.solRad, \\\n # vxs*u.solRad/u.d, vys*u.solRad/u.d, vzs*u.solRad/u.d if return_euler:\n\n # d, solRad, solRad/d, rad\n if return_euler:\n return times, \\\n xs, ys, zs, \\\n vxs, vys, vzs, \\\n ethetas, elongans, eincls\n else:\n return times, \\\n xs, ys, zs, \\\n vxs, vys, vzs"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef spot(feature, **kwargs):\n\n params = []\n\n params += [FloatParameter(qualifier=\"colat\", value=kwargs.get('colat', 0.0), default_unit=u.deg, description='Colatitude of the center of the spot wrt spin axes')]\n params += [FloatParameter(qualifier=\"long\", value=kwargs.get('long', 0.0), default_unit=u.deg, description='Longitude of the 
center of the spot wrt spin axis')]\n params += [FloatParameter(qualifier='radius', value=kwargs.get('radius', 1.0), default_unit=u.deg, description='Angular radius of the spot')]\n # params += [FloatParameter(qualifier='area', value=kwargs.get('area', 1.0), default_unit=u.solRad, description='Surface area of the spot')]\n\n params += [FloatParameter(qualifier='relteff', value=kwargs.get('relteff', 1.0), limits=(0.,None), default_unit=u.dimensionless_unscaled, description='Temperature of the spot relative to the intrinsic temperature')]\n # params += [FloatParameter(qualifier='teff', value=kwargs.get('teff', 10000), default_unit=u.K, description='Temperature of the spot')]\n\n constraints = []\n\n return ParameterSet(params), constraints", "response": "Creates a parameter set that can be used to create a spot in a non - empty sequence."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef pulsation(feature, **kwargs):\n if not conf.devel:\n raise NotImplementedError(\"'pulsation' feature not officially supported for this release. 
Enable developer mode to test.\")\n\n\n params = []\n\n params += [FloatParameter(qualifier='radamp', value=kwargs.get('radamp', 0.1), default_unit=u.dimensionless_unscaled, description='Relative radial amplitude of the pulsations')]\n params += [FloatParameter(qualifier='freq', value=kwargs.get('freq', 1.0), default_unit=u.d**-1, description='Frequency of the pulsations')]\n params += [IntParameter(qualifier='l', value=kwargs.get('l', 0), default_unit=u.dimensionless_unscaled, description='Non-radial degree l')]\n params += [IntParameter(qualifier='m', value=kwargs.get('m', 0), default_unit=u.dimensionless_unscaled, description='Azimuthal order m')]\n params += [BoolParameter(qualifier='teffext', value=kwargs.get('teffext', False), description='Switch to denote whether Teffs are provided by the external code')]\n\n\n constraints = []\n\n return ParameterSet(params), constraints", "response": "Creates a parameter set for a pulsation feature"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nconverts an animation object from matplotlib into HTML which can then be displayed in an IPython notebook.", "response": "def anim_to_html(anim):\n \"\"\"\n adapted from: http://jakevdp.github.io/blog/2013/05/12/embedding-matplotlib-animations/\n\n This function converts and animation object from matplotlib into HTML which can then\n be embedded in an IPython notebook.\n\n This requires ffmpeg to be installed in order to build the intermediate mp4 file\n\n To get these to display automatically, you need to set animation.Animation._repr_html_ = plotlib.anim_to_html\n (this is done on your behalf by PHOEBE)\n \"\"\"\n if not hasattr(anim, '_encoded_video'):\n with NamedTemporaryFile(suffix='.mp4') as f:\n anim.save(f.name, fps=20, extra_args=['-vcodec', 'libx264'])\n video = open(f.name, \"rb\").read()\n anim._encoded_video = video.encode(\"base64\")\n\n return VIDEO_TAG.format(anim._encoded_video)"} {"SOURCE": "codesearchnet", 
"instruction": "How would you explain what the following Python 3 function does\ndef load_lc_data(filename, indep, dep, indweight=None, mzero=None, dir='./'):\n\n \"\"\"\n load dictionary with lc data\n \"\"\"\n if '/' in filename:\n path, filename = os.path.split(filename)\n else:\n # TODO: this needs to change to be directory of the .phoebe file\n path = dir\n\n load_file = os.path.join(path, filename)\n lcdata = np.loadtxt(load_file)\n ncol = len(lcdata[0])\n if dep == 'Magnitude':\n mag = lcdata[:,1]\n flux = 10**(-0.4*(mag-mzero))\n lcdata[:,1] = flux\n\n d = {}\n d['phoebe_lc_time'] = lcdata[:,0]\n d['phoebe_lc_flux'] = lcdata[:,1]\n if indweight==\"Standard deviation\":\n if ncol >= 3:\n d['phoebe_lc_sigmalc'] = lcdata[:,2]\n else:\n logger.warning('A sigma column was mentioned in the .phoebe file but is not present in the lc data file')\n elif indweight ==\"Standard weight\":\n if ncol >= 3:\n sigma = np.sqrt(1/lcdata[:,2])\n d['phoebe_lc_sigmalc'] = sigma\n logger.warning('Standard weight has been converted to Standard deviation.')\n\n else:\n logger.warning('A sigma column was mentioned in the .phoebe file but is not present in the lc data file')\n else:\n logger.warning('Phoebe 2 currently only supports standard deviaton')\n\n# dataset.set_value(check_visible=False, **d)\n\n return d", "response": "load the lc data file into a dictionary with lc data"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nload the rv data file into a dictionary with rv data.", "response": "def load_rv_data(filename, indep, dep, indweight=None, dir='./'):\n\n \"\"\"\n load dictionary with rv data.\n \"\"\"\n\n if '/' in filename:\n path, filename = os.path.split(filename)\n else:\n path = dir\n\n load_file = os.path.join(path, filename)\n rvdata = np.loadtxt(load_file)\n\n d ={}\n d['phoebe_rv_time'] = rvdata[:,0]\n d['phoebe_rv_vel'] = rvdata[:,1]\n ncol = len(rvdata[0])\n\n if indweight==\"Standard deviation\":\n\n if ncol >= 3:\n 
d['phoebe_rv_sigmarv'] = rvdata[:,2]\n else:\n logger.warning('A sigma column is mentioned in the .phoebe file but is not present in the rv data file')\n elif indweight ==\"Standard weight\":\n if ncol >= 3:\n sigma = np.sqrt(1/rvdata[:,2])\n d['phoebe_rv_sigmarv'] = sigma\n logger.warning('Standard weight has been converted to Standard deviation.')\n else:\n logger.warning('Phoebe 2 currently only supports standard deviaton')\n\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef det_dataset(eb, passband, dataid, comp, time):\n\n \"\"\"\n Since RV datasets can have values related to each component in phoebe2, but are component specific in phoebe1\n , it is important to determine which dataset to add parameters to. This function will do that.\n eb - bundle\n rvpt - relevant phoebe 1 parameters\n\n \"\"\"\n rvs = eb.get_dataset(kind='rv').datasets\n #first check to see if there are currently in RV datasets\n if dataid == 'Undefined':\n dataid = None\n# if len(rvs) == 0:\n #if there isn't we add one the easy part\n\n try:\n eb._check_label(dataid)\n\n rv_dataset = eb.add_dataset('rv', dataset=dataid, times=[])\n\n except ValueError:\n\n logger.warning(\"The name picked for the radial velocity curve is forbidden. Applying default name instead\")\n rv_dataset = eb.add_dataset('rv', times=[])\n\n# else:\n# #now we have to determine if we add to an existing dataset or make a new one\n# rvs = eb.get_dataset(kind='rv').datasets\n# found = False\n# #set the component of the companion\n#\n# if comp == 'primary':\n# comp_o = 'primary'\n# else:\n# comp_o = 'secondary'\n# for x in rvs:\n# test_dataset = eb.get_dataset(x, check_visible=False)\n#\n#\n# if len(test_dataset.get_value(qualifier='rvs', component=comp_o, check_visible=False)) == 0: #so at least it has an empty spot now check against filter and length\n# # removing reference to time_o. 
If there are no rvs there should be no times\n# # time_o = test_dataset.get_value('times', component=comp_o)\n# passband_o = test_dataset.get_value('passband')\n#\n# # if np.all(time_o == time) and (passband == passband_o):\n# if (passband == passband_o):\n# rv_dataset = test_dataset\n# found = True\n#\n# if not found:\n# try:\n# eb._check_label(dataid)\n#\n# rv_dataset = eb.add_dataset('rv', dataset=dataid, times=[])\n#\n# except ValueError:\n#\n# logger.warning(\"The name picked for the lightcurve is forbidden. Applying default name instead\")\n# rv_dataset = eb.add_dataset('rv', times=[])\n\n return rv_dataset", "response": "This function will remove the dataset from the RV and passband from the RV and passband of the RV."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef N_to_Ntriangles(N):\n\n theta = np.array([np.pi/2*(k-0.5)/N for k in range(1, N+1)])\n phi = np.array([[np.pi*(l-0.5)/Mk for l in range(1, Mk+1)] for Mk in np.array(1 + 1.3*N*np.sin(theta), dtype=int)])\n Ntri = 2*np.array([len(p) for p in phi]).sum()\n\n return Ntri", "response": "Converts a WD style grid size N to the number of triangles on the\n surface."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef pot_for_component(pot, q, component=1, reverse=False):\n # currently only used by legacy wrapper: consider moving/removing\n if component==1:\n return pot\n elif component==2:\n if reverse:\n return pot/q + 0.5*(q-1)/q\n else:\n return q*pot - 0.5 * (q-1)\n else:\n raise NotImplementedError", "response": "returns the potential for a given component"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef roche_misaligned_critical_requiv(q, F, d, s, scale=1.0):\n volume_critical = libphoebe.roche_misaligned_critical_volume(q, F, d, s)\n logger.debug(\"libphoebe.roche_misaligned_critical_volume(q={}, F={}, d={}, s={}) => 
{}\".format(q, F, d, s, volume_critical))\n\n requiv_critical = scale * (volume_critical * 3./4 * 1./np.pi)**(1./3)\n # logger.debug(\"roche.roche_misaligned_critical_requiv = {}\".format(requiv_critical))\n\n return requiv_critical", "response": "calculate the misaligned critical requiv"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ntransform polar radius to surface potential", "response": "def rpole_to_pot_aligned(rpole, sma, q, F, d, component=1):\n \"\"\"\n Transforms polar radius to surface potential\n \"\"\"\n q = q_for_component(q, component=component)\n rpole_ = np.array([0, 0, rpole/sma])\n logger.debug(\"libphoebe.roche_Omega(q={}, F={}, d={}, rpole={})\".format(q, F, d, rpole_))\n pot = libphoebe.roche_Omega(q, F, d, rpole_)\n return pot_for_component(pot, component, reverse=True)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef pot_to_rpole_aligned(pot, sma, q, F, d, component=1):\n q = q_for_component(q, component=component)\n Phi = pot_for_component(pot, q, component=component)\n logger.debug(\"libphobe.roche_pole(q={}, F={}, d={}, Omega={})\".format(q, F, d, pot))\n return libphoebe.roche_pole(q, F, d, pot) * sma", "response": "Transforms surface potential to polar radius"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef BinaryRoche (r, D, q, F, Omega=0.0):\n return 1.0/sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]) + q*(1.0/sqrt((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])-r[0]/D/D) + 0.5*F*F*(1+q)*(r[0]*r[0]+r[1]*r[1]) - Omega", "response": "r Computes a value of the asynchronous eccentric Roche potential."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef dBinaryRochedx (r, D, q, F):\n return -r[0]*(r[0]*r[0]+r[1]*r[1]+r[2]*r[2])**-1.5 -q*(r[0]-D)*((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**-1.5 -q/D/D + F*F*(1+q)*r[0]", "response": 
"Computes a derivative of the potential with respect to x."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncompute second derivative of the potential with respect to x.", "response": "def d2BinaryRochedx2(r, D, q, F):\n \"\"\"\n Computes second derivative of the potential with respect to x.\n\n @param r: relative radius vector (3 components)\n @param D: instantaneous separation\n @param q: mass ratio\n @param F: synchronicity parameter\n \"\"\"\n return (2*r[0]*r[0]-r[1]*r[1]-r[2]*r[2])/(r[0]*r[0]+r[1]*r[1]+r[2]*r[2])**2.5 +\\\n q*(2*(r[0]-D)*(r[0]-D)-r[1]*r[1]-r[2]*r[2])/((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**2.5 +\\\n F*F*(1+q)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncomputes a derivative of the potential with respect to y.", "response": "def dBinaryRochedy (r, D, q, F):\n \"\"\"\n Computes a derivative of the potential with respect to y.\n\n @param r: relative radius vector (3 components)\n @param D: instantaneous separation\n @param q: mass ratio\n @param F: synchronicity parameter\n \"\"\"\n return -r[1]*(r[0]*r[0]+r[1]*r[1]+r[2]*r[2])**-1.5 -q*r[1]*((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**-1.5 + F*F*(1+q)*r[1]"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncompute a derivative of the potential with respect to z.", "response": "def dBinaryRochedz(r, D, q, F):\n \"\"\"\n Computes a derivative of the potential with respect to z.\n\n @param r: relative radius vector (3 components)\n @param D: instantaneous separation\n @param q: mass ratio\n @param F: synchronicity parameter\n \"\"\"\n return -r[2]*(r[0]*r[0]+r[1]*r[1]+r[2]*r[2])**-1.5 -q*r[2]*((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**-1.5"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef dBinaryRochedr (r, D, q, F):\n\n r2 = (r*r).sum()\n r1 = np.sqrt(r2)\n\n return -1./r2 - 
q*(r1-r[0]/r1*D)/((r[0]-D)*(r[0]-D)+r[1]*r[1]+r[2]*r[2])**1.5 - q*r[0]/r1/D/D + F*F*(1+q)*(1-r[2]*r[2]/r2)*r1", "response": "Computes a derivative of the potential with respect to r."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef settings(**kwargs):\n\n params = []\n\n params += [StringParameter(qualifier='phoebe_version', value=kwargs.get('phoebe_version', __version__), description='Version of PHOEBE - change with caution')]\n params += [BoolParameter(qualifier='log_history', value=kwargs.get('log_history', False), description='Whether to log history (undo/redo)')]\n params += [DictParameter(qualifier='dict_filter', value=kwargs.get('dict_filter', {}), description='Filters to use when using dictionary access')]\n params += [BoolParameter(qualifier='dict_set_all', value=kwargs.get('dict_set_all', False), description='Whether to set all values for dictionary access that returns more than 1 result')]\n\n # params += [ChoiceParameter(qualifier='plotting_backend', value=kwargs.get('plotting_backend', 'mpl'), choices=['mpl', 'mpld3', 'mpl2bokeh', 'bokeh'] if conf.devel else ['mpl'], description='Default backend to use for plotting')]\n\n # problem with try_sympy parameter: it can't be used during initialization... 
so this may need to be a phoebe-level setting\n # params += [BoolParameter(qualifier='try_sympy', value=kwargs.get('try_sympy', True), description='Whether to use sympy if installed for constraints')]\n\n # This could be complicated - because then we'll have to specifically watch to see when its enabled and then run all constraints - not sure if that is worth the time savings\n # params += [BoolParameter(qualifier='run_constraints', value=kwargs.get('run_constraints', True), description='Whether to run_constraints whenever a parameter changes (warning: turning off will disable constraints until enabled at which point all constraints will be run)')]\n\n return ParameterSet(params)", "response": "Returns a new set of parameters that can be used to initialize a new resource bundle."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nintercept and send to the server if bundle is in client mode.", "response": "def send_if_client(fctn):\n \"\"\"Intercept and send to the server if bundle is in client mode.\"\"\"\n @functools.wraps(fctn)\n def _send_if_client(self, *args, **kwargs):\n fctn_map = {'set_quantity': 'set_value'}\n b = self._bundle\n if b is not None and b.is_client:\n # TODO: self._filter???\n # TODO: args???\n method = fctn_map.get(fctn.__name__, fctn.__name__)\n d = self._filter if hasattr(self, '_filter') \\\n else {'twig': self.twig}\n d['bundleid'] = b._bundleid\n for k, v in kwargs.items():\n d[k] = v\n\n logger.info('emitting to {}({}) to server'.format(method, d))\n b._socketio.emit(method, d)\n\n if fctn.__name__ in ['run_compute', 'run_fitting']:\n # then we're expecting a quick response with an added jobparam\n # let's add that now\n self._bundle.client_update()\n else:\n return fctn(self, *args, **kwargs)\n return _send_if_client"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nintercepts and check updates from server if bundle is in client mode.", "response": "def 
update_if_client(fctn):\n \"\"\"Intercept and check updates from server if bundle is in client mode.\"\"\"\n @functools.wraps(fctn)\n def _update_if_client(self, *args, **kwargs):\n b = self._bundle\n if b is None or not hasattr(b, 'is_client'):\n return fctn(self, *args, **kwargs)\n elif b.is_client and \\\n (b._last_client_update is None or\n (datetime.now() - b._last_client_update).seconds > 1):\n\n b.client_update()\n return fctn(self, *args, **kwargs)\n return _update_if_client"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a unique string with length n.", "response": "def _uniqueid(n=30):\n \"\"\"Return a unique string with length n.\n\n :parameter int N: number of character in the uniqueid\n :return: the uniqueid\n :rtype: str\n \"\"\"\n return ''.join(random.SystemRandom().choice(\n string.ascii_uppercase + string.ascii_lowercase)\n for _ in range(n))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nloading a single parameter from a JSON dictionary.", "response": "def parameter_from_json(dictionary, bundle=None):\n \"\"\"Load a single parameter from a JSON dictionary.\n\n :parameter dict dictionary: the dictionary containing the parameter\n information\n :parameter bundle: (optional)\n :return: instantiated :class:`Parameter` object\n \"\"\"\n if isinstance(dictionary, str):\n dictionary = json.loads(dictionary, object_pairs_hook=parse_json)\n\n classname = dictionary.pop('Class')\n\n if classname not in _parameter_class_that_require_bundle:\n bundle = None\n\n # now let's do some dirty magic and get the actual classitself\n # from THIS module. 
__name__ is a string to lookup this module\n # from the sys.modules dictionary\n cls = getattr(sys.modules[__name__], classname)\n\n return cls._from_json(bundle, **dictionary)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_meta(self, ignore=['uniqueid']):\n return OrderedDict([(k, getattr(self, k))\n for k in _meta_fields_twig\n if k not in ignore])", "response": "Returns a dictionary of all meta - tags that are shared by ALL Parameters."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets the value of tags for all Parameters in this ParameterSet.", "response": "def set_meta(self, **kwargs):\n \"\"\"Set the value of tags for all Parameters in this ParameterSet.\"\"\"\n for param in self.to_list():\n for k, v in kwargs.items():\n # Here we'll set the attributes (_context, _qualifier, etc)\n if getattr(param, '_{}'.format(k)) is None:\n setattr(param, '_{}'.format(k), v)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef tags(self):\n ret = {}\n for typ in _meta_fields_twig:\n if typ in ['uniqueid', 'plugin', 'feedback', 'fitting', 'history', 'twig', 'uniquetwig']:\n continue\n\n k = '{}s'.format(typ)\n ret[k] = getattr(self, k)\n\n return ret", "response": "Returns a dictionary that lists all available tags that can be used\n for further filtering"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the common twig of all items in this ParameterSet.", "response": "def common_twig(self):\n \"\"\"\n The twig that is common between all items in this ParameterSet.\n This twig gives a single string which can point back to this ParameterSet\n (but may include other entries as well)\n\n see also :meth:`uniquetwig`\n\n :return: twig (full) of this Parameter\n \"\"\"\n return \"@\".join([getattr(self, k) for k in _meta_fields_twig if self.meta.get(k) is not None])"} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _set_meta(self):\n # we want to set meta-fields that are shared by ALL params in the PS\n for field in _meta_fields_twig:\n keys_for_this_field = set([getattr(p, field)\n for p in self.to_list()\n if getattr(p, field) is not None])\n if len(keys_for_this_field)==1:\n setattr(self, '_'+field, list(keys_for_this_field)[0])\n else:\n setattr(self, '_'+field, None)", "response": "Set the meta - fields of the ParameterSet as those that are shared by ALL parameters in the ParameterSet."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the least unique twig for the parameter given by twig that will return a single result for THIS PS", "response": "def _uniquetwig(self, twig, force_levels=['qualifier']):\n \"\"\"\n get the least unique twig for the parameter given by twig that\n will return this single result for THIS PS\n\n :parameter str twig: a twig that will return a single Parameter from\n THIS PS\n :parameter list force_levels: (optional) a list of \"levels\"\n (eg. 
context) that should be included whether or not they are\n necessary\n :return: the unique twig\n :rtype: str\n \"\"\"\n for_this_param = self.filter(twig, check_visible=False)\n\n metawargs = {}\n\n # NOTE: self.contexts is INCREDIBLY expensive\n # if len(self.contexts) and 'context' not in force_levels:\n if 'context' not in force_levels:\n # then let's force context to be included\n force_levels.append('context')\n\n for k in force_levels:\n metawargs[k] = getattr(for_this_param, k)\n\n prev_count = len(self)\n # just to fake in case no metawargs are passed at all\n ps_for_this_search = []\n for k in _meta_fields_twig:\n metawargs[k] = getattr(for_this_param, k)\n if getattr(for_this_param, k) is None:\n continue\n\n ps_for_this_search = self.filter(check_visible=False, **metawargs)\n\n if len(ps_for_this_search) < prev_count and k not in force_levels:\n prev_count = len(ps_for_this_search)\n elif k not in force_levels:\n # this didn't help us\n metawargs[k] = None\n\n if len(ps_for_this_search) != 1:\n # TODO: after fixing regex in twig (t0type vs t0)\n # change this to raise Error instead of return\n return twig\n\n # now we go in the other direction and try to remove each to make sure\n # the count goes up\n for k in _meta_fields_twig:\n if metawargs[k] is None or k in force_levels:\n continue\n\n ps_for_this_search = self.filter(check_visible=False,\n **{ki: metawargs[k]\n for ki in _meta_fields_twig\n if ki != k})\n\n if len(ps_for_this_search) == 1:\n # then we didn't need to use this tag\n metawargs[k] = None\n\n # and lastly, we make sure that the tag corresponding to the context\n # is present\n context = for_this_param.context\n if hasattr(for_this_param, context):\n metawargs[context] = getattr(for_this_param, context)\n\n return \"@\".join([metawargs[k]\n for k in _meta_fields_twig\n if metawargs[k] is not None])"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nattach a list of parameters or ParameterSet to this 
ParameterSet.", "response": "def _attach_params(self, params, **kwargs):\n \"\"\"Attach a list of parameters (or ParameterSet) to this ParameterSet.\n\n :parameter list params: list of parameters, or ParameterSet\n :parameter **kwargs: attributes to set for each parameter (ie tags)\n \"\"\"\n lst = params.to_list() if isinstance(params, ParameterSet) else params\n for param in lst:\n param._bundle = self\n\n for k, v in kwargs.items():\n # Here we'll set the attributes (_context, _qualifier, etc)\n if getattr(param, '_{}'.format(k)) is None:\n setattr(param, '_{}'.format(k), v)\n self._params.append(param)\n\n self._check_copy_for()\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchecking the value of copy_for and make appropriate copies.", "response": "def _check_copy_for(self):\n \"\"\"Check the value of copy_for and make appropriate copies.\"\"\"\n if not self._bundle:\n return\n\n # read the following at your own risk - I just wrote it and it still\n # confuses me and baffles me that it works\n for param in self.to_list():\n if param.copy_for:\n # copy_for tells us how to filter and what set of attributes\n # needs a copy of this parameter\n #\n # copy_for = {'kind': ['star', 'disk', 'custombody'], 'component': '*'}\n # means that this should exist for each component (since that has a wildcard) which\n # has a kind in [star, disk, custombody]\n #\n # copy_for = {'kind': ['rv_dep'], 'component': '*', 'dataset': '*'}\n # means that this should exist for each component/dataset pair with the\n # rv_dep kind\n\n attrs = [k for k,v in param.copy_for.items() if '*' in v]\n # attrs is a list of the attributes for which we need a copy of\n # this parameter for any pair\n\n ps = self._bundle.filter(check_visible=False, check_default=False, force_ps=True, **param.copy_for)\n metawargs = {k:v for k,v in ps.meta.items() if v is not None and k in attrs}\n for k,v in param.meta.items():\n if k not in ['twig', 'uniquetwig'] and k not in 
attrs:\n metawargs[k] = v\n # metawargs is a list of the shared tags that will be used to filter for\n # existing parameters so that we know whether they already exist or\n # still need to be created\n\n # logger.debug(\"_check_copy_for {}: attrs={}\".format(param.twig, attrs))\n for attrvalues in itertools.product(*(getattr(ps, '{}s'.format(attr)) for attr in attrs)):\n # logger.debug(\"_check_copy_for {}: attrvalues={}\".format(param.twig, attrvalues))\n # for each attrs[i] (ie component), attrvalues[i] (star01)\n # we need to look for this parameter, and if it does not exist\n # then create it by copying param\n\n\n for attr, attrvalue in zip(attrs, attrvalues):\n #if attrvalue=='_default' and not getattr(param, attr):\n # print \"SKIPPING\", attr, attrvalue\n # continue\n metawargs[attr] = attrvalue\n\n # logger.debug(\"_check_copy_for {}: metawargs={}\".format(param.twig, metawargs))\n if not len(self._bundle.filter(check_visible=False, **metawargs)):\n # then we need to make a new copy\n logger.debug(\"copying '{}' parameter for {}\".format(param.qualifier, {attr: attrvalue for attr, attrvalue in zip(attrs, attrvalues)}))\n\n newparam = param.copy()\n\n for attr, attrvalue in zip(attrs, attrvalues):\n setattr(newparam, '_{}'.format(attr), attrvalue)\n\n newparam._copy_for = False\n if newparam._visible_if and newparam._visible_if.lower() == 'false':\n newparam._visible_if = None\n newparam._bundle = self._bundle\n\n self._params.append(newparam)\n\n\n # Now we need to handle copying constraints. 
This can't be\n # in the previous if statement because the parameters can be\n # copied before constraints are ever attached.\n if hasattr(param, 'is_constraint') and param.is_constraint:\n\n param_constraint = param.is_constraint\n\n copied_param = self._bundle.get_parameter(check_visible=False, check_default=False, **metawargs)\n\n if not copied_param.is_constraint:\n constraint_kwargs = param_constraint.constraint_kwargs.copy()\n for attr, attrvalue in zip(attrs, attrvalues):\n if attr in constraint_kwargs.keys():\n constraint_kwargs[attr] = attrvalue\n\n logger.debug(\"copying constraint '{}' parameter for {}\".format(param_constraint.constraint_func, {attr: attrvalue for attr, attrvalue in zip(attrs, attrvalues)}))\n self.add_constraint(func=param_constraint.constraint_func, **constraint_kwargs)\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck to see if the label is allowed.", "response": "def _check_label(self, label):\n \"\"\"Check to see if the label is allowed.\"\"\"\n\n if not isinstance(label, str):\n label = str(label)\n\n if label.lower() in _forbidden_labels:\n raise ValueError(\"'{}' is forbidden to be used as a label\"\n .format(label))\n if not re.match(\"^[a-z,A-Z,0-9,_]*$\", label):\n raise ValueError(\"label '{}' is forbidden - only alphabetic, numeric, and '_' characters are allowed in labels\".format(label))\n if len(self.filter(twig=label, check_visible=False)):\n raise ValueError(\"label '{}' is already in use\".format(label))\n if label[0] in ['_']:\n raise ValueError(\"first character of label is a forbidden character\")"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef open(cls, filename):\n filename = os.path.expanduser(filename)\n f = open(filename, 'r')\n if _can_ujson:\n # NOTE: this will not parse the unicode. 
Bundle.open always calls\n # json instead of ujson for this reason.\n data = ujson.load(f)\n else:\n data = json.load(f, object_pairs_hook=parse_json)\n f.close()\n return cls(data)", "response": "Open a ParameterSet from a JSON - formatted file."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nsave the ParameterSet to a JSON - formatted ASCII file.", "response": "def save(self, filename, incl_uniqueid=False, compact=False):\n \"\"\"\n Save the ParameterSet to a JSON-formatted ASCII file\n\n :parameter str filename: relative or fullpath to the file\n :parameter bool incl_uniqueid: whether to including uniqueids in the\n file (only needed if its necessary to maintain the uniqueids when\n reloading)\n :parameter bool compact: whether to use compact file-formatting (maybe\n be quicker to save/load, but not as easily readable)\n :return: filename\n :rtype: str\n \"\"\"\n filename = os.path.expanduser(filename)\n f = open(filename, 'w')\n if compact:\n if _can_ujson:\n ujson.dump(self.to_json(incl_uniqueid=incl_uniqueid), f,\n sort_keys=False, indent=0)\n else:\n logger.warning(\"for faster compact saving, install ujson\")\n json.dump(self.to_json(incl_uniqueid=incl_uniqueid), f,\n sort_keys=False, indent=0)\n else:\n json.dump(self.to_json(incl_uniqueid=incl_uniqueid), f,\n sort_keys=True, indent=0, separators=(',', ': '))\n f.close()\n\n return filename"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nopening the web interface of the bundle in client mode.", "response": "def ui(self, client='http://localhost:4200', **kwargs):\n \"\"\"\n [NOT IMPLEMENTED]\n\n The bundle must be in client mode in order to open the web-interface.\n See :meth:`Bundle:as_client` to switch to client mode.\n\n :parameter str client: URL of the running client which must be connected\n to the same server as the bundle\n :return: URL of the parameterset of this bundle in the client (will also\n attempt to open webbrowser)\n :rtype: str\n 
\"\"\"\n if self._bundle is None or not self._bundle.is_client:\n raise ValueError(\"bundle must be in client mode\")\n\n if len(kwargs):\n return self.filter(**kwargs).ui(client=client)\n\n querystr = \"&\".join([\"{}={}\".format(k, v)\n for k, v in self._filter.items()])\n # print self._filter\n url = \"{}/{}?{}\".format(client, self._bundle._bundleid, querystr)\n\n logger.info(\"opening {} in browser\".format(url))\n webbrowser.open(url)\n return url"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nconvert the : class:`ParameterSet` to a list of class:`Parameter objects.", "response": "def to_list(self, **kwargs):\n \"\"\"\n Convert the :class:`ParameterSet` to a list of :class:`Parameter`s\n\n :return: list of class:`Parameter` objects\n \"\"\"\n if kwargs:\n return self.filter(**kwargs).to_list()\n return self._params"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nconvert the : class:`ParameterSet` to a list of the dictionary representation", "response": "def to_list_of_dicts(self, **kwargs):\n \"\"\"\n Convert the :class:`ParameterSet` to a list of the dictionary representation\n of each :class:`Parameter`\n\n :return: list of dicts\n \"\"\"\n if kwargs:\n return self.filter(**kwargs).to_list_of_dicts()\n return [param.to_dict() for param in self._params]"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef to_flat_dict(self, **kwargs):\n if kwargs:\n return self.filter(**kwargs).to_flat_dict()\n return {param.uniquetwig: param for param in self._params}", "response": "Convert the : class:`ParameterSet` to a flat dictionary with keys being\n uniquetwigs to access the parameter and values being the parameter objects themselves."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nconvert the ParameterSet to a structured dictionary with keys at and the ParameterSet of the matching items.", "response": "def to_dict(self, 
field=None, **kwargs):\n \"\"\"\n Convert the ParameterSet to a structured (nested) dictionary\n to allow traversing the structure from the bottom up\n\n :parameter str field: (optional) build the dictionary with keys at\n a given level/field. Can be any of the keys in\n :func:`meta`. If None, the keys will be the lowest\n level in which Parameters have different values.\n :return: dict of :class:`Parameter`s or :class:`ParameterSet`s\n \"\"\"\n if kwargs:\n return self.filter(**kwargs).to_dict(field=field)\n\n if field is not None:\n keys_for_this_field = set([getattr(p, field)\n for p in self.to_list()\n if getattr(p, field) is not None])\n return {k: self.filter(check_visible=False, **{field: k}) for k in keys_for_this_field}\n\n # we want to find the first level (from the bottom) in which filtering\n # further would shorten the list (ie there are more than one unique\n # item for that field)\n\n # so let's go through the fields in reverse (context up to (but not\n # including) qualifier)\n for field in reversed(_meta_fields_twig[1:]):\n # then get the unique keys in this field among the params in this\n # PS\n keys_for_this_field = set([getattr(p, field)\n for p in self.to_list()\n if getattr(p, field) is not None])\n # and if there are more than one, then return a dictionary with\n # those keys and the ParameterSet of the matching items\n if len(keys_for_this_field) > 1:\n self._next_field = field\n return {k: self.filter(check_visible=False, **{field: k})\n for k in keys_for_this_field}\n\n # if we've survived, then we're at the bottom and only have times or\n # qualifier left\n if self.context in ['hierarchy']:\n self._next_field = 'qualifier'\n return {param.qualifier: param for param in self._params}\n else:\n self._next_field = 'time'\n return {param.time: param for param in self._params}"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset the value of a Parameter in the ParameterSet.", "response": "def set(self, key, 
value, **kwargs):\n \"\"\"\n Set the value of a Parameter in the ParameterSet.\n\n If :func:`get` would retrieve a Parameter, this will set the\n value of that parameter.\n\n Or you can provide 'value@...' or 'default_unit@...', etc\n to specify what attribute to set.\n\n :parameter str key: the twig (called key here to be analagous\n to a normal dict)\n :parameter value: value to set\n :parameter **kwargs: other filter parameters (must result in\n returning a single :class:`Parameter`)\n :return: the value of the :class:`Parameter` after setting the\n new value (including converting units if applicable)\n \"\"\"\n twig = key\n\n method = None\n twigsplit = re.findall(r\"[\\w']+\", twig)\n if twigsplit[0] == 'value':\n twig = '@'.join(twigsplit[1:])\n method = 'set_value'\n elif twigsplit[0] == 'quantity':\n twig = '@'.join(twigsplit[1:])\n method = 'set_quantity'\n elif twigsplit[0] in ['unit', 'default_unit']:\n twig = '@'.join(twigsplit[1:])\n method = 'set_default_unit'\n elif twigsplit[0] in ['timederiv']:\n twig = '@'.join(twigsplit[1:])\n method = 'set_timederiv'\n elif twigsplit[0] in ['description']:\n raise KeyError(\"cannot set {} of {}\".format(twigsplit[0], '@'.join(twigsplit[1:])))\n\n if self._bundle is not None and self._bundle.get_setting('dict_set_all').get_value() and len(self.filter(twig=twig, **kwargs)) > 1:\n # then we need to loop through all the returned parameters and call set on them\n for param in self.filter(twig=twig, **kwargs).to_list():\n self.set('{}@{}'.format(method, param.twig) if method is not None else param.twig, value)\n else:\n\n if method is None:\n return self.set_value(twig=twig, value=value, **kwargs)\n else:\n param = self.get_parameter(twig=twig, **kwargs)\n\n return getattr(param, method)(value)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef to_json(self, incl_uniqueid=False):\n lst = []\n for context in _contexts:\n lst += 
[v.to_json(incl_uniqueid=incl_uniqueid)\n for v in self.filter(context=context,\n check_visible=False,\n check_default=False).to_list()]\n return lst", "response": "Convert the ParameterSet to a json - compatible dictionary."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef filter(self, twig=None, check_visible=True, check_default=True, **kwargs):\n kwargs['check_visible'] = check_visible\n kwargs['check_default'] = check_default\n kwargs['force_ps'] = True\n return self.filter_or_get(twig=twig, **kwargs)", "response": "Filter the ParameterSet based on the meta - tags of the Parameters\n and return another ParameterSet."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get(self, twig=None, check_visible=True, check_default=True, **kwargs):\n kwargs['check_visible'] = check_visible\n kwargs['check_default'] = check_default\n # print \"***\", kwargs\n ps = self.filter(twig=twig, **kwargs)\n if not len(ps):\n # TODO: custom exception?\n raise ValueError(\"0 results found\")\n elif len(ps) != 1:\n # TODO: custom exception?\n raise ValueError(\"{} results found: {}\".format(len(ps), ps.twigs))\n else:\n # then only 1 item, so return the parameter\n return ps._params[0]", "response": "This method returns a single parameter from this ParameterSet."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef exclude(self, twig=None, check_visible=True, **kwargs):\n return self - self.filter(twig=twig,\n check_visible=check_visible,\n **kwargs)", "response": "Exclude the results from this ParameterSet."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_or_create(self, qualifier, new_parameter, **kwargs):\n ps = self.filter_or_get(qualifier=qualifier, **kwargs)\n if isinstance(ps, Parameter):\n return ps, False\n elif 
len(ps):\n # TODO: custom exception?\n raise ValueError(\"more than 1 result was found\")\n else:\n self._attach_params(ParameterSet([new_parameter]), **kwargs)\n\n logger.debug(\"creating and attaching new parameter: {}\".format(new_parameter.qualifier))\n\n return self.filter_or_get(qualifier=qualifier, **kwargs), True", "response": "Get a Parameter from the ParameterSet if it does not exist create it and return it."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nremoving a Parameter from the ParameterSet", "response": "def _remove_parameter(self, param):\n \"\"\"\n Remove a Parameter from the ParameterSet\n\n :parameter param: the :class:`Parameter` object to be removed\n :type param: :class:`Parameter`\n \"\"\"\n # TODO: check to see if protected (required by a current constraint or\n # by a backend)\n self._params = [p for p in self._params if p != param]"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nremove a Parameter from the ParameterSet", "response": "def remove_parameter(self, twig=None, **kwargs):\n \"\"\"\n Remove a :class:`Parameter` from the ParameterSet\n\n Note: removing Parameters from a ParameterSet will not remove\n them from any parent ParameterSets\n (including the :class:`phoebe.frontend.bundle.Bundle`)\n\n :parameter str twig: the twig to search for the parameter\n :parameter **kwargs: meta-tags to search\n :raises ValueError: if 0 or more than 1 results are found using the\n provided search criteria.\n \"\"\"\n param = self.get(twig=twig, **kwargs)\n\n self._remove_parameter(param)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef remove_parameters_all(self, twig=None, **kwargs):\n params = self.filter(twig=twig, check_visible=False, check_default=False, **kwargs)\n\n for param in params.to_list():\n self._remove_parameter(param)", "response": "Remove all Parameters that match the search from the ParameterSet."} 
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_quantity(self, twig=None, unit=None,\n default=None, t=None, **kwargs):\n \"\"\"\n TODO: add documentation\n \"\"\"\n # TODO: for time derivatives will need to use t instead of time (time\n # gets passed to twig filtering)\n\n if default is not None is not None:\n # then we need to do a filter first to see if parameter exists\n if not len(self.filter(twig=twig, **kwargs)):\n return default\n\n param = self.get_parameter(twig=twig, **kwargs)\n\n if param.qualifier in kwargs.keys():\n # then we have an \"override\" value that was passed, and we should\n # just return that.\n # Example b.get_value('teff', teff=6000) returns 6000\n return kwargs.get(param.qualifier)\n\n return param.get_quantity(unit=unit, t=t)", "response": "get a value from the resource"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the quantity of the current parameter.", "response": "def set_quantity(self, twig=None, value=None, **kwargs):\n \"\"\"\n TODO: add documentation\n \"\"\"\n # TODO: handle twig having parameter key (value@, default_unit@, adjust@, etc)\n # TODO: does this return anything (update the docstring)?\n return self.get_parameter(twig=twig, **kwargs).set_quantity(value=value, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets the value of a ParameterSet s object in this ParameterSet.", "response": "def get_value(self, twig=None, unit=None, default=None, t=None, **kwargs):\n \"\"\"\n Get the value of a :class:`Parameter` in this ParameterSet\n\n :parameter str twig: the twig to search for the parameter\n :parameter unit: units for the returned result (if\n applicable). 
If None or not provided, the value will\n be returned in that Parameter's default_unit (if\n applicable)\n :type unit: str or astropy.units.Unit\n :parameter default: what to return if the parameter cannot be found.\n If this is None (default) then an error will be raised instead.\n Note that the units of default will not be converted.\n :parameter time: time at which to compute the\n value (will only affect time-dependent parameters). If provided\n as a float it is assumed that the units are the same as t0.\n NOTE: this is not fully supported yet, use with caution.\n :parameter **kwargs: meta-tags to search\n :return: value (type depeding on the type of the :class:`Parameter`)\n \"\"\"\n # TODO: for time derivatives will need to use t instead of time (time\n # gets passed to twig filtering)\n\n if default is not None:\n # then we need to do a filter first to see if parameter exists\n if not len(self.filter(twig=twig, **kwargs)):\n return default\n\n param = self.get_parameter(twig=twig, **kwargs)\n\n # if hasattr(param, 'default_unit'):\n # This breaks for constraint parameters\n if isinstance(param, FloatParameter) or\\\n isinstance(param,FloatArrayParameter):\n return param.get_value(unit=unit, t=t, **kwargs)\n\n return param.get_value(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef set_value(self, twig=None, value=None, **kwargs):\n # TODO: handle twig having parameter key (value@, default_unit@, adjust@, etc)\n # TODO: does this return anything (update the docstring)?\n if twig is not None and value is None:\n # then try to support value as the first argument if no matches with twigs\n if not isinstance(twig, str):\n value = twig\n twig = None\n\n elif not len(self.filter(twig=twig, check_default=check_default, **kwargs)):\n value = twig\n twig = None\n\n if \"index\" in kwargs.keys():\n return self.get_parameter(twig=twig,\n **kwargs).set_index_value(value=value,\n **kwargs)\n\n if 
\"time\" in kwargs.keys():\n if not len(self.filter(**kwargs)):\n # then let's try filtering without time and seeing if we get a\n # FloatArrayParameter so that we can use set_index_value instead\n time = kwargs.pop(\"time\")\n\n param = self.get_parameter(twig=twig, **kwargs)\n if not isinstance(param, FloatArrayParameter):\n raise TypeError\n\n # TODO: do we need to be more clever about time qualifier for\n # ETV datasets? TODO: is this robust enough... this won't search\n # for times outside the existing ParameterSet. We could also\n # try param.get_parent_ps().get_parameter('time'), but this\n # won't work when outside the bundle (which is used within\n # backends.py to set fluxes, etc) print \"***\n # get_parameter(qualifier='times', **kwargs)\", {k:v for k,v in\n # kwargs.items() if k not in ['qualifier']}\n time_param = self.get_parameter(qualifier='times', **{k:v for k,v in kwargs.items() if k not in ['qualifier']})\n index = np.where(time_param.get_value()==time)[0]\n\n return param.set_index_value(value=value, index=index, **kwargs)\n\n return self.get_parameter(twig=twig,\n **kwargs).set_value(value=value,\n **kwargs)", "response": "Set the value of a Parameter in this ParameterSet."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting the value of all returned ParameterSet s in this ParameterSet.", "response": "def set_value_all(self, twig=None, value=None, check_default=False, **kwargs):\n \"\"\"\n Set the value of all returned :class:`Parameter`s in this ParameterSet.\n\n Any :class:`Parameter` that would be included in the resulting ParameterSet\n from a :func:`filter` call with the same arguments will have\n their value set.\n\n Note: setting the value of a Parameter in a ParameterSet WILL\n change that Parameter across any parent ParameterSets (including\n the :class:`phoebe.frontend.bundle.Bundle`)\n\n :parameter str twig: the twig to search for the parameter\n :parameter value: the value to set. 
Provide units, if necessary, by\n sending a Quantity object (ie 2.4*u.rad)\n :parameter bool check_default: whether to exclude any default values.\n Defaults to False (unlike all filtering). Note that this\n acts on the current ParameterSet so any filtering done before\n this call will EXCLUDE defaults by default.\n :parameter **kwargs: meta-tags to search\n \"\"\"\n if twig is not None and value is None:\n # then try to support value as the first argument if no matches with twigs\n if not isinstance(twig, str):\n value = twig\n twig = None\n\n elif not len(self.filter(twig=twig, check_default=check_default, **kwargs)):\n value = twig\n twig = None\n\n params = self.filter(twig=twig,\n check_default=check_default,\n **kwargs).to_list()\n\n if not kwargs.pop('ignore_none', False) and not len(params):\n raise ValueError(\"no parameters found\")\n\n for param in params:\n if \"index\" in kwargs.keys():\n return self.get_parameter(twig=twig,\n **kwargs).set_index_value(value=value,\n **kwargs)\n param.set_value(value=value, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget the default unit for this tag", "response": "def get_default_unit(self, twig=None, **kwargs):\n \"\"\"\n TODO: add documentation\n \"\"\"\n return self.get_parameter(twig=twig, **kwargs).get_default_unit()"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef set_default_unit(self, twig=None, unit=None, **kwargs):\n if twig is not None and unit is None:\n # then try to support value as the first argument if no matches with twigs\n if isinstance(unit, u.Unit) or not isinstance(twig, str):\n unit = twig\n twig = None\n\n elif not len(self.filter(twig=twig, check_default=check_default, **kwargs)):\n unit = twig\n twig = None\n\n return self.get_parameter(twig=twig, **kwargs).set_default_unit(unit)", "response": "Set the default unit of the current resource."} {"SOURCE": "codesearchnet", "instruction": 
"How would you explain what the following Python 3 function does\ndef set_default_unit_all(self, twig=None, unit=None, **kwargs):\n if twig is not None and unit is None:\n # then try to support value as the first argument if no matches with twigs\n if isinstance(unit, u.Unit) or not isinstance(twig, str):\n unit = twig\n twig = None\n\n elif not len(self.filter(twig=twig, check_default=check_default, **kwargs)):\n unit = twig\n twig = None\n\n for param in self.filter(twig=twig, **kwargs).to_list():\n param.set_default_unit(unit)", "response": "Set default unit for all parameters in this resource."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets the description of the current object", "response": "def get_description(self, twig=None, **kwargs):\n \"\"\"\n TODO: add documentation\n \"\"\"\n return self.get_parameter(twig=twig, **kwargs).get_description()"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef plot(self, twig=None, **kwargs):\n if not _use_autofig:\n if os.getenv('PHOEBE_ENABLE_PLOTTING', 'TRUE').upper() != 'TRUE':\n raise ImportError(\"cannot plot because PHOEBE_ENABLE_PLOTTING environment variable is disasbled\")\n else:\n raise ImportError(\"autofig not imported, cannot plot\")\n\n # since we used the args trick above, all other options have to be in kwargs\n save = kwargs.pop('save', False)\n show = kwargs.pop('show', False)\n tight_layout = kwargs.pop('tight_layout', False)\n draw_sidebars = kwargs.pop('draw_sidebars', False)\n draw_title = kwargs.pop('draw_title', False)\n subplot_grid = kwargs.pop('subplot_grid', None)\n animate = kwargs.pop('animate', False)\n time = kwargs.get('time', None) # don't pop since time may be used for filtering\n\n if twig is not None:\n kwargs['twig'] = twig\n\n plot_kwargss = self._unpack_plotting_kwargs(**kwargs)\n\n # this loop handles any of the automatically-generated\n # multiple plotting calls, 
passing each on to autofig\n for plot_kwargs in plot_kwargss:\n y = plot_kwargs.get('y', [])\n if (isinstance(y, u.Quantity) and isinstance(y.value, float)) or (hasattr(y, 'value') and isinstance(y.value, float)):\n pass\n elif not len(y):\n # a dataset without observational data, for example\n continue\n\n autofig_method = plot_kwargs.pop('autofig_method', 'plot')\n # we kept the qualifiers around so we could do some default-logic,\n # but it isn't necessary to pass them on to autofig.\n plot_kwargs = {k:v for k,v in plot_kwargs.items() if 'qualifier' not in k}\n logger.info(\"calling autofig.{}({})\".format(autofig_method, \", \".join([\"{}={}\".format(k,v if not isinstance(v, np.ndarray) else \"\".format(v.shape)) for k,v in plot_kwargs.items()])))\n func = getattr(self.gcf(), autofig_method)\n\n func(**plot_kwargs)\n\n\n if save or show or animate:\n # NOTE: time, times, will all be included in kwargs\n try:\n return self._show_or_save(save, show, animate,\n draw_sidebars=draw_sidebars,\n draw_title=draw_title,\n tight_layout=tight_layout,\n subplot_grid=subplot_grid,\n **kwargs)\n except Exception as err:\n self.clf()\n raise err\n else:\n afig = self.gcf()\n fig = None\n\n return afig, fig", "response": "This function will plot the data for the current time and time of the current time."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _show_or_save(self, save, show, animate,\n draw_sidebars=True,\n draw_title=True,\n tight_layout=False,\n subplot_grid=None,\n **kwargs):\n \"\"\"\n Draw/animate and show and/or save a autofig plot\n \"\"\"\n if animate and not show and not save:\n logger.warning(\"setting show to True since animate=True and save not provided\")\n show = True\n\n if animate:\n # prefer times over time\n times = kwargs.get('times', kwargs.get('time', None))\n save_kwargs = kwargs.get('save_kwargs', {})\n\n if times is None:\n # then let's try to get all SYNTHETIC times\n # it would 
be nice to only do ENABLED, but then we have to worry about compute\n # it would also be nice to worry about models... but then you should filter first\n logger.info(\"no times were providing, so defaulting to animate over all dataset times\")\n times = []\n for dataset in self.datasets:\n ps = self.filter(dataset=dataset, context='model')\n if len(ps.times):\n # for the case of meshes/spectra\n times += [float(t) for t in ps.times]\n else:\n for param in ps.filter(qualifier='times').to_list():\n times += list(param.get_value())\n\n times = sorted(list(set(times)))\n\n logger.info(\"calling autofig.animate(i={}, draw_sidebars={}, draw_title={}, tight_layout={}, save={}, show={}, save_kwargs={})\".format(times, draw_sidebars, draw_title, tight_layout, save, show, save_kwargs))\n\n mplanim = self.gcf().animate(i=times,\n draw_sidebars=draw_sidebars,\n draw_title=draw_title,\n tight_layout=tight_layout,\n subplot_grid=subplot_grid,\n save=save,\n show=show,\n save_kwargs=save_kwargs)\n\n afig = self.gcf()\n\n # clear the autofig figure\n self.clf()\n\n return afig, mplanim\n\n else:\n time = kwargs.get('time', None)\n\n if isinstance(time, str):\n time = self.get_value(time, context=['component', 'system'])\n\n logger.info(\"calling autofig.draw(i={}, draw_sidebars={}, draw_title={}, tight_layout={}, save={}, show={})\".format(time, draw_sidebars, draw_title, tight_layout, save, show))\n fig = self.gcf().draw(i=time,\n draw_sidebars=draw_sidebars,\n draw_title=draw_title,\n tight_layout=tight_layout,\n subplot_grid=subplot_grid,\n save=save, show=show)\n # clear the figure so next call will start over and future shows will work\n afig = self.gcf()\n self.clf()\n\n return afig, fig", "response": "Show or save a single object in the system."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef show(self, **kwargs):\n kwargs.setdefault('show', True)\n kwargs.setdefault('save', False)\n kwargs.setdefault('animate', 
False)\n return self._show_or_save(**kwargs)", "response": "Draw and show the plot."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndraw and save the plot.", "response": "def savefig(self, filename, **kwargs):\n \"\"\"\n Draw and save the plot.\n\n :parameter str filename: filename to save to. Be careful of extensions here...\n matplotlib accepts many different image formats while other\n backends will only export to html.\n \"\"\"\n filename = os.path.expanduser(filename)\n kwargs.setdefault('show', False)\n kwargs.setdefault('save', filename)\n kwargs.setdefault('animate', False)\n return self._show_or_save(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef copy(self):\n s = self.to_json()\n cpy = parameter_from_json(s)\n # TODO: may need to subclass for Parameters that require bundle by using this line instead:\n # cpy = parameter_from_json(s, bundle=self._bundle)\n cpy.set_uniqueid(_uniqueid())\n return cpy", "response": "Deepcopy the parameter (with a new uniqueid). All other tags will remain\n the same... 
so some other tag should be changed before attaching back to\n a ParameterSet or Bundle.\n\n :return: the copied :class:`Parameter` object"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a shorter abreviated string reprentation of the parameter", "response": "def to_string_short(self):\n \"\"\"\n see also :meth:`to_string`\n\n :return: a shorter abreviated string reprentation of the parameter\n \"\"\"\n if hasattr(self, 'constrained_by') and len(self.constrained_by) > 0:\n return \"* {:>30}: {}\".format(self.uniquetwig_trunc, self.get_quantity() if hasattr(self, 'quantity') else self.get_value())\n else:\n return \"{:>32}: {}\".format(self.uniquetwig_trunc, self.get_quantity() if hasattr(self, 'quantity') else self.get_value())"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef save(self, filename, incl_uniqueid=False):\n filename = os.path.expanduser(filename)\n f = open(filename, 'w')\n json.dump(self.to_json(incl_uniqueid=incl_uniqueid), f,\n sort_keys=True, indent=0, separators=(',', ': '))\n f.close()\n\n return filename", "response": "Save the Parameter to a JSON - formatted ASCII file."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef to_json(self, incl_uniqueid=False):\n def _parse(k, v):\n \"\"\"\n \"\"\"\n if k=='value':\n if isinstance(self._value, nparray.ndarray):\n if self._value.unit is not None and hasattr(self, 'default_unit'):\n v = self._value.to(self.default_unit).to_dict()\n else:\n v = self._value.to_dict()\n if isinstance(v, u.Quantity):\n v = self.get_value() # force to be in default units\n if isinstance(v, np.ndarray):\n v = v.tolist()\n return v\n elif k=='limits':\n return [vi.value if hasattr(vi, 'value') else vi for vi in v]\n elif v is None:\n return v\n elif isinstance(v, str):\n return v\n elif isinstance(v, dict):\n return v\n elif isinstance(v, float) or 
isinstance(v, int) or isinstance(v, list):\n return v\n elif _is_unit(v):\n return str(v.to_string())\n else:\n try:\n return str(v)\n except:\n raise NotImplementedError(\"could not parse {} of '{}' to json\".format(k, self.uniquetwig))\n\n return {k: _parse(k, v) for k,v in self.to_dict().items() if (v is not None and k not in ['twig', 'uniquetwig', 'quantity'] and (k!='uniqueid' or incl_uniqueid or self.qualifier=='detached_job'))}", "response": "Returns a JSON - ready dictionary holding all information for this parameter."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_meta(self, ignore=['uniqueid']):\n return OrderedDict([(k, getattr(self, k)) for k in _meta_fields_all if k not in ignore])", "response": "Returns the meta - tag properties for this Parameter\n "} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef uniquetwig(self, ps=None):\n if ps is None:\n ps = self._bundle\n\n if ps is None:\n return self.twig\n return ps._uniquetwig(self.twig)", "response": "Returns the uniquetwig for this parameter."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef twig(self):\n return \"@\".join([getattr(self, k) for k in _meta_fields_twig if getattr(self, k) is not None])", "response": "Returns a single string with the individual\n and the meta tags separated by '@' symbols."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef is_visible(self):\n def is_visible_single(visible_if):\n # visible_if syntax: [ignore,these]qualifier:value\n\n\n if visible_if.lower() == 'false':\n return False\n\n # otherwise we need to find the parameter we're referencing and check its value\n if visible_if[0]=='[':\n remove_metawargs, visible_if = visible_if[1:].split(']')\n remove_metawargs = remove_metawargs.split(',')\n else:\n remove_metawargs = []\n\n qualifier, 
value = visible_if.split(':')\n\n if 'hierarchy.' in qualifier:\n # TODO: set specific syntax (hierarchy.get_meshables:2)\n # then this needs to do some logic on the hierarchy\n hier = self._bundle.hierarchy\n if not len(hier.get_value()):\n # then hierarchy hasn't been set yet, so we can't do any\n # of these tests\n return True\n\n method = qualifier.split('.')[1]\n\n if value in ['true', 'True']:\n value = True\n elif value in ['false', 'False']:\n value = False\n\n return getattr(hier, method)(self.component) == value\n\n else:\n\n # the parameter needs to have all the same meta data except qualifier\n # TODO: switch this to use self.get_parent_ps ?\n metawargs = {k:v for k,v in self.get_meta(ignore=['twig', 'uniquetwig', 'uniqueid']+remove_metawargs).items() if v is not None}\n metawargs['qualifier'] = qualifier\n # metawargs['twig'] = None\n # metawargs['uniquetwig'] = None\n # metawargs['uniqueid'] = None\n # if metawargs.get('component', None) == '_default':\n # metawargs['component'] = None\n\n try:\n # this call is quite expensive and bloats every get_parameter(check_visible=True)\n param = self._bundle.get_parameter(check_visible=False, check_default=False, **metawargs)\n except ValueError:\n # let's not let this hold us up - sometimes this can happen when copying\n # parameters (from copy_for) in order that the visible_if parameter\n # happens later\n logger.debug(\"parameter not found when trying to determine if visible, {}\".format(metawargs))\n return True\n\n #~ print \"***\", qualifier, param.qualifier, param.get_value(), value\n\n if isinstance(param, BoolParameter):\n if value in ['true', 'True']:\n value = True\n elif value in ['false', 'False']:\n value = False\n\n\n if isinstance(value, str) and value[0] in ['!', '~']:\n return param.get_value() != value[1:]\n elif value=='':\n return len(param.get_value()) > 0\n else:\n return param.get_value() == value\n\n\n if self.visible_if is None:\n return True\n\n if not self._bundle:\n # then we may 
not be able to do the check, for now let's just return True\n return True\n\n return np.all([is_visible_single(visible_if_i) for visible_if_i in self.visible_if.split(',')])", "response": "returns True if this parameter is currently visible"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_parent_ps(self):\n if self._bundle is None:\n return None\n\n metawargs = {k:v for k,v in self.meta.items() if k not in ['qualifier', 'twig', 'uniquetwig']}\n\n return self._bundle.filter(**metawargs)", "response": "Return a : class:`ParameterSet` of all Parameters in the same\n meta - tags except qualifier twig and uniquetwig"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nexpand the selection to account for wildcards", "response": "def expand_value(self, **kwargs):\n \"\"\"\n expand the selection to account for wildcards\n \"\"\"\n selection = []\n for v in self.get_value(**kwargs):\n for choice in self.choices:\n if v==choice and choice not in selection:\n selection.append(choice)\n elif fnmatch(choice, v) and choice not in selection:\n selection.append(choice)\n\n return selection"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate the value to remove any that are not valid", "response": "def remove_not_valid_selections(self):\n \"\"\"\n update the value to remove any that are (no longer) valid\n \"\"\"\n value = [v for v in self.get_value() if self.valid_selection(v)]\n self.set_value(value)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef within_limits(self, value):\n\n return (self.limits[0] is None or value >= self.limits[0]) and (self.limits[1] is None or value <= self.limits[1])", "response": "check whether a value falls within the set limits"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef within_limits(self, 
value):\n\n if isinstance(value, int) or isinstance(value, float):\n value = value * self.default_unit\n\n return (self.limits[0] is None or value >= self.limits[0]) and (self.limits[1] is None or value <= self.limits[1])", "response": "check whether a value falls within the set limits"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the expression of the constraint that constrains this parameter", "response": "def is_constraint(self):\n \"\"\"\n returns the expression of the constraint that constrains this parameter\n \"\"\"\n if self._is_constraint is None:\n return None\n return self._bundle.get_parameter(context='constraint', uniqueid=self._is_constraint)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef constrained_by(self):\n if self._is_constraint is None:\n return []\n params = []\n for var in self.is_constraint._vars:\n param = var.get_parameter()\n if param.uniqueid != self.uniqueid:\n params.append(param)\n return params", "response": "returns a list of parameters that constrain this parameter"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a list of the expressions in which this parameter constrains another", "response": "def in_constraints(self):\n \"\"\"\n returns a list of the expressions in which this parameter constrains another\n \"\"\"\n expressions = []\n for uniqueid in self._in_constraints:\n expressions.append(self._bundle.get_parameter(context='constraint', uniqueid=uniqueid))\n return expressions"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a list of parameters that are constrained by this parameter", "response": "def constrains(self):\n \"\"\"\n returns a list of parameters that are constrained by this parameter\n \"\"\"\n params = []\n for constraint in self.in_constraints:\n for var in constraint._vars:\n param = var.get_parameter()\n if param.component == constraint.component 
and param.qualifier == constraint.qualifier:\n if param not in params and param.uniqueid != self.uniqueid:\n params.append(param)\n return params"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a list of all parameters that are either constrained by or constrain this parameter", "response": "def related_to(self):\n \"\"\"\n returns a list of all parameters that are either constrained by or constrain this parameter\n \"\"\"\n params = []\n constraints = self.in_constraints\n if self.is_constraint is not None:\n constraints.append(self.is_constraint)\n\n for constraint in constraints:\n for var in constraint._vars:\n param = var.get_parameter()\n if param not in params and param.uniqueid != self.uniqueid:\n params.append(param)\n\n return params"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef to_string_short(self):\n opt = np.get_printoptions()\n np.set_printoptions(threshold=8, edgeitems=3, linewidth=opt['linewidth']-len(self.uniquetwig)-2)\n str_ = super(FloatArrayParameter, self).to_string_short()\n np.set_printoptions(**opt)\n return str_", "response": "returns a shorter abreviated string reprentation of the parameter\n "} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef interp_value(self, **kwargs):\n # TODO: add support for units\n # TODO: add support for non-linear interpolation (probably would need to use scipy)?\n # TODO: add support for interpolating in phase_space\n\n if len(kwargs.keys()) > 1:\n raise KeyError(\"interp_value only takes a single qualifier-value pair\")\n\n qualifier, qualifier_interp_value = kwargs.items()[0]\n\n if isinstance(qualifier_interp_value, str):\n # then assume its a twig and try to resolve\n # for example: time='t0_supconj'\n qualifier_interp_value = self._bundle.get_value(qualifier_interp_value, context=['system', 'component'])\n\n parent_ps = self.get_parent_ps()\n\n if 
qualifier not in parent_ps.qualifiers:\n # TODO: handle plural to singular (having to say\n # interp_value(times=5) is awkward)\n raise KeyError(\"'{}' not valid qualifier (must be one of {})\".format(qualifier, parent_ps.qualifiers))\n\n qualifier_parameter = parent_ps.get(qualifier=qualifier)\n\n if not isinstance(qualifier_parameter, FloatArrayParameter):\n raise KeyError(\"'{}' does not point to a FloatArrayParameter\".format(qualifier))\n\n return np.interp(qualifier_interp_value, qualifier_parameter.get_value(), self.get_value())", "response": "Interpolate to find the value in THIS array given a value from the parameter set ANOTHER array."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset any property of the underlying nparray object", "response": "def set_property(self, **kwargs):\n \"\"\"\n set any property of the underlying nparray object\n \"\"\"\n if not isinstance(self._value, nparray.ndarray):\n raise ValueError(\"value is not a nparray object\")\n\n for property, value in kwargs.items():\n setattr(self._value, property, value)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _parse_repr(self):\n\n repr_ = self.get_value()\n repr_str = '[\"{}\"]'.format(repr_.replace(', ', '\", \"').replace('(', '\", [\"').replace(')', '\"]')).replace(']\"', '\"]').replace('\"\"', '\"').replace(']\"', ']')\n return json.loads(repr_str)", "response": "parse the representation of the node"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngiving a nested structure from _parse_repr and find the trace route to get to item", "response": "def _recurse_find_trace(self, structure, item, trace=[]):\n \"\"\"\n given a nested structure from _parse_repr and find the trace route to get to item\n \"\"\"\n\n try:\n i = structure.index(item)\n except ValueError:\n for j,substructure in enumerate(structure):\n if isinstance(substructure, list):\n 
return self._recurse_find_trace(substructure, item, trace+[j])\n else:\n return trace+[i]"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _get_by_trace(self, structure, trace):\n for i in trace:\n structure = structure[i]\n\n return structure", "response": "retrieve an item from the nested structure given a trace"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting the component of all stars in order primary -> secondary", "response": "def get_stars(self):\n \"\"\"\n get 'component' of all stars in order primary -> secondary\n \"\"\"\n l = re.findall(r\"[\\w']+\", self.get_value())\n # now search for indices of star and take the next entry from this flat list\n return [l[i+1] for i,s in enumerate(l) if s=='star']"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_orbits(self):\n #~ l = re.findall(r\"[\\w']+\", self.get_value())\n # now search for indices of orbit and take the next entry from this flat list\n #~ return [l[i+1] for i,s in enumerate(l) if s=='orbit']\n orbits = []\n for star in self.get_stars():\n parent = self.get_parent_of(star)\n if parent not in orbits and parent!='component' and parent is not None:\n orbits.append(parent)\n return orbits", "response": "get list of all orbits in order primary -> secondary"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_children_of(self, component, kind=None):\n\n structure, trace, item = self._get_structure_and_trace(component)\n item_kind, item_label = item.split(':')\n\n if isinstance(kind, str):\n kind = [kind]\n\n if item_kind not in ['orbit']:\n # return None\n return []\n else:\n items = self._get_by_trace(structure, trace[:-1]+[trace[-1]+1])\n # we want to ignore suborbits\n #return [str(ch.split(':')[-1]) for ch in items if isinstance(ch, unicode)]\n return 
[str(ch.split(':')[-1]) for ch in items if isinstance(ch, unicode) and (kind is None or ch.split(':')[0] in kind)]", "response": "get to component labels of the children of a given component"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_child_of(self, component, ind, kind=None):\n children = self.get_children_of(component, kind=kind)\n if children is None:\n return None\n else:\n return children[ind]", "response": "get a child of a given component"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn whether a given component is the primary or secondary", "response": "def get_primary_or_secondary(self, component, return_ind=False):\n \"\"\"\n return whether a given component is the 'primary' or 'secondary'\n component in its parent orbit\n \"\"\"\n parent = self.get_parent_of(component)\n if parent is None:\n # then this is a single component, not in a binary\n return 'primary'\n\n children_of_parent = self.get_children_of(parent)\n\n ind = children_of_parent.index(component)\n\n if ind > 1:\n return None\n\n if return_ind:\n return ind + 1\n\n return ['primary', 'secondary'][ind]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a list of components that are meshable", "response": "def get_meshables(self):\n \"\"\"\n return a list of components that are meshable (generally stars, but handles\n the envelope for an contact_binary)\n \"\"\"\n\n l = re.findall(r\"[\\w']+\", self.get_value())\n # now search for indices of star and take the next entry from this flat list\n meshables = [l[i+1] for i,s in enumerate(l) if s in ['star', 'envelope']]\n\n # now we want to remove any star which has a sibling envelope\n has_sibling_envelope = []\n for item in meshables:\n if self.get_sibling_of(item, kind='envelope'):\n has_sibling_envelope.append(item)\n\n return [m for m in meshables if m not in has_sibling_envelope]"} {"SOURCE": "codesearchnet", 
"instruction": "Create a Python 3 function to\nreturn True if any component is part of a contact_binary", "response": "def is_contact_binary(self, component):\n \"\"\"\n especially useful for constraints\n\n tells whether any component (star, envelope) is part of a contact_binary\n by checking its siblings for an envelope\n \"\"\"\n if component not in self._is_contact_binary.keys():\n self._update_cache()\n\n return self._is_contact_binary.get(component)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning True if the component is a binary component.", "response": "def is_binary(self, component):\n \"\"\"\n especially useful for constraints\n\n tells whether any component (star, envelope) is part of a binary\n by checking its parent\n \"\"\"\n if component not in self._is_binary.keys():\n self._update_cache()\n\n return self._is_binary.get(component)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef vars(self):\n # cache _var_params\n if self._var_params is None:\n self._var_params = ParameterSet([var.get_parameter() for var in self._vars])\n return self._var_params", "response": "return all the variables in a PS\n "} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets a parameter from those that are variables", "response": "def get_parameter(self, twig=None, **kwargs):\n \"\"\"\n get a parameter from those that are variables\n \"\"\"\n kwargs['twig'] = twig\n kwargs['check_default'] = False\n kwargs['check_visible'] = False\n ps = self.vars.filter(**kwargs)\n if len(ps)==1:\n return ps.get(check_visible=False, check_default=False)\n elif len(ps) > 1:\n # TODO: is this safe? Some constraints may have a parameter listed\n # twice, so we can do this then, but maybe should check to make sure\n # all items have the same uniqueid? 
Maybe check len(ps.uniqueids)?\n return ps.to_list()[0]\n else:\n raise KeyError(\"no result found\")"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nflip the constraint to solve for any of the parameters in the expression", "response": "def flip_for(self, twig=None, expression=None, **kwargs):\n \"\"\"\n flip the constraint to solve for for any of the parameters in the expression\n\n expression (optional if sympy available, required if not)\n \"\"\"\n\n _orig_expression = self.get_value()\n\n # try to get the parameter from the bundle\n kwargs['twig'] = twig\n newly_constrained_var = self._get_var(**kwargs)\n newly_constrained_param = self.get_parameter(**kwargs)\n\n check_kwargs = {k:v for k,v in newly_constrained_param.meta.items() if k not in ['context', 'twig', 'uniquetwig']}\n check_kwargs['context'] = 'constraint'\n if len(self._bundle.filter(**check_kwargs)):\n raise ValueError(\"'{}' is already constrained\".format(newly_constrained_param.twig))\n\n currently_constrained_var = self._get_var(qualifier=self.qualifier, component=self.component)\n currently_constrained_param = currently_constrained_var.get_parameter() # or self.constrained_parameter\n\n import constraint\n if self.constraint_func is not None and hasattr(constraint, self.constraint_func):\n # then let's see if the method is capable of resolving for use\n # try:\n if True:\n # TODO: this is not nearly general enough, each method takes different arguments\n # and getting solve_for as newly_constrained_param.qualifier\n\n lhs, rhs, constraint_kwargs = getattr(constraint, self.constraint_func)(self._bundle, solve_for=newly_constrained_param, **self.constraint_kwargs)\n # except NotImplementedError:\n # pass\n # else:\n # TODO: this needs to be smarter and match to self._get_var().user_label instead of the current uniquetwig\n\n expression = rhs._value # safe expression\n #~ print \"*** flip by recalling method success!\", expression\n\n # print \"***\", lhs._value, 
rhs._value\n\n if expression is not None:\n expression = expression\n\n elif _use_sympy:\n\n\n eq_safe = \"({}) - {}\".format(self._value, currently_constrained_var.safe_label)\n\n #~ print \"*** solving {} for {}\".format(eq_safe, newly_constrained_var.safe_label)\n\n expression = sympy.solve(eq_safe, newly_constrained_var.safe_label)[0]\n\n #~ print \"*** solution: {}\".format(expression)\n\n else:\n # TODO: ability for built-in constraints to flip themselves\n # we could access self.kind and re-call that with a new solve_for option?\n raise ValueError(\"must either have sympy installed or provide a new expression\")\n\n self._qualifier = newly_constrained_param.qualifier\n self._component = newly_constrained_param.component\n self._kind = newly_constrained_param.kind\n\n self._value = str(expression)\n # reset the default_unit so that set_default_unit doesn't complain\n # about incompatible units\n self._default_unit = None\n self.set_default_unit(newly_constrained_param.default_unit)\n\n self._update_bookkeeping()\n\n self._add_history(redo_func='flip_constraint', redo_kwargs={'expression': expression, 'uniqueid': newly_constrained_param.uniqueid}, undo_func='flip_constraint', undo_kwargs={'expression': _orig_expression, 'uniqueid': currently_constrained_param.uniqueid})"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_status(self):\n if self._value == 'loaded':\n status = 'loaded'\n\n elif not _is_server and self._bundle is not None and self._server_status is not None:\n if not _can_requests:\n raise ImportError(\"requests module required for external jobs\")\n\n if self._value in ['complete']:\n # then we have no need to bother checking again\n status = self._value\n else:\n url = self._server_status\n logger.info(\"checking job status on server from {}\".format(url))\n # \"{}/{}/parameters/{}\".format(server, bundleid, self.uniqueid)\n r = requests.get(url, timeout=5)\n try:\n rjson = 
r.json()\n except ValueError:\n # TODO: better exception here - perhaps look for the status code from the response?\n status = self._value\n else:\n status = rjson['data']['attributes']['value']\n\n else:\n\n if self.status_method == 'exists':\n output_exists = os.path.isfile(\"_{}.out\".format(self.uniqueid))\n if output_exists:\n status = 'complete'\n else:\n status = 'unknown'\n else:\n raise NotImplementedError\n\n # here we'll set the value to be the latest CHECKED status for the sake\n # of exporting to JSON and updating the status for clients. get_value\n # will still call status so that it will return the CURRENT value.\n self._value = status\n return status", "response": "Returns the status of the current job."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a new form with no data and files.", "response": "def get_unbound_form(self):\n \"\"\"\n Overrides behavior of FormView.get_form_kwargs\n when method is POST or PUT\n \"\"\"\n form_kwargs = self.get_form_kwargs()\n # @@@ remove fields that would cause the form to be bound\n # when instantiated\n bound_fields = [\"data\", \"files\"]\n for field in bound_fields:\n form_kwargs.pop(field, None)\n return self.get_form_class()(**form_kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the JSON data returned when a valid form submission occurs.", "response": "def get_form_success_data(self, form):\n \"\"\"\n Allows customization of the JSON data returned when a valid form submission occurs.\n \"\"\"\n data = {\n \"html\": render_to_string(\n \"pinax/teams/_invite_form.html\",\n {\n \"invite_form\": self.get_unbound_form(),\n \"team\": self.team\n },\n request=self.request\n )\n }\n\n membership = self.membership\n if membership is not None:\n if membership.state == Membership.STATE_APPLIED:\n fragment_class = \".applicants\"\n elif membership.state == Membership.STATE_INVITED:\n fragment_class = \".invitees\"\n elif 
membership.state in (Membership.STATE_AUTO_JOINED, Membership.STATE_ACCEPTED):\n fragment_class = {\n Membership.ROLE_OWNER: \".owners\",\n Membership.ROLE_MANAGER: \".managers\",\n Membership.ROLE_MEMBER: \".members\"\n }[membership.role]\n data.update({\n \"append-fragments\": {\n fragment_class: render_to_string(\n \"pinax/teams/_membership.html\",\n {\n \"membership\": membership,\n \"team\": self.team\n },\n request=self.request\n )\n }\n })\n return data"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nmake sure to get a float make sure to get a float make sure to get a float make sure to get a float make sure to get a float", "response": "def _value(obj):\n \"\"\"\n make sure to get a float\n \"\"\"\n # TODO: this is ugly and makes everything ugly\n # can we handle this with a clean decorator or just requiring that only floats be passed??\n if hasattr(obj, 'value'):\n return obj.value\n elif isinstance(obj, np.ndarray):\n return np.array([o.value for o in obj])\n elif hasattr(obj, '__iter__'):\n return [_value(o) for o in obj]\n return obj"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _estimate_delta(ntriangles, area):\n return np.sqrt(4./np.sqrt(3) * float(area) / float(ntriangles))", "response": "estimate the value for delta to send to marching based on the number of triangles requested and the surface area of the requested triangles"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef from_bundle(cls, b, compute=None, datasets=[], **kwargs):\n\n hier = b.hierarchy\n\n if not len(hier.get_value()):\n raise NotImplementedError(\"Meshing requires a hierarchy to exist\")\n\n\n # now pull general compute options\n if compute is not None:\n if isinstance(compute, str):\n compute_ps = b.get_compute(compute, check_visible=False)\n else:\n # then hopefully compute is the parameterset\n compute_ps = compute\n 
eclipse_method = compute_ps.get_value(qualifier='eclipse_method', **kwargs)\n horizon_method = compute_ps.get_value(qualifier='horizon_method', check_visible=False, **kwargs)\n dynamics_method = compute_ps.get_value(qualifier='dynamics_method', **kwargs)\n irrad_method = compute_ps.get_value(qualifier='irrad_method', **kwargs)\n boosting_method = compute_ps.get_value(qualifier='boosting_method', **kwargs)\n if conf.devel:\n mesh_init_phi = compute_ps.get_value(qualifier='mesh_init_phi', unit=u.rad, **kwargs)\n else:\n mesh_init_phi = 0.0\n else:\n eclipse_method = 'native'\n horizon_method = 'boolean'\n dynamics_method = 'keplerian'\n irrad_method = 'none'\n boosting_method = 'none'\n mesh_init_phi = 0.0\n\n # NOTE: here we use globals()[Classname] because getattr doesn't work in\n # the current module - now this doesn't really make sense since we only\n # support stars, but eventually the classname could be Disk, Spot, etc\n if 'dynamics_method' in kwargs.keys():\n # already set as default above\n _dump = kwargs.pop('dynamics_method')\n\n meshables = hier.get_meshables()\n def get_distortion_method(hier, compute_ps, component, **kwargs):\n if hier.get_kind_of(component) in ['envelope']:\n return 'roche'\n\n if compute_ps.get_value('mesh_method', component=component, **kwargs)=='wd':\n return 'roche'\n\n return compute_ps.get_value('distortion_method', component=component, **kwargs)\n\n bodies_dict = {comp: globals()[_get_classname(hier.get_kind_of(comp), get_distortion_method(hier, compute_ps, comp, **kwargs))].from_bundle(b, comp, compute, dynamics_method=dynamics_method, mesh_init_phi=mesh_init_phi, datasets=datasets, **kwargs) for comp in meshables}\n\n # envelopes need to know their relationships with the underlying stars\n parent_envelope_of = {}\n for meshable in meshables:\n if hier.get_kind_of(meshable) == 'envelope':\n for starref in hier.get_siblings_of(meshable):\n parent_envelope_of[starref] = meshable\n\n return cls(bodies_dict, 
eclipse_method=eclipse_method,\n horizon_method=horizon_method,\n dynamics_method=dynamics_method,\n irrad_method=irrad_method,\n boosting_method=boosting_method,\n parent_envelope_of=parent_envelope_of)", "response": "Build a system from a bundle."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_body(self, component):\n if component in self._bodies.keys():\n return self._bodies[component]\n else:\n # then hopefully we're a child star of an contact_binary envelope\n parent_component = self._parent_envelope_of[component]\n return self._bodies[parent_component].get_half(component)", "response": "get_body - Get the body of a given component"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef update_positions(self, time, xs, ys, zs, vxs, vys, vzs,\n ethetas, elongans, eincls,\n ds=None, Fs=None, ignore_effects=False):\n \"\"\"\n TODO: add documentation\n\n all arrays should be for the current time, but iterable over all bodies\n \"\"\"\n self.xs = np.array(_value(xs))\n self.ys = np.array(_value(ys))\n self.zs = np.array(_value(zs))\n\n for starref,body in self.items():\n body.update_position(time, xs, ys, zs, vxs, vys, vzs,\n ethetas, elongans, eincls,\n ds=ds, Fs=Fs, ignore_effects=ignore_effects)", "response": "Update the internal state of the internal state of the object."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef populate_observables(self, time, kinds, datasets, ignore_effects=False):\n\n\n if self.irrad_method is not 'none' and not ignore_effects:\n # TODO: only for kinds that require intensities (i.e. 
not orbit or\n # dynamical RVs, etc)\n self.handle_reflection()\n\n for kind, dataset in zip(kinds, datasets):\n for starref, body in self.items():\n body.populate_observable(time, kind, dataset)", "response": "Populates the observable objects of all the objects in this object with the given time."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndetects the eclipses at the horizon and the eclipsed triangles and handle them if they are even possible.", "response": "def handle_eclipses(self, expose_horizon=False, **kwargs):\n \"\"\"\n Detect the triangles at the horizon and the eclipsed triangles, handling\n any necessary subdivision.\n\n :parameter str eclipse_method: name of the algorithm to use to detect\n the horizon or eclipses (defaults to the value set by computeoptions)\n :parameter str subdiv_alg: name of the algorithm to use for subdivision\n (defaults to the value set by computeoptions)\n :parameter int subdiv_num: number of subdivision iterations (defaults\n the value set by computeoptions)\n \"\"\"\n\n eclipse_method = kwargs.get('eclipse_method', self.eclipse_method)\n horizon_method = kwargs.get('horizon_method', self.horizon_method)\n\n # Let's first check to see if eclipses are even possible at these\n # positions. 
If they are not, then we only have to do horizon\n #\n # To do that, we'll take the conservative max_r for each object\n # and their current positions, and see if the separations are larger\n # than sum of max_rs\n possible_eclipse = False\n if len(self.bodies) == 1:\n if self.bodies[0].__class__.__name__ == 'Envelope':\n possible_eclipse = True\n else:\n possible_eclipse = False\n else:\n logger.debug(\"system.handle_eclipses: determining if eclipses are possible from instantaneous_maxr\")\n max_rs = [body.instantaneous_maxr for body in self.bodies]\n # logger.debug(\"system.handle_eclipses: max_rs={}\".format(max_rs))\n for i in range(0, len(max_rs)-1):\n for j in range(i+1, len(max_rs)):\n proj_sep_sq = sum([(c[i]-c[j])**2 for c in (self.xs,self.ys)])\n max_sep_ecl = max_rs[i] + max_rs[j]\n\n if proj_sep_sq < (1.05*max_sep_ecl)**2:\n # then this pair has the potential for eclipsing triangles\n possible_eclipse = True\n break\n\n if not possible_eclipse and not expose_horizon and horizon_method=='boolean':\n eclipse_method = 'only_horizon'\n\n # meshes is an object which allows us to easily access and update columns\n # in the meshes *in memory*. 
That is meshes.update_columns will propogate\n # back to the current mesh for each body.\n meshes = self.meshes\n\n # Reset all visibilities to be fully visible to start\n meshes.update_columns('visiblities', 1.0)\n\n ecl_func = getattr(eclipse, eclipse_method)\n\n if eclipse_method=='native':\n ecl_kwargs = {'horizon_method': horizon_method}\n else:\n ecl_kwargs = {}\n\n logger.debug(\"system.handle_eclipses: possible_eclipse={}, expose_horizon={}, calling {} with kwargs {}\".format(possible_eclipse, expose_horizon, eclipse_method, ecl_kwargs))\n\n visibilities, weights, horizon = ecl_func(meshes,\n self.xs, self.ys, self.zs,\n expose_horizon=expose_horizon,\n **ecl_kwargs)\n\n # NOTE: analytic horizons are called in backends.py since they don't\n # actually depend on the mesh at all.\n\n # visiblilities here is a dictionary with keys being the component\n # labels and values being the np arrays of visibilities. We can pass\n # this dictionary directly and the columns will be applied respectively.\n meshes.update_columns('visibilities', visibilities)\n\n # weights is also a dictionary with keys being the component labels\n # and values and np array of weights.\n if weights is not None:\n meshes.update_columns('weights', weights)\n\n return horizon"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef observe(self, dataset, kind, components=None, distance=1.0, l3=0.0, **kwargs):\n\n meshes = self.meshes\n if kind=='lp':\n def sv(p, p0, w):\n # Subsidiary variable:\n return (p0-p)/(w/2)\n\n def lorentzian(sv):\n return 1-1./(1+sv**2)\n\n def gaussian(sv):\n return 1-np.exp(-np.log(2)*sv**2)\n\n profile_func = kwargs.get('profile_func')\n profile_rest = kwargs.get('profile_rest')\n profile_sv = kwargs.get('profile_sv')\n wavelengths = kwargs.get('wavelengths')\n if profile_func == 'gaussian':\n func = gaussian\n elif profile_func == 'lorentzian':\n func = lorentzian\n else:\n raise 
NotImplementedError(\"profile_func='{}' not supported\".format(profile_func))\n\n visibilities = meshes.get_column_flat('visibilities', components)\n\n abs_intensities = meshes.get_column_flat('abs_intensities:{}'.format(dataset), components)\n # mus here will be from the tnormals of the triangle and will not\n # be weighted by the visibility of the triangle\n mus = meshes.get_column_flat('mus', components)\n areas = meshes.get_column_flat('areas_si', components)\n ldint = meshes.get_column_flat('ldint:{}'.format(dataset), components)\n\n rvs = (meshes.get_column_flat(\"rvs:{}\".format(dataset), components)*u.solRad/u.d).to(u.m/u.s).value\n dls = rvs*profile_rest/c.c.si.value\n\n line = func(sv(wavelengths, profile_rest, profile_sv))\n lines = np.array([np.interp(wavelengths, wavelengths+dl, line) for dl in dls])\n if not np.any(visibilities):\n avg_line = np.full_like(wavelengths, np.nan)\n else:\n avg_line = np.average(lines, axis=0, weights=abs_intensities*areas*mus*ldint*visibilities)\n\n return {'flux_densities': avg_line}\n\n\n elif kind=='rv':\n visibilities = meshes.get_column_flat('visibilities', components)\n\n if np.all(visibilities==0):\n # then no triangles are visible, so we should return nan\n return {'rv': np.nan}\n\n rvs = meshes.get_column_flat(\"rvs:{}\".format(dataset), components)\n abs_intensities = meshes.get_column_flat('abs_intensities:{}'.format(dataset), components)\n # mus here will be from the tnormals of the triangle and will not\n # be weighted by the visibility of the triangle\n mus = meshes.get_column_flat('mus', components)\n areas = meshes.get_column_flat('areas_si', components)\n ldint = meshes.get_column_flat('ldint:{}'.format(dataset), components)\n # NOTE: don't need ptfarea because its a float (same for all\n # elements, regardless of component)\n\n # NOTE: the intensities are already projected but are per unit area\n # so we need to multiply by the /projected/ area of each triangle (thus the extra mu)\n return {'rv': 
np.average(rvs, weights=abs_intensities*areas*mus*ldint*visibilities)}\n\n elif kind=='lc':\n visibilities = meshes.get_column_flat('visibilities')\n\n if np.all(visibilities==0):\n # then no triangles are visible, so we should return nan -\n # probably shouldn't ever happen for lcs\n return {'flux': np.nan}\n\n intensities = meshes.get_column_flat(\"intensities:{}\".format(dataset), components)\n mus = meshes.get_column_flat('mus', components)\n areas = meshes.get_column_flat('areas_si', components)\n ldint = meshes.get_column_flat('ldint:{}'.format(dataset), components)\n\n # assume that all bodies are using the same passband and therefore\n # will have the same ptfarea. If this assumption is ever a problem -\n # then we will need to build a flat column based on the component\n # of each element so that ptfarea is an array with the same shape\n # as those above\n if isinstance(self.bodies[0], Envelope):\n # for envelopes, we'll make the same assumption and just grab\n # that value stored in the first \"half\"\n ptfarea = self.bodies[0]._halves[0].get_ptfarea(dataset)\n else:\n ptfarea = self.bodies[0].get_ptfarea(dataset)\n\n # intens_proj is the intensity in the direction of the observer per unit surface area of the triangle\n # areas is the area of each triangle\n # areas*mus is the area of each triangle projected in the direction of the observer\n # visibilities is 0 for hidden, 0.5 for partial, 1.0 for visible\n # areas*mus*visibilities is the visibile projected area of each triangle (ie half the area for a partially-visible triangle)\n # so, intens_proj*areas*mus*visibilities is the intensity in the direction of the observer per the observed projected area of that triangle\n # and the sum of these values is the observed flux\n\n # note that the intensities are already projected but are per unit area\n # so we need to multiply by the /projected/ area of each triangle (thus the extra mu)\n\n return {'flux': 
np.sum(intensities*areas*mus*visibilities)*ptfarea/(distance**2)+l3}\n\n else:\n raise NotImplementedError(\"observe for dataset with kind '{}' not implemented\".format(kind))", "response": "This method returns a dictionary of observable values for a given set of components."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns the maximum r of the instantaneous nucleus of the species.", "response": "def instantaneous_maxr(self):\n \"\"\"\n Recall the maximum r (triangle furthest from the center of the star) of\n this star at the given time\n\n :return: maximum r\n :rtype: float\n \"\"\"\n logger.debug(\"{}.instantaneous_maxr\".format(self.component))\n\n if 'maxr' not in self.inst_vals.keys():\n logger.debug(\"{}.instantaneous_maxr COMPUTING\".format(self.component))\n\n self.inst_vals['maxr'] = max(self.mesh.rs.centers*self._scale)\n\n return self.inst_vals['maxr']"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _get_mass_by_index(self, index):\n if hasattr(index, '__iter__'):\n return sum([self.masses[i] for i in index])\n else:\n return self.masses[index]", "response": "returns the mass by index"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets coordinates by index", "response": "def _get_coords_by_index(self, coords_array, index):\n \"\"\"\n where index can either by an integer or a list of integers (returns some of masses)\n coords_array should be a single array (xs, ys, or zs)\n \"\"\"\n if hasattr(index, '__iter__'):\n # then we want the center-of-mass coordinates\n # TODO: clean this up\n return np.average([_value(coords_array[i]) for i in index],\n weights=[self._get_mass_by_index(i) for i in index])\n else:\n return coords_array[index]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_standard_mesh(self, scaled=True):\n # TODO: allow this 
to take etheta and retreive a mesh at that true anomaly\n theta = 0.0\n protomesh = self._standard_meshes[theta] #.copy() # if theta in self._standard_meshes.keys() else self.mesh.copy()\n\n if scaled:\n # TODO: be careful about self._scale... we may want self._instantaneous_scale\n return mesh.ScaledProtoMesh.from_proto(protomesh, self._scale)\n else:\n return protomesh.copy()", "response": "Returns a copy of the standard mesh."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef reset_time(self, time, true_anom, elongan, eincl):\n self.true_anom = true_anom\n self.elongan = elongan\n self.eincl = eincl\n self.time = time\n self.populated_at_time = []\n\n self.reset()\n\n return", "response": "reset the time of the current instance"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate the position of the star into its orbit", "response": "def update_position(self, time,\n xs, ys, zs, vxs, vys, vzs,\n ethetas, elongans, eincls,\n ds=None, Fs=None,\n ignore_effects=False,\n component_com_x=None,\n **kwargs):\n \"\"\"\n Update the position of the star into its orbit\n\n :parameter float time: the current time\n :parameter list xs: a list/array of x-positions of ALL COMPONENTS in the :class:`System`\n :parameter list ys: a list/array of y-positions of ALL COMPONENTS in the :class:`System`\n :parameter list zs: a list/array of z-positions of ALL COMPONENTS in the :class:`System`\n :parameter list vxs: a list/array of x-velocities of ALL COMPONENTS in the :class:`System`\n :parameter list vys: a list/array of y-velocities of ALL COMPONENTS in the :class:`System`\n :parameter list vzs: a list/array of z-velocities of ALL COMPONENTS in the :class:`System`\n :parameter list ethetas: a list/array of euler-thetas of ALL COMPONENTS in the :class:`System`\n :parameter list elongans: a list/array of euler-longans of ALL COMPONENTS in the :class:`System`\n :parameter list 
eincls: a list/array of euler-incls of ALL COMPONENTS in the :class:`System`\n :parameter list ds: (optional) a list/array of instantaneous distances of ALL COMPONENTS in the :class:`System`\n :parameter list Fs: (optional) a list/array of instantaneous syncpars of ALL COMPONENTS in the :class:`System`\n \"\"\"\n\n self.reset_time(time, ethetas[self.ind_self], elongans[self.ind_self], eincls[self.ind_self])\n\n #-- Get current position/euler information\n # TODO: get rid of this ugly _value stuff\n pos = (_value(xs[self.ind_self]), _value(ys[self.ind_self]), _value(zs[self.ind_self]))\n vel = (_value(vxs[self.ind_self_vel]), _value(vys[self.ind_self_vel]), _value(vzs[self.ind_self_vel]))\n euler = (_value(ethetas[self.ind_self]), _value(elongans[self.ind_self]), _value(eincls[self.ind_self]))\n euler_vel = (_value(ethetas[self.ind_self_vel]), _value(elongans[self.ind_self_vel]), _value(eincls[self.ind_self_vel]))\n\n # TODO: eventually pass etheta to has_standard_mesh\n # TODO: implement reprojection as an option based on a nearby standard?\n if self.needs_remesh or not self.has_standard_mesh():\n logger.debug(\"{}.update_position: remeshing at t={}\".format(self.component, time))\n # track whether we did the remesh or not, so we know if we should\n # compute local quantities if not otherwise necessary\n did_remesh = True\n\n # TODO: allow time dependence on d and F from dynamics\n # d = _value(ds[self.ind_self])\n # F = _value(Fs[self.ind_self])\n\n new_mesh_dict, scale = self._build_mesh(mesh_method=self.mesh_method)\n if self.mesh_method != 'wd':\n new_mesh_dict = self._offset_mesh(new_mesh_dict)\n\n # We only need the gradients where we'll compute local\n # quantities which, for a marching mesh, is at the vertices.\n new_mesh_dict['normgrads'] = new_mesh_dict.pop('vnormgrads', np.array([]))\n\n # And lastly, let's fill the velocities column - with zeros\n # at each of the vertices\n new_mesh_dict['velocities'] = np.zeros(new_mesh_dict['vertices'].shape if 
self.mesh_method != 'wd' else new_mesh_dict['centers'].shape)\n\n new_mesh_dict['tareas'] = np.array([])\n\n\n # TODO: need to be very careful about self.sma vs self._scale - maybe need to make a self._instantaneous_scale???\n # self._scale = scale\n\n if not self.has_standard_mesh():\n # then we only computed this because we didn't already have a\n # standard_mesh... so let's save this for future use\n # TODO: eventually pass etheta to save_as_standard_mesh\n protomesh = mesh.ProtoMesh(**new_mesh_dict)\n self.save_as_standard_mesh(protomesh)\n\n # Here we'll build a scaledprotomesh directly from the newly\n # marched mesh\n # NOTE that we're using scale from the new\n # mesh rather than self._scale since the instantaneous separation\n # has likely changed since periastron\n scaledprotomesh = mesh.ScaledProtoMesh(scale=scale, **new_mesh_dict)\n\n else:\n logger.debug(\"{}.update_position: accessing standard mesh at t={}\".format(self.component, self.time))\n # track whether we did the remesh or not, so we know if we should\n # compute local quantities if not otherwise necessary\n did_remesh = False\n\n # We still need to go through scaledprotomesh instead of directly\n # to mesh since features may want to process the body-centric\n # coordinates before placing in orbit\n\n # TODO: eventually pass etheta to get_standard_mesh\n scaledprotomesh = self.get_standard_mesh(scaled=True)\n # TODO: can we avoid an extra copy here?\n\n\n if not ignore_effects and len(self.features):\n logger.debug(\"{}.update_position: processing features at t={}\".format(self.component, self.time))\n # First allow features to edit the coords_for_computations (pvertices).\n # Changes here WILL affect future computations for logg, teff,\n # intensities, etc. 
Note that these WILL NOT affect the\n # coords_for_observations automatically - those should probably be\n # perturbed as well, unless there is a good reason not to.\n for feature in self.features:\n # NOTE: these are ALWAYS done on the protomesh\n coords_for_observations = feature.process_coords_for_computations(scaledprotomesh.coords_for_computations, s=self.polar_direction_xyz, t=self.time)\n if scaledprotomesh._compute_at_vertices:\n scaledprotomesh.update_columns(pvertices=coords_for_observations)\n\n else:\n scaledprotomesh.update_columns(centers=coords_for_observations)\n raise NotImplementedError(\"areas are not updated for changed mesh\")\n\n\n for feature in self.features:\n coords_for_observations = feature.process_coords_for_observations(scaledprotomesh.coords_for_computations, scaledprotomesh.coords_for_observations, s=self.polar_direction_xyz, t=self.time)\n if scaledprotomesh._compute_at_vertices:\n scaledprotomesh.update_columns(vertices=coords_for_observations)\n\n # TODO [DONE?]: centers either need to be supported or we need to report\n # vertices in the frontend as x, y, z instead of centers\n\n updated_props = libphoebe.mesh_properties(scaledprotomesh.vertices,\n scaledprotomesh.triangles,\n tnormals=True,\n areas=True)\n\n scaledprotomesh.update_columns(**updated_props)\n\n else:\n scaledprotomesh.update_columns(centers=coords_for_observations)\n raise NotImplementedError(\"areas are not updated for changed mesh\")\n\n\n # TODO NOW [OPTIMIZE]: get rid of the deepcopy here - but without it the\n # mesh velocities build-up and do terrible things. 
It may be possible\n # to just clear the velocities in get_standard_mesh()?\n logger.debug(\"{}.update_position: placing in orbit, Mesh.from_scaledproto at t={}\".format(self.component, self.time))\n self._mesh = mesh.Mesh.from_scaledproto(scaledprotomesh.copy(),\n pos, vel, euler, euler_vel,\n self.polar_direction_xyz*self.freq_rot*self._scale,\n component_com_x)\n\n\n # Lastly, we'll recompute physical quantities (not observables) if\n # needed for this time-step.\n # TODO [DONE?]: make sure features smartly trigger needs_recompute_instantaneous\n # TODO: get rid of the or True here... the problem is that we're saving the standard mesh before filling local quantities\n if self.needs_recompute_instantaneous or did_remesh:\n logger.debug(\"{}.update_position: calling compute_local_quantities at t={}\".format(self.component, self.time))\n self.compute_local_quantities(xs, ys, zs, ignore_effects)\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\npopulate the internal data structures for the specified dataset.", "response": "def populate_observable(self, time, kind, dataset, **kwargs):\n \"\"\"\n TODO: add documentation\n \"\"\"\n\n if kind in ['mesh', 'orb']:\n return\n\n if time==self.time and dataset in self.populated_at_time and 'pblum' not in kind:\n # then we've already computed the needed columns\n\n # TODO: handle the case of intensities already computed by\n # /different/ dataset (ie RVs computed first and filling intensities\n # and then lc requesting intensities with SAME passband/atm)\n return\n\n new_mesh_cols = getattr(self, '_populate_{}'.format(kind.lower()))(dataset, **kwargs)\n\n for key, col in new_mesh_cols.items():\n\n self.mesh.update_columns_dict({'{}:{}'.format(key, dataset): col})\n\n self.populated_at_time.append(dataset)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nbuilding a star from a bundle.", "response": "def from_bundle(cls, b, component, compute=None,\n 
mesh_init_phi=0.0, datasets=[], **kwargs):\n \"\"\"\n Build a star from the :class:`phoebe.frontend.bundle.Bundle` and its\n hierarchy.\n\n Usually it makes more sense to call :meth:`System.from_bundle` directly.\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str component: label of the component in the bundle\n :parameter str compute: name of the computeoptions in the bundle\n :parameter list datasets: list of names of datasets\n :parameter **kwargs: temporary overrides for computeoptions\n :return: an instantiated :class:`Star` object\n \"\"\"\n # TODO [DONE?]: handle overriding options from kwargs\n # TODO [DONE?]: do we need dynamics method???\n\n hier = b.hierarchy\n\n if not len(hier.get_value()):\n raise NotImplementedError(\"Star meshing requires a hierarchy to exist\")\n\n\n label_self = component\n label_sibling = hier.get_stars_of_sibling_of(component)\n label_orbit = hier.get_parent_of(component)\n starrefs = hier.get_stars()\n\n ind_self = starrefs.index(label_self)\n # for the sibling, we may need to handle a list of stars (ie in the case of a hierarchical triple)\n ind_sibling = starrefs.index(label_sibling) if isinstance(label_sibling, str) else [starrefs.index(l) for l in label_sibling]\n comp_no = ['primary', 'secondary'].index(hier.get_primary_or_secondary(component))+1\n\n self_ps = b.filter(component=component, context='component', check_visible=False)\n requiv = self_ps.get_value('requiv', unit=u.solRad)\n\n\n masses = [b.get_value('mass', component=star, context='component', unit=u.solMass) for star in starrefs]\n if b.hierarchy.get_parent_of(component) is not None:\n sma = b.get_value('sma', component=label_orbit, context='component', unit=u.solRad)\n ecc = b.get_value('ecc', component=label_orbit, context='component')\n is_single = False\n else:\n # single star case\n sma = 1.0\n ecc = 0.0\n is_single = True\n\n incl = b.get_value('incl', component=label_orbit, context='component', unit=u.rad)\n long_an = 
b.get_value('long_an', component=label_orbit, context='component', unit=u.rad)\n\n # NOTE: these may not be used when not visible for contact systems, so\n # Star_roche_envelope_half should ignore and override with\n # aligned/synchronous\n incl_star = self_ps.get_value('incl', unit=u.rad, check_visible=False)\n long_an_star = self_ps.get_value('long_an', unit=u.rad, check_visible=False)\n polar_direction_uvw = mesh.spin_in_system(incl_star, long_an_star)\n # freq_rot for contacts will be provided by that subclass as 2*pi/P_orb since they're always synchronous\n freq_rot = self_ps.get_value('freq', unit=u.rad/u.d)\n\n t0 = b.get_value('t0', context='system', unit=u.d)\n\n teff = b.get_value('teff', component=component, context='component', unit=u.K)\n gravb_bol= b.get_value('gravb_bol', component=component, context='component')\n\n abun = b.get_value('abun', component=component, context='component', check_visible=False)\n irrad_frac_refl = b.get_value('irrad_frac_refl_bol', component=component, context='component')\n\n try:\n rv_grav_override = kwargs.pop('rv_grav', None)\n do_rv_grav = b.get_value('rv_grav', component=component, compute=compute, check_visible=False, rv_grav=rv_grav_override) if compute is not None else False\n except ValueError:\n # rv_grav may not have been copied to this component if no rvs are attached\n do_rv_grav = False\n\n mesh_method_override = kwargs.pop('mesh_method', None)\n mesh_method = b.get_value('mesh_method', component=component, compute=compute, mesh_method=mesh_method_override) if compute is not None else 'marching'\n\n if mesh_method == 'marching':\n ntriangles_override = kwargs.pop('ntriangle', None)\n kwargs['ntriangles'] = b.get_value('ntriangles', component=component, compute=compute, ntriangles=ntriangles_override) if compute is not None else 1000\n distortion_method_override = kwargs.pop('distortion_method', None)\n kwargs['distortion_method'] = b.get_value('distortion_method', component=component, compute=compute, 
distortion_method=distortion_method_override) if compute is not None else 'roche'\n elif mesh_method == 'wd':\n gridsize_override = kwargs.pop('gridsize', None)\n kwargs['gridsize'] = b.get_value('gridsize', component=component, compute=compute, gridsize=gridsize_override) if compute is not None else 30\n else:\n raise NotImplementedError\n\n features = []\n for feature in b.filter(component=component).features:\n feature_ps = b.filter(feature=feature, component=component)\n feature_cls = globals()[feature_ps.kind.title()]\n features.append(feature_cls.from_bundle(b, feature))\n\n if conf.devel:\n mesh_offset_override = kwargs.pop('mesh_offset', None)\n do_mesh_offset = b.get_value('mesh_offset', compute=compute, mesh_offset=mesh_offset_override)\n else:\n do_mesh_offset = True\n\n datasets_intens = [ds for ds in b.filter(kind=['lc', 'rv', 'lp'], context='dataset').datasets if ds != '_default']\n datasets_lp = [ds for ds in b.filter(kind='lp', context='dataset').datasets if ds != '_default']\n atm_override = kwargs.pop('atm', None)\n atm = b.get_value('atm', compute=compute, component=component, atm=atm_override) if compute is not None else 'blackbody'\n passband_override = kwargs.pop('passband', None)\n passband = {ds: b.get_value('passband', dataset=ds, passband=passband_override) for ds in datasets_intens}\n intens_weighting_override = kwargs.pop('intens_weighting', None)\n intens_weighting = {ds: b.get_value('intens_weighting', dataset=ds, intens_weighting=intens_weighting_override) for ds in datasets_intens}\n ld_func_override = kwargs.pop('ld_func', None)\n ld_func = {ds: b.get_value('ld_func', dataset=ds, component=component, ld_func=ld_func_override) for ds in datasets_intens}\n ld_coeffs_override = kwargs.pop('ld_coeffs', None)\n ld_coeffs = {ds: b.get_value('ld_coeffs', dataset=ds, component=component, check_visible=False, ld_coeffs=ld_coeffs_override) for ds in datasets_intens}\n ld_func_bol_override = kwargs.pop('ld_func_bol', None)\n ld_func['bol'] = 
b.get_value('ld_func_bol', component=component, context='component', check_visible=False, ld_func_bol=ld_func_bol_override)\n ld_coeffs_bol_override = kwargs.pop('ld_coeffs_bol', None)\n ld_coeffs['bol'] = b.get_value('ld_coeffs_bol', component=component, context='component', check_visible=False, ld_coeffs_bol=ld_coeffs_bol_override)\n profile_rest_override = kwargs.pop('profile_rest', None)\n lp_profile_rest = {ds: b.get_value('profile_rest', dataset=ds, unit=u.nm, profile_rest=profile_rest_override) for ds in datasets_lp}\n\n # we'll pass kwargs on here so they can be overridden by the classmethod\n # of any subclass and then intercepted again by the __init__ by the\n # same subclass. Note: kwargs also hold meshing kwargs which are used\n # by Star.__init__\n return cls(component, comp_no, ind_self, ind_sibling,\n masses, ecc,\n incl, long_an, t0,\n do_mesh_offset,\n mesh_init_phi,\n\n atm,\n datasets,\n passband,\n intens_weighting,\n ld_func,\n ld_coeffs,\n lp_profile_rest,\n requiv,\n sma,\n polar_direction_uvw,\n freq_rot,\n teff,\n gravb_bol,\n abun,\n irrad_frac_refl,\n mesh_method,\n is_single,\n do_rv_grav,\n features,\n **kwargs\n )"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nget current polar direction in Roche ( xyz coordinates", "response": "def polar_direction_xyz(self):\n \"\"\"\n get current polar direction in Roche (xyz) coordinates\n \"\"\"\n return mesh.spin_in_roche(self.polar_direction_uvw,\n self.true_anom, self.elongan, self.eincl).astype(float)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_target_volume(self, etheta=0.0, scaled=False):\n # TODO: make this a function of d instead of etheta?\n logger.debug(\"determining target volume at t={}, theta={}\".format(self.time, etheta))\n\n # TODO: eventually this could allow us to \"break\" volume conservation\n # and have volume be a function of d, with some scaling factor provided\n 
# by the user as a parameter. Until then, we'll assume volume is\n # conserved which means the volume should always be the same\n volume = 4./3 * np.pi * self.requiv**3\n\n if not scaled:\n return volume / self._scale**3\n else:\n return volume", "response": "get the target volume of the object"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef north_pole_uvw(self):\n # TODO: is this rpole scaling true for all distortion_methods??\n rpole = self.instantaneous_rpole*self.sma\n return self.polar_direction_uvw*rpole+self.mesh._pos", "response": "location of the north pole in the global system frame"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _fill_loggs(self, mesh=None, ignore_effects=False):\n logger.debug(\"{}._fill_loggs\".format(self.component))\n\n if mesh is None:\n mesh = self.mesh\n\n loggs = np.log10(mesh.normgrads.for_computations * g_rel_to_abs(self.masses[self.ind_self], self.sma))\n\n if not ignore_effects:\n for feature in self.features:\n if feature.proto_coords:\n loggs = feature.process_loggs(loggs, mesh.roche_coords_for_computations, s=self.polar_direction_xyz, t=self.time)\n else:\n loggs = feature.process_loggs(loggs, mesh.coords_for_computations, s=self.polar_direction_xyz, t=self.time)\n\n mesh.update_columns(loggs=loggs)\n\n if not self.needs_recompute_instantaneous:\n logger.debug(\"{}._fill_loggs: copying loggs to standard mesh\".format(self.component))\n theta = 0.0\n self._standard_meshes[theta].update_columns(loggs=loggs)", "response": "Fill the loggs of the current instance with the loggs of the current instance."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nfilling the gravbs of the current object with the current gravbs of the current object.", "response": "def _fill_gravs(self, mesh=None, **kwargs):\n \"\"\"\n TODO: add documentation\n\n requires _fill_loggs to have been called\n 
\"\"\"\n logger.debug(\"{}._fill_gravs\".format(self.component))\n\n if mesh is None:\n mesh = self.mesh\n\n # TODO: rename 'gravs' to 'gdcs' (gravity darkening corrections)\n\n gravs = ((mesh.normgrads.for_computations * g_rel_to_abs(self.masses[self.ind_self], self.sma))/self.instantaneous_gpole)**self.gravb_bol\n\n mesh.update_columns(gravs=gravs)\n\n if not self.needs_recompute_instantaneous:\n logger.debug(\"{}._fill_gravs: copying gravs to standard mesh\".format(self.component))\n theta = 0.0\n self._standard_meshes[theta].update_columns(gravs=gravs)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _fill_teffs(self, mesh=None, ignore_effects=False, **kwargs):\n logger.debug(\"{}._fill_teffs\".format(self.component))\n\n if mesh is None:\n mesh = self.mesh\n\n # Now we can compute the local temperatures.\n # see PHOEBE Legacy scientific reference eq 5.23\n teffs = self.instantaneous_tpole*mesh.gravs.for_computations**0.25\n\n if not ignore_effects:\n for feature in self.features:\n if feature.proto_coords:\n teffs = feature.process_teffs(teffs, mesh.roche_coords_for_computations, s=self.polar_direction_xyz, t=self.time)\n else:\n teffs = feature.process_teffs(teffs, mesh.coords_for_computations, s=self.polar_direction_xyz, t=self.time)\n\n mesh.update_columns(teffs=teffs)\n\n if not self.needs_recompute_instantaneous:\n logger.debug(\"{}._fill_teffs: copying teffs to standard mesh\".format(self.component))\n theta = 0.0\n self._standard_meshes[theta].update_columns(teffs=teffs)", "response": "r Fills the local temperatures of the given mesh with the local temperatures of the given instance of the class."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _fill_abuns(self, mesh=None, abun=0.0):\n logger.debug(\"{}._fill_abuns\".format(self.component))\n\n if mesh is None:\n mesh = self.mesh\n\n # TODO: support from frontend\n\n mesh.update_columns(abuns=abun)\n\n if not 
self.needs_recompute_instantaneous:\n logger.debug(\"{}._fill_abuns: copying abuns to standard mesh\".format(self.component))\n theta = 0.0\n self._standard_meshes[theta].update_columns(abuns=abun)", "response": "Fill in the internal columns of the internal mesh."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _fill_albedos(self, mesh=None, irrad_frac_refl=0.0):\n logger.debug(\"{}._fill_albedos\".format(self.component))\n\n if mesh is None:\n mesh = self.mesh\n\n mesh.update_columns(irrad_frac_refl=irrad_frac_refl)\n\n if not self.needs_recompute_instantaneous:\n logger.debug(\"{}._fill_albedos: copying albedos to standard mesh\".format(self.component))\n theta = 0.0\n self._standard_meshes[theta].update_columns(irrad_frac_refl=irrad_frac_refl)", "response": "Fill in the albedos of the current instance of the class."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef compute_pblum_scale(self, dataset, pblum, **kwargs):\n logger.debug(\"{}.compute_pblum_scale(dataset={}, pblum={})\".format(self.component, dataset, pblum))\n\n abs_luminosity = self.compute_luminosity(dataset, **kwargs)\n\n # We now want to remember the scale for all intensities such that the\n # luminosity in relative units gives the provided pblum\n pblum_scale = pblum / abs_luminosity\n\n self.set_pblum_scale(dataset, pblum_scale)", "response": "Compute the pblum scale for all intensities in this dataset."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\npopulates columns necessary for an LP dataset", "response": "def _populate_lp(self, dataset, **kwargs):\n \"\"\"\n Populate columns necessary for an LP dataset\n\n This should not be called directly, but rather via :meth:`Body.populate_observable`\n or :meth:`System.populate_observables`\n \"\"\"\n logger.debug(\"{}._populate_lp(dataset={})\".format(self.component, dataset))\n\n 
profile_rest = kwargs.get('profile_rest', self.lp_profile_rest.get(dataset))\n\n rv_cols = self._populate_rv(dataset, **kwargs)\n\n cols = rv_cols\n # rvs = (rv_cols['rvs']*u.solRad/u.d).to(u.m/u.s).value\n # cols['dls'] = rv_cols['rvs']*profile_rest/c.c.si.value\n\n return cols"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\npopulating the columns necessary for an RV dataset", "response": "def _populate_rv(self, dataset, **kwargs):\n \"\"\"\n Populate columns necessary for an RV dataset\n\n This should not be called directly, but rather via :meth:`Body.populate_observable`\n or :meth:`System.populate_observables`\n \"\"\"\n logger.debug(\"{}._populate_rv(dataset={})\".format(self.component, dataset))\n\n # We need to fill all the flux-related columns so that we can weigh each\n # triangle's rv by its flux in the requested passband.\n lc_cols = self._populate_lc(dataset, **kwargs)\n\n # rv per element is just the z-component of the velocity vectory. Note\n # the change in sign from our right-handed system to rv conventions.\n # These will be weighted by the fluxes when integrating\n\n rvs = -1*self.mesh.velocities.for_computations[:,2]\n\n\n # Gravitational redshift\n if self.do_rv_grav:\n rv_grav = c.G*(self.mass*u.solMass)/(self.instantaneous_rpole*u.solRad)/c.c\n # rvs are in solRad/d internally\n rv_grav = rv_grav.to('solRad/d').value\n\n rvs += rv_grav\n\n cols = lc_cols\n cols['rvs'] = rvs\n return cols"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\npopulate the columns necessary for an LC dataset.", "response": "def _populate_lc(self, dataset, **kwargs):\n \"\"\"\n Populate columns necessary for an LC dataset\n\n This should not be called directly, but rather via :meth:`Body.populate_observable`\n or :meth:`System.populate_observables`\n\n :raises NotImplementedError: if lc_method is not supported\n \"\"\"\n logger.debug(\"{}._populate_lc(dataset={})\".format(self.component, dataset))\n\n lc_method 
= kwargs.get('lc_method', 'numerical') # TODO: make sure this is actually passed\n\n passband = kwargs.get('passband', self.passband.get(dataset, None))\n intens_weighting = kwargs.get('intens_weighting', self.intens_weighting.get(dataset, None))\n ld_func = kwargs.get('ld_func', self.ld_func.get(dataset, None))\n ld_coeffs = kwargs.get('ld_coeffs', self.ld_coeffs.get(dataset, None)) if ld_func != 'interp' else None\n atm = kwargs.get('atm', self.atm)\n boosting_method = kwargs.get('boosting_method', self.boosting_method)\n\n pblum = kwargs.get('pblum', 4*np.pi)\n\n if lc_method=='numerical':\n\n pb = passbands.get_passband(passband)\n\n if intens_weighting=='photon':\n ptfarea = pb.ptf_photon_area/pb.h/pb.c\n else:\n ptfarea = pb.ptf_area\n\n self.set_ptfarea(dataset, ptfarea)\n\n ldint = pb.ldint(Teff=self.mesh.teffs.for_computations,\n logg=self.mesh.loggs.for_computations,\n abun=self.mesh.abuns.for_computations,\n atm=atm,\n ld_func=ld_func,\n ld_coeffs=ld_coeffs,\n photon_weighted=intens_weighting=='photon')\n\n\n # abs_normal_intensities are the normal emergent passband intensities:\n abs_normal_intensities = pb.Inorm(Teff=self.mesh.teffs.for_computations,\n logg=self.mesh.loggs.for_computations,\n abun=self.mesh.abuns.for_computations,\n atm=atm,\n ldint=ldint,\n photon_weighted=intens_weighting=='photon')\n\n # abs_intensities are the projected (limb-darkened) passband intensities\n # TODO: why do we need to use abs(mus) here?\n abs_intensities = pb.Imu(Teff=self.mesh.teffs.for_computations,\n logg=self.mesh.loggs.for_computations,\n abun=self.mesh.abuns.for_computations,\n mu=abs(self.mesh.mus_for_computations),\n atm=atm,\n ldint=ldint,\n ld_func=ld_func,\n ld_coeffs=ld_coeffs,\n photon_weighted=intens_weighting=='photon')\n\n\n # Beaming/boosting\n if boosting_method == 'none':\n boost_factors = 1.0\n elif boosting_method == 'linear':\n bindex = pb.bindex(Teff=self.mesh.teffs.for_computations,\n logg=self.mesh.loggs.for_computations,\n 
abun=self.mesh.abuns.for_computations,\n mu=abs(self.mesh.mus_for_computations),\n atm=atm,\n photon_weighted=intens_weighting=='photon')\n\n boost_factors = 1.0 + bindex * self.mesh.velocities.for_computations[:,2]/37241.94167601236\n else:\n raise NotImplementedError(\"boosting_method='{}' not supported\".format(self.boosting_method))\n\n # boosting is aspect dependent so we don't need to correct the\n # normal intensities\n abs_intensities *= boost_factors\n\n # Handle pblum - distance and l3 scaling happens when integrating (in observe)\n # we need to scale each triangle so that the summed normal_intensities over the\n # entire star is equivalent to pblum / 4pi\n normal_intensities = abs_normal_intensities * self.get_pblum_scale(dataset)\n intensities = abs_intensities * self.get_pblum_scale(dataset)\n\n elif lc_method=='analytical':\n raise NotImplementedError(\"analytical fluxes not yet supported\")\n # TODO: this probably needs to be moved into observe or backends.phoebe\n # (assuming it doesn't result in per-triangle quantities)\n\n else:\n raise NotImplementedError(\"lc_method '{}' not recognized\".format(lc_method))\n\n # TODO: do we really need to store all of these if store_mesh==False?\n # Can we optimize by only returning the essentials if we know we don't need them?\n return {'abs_normal_intensities': abs_normal_intensities,\n 'normal_intensities': normal_intensities,\n 'abs_intensities': abs_intensities,\n 'intensities': intensities,\n 'ldint': ldint,\n 'boost_factors': boost_factors}"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef needs_remesh(self):\n # TODO: may be able to get away with removing the features check and just doing for pulsations, etc?\n # TODO: what about dpdt, deccdt, dincldt, etc?\n return len(self.features) > 0 or self.is_misaligned or self.ecc != 0 or self.dynamics_method != 'keplerian'", "response": "Returns True if the star needs to be re - meshed."} {"SOURCE": 
"codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _build_mesh(self, mesh_method, **kwargs):\n\n # need the sma to scale between Roche and real units\n sma = kwargs.get('sma', self.sma) # Rsol (same units as coordinates)\n\n mesh_args = self.instantaneous_mesh_args\n\n if mesh_method == 'marching':\n # TODO: do this during mesh initialization only and then keep delta fixed in time??\n ntriangles = kwargs.get('ntriangles', self.ntriangles)\n\n # we need the surface area of the lobe to estimate the correct value\n # to pass for delta to marching. We will later need the volume to\n # expose its value\n logger.debug(\"libphoebe.roche_area_volume{}\".format(mesh_args))\n av = libphoebe.roche_area_volume(*mesh_args,\n choice=2,\n larea=True,\n lvolume=True)\n\n delta = _estimate_delta(ntriangles, av['larea'])\n\n logger.debug(\"libphoebe.roche_marching_mesh{}\".format(mesh_args))\n new_mesh = libphoebe.roche_marching_mesh(*mesh_args,\n delta=delta,\n choice=2,\n full=True,\n max_triangles=ntriangles*2,\n vertices=True,\n triangles=True,\n centers=True,\n vnormals=True,\n tnormals=True,\n cnormals=False,\n vnormgrads=True,\n cnormgrads=False,\n areas=True,\n volume=False,\n init_phi=self.mesh_init_phi)\n\n\n # In addition to the values exposed by the mesh itself, let's report\n # the volume and surface area of the lobe. 
The lobe area is used\n # if mesh_offseting is required, and the volume is optionally exposed\n # to the user.\n new_mesh['volume'] = av['lvolume'] # * sma**3\n new_mesh['area'] = av['larea'] # * sma**2\n\n scale = sma\n\n elif mesh_method == 'wd':\n N = int(kwargs.get('gridsize', self.gridsize))\n\n # unpack mesh_args\n q, F, d, Phi = mesh_args\n\n the_grid = mesh_wd.discretize_wd_style(N, q, F, d, Phi)\n new_mesh = mesh.wd_grid_to_mesh_dict(the_grid, q, F, d)\n scale = sma\n\n else:\n raise NotImplementedError(\"mesh_method '{}' is not supported\".format(mesh_method))\n\n return new_mesh, scale", "response": "This function builds the mesh for a specific class of class"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _build_mesh(self, mesh_method, **kwargs):\n\n # if we don't provide instantaneous masses or smas, then assume they are\n # not time dependent - in which case they were already stored in the init\n sma = kwargs.get('sma', self.sma) # Rsol (same units as coordinates)\n\n mesh_args = self.instantaneous_mesh_args\n\n if mesh_method == 'marching':\n ntriangles = kwargs.get('ntriangles', self.ntriangles)\n\n av = libphoebe.sphere_area_volume(*mesh_args,\n larea=True,\n lvolume=True)\n\n delta = _estimate_delta(ntriangles, av['larea'])\n\n new_mesh = libphoebe.sphere_marching_mesh(*mesh_args,\n delta=delta,\n full=True,\n max_triangles=ntriangles*2,\n vertices=True,\n triangles=True,\n centers=True,\n vnormals=True,\n tnormals=True,\n cnormals=False,\n vnormgrads=True,\n cnormgrads=False,\n areas=True,\n volume=True,\n init_phi=self.mesh_init_phi)\n\n # In addition to the values exposed by the mesh itself, let's report\n # the volume and surface area of the lobe. 
The lobe area is used\n # if mesh_offseting is required, and the volume is optionally exposed\n # to the user.\n new_mesh['volume'] = av['lvolume']\n new_mesh['area'] = av['larea']\n\n scale = sma\n\n else:\n raise NotImplementedError(\"mesh_method '{}' is not supported\".format(mesh_method))\n\n return new_mesh, scale", "response": "Builds a mesh from the given parameters."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\npopulates the internal data structures of the observed data structures.", "response": "def populate_observable(self, time, kind, dataset, **kwargs):\n \"\"\"\n TODO: add documentation\n \"\"\"\n\n for half in self._halves:\n half.populate_observable(time, kind, dataset, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ninitializing a Spot feature from the bundle.", "response": "def from_bundle(cls, b, feature):\n \"\"\"\n Initialize a Spot feature from the bundle.\n \"\"\"\n\n feature_ps = b.get_feature(feature)\n\n colat = feature_ps.get_value('colat', unit=u.rad)\n longitude = feature_ps.get_value('long', unit=u.rad)\n\n if len(b.hierarchy.get_stars())>=2:\n star_ps = b.get_component(feature_ps.component)\n orbit_ps = b.get_component(b.hierarchy.get_parent_of(feature_ps.component))\n syncpar = star_ps.get_value('syncpar')\n period = orbit_ps.get_value('period')\n dlongdt = (syncpar - 1) / period * 2 * np.pi\n else:\n star_ps = b.get_component(feature_ps.component)\n dlongdt = star_ps.get_value('freq', unit=u.rad/u.d)\n longitude = np.pi/2\n\n radius = feature_ps.get_value('radius', unit=u.rad)\n relteff = feature_ps.get_value('relteff', unit=u.dimensionless_unscaled)\n\n t0 = b.get_value('t0', context='system', unit=u.d)\n\n return cls(colat, longitude, dlongdt, radius, relteff, t0)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the basis vectors in roche coordinates", "response": "def pointing_vector(self, s, time):\n 
\"\"\"\n s is the spin vector in roche coordinates\n time is the current time\n \"\"\"\n t = time - self._t0\n longitude = self._longitude + self._dlongdt * t\n\n # define the basis vectors in the spin (primed) coordinates in terms of\n # the Roche coordinates.\n # ez' = s\n # ex' = (ex - s(s.ex)) /|i - s(s.ex)|\n # ey' = s x ex'\n ex = np.array([1., 0., 0.])\n ezp = s\n exp = (ex - s*np.dot(s,ex))\n eyp = np.cross(s, exp)\n\n return np.sin(self._colat)*np.cos(longitude)*exp +\\\n np.sin(self._colat)*np.sin(longitude)*eyp +\\\n np.cos(self._colat)*ezp"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef process_teffs(self, teffs, coords, s=np.array([0., 0., 1.]), t=None):\n if t is None:\n # then assume at t0\n t = self._t0\n\n pointing_vector = self.pointing_vector(s,t)\n logger.debug(\"spot.process_teffs at t={} with pointing_vector={} and radius={}\".format(t, pointing_vector, self._radius))\n\n cos_alpha_coords = np.dot(coords, pointing_vector) / np.linalg.norm(coords, axis=1)\n cos_alpha_spot = np.cos(self._radius)\n\n filter_ = cos_alpha_coords > cos_alpha_spot\n teffs[filter_] = teffs[filter_] * self._relteff\n\n return teffs", "response": "This function takes a set of teffs and a set of coordinates and returns a set of teffs that are within the spot and applies the relteff factor to those teffs."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ninitializing a Pulsation feature from a bundle.", "response": "def from_bundle(cls, b, feature):\n \"\"\"\n Initialize a Pulsation feature from the bundle.\n \"\"\"\n\n feature_ps = b.get_feature(feature)\n freq = feature_ps.get_value('freq', unit=u.d**-1)\n radamp = feature_ps.get_value('radamp', unit=u.dimensionless_unscaled)\n l = feature_ps.get_value('l', unit=u.dimensionless_unscaled)\n m = feature_ps.get_value('m', unit=u.dimensionless_unscaled)\n teffext = feature_ps.get_value('teffext')\n\n GM = c.G.to('solRad3 / (solMass 
d2)').value*b.get_value(qualifier='mass', component=feature_ps.component, context='component', unit=u.solMass)\n R = b.get_value(qualifier='rpole', component=feature_ps.component, section='component', unit=u.solRad)\n\n tanamp = GM/R**3/freq**2\n\n return cls(radamp, freq, l, m, tanamp, teffext)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef project_onto_potential(r, pot_name, *args):\n\n pot = globals()[pot_name]\n dpdx = globals()['d%sdx'%(pot_name)]\n dpdy = globals()['d%sdy'%(pot_name)]\n dpdz = globals()['d%sdz'%(pot_name)]\n dpdr = globals()['d%sdr'%(pot_name)]\n\n n_iter = 0\n\n rmag, rmag0 = np.sqrt((r*r).sum()), 0\n lam, nu = r[0]/rmag, r[2]/rmag\n dc = np.array((lam, np.sqrt(1-lam*lam-nu*nu), nu)) # direction cosines -- must not change during reprojection\n D, q, F, p0 = args\n\n while np.abs(rmag-rmag0) > 1e-12 and n_iter < 100:\n rmag0 = rmag\n rmag = rmag0 - pot(rmag0*dc, *args)/dpdr(rmag0*dc, *args[:-1])\n n_iter += 1\n if n_iter == 100:\n logger.warning('projection did not converge')\n\n r = rmag*dc\n\n return MeshVertex(r, dpdx, dpdy, dpdz, *args[:-1])", "response": "project a mesh onto a potential"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef open(cls, filename):\n filename = os.path.expanduser(filename)\n logger.debug(\"importing from {}\".format(filename))\n f = open(filename, 'r')\n data = json.load(f, object_pairs_hook=parse_json)\n f.close()\n b = cls(data)\n\n version = b.get_value('phoebe_version')\n phoebe_version_import = StrictVersion(version if version != 'devel' else '2.1.2')\n phoebe_version_this = StrictVersion(__version__ if __version__ != 'devel' else '2.1.2')\n\n logger.debug(\"importing from PHOEBE v {} into v {}\".format(phoebe_version_import, phoebe_version_this))\n\n # update the entry in the PS, so if this is saved again it will have the new version\n b.set_value('phoebe_version', __version__)\n\n 
if phoebe_version_import == phoebe_version_this:\n return b\n elif phoebe_version_import > phoebe_version_this:\n warning = \"importing from a newer version ({}) of PHOEBE, this may or may not work, consider updating\".format(phoebe_version_import)\n print(\"WARNING: {}\".format(warning))\n logger.warning(warning)\n return b\n\n if phoebe_version_import < StrictVersion(\"2.1.2\"):\n b._import_before_v211 = True\n warning = \"Importing from an older version ({}) of PHOEBE which did not support constraints in solar units. All constraints will remain in SI, but calling set_hierarchy will likely fail.\".format(phoebe_version_import)\n print(\"WARNING: {}\".format(warning))\n logger.warning(warning)\n\n if phoebe_version_import < StrictVersion(\"2.1.0\"):\n logger.warning(\"importing from an older version ({}) of PHOEBE into version {}\".format(phoebe_version_import, phoebe_version_this))\n\n def _ps_dict(ps):\n return {p.qualifier: p.get_quantity() if hasattr(p, 'get_quantity') else p.get_value() for p in ps.to_list()}\n\n # rpole -> requiv: https://github.com/phoebe-project/phoebe2/pull/300\n dict_stars = {}\n for star in b.hierarchy.get_stars():\n ps_star = b.filter(context='component', component=star)\n dict_stars[star] = _ps_dict(ps_star)\n\n # TODO: actually do the translation\n rpole = dict_stars[star].pop('rpole', 1.0*u.solRad).to(u.solRad).value\n # PHOEBE 2.0 didn't have syncpar for contacts\n if len(b.filter('syncpar', component=star)):\n F = b.get_value('syncpar', component=star, context='component')\n else:\n F = 1.0\n parent_orbit = b.hierarchy.get_parent_of(star)\n component = b.hierarchy.get_primary_or_secondary(star, return_ind=True)\n sma = b.get_value('sma', component=parent_orbit, context='component', unit=u.solRad)\n q = b.get_value('q', component=parent_orbit, context='component')\n d = 1 - b.get_value('ecc', component=parent_orbit)\n\n logger.info(\"roche.rpole_to_requiv_aligned(rpole={}, sma={}, q={}, F={}, d={}, component={})\".format(rpole, 
sma, q, F, d, component))\n dict_stars[star]['requiv'] = roche.rpole_to_requiv_aligned(rpole, sma, q, F, d, component=component)\n\n b.remove_component(star)\n\n\n for star, dict_star in dict_stars.items():\n logger.info(\"attempting to update component='{}' to new version requirements\".format(star))\n b.add_component('star', component=star, check_label=False, **dict_star)\n\n\n dict_envs = {}\n for env in b.hierarchy.get_envelopes():\n ps_env = b.filter(context='component', component=env)\n dict_envs[env] = _ps_dict(ps_env)\n b.remove_component(env)\n\n for env, dict_env in dict_envs.items():\n logger.info(\"attempting to update component='{}' to new version requirements\".format(env))\n b.add_component('envelope', component=env, check_label=False, **dict_env)\n\n # TODO: this probably will fail once more than one contacts are\n # supported, but will never need that for 2.0->2.1 since\n # multiples aren't supported (yet) call b.set_hierarchy() to\n\n # reset all hieararchy-dependent constraints (including\n # pot<->requiv)\n b.set_hierarchy()\n\n primary = b.hierarchy.get_stars()[0]\n b.flip_constraint('pot', component=env, solve_for='requiv@{}'.format(primary), check_nan=False)\n b.set_value('pot', component=env, context='component', value=dict_env['pot'])\n b.flip_constraint('requiv', component=primary, solve_for='pot', check_nan=False)\n\n # reset all hieararchy-dependent constraints\n b.set_hierarchy()\n\n # mesh datasets: https://github.com/phoebe-project/phoebe2/pull/261, https://github.com/phoebe-project/phoebe2/pull/300\n for dataset in b.filter(context='dataset', kind='mesh').datasets:\n logger.info(\"attempting to update mesh dataset='{}' to new version requirements\".format(dataset))\n ps_mesh = b.filter(context='dataset', kind='mesh', dataset=dataset)\n dict_mesh = _ps_dict(ps_mesh)\n # NOTE: we will not remove (or update) the dataset from any existing models\n b.remove_dataset(dataset, context=['dataset', 'constraint', 'compute'])\n if 
len(b.filter(dataset=dataset, context='model')):\n logger.warning(\"existing model for dataset='{}' models={} will not be removed, but likely will not work with new plotting updates\".format(dataset, b.filter(dataset=dataset, context='model').models))\n\n b.add_dataset('mesh', dataset=dataset, check_label=False, **dict_mesh)\n\n # vgamma definition: https://github.com/phoebe-project/phoebe2/issues/234\n logger.info(\"updating vgamma to new version requirements\")\n b.set_value('vgamma', -1*b.get_value('vgamma'))\n\n # remove phshift parameter: https://github.com/phoebe-project/phoebe2/commit/1fa3a4e1c0f8d80502101e1b1e750f5fb14115cb\n logger.info(\"removing any phshift parameters for new version requirements\")\n b.remove_parameters_all(qualifier='phshift')\n\n # colon -> long: https://github.com/phoebe-project/phoebe2/issues/211\n logger.info(\"removing any colon parameters for new version requirements\")\n b.remove_parameters_all(qualifier='colon')\n\n # make sure constraints are updated according to conf.interactive_constraints\n b.run_delayed_constraints()\n\n return b", "response": "Open a new bundle from a JSON - formatted PHOEBE 2 file. This is a constructor so should be called as : class:`Bundle` object."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nload a new bundle from a phoebe server.", "response": "def from_server(cls, bundleid, server='http://localhost:5555',\n as_client=True):\n \"\"\"Load a new bundle from a server.\n\n [NOT IMPLEMENTED]\n\n Load a bundle from a phoebe server. This is a constructor so should be\n called as:\n\n >>> b = Bundle.from_server('asdf', as_client=False)\n\n :parameter str bundleid: the identifier given to the bundle by the\n server\n :parameter str server: the host (and port) of the server\n :parameter bool as_client: whether to attach in client mode\n (default: True)\n \"\"\"\n if not conf.devel:\n raise NotImplementedError(\"'from_server' not officially supported for this release. 
Enable developer mode to test.\")\n\n # TODO: run test message on server, if localhost and fails, attempt to\n # launch?\n url = \"{}/{}/json\".format(server, bundleid)\n logger.info(\"downloading bundle from {}\".format(url))\n r = requests.get(url, timeout=5)\n rjson = r.json()\n\n b = cls(rjson['data'])\n\n if as_client:\n b.as_client(as_client, server=server,\n bundleid=rjson['meta']['bundleid'])\n\n logger.warning(\"This bundle is in client mode, meaning all\\\n computations will be handled by the server at {}. To disable\\\n client mode, call as_client(False) or in the future pass\\\n as_client=False to from_server\".format(server))\n\n return b"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef from_legacy(cls, filename, add_compute_legacy=True, add_compute_phoebe=True):\n logger.warning(\"importing from legacy is experimental until official 1.0 release\")\n filename = os.path.expanduser(filename)\n return io.load_legacy(filename, add_compute_legacy, add_compute_phoebe)", "response": "Load a bundle from a PHOEBE 1. 
0 Legacy file."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nload a bundle with a default triple system.", "response": "def default_triple(cls, inner_as_primary=True, inner_as_overcontact=False,\n starA='starA', starB='starB', starC='starC',\n inner='inner', outer='outer',\n contact_envelope='contact_envelope'):\n \"\"\"Load a bundle with a default triple system.\n\n Set inner_as_primary based on what hierarchical configuration you want.\n\n inner_as_primary = True:\n\n starA - starB -- starC\n\n inner_as_primary = False:\n\n starC -- starA - starB\n\n This is a constructor, so should be called as:\n\n >>> b = Bundle.default_triple_primary()\n\n :parameter bool inner_as_primary: whether the inner-binary should be\n the primary component of the outer-orbit\n :return: instantiated :class:`Bundle` object\n \"\"\"\n if not conf.devel:\n raise NotImplementedError(\"'default_triple' not officially supported for this release. Enable developer mode to test.\")\n\n b = cls()\n b.add_star(component=starA)\n b.add_star(component=starB)\n b.add_star(component=starC)\n b.add_orbit(component=inner, period=1)\n b.add_orbit(component=outer, period=10)\n\n if inner_as_overcontact:\n b.add_envelope(component=contact_envelope)\n inner_hier = _hierarchy.binaryorbit(b[inner],\n b[starA],\n b[starB],\n b[contact_envelope])\n else:\n inner_hier = _hierarchy.binaryorbit(b[inner], b[starA], b[starB])\n\n if inner_as_primary:\n hierstring = _hierarchy.binaryorbit(b[outer], inner_hier, b[starC])\n else:\n hierstring = _hierarchy.binaryorbit(b[outer], b[starC], inner_hier)\n b.set_hierarchy(hierstring)\n\n b.add_constraint(constraint.keplers_third_law_hierarchical,\n outer, inner)\n\n # TODO: does this constraint need to be rebuilt when things change?\n # (ie in set_hierarchy)\n\n b.add_compute()\n\n return b"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsaving the bundle to a JSON - formatted ASCII file.", "response": 
"def save(self, filename, clear_history=True, incl_uniqueid=False,\n compact=False):\n \"\"\"Save the bundle to a JSON-formatted ASCII file.\n\n :parameter str filename: relative or full path to the file\n :parameter bool clear_history: whether to clear history log\n items before saving (default: True)\n :parameter bool incl_uniqueid: whether to including uniqueids in the\n file (only needed if its necessary to maintain the uniqueids when\n reloading)\n :parameter bool compact: whether to use compact file-formatting (maybe\n be quicker to save/load, but not as easily readable)\n :return: the filename\n \"\"\"\n if clear_history:\n # TODO: let's not actually clear history,\n # but rather skip the context when saving\n self.remove_history()\n\n # TODO: add option for clear_models, clear_feedback\n # NOTE: PS.save will handle os.path.expanduser\n return super(Bundle, self).save(filename, incl_uniqueid=incl_uniqueid,\n compact=compact)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nexporting the current object to a legacy file.", "response": "def export_legacy(self, filename):\n \"\"\"\n TODO: add docs\n \"\"\"\n logger.warning(\"exporting to legacy is experimental until official 1.0 release\")\n filename = os.path.expanduser(filename)\n return io.pass_to_legacy(self, filename)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _on_socket_push_updates(self, resp):\n # TODO: check to make sure resp['meta']['bundleid']==bundleid ?\n # TODO: handle added parameters\n # TODO: handle removed (isDeleted) parameters\n\n for item in resp['data']:\n if item['id'] in self.uniqueids:\n # then we're updating something in the parameter (or deleting)\n param = self.get_parameter(uniqueid=item['id'])\n for attr, value in item['attributes'].items():\n if hasattr(param, \"_{}\".format(attr)):\n logger.info(\"updates from server: setting {}@{}={}\".\n format(attr, param.twig, value))\n 
setattr(param, \"_{}\".format(attr), value)\n else:\n self._attach_param_from_server(item)", "response": "Handle incoming socket push updates from server"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nattach a parameter to the bundle based on the item.", "response": "def _attach_param_from_server(self, item):\n \"\"\"\n [NOT IMPLEMENTED]\n \"\"\"\n if isinstance(item, list):\n for itemi in item:\n self._attach_param_from_server(itemi)\n else:\n # then we need to add a new parameter\n d = item['attributes']\n d['uniqueid'] = item['id']\n param = parameters.parameter_from_json(d, bundle=self)\n\n metawargs = {}\n self._attach_params([param], **metawargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef as_client(self, as_client=True, server='http://localhost:5555',\n bundleid=None):\n \"\"\"\n [NOT IMPLEMENTED]\n \"\"\"\n if as_client:\n if not _can_client:\n raise ImportError(\"dependencies to support client mode not met - see docs\")\n\n server_running = self._test_server(server=server,\n start_if_fail=True)\n if not server_running:\n raise ValueError(\"server {} is not running\".format(server))\n\n server_split = server.split(':')\n host = ':'.join(server_split[:-1])\n port = int(float(server_split[-1] if len(server_split) else 8000))\n self._socketio = SocketIO(host, port, BaseNamespace)\n self._socketio.on('connect', self._on_socket_connect)\n self._socketio.on('disconnect', self._on_socket_disconnect)\n\n self._socketio.on('push updates', self._on_socket_push_updates)\n\n if not bundleid:\n upload_url = \"{}/upload\".format(server)\n logger.info(\"uploading bundle to server {}\".format(upload_url))\n data = json.dumps(self.to_json(incl_uniqueid=True))\n r = requests.post(upload_url, data=data, timeout=5)\n bundleid = r.json()['meta']['bundleid']\n\n self._socketio.emit('subscribe bundle', {'bundleid': bundleid})\n\n self._bundleid = bundleid\n\n 
self._is_client = server\n logger.info(\"connected as client to server at {}:{}\".\n format(host, port))\n\n else:\n logger.warning(\"This bundle is now permanently detached from the instance\\\n on the server and will not receive future updates. To start a client\\\n in sync with the version on the server or other clients currently \\\n subscribed, you must instantiate a new bundle with Bundle.from_server.\")\n\n if hasattr(self, '_socketIO') and self._socketIO is not None:\n self._socketio.emit('unsubscribe bundle', {'bundleid': bundleid})\n self._socketIO.disconnect()\n self._socketIO = None\n\n self._bundleid = None\n self._is_client = False", "response": "Subscribe to the server and set as_client flag."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nupdating the bundle with the latest entries from the client.", "response": "def client_update(self):\n \"\"\"\n [NOT IMPLEMENTED]\n \"\"\"\n if not self.is_client:\n raise ValueError(\"Bundle is not in client mode, cannot update\")\n\n logger.info(\"updating client...\")\n # wait briefly to pickup any missed messages, which should then fire\n # the corresponding callbacks and update the bundle\n self._socketio.wait(seconds=1)\n self._last_client_update = datetime.now()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _default_label(self, base, context, **kwargs):\n\n kwargs['context'] = context\n params = len(getattr(self.filter(check_visible=False,**kwargs), '{}s'.format(context)))\n\n return \"{}{:02d}\".format(base, params+1)", "response": "This function returns a default label given a base label and a context and keyword arguments"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_setting(self, twig=None, **kwargs):\n if twig is not None:\n kwargs['twig'] = twig\n kwargs['context'] = 'setting'\n return self.filter_or_get(**kwargs)", "response": "Filter in 
the setting context"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nadding a new log to the history", "response": "def _add_history(self, redo_func, redo_kwargs, undo_func, undo_kwargs,\n **kwargs):\n \"\"\"\n Add a new log (undo/redoable) to this history context\n\n :parameter str redo_func: function to redo the action, must be a\n method of :class:`Bundle`\n :parameter dict redo_kwargs: kwargs to pass to the redo_func. Each\n item must be serializable (float or str, not objects)\n :parameter str undo_func: function to undo the action, must be a\n method of :class:`Bundle`\n :parameter dict undo_kwargs: kwargs to pass to the undo_func. Each\n item must be serializable (float or str, not objects)\n :parameter str history: label of the history parameter\n :raises ValueError: if the label for this history item is forbidden or\n already exists\n \"\"\"\n if not self.history_enabled:\n return\n\n param = HistoryParameter(self, redo_func, redo_kwargs,\n undo_func, undo_kwargs)\n\n metawargs = {'context': 'history',\n 'history': kwargs.get('history', self._default_label('hist', **{'context': 'history'}))}\n\n self._check_label(metawargs['history'])\n\n self._attach_params([param], **metawargs)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_history(self, i=None):\n ps = self.filter(context='history')\n # if not len(ps):\n # raise ValueError(\"no history recorded\")\n\n if i is not None:\n return ps.to_list()[i]\n else:\n return ps", "response": "Get a history item by index."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef remove_history(self, i=None):\n if i is None:\n self.remove_parameters_all(context='history')\n else:\n param = self.get_history(i=i)\n self.remove_parameter(uniqueid=param.uniqueid)", "response": "Removes a history item from the bundle by index."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 
function, write the documentation\ndef undo(self, i=-1):\n\n _history_enabled = self.history_enabled\n param = self.get_history(i)\n self.disable_history()\n param.undo()\n # TODO: do we really want to remove this? then what's the point of redo?\n self.remove_parameter(uniqueid=param.uniqueid)\n if _history_enabled:\n self.enable_history()", "response": "Undo an item in the history logs\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the hierarchy of the current system.", "response": "def set_hierarchy(self, *args, **kwargs):\n \"\"\"\n Set the hierarchy of the system.\n\n See tutorial on building a system.\n\n TODO: provide documentation\n args can be\n - string representation (preferably passed through hierarchy already)\n - func and strings/PSs/params to pass to function\n \"\"\"\n\n if self._import_before_v211:\n raise ValueError(\"This bundle was created before constraints in solar units were supported and therefore cannot call set_hierarchy. Either downgrade PHOEBE or re-create this system from scratch if you need to change the hierarchy.\")\n\n # need to run any constraints since some may be deleted and rebuilt\n changed_params = self.run_delayed_constraints()\n\n\n _old_param = self.get_hierarchy()\n\n if len(args) == 1 and isinstance(args[0], str):\n repr_ = args[0]\n kind = None\n\n elif len(args) == 0:\n if 'value' in kwargs.keys():\n repr_ = kwargs['value']\n kind = None\n else:\n repr_ = self.get_hierarchy().get_value()\n kind = None\n\n else:\n func = _get_add_func(hierarchy, args[0])\n func_args = args[1:]\n\n repr_ = func(*func_args)\n\n kind = func.func_name\n\n hier_param = HierarchyParameter(value=repr_,\n description='Hierarchy representation')\n\n self.remove_parameters_all(qualifier='hierarchy', context='system')\n\n metawargs = {'context': 'system'}\n self._attach_params([hier_param], **metawargs)\n\n # cache hierarchy param so we don't need to do a filter everytime we\n # want to access it in is_visible, 
etc\n self._hierarchy_param = hier_param\n\n self._handle_pblum_defaults()\n # self._handle_dataset_selectparams()\n\n # Handle inter-PS constraints\n starrefs = hier_param.get_stars()\n\n # user_interactive_constraints = conf.interactive_constraints\n # conf.interactive_constraints_off()\n\n for component in self.hierarchy.get_envelopes():\n # we need two of the three [comp_env] + self.hierarchy.get_siblings_of(comp_env) to have constraints\n logger.debug('re-creating requiv constraints')\n existing_requiv_constraints = self.filter(constraint_func='requiv_to_pot', component=[component]+self.hierarchy.get_siblings_of(component))\n if len(existing_requiv_constraints) == 2:\n # do we need to rebuild these?\n continue\n elif len(existing_requiv_constraints)==0:\n for component_requiv in self.hierarchy.get_siblings_of(component):\n pot_parameter = self.get_parameter(qualifier='pot', component=self.hierarchy.get_envelope_of(component_requiv), context='component')\n requiv_parameter = self.get_parameter(qualifier='requiv', component=component_requiv, context='component')\n if len(pot_parameter.constrained_by):\n solve_for = requiv_parameter.uniquetwig\n else:\n solve_for = pot_parameter.uniquetwig\n\n self.add_constraint(constraint.requiv_to_pot, component_requiv,\n constraint=self._default_label('requiv_to_pot', context='constraint'),\n solve_for=solve_for)\n else:\n raise NotImplementedError(\"expected 0 or 2 existing requiv_to_pot constraints\")\n\n logger.debug('re-creating fillout_factor (contact) constraint for {}'.format(component))\n if len(self.filter(context='constraint',\n constraint_func='fillout_factor',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='fillout_factor',\n component=component)\n self.remove_constraint(constraint_func='fillout_factor',\n component=component)\n self.add_constraint(constraint.fillout_factor, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n 
constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.fillout_factor, component,\n constraint=self._default_label('fillout_factor', context='constraint'))\n\n logger.debug('re-creating pot_min (contact) constraint for {}'.format(component))\n if len(self.filter(context='constraint',\n constraint_func='potential_contact_min',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='potential_contact_min',\n component=component)\n self.remove_constraint(constraint_func='potential_contact_min',\n component=component)\n self.add_constraint(constraint.potential_contact_min, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.potential_contact_min, component,\n constraint=self._default_label('pot_min', context='constraint'))\n\n logger.debug('re-creating pot_max (contact) constraint for {}'.format(component))\n if len(self.filter(context='constraint',\n constraint_func='potential_contact_max',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='potential_contact_max',\n component=component)\n self.remove_constraint(constraint_func='potential_contact_max',\n component=component)\n self.add_constraint(constraint.potential_contact_max, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.potential_contact_max, component,\n constraint=self._default_label('pot_max', context='constraint'))\n\n for component in self.hierarchy.get_stars():\n if len(starrefs)==1:\n pass\n # we'll do the potential constraint either way\n else:\n logger.debug('re-creating mass constraint for {}'.format(component))\n # TODO: will this cause problems if the constraint has been flipped?\n if len(self.filter(context='constraint',\n constraint_func='mass',\n component=component)):\n constraint_param = 
self.get_constraint(constraint_func='mass',\n component=component)\n self.remove_constraint(constraint_func='mass',\n component=component)\n self.add_constraint(constraint.mass, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.mass, component,\n constraint=self._default_label('mass', context='constraint'))\n\n\n logger.debug('re-creating comp_sma constraint for {}'.format(component))\n # TODO: will this cause problems if the constraint has been flipped?\n if len(self.filter(context='constraint',\n constraint_func='comp_sma',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='comp_sma',\n component=component)\n self.remove_constraint(constraint_func='comp_sma',\n component=component)\n self.add_constraint(constraint.comp_sma, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.comp_sma, component,\n constraint=self._default_label('comp_sma', context='constraint'))\n\n logger.debug('re-creating rotation_period constraint for {}'.format(component))\n # TODO: will this cause problems if the constraint has been flipped?\n if len(self.filter(context='constraint',\n constraint_func='rotation_period',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='rotation_period',\n component=component)\n self.remove_constraint(constraint_func='rotation_period',\n component=component)\n self.add_constraint(constraint.rotation_period, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.rotation_period, component,\n constraint=self._default_label('rotation_period', context='constraint'))\n\n if self.hierarchy.is_contact_binary(component):\n # then we're in a contact binary and need to create pot<->requiv 
constraints\n # NOTE: pot_min and pot_max are handled above at the envelope level\n logger.debug('re-creating requiv_detached_max (contact) constraint for {}'.format(component))\n if len(self.filter(context='constraint',\n constraint_func='requiv_detached_max',\n component=component)):\n # then we're changing from detached to contact so should remove the detached constraint first\n self.remove_constraint(constraint_func='requiv_detached_max', component=component)\n\n logger.debug('re-creating requiv_contact_max (contact) constraint for {}'.format(component))\n if len(self.filter(context='constraint',\n constraint_func='requiv_contact_max',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='requiv_contact_max',\n component=component)\n self.remove_constraint(constraint_func='requiv_contact_max',\n component=component)\n self.add_constraint(constraint.requiv_contact_max, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.requiv_contact_max, component,\n constraint=self._default_label('requiv_max', context='constraint'))\n\n logger.debug('re-creating requiv_contact_min (contact) constraint for {}'.format(component))\n if len(self.filter(context='constraint',\n constraint_func='requiv_contact_min',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='requiv_contact_min',\n component=component)\n self.remove_constraint(constraint_func='requiv_contact_min',\n component=component)\n self.add_constraint(constraint.requiv_contact_min, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.requiv_contact_min, component,\n constraint=self._default_label('requiv_min', context='constraint'))\n\n else:\n # then we're in a detached/semi-detached system\n # let's make sure we remove any requiv_to_pot constraints\n 
if len(self.filter(context='constraint',\n constraint_func='requiv_to_pot',\n component=component)):\n self.remove_constraint(constraint_func='requiv_to_pot', component=component)\n\n logger.debug('re-creating requiv_max (detached) constraint for {}'.format(component))\n if len(self.filter(context='constraint',\n constraint_func='requiv_contact_max',\n component=component)):\n # then we're changing from contact to detached so should remove the detached constraint first\n self.remove_constraint(constraint_func='requiv_contact_max', component=component)\n\n logger.debug('re-creating requiv_detached_max (detached) constraint for {}'.format(component))\n if len(self.filter(context='constraint',\n constraint_func='requiv_detached_max',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='requiv_detached_max',\n component=component)\n self.remove_constraint(constraint_func='requiv_detached_max',\n component=component)\n self.add_constraint(constraint.requiv_detached_max, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.requiv_detached_max, component,\n constraint=self._default_label('requiv_max', context='constraint'))\n\n logger.debug('re-creating pitch constraint for {}'.format(component))\n # TODO: will this cause problems if the constraint has been flipped?\n # TODO: what if the user disabled/removed this constraint?\n if len(self.filter(context='constraint',\n constraint_func='pitch',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='pitch',\n component=component)\n self.remove_constraint(constraint_func='pitch',\n component=component)\n self.add_constraint(constraint.pitch, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.pitch, component,\n constraint=self._default_label('pitch', 
context='constraint'))\n\n logger.debug('re-creating yaw constraint for {}'.format(component))\n # TODO: will this cause problems if the constraint has been flipped?\n # TODO: what if the user disabled/removed this constraint?\n if len(self.filter(context='constraint',\n constraint_func='yaw',\n component=component)):\n constraint_param = self.get_constraint(constraint_func='yaw',\n component=component)\n self.remove_constraint(constraint_func='yaw',\n component=component)\n self.add_constraint(constraint.yaw, component,\n solve_for=constraint_param.constrained_parameter.uniquetwig,\n constraint=constraint_param.constraint)\n else:\n self.add_constraint(constraint.yaw, component,\n constraint=self._default_label('yaw', context='constraint'))\n\n # if user_interactive_constraints:\n # conf.interactive_constraints_on()\n # self.run_delayed_constraints()\n\n\n redo_kwargs = {k: v for k, v in hier_param.to_dict().items()\n if v not in [None, ''] and\n k not in ['uniqueid', 'uniquetwig', 'twig',\n 'Class', 'context', 'qualifier',\n 'description']}\n if _old_param is None:\n # this will fake the undo-ability to raise an error saying it\n # cannot be undone\n undo_kwargs = {'uniqueid': None}\n else:\n undo_kwargs = {k: v for k, v in _old_param.to_dict().items()\n if v not in [None, ''] and\n k not in ['uniqueid', 'uniquetwig', 'twig',\n 'Class', 'context', 'qualifier',\n 'description']}\n self._add_history(redo_func='set_hierarchy',\n redo_kwargs=redo_kwargs,\n undo_func='set_hierarchy',\n undo_kwargs=undo_kwargs)\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a new ParameterSet with only the system tags in the current context", "response": "def get_system(self, twig=None, **kwargs):\n \"\"\"\n Filter in the 'system' context\n\n :parameter str twig: twig to use for filtering\n :parameter **kwargs: any other tags to do the filter\n (except twig or context)\n\n :return: :class:`phoebe.parameters.parameters.Parameter` 
or\n :class:`phoebe.parameters.parameters.ParameterSet`\n \"\"\"\n if twig is not None:\n kwargs['twig'] = twig\n kwargs['context'] = 'system'\n return self.filter(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef run_checks(self, **kwargs):\n\n # make sure all constraints have been run\n changed_params = self.run_delayed_constraints()\n\n hier = self.hierarchy\n if hier is None:\n return True, ''\n for component in hier.get_stars():\n kind = hier.get_kind_of(component)\n comp_ps = self.get_component(component)\n\n if not len(comp_ps):\n return False, \"component '{}' in the hierarchy is not in the bundle\".format(component)\n\n parent = hier.get_parent_of(component)\n parent_ps = self.get_component(parent)\n if kind in ['star']:\n # ignore the single star case\n if parent:\n # contact systems MUST by synchronous\n if hier.is_contact_binary(component):\n if self.get_value(qualifier='syncpar', component=component, context='component', **kwargs) != 1.0:\n return False,\\\n 'contact binaries must by synchronous, but syncpar@{}!=1'.format(component)\n\n if self.get_value(qualifier='ecc', component=parent, context='component', **kwargs) != 0.0:\n return False,\\\n 'contact binaries must by circular, but ecc@{}!=0'.format(component)\n\n if self.get_value(qualifier='pitch', component=component, context='component', **kwargs) != 0.0:\n return False,\\\n 'contact binaries must be aligned, but pitch@{}!=0'.format(component)\n\n if self.get_value(qualifier='yaw', component=component, context='component', **kwargs) != 0.0:\n return False,\\\n 'contact binaries must be aligned, but yaw@{}!=0'.format(component)\n\n # MUST NOT be overflowing at PERIASTRON (d=1-ecc, etheta=0)\n\n requiv = comp_ps.get_value('requiv', unit=u.solRad, **kwargs)\n requiv_max = comp_ps.get_value('requiv_max', unit=u.solRad, **kwargs)\n\n\n\n if hier.is_contact_binary(component):\n if np.isnan(requiv) or requiv > requiv_max:\n 
return False,\\\n '{} is overflowing at L2/L3 (requiv={}, requiv_max={})'.format(component, requiv, requiv_max)\n\n\n requiv_min = comp_ps.get_value('requiv_min')\n\n if np.isnan(requiv) or requiv <= requiv_min:\n return False,\\\n '{} is underflowing at L1 and not a contact system (requiv={}, requiv_min={})'.format(component, requiv, requiv_min)\n elif requiv <= requiv_min * 1.001:\n return False,\\\n 'requiv@{} is too close to requiv_min (within 0.1% of critical). Use detached/semidetached model instead.'.format(component)\n\n else:\n if requiv > requiv_max:\n return False,\\\n '{} is overflowing at periastron (requiv={}, requiv_max={})'.format(component, requiv, requiv_max)\n\n else:\n raise NotImplementedError(\"checks not implemented for type '{}'\".format(kind))\n\n # we also need to make sure that stars don't overlap each other\n # so we'll check for each pair of stars (see issue #70 on github)\n # TODO: rewrite overlap checks\n for orbitref in []: #hier.get_orbits():\n if len(hier.get_children_of(orbitref)) == 2:\n q = self.get_value(qualifier='q', component=orbitref, context='component', **kwargs)\n ecc = self.get_value(qualifier='ecc', component=orbitref, context='component', **kwargs)\n\n starrefs = hier.get_children_of(orbitref)\n if hier.get_kind_of(starrefs[0]) != 'star' or hier.get_kind_of(starrefs[1]) != 'star':\n # print \"***\", hier.get_kind_of(starrefs[0]), hier.get_kind_of(starrefs[1])\n continue\n if self.get_value(qualifier='pitch', component=starrefs[0])!=0.0 or \\\n self.get_value(qualifier='pitch', component=starrefs[1])!=0.0 or \\\n self.get_value(qualifier='yaw', component=starrefs[0])!=0.0 or \\\n self.get_value(qualifier='yaw', component=starrefs[1])!=0.0:\n\n # we cannot run this test for misaligned cases\n continue\n\n comp0 = hier.get_primary_or_secondary(starrefs[0], return_ind=True)\n comp1 = hier.get_primary_or_secondary(starrefs[1], return_ind=True)\n q0 = roche.q_for_component(q, comp0)\n q1 = roche.q_for_component(q, 
comp1)\n\n F0 = self.get_value(qualifier='syncpar', component=starrefs[0], context='component', **kwargs)\n F1 = self.get_value(qualifier='syncpar', component=starrefs[1], context='component', **kwargs)\n\n pot0 = self.get_value(qualifier='pot', component=starrefs[0], context='component', **kwargs)\n pot0 = roche.pot_for_component(pot0, q0, comp0)\n\n pot1 = self.get_value(qualifier='pot', component=starrefs[1], context='component', **kwargs)\n pot1 = roche.pot_for_component(pot1, q1, comp1)\n\n xrange0 = libphoebe.roche_xrange(q0, F0, 1.0-ecc, pot0+1e-6, choice=0)\n xrange1 = libphoebe.roche_xrange(q1, F1, 1.0-ecc, pot1+1e-6, choice=0)\n\n if xrange0[1]+xrange1[1] > 1.0-ecc:\n return False,\\\n 'components in {} are overlapping at periastron (change ecc@{}, syncpar@{}, or syncpar@{}).'.format(orbitref, orbitref, starrefs[0], starrefs[1])\n\n\n # check to make sure passband supports the selected atm\n for pbparam in self.filter(qualifier='passband').to_list():\n pb = pbparam.get_value()\n pbatms = _pbtable[pb]['atms']\n # NOTE: atms are not attached to datasets, but per-compute and per-component\n for atmparam in self.filter(qualifier='atm', kind='phoebe').to_list():\n atm = atmparam.get_value()\n if atm not in pbatms:\n return False, \"'{}' passband ({}) does not support atm='{}' ({}).\".format(pb, pbparam.twig, atm, atmparam.twig)\n\n # check length of ld_coeffs vs ld_func and ld_func vs atm\n def ld_coeffs_len(ld_func, ld_coeffs):\n # current choices for ld_func are:\n # ['interp', 'uniform', 'linear', 'logarithmic', 'quadratic', 'square_root', 'power', 'claret', 'hillen', 'prsa']\n if ld_func == 'interp':\n return True,\n elif ld_func in ['linear'] and len(ld_coeffs)==1:\n return True,\n elif ld_func in ['logarithmic', 'square_root', 'quadratic'] and len(ld_coeffs)==2:\n return True,\n elif ld_func in ['power'] and len(ld_coeffs)==4:\n return True,\n else:\n return False, \"ld_coeffs={} wrong length for ld_func='{}'.\".format(ld_coeffs, ld_func)\n\n for 
component in self.hierarchy.get_stars():\n # first check ld_coeffs_bol vs ld_func_bol\n ld_func = str(self.get_value(qualifier='ld_func_bol', component=component, context='component', check_visible=False, **kwargs))\n ld_coeffs = np.asarray(self.get_value(qualifier='ld_coeffs_bol', component=component, context='component', check_visible=False, **kwargs))\n check = ld_coeffs_len(ld_func, ld_coeffs)\n if not check[0]:\n return check\n\n if ld_func != 'interp':\n check = libphoebe.ld_check(ld_func, ld_coeffs)\n if not check:\n return False, 'ld_coeffs_bol={} not compatible for ld_func_bol=\\'{}\\'.'.format(ld_coeffs, ld_func)\n\n for dataset in self.datasets:\n if dataset=='_default' or self.get_dataset(dataset=dataset, kind='*dep').kind not in ['lc_dep', 'rv_dep']:\n continue\n ld_func = str(self.get_value(qualifier='ld_func', dataset=dataset, component=component, context='dataset', **kwargs))\n ld_coeffs = np.asarray(self.get_value(qualifier='ld_coeffs', dataset=dataset, component=component, context='dataset', check_visible=False, **kwargs))\n if ld_coeffs is not None:\n check = ld_coeffs_len(ld_func, ld_coeffs)\n if not check[0]:\n return check\n\n if ld_func != 'interp':\n check = libphoebe.ld_check(ld_func, ld_coeffs)\n if not check:\n return False, 'ld_coeffs={} not compatible for ld_func=\\'{}\\'.'.format(ld_coeffs, ld_func)\n\n if ld_func=='interp':\n for compute in kwargs.get('computes', self.computes):\n atm = self.get_value(qualifier='atm', component=component, compute=compute, context='compute', **kwargs)\n if atm != 'ck2004':\n return False, \"ld_func='interp' only supported by atm='ck2004'. 
Either change atm@{} or ld_func@{}@{}\".format(component, component, dataset)\n\n # mesh-consistency checks\n for compute in self.computes:\n mesh_methods = [p.get_value() for p in self.filter(qualifier='mesh_method', compute=compute, force_ps=True).to_list()]\n if 'wd' in mesh_methods:\n if len(set(mesh_methods)) > 1:\n return False, \"all (or none) components must use mesh_method='wd'.\"\n\n #### WARNINGS ONLY ####\n # let's check teff vs gravb_bol and irrad_frac_refl_bol\n for component in self.hierarchy.get_stars():\n teff = self.get_value(qualifier='teff', component=component, context='component', unit=u.K, **kwargs)\n gravb_bol = self.get_value(qualifier='gravb_bol', component=component, context='component', **kwargs)\n\n if teff >= 8000. and gravb_bol < 0.9:\n return None, \"'{}' probably has a radiative atm (teff={:.0f}K>8000K), for which gravb_bol=1.00 might be a better approx than gravb_bol={:.2f}.\".format(component, teff, gravb_bol)\n elif teff <= 6600. and gravb_bol >= 0.9:\n return None, \"'{}' probably has a convective atm (teff={:.0f}K<6600K), for which gravb_bol=0.32 might be a better approx than gravb_bol={:.2f}.\".format(component, teff, gravb_bol)\n elif gravb_bol < 0.32 or gravb_bol > 1.00:\n return None, \"'{}' has intermittent temperature (6600K= 8000. and irrad_frac_refl_bol < 0.8:\n return None, \"'{}' probably has a radiative atm (teff={:.0f}K>8000K), for which irrad_frac_refl_bol=1.00 might be a better approx than irrad_frac_refl_bol={:.2f}.\".format(component, teff, irrad_frac_refl_bol)\n elif teff <= 6600. 
and irrad_frac_refl_bol >= 0.75:\n return None, \"'{}' probably has a convective atm (teff={:.0f}K<6600K), for which irrad_frac_refl_bol=0.6 might be a better approx than irrad_frac_refl_bol={:.2f}.\".format(component, teff, irrad_frac_refl_bol)\n elif irrad_frac_refl_bol < 0.6:\n return None, \"'{}' has intermittent temperature (6600K>> b.add_feature(feature.spot, component='mystar')\n\n or\n\n >>> b.add_feature('spot', 'mystar', colat=90)\n\n Available kinds include:\n * :func:`phoebe.parameters.feature.spot`\n\n :parameter kind: function to call that returns a\n ParameterSet or list of parameters. This must either be\n a callable function that accepts nothing but default values,\n or the name of a function (as a string) that can be found in the\n :mod:`phoebe.parameters.feature` module (ie. 'spot')\n :type kind: str or callable\n :parameter str component: name of the component to attach the feature\n :parameter str feature: (optional) name of the newly-created feature\n :parameter **kwargs: default value for any of the newly-created\n parameters\n :return: :class:`phoebe.parameters.parameters.ParameterSet` of\n all parameters that have been added\n :raises NotImplementedError: if required constraint is not implemented\n \"\"\"\n func = _get_add_func(_feature, kind)\n\n if kwargs.get('feature', False) is None:\n # then we want to apply the default below, so let's pop for now\n _ = kwargs.pop('feature')\n\n kwargs.setdefault('feature',\n self._default_label(func.func_name,\n **{'context': 'feature',\n 'kind': func.func_name}))\n\n self._check_label(kwargs['feature'])\n\n if component is None:\n stars = self.hierarchy.get_meshables()\n if len(stars) == 1:\n component = stars[0]\n else:\n raise ValueError(\"must provide component\")\n\n if component not in self.components:\n raise ValueError('component not recognized')\n\n component_kind = self.filter(component=component, context='component').kind\n if not _feature._component_allowed_for_feature(func.func_name, 
component_kind):\n raise ValueError(\"{} does not support component with kind {}\".format(func.func_name, component_kind))\n\n params, constraints = func(**kwargs)\n\n metawargs = {'context': 'feature',\n 'component': component,\n 'feature': kwargs['feature'],\n 'kind': func.func_name}\n\n self._attach_params(params, **metawargs)\n\n redo_kwargs = deepcopy(kwargs)\n redo_kwargs['func'] = func.func_name\n self._add_history(redo_func='add_feature',\n redo_kwargs=redo_kwargs,\n undo_func='remove_feature',\n undo_kwargs={'feature': kwargs['feature']})\n\n for constraint in constraints:\n self.add_constraint(*constraint)\n\n #return params\n # NOTE: we need to call get_ in order to make sure all metawargs are applied\n return self.get_feature(**metawargs)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_feature(self, feature=None, **kwargs):\n if feature is not None:\n kwargs['feature'] = feature\n kwargs['context'] = 'feature'\n return self.filter(**kwargs)", "response": "Filter in the proerty context\n "} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nremoves a feature from the bundle.", "response": "def remove_feature(self, feature=None, **kwargs):\n \"\"\"\n [NOT IMPLEMENTED]\n\n Remove a 'feature' from the bundle\n\n :raises NotImplementedError: because this isn't implemented yet\n \"\"\"\n self._kwargs_checks(kwargs)\n\n # Let's avoid deleting ALL features from the matching contexts\n if feature is None and not len(kwargs.items()):\n raise ValueError(\"must provide some value to filter for features\")\n\n kwargs['feature'] = feature\n\n # Let's avoid the possibility of deleting a single parameter\n kwargs['qualifier'] = None\n\n # Let's also avoid the possibility of accidentally deleting system\n # parameters, etc\n kwargs.setdefault('context', ['feature'])\n\n self.remove_parameters_all(**kwargs)\n\n self._add_history(redo_func='remove_feature',\n redo_kwargs=kwargs,\n 
undo_func=None,\n undo_kwargs={})\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef rename_feature(self, old_feature, new_feature):\n # TODO: raise error if old_feature not found?\n\n self._check_label(new_feature)\n self._rename_label('feature', old_feature, new_feature)", "response": "Change the label of a feature attached to the Bundle\n ."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_spot(self, component=None, feature=None, **kwargs):\n if component is None:\n if len(self.hierarchy.get_stars())==1:\n component = self.hierarchy.get_stars()[0]\n else:\n raise ValueError(\"must provide component for spot\")\n\n kwargs.setdefault('component', component)\n kwargs.setdefault('feature', feature)\n return self.add_feature('spot', **kwargs)", "response": "Shortcut to add_feature but with kind = spot"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef remove_spot(self, feature=None, **kwargs):\n kwargs.setdefault('kind', 'spot')\n return self.remove_feature(feature, **kwargs)", "response": "Shortcut to remove_feature but with kind = spot"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_component(self, kind, **kwargs):\n\n func = _get_add_func(component, kind)\n\n if kwargs.get('component', False) is None:\n # then we want to apply the default below, so let's pop for now\n _ = kwargs.pop('component')\n\n kwargs.setdefault('component',\n self._default_label(func.func_name,\n **{'context': 'component',\n 'kind': func.func_name}))\n\n if kwargs.pop('check_label', True):\n self._check_label(kwargs['component'])\n\n params, constraints = func(**kwargs)\n\n metawargs = {'context': 'component',\n 'component': kwargs['component'],\n 'kind': func.func_name}\n\n self._attach_params(params, **metawargs)\n\n redo_kwargs = 
deepcopy(kwargs)\n redo_kwargs['func'] = func.func_name\n self._add_history(redo_func='add_component',\n redo_kwargs=redo_kwargs,\n undo_func='remove_component',\n undo_kwargs={'component': kwargs['component']})\n\n for constraint in constraints:\n self.add_constraint(*constraint)\n\n # since we've already processed (so that we can get the new qualifiers),\n # we'll only raise a warning\n self._kwargs_checks(kwargs, warning_only=True)\n\n # return params\n return self.get_component(**metawargs)", "response": "Add a new component to the system."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nfiltering in the component context", "response": "def get_component(self, component=None, **kwargs):\n \"\"\"\n Filter in the 'component' context\n\n :parameter str component: name of the component (optional)\n :parameter **kwargs: any other tags to do the filter\n (except component or context)\n :return: :class:`phoebe.parameters.parameters.ParameterSet`\n \"\"\"\n if component is not None:\n kwargs['component'] = component\n kwargs['context'] = 'component'\n return self.filter(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef remove_component(self, component, **kwargs):\n # NOTE: run_checks will check if an entry is in the hierarchy but has no parameters\n kwargs['component'] = component\n # NOTE: we do not remove from 'model' by default\n kwargs['context'] = ['component', 'constraint', 'dataset', 'compute']\n self.remove_parameters_all(**kwargs)", "response": "Removes a component from the bundle"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchanging the label of a component attached to the Bundle .", "response": "def rename_component(self, old_component, new_component):\n \"\"\"\n Change the label of a component attached to the Bundle\n\n :parameter str old_component: the current name of the component\n (must exist)\n :parameter str new_component: 
the desired new name of the component\n (must not exist)\n :return: None\n :raises ValueError: if the new_component is forbidden\n \"\"\"\n # TODO: raise error if old_component not found?\n\n # even though _rename_tag will call _check_label again, we should\n # do it first so that we can raise any errors BEFORE we start messing\n # with the hierarchy\n self._check_label(new_component)\n # changing hierarchy must be called first since it needs to access\n # the kind of old_component\n if len([c for c in self.components if new_component in c]):\n logger.warning(\"hierarchy may not update correctly with new component\")\n self.hierarchy.rename_component(old_component, new_component)\n\n self._rename_label('component', old_component, new_component)\n\n self._handle_dataset_selectparams()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_orbit(self, component=None, **kwargs):\n kwargs.setdefault('component', component)\n return self.add_component('orbit', **kwargs)", "response": "Shortcut to add_component but with kind = orbit"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef remove_orbit(self, component=None, **kwargs):\n kwargs.setdefault('kind', 'orbit')\n return self.remove_component(component, **kwargs)", "response": "Shortcut to remove_component but with kind = orbit"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_star(self, component=None, **kwargs):\n kwargs.setdefault('kind', 'star')\n return self.get_component(component, **kwargs)", "response": "Shortcut to get_component but with kind = star"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_envelope(self, component=None, **kwargs):\n kwargs.setdefault('component', component)\n return self.add_component('envelope', **kwargs)", "response": "Shortcut to add_envelope 
but with kind = envelope"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the ephemeris of a given component.", "response": "def get_ephemeris(self, component=None, t0='t0_supconj', **kwargs):\n \"\"\"\n Get the ephemeris of a component (star or orbit)\n\n :parameter str component: name of the component. If not given,\n component will default to the top-most level of the current\n hierarchy\n :parameter t0: qualifier of the parameter to be used for t0\n :type t0: str\n :parameter **kwargs: any value passed through kwargs will override the\n ephemeris retrieved by component (ie period, t0, dpdt).\n Note: be careful about units - input values will not be converted.\n :return: dictionary containing period, t0 (t0_supconj if orbit),\n dpdt (as applicable)\n :rtype: dict\n \"\"\"\n\n if component is None:\n component = self.hierarchy.get_top()\n\n if kwargs.get('shift', False):\n raise ValueError(\"support for phshift was removed as of 2.1. Please pass t0 instead.\")\n\n ret = {}\n\n ps = self.filter(component=component, context='component')\n\n if ps.kind in ['orbit']:\n ret['period'] = ps.get_value(qualifier='period', unit=u.d)\n if isinstance(t0, str):\n ret['t0'] = ps.get_value(qualifier=t0, unit=u.d)\n elif isinstance(t0, float) or isinstance(t0, int):\n ret['t0'] = t0\n else:\n raise ValueError(\"t0 must be string (qualifier) or float\")\n ret['dpdt'] = ps.get_value(qualifier='dpdt', unit=u.d/u.d)\n elif ps.kind in ['star']:\n # TODO: consider renaming period to prot\n ret['period'] = ps.get_value(qualifier='period', unit=u.d)\n else:\n raise NotImplementedError\n\n for k,v in kwargs.items():\n ret[k] = v\n\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nconverts a time to phases", "response": "def to_phase(self, time, component=None, t0='t0_supconj', **kwargs):\n \"\"\"\n Get the phase(s) of a time(s) for a given ephemeris\n\n :parameter time: time to convert to phases (should 
be in same system\n as t0s)\n :type time: float, list, or array\n :parameter t0: qualifier of the parameter to be used for t0\n :type t0: str\n :parameter str component: component for which to get the ephemeris.\n If not given, component will default to the top-most level of the\n current hierarchy\n :parameter **kwargs: any value passed through kwargs will override the\n ephemeris retrieved by component (ie period, t0, dpdt).\n Note: be careful about units - input values will not be converted.\n :return: phase (float) or phases (array)\n \"\"\"\n\n if kwargs.get('shift', False):\n raise ValueError(\"support for phshift was removed as of 2.1. Please pass t0 instead.\")\n\n ephem = self.get_ephemeris(component=component, t0=t0, **kwargs)\n\n if isinstance(time, list):\n time = np.array(time)\n elif isinstance(time, Parameter):\n time = time.get_value(u.d)\n elif isinstance(time, str):\n time = self.get_value(time, u.d)\n\n t0 = ephem.get('t0', 0.0)\n period = ephem.get('period', 1.0)\n dpdt = ephem.get('dpdt', 0.0)\n\n if dpdt != 0:\n phase = np.mod(1./dpdt * np.log(period + dpdt*(time-t0)), 1.0)\n else:\n phase = np.mod((time-t0)/period, 1.0)\n\n if isinstance(phase, float):\n if phase > 0.5:\n phase -= 1\n else:\n # then should be an array\n phase[phase > 0.5] -= 1\n\n return phase"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconverting the ephemeris time to a given phase.", "response": "def to_time(self, phase, component=None, t0='t0_supconj', **kwargs):\n \"\"\"\n Get the time(s) of a phase(s) for a given ephemeris\n\n :parameter phase: phase to convert to times (should be in\n same system as t0s)\n :type phase: float, list, or array\n ` :parameter str component: component for which to get the ephemeris.\n If not given, component will default to the top-most level of the\n current hierarchy\n :parameter t0: qualifier of the parameter to be used for t0\n :type t0: str\n :parameter **kwargs: any value passed through kwargs will 
override the\n ephemeris retrieved by component (ie period, t0, dpdt).\n Note: be careful about units - input values will not be converted.\n :return: time (float) or times (array)\n \"\"\"\n\n if kwargs.get('shift', False):\n raise ValueError(\"support for phshift was removed as of 2.1. Please pass t0 instead.\")\n\n ephem = self.get_ephemeris(component=component, t0=t0, **kwargs)\n\n if isinstance(phase, list):\n phase = np.array(phase)\n\n t0 = ephem.get('t0', 0.0)\n period = ephem.get('period', 1.0)\n dpdt = ephem.get('dpdt', 0.0)\n\n # if changing this, also see parameters.constraint.time_ephem\n if dpdt != 0:\n time = t0 + 1./dpdt*(np.exp(dpdt*(phase))-period)\n else:\n time = t0 + (phase)*period\n\n return time"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_dataset(self, kind, component=None, **kwargs):\n\n sing_plural = {}\n sing_plural['time'] = 'times'\n sing_plural['flux'] = 'fluxes'\n sing_plural['sigma'] = 'sigmas'\n sing_plural['rv'] = 'rvs'\n\n func = _get_add_func(_dataset, kind.lower()\n if isinstance(kind, str)\n else kind)\n\n kwargs.setdefault('dataset',\n self._default_label(func.func_name,\n **{'context': 'dataset',\n 'kind': func.func_name}))\n\n if kwargs.pop('check_label', True):\n self._check_label(kwargs['dataset'])\n\n kind = func.func_name\n\n # Let's remember if the user passed components or if they were automatically assigned\n user_provided_components = component or kwargs.get('components', False)\n\n if kind == 'lc':\n allowed_components = [None]\n default_components = allowed_components\n elif kind in ['rv', 'orb']:\n allowed_components = self.hierarchy.get_stars() # + self.hierarchy.get_orbits()\n default_components = self.hierarchy.get_stars()\n # TODO: how are we going to handle overcontacts dynamical vs flux-weighted\n elif kind in ['mesh']:\n # allowed_components = self.hierarchy.get_meshables()\n allowed_components = [None]\n # allowed_components = 
self.hierarchy.get_stars()\n # TODO: how will this work when changing hierarchy to add/remove the common envelope?\n default_components = allowed_components\n elif kind in ['etv']:\n hier = self.hierarchy\n stars = hier.get_stars()\n # only include components in which the sibling is also a star that\n # means that the companion in a triple cannot be timed, because how\n # do we know who it's eclipsing?\n allowed_components = [s for s in stars if hier.get_sibling_of(s) in stars]\n default_components = allowed_components\n elif kind in ['lp']:\n # TODO: need to think about what this should be for contacts...\n allowed_components = self.hierarchy.get_stars() + self.hierarchy.get_orbits()\n default_components = [self.hierarchy.get_top()]\n\n else:\n allowed_components = [None]\n default_components = [None]\n\n # Let's handle the case where the user accidentally sends components\n # instead of component\n if kwargs.get('components', None) and component is None:\n logger.warning(\"assuming you meant 'component' instead of 'components'\")\n components = kwargs.pop('components')\n else:\n components = component\n\n if isinstance(components, str):\n components = [components]\n elif hasattr(components, '__iter__'):\n components = components\n elif components is None:\n components = default_components\n else:\n raise NotImplementedError\n\n # Let's handle the case where the user accidentally sends singular\n # instead of plural (since we used to have this)\n # TODO: use parameter._singular_to_plural?\n for singular, plural in sing_plural.items():\n if kwargs.get(singular, None) is not None and kwargs.get(plural, None) is None:\n logger.warning(\"assuming you meant '{}' instead of '{}'\".format(plural, singular))\n kwargs[plural] = kwargs.pop(singular)\n\n if not np.all([component in allowed_components\n for component in components]):\n raise ValueError(\"'{}' not a recognized/allowable component\".format(component))\n\n obs_metawargs = {'context': 'dataset',\n 'kind': kind,\n 
'dataset': kwargs['dataset']}\n\n if kind in ['lp']:\n # then times needs to be passed now to duplicate and tag the Parameters\n # correctly\n obs_kwargs = {'times': kwargs.pop('times', [])}\n else:\n obs_kwargs = {}\n\n obs_params, constraints = func(**obs_kwargs)\n self._attach_params(obs_params, **obs_metawargs)\n\n for constraint in constraints:\n # TODO: tricky thing here will be copying the constraints\n self.add_constraint(*constraint)\n\n dep_func = _get_add_func(_dataset, \"{}_dep\".format(kind))\n dep_metawargs = {'context': 'dataset',\n 'kind': '{}_dep'.format(kind),\n 'dataset': kwargs['dataset']}\n dep_params = dep_func()\n self._attach_params(dep_params, **dep_metawargs)\n\n # Now we need to apply any kwargs sent by the user. There are a few\n # scenarios (and each kwargs could fall into different ones):\n # times = [0,1,2]\n # in this case, we want to apply time across all of the components that\n # are applicable for this dataset kind AND to _default so that any\n # future components added to the system are copied appropriately\n # times = [0,1,2], components=['primary', 'secondary']\n # in this case, we want to apply the value for time across components\n # but time@_default should remain empty (it will not copy for components\n # added in the future)\n # times = {'primary': [0,1], 'secondary': [0,1,2]}\n # here, regardless of the components, we want to apply these to their\n # individually requested parameters. 
We won't touch _default unless\n # its included in the dictionary\n\n # this needs to happen before kwargs get applied so that the default\n # values can be overridden by the supplied kwargs\n self._handle_pblum_defaults()\n self._handle_dataset_selectparams()\n\n for k, v in kwargs.items():\n if isinstance(v, dict):\n for component, value in v.items():\n logger.debug(\"setting value of dataset parameter: qualifier={}, dataset={}, component={}, value={}\".format(k, kwargs['dataset'], component, value))\n try:\n self.set_value_all(qualifier=k,\n dataset=kwargs['dataset'],\n component=component,\n value=value,\n check_visible=False,\n ignore_none=True)\n except:\n self.remove_dataset(dataset=kwargs['dataset'])\n raise ValueError(\"could not set value for {}={}, dataset has not been added\".format(k, value))\n\n elif k in ['dataset']:\n pass\n else:\n # for dataset kinds that include passband dependent AND\n # independent parameters, we need to carefully default on\n # what component to use when passing the defaults\n if kind in ['rv', 'lp'] and k in ['ld_func', 'ld_coeffs',\n 'passband', 'intens_weighting',\n 'profile_rest', 'profile_func', 'profile_sv']:\n # passband-dependent (ie lc_dep) parameters do not have\n # assigned components\n components_ = None\n elif components == [None]:\n components_ = None\n elif user_provided_components:\n components_ = components\n else:\n components_ = components+['_default']\n\n logger.debug(\"setting value of dataset parameter: qualifier={}, dataset={}, component={}, value={}\".format(k, kwargs['dataset'], components_, v))\n try:\n self.set_value_all(qualifier=k,\n dataset=kwargs['dataset'],\n component=components_,\n value=v,\n check_visible=False,\n ignore_none=True)\n except:\n self.remove_dataset(dataset=kwargs['dataset'])\n raise ValueError(\"could not set value for {}={}, dataset has not been added\".format(k, v))\n\n\n redo_kwargs = deepcopy({k:v if not isinstance(v, nparray.ndarray) else v.to_json() for k,v in 
kwargs.items()})\n redo_kwargs['func'] = func.func_name\n self._add_history(redo_func='add_dataset',\n redo_kwargs=redo_kwargs,\n undo_func='remove_dataset',\n undo_kwargs={'dataset': kwargs['dataset']})\n\n # since we've already processed (so that we can get the new qualifiers),\n # we'll only raise a warning\n self._kwargs_checks(kwargs, warning_only=True)\n\n return self.filter(dataset=kwargs['dataset'])", "response": "Add a new dataset to the bundle."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nfilters in the dataset context", "response": "def get_dataset(self, dataset=None, **kwargs):\n \"\"\"\n Filter in the 'dataset' context\n\n :parameter str dataset: name of the dataset (optional)\n :parameter **kwargs: any other tags to do the filter\n (except dataset or context)\n :return: :class:`phoebe.parameters.parameters.ParameterSet`\n \"\"\"\n if dataset is not None:\n kwargs['dataset'] = dataset\n\n kwargs['context'] = 'dataset'\n if 'kind' in kwargs.keys():\n # since we switched how dataset kinds are named, let's just\n # automatically handle switching to lowercase\n kwargs['kind'] = kwargs['kind'].lower()\n return self.filter(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nremove a dataset from the Bundle. This removes all matching Parameters from the dataset, model, and constraint contexts (by default if the context tag is not provided). You must provide some sort of filter or this will raise an Error (so that all Parameters are not accidentally removed). 
:parameter str dataset: name of the dataset :parameter **kwargs: any other tags to do the filter (except qualifier and dataset) :raises ValueError: if no filter is provided", "response": "def remove_dataset(self, dataset=None, **kwargs):\n \"\"\" Remove a dataset from the Bundle.\n\n This removes all matching Parameters from the dataset, model, and\n constraint contexts (by default if the context tag is not provided).\n\n You must provide some sort of filter or this will raise an Error (so\n that all Parameters are not accidentally removed).\n\n :parameter str dataset: name of the dataset\n :parameter **kwargs: any other tags to do the filter (except qualifier\n and dataset)\n :raises ValueError: if no filter is provided\n \"\"\"\n\n self._kwargs_checks(kwargs)\n\n # Let's avoid deleting ALL parameters from the matching contexts\n if dataset is None and not len(kwargs.items()):\n raise ValueError(\"must provide some value to filter for datasets\")\n\n # let's handle deps if kind was passed\n kind = kwargs.get('kind', None)\n\n if kind is not None:\n if isinstance(kind, str):\n kind = [kind]\n kind_deps = []\n for kind_i in kind:\n dep = '{}_dep'.format(kind_i)\n if dep not in kind:\n kind_deps.append(dep)\n kind = kind + kind_deps\n kwargs['kind'] = kind\n\n\n if dataset is None:\n # then let's find the list of datasets that match the filter,\n # we'll then use dataset to do the removing. 
This avoids leaving\n # pararameters behind that don't specifically match the filter\n # (ie if kind is passed as 'rv' we still want to remove parameters\n # with datasets that are RVs but belong to a different kind in\n # another context like compute)\n dataset = self.filter(**kwargs).datasets\n kwargs['kind'] = None\n\n\n kwargs['dataset'] = dataset\n # Let's avoid the possibility of deleting a single parameter\n kwargs['qualifier'] = None\n # Let's also avoid the possibility of accidentally deleting system\n # parameters, etc\n kwargs.setdefault('context', ['dataset', 'model', 'constraint', 'compute'])\n\n # ps = self.filter(**kwargs)\n # logger.info('removing {} parameters (this is not undoable)'.\\\n # format(len(ps)))\n\n # print \"*** kwargs\", kwargs, len(ps)\n self.remove_parameters_all(**kwargs)\n # not really sure why we need to call this twice, but it seems to do\n # the trick\n self.remove_parameters_all(**kwargs)\n\n self._handle_dataset_selectparams()\n\n # TODO: check to make sure that trying to undo this\n # will raise an error saying this is not undo-able\n self._add_history(redo_func='remove_dataset',\n redo_kwargs={'dataset': dataset},\n undo_func=None,\n undo_kwargs={})\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchanging the label of a dataset attached to the Bundle", "response": "def rename_dataset(self, old_dataset, new_dataset):\n \"\"\"\n Change the label of a dataset attached to the Bundle\n\n :parameter str old_dataset: the current name of the dataset\n (must exist)\n :parameter str new_dataset: the desired new name of the dataset\n (must not exist)\n :return: None\n :raises ValueError: if the new_dataset is forbidden\n \"\"\"\n # TODO: raise error if old_component not found?\n\n self._check_label(new_dataset)\n self._rename_label('dataset', old_dataset, new_dataset)\n self._handle_dataset_selectparams()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 
code\ndef enable_dataset(self, dataset=None, **kwargs):\n kwargs['context'] = 'compute'\n kwargs['dataset'] = dataset\n kwargs['qualifier'] = 'enabled'\n self.set_value_all(value=True, **kwargs)\n\n self._add_history(redo_func='enable_dataset',\n redo_kwargs={'dataset': dataset},\n undo_func='disable_dataset',\n undo_kwargs={'dataset': dataset})\n\n return self.get_dataset(dataset=dataset)", "response": "Enable a 'dataset'. Datasets that are enabled will be computed\n during :meth:`run_compute` and included in the cost function\n during :meth:`run_fitting`.\n\n If compute is not provided, the dataset will be enabled across all\n compute options.\n\n :parameter str dataset: name of the dataset\n :parameter **kwargs: any other tags to do the filter\n (except dataset or context)\n :return: :class:`phoebe.parameters.parameters.ParameterSet`\n of the enabled dataset"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef disable_dataset(self, dataset=None, **kwargs):\n kwargs['context'] = 'compute'\n kwargs['dataset'] = dataset\n kwargs['qualifier'] = 'enabled'\n self.set_value_all(value=False, **kwargs)\n\n self._add_history(redo_func='disable_dataset',\n redo_kwargs={'dataset': dataset},\n undo_func='enable_dataset',\n undo_kwargs={'dataset': dataset})\n\n return self.get_dataset(dataset=dataset)", "response": "Disable a 'dataset'. 
Datasets that are enabled will be computed\n during :meth:`run_compute` and included in the cost function\n during :meth:`run_fitting`.\n\n If compute is not provided, the dataset will be disabled across all\n compute options.\n\n :parameter str dataset: name of the dataset\n :parameter **kwargs: any other tags to do the filter\n (except dataset or context)\n :return: :class:`phoebe.parameters.parameters.ParameterSet`\n of the disabled dataset"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a constraint to the current object", "response": "def add_constraint(self, *args, **kwargs):\n \"\"\"\n TODO: add documentation\n\n args can be string representation (length 1)\n func and strings to pass to function\n \"\"\"\n # TODO: be smart enough to take kwargs (especially for undoing a\n # remove_constraint) for kind, value (expression),\n\n redo_kwargs = deepcopy(kwargs)\n\n if len(args) == 1 and \\\n isinstance(args[0], str) and \\\n not _get_add_func(_constraint, args[0],\n return_none_if_not_found=True):\n # then only the expression has been passed,\n # we just need to pass it on to constraints.custom\n\n func = constraint.custom\n func_args = args\n\n elif len(args) == 2 and \\\n all([isinstance(arg, Parameter) or\n isinstance(arg, ConstraintParameter) for arg in args]):\n # then we have 2 constraint expressions\n\n func = constraint.custom\n func_args = args\n\n elif len(args) == 0:\n # then everything is passed through kwargs\n if 'kind' in kwargs.keys():\n func = _get_add_func(_constraint, kwargs['kind'])\n elif 'func' in kwargs.keys():\n func = _get_add_func(_constraint, kwargs['func'])\n elif 'constraint_func' in kwargs.keys():\n func = _get_add_func(_constraint, kwargs['constraint_func'])\n else:\n func = constraint.custom\n\n func_args = []\n\n # constraint_param = ConstraintParameter(self, **kwargs)\n\n else:\n # then we've been passed the function in constraints and its\n # arguments\n\n func = _get_add_func(_constraint, 
args[0])\n func_args = args[1:]\n\n if 'solve_for' in kwargs.keys():\n # solve_for is a twig, we need to pass the parameter\n kwargs['solve_for'] = self.get_parameter(kwargs['solve_for'])\n\n lhs, rhs, constraint_kwargs = func(self, *func_args, **kwargs)\n # NOTE that any component parameters required have already been\n # created by this point\n\n constraint_param = ConstraintParameter(self,\n qualifier=lhs.qualifier,\n component=lhs.component,\n dataset=lhs.dataset,\n feature=lhs.feature,\n kind=lhs.kind,\n model=lhs.model,\n constraint_func=func.__name__,\n constraint_kwargs=constraint_kwargs,\n in_solar_units=func.__name__ not in constraint.list_of_constraints_requiring_si,\n value=rhs,\n default_unit=lhs.default_unit,\n description='expression that determines the constraint')\n\n\n newly_constrained_param = constraint_param.get_constrained_parameter()\n check_kwargs = {k:v for k,v in newly_constrained_param.meta.items() if k not in ['context', 'twig', 'uniquetwig']}\n check_kwargs['context'] = 'constraint'\n if len(self._bundle.filter(**check_kwargs)):\n raise ValueError(\"'{}' is already constrained\".format(newly_constrained_param.twig))\n\n metawargs = {'context': 'constraint',\n 'kind': func.func_name}\n\n params = ParameterSet([constraint_param])\n constraint_param._update_bookkeeping()\n self._attach_params(params, **metawargs)\n\n redo_kwargs['func'] = func.func_name\n\n self._add_history(redo_func='add_constraint',\n redo_kwargs=redo_kwargs,\n undo_func='remove_constraint',\n undo_kwargs={'uniqueid': constraint_param.uniqueid})\n\n # we should run it now to make sure everything is in-sync\n if conf.interactive_constraints:\n self.run_constraint(uniqueid=constraint_param.uniqueid, skip_kwargs_checks=True)\n else:\n self._delayed_constraints.append(constraint_param.uniqueid)\n\n return params"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning a ParameterSet object with the constraint name", "response": "def 
get_constraint(self, twig=None, **kwargs):\n \"\"\"\n Filter in the 'constraint' context\n\n :parameter str constraint: name of the constraint (optional)\n :parameter **kwargs: any other tags to do the filter\n (except constraint or context)\n :return: :class:`phoebe.parameters.parameters.ParameterSet`\n \"\"\"\n if twig is not None:\n kwargs['twig'] = twig\n kwargs['context'] = 'constraint'\n return self.get(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nremove a constraint from the bundle", "response": "def remove_constraint(self, twig=None, **kwargs):\n \"\"\"\n Remove a 'constraint' from the bundle\n\n :parameter str twig: twig to filter for the constraint\n :parameter **kwargs: any other tags to do the filter\n (except twig or context)\n \"\"\"\n # let's run delayed constraints first to ensure that we get the same\n # results in interactive and non-interactive modes as well as to make\n # sure we don't have delayed constraints for the constraint we're\n # about to remove. 
This could perhaps be optimized by searching\n # for this/these constraints and only running/removing those, but\n # probably isn't worth the savings.\n changed_params = self.run_delayed_constraints()\n\n kwargs['twig'] = twig\n redo_kwargs = deepcopy(kwargs)\n\n kwargs['context'] = 'constraint'\n\n # we'll get the constraint so that we can undo the bookkeeping\n # and also reproduce an undo command\n constraint = self.get_parameter(**kwargs)\n\n # undo parameter bookkeeping\n constraint._remove_bookkeeping()\n\n # and finally remove it\n self.remove_parameter(**kwargs)\n\n\n undo_kwargs = {k: v for k, v in constraint.to_dict().items()\n if v is not None and\n k not in ['uniqueid', 'uniquetwig', 'twig',\n 'Class', 'context']}\n self._add_history(redo_func='remove_constraint',\n redo_kwargs=redo_kwargs,\n undo_func='add_constraint',\n undo_kwargs=undo_kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nflips an existing constraint to solve for a different parameter", "response": "def flip_constraint(self, twig=None, solve_for=None, **kwargs):\n \"\"\"\n Flip an existing constraint to solve for a different parameter\n\n :parameter str twig: twig to filter the constraint\n :parameter solve_for: twig or actual parameter object of the new\n parameter which this constraint should constraint (solve for).\n :type solve_for: str or :class:`phoebe.parameters.parameters.Parameter\n :parameter **kwargs: any other tags to do the filter\n (except twig or context)\n\n \"\"\"\n self._kwargs_checks(kwargs, additional_allowed_keys=['check_nan'])\n\n kwargs['twig'] = twig\n redo_kwargs = deepcopy(kwargs)\n undo_kwargs = deepcopy(kwargs)\n\n changed_params = self.run_delayed_constraints()\n\n param = self.get_constraint(**kwargs)\n\n if kwargs.pop('check_nan', True) and np.any(np.isnan([p.get_value() for p in param.vars.to_list()])):\n raise ValueError(\"cannot flip constraint while the value of {} is nan\".format([p.twig for p in 
param.vars.to_list() if np.isnan(p.get_value())]))\n\n if solve_for is None:\n return param\n if isinstance(solve_for, Parameter):\n solve_for = solve_for.uniquetwig\n\n redo_kwargs['solve_for'] = solve_for\n undo_kwargs['solve_for'] = param.constrained_parameter.uniquetwig\n\n logger.info(\"flipping constraint '{}' to solve for '{}'\".format(param.uniquetwig, solve_for))\n param.flip_for(solve_for)\n\n result = self.run_constraint(uniqueid=param.uniqueid, skip_kwargs_checks=True)\n\n self._add_history(redo_func='flip_constraint',\n redo_kwargs=redo_kwargs,\n undo_func='flip_constraint',\n undo_kwargs=undo_kwargs)\n\n return param"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef run_constraint(self, twig=None, return_parameter=False, **kwargs):\n if not kwargs.get('skip_kwargs_checks', False):\n self._kwargs_checks(kwargs)\n\n kwargs['twig'] = twig\n kwargs['context'] = 'constraint'\n # kwargs['qualifier'] = 'expression'\n kwargs['check_visible'] = False\n kwargs['check_default'] = False\n # print \"***\", kwargs\n expression_param = self.get_parameter(**kwargs)\n\n kwargs = {}\n kwargs['twig'] = None\n # TODO: this might not be the case, we just know its not in constraint\n kwargs['qualifier'] = expression_param.qualifier\n kwargs['component'] = expression_param.component\n kwargs['dataset'] = expression_param.dataset\n kwargs['feature'] = expression_param.feature\n kwargs['context'] = []\n if kwargs['component'] is not None:\n kwargs['context'] += ['component']\n if kwargs['dataset'] is not None:\n kwargs['context'] += ['dataset']\n if kwargs['feature'] is not None:\n kwargs['context'] += ['feature']\n\n kwargs['check_visible'] = False\n kwargs['check_default'] = False\n constrained_param = self.get_parameter(**kwargs)\n\n result = expression_param.result\n\n constrained_param.set_value(result, force=True, run_constraints=True)\n\n logger.debug(\"setting '{}'={} from '{}' 
constraint\".format(constrained_param.uniquetwig, result, expression_param.uniquetwig))\n\n if return_parameter:\n return constrained_param\n else:\n return result", "response": "Run a given constraint now and set the value of the constrained parameter."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncomputes the passband luminosities that will be applied to the system and then re - compute the pblums for the specified compute.", "response": "def compute_pblums(self, compute=None, **kwargs):\n \"\"\"\n Compute the passband luminosities that will be applied to the system,\n following all coupling, etc, as well as all relevant compute options\n (ntriangles, distortion_method, etc). The exposed passband luminosities\n (and any coupling) are computed at t0@system.\n\n This method is only for convenience and will be recomputed internally\n within run_compute. Alternatively, you can create a mesh dataset\n and request any specific pblum to be exposed (per-time).\n\n :parameter str compute: label of the compute options (note required if\n only one is attached to the bundle)\n :parameter component: (optional) label of the component(s) requested\n :type component: str or list of strings\n :parameter dataset: (optional) label of the dataset(s) requested\n :type dataset: str or list of strings\n :parameter component: (optional) label of the component(s) requested\n :type component: str or list of strings\n :return: dictionary with keys @ and computed pblums\n as values (as quantity objects, default units of W)\n\n \"\"\"\n datasets = kwargs.pop('dataset', self.datasets)\n components = kwargs.pop('component', self.components)\n\n # don't allow things like model='mymodel', etc\n forbidden_keys = parameters._meta_fields_filter\n self._kwargs_checks(kwargs, additional_forbidden_keys=forbidden_keys)\n\n if compute is None:\n if len(self.computes)==1:\n compute = self.computes[0]\n else:\n raise ValueError(\"must provide 
compute\")\n\n system = backends.PhoebeBackend()._create_system_and_compute_pblums(self, compute, **kwargs)\n\n pblums = {}\n for component, star in system.items():\n if component not in components:\n continue\n for dataset in star._pblum_scale.keys():\n if dataset not in datasets:\n continue\n pblums[\"{}@{}\".format(component, dataset)] = float(star.compute_luminosity(dataset)) * u.W\n\n return pblums"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_compute(self, kind=compute.phoebe, **kwargs):\n func = _get_add_func(_compute, kind)\n\n kwargs.setdefault('compute',\n self._default_label(func.func_name,\n **{'context': 'compute',\n 'kind': func.func_name}))\n\n self._check_label(kwargs['compute'])\n\n params = func(**kwargs)\n # TODO: similar kwargs logic as in add_dataset (option to pass dict to\n # apply to different components this would be more complicated here if\n # allowing to also pass to different datasets\n\n metawargs = {'context': 'compute',\n 'kind': func.func_name,\n 'compute': kwargs['compute']}\n\n logger.info(\"adding {} '{}' compute to bundle\".format(metawargs['kind'], metawargs['compute']))\n self._attach_params(params, **metawargs)\n\n redo_kwargs = deepcopy(kwargs)\n redo_kwargs['func'] = func.func_name\n self._add_history(redo_func='add_compute',\n redo_kwargs=redo_kwargs,\n undo_func='remove_compute',\n undo_kwargs={'compute': kwargs['compute']})\n\n\n # since we've already processed (so that we can get the new qualifiers),\n # we'll only raise a warning\n self._kwargs_checks(kwargs, warning_only=True)\n\n return self.get_compute(**metawargs)", "response": "Adds a set of compute options for a given backend to the bundle."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nfilters in the compute context", "response": "def get_compute(self, compute=None, **kwargs):\n \"\"\"\n Filter in the 'compute' context\n\n :parameter str 
compute: name of the compute options (optional)\n :parameter **kwargs: any other tags to do the filter\n (except compute or context)\n :return: :class:`phoebe.parameters.parameters.ParameterSet`\n \"\"\"\n if compute is not None:\n kwargs['compute'] = compute\n kwargs['context'] = 'compute'\n return self.filter(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef remove_compute(self, compute, **kwargs):\n kwargs['compute'] = compute\n kwargs['context'] = 'comute'\n self.remove_parameters_all(**kwargs)", "response": "Remove a compute from the bundle\n "} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nchange the label of a compute attached to the Bundle", "response": "def rename_compute(self, old_compute, new_compute):\n \"\"\"\n Change the label of a compute attached to the Bundle\n\n :parameter str old_compute: the current name of the compute options\n (must exist)\n :parameter str new_compute: the desired new name of the compute options\n (must not exist)\n :return: None\n :raises ValueError: if the new_compute is forbidden\n \"\"\"\n # TODO: raise error if old_compute not found?\n\n self._check_label(new_compute)\n self._rename_label('compute', old_compute, new_compute)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nrunning a forward model of the system on the enabled dataset using a specified set of compute options. To attach and set custom values for compute options, including choosing which backend to use, see: * :meth:`add_compute` To define the dataset types and times at which the model should be computed see: * :meth:`add_dataset` To disable or enable existing datasets see: * :meth:`enable_dataset` * :meth:`disable_dataset` :parameter str compute: (optional) name of the compute options to use. If not provided or None, run_compute will use an existing set of attached compute options if only 1 exists. 
If more than 1 exist, then compute becomes a required argument. If no compute options exist, then this will use default options and create and attach a new set of compute options with a default label. :parameter str model: (optional) name of the resulting model. If not provided this will default to 'latest'. NOTE: existing models with the same name will be overwritten - including 'latest' :parameter bool datach: [EXPERIMENTAL] whether to detach from the computation run, or wait for computations to complete. If detach is True, see :meth:`get_model` and :meth:`phoebe.parameters.parameters.JobParameter` for details on how to check the job status and retrieve the results. Alternatively, you can provide the server location (host and port) as a string to detach and the bundle will temporarily enter client mode, submit the job to the server, and leave client mode. The resulting :meth:`phoebe.parameters.parameters.JobParameter` will then contain the necessary information to pull the results from the server at anytime in the future. :parameter list times: [EXPERIMENTAL] override the times at which to compute the model. NOTE: this only (temporarily) replaces the time array for datasets with times provided (ie empty time arrays are still ignored). So if you attach a rv to a single component, the model will still only compute for that single component. ALSO NOTE: this option is ignored if detach=True (at least for now). 
:parameter **kwargs: any values in the compute options to temporarily override for this single compute run (parameter values will revert after run_compute is finished) :return: :class:`phoebe.parameters.parameters.ParameterSet` of the newly-created model containing the synthetic data.", "response": "def run_compute(self, compute=None, model=None, detach=False,\n times=None, **kwargs):\n \"\"\"\n Run a forward model of the system on the enabled dataset using\n a specified set of compute options.\n\n To attach and set custom values for compute options, including choosing\n which backend to use, see:\n * :meth:`add_compute`\n\n To define the dataset types and times at which the model should be\n computed see:\n * :meth:`add_dataset`\n\n To disable or enable existing datasets see:\n * :meth:`enable_dataset`\n * :meth:`disable_dataset`\n\n :parameter str compute: (optional) name of the compute options to use.\n If not provided or None, run_compute will use an existing set of\n attached compute options if only 1 exists. If more than 1 exist,\n then compute becomes a required argument. If no compute options\n exist, then this will use default options and create and attach\n a new set of compute options with a default label.\n :parameter str model: (optional) name of the resulting model. If not\n provided this will default to 'latest'. NOTE: existing models\n with the same name will be overwritten - including 'latest'\n :parameter bool datach: [EXPERIMENTAL] whether to detach from the computation run,\n or wait for computations to complete. If detach is True, see\n :meth:`get_model` and :meth:`phoebe.parameters.parameters.JobParameter`\n for details on how to check the job status and retrieve the results.\n Alternatively, you can provide the server location (host and port) as\n a string to detach and the bundle will temporarily enter client mode,\n submit the job to the server, and leave client mode. 
The resulting\n :meth:`phoebe.parameters.parameters.JobParameter` will then contain\n the necessary information to pull the results from the server at anytime\n in the future.\n :parameter list times: [EXPERIMENTAL] override the times at which to compute the model.\n NOTE: this only (temporarily) replaces the time array for datasets\n with times provided (ie empty time arrays are still ignored). So if\n you attach a rv to a single component, the model will still only\n compute for that single component. ALSO NOTE: this option is ignored\n if detach=True (at least for now).\n :parameter **kwargs: any values in the compute options to temporarily\n override for this single compute run (parameter values will revert\n after run_compute is finished)\n :return: :class:`phoebe.parameters.parameters.ParameterSet` of the\n newly-created model containing the synthetic data.\n \"\"\"\n if isinstance(detach, str):\n # then we want to temporarily go in to client mode\n self.as_client(server=detach)\n self.run_compute(compute=compute, model=model, time=time, **kwargs)\n self.as_client(False)\n return self.get_model(model)\n\n # protomesh and pbmesh were supported kwargs in 2.0.x but are no longer\n # so let's raise an error if they're passed here\n if 'protomesh' in kwargs.keys():\n raise ValueError(\"protomesh is no longer a valid option\")\n if 'pbmesh' in kwargs.keys():\n raise ValueError(\"pbmesh is no longer a valid option\")\n\n if model is None:\n model = 'latest'\n\n if model in self.models:\n logger.warning(\"overwriting model: {}\".format(model))\n self.remove_model(model)\n\n self._check_label(model)\n\n if isinstance(times, float) or isinstance(times, int):\n times = [times]\n\n # handle case where compute is not provided\n if compute is None:\n computes = self.get_compute(**kwargs).computes\n if len(computes)==0:\n # NOTE: this doesn't take **kwargs since we want those to be\n # temporarily overriden as is the case when the compute options\n # are already attached\n 
self.add_compute()\n computes = self.get_compute().computes\n # now len(computes) should be 1 and will trigger the next\n # if statement\n\n if len(computes)==1:\n compute = computes[0]\n elif len(computes)>1:\n raise ValueError(\"must provide label of compute options since more than one are attached\")\n\n # handle the ability to send multiple compute options/backends - here\n # we'll just always send a list of compute options\n if isinstance(compute, str):\n computes = [compute]\n else:\n computes = compute\n\n # if interactive mode was ever off, let's make sure all constraints\n # have been run before running system checks or computing the model\n changed_params = self.run_delayed_constraints()\n\n # any kwargs that were used just to filter for get_compute should be\n # removed so that they aren't passed on to all future get_value(...\n # **kwargs) calls\n for k in parameters._meta_fields_filter:\n if k in kwargs.keys():\n dump = kwargs.pop(k)\n\n # we'll wait to here to run kwargs and system checks so that\n # add_compute is already called if necessary\n self._kwargs_checks(kwargs, ['skip_checks', 'jobid'])\n\n if not kwargs.get('skip_checks', False):\n passed, msg = self.run_checks(computes=computes, **kwargs)\n if passed is None:\n # then just raise a warning\n logger.warning(msg)\n if passed is False:\n # then raise an error\n raise ValueError(\"system failed to pass checks: {}\".format(msg))\n\n # let's first make sure that there is no duplication of enabled datasets\n datasets = []\n # compute_ so we don't write over compute which we need if detach=True\n for compute_ in computes:\n # TODO: filter by value instead of if statement once implemented\n for enabled_param in self.filter(qualifier='enabled',\n compute=compute_,\n context='compute').to_list():\n if enabled_param.get_value():\n item = (enabled_param.dataset, enabled_param.component)\n if item in datasets:\n raise ValueError(\"dataset {}@{} is enabled in multiple compute options\".format(item[0], 
item[1]))\n datasets.append(item)\n\n # now if we're supposed to detach we'll just prepare the job for submission\n # either in another subprocess or through some queuing system\n if detach and mpi.within_mpirun:\n logger.warning(\"cannot detach when within mpirun, ignoring\")\n detach = False\n\n if (detach or mpi.enabled) and not mpi.within_mpirun:\n if detach:\n logger.warning(\"detach support is EXPERIMENTAL\")\n\n if times is not None:\n # TODO: support overriding times with detached - issue here is\n # that it isn't necessarilly trivially to send this array\n # through the script. May need to convert to list first to\n # avoid needing to import numpy?\n logger.warning(\"overriding time is not supported within detach - ignoring\")\n\n # we'll track everything through the model name as well as\n # a random string, to avoid any conflicts\n jobid = kwargs.get('jobid', parameters._uniqueid())\n\n # we'll build a python script that can replicate this bundle as it\n # is now, run compute, and then save the resulting model\n script_fname = \"_{}.py\".format(jobid)\n f = open(script_fname, 'w')\n f.write(\"import os; os.environ['PHOEBE_ENABLE_PLOTTING'] = 'FALSE'; os.environ['PHOEBE_ENABLE_SYMPY'] = 'FALSE'; os.environ['PHOEBE_ENABLE_ONLINE_PASSBANDS'] = 'FALSE';\\n\")\n f.write(\"import phoebe; import json\\n\")\n # TODO: can we skip the history context? 
And maybe even other models\n # or datasets (except times and only for run_compute but not run_fitting)\n f.write(\"bdict = json.loads(\\\"\\\"\\\"{}\\\"\\\"\\\")\\n\".format(json.dumps(self.to_json())))\n f.write(\"b = phoebe.Bundle(bdict)\\n\")\n # TODO: make sure this works with multiple computes\n compute_kwargs = kwargs.items()+[('compute', compute), ('model', model)]\n compute_kwargs_string = ','.join([\"{}={}\".format(k,\"\\'{}\\'\".format(v) if isinstance(v, str) else v) for k,v in compute_kwargs])\n f.write(\"model_ps = b.run_compute({})\\n\".format(compute_kwargs_string))\n f.write(\"model_ps.save('_{}.out', incl_uniqueid=True)\\n\".format(jobid))\n f.close()\n\n script_fname = os.path.abspath(script_fname)\n cmd = mpi.detach_cmd.format(script_fname)\n # TODO: would be nice to catch errors caused by the detached script...\n # but that would probably need to be the responsibility of the\n # jobparam to return a failed status and message\n subprocess.call(cmd, shell=True)\n\n # create model parameter and attach (and then return that instead of None)\n job_param = JobParameter(self,\n location=os.path.dirname(script_fname),\n status_method='exists',\n retrieve_method='local',\n uniqueid=jobid)\n\n metawargs = {'context': 'model', 'model': model}\n self._attach_params([job_param], **metawargs)\n\n if isinstance(detach, str):\n self.save(detach)\n\n if not detach:\n return job_param.attach()\n else:\n logger.info(\"detaching from run_compute. 
Call get_model('{}').attach() to re-attach\".format(model))\n\n # return self.get_model(model)\n return job_param\n\n for compute in computes:\n\n computeparams = self.get_compute(compute=compute)\n\n if not computeparams.kind:\n raise KeyError(\"could not recognize backend from compute: {}\".format(compute))\n\n logger.info(\"running {} backend to create '{}' model\".format(computeparams.kind, model))\n compute_class = getattr(backends, '{}Backend'.format(computeparams.kind.title()))\n # compute_func = getattr(backends, computeparams.kind)\n\n metawargs = {'compute': compute, 'model': model, 'context': 'model'} # dataset, component, etc will be set by the compute_func\n\n params = compute_class().run(self, compute, times=times, **kwargs)\n\n\n # average over any exposure times before attaching parameters\n if computeparams.kind == 'phoebe':\n # TODO: we could eventually do this for all backends - we would\n # just need to copy the computeoption parameters into each backend's\n # compute PS, and include similar logic for oversampling that is\n # currently in backends._extract_info_from_bundle_by_time into\n # backends._extract_info_from_bundle_by_dataset. 
We'd also\n # need to make sure that exptime is not being passed to any\n # alternate backend - and ALWAYS handle it here\n for dataset in params.datasets:\n # not all dataset-types currently support exposure times.\n # Once they do, this ugly if statement can be removed\n if len(self.filter(dataset=dataset, qualifier='exptime')):\n exptime = self.get_value(qualifier='exptime', dataset=dataset, context='dataset', unit=u.d)\n if exptime > 0:\n if self.get_value(qualifier='fti_method', dataset=dataset, compute=compute, context='compute', **kwargs)=='oversample':\n times_ds = self.get_value(qualifier='times', dataset=dataset, context='dataset')\n # exptime = self.get_value(qualifier='exptime', dataset=dataset, context='dataset', unit=u.d)\n fti_oversample = self.get_value(qualifier='fti_oversample', dataset=dataset, compute=compute, context='compute', check_visible=False, **kwargs)\n # NOTE: this is hardcoded for LCs which is the\n # only dataset that currently supports oversampling,\n # but this will need to be generalized if/when\n # we expand that support to other dataset kinds\n fluxes = np.zeros(times_ds.shape)\n\n # the oversampled times and fluxes will be\n # sorted according to times this may cause\n # exposures to \"overlap\" each other, so we'll\n # later need to determine which times (and\n # therefore fluxes) belong to which datapoint\n times_oversampled_sorted = params.get_value('times', dataset=dataset)\n fluxes_oversampled = params.get_value('fluxes', dataset=dataset)\n\n for i,t in enumerate(times_ds):\n # rebuild the unsorted oversampled times - see backends._extract_from_bundle_by_time\n # TODO: try to optimize this by having these indices returned by the backend itself\n times_oversampled_this = np.linspace(t-exptime/2., t+exptime/2., fti_oversample)\n sample_inds = np.searchsorted(times_oversampled_sorted, times_oversampled_this)\n\n fluxes[i] = np.mean(fluxes_oversampled[sample_inds])\n\n params.set_value(qualifier='times', dataset=dataset, 
value=times_ds)\n params.set_value(qualifier='fluxes', dataset=dataset, value=fluxes)\n\n\n self._attach_params(params, **metawargs)\n\n redo_kwargs = deepcopy(kwargs)\n redo_kwargs['compute'] = computes if len(computes)>1 else computes[0]\n redo_kwargs['model'] = model\n\n self._add_history(redo_func='run_compute',\n redo_kwargs=redo_kwargs,\n undo_func='remove_model',\n undo_kwargs={'model': model})\n\n return self.get_model(model)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_model(self, model=None, **kwargs):\n if model is not None:\n kwargs['model'] = model\n kwargs['context'] = 'model'\n return self.filter(**kwargs)", "response": "Filter in the model context\n "} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef remove_model(self, model, **kwargs):\n kwargs['model'] = model\n kwargs['context'] = 'model'\n self.remove_parameters_all(**kwargs)", "response": "Remove a model from the bundle"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef rename_model(self, old_model, new_model):\n # TODO: raise error if old_feature not found?\n\n self._check_label(new_model)\n self._rename_label('model', old_model, new_model)", "response": "Change the label of a model attached to the Bundle\n ."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds a prior parameter to the current object.", "response": "def add_prior(self, twig=None, **kwargs):\n \"\"\"\n [NOT IMPLEMENTED]\n\n :raises NotImplementedError: because it isn't\n \"\"\"\n\n raise NotImplementedError\n\n param = self.get_parameter(twig=twig, **kwargs)\n # TODO: make sure param is a float parameter?\n\n func = _get_add_func(_distributions, 'prior')\n\n # TODO: send smart defaults for priors based on limits of parameter\n params = func(**kwargs)\n\n metawargs = {k: v for k, v in params.meta.items()\n if 
k not in ['uniqueid', 'uniquetwig', 'twig']}\n metawargs['context'] = 'prior'\n\n logger.info(\"adding prior on '{}' parameter\".format(param.uniquetwig))\n self._attach_params(params, **metawargs)\n\n redo_kwargs = deepcopy(kwargs)\n redo_kwargs['func'] = func.func_name\n self._add_history(redo_func='add_prior',\n redo_kwargs=redo_kwargs,\n undo_func='remove_prior',\n undo_kwargs={'twig': param.uniquetwig})\n\n # return params\n return self.get_prior(**metawargs)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_prior(self, twig=None, **kwargs):\n raise NotImplementedError\n kwargs['context'] = 'prior'\n return self.filter(twig=twig, **kwargs)", "response": "Returns a copy of the current object with the prior context."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a new object with the given fitting.", "response": "def get_fitting(self, fitting=None, **kwargs):\n \"\"\"\n [NOT IMPLEMENTED]\n\n :raises NotImplementedError: because it isn't\n \"\"\"\n raise NotImplementedError\n if fitting is not None:\n kwargs['fitting'] = fitting\n kwargs['context'] = 'fitting'\n return self.filter(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a new QuerySet with the posterior entries.", "response": "def get_posterior(self, twig=None, feedback=None, **kwargs):\n \"\"\"\n [NOT IMPLEMENTED]\n\n :raises NotImplementedError: because it isn't\n \"\"\"\n raise NotImplementedError\n kwargs['context'] = 'posterior'\n return self.filter(twig=twig, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_feedback(self, feedback=None, **kwargs):\n raise NotImplementedError\n if feedback is not None:\n kwargs['feedback'] = feedback\n kwargs['context'] = 'feedback'\n return self.filter(**kwargs)", "response": "Returns a new EntryCollection with feedback entries."} 
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nconvert a twig parameter to a unique ID.", "response": "def _twig_to_uniqueid(bundle, twig, **kwargs):\n \"\"\"\n kwargs are passed on to filter\n \"\"\"\n res = bundle.filter(twig=twig, force_ps=True, check_visible=False, check_default=False, **kwargs)\n # we force_ps instead of checking is_instance(res, ParameterSet) to avoid\n # having to import from parameters\n if len(res) == 1:\n #~ print \"twighelpers._twig_to_uniqueid len(res): {}, res: {}\".format(len(res), res)\n return res.to_list()[0].uniqueid\n else:\n raise ValueError(\"did not return a single unique match to a parameter for '{}'\".format(twig))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef update_user_label(self):\n self._user_label = _uniqueid_to_uniquetwig(self._bundle, self.unique_label)\n self._set_curly_label()", "response": "Updates the user_label attribute of the current locale based on the least unique_twig from the bundle"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_parameter(self):\n if not self.is_param:\n raise ValueError(\"this var does not point to a parameter\")\n\n # this is quite expensive, so let's cache the parameter object so we only\n # have to filter on the first time this is called\n if self._parameter is None:\n self._parameter = self._bundle.get_parameter(uniqueid=self.unique_label, check_visible=False, check_default=False)\n\n return self._parameter", "response": "get the parameter object for this var"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nprocessing kwargs for AxDimension instances by stripping off the prefix of the prefix and removing off the appropriate direction", "response": "def _process_dimension_kwargs(direction, kwargs):\n \"\"\"\n process kwargs for AxDimension instances by stripping off the prefix\n for the appropriate direction\n 
\"\"\"\n acceptable_keys = ['unit', 'pad', 'lim', 'label']\n # if direction in ['s']:\n # acceptable_keys += ['mode']\n processed_kwargs = {}\n for k,v in kwargs.items():\n if k.startswith(direction):\n processed_key = k.lstrip(direction)\n else:\n processed_key = k\n\n if processed_key in acceptable_keys:\n processed_kwargs[processed_key] = v\n\n return processed_kwargs"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef calls_sorted(self):\n def _z(call):\n if isinstance(call.z.value, np.ndarray):\n return np.mean(call.z.value.flatten())\n elif isinstance(call.z.value, float) or isinstance(call.z.value, int):\n return call.z.value\n else:\n # put it at the back\n return -np.inf\n\n calls = self._calls\n zs = np.array([_z(c) for c in calls])\n sorted_inds = zs.argsort()\n # TODO: ugh, this is ugly. Test to find the optimal way to sort\n # while still ending up with a list\n return _call.make_callgroup(np.array(calls)[sorted_inds].tolist())", "response": "Returns a list of all the calls in z\n sorted in z\n "} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef consistent_with_call(self, call):\n if len(self.calls) == 0:\n return True, ''\n\n msg = []\n\n if not _consistent_allow_none(call._axorder, self._axorder):\n msg.append('inconsistent axorder, {} != {}'.format(call.axorder, self.axorder))\n\n if not _consistent_allow_none(call._axpos, self._axpos):\n msg.append('inconsistent axpos, {} != {}'.format(call.axpos, self.axpos))\n\n if call._axorder == self._axorder and call._axorder is not None:\n # then despite other conflicts, attempt to put on same axes\n return True, ''\n\n if call._axpos == self._axpos and call._axpos is not None:\n # then despite other conflicts, attempt to put on same axes\n return True, ''\n\n # TODO: include s, c, fc, ec, etc and make these checks into loops\n if call.x.unit.physical_type != self.x.unit.physical_type:\n msg.append('inconsitent xunit, 
{} != {}'.format(call.x.unit, self.x.unit))\n if call.y.unit.physical_type != self.y.unit.physical_type:\n msg.append('inconsitent yunit, {} != {}'.format(call.y.unit, self.y.unit))\n if call.z.unit.physical_type != self.z.unit.physical_type:\n msg.append('inconsitent zunit, {} != {}'.format(call.z.unit, self.z.unit))\n if call.i.unit.physical_type != self.i.unit.physical_type:\n msg.append('inconsistent iunit, {} != {}'.format(call.i.unit, self.i.unit))\n if call.i.is_reference or self.i.is_reference:\n if call.i.reference != self.i.reference:\n msg.append('inconsistent i reference, {} != {}'.format(call.i.reference, self.i.reference))\n\n if not _consistent_allow_none(call.title, self.title):\n msg.append('inconsistent axes title, {} != {}'.format(call.title, self.title))\n\n # here we send the protected _label so that we get None instead of empty string\n if not _consistent_allow_none(call.x._label, self.x._label):\n msg.append('inconsitent xlabel, {} != {}'.format(call.x.label, self.x.label))\n if not _consistent_allow_none(call.y._label, self.y._label):\n msg.append('inconsitent ylabel, {} != {}'.format(call.y.label, self.y.label))\n if not _consistent_allow_none(call.z._label, self.z._label):\n msg.append('inconsitent zlabel, {} != {}'.format(call.z.label, self.z.label))\n\n\n if len(msg):\n return False, ', '.join(msg)\n else:\n return True, ''", "response": "check to see if a new call would be consistent with this Axes instance"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef lim(self, lim):\n if lim is None:\n self._lim = lim\n return\n\n typeerror_msg = \"lim must be of type tuple, float, None, or in ['fixed', 'symmetric', 'frame', 'sliding']\"\n\n if isinstance(lim, str):\n if lim in ['fixed', 'symmetric', 'frame', 'sliding']:\n self._lim = lim\n return\n else:\n raise ValueError(typeerror_msg)\n\n if isinstance(lim, int):\n lim = float(lim)\n\n if isinstance(lim, float):\n if lim <= 0.0:\n raise 
ValueError(\"lim cannot be <= 0\")\n self._lim = lim\n return\n\n if not isinstance(lim, tuple):\n try:\n lim = tuple(lim)\n except:\n raise TypeError(typeerror_msg)\n\n if not len(lim)==2:\n raise ValueError('lim must have length 2')\n\n for l in lim:\n if not (isinstance(l, float) or isinstance(l, int) or l is None):\n raise ValueError(\"each item in limit must be of type float, int, or None\")\n\n self._lim = lim", "response": "set the limit of the available log entries"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the value at a give value of i", "response": "def get_value(self, i, indeps=None):\n \"\"\"\n access the interpolated value at a give value of i (independent-variable)\n\n if indeps is not passed, then the entire range of indeps over all calls is assumed\n \"\"\"\n if self.value is None:\n return None\n\n if i is None:\n return np.median(self.value)\n\n if indeps is None:\n indeps_all_calls = list(set(np.concatenate([common.tolist(call.i.get_value(unit=self.axes.i.unit)) for call in self.axes.calls])))\n indeps = np.linspace(np.nanmin(indeps_all_calls),\n np.nanmax(indeps_all_calls),\n len(self.value))\n\n if len(indeps) != len(self.value):\n raise ValueError(\"indeps and value must have same length\")\n\n return np.interp(i, indeps, self.value)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngenerates a new system parameter set for all the parameters in the order they appear.", "response": "def system(**kwargs):\n \"\"\"\n Generally, this will automatically be added to a newly initialized\n :class:`phoebe.frontend.bundle.Bundle`\n\n :parameter **kwargs: defaults for the values of any of the parameters\n :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly\n created :class:`phoebe.parameters.parameters.Parameter`s\n \"\"\"\n params = []\n\n params += [FloatParameter(qualifier='t0', value=kwargs.get('t0', 0.0), default_unit=u.d, description='Time 
at which all values are provided')]\n\n # TODO: re-enable these once they're incorporated into orbits (dynamics) correctly.\n params += [FloatParameter(qualifier='ra', value=kwargs.get('ra', 0.0), default_unit=u.deg, description='Right ascension')]\n params += [FloatParameter(qualifier='dec', value=kwargs.get('dec', 0.0), default_unit=u.deg, description='Declination')]\n params += [StringParameter(qualifier='epoch', value=kwargs.get('epoch', 'J2000'), description='Epoch of coordinates')]\n #params += [FloatParameter(qualifier='pmra', value=kwargs.get('pmra', 0.0), default_unit=u.mas/u.yr, description='Proper motion in right ascension')]\n #params += [FloatParameter(qualifier='pmdec', value=kwargs.get('pmdec', 0.0), default_unit=u.mas/u.yr, description='Proper motion in declination')]\n\n params += [FloatParameter(qualifier='distance', value=kwargs.get('distance', 1.0), default_unit=u.m, description='Distance to the system')]\n params += [FloatParameter(qualifier='vgamma', value=kwargs.get('vgamma', 0.0), default_unit=u.km/u.s, description='Systemic velocity (in the direction of positive RV or negative vz)')]\n\n return ParameterSet(params)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef create_teams(obj, user, access):\n for field_name, access_types in access.items():\n id_field = \"{}_id\".format(field_name)\n # Check that the team is associated with the object via a FK...\n if hasattr(obj, id_field) and getattr(obj, id_field) is None:\n # ...and there is no existing related team.\n # TODO - the team name needs to be able to create a unique\n # slug that's < 50 characters long.\n # TODO - this is just a workaround:\n next_pk = next(iter(instance.pk for instance in obj.__class__.objects.order_by(\"-pk\")), 0) + 1 # this is a thing a beauty. 
;-)\n team_name = u\"{} for {} {}\".format(\n field_name, obj._meta.model_name, next_pk)\n new_team = Team(\n name=team_name,\n member_access=access_types[0],\n manager_access=access_types[1],\n creator=user)\n new_team.save()\n setattr(obj, field_name, new_team)\n return obj", "response": "Create new teams associated with the object."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _to_component(obj, allow_hierarchy=True):\n\n def _to_str(obj):\n return \"{}:{}\".format(obj.kind, obj.component)\n\n if isinstance(obj, str):\n # TODO: check to see if valid?, use allow_hierarchy\n # TODO: when passed labels this is going to give the wrong thing, but that might be fixable in the HierarchyParameter set_value check\n #~ raise NotImplementedError # need to get object from bundle and pass to _to_str\n return obj\n elif isinstance(obj, ParameterSet):\n # TODO: be smarter about this and don't assume only 1 will be returned\n if 'repr' in obj.to_flat_dict().keys():\n return obj.get_value(qualifier='repr')\n\n else:\n # make sure we only have things in the 'component' context\n obj = obj.filter(context='component')\n return _to_str(obj)\n\n elif isinstance(obj, Parameter):\n if obj.qualifier == 'repr':\n return obj.get_value()\n else:\n return _to_str(obj)\n\n else:\n raise NotImplementedError(\"could not parse {}\".format(obj))", "response": "Convert an object to a string or ParameterSet or a nested hierarchy."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef binaryorbit(orbit, comp1, comp2, envelope=None):\n\n if envelope:\n return '{}({}, {}, {})'.format(_to_component(orbit, False), _to_component(comp1), _to_component(comp2), _to_component(envelope, False))\n else:\n return '{}({}, {})'.format(_to_component(orbit, False), _to_component(comp1), _to_component(comp2))", "response": "Builds a string representation of a binary hierarchy 
containing a 2 components orbit."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef nekmin(omega_in,q,x0=0.5,z0=0.5):\n\n '''Computes the position of the neck (minimal radius) in an contact_binary star1'''\n\n def Omega_xz(q,x,z):\n return 1./np.sqrt(x**2+z**2)+q/np.sqrt((1-x)**2+z**2)+(q+1)*x**2/2.-q*x\n\n def Omega_xy(q,x,y):\n return 1./np.sqrt(x**2+y**2)+q/np.sqrt((1-x)**2+y**2)+(q+1)*(x**2+y**2)/2.-q*x\n\n def dOmegadx_z(q,x,z):\n return -x/(x**2+z**2)**(3./2)+q*(1-x)/((1-x)**2+z**2)**(3./2.)+(q+1)*x-q\n\n def dOmegadx_y(q,x,y):\n return -x/(x**2+y**2)**(3./2)+q*(1-x)/((1-x)**2+y**2)**(3./2.)+(q+1)*x-q\n\n def dOmegadz(q,x,z):\n return -z/(x**2+z**2)**(3./2)-q*z/((1-x)**2+z**2)**(3./2.)\n\n def dOmegady(q,x,y):\n return -y/(x**2+y**2)**(3./2)-q*y/((1-x)**2+y**2)**(3./2.)+(q+1)*y\n\n def d2Omegadx2_z(q,x,z):\n return (2*x**2-z**2)/(x**2+z**2)**(5./2)+q*(2*(1-x)**2-z**2)/((1-x)**2+z**2)**(5./2)+(q+1)\n\n def d2Omegadx2_y(q,x,y):\n return (2*x**2-y**2)/(x**2+y**2)**(5./2)+q*(2*(1-x)**2-y**2)/((1-x)**2+y**2)**(5./2)+(q+1)\n\n def d2Omegadxdz(q,x,z):\n return 3*x*z/(x**2+z**2)**(5./2)-3*q*x*(1-x)/((1-x)**2+z**2)**(5./2)\n\n def d2Omegadxdy(q,x,y):\n return 3*x*y/(x**2+y**2)**(5./2)-3*q*x*(1-x)/((1-x)**2+y**2)**(5./2)\n\n xz,z = x0,z0\n dxz, dz = 1.,1.\n\n # find solution in xz plane\n while abs(dxz)>1e-8 and abs(dz)>1e-8:\n\n delz = 1.\n z=0.05\n while abs(delz) > 0.000001:\n delom = omega_in - Omega_xz(q,xz,z)\n delz = delom/dOmegadz(q,xz,z)\n z = abs(z+delz)\n\n DN = np.array([[dOmegadx_z(q,xz,z),dOmegadz(q,xz,z)],[d2Omegadx2_z(q,xz,z),d2Omegadxdz(q,xz,z)]])\n EN = np.array([omega_in-Omega_xz(q,xz,z),(-1)*dOmegadx_z(q,xz,z)])\n\n a,b,c,d = DN[0][0],DN[0][1],DN[1][0],DN[1][1]\n\n if (a*d-b*c)!=0.:\n DNINV = 1./(a*d-b*c)*np.array([[d,(-1)*b],[(-1)*c,d]])\n #DNINV = inv(DN)\n\n dd = np.dot(DNINV,EN)\n dxz,dz = dd[0],dd[1]\n xz=xz+dxz\n z=z+dz\n else:\n xz = xz+0.5\n z = z+0.5\n dxz = 1.\n dz = 
1.\n\n return xz,z", "response": "Computes the position of the neck in an contact_binary star1"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef compute_frac_areas(new_mesh,xmin):\n\n '''Computes fractional areas belonging to each triangle vertex that are a result\n of the intersection with the neck plane and assigns them to each vertex per triangle.'''\n\n def intersection(point1,point2,xmin):\n ''' point1 = (x1,y1,z1), point2 = (x2,y2,z2\n parameters for matrices: a1=(z2-z1)/(x2-x1), b1=0, c1=-1, d1=z1-(z2-z1)/(x2-x1)*x1\n a2= , b2=(z2-z1)/(y2-y1), c2=-1, d2=z1-(z2-z1)/(y2-y1)*y1\n a3=1, b3=0, c3=0, d3=-xmin'''\n\n a1 = (point2[2]-point1[2])/(point2[0]-point1[0])\n b1 = 0.\n c1 = -1.\n d1 = point1[2] - point1[0]*(point2[2]-point1[2])/(point2[0]-point1[0])\n\n b2 = (point2[2]-point1[2])/(point2[1]-point1[1])\n a2 = 0.\n c1 = -1.\n d1 = point1[2] - point1[1]*(point2[2]-point1[2])/(point2[1]-point1[1])\n\n a3, b3, c3, d3 = 1., 0., 0., (-1)*xmin\n\n\n R_c = np.array([[a1,b1,c1],[a2,b2,c2],[a3,b3,c3]])\n R_d = np.array([[a1,b1,c1,d1],[a2,b2,c2,d2],[a3,b3,c3,d3]])\n\n # condition for one-point intersection\n if np.linalg.matrix_rank(R_c)==3 and np.linalg.matrix_rank(R_d)==3:\n # build the arrays whose determinants are used to compute the intersection coords\n x_arr = np.array([[d1,b1,c1],[d2,b2,c2],[d3,b3,c3]])\n y_arr = np.array([[a1,d1,c1],[a2,d2,c2],[a3,d3,c3]])\n z_arr = np.array([[a1,b1,d1],[a2,b2,d2],[a3,b3,d3]])\n\n x = np.linalg.det(x_arr)/np.linalg.det(R_c)\n y = np.linalg.det(y_arr)/np.linalg.det(R_c)\n z = np.linalg.det(z_arr)/np.linalg.det(R_c)\n\n # next we need to check whether the point is actually on the line section that makes the triangle\n x_low,x_up = min(point1[0],point2[0]),max(point1[0],point2[0])\n y_low,y_up = min(point1[1],point2[1]),max(point1[1],point2[1])\n z_low,z_up = min(point1[2],point2[2]),max(point1[2],point2[2])\n\n if x>x_low and xy_low and yz_low and z 1:\n prefilter = False\n else:\n 
prefilter = False\n\n return [ndimage.map_coordinates(pixelgrid[...,i],p_coord, order=order,\n prefilter=prefilter, mode=mode, cval=cval) \\\n for i in range(np.shape(pixelgrid)[-1])]"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef cinterpolate(p, axis_values, pixelgrid):\n res = libphoebe.interp(p, axis_values, pixelgrid)\n return res", "response": "Interpolates in a grid prepared by create_pixeltypegrid."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef logger(*args, **kwargs):\n if mpi.within_mpirun and mpi.myrank == 0:\n # tell the workers to invoke the same logger\n mpi.comm.bcast({'worker_command': 'logger', 'args': args, 'kwargs': kwargs}, root=0)\n\n return _utils.get_basic_logger(*args, **kwargs)", "response": "shortcut to utils. get_basic_logger"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a basic logger for the given level and file.", "response": "def get_basic_logger(clevel='WARNING',flevel='DEBUG',\n style=\"default\",filename=None,filemode='w'):\n \"\"\"\n Return a basic logger via a log file and/or terminal.\n\n Example 1: log only to the console, accepting levels \"INFO\" and above\n\n >>> logger = utils.get_basic_logger()\n\n Example 2: log only to the console, accepting levels \"DEBUG\" and above\n\n >>> logger = utils.get_basic_logger(clevel='DEBUG')\n\n Example 3: log only to a file, accepting levels \"DEBUG\" and above\n\n >>> logger = utils.get_basic_logger(clevel=None,filename='mylog.log')\n\n Example 4: log only to a file, accepting levels \"INFO\" and above\n\n >>> logger = utils.get_basic_logger(clevel=None,flevel='INFO',filename='mylog.log')\n\n Example 5: log to the terminal (INFO and above) and file (DEBUG and above)\n\n >>> logger = utils.get_basic_logger(filename='mylog.log')\n\n The different logging styles are:\n\n C{style='default'}::\n\n Wed, 13 Feb 2013 08:47 
root INFO Some information\n\n C{style='grandpa'}::\n\n # INFO Some information\n\n C{style='minimal'}::\n\n Some information\n\n @param style: logger style\n @type style: str, one of 'default','grandpa','minimal'\n \"\"\"\n name = \"\"\n #-- define formats\n if style=='default':\n format = '%(asctime)s %(name)-12s %(levelname)-7s %(message)s'\n datefmt = '%a, %d %b %Y %H:%M'\n elif style=='grandpa':\n format = '# %(levelname)-7s %(message)s'\n datefmt = '%a, %d %b %Y %H:%M'\n elif style=='minimal':\n format = ''\n datefmt = '%a, %d %b %Y %H:%M'\n\n if style=='trace':\n formatter = MyFormatter()\n else:\n formatter = logging.Formatter(fmt=format,datefmt=datefmt)\n\n\n if clevel: clevel = logging.__dict__[clevel.upper()]\n if flevel: flevel = logging.__dict__[flevel.upper()]\n\n #-- set up basic configuration.\n # The basicConfig sets up one default logger. If you give a filename, it's\n # a FileHandler, otherwise a StreamHandler.\n #-- If we want console and filename, first set up a basic FileHandler, then\n # add terminal StreamHandler\n if filename is not None:\n if flevel is None:\n level = clevel\n else:\n level = flevel\n logging.basicConfig(level=level,\n format=format,datefmt=datefmt,\n filename=filename,filemode=filemode)\n fh = logging.FileHandler(filename)\n fh.setLevel(flevel)\n fh.setFormatter(formatter)\n logging.getLogger(name).addHandler(fh)\n if filename is not None and clevel:\n # define a Handler which writes INFO messages or higher to the sys.stderr\n ch = logging.StreamHandler()\n ch.setLevel(clevel)\n # tell the handler to use this format\n ch.setFormatter(formatter)\n logging.getLogger(name).addHandler(ch)\n #-- If we only want a console:\n else:\n logging.basicConfig(level=clevel,format=format,datefmt=datefmt,\n filename=filename,filemode=filemode)\n #-- fix filename logging\n if filename is not None:\n logging.getLogger(name).handlers[0].level = flevel\n return logging.getLogger(name)"} {"SOURCE": "codesearchnet", "instruction": "Can you 
tell what is the following Python 3 function doing\ndef parse_json(pairs):\n new_pairs = []\n for key, value in pairs:\n if isinstance(value, unicode):\n value = value.encode('utf-8')\n if isinstance(key, unicode):\n key = key.encode('utf-8')\n new_pairs.append((key, value))\n return dict(new_pairs)", "response": "parse_json returns a dict of key - value pairs"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef direction(self, direction):\n if not isinstance(direction, str):\n raise TypeError(\"direction must be of type str\")\n\n accepted_values = ['i', 'x', 'y', 'z', 's', 'c']\n if direction not in accepted_values:\n raise ValueError(\"must be one of: {}\".format(accepted_values))\n\n self._direction = direction", "response": "set the direction of the log entry"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef interpolate_at_i(self, i, unit=None):\n if isinstance(self.call.i._value, float):\n if self.call.i._value==i:\n return self._to_unit(self._value, unit)\n else:\n return None\n\n # we can't call i._value here because that may point to a string, and\n # we want this to resolve the array\n i_value = self.call.i.get_value(linebreak=False, sort_by_indep=False)\n if len(i_value) != len(self._value):\n raise ValueError(\"length mismatch with independent-variable\")\n\n sort_inds = i_value.argsort()\n indep_value = i_value[sort_inds]\n this_value = self._value[sort_inds]\n return self._to_unit(np.interp(i, indep_value, this_value, left=np.nan, right=np.nan), unit)", "response": "interpolate the value at a give value of i"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _sort_by_indep(self, func='get_value', i=None, iunit=None, unit=None,\n uncover=None, trail=None, linebreak=None,\n sort_by_indep=None):\n\n \"\"\"\n must be called before (or within) _do_linebreak\n 
\"\"\"\n\n if sort_by_indep is None:\n # TODO: add property of the call?\n sort_by_indep = True\n\n indep_array = self.call.i.get_value(i=i,\n unit=iunit,\n uncover=uncover,\n trail=trail,\n linebreak=False,\n sort_by_indep=False)\n\n this_array = getattr(self, func)(i=i,\n unit=unit,\n uncover=uncover,\n trail=trail,\n linebreak=False,\n sort_by_indep=False)\n\n if not (isinstance(indep_array, np.ndarray) and len(indep_array)==len(this_array)):\n sort_by_indep = False\n\n if sort_by_indep:\n # TODO: it might be nice to buffer this at the call level, so making\n # multiple get_value calls doesn't have to recompute the sort-order\n sort_inds = indep_array.argsort()\n return this_array[sort_inds]\n else:\n return this_array", "response": "Internal method to sort the array of the items in the log entry by indep."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_value(self, i=None, unit=None,\n uncover=None, trail=None,\n linebreak=None, sort_by_indep=None,\n attr='_value'):\n \"\"\"\n access the value for a given value of i (independent-variable) depending\n on which effects (i.e. uncover) are enabled.\n\n If uncover, trail, or linebreak are None (default), then the value from\n the parent Call will be used.\n \"\"\"\n\n value = getattr(self, attr) # could be self._value or self._error\n if value is None:\n return None\n\n if uncover is None:\n uncover = self.call.uncover\n\n if trail is None:\n trail = self.call.trail\n\n if linebreak is None:\n linebreak = self.call.linebreak\n\n if sort_by_indep is None:\n # TODO: make this a property of the call?\n sort_by_indep = True\n\n if isinstance(value, str) or isinstance(value, float):\n if i is None:\n return self._to_unit(value, unit)\n elif isinstance(self.call.i.value, float):\n # then we still want to \"select\" based on the value of i\n if self._filter_at_i(i):\n return value\n else:\n return None\n else:\n # then we should show either way. 
For example - a color or\n # axhline even with i given won't change in i\n return self._to_unit(value, unit)\n\n # from here on we're assuming the value is an array, so let's just check\n # to be sure\n if not isinstance(value, np.ndarray):\n raise NotImplementedError(\"value/error must be a numpy array\")\n\n\n if linebreak is not False:\n return self._do_linebreak(func='get{}'.format(attr),\n i=i,\n unit=unit,\n uncover=uncover,\n trail=trail,\n linebreak=linebreak,\n sort_by_indep=sort_by_indep)\n\n if sort_by_indep is not False:\n # if we've made it here, linebreak should already be False (if\n # linebreak was True, then we'd be within _do_linebreak and those\n # get_value calls pass linebreak=False)\n return self._sort_by_indep(func='get{}'.format(attr),\n i=i,\n unit=unit,\n uncover=uncover,\n trail=trail,\n linebreak=False,\n sort_by_indep=sort_by_indep)\n\n # from here on, linebreak==False and sort_by_indep==False (if either\n # were True, then we're within those functions and asking for the original\n # array)\n if i is None:\n if len(value.shape)==1:\n return self._to_unit(value, unit)\n else:\n if isinstance(self.call, Plot):\n return self._to_unit(value.T, unit)\n else:\n return self._to_unit(value, unit)\n\n # filter the data as necessary\n filter_ = self._filter_at_i(i, uncover=uncover, trail=trail)\n\n if isinstance(self.call.i.value, float):\n if filter_:\n return self._to_unit(value, unit)\n else:\n return None\n\n if len(value.shape)==1:\n # then we're dealing with a flat 1D array\n if attr == '_value':\n if trail is not False:\n trail_i = self._get_trail_min(i)\n first_point = self.interpolate_at_i(trail_i)\n\n\n if uncover:\n last_point = self.interpolate_at_i(i)\n else:\n first_point = np.nan\n last_point = np.nan\n\n if uncover and trail is not False:\n concat = (np.array([first_point]),\n value[filter_],\n np.array([last_point]))\n elif uncover:\n concat = (value[filter_],\n np.array([last_point]))\n\n elif trail:\n concat = 
(np.array([first_point]),\n value[filter_])\n else:\n return self._to_unit(value[filter_], unit)\n\n return self._to_unit(np.concatenate(concat), unit)\n\n else:\n # then we need to \"select\" based on the indep and the value\n if isinstance(self.call, Plot):\n return self._to_unit(value[filter_].T, unit)\n else:\n return self._to_unit(value[filter_], unit)", "response": "get the value of a given attribute of a given i"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _set_value(self, value):\n if value is None:\n self._value = value\n return\n\n # handle casting to acceptable types\n if isinstance(value, list) or isinstance(value, tuple):\n value = np.array(value)\n if isinstance(value, int):\n value = float(value)\n if isinstance(value, u.Quantity):\n if self.unit == u.dimensionless_unscaled:\n # then take the unit from quantity and apply it\n self.unit = value.unit\n value = value.value\n else:\n # then convert to the requested unit\n value = value.to(self.unit).value\n\n # handle setting based on type\n if isinstance(value, np.ndarray):\n # if len(value.shape) != 1:\n # raise ValueError(\"value must be a flat array\")\n\n self._value = value\n elif isinstance(value, float):\n # TODO: do we want to cast to np.array([value])??\n # this will most likely be used for axhline/axvline\n self._value = value\n # elif isinstance(value, str):\n # TODO: then need to pull from the bundle??? 
Or will this happen\n # at a higher level\n elif self.direction=='c' and isinstance(value, str):\n self._value = common.coloralias.map(value)\n else:\n raise TypeError(\"value must be of type array (or similar)\")", "response": "set the value of the attribute"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_error(self, i=None, unit=None,\n uncover=None, trail=None,\n linebreak=None, sort_by_indep=None):\n \"\"\"\n access the error for a given value of i (independent-variable) depending\n on which effects (i.e. uncover) are enabled.\n \"\"\"\n return self.get_value(i=i, unit=unit,\n uncover=uncover, trail=trail,\n linebreak=linebreak, sort_by_indep=sort_by_indep,\n attr='_error')", "response": "get the error for a given value of i"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets the error of the current object", "response": "def error(self, error):\n \"\"\"\n set the error\n \"\"\"\n # TODO: check length with value?\n # TODO: type checks (similar to value)\n if self.direction not in ['x', 'y', 'z'] and error is not None:\n raise ValueError(\"error only accepted for x, y, z dimensions\")\n\n if isinstance(error, u.Quantity):\n error = error.to(self.unit).value\n\n self._error = error"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting the label of the object", "response": "def label(self, label):\n \"\"\"\n set the label\n \"\"\"\n if self.direction in ['i'] and label is not None:\n raise ValueError(\"label not accepted for indep dimension\")\n\n\n if label is None:\n self._label = label\n return\n\n if not isinstance(label, str):\n try:\n label = str(label)\n except:\n raise TypeError(\"label must be of type str\")\n\n self._label = label"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the value of the value", "response": "def value(self, value):\n \"\"\"\n set the value\n \"\"\"\n # for 
the indep direction we also allow a string which points to one\n # of the other available dimensions\n # TODO: support c, fc, ec?\n if isinstance(value, common.basestring) and value in ['x', 'y', 'z']:\n # we'll cast just to get rid of any python2 unicodes\n self._value = str(value)\n dimension = value\n self._unit = getattr(self.call, dimension).unit\n return\n\n # NOTE: cannot do super on setter directly, see this python\n # bug: https://bugs.python.org/issue14965 and discussion:\n # https://mail.python.org/pipermail/python-dev/2010-April/099672.html\n super(CallDimensionI, self)._set_value(value)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef rotfreq_to_omega(rotfreq, scale=c.R_sun.si.value, solar_units=False):\n if solar_units:\n omega = rotfreq / (2*np.pi) / np.sqrt(c.GM_sun.to(u.solRad**3/u.d**2).value/scale**3)\n else:\n # then SI units\n omega = rotfreq / (2*np.pi) / np.sqrt(c.GM_sun.value/scale**3)\n\n # print \"*** rotstar.rotfreq_to_omega\", rotfreq, scale, solar_units, omega\n\n return omega", "response": "Convert a rotation frequency to an omega value."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _ensure_tuple(item):\n if isinstance(item, tuple):\n return item\n elif isinstance(item, list):\n return tuple(item)\n elif isinstance(item, np.ndarray):\n return tuple(item.tolist())\n else:\n raise NotImplementedError", "response": "Ensure that the passed item is a tuple. If it is not then return the item itself."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nparses parameters in the bundle and call :func:`dynamics`. See :func:`dynamics` for more detailed information. 
NOTE: you must either provide compute (the label) OR all relevant options as kwargs (ltte, stepsize, gr, integrator) Args: b: (Bundle) the bundle with a set hierarchy times: (list or array) times at which to run the dynamics stepsize: (float, optional) stepsize for the integration [default: 0.01] orbiterror: (float, optional) orbiterror for the integration [default: 1e-16] ltte: (bool, default False) whether to account for light travel time effects. gr: (bool, default False) whether to account for general relativity effects. Returns: t, xs, ys, zs, vxs, vys, vzs. t is a numpy array of all times, the remaining are a list of numpy arrays (a numpy array per star - in order given by b.hierarchy.get_stars()) for the cartesian positions and velocities of each star at those same times.", "response": "def dynamics_from_bundle(b, times, compute=None, return_roche_euler=False, use_kepcart=False, **kwargs):\n \"\"\"\n Parse parameters in the bundle and call :func:`dynamics`.\n\n See :func:`dynamics` for more detailed information.\n\n NOTE: you must either provide compute (the label) OR all relevant options\n as kwargs (ltte, stepsize, gr, integrator)\n\n Args:\n b: (Bundle) the bundle with a set hierarchy\n times: (list or array) times at which to run the dynamics\n stepsize: (float, optional) stepsize for the integration\n [default: 0.01]\n orbiterror: (float, optional) orbiterror for the integration\n [default: 1e-16]\n ltte: (bool, default False) whether to account for light travel time effects.\n gr: (bool, default False) whether to account for general relativity effects.\n\n Returns:\n t, xs, ys, zs, vxs, vys, vzs. 
t is a numpy array of all times,\n the remaining are a list of numpy arrays (a numpy array per\n star - in order given by b.hierarchy.get_stars()) for the cartesian\n positions and velocities of each star at those same times.\n\n \"\"\"\n\n b.run_delayed_constraints()\n\n hier = b.hierarchy\n\n computeps = b.get_compute(compute, check_visible=False, force_ps=True)\n stepsize = computeps.get_value('stepsize', check_visible=False, **kwargs)\n ltte = computeps.get_value('ltte', check_visible=False, **kwargs)\n gr = computeps.get_value('gr', check_visible=False, **kwargs)\n integrator = computeps.get_value('integrator', check_visible=False, **kwargs)\n\n starrefs = hier.get_stars()\n orbitrefs = hier.get_orbits() if use_kepcart else [hier.get_parent_of(star) for star in starrefs]\n\n def mean_anom(t0, t0_perpass, period):\n # TODO: somehow make this into a constraint where t0 and mean anom\n # are both in the compute options if dynamic_method==nbody\n # (one is constrained from the other and the orbit.... 
nvm, this gets ugly)\n return 2 * np.pi * (t0 - t0_perpass) / period\n\n masses = [b.get_value('mass', u.solMass, component=component, context='component') * c.G.to('AU3 / (Msun d2)').value for component in starrefs] # GM\n smas = [b.get_value('sma', u.AU, component=component, context='component') for component in orbitrefs]\n eccs = [b.get_value('ecc', component=component, context='component') for component in orbitrefs]\n incls = [b.get_value('incl', u.rad, component=component, context='component') for component in orbitrefs]\n per0s = [b.get_value('per0', u.rad, component=component, context='component') for component in orbitrefs]\n long_ans = [b.get_value('long_an', u.rad, component=component, context='component') for component in orbitrefs]\n t0_perpasses = [b.get_value('t0_perpass', u.d, component=component, context='component') for component in orbitrefs]\n periods = [b.get_value('period', u.d, component=component, context='component') for component in orbitrefs]\n\n if return_roche_euler:\n # rotperiods are only needed to compute instantaneous syncpars\n rotperiods = [b.get_value('period', u.d, component=component, context='component') for component in starrefs]\n else:\n rotperiods = None\n\n vgamma = b.get_value('vgamma', context='system', unit=u.AU/u.d)\n t0 = b.get_value('t0', context='system', unit=u.d)\n\n # mean_anoms = [mean_anom(t0, t0_perpass, period) for t0_perpass, period in zip(t0_perpasses, periods)]\n mean_anoms = [b.get_value('mean_anom', u.rad, component=component, context='component') for component in orbitrefs]\n\n return dynamics(times, masses, smas, eccs, incls, per0s, long_ans, \\\n mean_anoms, rotperiods, t0, vgamma, stepsize, ltte, gr,\n integrator, use_kepcart=use_kepcart, return_roche_euler=return_roche_euler)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef dynamics_from_bundle_bs(b, times, compute=None, return_roche_euler=False, **kwargs):\n\n stepsize = 0.01\n 
orbiterror = 1e-16\n computeps = b.get_compute(compute, check_visible=False, force_ps=True)\n ltte = computeps.get_value('ltte', check_visible=False, **kwargs)\n\n\n hier = b.hierarchy\n\n starrefs = hier.get_stars()\n orbitrefs = hier.get_orbits()\n\n def mean_anom(t0, t0_perpass, period):\n # TODO: somehow make this into a constraint where t0 and mean anom\n # are both in the compute options if dynamic_method==nbody\n # (one is constrained from the other and the orbit.... nvm, this gets ugly)\n return 2 * np.pi * (t0 - t0_perpass) / period\n\n masses = [b.get_value('mass', u.solMass, component=component, context='component') * c.G.to('AU3 / (Msun d2)').value for component in starrefs] # GM\n smas = [b.get_value('sma', u.AU, component=component, context='component') for component in orbitrefs]\n eccs = [b.get_value('ecc', component=component, context='component') for component in orbitrefs]\n incls = [b.get_value('incl', u.rad, component=component, context='component') for component in orbitrefs]\n per0s = [b.get_value('per0', u.rad, component=component, context='component') for component in orbitrefs]\n long_ans = [b.get_value('long_an', u.rad, component=component, context='component') for component in orbitrefs]\n t0_perpasses = [b.get_value('t0_perpass', u.d, component=component, context='component') for component in orbitrefs]\n periods = [b.get_value('period', u.d, component=component, context='component') for component in orbitrefs]\n\n vgamma = b.get_value('vgamma', context='system', unit=u.solRad/u.d)\n t0 = b.get_value('t0', context='system', unit=u.d)\n\n # mean_anoms = [mean_anom(t0, t0_perpass, period) for t0_perpass, period in zip(t0_perpasses, periods)]\n mean_anoms = [b.get_value('mean_anom', u.rad, component=component, context='component') for component in orbitrefs]\n\n return dynamics_bs(times, masses, smas, eccs, incls, per0s, long_ans, \\\n mean_anoms, t0, vgamma, stepsize, orbiterror, ltte,\n return_roche_euler=return_roche_euler)", 
"response": "Parse parameters in the bundle and call dynamics function."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef dynamics_bs(times, masses, smas, eccs, incls, per0s, long_ans, mean_anoms,\n t0=0.0, vgamma=0.0, stepsize=0.01, orbiterror=1e-16, ltte=False,\n return_roche_euler=False):\n \"\"\"\n Burlisch-Stoer integration of orbits to give positions and velocities\n of any given number of stars in hierarchical orbits. This code\n currently uses the NBody code in Josh Carter's photodynam code\n available here:\n\n [[TODO: include link]]\n\n If using the Nbody mode in PHOEBE, please cite him as well:\n\n [[TODO: include citation]]\n\n See :func:`dynamics_from_bundle` for a wrapper around this function\n which automatically handles passing everything in the correct order\n and in the correct units.\n\n For each iterable input, stars and orbits should be listed in order\n from primary -> secondary for each nested hierarchy level. 
Each\n iterable for orbits should have length one less than those for each\n star (ie if 3 masses are provided, then 2 smas, eccs, etc need to\n be provided)\n\n Args:\n times: (iterable) times at which to compute positions and\n velocities for each star\n masses: (iterable) mass for each star in [solMass]\n smas: (iterable) semi-major axis for each orbit [AU]\n eccs: (iterable) eccentricities for each orbit\n incls: (iterable) inclinations for each orbit [rad]\n per0s: (iterable) longitudes of periastron for each orbit [rad]\n long_ans: (iterable) longitudes of the ascending node for each\n orbit [rad]\n mean_anoms: (iterable) mean anomalies for each orbit\n t0: (float) time at which to start the integrations\n stepsize: (float, optional) stepsize of the integrations\n [default: 0.01]\n orbiterror: (float, optional) orbiterror of the integrations\n [default: 1e-16]\n ltte: (bool, default False) whether to account for light travel time effects.\n\n Returns:\n t, xs, ys, zs, vxs, vys, vzs. 
t is a numpy array of all times,\n the remaining are a list of numpy arrays (a numpy array per\n star - in order given by b.hierarchy.get_stars()) for the cartesian\n positions and velocities of each star at those same times.\n\n \"\"\"\n\n if not _can_bs:\n raise ImportError(\"photodynam is not installed (http://github.com/phoebe-project/photodynam)\")\n\n times = _ensure_tuple(times)\n masses = _ensure_tuple(masses)\n smas = _ensure_tuple(smas)\n eccs = _ensure_tuple(eccs)\n incls = _ensure_tuple(incls)\n per0s = _ensure_tuple(per0s)\n long_ans = _ensure_tuple(long_ans)\n mean_anoms = _ensure_tuple(mean_anoms)\n\n # TODO: include vgamma!!!!\n # print \"*** bs.do_dynamics\", masses, smas, eccs, incls, per0s, long_ans, mean_anoms, t0\n d = photodynam.do_dynamics(times, masses, smas, eccs, incls, per0s, long_ans, mean_anoms, t0, stepsize, orbiterror, ltte, return_roche_euler)\n # d is in the format: {'t': (...), 'x': ( (1,2,3), (1,2,3), ...), 'y': ..., 'z': ...}\n\n nobjects = len(masses)\n ntimes = len(times)\n\n # TODO: need to return euler angles... if that even makes sense?? 
Or maybe we\n # need to make a new place in orbit??\n\n au_to_solrad = (1*u.AU).to(u.solRad).value\n\n ts = np.array(d['t'])\n xs = [(-1*np.array([d['x'][ti][oi] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n ys = [(-1*np.array([d['y'][ti][oi] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n zs = [(np.array([d['z'][ti][oi] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n vxs = [(-1*np.array([d['vx'][ti][oi] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n vys = [(-1*np.array([d['vy'][ti][oi] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n vzs = [(np.array([d['vz'][ti][oi] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n\n if return_roche_euler:\n # raise NotImplementedError(\"euler angles for BS not currently supported\")\n # a (sma), e (ecc), in (incl), o (per0?), ln (long_an?), m (mean_anom?)\n ds = [(np.array([d['kepl_a'][ti][oi] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n # TODO: fix this\n Fs = [(np.array([1.0 for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n # TODO: check to make sure this is the right angle\n # TODO: need to add np.pi for secondary component?\n # true anomaly + periastron\n ethetas = [(np.array([d['kepl_o'][ti][oi]+d['kepl_m'][ti][oi]+np.pi/2 for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n # elongans = [(np.array([d['kepl_ln'][ti][oi]+long_ans[0 if oi==0 else oi-1] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n elongans = [(np.array([d['kepl_ln'][ti][oi]+long_ans[0 if oi==0 else oi-1] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n # eincls = [(np.array([d['kepl_in'][ti][oi]+incls[0 if oi==0 else oi-1] for ti in range(ntimes)])*au_to_solrad) for oi in range(nobjects)]\n eincls = [(np.array([d['kepl_in'][ti][oi]+np.pi-incls[0 if oi==0 else oi-1] for ti in range(ntimes)])*au_to_solrad) for oi in 
range(nobjects)]\n\n\n\n # d, solRad, solRad/d, rad\n return ts, xs, ys, zs, vxs, vys, vzs, ds, Fs, ethetas, elongans, eincls\n\n else:\n\n # d, solRad, solRad/d\n return ts, xs, ys, zs, vxs, vys, vzs", "response": "This function is used to generate a Burlisch - Stoer integration of orbits for a given set of time intervals."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a dictionary of all visible or partial triangles that are only horizon.", "response": "def only_horizon(meshes, xs, ys, zs, expose_horizon=False):\n \"\"\"\n Check all visible or partial triangles to see if they're behind the horizon,\n by checking the direction of the z-component of the normals (ie hidden if mu<0)\n \"\"\"\n\n # if visibility == 0, it should remain 0\n # if visibility == 0.5, it should stay 0.5 if mu > 0 else it should become 0\n # if visibility == 1, it should stay 1 if mu > 0 else it should become 0\n\n # this can all by easily done by multiplying by int(mu>0) (1 if visible, 0 if hidden)\n\n return {comp_no: mesh.visibilities * (mesh.mus > 0).astype(int) for comp_no, mesh in meshes.items()}, None, None"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef native(meshes, xs, ys, zs, expose_horizon=False, horizon_method='boolean'):\n\n centers_flat = meshes.get_column_flat('centers')\n vertices_flat = meshes.get_column_flat('vertices')\n triangles_flat = meshes.get_column_flat('triangles') # should handle offset automatically\n\n if horizon_method=='boolean':\n normals_flat = meshes.get_column_flat('tnormals')\n elif horizon_method=='linear':\n normals_flat = meshes.get_column_flat('vnormals')\n else:\n raise NotImplementedError\n\n # viewing_vector is defined as star -> earth\n # NOTE: this will need to flip if we change the convention on the z-direction\n viewing_vector = np.array([0., 0., 1.])\n\n\n # we need to send in ALL vertices but only the visible triangle information\n 
info = libphoebe.mesh_visibility(viewing_vector,\n vertices_flat,\n triangles_flat,\n normals_flat,\n tvisibilities=True,\n taweights=True,\n method=horizon_method,\n horizon=expose_horizon)\n\n visibilities = meshes.unpack_column_flat(info['tvisibilities'], computed_type='triangles')\n weights = meshes.unpack_column_flat(info['taweights'], computed_type='triangles')\n\n if expose_horizon:\n horizons = info['horizon']\n\n # TODO: we need to do this per component and somehow return them in\n # a predictable order or as a dictionary a la the other returned quantities\n horizons = [vertices_flat[horizon_i] for horizon_i in horizons]\n\n else:\n horizons = None\n\n return visibilities, weights, horizons", "response": "This function returns the native representation of a single resource in a single resource."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncompute the convex graham of a set of meshes.", "response": "def graham(meshes, xs, ys, zs, expose_horizon=False):\n \"\"\"\n convex_graham\n \"\"\"\n\n distance_factor = 1.0 # TODO: make this an option (what does it even do)?\n\n # first lets handle the horizon\n visibilities, weights, horizon = only_horizon(meshes, xs, ys, zs)\n\n # Order the bodies from front to back. We can do this through zs (which are\n # the z-coordinate positions of each body in the system.\n # Those indices are in the same order as the meshes['comp_no']\n # (needing to add 1 because comp_no starts at 1 not 0)\n\n # TODO: I don't think this whole comp_no thing is going to work with nested\n # triples.. 
may need to rethink the whole meshes.component_by_no thing\n\n front_to_back_comp_nos = np.argsort(zs)[::-1]+1\n nbodies = len(front_to_back_comp_nos)\n\n for i_front in range(0, nbodies-1):\n # for a binary, i_front will only be 0\n comp_no_front = front_to_back_comp_nos[i_front]\n comp_front = meshes.component_by_no(comp_no_front)\n mesh_front = meshes[comp_front]\n visibility_front = visibilities[comp_front]\n\n for i_back in range(i_front+1, nbodies):\n # for a binary, i_back will only be 1\n comp_no_back = front_to_back_comp_nos[i_back]\n comp_back = meshes.component_by_no(comp_no_back)\n mesh_back = meshes[comp_back]\n visibility_back = visibilities[comp_back]\n\n # If mesh_back is entirely hidden, then we can skip any checks and\n # leave it hidden. Note that here we need to check visibility instead\n # of mesh['visibility'] or mesh_back['visibility'] since we are not\n # adjusting those values in memory.\n if np.all(visibility_back==0.0):\n continue\n\n # Determine a scale factor for the triangle\n min_size_back = mesh_back.areas.min()\n distance = distance_factor * 2.0/3**0.25*np.sqrt(min_size_back)\n\n # Select only those triangles that are not hidden\n tri_back_vis = mesh_back.vertices_per_triangle[visibility_back > 0.0].reshape(-1,9)\n tri_front_vis = mesh_front.vertices_per_triangle[visibility_front > 0.0].reshape(-1,9)\n\n back = np.vstack([tri_back_vis[:,0:2], tri_back_vis[:,3:5], tri_back_vis[:,6:8]])\n front = np.vstack([tri_front_vis[:,0:2], tri_front_vis[:,3:5], tri_front_vis[:,6:8]])\n\n # Star in front ---> star in back\n if not front.shape[0]:\n continue\n\n hull, inside = _graham_scan_inside_hull(front, back)\n hidden = inside.reshape(3,-1).all(axis=0)\n visible = ~(inside.reshape(3,-1).any(axis=0))\n\n # Triangles that are partially hidden are those that are not\n # completely hidden, but do have at least one vertex hidden\n partial = ~hidden & ~visible\n\n # These returned visibilities are only from mesh_back_vis\n # So to apply back to our 
master visibilities parameter, we need\n # to find the correct inds.\n visibility_back[visibility_back > 0.0] = 1.0*visible + 0.5*partial\n\n\n ###################################################\n ## TODO: port the following code to be included ##\n ###################################################\n\n\n # It's possible that some triangles that do not have overlapping\n # vertices, still are partially covered (i.e. if the objects fall\n # in between the vertices)\n #visible1 = ~star1.mesh['hidden']\n #backc = star1.mesh['center'][visible1,0:2]\n #if not len(backc):\n # continue\n #\n #subset_partial = closest_points(hull, backc, distance)\n #\n #if not len(subset_partial):\n # continue\n #\n #arg = np.where(visible1)[0][subset_partial]\n #star1.mesh['visible'][arg] = False\n #star1.mesh['partial'][arg] = True\n\n\n return visibilities, None, None"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _get_system_ps(b, item, context='component'):\n # TODO: make this a decorator?\n if isinstance(item, list) and len(item)==1:\n item = item[0]\n\n if isinstance(item, ParameterSet):\n return item.filter(context=context, check_visible=False)\n elif isinstance(item, str):\n return b.filter(item, context=context, check_visible=False)\n else:\n logger.debug(\"_get_system_ps got {}\".format(item))\n raise NotImplementedError(\"_get_system_ps does not support item with type: {}\".format(type(item)))", "response": "Get the system PS from a list of parametersets"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef roche_requiv_L1(q, syncpar, ecc, sma, incl_star, long_an_star, incl_orb, long_an_orb, compno=1):\n return ConstraintParameter(q._bundle, \"requiv_L1(%s, %d)\" % (\", \".join([\"{%s}\" % (param.uniquetwig if hasattr(param, 'uniquetwig') else param.expr) for param in (q, syncpar, ecc, sma, incl_star, long_an_star, incl_orb, long_an_orb)]), compno))", 
"response": "Create a constraint parameter that is used for roche requiv_L1."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a constraint parameter that is used for roche requiv contacting a single site.", "response": "def roche_requiv_contact_L1(q, sma, compno=1):\n \"\"\"\n TODO: add documentation\n \"\"\"\n return ConstraintParameter(q._bundle, \"requiv_contact_L1(%s, %d)\" % (\", \".join([\"{%s}\" % (param.uniquetwig if hasattr(param, 'uniquetwig') else param.expr) for param in (q, sma)]), compno))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef requiv_to_pot_contact(requiv, q, sma, compno=1):\n return ConstraintParameter(requiv._bundle, \"requiv_to_pot_contact({}, {}, {}, {})\".format(_get_expr(requiv), _get_expr(q), _get_expr(sma), compno))", "response": "Returns a constraint parameter that can be used to convert a requiv to a potential contact."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a constraint parameter that can be used to create a requiv contact constraint.", "response": "def pot_to_requiv_contact(pot, q, sma, compno=1):\n \"\"\"\n TODO: add documentation\n \"\"\"\n return ConstraintParameter(pot._bundle, \"pot_to_requiv_contact({}, {}, {}, {})\".format(_get_expr(pot), _get_expr(q), _get_expr(sma), compno))"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a constraint parameter for the esinw2per0 constraint.", "response": "def esinw2per0(ecc, esinw):\n \"\"\"\n TODO: add documentation\n \"\"\"\n return ConstraintParameter(ecc._bundle, \"esinw2per0({}, {})\".format(_get_expr(ecc), _get_expr(esinw)))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef ecosw2per0(ecc, ecosw):\n # print \"***\", \"ecosw2per0({}, {})\".format(_get_expr(ecc), _get_expr(ecosw))\n return 
ConstraintParameter(ecc._bundle, \"ecosw2per0({}, {})\".format(_get_expr(ecc), _get_expr(ecosw)))", "response": "Returns a constraint parameter for the ecosw2per0 constraint."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef t0_perpass_to_supconj(t0_perpass, period, ecc, per0):\n return ConstraintParameter(t0_perpass._bundle, \"t0_perpass_to_supconj({}, {}, {}, {})\".format(_get_expr(t0_perpass), _get_expr(period), _get_expr(ecc), _get_expr(per0)))", "response": "t0_perpass_to_supconj is a constraint parameter that is used to convert per - pass period ecc and per0 to supconj period."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef t0_supconj_to_perpass(t0_supconj, period, ecc, per0):\n return ConstraintParameter(t0_supconj._bundle, \"t0_supconj_to_perpass({}, {}, {}, {})\".format(_get_expr(t0_supconj), _get_expr(period), _get_expr(ecc), _get_expr(per0)))", "response": "t0_supconj_to_perpass - > t0_supconj_to_perpass"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef t0_ref_to_supconj(t0_ref, period, ecc, per0):\n return ConstraintParameter(t0_ref._bundle, \"t0_ref_to_supconj({}, {}, {}, {})\".format(_get_expr(t0_ref), _get_expr(period), _get_expr(ecc), _get_expr(per0)))", "response": "t0_ref_to_supconj - converts t0_ref to supplementary t0_ref"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a constraint for asini in an orbit.", "response": "def asini(b, orbit, solve_for=None):\n \"\"\"\n Create a constraint for asini in an orbit.\n\n If any of the required parameters ('asini', 'sma', 'incl') do not\n exist in the orbit, they will be created.\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str orbit: the label of the orbit in which this\n constraint should be built\n :parameter str solve_for: if 
'asini' should not be the derived/constrained\n parameter, provide which other parameter should be derived\n (ie 'sma' or 'incl')\n :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments\n that were passed to this function)\n \"\"\"\n\n orbit_ps = _get_system_ps(b, orbit)\n\n # We want to get the parameters in THIS orbit, but calling through\n # the bundle in case we need to create it.\n # To do that, we need to know the search parameters to get items from this PS.\n metawargs = orbit_ps.meta\n metawargs.pop('qualifier')\n\n # Now we'll define the parameters in case they don't exist and need to be created\n sma_def = FloatParameter(qualifier='sma', value=8.0, default_unit=u.solRad, description='Semi major axis')\n incl_def = FloatParameter(qualifier='incl', value=90.0, default_unit=u.deg, description='Orbital inclination angle')\n asini_def = FloatParameter(qualifier='asini', value=8.0, default_unit=u.solRad, description='Projected semi major axis')\n\n # And now call get_or_create on the bundle\n sma, created = b.get_or_create('sma', sma_def, **metawargs)\n incl, created = b.get_or_create('incl', incl_def, **metawargs)\n asini, created = b.get_or_create('asini', asini_def, **metawargs)\n\n if solve_for in [None, asini]:\n lhs = asini\n rhs = sma * sin(incl)\n\n elif solve_for == sma:\n lhs = sma\n rhs = asini / sin(incl)\n\n elif solve_for == incl:\n lhs = incl\n rhs = arcsin(asini/sma)\n\n else:\n raise NotImplementedError\n\n #- return lhs, rhs, args_as_pss\n return lhs, rhs, {'orbit': orbit}"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef esinw(b, orbit, solve_for=None, **kwargs):\n orbit_ps = _get_system_ps(b, orbit)\n\n metawargs = orbit_ps.meta\n metawargs.pop('qualifier')\n\n esinw_def = FloatParameter(qualifier='esinw', value=0.0, default_unit=u.dimensionless_unscaled, limits=(-1.0,1.0), description='Eccentricity times sin of argument of periastron')\n esinw, created = 
b.get_or_create('esinw', esinw_def, **metawargs)\n\n ecc = b.get_parameter(qualifier='ecc', **metawargs)\n per0 = b.get_parameter(qualifier='per0', **metawargs)\n\n if solve_for in [None, esinw]:\n lhs = esinw\n rhs = ecc * sin(per0)\n elif solve_for == ecc:\n lhs = ecc\n rhs = esinw / sin(per0)\n elif solve_for == per0:\n lhs = per0\n #rhs = arcsin(esinw/ecc)\n rhs = esinw2per0(ecc, esinw)\n\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'orbit': orbit}", "response": "Create a constraint for esinw in an orbit."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef ecosw(b, orbit, solve_for=None, **kwargs):\n orbit_ps = _get_system_ps(b, orbit)\n\n metawargs = orbit_ps.meta\n metawargs.pop('qualifier')\n\n ecosw_def = FloatParameter(qualifier='ecosw', value=0.0, default_unit=u.dimensionless_unscaled, limits=(-1.0,1.0), description='Eccentricity times cos of argument of periastron')\n ecosw, created = b.get_or_create('ecosw', ecosw_def, **metawargs)\n\n ecc = b.get_parameter(qualifier='ecc', **metawargs)\n per0 = b.get_parameter(qualifier='per0', **metawargs)\n\n if solve_for in [None, ecosw]:\n lhs = ecosw\n rhs = ecc * cos(per0)\n\n elif solve_for == ecc:\n lhs = ecc\n rhs = ecosw / cos(per0)\n elif solve_for == per0:\n lhs = per0\n #rhs = arccos(ecosw/ecc)\n rhs = ecosw2per0(ecc, ecosw)\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'orbit': orbit}", "response": "Create a constraint for ecosw in an orbit."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef t0_perpass_supconj(b, orbit, solve_for=None, **kwargs):\n\n orbit_ps = _get_system_ps(b, orbit)\n\n metawargs = orbit_ps.meta\n metawargs.pop('qualifier')\n\n # by default both t0s exist in an orbit, so we don't have to worry about creating either\n t0_perpass = b.get_parameter(qualifier='t0_perpass', **metawargs)\n t0_supconj = b.get_parameter(qualifier='t0_supconj', 
**metawargs)\n period = b.get_parameter(qualifier='period', **metawargs)\n ecc = b.get_parameter(qualifier='ecc', **metawargs)\n per0 = b.get_parameter(qualifier='per0', **metawargs)\n\n if solve_for in [None, t0_perpass]:\n lhs = t0_perpass\n rhs = t0_supconj_to_perpass(t0_supconj, period, ecc, per0)\n\n elif solve_for == t0_supconj:\n lhs = t0_supconj\n rhs = t0_perpass_to_supconj(t0_perpass, period, ecc, per0)\n\n\n\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'orbit': orbit}", "response": "Create a constraint for t0_perpass and t0_supconj in an orbit"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef t0_ref_supconj(b, orbit, solve_for=None, **kwargs):\n\n orbit_ps = _get_system_ps(b, orbit)\n\n metawargs = orbit_ps.meta\n metawargs.pop('qualifier')\n\n # by default both t0s exist in an orbit, so we don't have to worry about creating either\n t0_ref = b.get_parameter(qualifier='t0_ref', **metawargs)\n t0_supconj = b.get_parameter(qualifier='t0_supconj', **metawargs)\n period = b.get_parameter(qualifier='period', **metawargs)\n ecc = b.get_parameter(qualifier='ecc', **metawargs)\n per0 = b.get_parameter(qualifier='per0', **metawargs)\n\n if solve_for in [None, t0_ref]:\n lhs = t0_ref\n rhs = t0_supconj_to_ref(t0_supconj, period, ecc, per0)\n\n elif solve_for == t0_supconj:\n lhs = t0_supconj\n rhs = t0_ref_to_supconj(t0_ref, period, ecc, per0)\n\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'orbit': orbit}", "response": "Create a constraint for t0_ref in an orbit - allowing translating between t0_ref and t0_supconj."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _true_anom_to_phase(true_anom, period, ecc, per0):\n phshift = 0\n\n mean_anom = true_anom - (ecc*sin(true_anom))*u.deg\n\n Phi = (mean_anom + per0) / (360*u.deg) - 1./4\n\n # phase = Phi - (phshift - 0.25 + per0/(360*u.deg)) * period\n phase = (Phi*u.d - (phshift - 
0.25 + per0/(360*u.deg)) * period)*(u.cycle/u.d)\n\n return phase", "response": "Convert true anom to phase"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets PH_SUPCONJ for orbit b", "response": "def ph_supconj(b, orbit, solve_for=None, **kwargs):\n \"\"\"\n TODO: add documentation\n \"\"\"\n orbit_ps = _get_system_ps(b, orbit)\n\n # metawargs = orbit_ps.meta\n #metawargs.pop('qualifier')\n\n # t0_ph0 and phshift both exist by default, so we don't have to worry about creating either\n # t0_ph0 = orbit_ps.get_parameter(qualifier='t0_ph0')\n # phshift = orbit_ps.get_parameter(qualifier='phshift')\n ph_supconj = orbit_ps.get_parameter(qualifier='ph_supconj')\n per0 = orbit_ps.get_parameter(qualifier='per0')\n ecc = orbit_ps.get_parameter(qualifier='ecc')\n period = orbit_ps.get_parameter(qualifier='period')\n\n # true_anom_supconj = pi/2 - per0\n # mean_anom_supconj = true_anom_supconj - ecc*sin(true_anom_supconj)\n # ph_supconj = (mean_anom_supconj + per0) / (2 * pi) - 1/4\n\n if solve_for in [None, ph_supconj]:\n lhs = ph_supconj\n\n #true_anom_supconj = np.pi/2*u.rad - per0\n true_anom_supconj = -1*(per0 - 360*u.deg)\n\n rhs = _true_anom_to_phase(true_anom_supconj, period, ecc, per0)\n\n #elif solve_for in [per0]:\n # raise NotImplementedError(\"phshift constraint does not support solving for per0 yet\")\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'orbit': orbit}"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates a constraint for frequency given a period", "response": "def freq(b, component, solve_for=None, **kwargs):\n \"\"\"\n Create a constraint for frequency (either orbital or rotational) given a period.\n\n freq = 2 * pi / period\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str component: the label of the orbit or component in which this\n constraint should be built\n :parameter str solve_for: if 'freq' should not be the 
derived/constrained\n parameter, provide which other parameter should be derived\n (ie 'period')\n :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments\n that were passed to this function)\n \"\"\"\n\n component_ps = _get_system_ps(b, component)\n\n #metawargs = component_ps.meta\n #metawargs.pop('qualifier')\n\n period = component_ps.get_parameter(qualifier='period', check_visible=False)\n freq = component_ps.get_parameter(qualifier='freq', check_visible=False)\n\n if solve_for in [None, freq]:\n lhs = freq\n rhs = 2 * np.pi / period\n\n elif solve_for == period:\n lhs = period\n rhs = 2 * np.pi / freq\n\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component}"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef keplers_third_law_hierarchical(b, orbit1, orbit2, solve_for=None, **kwargs):\n\n hier = b.hierarchy\n\n orbit1_ps = _get_system_ps(b, orbit1)\n orbit2_ps = _get_system_ps(b, orbit2)\n\n sma1 = orbit1_ps.get_parameter(qualifier='sma')\n sma2 = orbit2_ps.get_parameter(qualifier='sma')\n\n q1 = orbit1_ps.get_parameter(qualifier='q')\n q2 = orbit2_ps.get_parameter(qualifier='q')\n\n period1 = orbit1_ps.get_parameter(qualifier='period')\n period2 = orbit2_ps.get_parameter(qualifier='period')\n\n # NOTE: orbit1 is the outer, so we need to check orbit2... 
which will\n # be the OPPOSITE component as that of the mass we're solving for\n if hier.get_primary_or_secondary(orbit2_ps.component) == 'primary':\n qthing1 = 1.0+q1\n else:\n qthing1 = 1.0+1./q1\n\n if solve_for in [None, sma1]:\n lhs = sma1\n rhs = (sma2**3 * qthing1 * period1**2/period2**2)**\"(1./3)\"\n else:\n # TODO: add other options to solve_for\n raise NotImplementedError\n\n return lhs, rhs, {'orbit1': orbit1, 'orbit2': orbit2}", "response": "Keplers three - law hierarchical orbit."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef irrad_frac(b, component, solve_for=None, **kwargs):\n\n comp_ps = b.get_component(component=component)\n\n irrad_frac_refl_bol = comp_ps.get_parameter(qualifier='irrad_frac_refl_bol')\n irrad_frac_lost_bol = comp_ps.get_parameter(qualifier='irrad_frac_lost_bol')\n\n if solve_for in [irrad_frac_lost_bol, None]:\n lhs = irrad_frac_lost_bol\n rhs = 1.0 - irrad_frac_refl_bol\n elif solve_for in [irrad_frac_refl_bol]:\n lhs = irrad_frac_refl_bol\n rhs = 1.0 - irrad_frac_lost_bol\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component}", "response": "Creates a constraint to ensure that energy is conserved and all incident\n light is accounted for the given component."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef semidetached(b, component, solve_for=None, **kwargs):\n comp_ps = b.get_component(component=component)\n\n requiv = comp_ps.get_parameter(qualifier='requiv')\n requiv_critical = comp_ps.get_parameter(qualifier='requiv_max')\n\n if solve_for in [requiv, None]:\n lhs = requiv\n rhs = 1.0*requiv_critical\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component}", "response": "Create a constraint to force requiv to be semidetached"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating a constraint for the mass of a star based on 
Kepler s third - order component orbit.", "response": "def mass(b, component, solve_for=None, **kwargs):\n \"\"\"\n Create a constraint for the mass of a star based on Kepler's third\n law from its parent orbit.\n\n If 'mass' does not exist in the component, it will be created\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str component: the label of the star in which this\n constraint should be built\n :parameter str solve_for: if 'mass' should not be the derived/constrained\n parameter, provide which other parameter should be derived\n (ie 'q', sma', 'period')\n :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments\n that were passed to this function)\n :raises NotImplementedError: if the hierarchy is not found\n :raises NotImplementedError: if the value of solve_for is not yet implemented\n \"\"\"\n # TODO: optimize this - this is currently by far the most expensive constraint (due mostly to the parameter multiplication)\n\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for mass requires hierarchy\")\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n metawargs = component_ps.meta\n metawargs.pop('qualifier')\n mass_def = FloatParameter(qualifier='mass', value=1.0, default_unit=u.solMass, description='Mass')\n mass, created = b.get_or_create('mass', mass_def, **metawargs)\n\n metawargs = parentorbit_ps.meta\n metawargs.pop('qualifier')\n sma = b.get_parameter(qualifier='sma', **metawargs)\n period = b.get_parameter(qualifier='period', **metawargs)\n q = b.get_parameter(qualifier='q', **metawargs)\n\n G = c.G.to('solRad3 / (solMass d2)')\n G.keep_in_solar_units = True\n\n if hier.get_primary_or_secondary(component) == 
'primary':\n qthing = 1.0+q\n else:\n qthing = 1.0+1./q\n\n if solve_for in [None, mass]:\n\n lhs = mass\n rhs = (4*np.pi**2 * sma**3 ) / (period**2 * qthing * G)\n\n elif solve_for==sma:\n\n lhs = sma\n rhs = ((mass * period**2 * qthing * G)/(4 * np.pi**2))**\"(1./3)\"\n\n elif solve_for==period:\n\n lhs = period\n rhs = ((4 * np.pi**2 * sma**3)/(mass * qthing * G))**\"(1./2)\"\n\n elif solve_for==q:\n # TODO: implement this so that one mass can be solved for sma and the\n # other can be solved for q. The tricky thing is that we actually\n # have qthing here... so we'll probably need to handle the primary\n # vs secondary case separately.\n raise NotImplementedError\n\n else:\n # TODO: solve for other options\n raise NotImplementedError\n\n return lhs, rhs, {'component': component}"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a constraint for the semi - major axes of the star in which the component is in the parent orbit.", "response": "def comp_sma(b, component, solve_for=None, **kwargs):\n \"\"\"\n Create a constraint for the star's semi-major axes WITHIN its\n parent orbit. 
This is NOT the same as the semi-major axes OF\n the parent orbit\n\n If 'sma' does not exist in the component, it will be created\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str component: the label of the star in which this\n constraint should be built\n :parameter str solve_for: if 'sma@star' should not be the derived/constrained\n parameter, provide which other parameter should be derived\n (ie 'sma@orbit', 'q')\n :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments\n that were passed to this function)\n \"\"\"\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for comp_sma requires hierarchy\")\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n metawargs = component_ps.meta\n metawargs.pop('qualifier')\n compsma_def = FloatParameter(qualifier='sma', value=4.0, default_unit=u.solRad, description='Semi major axis of the component in the orbit')\n compsma, created = b.get_or_create('sma', compsma_def, **metawargs)\n\n metawargs = parentorbit_ps.meta\n metawargs.pop('qualifier')\n sma = b.get_parameter(qualifier='sma', **metawargs)\n q = b.get_parameter(qualifier='q', **metawargs)\n\n # NOTE: similar logic is also in dynamics.keplerian.dynamics_from_bundle to\n # handle nested hierarchical orbits. If changing any of the logic here,\n # it should be changed there as well.\n\n if hier.get_primary_or_secondary(component) == 'primary':\n qthing = (1. + 1./q)\n else:\n qthing = (1. 
+ q)\n\n\n if solve_for in [None, compsma]:\n lhs = compsma\n rhs = sma / qthing\n\n elif solve_for == sma:\n lhs = sma\n rhs = compsma * qthing\n\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component}"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a constraint to determine the requiv_detached_max value of a given component.", "response": "def requiv_detached_max(b, component, solve_for=None, **kwargs):\n \"\"\"\n Create a constraint to determine the critical (at L1) value of\n requiv.\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str component: the label of the star in which this\n constraint should be built\n :parameter str solve_for: if 'requiv_max' should not be the derived/constrained\n parameter, provide which other parameter should be derived\n :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments\n that were passed to this function)\n \"\"\"\n\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for requiv_detached_max requires hierarchy\")\n\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n\n\n if parentorbit == 'component':\n raise ValueError(\"cannot constrain requiv_detached_max for single star\")\n\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n requiv_max = component_ps.get_parameter(qualifier='requiv_max')\n q = parentorbit_ps.get_parameter(qualifier='q')\n syncpar = component_ps.get_parameter(qualifier='syncpar')\n ecc = parentorbit_ps.get_parameter(qualifier='ecc')\n sma = parentorbit_ps.get_parameter(qualifier='sma')\n incl_star = component_ps.get_parameter(qualifier='incl')\n long_an_star = component_ps.get_parameter(qualifier='long_an')\n incl_orbit = 
parentorbit_ps.get_parameter(qualifier='incl')\n long_an_orbit = parentorbit_ps.get_parameter(qualifier='long_an')\n\n if solve_for in [None, requiv_max]:\n lhs = requiv_max\n\n rhs = roche_requiv_L1(q, syncpar, ecc, sma,\n incl_star, long_an_star,\n incl_orbit, long_an_orbit,\n hier.get_primary_or_secondary(component, return_ind=True))\n else:\n raise NotImplementedError(\"requiv_detached_max can only be solved for requiv_max\")\n\n return lhs, rhs, {'component': component}"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef potential_contact_min(b, component, solve_for=None, **kwargs):\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for requiv_contact_min requires hierarchy\")\n\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n pot_min = component_ps.get_parameter(qualifier='pot_min')\n q = parentorbit_ps.get_parameter(qualifier='q')\n\n if solve_for in [None, pot_min]:\n lhs = pot_min\n\n rhs = roche_potential_contact_L23(q)\n else:\n raise NotImplementedError(\"potential_contact_min can only be solved for requiv_min\")\n\n return lhs, rhs, {'component': component}", "response": "Create a constraint to determine the minimum potential contact for a given component."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates a constraint to determine the maximum potential contact for a given component.", "response": "def potential_contact_max(b, component, solve_for=None, **kwargs):\n \"\"\"\n Create a constraint to determine the critical (at L1) value of\n potential at which a constact will underflow. 
This will only be used\n for contacts for pot_min\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str component: the label of the star in which this\n constraint should be built\n :parameter str solve_for: if 'pot_max' should not be the derived/constrained\n parameter, provide which other parameter should be derived\n :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments\n that were passed to this function)\n \"\"\"\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for requiv_contact_max requires hierarchy\")\n\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n pot_max = component_ps.get_parameter(qualifier='pot_max')\n q = parentorbit_ps.get_parameter(qualifier='q')\n\n if solve_for in [None, pot_max]:\n lhs = pot_max\n\n rhs = roche_potential_contact_L1(q)\n else:\n raise NotImplementedError(\"potential_contact_max can only be solved for requiv_max\")\n\n return lhs, rhs, {'component': component}"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef requiv_contact_min(b, component, solve_for=None, **kwargs):\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for requiv_contact_min requires hierarchy\")\n\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n requiv_min = component_ps.get_parameter(qualifier='requiv_min')\n q = parentorbit_ps.get_parameter(qualifier='q')\n sma = 
parentorbit_ps.get_parameter(qualifier='sma')\n\n if solve_for in [None, requiv_min]:\n lhs = requiv_min\n\n rhs = roche_requiv_contact_L1(q, sma, hier.get_primary_or_secondary(component, return_ind=True))\n else:\n raise NotImplementedError(\"requiv_contact_min can only be solved for requiv_min\")\n\n return lhs, rhs, {'component': component}", "response": "Create a constraint to determine the minimum requiv contact for a given component."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef requiv_contact_max(b, component, solve_for=None, **kwargs):\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for requiv_contact_max requires hierarchy\")\n\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n requiv_max = component_ps.get_parameter(qualifier='requiv_max')\n q = parentorbit_ps.get_parameter(qualifier='q')\n sma = parentorbit_ps.get_parameter(qualifier='sma')\n\n if solve_for in [None, requiv_max]:\n lhs = requiv_max\n\n rhs = roche_requiv_contact_L23(q, sma, hier.get_primary_or_secondary(component, return_ind=True))\n else:\n raise NotImplementedError(\"requiv_contact_max can only be solved for requiv_max\")\n\n return lhs, rhs, {'component': component}", "response": "This function returns a constraint to determine the maximum requiv contact for a given component."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef fillout_factor(b, component, solve_for=None, **kwargs):\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n 
raise NotImplementedError(\"constraint for requiv_contact_max requires hierarchy\")\n\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n pot = component_ps.get_parameter(qualifier='pot')\n fillout_factor = component_ps.get_parameter(qualifier='fillout_factor')\n q = parentorbit_ps.get_parameter(qualifier='q')\n\n if solve_for in [None, fillout_factor]:\n lhs = fillout_factor\n\n rhs = roche_pot_to_fillout_factor(q, pot)\n elif solve_for in [pot]:\n lhs = pot\n\n rhs = roche_fillout_factor_to_pot(q, fillout_factor)\n else:\n raise NotImplementedError(\"fillout_factor can not be solved for {}\".format(solve_for))\n\n return lhs, rhs, {'component': component}", "response": "Create a constraint to determine the fillout factor of a contact envelope."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a constraint for the rotation period of a star given its orbital and synchronicity parameters.", "response": "def rotation_period(b, component, solve_for=None, **kwargs):\n \"\"\"\n Create a constraint for the rotation period of a star given its orbital\n period and synchronicity parameters.\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str component: the label of the star in which this\n constraint should be built\n :parameter str solve_for: if 'period@star' should not be the derived/constrained\n parameter, provide which other parameter should be derived\n (ie 'syncpar@star', 'period@orbit')\n :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments\n that were passed to this function)\n \"\"\"\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for comp_sma requires hierarchy\")\n\n component_ps = 
_get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n metawargs = component_ps.meta\n metawargs.pop('qualifier')\n period_star = b.get_parameter(qualifier='period', **metawargs)\n syncpar_star = b.get_parameter(qualifier='syncpar', **metawargs)\n\n\n metawargs = parentorbit_ps.meta\n metawargs.pop('qualifier')\n period_orbit = b.get_parameter(qualifier='period', **metawargs)\n\n if solve_for in [None, period_star]:\n lhs = period_star\n rhs = period_orbit / syncpar_star\n\n elif solve_for == syncpar_star:\n lhs = syncpar_star\n rhs = period_orbit / period_star\n\n elif solve_for == period_orbit:\n lhs = period_orbit\n rhs = syncpar_star * period_star\n\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component}"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef pitch(b, component, solve_for=None, **kwargs):\n\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for pitch requires hierarchy\")\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n incl_comp = component_ps.get_parameter(qualifier='incl')\n pitch_comp = component_ps.get_parameter(qualifier='pitch')\n incl_orb = parentorbit_ps.get_parameter(qualifier='incl')\n\n if solve_for in [None, incl_comp]:\n lhs = incl_comp\n rhs = incl_orb + pitch_comp\n\n elif solve_for == incl_orb:\n lhs = incl_orb\n rhs = incl_comp - pitch_comp\n\n elif solve_for == pitch_comp:\n lhs = pitch_comp\n rhs = incl_comp - incl_orb\n\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component}", "response": "Create a constraint for the inclination of a star relative to its 
parent orbit"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a constraint for inclination of a star relative to its parent orbit", "response": "def yaw(b, component, solve_for=None, **kwargs):\n \"\"\"\n Create a constraint for the inclination of a star relative to its parent orbit\n\n :parameter b: the :class:`phoebe.frontend.bundle.Bundle`\n :parameter str component: the label of the star in which this\n constraint should be built\n :parameter str solve_for: if 'long_an@star' should not be the derived/constrained\n parameter, provide which other parameter should be derived\n (ie 'long_an@orbit', 'yaw@star')\n :returns: lhs (Parameter), rhs (ConstraintParameter), args (list of arguments\n that were passed to this function)\n \"\"\"\n\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for yaw requires hierarchy\")\n\n component_ps = _get_system_ps(b, component)\n\n parentorbit = hier.get_parent_of(component)\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n long_an_comp = component_ps.get_parameter(qualifier='long_an')\n yaw_comp = component_ps.get_parameter(qualifier='yaw')\n long_an_orb = parentorbit_ps.get_parameter(qualifier='long_an')\n\n if solve_for in [None, long_an_comp]:\n lhs = long_an_comp\n rhs = long_an_orb + yaw_comp\n\n elif solve_for == long_an_orb:\n lhs = long_an_orb\n rhs = long_an_comp - yaw_comp\n\n elif solve_for == yaw_comp:\n lhs = yaw_comp\n rhs = long_an_comp - long_an_orb\n\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component}"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef time_ephem(b, component, dataset, solve_for=None, **kwargs):\n hier = b.get_hierarchy()\n if not len(hier.get_value()):\n # TODO: change to 
custom error type to catch in bundle.add_component\n # TODO: check whether the problem is 0 hierarchies or more than 1\n raise NotImplementedError(\"constraint for time_ecl requires hierarchy\")\n\n if component=='_default':\n # need to do this so that the constraint won't fail before being copied\n parentorbit = hier.get_top()\n else:\n parentorbit = hier.get_parent_of(component)\n\n parentorbit_ps = _get_system_ps(b, parentorbit)\n\n filterwargs = {}\n if component is not None:\n filterwargs['component'] = component\n if dataset is not None:\n filterwargs['dataset'] = dataset\n\n time_ephem = b.get_parameter(qualifier='time_ephems', **filterwargs)\n t0 = parentorbit_ps.get_parameter(qualifier='t0_supconj') # TODO: make sure t0_supconj makes sense here\n period = parentorbit_ps.get_parameter(qualifier='period')\n phshift = parentorbit_ps.get_parameter(qualifier='phshift')\n dpdt = parentorbit_ps.get_parameter(qualifier='dpdt')\n esinw_ = parentorbit_ps.get_parameter(qualifier='esinw')\n\n N = b.get_parameter(qualifier='Ns', **filterwargs)\n\n if solve_for in [None, time_ephem]:\n\n # TODO: N is always an int, but we want to include the expected phase of eclipse (ie N+ph_ecl) based on which component and esinw/ecosw\n # then we can have bundle.add_component automatically default to add all components instead of just the primary\n\n # same as Bundle.to_time except phase can be > 1\n lhs = time_ephem\n # we have to do a trick here since dpdt is in sec/yr and floats are\n # assumed to have the same unit during subtraction or addition.\n one = 1.0*(u.s/u.s)\n if component!='_default' and hier.get_primary_or_secondary(component)=='secondary':\n # TODO: make sure this constraint updates if the hierarchy changes?\n N = N + 0.5 + esinw_ # TODO: check this\n rhs = t0 + ((N - phshift) * period) / (-1 * (N - phshift) * dpdt + one)\n #rhs = (N-phshift)*period\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component, 'dataset': dataset}", "response": 
"get the time_ephemeris of a bundle"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncomputing the ETV column from the time_ephems and time_ecls", "response": "def etv(b, component, dataset, solve_for=None, **kwargs):\n \"\"\"\n compute the ETV column from the time_ephem and time_ecl columns (used in the\n ETV dataset)\n \"\"\"\n\n time_ephem = b.get_parameter(qualifier='time_ephems', component=component, dataset=dataset, context=['dataset', 'model']) # need to provide context to avoid getting the constraint\n time_ecl = b.get_parameter(qualifier='time_ecls', component=component, dataset=dataset)\n etv = b.get_parameter(qualifier='etvs', component=component, dataset=dataset)\n\n if solve_for in [None, etv]:\n lhs = etv\n rhs = time_ecl - time_ephem\n else:\n raise NotImplementedError\n\n return lhs, rhs, {'component': component, 'dataset': dataset}"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef is_float(value):\n return isinstance(value, float) or isinstance(value, int) or isinstance(value, np.float64), float(value)", "response": "must be a float"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncheck if value is a positive integer or a tuple or list of positive integers", "response": "def is_valid_shape(value):\n \"\"\"must be a positive integer or a tuple/list of positive integers\"\"\"\n if is_int_positive(value):\n return True, value\n elif isinstance(value, tuple) or isinstance(value, list):\n for v in value:\n if not is_int_positive(v):\n return False, value\n return True, value\n else:\n return False, value"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef quantity(self):\n if not _has_astropy:\n raise ImportError(\"astropy must be installed for unit/quantity support\")\n\n if self.unit is None:\n raise ValueError(\"unit is not set, cannot convert to quantity\")\n\n 
return self.array * self.unit", "response": "Returns the underlying astropy quantity"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef to(self, unit):\n if not _has_astropy:\n raise ImportError(\"astropy must be installed for unit/quantity support\")\n\n if self.unit is None:\n raise ValueError(\"no units currently set\")\n\n if not is_unit_or_unitstring(unit)[0]:\n raise ValueError(\"unit not recognized\")\n\n mult_factor = self.unit.to(unit)\n copy = self.copy() * mult_factor\n copy.unit = unit\n return copy", "response": "Returns a new nparray object with the new units converted to the given unit."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef to_dict(self):\n def _json_safe(v):\n if isinstance(v, np.ndarray):\n return v.tolist()\n elif is_unit(v)[0]:\n return v.to_string()\n else:\n return v\n d = {k:_json_safe(v) for k,v in self._descriptors.items()}\n d['nparray'] = self.__class__.__name__.lower()\n return d", "response": "Dump a representation of the nparray object to a dictionary."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ndump a representation of the nparray object to a json - formatted file.", "response": "def to_file(self, filename, **kwargs):\n \"\"\"\n dump a representation of the nparray object to a json-formatted file.\n The nparray object should then be able to be fully restored via\n nparray.from_file\n\n @parameter str filename: path to the file to be created (will overwrite\n if already exists)\n @rtype: str\n @returns: the filename\n \"\"\"\n f = open(filename, 'w')\n f.write(self.to_json(**kwargs))\n f.close()\n return filename"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef array(self):\n return np.arange(self.start, self.stop, self.step)", "response": "return the underlying numpy array"} {"SOURCE": "codesearchnet", "instruction": 
"Here you have a function in Python 3, explain what it does\ndef to_linspace(self):\n num = int((self.stop-self.start)/(self.step))\n return Linspace(self.start, self.stop-self.step, num)", "response": "convert from arange to linspace"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the underlying numpy array", "response": "def array(self):\n \"\"\"\n return the underlying numpy array\n \"\"\"\n return np.linspace(self.start, self.stop, self.num, self.endpoint)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef to_arange(self):\n arr, step = np.linspace(self.start, self.stop, self.num, self.endpoint, retstep=True)\n return Arange(self.start, self.stop+step, step)", "response": "convert from linspace to arange"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the underlying numpy array", "response": "def array(self):\n \"\"\"\n return the underlying numpy array\n \"\"\"\n return np.logspace(self.start, self.stop, self.num, self.endpoint, self.base)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the underlying numpy array", "response": "def array(self):\n \"\"\"\n return the underlying numpy array\n \"\"\"\n return np.geomspace(self.start, self.stop, self.num, self.endpoint)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef to_linspace(self):\n if hasattr(self.shape, '__len__'):\n raise NotImplementedError(\"can only convert flat Full arrays to linspace\")\n return Linspace(self.fill_value, self.fill_value, self.shape)", "response": "convert from full to linspace"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget the underlying numpy array", "response": "def array(self):\n \"\"\"\n return the underlying numpy array\n \"\"\"\n return np.eye(self.M, self.N, self.k)"} {"SOURCE": "codesearchnet", "instruction": "Can you 
generate a brief explanation for the following Python 3 code\ndef crossing(b, component, time, dynamics_method='keplerian', ltte=True, tol=1e-4, maxiter=1000):\n\n\n def projected_separation_sq(time, b, dynamics_method, cind1, cind2, ltte=True):\n \"\"\"\n \"\"\"\n #print \"*** projected_separation_sq\", time, dynamics_method, cind1, cind2, ltte\n\n\n times = np.array([time])\n\n if dynamics_method in ['nbody', 'rebound']:\n # TODO: make sure that this takes systemic velocity and corrects positions and velocities (including ltte effects if enabled)\n ts, xs, ys, zs, vxs, vys, vzs = dynamics.nbody.dynamics_from_bundle(b, times, compute=None, ltte=ltte)\n\n elif dynamics_method=='bs':\n ts, xs, ys, zs, vxs, vys, vzs = dynamics.nbody.dynamics_from_bundle_bs(b, times, compute, ltte=ltte)\n\n elif dynamics_method=='keplerian':\n # TODO: make sure that this takes systemic velocity and corrects positions and velocities (including ltte effects if enabled)\n ts, xs, ys, zs, vxs, vys, vzs = dynamics.keplerian.dynamics_from_bundle(b, times, compute=None, ltte=ltte, return_euler=False)\n\n else:\n raise NotImplementedError\n\n\n return (xs[cind2][0]-xs[cind1][0])**2 + (ys[cind2][0]-ys[cind1][0])**2\n\n\n # TODO: optimize this by allowing to pass cind1 and cind2 directly (and fallback to this if they aren't)\n starrefs = b.hierarchy.get_stars()\n cind1 = starrefs.index(component)\n cind2 = starrefs.index(b.hierarchy.get_sibling_of(component))\n\n # TODO: provide options for tol and maxiter (in the frontend computeoptionsp)?\n return newton(projected_separation_sq, x0=time, args=(b, dynamics_method, cind1, cind2, ltte), tol=tol, maxiter=maxiter)", "response": "Crossing a component with a given time."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef requiv_contact_L1(q, sma, compno, **kwargs):\n return requiv_L1(q=q, syncpar=1, ecc=0, sma=sma, incl_star=0, long_an_star=0, incl_orb=0, long_an_orb=0, compno=compno, 
**kwargs)", "response": "This function is used to make sure that the requiv is aligned and not circular."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncompute the time shift between superior conjuction and periastron passage.", "response": "def _delta_t_supconj_perpass(period, ecc, per0):\n \"\"\"\n time shift between superior conjuction and periastron passage\n \"\"\"\n ups_sc = np.pi/2-per0\n E_sc = 2*np.arctan( np.sqrt((1-ecc)/(1+ecc)) * np.tan(ups_sc/2) )\n M_sc = E_sc - ecc*np.sin(E_sc)\n return period*(M_sc/2./np.pi)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nloads an nparray object from a dictionary", "response": "def from_dict(d):\n \"\"\"\n load an nparray object from a dictionary\n\n @parameter str d: dictionary representing the nparray object\n \"\"\"\n if isinstance(d, str):\n return from_json(d)\n\n if not isinstance(d, dict):\n raise TypeError(\"argument must be of type dict\")\n if 'nparray' not in d.keys():\n raise ValueError(\"input dictionary missing 'nparray' entry\")\n\n classname = d.pop('nparray').title()\n return getattr(_wrappers, classname)(**d)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef from_json(j):\n if isinstance(j, dict):\n return from_dict(j)\n\n if not (isinstance(j, str) or isinstance(j, unicode)):\n raise TypeError(\"argument must be of type str\")\n\n return from_dict(json.loads(j))", "response": "load an nparray object from a json - formatted string"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef from_file(filename):\n f = open(filename, 'r')\n j = json.load(f)\n f.close()\n\n return from_dict(j)", "response": "Load an nparray object from a json file"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef monkeypatch():\n np.array = 
array\n np.arange = arange\n np.linspace = linspace\n np.logspace = logspace\n np.geomspace = geomspace\n np.full = full\n np.full_like = full_like\n np.zeros = zeros\n np.zeros_like = zeros_like\n np.ones = ones\n np.ones_like = ones_like\n np.eye = eye", "response": "Monkeypatch numpy functions to call those provided by nparray instead."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ninstalling a passband from a local file.", "response": "def install_passband(fname, local=True):\n \"\"\"\n Install a passband from a local file. This simply copies the file into the\n install path - but beware that clearing the installation will clear the\n passband as well\n\n If local=False, you must have permissions to access the installation directory\n \"\"\"\n pbdir = _pbdir_local if local else _pbdir_global\n shutil.copy(fname, pbdir)\n init_passband(os.path.join(pbdir, fname))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef download_passband(passband, local=True):\n if passband not in list_online_passbands():\n raise ValueError(\"passband '{}' not available\".format(passband))\n\n pbdir = _pbdir_local if local else _pbdir_global\n\n passband_fname = _online_passbands[passband]['fname']\n passband_fname_local = os.path.join(pbdir, passband_fname)\n url = 'http://github.com/phoebe-project/phoebe2-tables/raw/master/passbands/{}'.format(passband_fname)\n logger.info(\"downloading from {} and installing to {}...\".format(url, passband_fname_local))\n try:\n urllib.urlretrieve(url, passband_fname_local)\n except IOError:\n raise IOError(\"unable to download {} passband - check connection\".format(passband))\n else:\n init_passband(passband_fname_local)", "response": "Download and install a given passband from the repository."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncomputes monochromatic blackbody intensity in W using the the Planck function.", 
"response": "def _planck(self, lam, Teff):\n \"\"\"\n Computes monochromatic blackbody intensity in W/m^3 using the\n Planck function.\n\n @lam: wavelength in m\n @Teff: effective temperature in K\n\n Returns: monochromatic blackbody intensity\n \"\"\"\n\n return 2*self.h*self.c*self.c/lam**5 * 1./(np.exp(self.h*self.c/lam/self.k/Teff)-1)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncomputes the derivative of the monochromatic blackbody intensity using the Planck function.", "response": "def _planck_deriv(self, lam, Teff):\n \"\"\"\n Computes the derivative of the monochromatic blackbody intensity using\n the Planck function.\n\n @lam: wavelength in m\n @Teff: effective temperature in K\n\n Returns: the derivative of monochromatic blackbody intensity\n \"\"\"\n\n expterm = np.exp(self.h*self.c/lam/self.k/Teff)\n return 2*self.h*self.c*self.c/self.k/Teff/lam**7 * (expterm-1)**-2 * (self.h*self.c*expterm-5*lam*self.k*Teff*(expterm-1))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _planck_spi(self, lam, Teff):\n\n hclkt = self.h*self.c/lam/self.k/Teff\n expterm = np.exp(hclkt)\n return hclkt * expterm/(expterm-1)", "response": "Compute the spectral index of monochromatic blackbody intensity using the Planck function."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncomputing the mean passband intensity using blackbody atmosphere.", "response": "def _bb_intensity(self, Teff, photon_weighted=False):\n \"\"\"\n Computes mean passband intensity using blackbody atmosphere:\n\n I_pb^E = \\int_\\lambda I(\\lambda) P(\\lambda) d\\lambda / \\int_\\lambda P(\\lambda) d\\lambda\n I_pb^P = \\int_\\lambda \\lambda I(\\lambda) P(\\lambda) d\\lambda / \\int_\\lambda \\lambda P(\\lambda) d\\lambda\n\n Superscripts E and P stand for energy and photon, respectively.\n\n @Teff: effective temperature in K\n @photon_weighted: photon/energy switch\n\n 
Returns: mean passband intensity using blackbody atmosphere.\n \"\"\"\n\n if photon_weighted:\n pb = lambda w: w*self._planck(w, Teff)*self.ptf(w)\n return integrate.quad(pb, self.wl[0], self.wl[-1])[0]/self.ptf_photon_area\n else:\n pb = lambda w: self._planck(w, Teff)*self.ptf(w)\n return integrate.quad(pb, self.wl[0], self.wl[-1])[0]/self.ptf_area"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncomputing the mean boosting index using blackbody atmosphere.", "response": "def _bindex_blackbody(self, Teff, photon_weighted=False):\n \"\"\"\n Computes the mean boosting index using blackbody atmosphere:\n\n B_pb^E = \\int_\\lambda I(\\lambda) P(\\lambda) B(\\lambda) d\\lambda / \\int_\\lambda I(\\lambda) P(\\lambda) d\\lambda\n B_pb^P = \\int_\\lambda \\lambda I(\\lambda) P(\\lambda) B(\\lambda) d\\lambda / \\int_\\lambda \\lambda I(\\lambda) P(\\lambda) d\\lambda\n\n Superscripts E and P stand for energy and photon, respectively.\n\n @Teff: effective temperature in K\n @photon_weighted: photon/energy switch\n\n Returns: mean boosting index using blackbody atmosphere.\n \"\"\"\n\n if photon_weighted:\n num = lambda w: w*self._planck(w, Teff)*self.ptf(w)*self._planck_spi(w, Teff)\n denom = lambda w: w*self._planck(w, Teff)*self.ptf(w)\n return integrate.quad(num, self.wl[0], self.wl[-1], epsabs=1e10, epsrel=1e-8)[0]/integrate.quad(denom, self.wl[0], self.wl[-1], epsabs=1e10, epsrel=1e-6)[0]\n else:\n num = lambda w: self._planck(w, Teff)*self.ptf(w)*self._planck_spi(w, Teff)\n denom = lambda w: self._planck(w, Teff)*self.ptf(w)\n return integrate.quad(num, self.wl[0], self.wl[-1], epsabs=1e10, epsrel=1e-8)[0]/integrate.quad(denom, self.wl[0], self.wl[-1], epsabs=1e10, epsrel=1e-6)[0]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncomputing the blackbody response for the given set of effective temperatures.", "response": "def compute_blackbody_response(self, Teffs=None):\n \"\"\"\n Computes blackbody intensities 
across the entire range of\n effective temperatures. It does this for two regimes, energy-weighted\n and photon-weighted. It then fits a cubic spline to the log(I)-Teff\n values and exports the interpolation functions _log10_Inorm_bb_energy\n and _log10_Inorm_bb_photon.\n\n @Teffs: an array of effective temperatures. If None, a default\n array from ~300K to ~500000K with 97 steps is used. The default\n array is uniform in log10 scale.\n\n Returns: n/a\n \"\"\"\n\n if Teffs is None:\n log10Teffs = np.linspace(2.5, 5.7, 97) # this corresponds to the 316K-501187K range.\n Teffs = 10**log10Teffs\n\n # Energy-weighted intensities:\n log10ints_energy = np.array([np.log10(self._bb_intensity(Teff, photon_weighted=False)) for Teff in Teffs])\n self._bb_func_energy = interpolate.splrep(Teffs, log10ints_energy, s=0)\n self._log10_Inorm_bb_energy = lambda Teff: interpolate.splev(Teff, self._bb_func_energy)\n\n # Photon-weighted intensities:\n log10ints_photon = np.array([np.log10(self._bb_intensity(Teff, photon_weighted=True)) for Teff in Teffs])\n self._bb_func_photon = interpolate.splrep(Teffs, log10ints_photon, s=0)\n self._log10_Inorm_bb_photon = lambda Teff: interpolate.splev(Teff, self._bb_func_photon)\n\n self.content.append('blackbody')\n self.atmlist.append('blackbody')"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef compute_ck2004_response(self, path, verbose=False):\n\n models = glob.glob(path+'/*M1.000*')\n Nmodels = len(models)\n\n # Store the length of the filename extensions for parsing:\n offset = len(models[0])-models[0].rfind('.')\n\n Teff, logg, abun = np.empty(Nmodels), np.empty(Nmodels), np.empty(Nmodels)\n InormE, InormP = np.empty(Nmodels), np.empty(Nmodels)\n\n if verbose:\n print('Computing Castelli & Kurucz (2004) passband intensities for %s:%s. This will take a while.' 
% (self.pbset, self.pbname))\n\n for i, model in enumerate(models):\n #~ spc = np.loadtxt(model).T -- waaay slower\n spc = np.fromfile(model, sep=' ').reshape(-1,2).T\n\n Teff[i] = float(model[-17-offset:-12-offset])\n logg[i] = float(model[-11-offset:-9-offset])/10\n sign = 1. if model[-9-offset]=='P' else -1.\n abun[i] = sign*float(model[-8-offset:-6-offset])/10\n\n spc[0] /= 1e10 # AA -> m\n spc[1] *= 1e7 # erg/s/cm^2/A -> W/m^3\n wl = spc[0][(spc[0] >= self.ptf_table['wl'][0]) & (spc[0] <= self.ptf_table['wl'][-1])]\n fl = spc[1][(spc[0] >= self.ptf_table['wl'][0]) & (spc[0] <= self.ptf_table['wl'][-1])]\n fl *= self.ptf(wl)\n flP = fl*wl\n InormE[i] = np.log10(fl.sum()/self.ptf_area*(wl[1]-wl[0])) # energy-weighted intensity\n InormP[i] = np.log10(flP.sum()/self.ptf_photon_area*(wl[1]-wl[0])) # photon-weighted intensity\n if verbose:\n if 100*i % (len(models)) == 0:\n print('%d%% done.' % (100*i/(len(models)-1)))\n\n # Store axes (Teff, logg, abun) and the full grid of Inorm, with\n # nans where the grid isn't complete.\n self._ck2004_axes = (np.unique(Teff), np.unique(logg), np.unique(abun))\n\n self._ck2004_energy_grid = np.nan*np.ones((len(self._ck2004_axes[0]), len(self._ck2004_axes[1]), len(self._ck2004_axes[2]), 1))\n self._ck2004_photon_grid = np.nan*np.ones((len(self._ck2004_axes[0]), len(self._ck2004_axes[1]), len(self._ck2004_axes[2]), 1))\n for i, I0 in enumerate(InormE):\n self._ck2004_energy_grid[Teff[i] == self._ck2004_axes[0], logg[i] == self._ck2004_axes[1], abun[i] == self._ck2004_axes[2], 0] = I0\n for i, I0 in enumerate(InormP):\n self._ck2004_photon_grid[Teff[i] == self._ck2004_axes[0], logg[i] == self._ck2004_axes[1], abun[i] == self._ck2004_axes[2], 0] = I0\n\n # Tried radial basis functions but they were just terrible.\n #~ self._log10_Inorm_ck2004 = interpolate.Rbf(self._ck2004_Teff, self._ck2004_logg, self._ck2004_met, self._ck2004_Inorm, function='linear')\n self.content.append('ck2004')\n self.atmlist.append('ck2004')", "response": 
"Compute the Castelli & Kurucz response for the given file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef compute_ck2004_intensities(self, path, particular=None, verbose=False):\n models = os.listdir(path)\n if particular != None:\n models = [particular]\n Nmodels = len(models)\n\n # Store the length of the filename extensions for parsing:\n offset = len(models[0])-models[0].rfind('.')\n\n Teff, logg, abun, mu = np.empty(Nmodels), np.empty(Nmodels), np.empty(Nmodels), np.empty(Nmodels)\n ImuE, ImuP = np.empty(Nmodels), np.empty(Nmodels)\n boostingE, boostingP = np.empty(Nmodels), np.empty(Nmodels)\n\n if verbose:\n print('Computing Castelli-Kurucz intensities for %s:%s. This will take a long while.' % (self.pbset, self.pbname))\n\n for i, model in enumerate(models):\n #spc = np.loadtxt(path+'/'+model).T -- waaay slower\n spc = np.fromfile(path+'/'+model, sep=' ').reshape(-1,2).T\n spc[0] /= 1e10 # AA -> m\n spc[1] *= 1e7 # erg/s/cm^2/A -> W/m^3\n\n Teff[i] = float(model[-17-offset:-12-offset])\n logg[i] = float(model[-11-offset:-9-offset])/10\n sign = 1. 
if model[-9-offset]=='P' else -1.\n abun[i] = sign*float(model[-8-offset:-6-offset])/10\n mu[i] = float(model[-5-offset:-offset])\n\n # trim the spectrum at passband limits:\n keep = (spc[0] >= self.ptf_table['wl'][0]) & (spc[0] <= self.ptf_table['wl'][-1])\n wl = spc[0][keep]\n fl = spc[1][keep]\n\n # make a log-scale copy for boosting and fit a Legendre\n # polynomial to the Imu envelope by way of sigma clipping;\n # then compute a Legendre series derivative to get the\n # boosting index; we only take positive fluxes to keep the\n # log well defined.\n\n lnwl = np.log(wl[fl > 0])\n lnfl = np.log(fl[fl > 0]) + 5*lnwl\n\n # First Legendre fit to the data:\n envelope = np.polynomial.legendre.legfit(lnwl, lnfl, 5)\n continuum = np.polynomial.legendre.legval(lnwl, envelope)\n diff = lnfl-continuum\n sigma = np.std(diff)\n clipped = (diff > -sigma)\n\n # Sigma clip to get the continuum:\n while True:\n Npts = clipped.sum()\n envelope = np.polynomial.legendre.legfit(lnwl[clipped], lnfl[clipped], 5)\n continuum = np.polynomial.legendre.legval(lnwl, envelope)\n diff = lnfl-continuum\n\n # clipping will sometimes unclip already clipped points\n # because the fit is slightly different, which can lead\n # to infinite loops. 
To prevent that, we never allow\n # clipped points to be resurrected, which is achieved\n # by the following bitwise condition (array comparison):\n clipped = clipped & (diff > -sigma)\n\n if clipped.sum() == Npts:\n break\n\n derivative = np.polynomial.legendre.legder(envelope, 1)\n boosting_index = np.polynomial.legendre.legval(lnwl, derivative)\n\n # calculate energy (E) and photon (P) weighted fluxes and\n # their integrals.\n\n flE = self.ptf(wl)*fl\n flP = wl*flE\n flEint = flE.sum()\n flPint = flP.sum()\n\n # calculate mean boosting coefficient and use it to get\n # boosting factors for energy (E) and photon (P) weighted\n # fluxes.\n\n boostE = (flE[fl > 0]*boosting_index).sum()/flEint\n boostP = (flP[fl > 0]*boosting_index).sum()/flPint\n boostingE[i] = boostE\n boostingP[i] = boostP\n\n ImuE[i] = np.log10(flEint/self.ptf_area*(wl[1]-wl[0])) # energy-weighted intensity\n ImuP[i] = np.log10(flPint/self.ptf_photon_area*(wl[1]-wl[0])) # photon-weighted intensity\n\n if verbose:\n if 100*i % (len(models)) == 0:\n print('%d%% done.' % (100*i/(len(models)-1)))\n\n # Store axes (Teff, logg, abun, mu) and the full grid of Imu,\n # with nans where the grid isn't complete. 
Imu-s come in two\n # flavors: energy-weighted intensities and photon-weighted\n # intensities, based on the detector used.\n\n self._ck2004_intensity_axes = (np.unique(Teff), np.unique(logg), np.unique(abun), np.append(np.array(0.0,), np.unique(mu)))\n self._ck2004_Imu_energy_grid = np.nan*np.ones((len(self._ck2004_intensity_axes[0]), len(self._ck2004_intensity_axes[1]), len(self._ck2004_intensity_axes[2]), len(self._ck2004_intensity_axes[3]), 1))\n self._ck2004_Imu_photon_grid = np.nan*np.ones((len(self._ck2004_intensity_axes[0]), len(self._ck2004_intensity_axes[1]), len(self._ck2004_intensity_axes[2]), len(self._ck2004_intensity_axes[3]), 1))\n self._ck2004_boosting_energy_grid = np.nan*np.ones((len(self._ck2004_intensity_axes[0]), len(self._ck2004_intensity_axes[1]), len(self._ck2004_intensity_axes[2]), len(self._ck2004_intensity_axes[3]), 1))\n self._ck2004_boosting_photon_grid = np.nan*np.ones((len(self._ck2004_intensity_axes[0]), len(self._ck2004_intensity_axes[1]), len(self._ck2004_intensity_axes[2]), len(self._ck2004_intensity_axes[3]), 1))\n\n # Set the limb (mu=0) to 0; in log this actually means\n # flux=1W/m2, but for all practical purposes that is still 0.\n self._ck2004_Imu_energy_grid[:,:,:,0,:] = 0.0\n self._ck2004_Imu_photon_grid[:,:,:,0,:] = 0.0\n self._ck2004_boosting_energy_grid[:,:,:,0,:] = 0.0\n self._ck2004_boosting_photon_grid[:,:,:,0,:] = 0.0\n\n for i, Imu in enumerate(ImuE):\n self._ck2004_Imu_energy_grid[Teff[i] == self._ck2004_intensity_axes[0], logg[i] == self._ck2004_intensity_axes[1], abun[i] == self._ck2004_intensity_axes[2], mu[i] == self._ck2004_intensity_axes[3], 0] = Imu\n for i, Imu in enumerate(ImuP):\n self._ck2004_Imu_photon_grid[Teff[i] == self._ck2004_intensity_axes[0], logg[i] == self._ck2004_intensity_axes[1], abun[i] == self._ck2004_intensity_axes[2], mu[i] == self._ck2004_intensity_axes[3], 0] = Imu\n for i, Bavg in enumerate(boostingE):\n self._ck2004_boosting_energy_grid[Teff[i] == self._ck2004_intensity_axes[0], 
logg[i] == self._ck2004_intensity_axes[1], abun[i] == self._ck2004_intensity_axes[2], mu[i] == self._ck2004_intensity_axes[3], 0] = Bavg\n for i, Bavg in enumerate(boostingP):\n self._ck2004_boosting_photon_grid[Teff[i] == self._ck2004_intensity_axes[0], logg[i] == self._ck2004_intensity_axes[1], abun[i] == self._ck2004_intensity_axes[2], mu[i] == self._ck2004_intensity_axes[3], 0] = Bavg\n\n self.content.append('ck2004_all')", "response": "Compute the direction - dependent passband intensities using Castelli - Kurucz model atmospheres."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef export_legacy_ldcoeffs(self, models, filename=None, photon_weighted=True):\n\n if photon_weighted:\n grid = self._ck2004_ld_photon_grid\n else:\n grid = self._ck2004_ld_energy_grid\n\n if filename is not None:\n import time\n f = open(filename, 'w')\n f.write('# PASS_SET %s\\n' % self.pbset)\n f.write('# PASSBAND %s\\n' % self.pbname)\n f.write('# VERSION 1.0\\n\\n')\n f.write('# Exported from PHOEBE-2 passband on %s\\n' % (time.ctime()))\n f.write('# The coefficients are computed for the %s-weighted regime.\\n\\n' % ('photon' if photon_weighted else 'energy'))\n\n mods = np.loadtxt(models)\n for mod in mods:\n Tindex = np.argwhere(self._ck2004_intensity_axes[0] == mod[0])[0][0]\n lindex = np.argwhere(self._ck2004_intensity_axes[1] == mod[1]/10)[0][0]\n mindex = np.argwhere(self._ck2004_intensity_axes[2] == mod[2]/10)[0][0]\n if filename is None:\n print('%6.3f '*11 % tuple(grid[Tindex, lindex, mindex].tolist()))\n else:\n f.write(('%6.3f '*11+'\\n') % tuple(self._ck2004_ld_photon_grid[Tindex, lindex, mindex].tolist()))\n\n if filename is not None:\n f.close()", "response": "Exports CK2004 limb darkening coefficients to a PHOEBE - 2 compatible format."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncomputes the integrated limb darkening profiles for the 2004 atmospheres.", 
"response": "def compute_ck2004_ldints(self):\n \"\"\"\n Computes integrated limb darkening profiles for ck2004 atmospheres.\n These are used for intensity-to-flux transformations. The evaluated\n integral is:\n\n ldint = 2 \\pi \\int_0^1 Imu mu dmu\n \"\"\"\n\n if 'ck2004_all' not in self.content:\n print('Castelli & Kurucz (2004) intensities are not computed yet. Please compute those first.')\n return None\n\n ldaxes = self._ck2004_intensity_axes\n ldtable = self._ck2004_Imu_energy_grid\n pldtable = self._ck2004_Imu_photon_grid\n\n self._ck2004_ldint_energy_grid = np.nan*np.ones((len(ldaxes[0]), len(ldaxes[1]), len(ldaxes[2]), 1))\n self._ck2004_ldint_photon_grid = np.nan*np.ones((len(ldaxes[0]), len(ldaxes[1]), len(ldaxes[2]), 1))\n\n mu = ldaxes[3]\n Imu = 10**ldtable[:,:,:,:]/10**ldtable[:,:,:,-1:]\n pImu = 10**pldtable[:,:,:,:]/10**pldtable[:,:,:,-1:]\n\n # To compute the fluxes, we need to evaluate \\int_0^1 2pi Imu mu dmu.\n\n for a in range(len(ldaxes[0])):\n for b in range(len(ldaxes[1])):\n for c in range(len(ldaxes[2])):\n\n ldint = 0.0\n pldint = 0.0\n for i in range(len(mu)-1):\n ki = (Imu[a,b,c,i+1]-Imu[a,b,c,i])/(mu[i+1]-mu[i])\n ni = Imu[a,b,c,i]-ki*mu[i]\n ldint += ki/3*(mu[i+1]**3-mu[i]**3) + ni/2*(mu[i+1]**2-mu[i]**2)\n\n pki = (pImu[a,b,c,i+1]-pImu[a,b,c,i])/(mu[i+1]-mu[i])\n pni = pImu[a,b,c,i]-pki*mu[i]\n pldint += pki/3*(mu[i+1]**3-mu[i]**3) + pni/2*(mu[i+1]**2-mu[i]**2)\n\n self._ck2004_ldint_energy_grid[a,b,c] = 2*ldint\n self._ck2004_ldint_photon_grid[a,b,c] = 2*pldint\n\n self.content.append('ck2004_ldint')"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ninterpolate passband - stored table of LD model coefficients.", "response": "def interpolate_ck2004_ldcoeffs(self, Teff=5772., logg=4.43, abun=0.0, atm='ck2004', ld_func='power', photon_weighted=False):\n \"\"\"\n Interpolate the passband-stored table of LD model coefficients.\n \"\"\"\n\n if 'ck2004_ld' not in self.content:\n print('Castelli & 
Kurucz (2004) limb darkening coefficients are not computed yet. Please compute those first.')\n return None\n\n if photon_weighted:\n table = self._ck2004_ld_photon_grid\n else:\n table = self._ck2004_ld_energy_grid\n\n if not hasattr(Teff, '__iter__'):\n req = np.array(((Teff, logg, abun),))\n ld_coeffs = libphoebe.interp(req, self._ck2004_intensity_axes[0:3], table)[0]\n else:\n req = np.vstack((Teff, logg, abun)).T\n ld_coeffs = libphoebe.interp(req, self._ck2004_intensity_axes[0:3], table).T\n\n if ld_func == 'linear':\n return ld_coeffs[0:1]\n elif ld_func == 'logarithmic':\n return ld_coeffs[1:3]\n elif ld_func == 'square_root':\n return ld_coeffs[3:5]\n elif ld_func == 'quadratic':\n return ld_coeffs[5:7]\n elif ld_func == 'power':\n return ld_coeffs[7:11]\n elif ld_func == 'all':\n return ld_coeffs\n else:\n print('ld_func=%s is invalid; please choose from [linear, logarithmic, square_root, quadratic, power, all].')\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nimports WD s atmcof and reads in all Legendre polynomials for the passed passband.", "response": "def import_wd_atmcof(self, plfile, atmfile, wdidx, Nabun=19, Nlogg=11, Npb=25, Nints=4):\n \"\"\"\n Parses WD's atmcof and reads in all Legendre polynomials for the\n given passband.\n\n @plfile: path and filename of atmcofplanck.dat\n @atmfile: path and filename of atmcof.dat\n @wdidx: WD index of the passed passband. This can be automated\n but it's not a high priority.\n @Nabun: number of metallicity nodes in atmcof.dat. For the 2003 version\n the number of nodes is 19.\n @Nlogg: number of logg nodes in atmcof.dat. For the 2003 version\n the number of nodes is 11.\n @Npb: number of passbands in atmcof.dat. 
For the 2003 version\n the number of passbands is 25.\n @Nints: number of temperature intervals (input lines) per entry.\n For the 2003 version the number of lines is 4.\n \"\"\"\n\n # Initialize the external atmcof module if necessary:\n # PERHAPS WD_DATA SHOULD BE GLOBAL??\n self.wd_data = libphoebe.wd_readdata(plfile, atmfile)\n\n # That is all that was necessary for *_extern_planckint() and\n # *_extern_atmx() functions. However, we also want to support\n # circumventing WD subroutines and use WD tables directly. For\n # that, we need to do a bit more work.\n\n # Store the passband index for use in planckint() and atmx():\n self.extern_wd_idx = wdidx\n\n # Break up the table along axes and extract a single passband data:\n atmtab = np.reshape(self.wd_data['atm_table'], (Nabun, Npb, Nlogg, Nints, -1))\n atmtab = atmtab[:, wdidx, :, :, :]\n\n # Finally, reverse the metallicity axis because it is sorted in\n # reverse order in atmcof:\n self.extern_wd_atmx = atmtab[::-1, :, :, :]\n self.content += ['extern_planckint', 'extern_atmx']\n self.atmlist += ['extern_planckint', 'extern_atmx']"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _log10_Inorm_extern_planckint(self, Teff):\n\n log10_Inorm = libphoebe.wd_planckint(Teff, self.extern_wd_idx, self.wd_data[\"planck_table\"])\n\n return log10_Inorm", "response": "Internal function to compute normal passband intensities using the external WD machinery that employs blackbody approximation."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_keyspace(self, keyspace):\n self.keyspace = keyspace\n dfrds = []\n for p in self._protos:\n dfrds.append(p.submitRequest(ManagedThriftRequest(\n 'set_keyspace', keyspace)))\n return defer.gatherResults(dfrds)", "response": "switch all connections to another keyspace"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nsolve an optimization 
problem.", "response": "def solve(self,problem):\n \"\"\"\n Solves optimization problem.\n\n Parameters\n ----------\n problem : Object\n \"\"\"\n \n # Local vars\n norm2 = self.norm2\n norminf = self.norminf\n parameters = self.parameters\n \n # Parameters\n tol = parameters['tol']\n maxiter = parameters['maxiter']\n quiet = parameters['quiet']\n sigma = parameters['sigma']\n eps = parameters['eps']\n eps_cold = parameters['eps_cold']\n\n # Problem\n if not isinstance(problem,QuadProblem):\n problem = cast_problem(problem)\n quad_problem = QuadProblem(None,None,None,None,None,None,problem=problem)\n else:\n quad_problem = problem\n self.problem = problem\n self.quad_problem = quad_problem\n\n # Linsolver\n self.linsolver = new_linsolver(parameters['linsolver'],'symmetric')\n\n # Reset\n self.reset()\n\n # Checks\n if not np.all(problem.l <= problem.u):\n raise OptSolverError_NoInterior(self)\n \n # Data\n self.H = quad_problem.H\n self.g = quad_problem.g\n self.A = quad_problem.A\n self.AT = quad_problem.A.T\n self.b = quad_problem.b\n self.l = quad_problem.l-tol/10.\n self.u = quad_problem.u+tol/10.\n self.n = quad_problem.H.shape[0]\n self.m = quad_problem.A.shape[0]\n self.e = np.ones(self.n)\n self.I = eye(self.n,format='coo')\n self.Onm = coo_matrix((self.n,self.m))\n self.Omm = coo_matrix((self.m,self.m))\n\n # Initial primal\n if quad_problem.x is None:\n self.x = (self.u + self.l)/2.\n else:\n self.x = np.maximum(np.minimum(quad_problem.x,problem.u),problem.l)\n\n # Initial duals\n if quad_problem.lam is None:\n self.lam = np.zeros(self.m)\n else:\n self.lam = quad_problem.lam.copy()\n if quad_problem.mu is None:\n self.mu = np.ones(self.x.size)*eps_cold\n else:\n self.mu = np.maximum(quad_problem.mu,eps)\n if quad_problem.pi is None:\n self.pi = np.ones(self.x.size)*eps_cold\n else:\n self.pi = np.maximum(quad_problem.pi,eps)\n\n # Check interior\n try:\n assert(np.all(self.l < self.x)) \n assert(np.all(self.x < self.u))\n assert(np.all(self.mu > 0))\n 
assert(np.all(self.pi > 0))\n except AssertionError:\n raise OptSolverError_Infeasibility(self)\n\n # Init vector\n self.y = np.hstack((self.x,self.lam,self.mu,self.pi))\n\n # Complementarity measures\n self.eta_mu = np.dot(self.mu,self.u-self.x)/self.x.size\n self.eta_pi = np.dot(self.pi,self.x-self.l)/self.x.size\n\n # Objective scaling\n fdata = self.func(self.y)\n self.obj_sca = np.maximum(norminf(self.g+self.H*self.x)/10.,1.)\n self.H = self.H/self.obj_sca\n self.g = self.g/self.obj_sca\n fdata = self.func(self.y)\n\n # Header\n if not quiet:\n print('\\nSolver: IQP')\n print('-----------')\n \n # Outer\n s = 0.\n self.k = 0\n while True:\n\n # Complementarity measures\n self.eta_mu = np.dot(self.mu,self.u-self.x)/self.x.size\n self.eta_pi = np.dot(self.pi,self.x-self.l)/self.x.size\n \n # Init eval\n fdata = self.func(self.y)\n fmax = norminf(fdata.f)\n gmax = norminf(fdata.GradF)\n \n # Done\n if fmax < tol and sigma*np.maximum(self.eta_mu,self.eta_pi) < tol:\n self.set_status(self.STATUS_SOLVED)\n self.set_error_msg('')\n return\n\n # Target\n tau = sigma*norminf(fdata.GradF)\n \n # Header\n if not quiet:\n if self.k > 0:\n print('')\n print('{0:^3s}'.format('iter'), end=' ')\n print('{0:^9s}'.format('phi'), end=' ')\n print('{0:^9s}'.format('fmax'), end=' ')\n print('{0:^9s}'.format('gmax'), end=' ')\n print('{0:^8s}'.format('cu'), end=' ')\n print('{0:^8s}'.format('cl'), end=' ')\n print('{0:^8s}'.format('s'))\n \n # Inner\n while True:\n \n # Eval\n fdata = self.func(self.y)\n fmax = norminf(fdata.f)\n gmax = norminf(fdata.GradF)\n compu = norminf(self.mu*(self.u-self.x))\n compl = norminf(self.pi*(self.x-self.l))\n phi = (0.5*np.dot(self.x,self.H*self.x)+np.dot(self.g,self.x))*self.obj_sca\n \n # Show progress\n if not quiet:\n print('{0:^3d}'.format(self.k), end=' ')\n print('{0:^9.2e}'.format(phi), end=' ')\n print('{0:^9.2e}'.format(fmax), end=' ')\n print('{0:^9.2e}'.format(gmax), end=' ')\n print('{0:^8.1e}'.format(compu), end=' ')\n 
print('{0:^8.1e}'.format(compl), end=' ')\n print('{0:^8.1e}'.format(s))\n \n # Done\n if gmax < tau:\n break\n\n # Done\n if fmax < tol and np.maximum(compu,compl) < tol:\n break\n\n # Maxiters\n if self.k >= maxiter:\n raise OptSolverError_MaxIters(self)\n \n # Search direction\n ux = self.u-self.x\n xl = self.x-self.l\n D1 = spdiags(self.mu/ux,0,self.n,self.n,format='coo')\n D2 = spdiags(self.pi/xl,0,self.n,self.n,format='coo')\n fbar = np.hstack((-fdata.rd+fdata.ru/ux-fdata.rl/xl,fdata.rp))\n if self.A.shape[0] > 0:\n Jbar = bmat([[tril(self.H)+D1+D2,None],\n [-self.A,self.Omm]],format='coo')\n else:\n Jbar = bmat([[tril(self.H)+D1+D2]],\n format='coo')\n try:\n if not self.linsolver.is_analyzed():\n self.linsolver.analyze(Jbar)\n pbar = self.linsolver.factorize_and_solve(Jbar,fbar)\n except RuntimeError:\n raise OptSolverError_BadLinSystem(self)\n px = pbar[:self.n]\n pmu = (-fdata.ru + self.mu*px)/ux\n ppi = (-fdata.rl - self.pi*px)/xl\n p = np.hstack((pbar,pmu,ppi))\n\n # Steplength bounds\n indices = px > 0\n s1 = np.min(np.hstack(((1.-eps)*(self.u-self.x)[indices]/px[indices],np.inf)))\n indices = px < 0\n s2 = np.min(np.hstack(((eps-1.)*(self.x-self.l)[indices]/px[indices],np.inf)))\n indices = pmu < 0\n s3 = np.min(np.hstack(((eps-1.)*self.mu[indices]/pmu[indices],np.inf)))\n indices = ppi < 0\n s4 = np.min(np.hstack(((eps-1.)*self.pi[indices]/ppi[indices],np.inf)))\n smax = np.min([s1,s2,s3,s4])\n \n # Line search\n s,fdata = self.line_search(self.y,p,fdata.F,fdata.GradF,self.func,smax)\n\n # Update x\n self.y += s*p\n self.k += 1\n self.x,self.lam,self.mu,self.pi = self.extract_components(self.y)\n\n # Check\n try:\n assert(np.all(self.x < self.u))\n assert(np.all(self.x > self.l))\n assert(np.all(self.mu > 0))\n assert(np.all(self.pi > 0))\n except AssertionError:\n raise OptSolverError_Infeasibility(self)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef factorize(self, A):\n \n A = csc_matrix(A)\n if 
self.prop == self.SYMMETRIC:\n A = (A + A.T) - triu(A)\n\n self.lu = self.umfpack.splu(A)", "response": "Factorizes A.\n\n Parameters\n ----------\n A : matrix\n For symmetric systems, should contain only lower diagonal part."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nretrying this factory s connection.", "response": "def retry(self):\n \"\"\"\n Retry this factory's connection. It is assumed that a previous\n connection was attempted and failed- either before or after a\n successful connection.\n \"\"\"\n\n if self.connector is None:\n raise ValueError(\"No connector to retry\")\n if self.service is None:\n return\n self.connector.connect()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nprepares a connection to the Cassandra server.", "response": "def prep_connection(self, creds=None, keyspace=None, node_auto_discovery=True):\n \"\"\"\n Do login and set_keyspace tasks as necessary, and also check this\n node's idea of the Cassandra ring. 
Expects that our connection is\n alive.\n\n Return a Deferred that will fire with the ring information, or be\n errbacked if something goes wrong.\n \"\"\"\n\n d = defer.succeed(None)\n\n if creds is not None:\n d.addCallback(lambda _: self.my_login(creds))\n if keyspace is not None:\n d.addCallback(lambda _: self.my_set_keyspace(keyspace))\n if node_auto_discovery:\n d.addCallback(lambda _: self.my_describe_ring(keyspace))\n\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef my_pick_non_system_keyspace(self):\n d = self.my_describe_keyspaces()\n\n def pick_non_system(klist):\n for k in klist:\n if k.name not in SYSTEM_KEYSPACES:\n return k.name\n err = NoKeyspacesAvailable(\"Can't gather information about the \"\n \"Cassandra ring; no non-system \"\n \"keyspaces available\")\n warn(err)\n raise err\n d.addCallback(pick_non_system)\n return d", "response": "Return a dictionary of keyspaces that are not system."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef finish_and_die(self):\n self.logstate('finish_and_die')\n self.stop_working_on_queue()\n if self.jobphase != 'pending_request':\n self.stopFactory()", "response": "Stop working on the queue and die if there is no pending request."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_connection_score(self, node):\n\n # TODO: this should ideally take node history into account\n\n conntime = node.seconds_until_connect_ok()\n if conntime > 0:\n self.log(\"not considering %r for new connection; has %r left on \"\n \"connect blackout\" % (node, conntime))\n return -conntime\n numconns = self.num_connectors_to(node)\n if numconns >= self.max_connections_per_node:\n return float('-Inf')\n return sys.maxint - numconns", "response": "Returns a numeric value that determines this node s score for adding a new connection to 
a node."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nadjust the target pool size.", "response": "def adjustPoolSize(self, newsize):\n \"\"\"\n Change the target pool size. If we have too many connections already,\n ask some to finish what they're doing and die (preferring to kill\n connections to the node that already has the most connections). If\n we have too few, create more.\n \"\"\"\n\n if newsize < 0:\n raise ValueError(\"pool size must be nonnegative\")\n self.log(\"Adjust pool size from %d to %d.\" % (self.target_pool_size, newsize))\n self.target_pool_size = newsize\n self.kill_excess_pending_conns()\n self.kill_excess_conns()\n self.fill_pool()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef fill_pool(self):\n\n time_since_last_called = self.fill_pool_throttle\n if self.fill_pool_last_called is not None:\n time_since_last_called = time() - self.fill_pool_last_called\n need = self.target_pool_size - self.num_connectors()\n if need <= 0 or (self.throttle_timer is not None and self.throttle_timer.active()):\n return\n elif time_since_last_called < self.fill_pool_throttle:\n self.log(\"Filling pool too quickly, calling again in %.1f seconds\" % self.fill_pool_throttle)\n self._set_fill_pool_timer()\n return\n else:\n try:\n for num, node in izip(xrange(need), self.choose_nodes_to_connect()):\n self.make_conn(node)\n self.fill_pool_last_called = time()\n\n except NoNodesAvailable, e:\n waittime = e.args[0]\n pending_requests = len(self.request_queue.pending)\n if self.on_insufficient_nodes:\n self.on_insufficient_nodes(self.num_active_conns(),\n self.target_pool_size,\n pending_requests,\n waittime if waittime != float('Inf') else None)\n self.schedule_future_fill_pool(e.args[0])\n if self.num_connectors() == 0 and pending_requests > 0:\n if self.on_insufficient_conns:\n self.on_insufficient_conns(self.num_connectors(),\n pending_requests)", "response": "Fill the pool with 
connections as necessary to meet the target pool size."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef resubmit(self, req, keyspace, req_d, retries):\n self.log('resubmitting %s request' % (req.method,))\n self.pushRequest_really(req, keyspace, req_d, retries)\n try:\n self.request_queue.pending.remove((req, keyspace, req_d, retries))\n except ValueError:\n # it's already been scooped up\n pass\n else:\n self.request_queue.pending.insert(0, (req, keyspace, req_d, retries))", "response": "Push a request to the front of the queue and add it to the pending list."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef set_keyspace(self, keyspace):\n\n # push a real set_keyspace on some (any) connection; the idea is that\n # if it succeeds there, it is likely to succeed everywhere, and vice\n # versa. don't bother waiting for all connections to change- some of\n # them may be doing long blocking tasks and by the time they're done,\n # the keyspace might be changed again anyway\n d = self.pushRequest(ManagedThriftRequest('set_keyspace', keyspace))\n\n def store_keyspace(_):\n self.keyspace = keyspace\n d.addCallback(store_keyspace)\n return d", "response": "Set the keyspace for this cluster and return a Deferred that will fire once the connection is made to that keyspace."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef keyspaceConnection(self, keyspace, consistency=ConsistencyLevel.ONE):\n conn = CassandraKeyspaceConnection(self, keyspace)\n return CassandraClient(conn, consistency=consistency)", "response": "Returns a CassandraClient instance which uses this CassandraClusterPool\n by way of a CassandraKeyspaceConnection."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nsplit a string by commas but allow escaped commas.", "response": "def 
split_by_commas(maybe_s: str) -> Tuple[str, ...]:\n \"\"\"Split a string by commas, but allow escaped commas.\n - If maybe_s is falsey, returns an empty tuple\n - Ignore backslashed commas\n \"\"\"\n if not maybe_s:\n return ()\n parts: List[str] = []\n split_by_backslash = maybe_s.split(r'\\,')\n for split_by_backslash_part in split_by_backslash:\n splitby_comma = split_by_backslash_part.split(',')\n if parts:\n parts[-1] += ',' + splitby_comma[0]\n else:\n parts.append(splitby_comma[0])\n parts.extend(splitby_comma[1:])\n return tuple(parts)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef login(self, auth_request):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_login(auth_request)\n return d", "response": "Send login request and return a Deferred that resolves with the sequence id."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets the keyspace for the next iteration", "response": "def set_keyspace(self, keyspace):\n \"\"\"\n Parameters:\n - keyspace\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_set_keyspace(keyspace)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get(self, key, column_path, consistency_level):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_get(key, column_path, consistency_level)\n return d", "response": "Get the Column or SuperColumn at the given column_path."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_slice(self, key, column_parent, predicate, consistency_level):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_get_slice(key, column_parent, predicate, consistency_level)\n return d", "response": "Get the list of columns contained by column_parent that match the given 
SlicePredicate."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_count(self, key, column_parent, predicate, consistency_level):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_get_count(key, column_parent, predicate, consistency_level)\n return d", "response": "returns the number of columns matching predicate for a particular key column_parent and optionally supercolumn"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nperforms a get_slice for column_parent and predicate for the given keys in parallel. Parameters: - keys - column_parent - predicate - consistency_level", "response": "def multiget_slice(self, keys, column_parent, predicate, consistency_level):\n \"\"\"\n Performs a get_slice for column_parent and predicate for the given keys in parallel.\n\n Parameters:\n - keys\n - column_parent\n - predicate\n - consistency_level\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_multiget_slice(keys, column_parent, predicate, consistency_level)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef multiget_count(self, keys, column_parent, predicate, consistency_level):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_multiget_count(keys, column_parent, predicate, consistency_level)\n return d", "response": "This method is used to get the count of the given list of keys in a given column parent and predicate."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a subset of columns for a contiguous range of keys", "response": "def get_range_slices(self, column_parent, predicate, range, consistency_level):\n \"\"\"\n returns a subset of columns for a contiguous range of keys.\n\n Parameters:\n - column_parent\n - predicate\n - range\n - 
consistency_level\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_get_range_slices(column_parent, predicate, range, consistency_level)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a range of columns wrapping to the next rows if necessary to collect max_results", "response": "def get_paged_slice(self, column_family, range, start_column, consistency_level):\n \"\"\"\n returns a range of columns, wrapping to the next rows if necessary to collect max_results.\n\n Parameters:\n - column_family\n - range\n - start_column\n - consistency_level\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_get_paged_slice(column_family, range, start_column, consistency_level)\n return d"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn the subset of columns specified in SlicePredicate for the rows matching the IndexClause", "response": "def get_indexed_slices(self, column_parent, index_clause, column_predicate, consistency_level):\n \"\"\"\n Returns the subset of columns specified in SlicePredicate for the rows matching the IndexClause\n @deprecated use get_range_slices instead with range.row_filter specified\n\n Parameters:\n - column_parent\n - index_clause\n - column_predicate\n - consistency_level\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_get_indexed_slices(column_parent, index_clause, column_predicate, consistency_level)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef insert(self, key, column_parent, column, consistency_level):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_insert(key, column_parent, column, consistency_level)\n return d", "response": "Insert a Column at the given column_parent. column_family and optional column_parent. 
super_column. Returns a Deferred that fires when the insert completes."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a new value to the sequence number.", "response": "def add(self, key, column_parent, column, consistency_level):\n \"\"\"\n Increment or decrement a counter.\n\n Parameters:\n - key\n - column_parent\n - column\n - consistency_level\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_add(key, column_parent, column, consistency_level)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef remove(self, key, column_path, timestamp, consistency_level):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_remove(key, column_path, timestamp, consistency_level)\n return d", "response": "Remove data from the specified key at the given timestamp."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nremoving a counter at the specified location.", "response": "def remove_counter(self, key, path, consistency_level):\n \"\"\"\n Remove a counter at the specified location.\n Note that counters have limited support for deletes: if you remove a counter, you must wait to issue any following update\n until the delete has reached all the nodes and all of them have been fully compacted.\n\n Parameters:\n - key\n - path\n - consistency_level\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_remove_counter(key, path, consistency_level)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nmutates many columns or super columns for many row keys.", "response": "def batch_mutate(self, mutation_map, consistency_level):\n \"\"\"\n Mutate many columns or super columns for many row keys. 
See also: Mutation.\n\n mutation_map maps key to column family to a list of Mutation objects to take place at that scope.\n *\n\n Parameters:\n - mutation_map\n - consistency_level\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_batch_mutate(mutation_map, consistency_level)\n return d"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ntruncate the column family with the given name.", "response": "def truncate(self, cfname):\n \"\"\"\n Truncate will mark and entire column family as deleted.\n From the user's perspective a successful call to truncate will result complete data deletion from cfname.\n Internally, however, disk space will not be immediatily released, as with all deletes in cassandra, this one\n only marks the data as deleted.\n The operation succeeds only if all hosts in the cluster at available and will throw an UnavailableException if\n some hosts are down.\n\n Parameters:\n - cfname\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_truncate(cfname)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef describe_schema_versions(self, ):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_schema_versions()\n return d", "response": "returns a list of hosts that are connected to each schema version present in the cluster"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef describe_keyspaces(self, ):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_keyspaces()\n return d", "response": "returns a list of the defined keyspaces in this cluster"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef describe_cluster_name(self, ):\n self._seqid += 1\n d = 
self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_cluster_name()\n return d", "response": "get the cluster name"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget the thrift api version", "response": "def describe_version(self, ):\n \"\"\"\n get the thrift api version\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_version()\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets the token ring", "response": "def describe_ring(self, keyspace):\n \"\"\"\n get the token ring: a map of ranges to host addresses,\n represented as a set of TokenRange instead of a map from range\n to list of endpoints, because you can't use Thrift structs as\n map keys:\n https://issues.apache.org/jira/browse/THRIFT-162\n\n for the same reason, we can't return a set here, even though\n order is neither important nor predictable.\n\n Parameters:\n - keyspace\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_ring(keyspace)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef describe_token_map(self, ):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_token_map()\n return d", "response": "get the mapping between token and node"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef describe_partitioner(self, ):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_partitioner()\n return d", "response": "returns the partitioner used by this cluster"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the snitch used by this cluster", "response": "def describe_snitch(self, ):\n \"\"\"\n returns the snitch used by this cluster\n \"\"\"\n self._seqid += 1\n d = 
self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_snitch()\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef describe_keyspace(self, keyspace):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_keyspace(keyspace)\n return d", "response": "describe specified keyspace\n\n Parameters:\n - keyspace"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef describe_splits(self, cfName, start_token, end_token, keys_per_split):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_splits(cfName, start_token, end_token, keys_per_split)\n return d", "response": "describe splits of a CF"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the UUID for the next trace query in this connection", "response": "def trace_next_query(self, ):\n \"\"\"\n Enables tracing for the next query in this connection and returns the UUID for that trace session\n The next query will be traced idependently of trace probability and the returned UUID can be used to query the trace keyspace\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_trace_next_query()\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef describe_splits_ex(self, cfName, start_token, end_token, keys_per_split):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_describe_splits_ex(cfName, start_token, end_token, keys_per_split)\n return d", "response": "This method returns a Deferred that fires when the list of keys_per_split tokens are found in the split list."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nadd a column family to the current schema id. 
Returns the new schema id.", "response": "def system_add_column_family(self, cf_def):\n \"\"\"\n adds a column family. returns the new schema id.\n\n Parameters:\n - cf_def\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_system_add_column_family(cf_def)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndrops a column family. Returns the new schema id.", "response": "def system_drop_column_family(self, column_family):\n \"\"\"\n drops a column family. returns the new schema id.\n\n Parameters:\n - column_family\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_system_drop_column_family(column_family)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a keyspace and any column families that are part of it. returns the new schema id.", "response": "def system_add_keyspace(self, ks_def):\n \"\"\"\n adds a keyspace and any column families that are part of it. returns the new schema id.\n\n Parameters:\n - ks_def\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_system_add_keyspace(ks_def)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ndrop a keyspace and any column families that are part of it", "response": "def system_drop_keyspace(self, keyspace):\n \"\"\"\n drops a keyspace and any column families that are part of it. returns the new schema id.\n\n Parameters:\n - keyspace\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_system_drop_keyspace(keyspace)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates properties of a keyspace returns the new schema id", "response": "def system_update_keyspace(self, ks_def):\n \"\"\"\n updates properties of a keyspace. 
returns the new schema id.\n\n Parameters:\n - ks_def\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_system_update_keyspace(ks_def)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef system_update_column_family(self, cf_def):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_system_update_column_family(cf_def)\n return d", "response": "updates properties of a column family returns the new schema id."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nexecuting a CQL query and returns a Deferred that resolves with the result.", "response": "def execute_cql_query(self, query, compression):\n \"\"\"\n Executes a CQL (Cassandra Query Language) statement and returns a\n CqlResult containing the results.\n\n Parameters:\n - query\n - compression\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_execute_cql_query(query, compression)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef execute_cql3_query(self, query, compression, consistency):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_execute_cql3_query(query, compression, consistency)\n return d", "response": "Execute a CQL query and return a Deferred that resolves when the result is received."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nprepare a CQL query by compiling and returning a CQL statement", "response": "def prepare_cql_query(self, query, compression):\n \"\"\"\n Prepare a CQL (Cassandra Query Language) statement by compiling and returning\n - the type of CQL statement\n - an id token of the compiled CQL stored on the server side.\n - a count of the discovered bound markers in the statement\n\n Parameters:\n - query\n - compression\n \"\"\"\n self._seqid += 1\n d = 
self._reqs[self._seqid] = defer.Deferred()\n self.send_prepare_cql_query(query, compression)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef prepare_cql3_query(self, query, compression):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_prepare_cql3_query(query, compression)\n return d", "response": "Prepares a CQL query and returns a Deferred that resolves when the query completes."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nexecute a prepared CQL query by passing an id token and a list of variables to bind and returns a CqlResult containing the results.", "response": "def execute_prepared_cql_query(self, itemId, values):\n \"\"\"\n Executes a prepared CQL (Cassandra Query Language) statement by passing an id token and a list of variables\n to bind and returns a CqlResult containing the results.\n\n Parameters:\n - itemId\n - values\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_execute_prepared_cql_query(itemId, values)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef execute_prepared_cql3_query(self, itemId, values, consistency):\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_execute_prepared_cql3_query(itemId, values, consistency)\n return d", "response": "Execute a prepared CQL query."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets the CQL version of the current node.", "response": "def set_cql_version(self, version):\n \"\"\"\n @deprecated This is now a no-op. 
Please use the CQL3 specific methods instead.\n\n Parameters:\n - version\n \"\"\"\n self._seqid += 1\n d = self._reqs[self._seqid] = defer.Deferred()\n self.send_set_cql_version(version)\n return d"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef new_linsolver(name,prop):\n \n if name == 'mumps':\n return LinSolverMUMPS(prop)\n elif name == 'superlu':\n return LinSolverSUPERLU(prop)\n elif name == 'umfpack':\n return LinSolverUMFPACK(prop)\n elif name == 'default':\n try:\n return new_linsolver('mumps',prop)\n except ImportError:\n return new_linsolver('superlu',prop) \n else:\n raise ValueError('invalid linear solver name')", "response": "Returns a new linear solver."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncast a problem object with known interface as OptProblem.", "response": "def cast_problem(problem):\n \"\"\"\n Casts problem object with known interface as OptProblem.\n\n Parameters\n ----------\n problem : Object\n \"\"\"\n \n # Optproblem\n if isinstance(problem,OptProblem):\n return problem\n \n # Other\n else:\n \n # Type Base\n if (not hasattr(problem,'G') or \n (problem.G.shape[0] == problem.G.shape[1] and\n problem.G.shape[0] == problem.G.nnz and\n np.all(problem.G.row == problem.G.col) and \n np.all(problem.G.data == 1.))):\n return create_problem_from_type_base(problem)\n\n # Type A\n else:\n return create_problem_from_type_A(problem)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates OptProblem from type - base problem.", "response": "def create_problem_from_type_base(problem):\n \"\"\"\n Creates OptProblem from type-base problem.\n\n Parameters\n ----------\n problem : Object\n \"\"\"\n\n p = OptProblem()\n\n # Init attributes\n p.phi = problem.phi\n p.gphi = problem.gphi \n p.Hphi = problem.Hphi \n p.A = problem.A\n p.b = problem.b\n p.f = problem.f\n p.J = problem.J\n p.H_combined = problem.H_combined\n p.u = 
problem.u\n p.l = problem.l\n p.x = problem.x\n \n p.P = None\n p.lam = None\n p.nu = None\n p.mu = None\n p.pi = None\n \n p.wrapped_problem = problem\n \n # Methods\n def eval(cls,x):\n cls.wrapped_problem.eval(x)\n cls.phi = cls.wrapped_problem.phi\n cls.gphi = cls.wrapped_problem.gphi\n cls.Hphi = cls.wrapped_problem.Hphi\n cls.f = cls.wrapped_problem.f\n cls.J = cls.wrapped_problem.J\n \n def combine_H(cls,coeff,ensure_psd=False):\n cls.wrapped_problem.combine_H(coeff,ensure_psd)\n cls.H_combined = cls.wrapped_problem.H_combined\n \n p.eval = MethodType(eval,p)\n p.combine_H = MethodType(combine_H,p)\n\n # Return\n return p"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create_problem_from_type_A(problem):\n \n p = OptProblem()\n \n nx = problem.get_num_primal_variables()\n nz = problem.G.shape[0]\n\n p.phi = problem.phi\n p.gphi = np.hstack((problem.gphi,np.zeros(nz)))\n p.Hphi = coo_matrix((problem.Hphi.data,(problem.Hphi.row,problem.Hphi.col)),shape=(nx+nz,nx+nz)) \n p.A = bmat([[problem.A,None],[problem.G,-eye(nz)]],format='coo')\n p.b = np.hstack((problem.b,np.zeros(nz)))\n p.f = problem.f\n p.J = coo_matrix((problem.J.data,(problem.J.row,problem.J.col)),shape=(problem.J.shape[0],nx+nz))\n p.H_combined = coo_matrix((problem.H_combined.data,(problem.H_combined.row,problem.H_combined.col)),shape=(nx+nz,nx+nz))\n p.u = np.hstack((problem.get_upper_limits(),problem.u)) \n p.l = np.hstack((problem.get_lower_limits(),problem.l))\n p.x = np.hstack((problem.x,np.zeros(nz)))\n\n p.P = None\n p.lam = None\n p.nu = None\n p.mu = None\n p.pi = None\n \n p.wrapped_problem = problem\n\n def eval(cls,xz):\n x = xz[:nx]\n z = xz[nx:]\n prob = cls.wrapped_problem\n prob.eval(x)\n cls.phi = prob.phi\n cls.gphi = np.hstack((prob.gphi,np.zeros(nz)))\n cls.Hphi = coo_matrix((prob.Hphi.data,(prob.Hphi.row,prob.Hphi.col)),shape=(nx+nz,nx+nz))\n cls.f = prob.f\n cls.J = 
coo_matrix((prob.J.data,(prob.J.row,prob.J.col)),shape=(prob.J.shape[0],nx+nz))\n\n def combine_H(cls,coeff,ensure_psd=False):\n prob = cls.wrapped_problem\n prob.combine_H(coeff,ensure_psd=ensure_psd)\n cls.H_combined = coo_matrix((prob.H_combined.data,(prob.H_combined.row,prob.H_combined.col)),shape=(nx+nz,nx+nz))\n \n def recover_primal_variables(cls,x):\n return x[:nx]\n \n def recover_dual_variables(cls,lam,nu,mu,pi):\n prob = cls.wrapped_problem\n return lam[:prob.A.shape[0]],nu,mu[nx:],pi[nx:]\n \n p.eval = MethodType(eval,p)\n p.combine_H = MethodType(combine_H,p)\n p.recover_primal_variables = MethodType(recover_primal_variables,p)\n p.recover_dual_variables = MethodType(recover_dual_variables,p)\n\n # Return\n return p", "response": "Creates OptProblem from type - A problem."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nrecovers dual variables for original problem.", "response": "def recover_dual_variables(self,lam,nu,mu,pi):\n \"\"\"\n Recovers dual variables for original problem.\n\n Parameters\n ----------\n lam : ndarray\n nu : ndarray\n mu : ndarray\n pi : ndarray\n \"\"\"\n\n return lam,nu,mu,pi"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_num_primal_variables(self):\n\n if self.x is not None:\n return self.x.size\n if self.gphi is not None:\n return self.gphi.size\n if self.Hphi is not None:\n return self.Hphi.shape[0]\n if self.A is not None:\n return self.A.shape[1]\n if self.J is not None:\n return self.J.shape[1]\n if self.u is not None:\n return self.u.size\n if self.l is not None:\n return self.l.size\n return 0", "response": "Gets the number of primal variables in the current object."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting the results of the .", "response": "def get_results(self):\n \"\"\"\n Gets results.\n\n Returns\n -------\n results : dictionary\n \"\"\"\n\n return 
{'status': self.status,\n 'error_msg': self.error_msg,\n 'k': self.k,\n 'x': self.x,\n 'lam': self.lam*self.obj_sca,\n 'nu': self.nu*self.obj_sca,\n 'mu': self.mu*self.obj_sca,\n 'pi': self.pi*self.obj_sca}"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nfinding steplength along search direction p that satisfies the strong Wolfe conditions. Parameters ---------- x : current point (ndarray) p : search direction (ndarray) F : function value at `x` (float) GradF : gradient of function at `x` (ndarray) func : function of `x` that returns function object with attributes `F` and `GradF` (function) smax : maximum allowed steplength (float) Returns ------- s : stephlength that satisfies the Wolfe conditions (float).", "response": "def line_search(self,x,p,F,GradF,func,smax=np.inf,maxiter=40):\n \"\"\"\n Finds steplength along search direction p that \n satisfies the strong Wolfe conditions.\n \n Parameters\n ----------\n x : current point (ndarray)\n p : search direction (ndarray)\n F : function value at `x` (float)\n GradF : gradient of function at `x` (ndarray)\n func : function of `x` that returns function object with attributes `F` and `GradF` (function)\n smax : maximum allowed steplength (float)\n \n Returns\n -------\n s : stephlength that satisfies the Wolfe conditions (float). \n \"\"\"\n \n # Parameters of line search\n c1 = 1e-4\n c2 = 5e-1\n\n # Initialize lower bound, upper bound and step\n l = 0.\n if 1. 
< smax:\n s = 1.\n else:\n s = smax\n u = np.NaN \n\n phi = F\n dphi = np.dot(GradF,p)\n \n # Check that p is descent direction\n if dphi >= 0:\n raise OptSolverError_BadSearchDir(self)\n\n # Bisection\n for i in range(0,maxiter):\n \n xsp = x+s*p\n \n fdata = func(xsp)\n phis = fdata.F\n dphis = np.dot(fdata.GradF,p)\n \n if phis > phi + c1*s*dphi:\n u = s\n elif dphis > 0 and dphis > -c2*dphi:\n u = s\n elif dphis < 0 and -dphis > -c2*dphi:\n l = s\n if s >= smax:\n return s,fdata\n else:\n return s,fdata\n\n if np.isnan(u):\n s = np.min([2.*s,smax])\n else:\n s = (l + u)/2.\n\n raise OptSolverError_LineSearch(self)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef reset(self):\n \n self.k = 0.\n self.x = np.zeros(0)\n self.lam = np.zeros(0)\n self.nu = np.zeros(0)\n self.mu = np.zeros(0)\n self.pi = np.zeros(0)\n self.status = self.STATUS_UNKNOWN\n self.error_msg = ''\n self.obj_sca = 1.", "response": "Resets the internal state of the object to its initial state."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset solver parameters. 
Parameters ---------- parameters : dict", "response": "def set_parameters(self,parameters):\n \"\"\"\n Sets solver parameters.\n\n Parameters\n ----------\n parameters : dict\n \"\"\"\n \n for key,value in list(parameters.items()):\n if key in self.parameters:\n self.parameters[key] = value"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef factorize_and_solve(self, A, b):\n\n self.factorize(A)\n\n return self.solve(b)", "response": "Factorizes A and solves Ax = b."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsolves the objective of the object in the specified problem.", "response": "def solve(self,problem):\n \"\"\"\n Solves optimization problem.\n \n Parameters\n ----------\n problem : Object\n \"\"\"\n \n # Local vars\n norm2 = self.norm2\n norminf = self.norminf\n parameters = self.parameters\n \n # Parameters\n feastol = parameters['feastol']\n optol = parameters['optol']\n maxiter = parameters['maxiter']\n quiet = parameters['quiet']\n sigma = parameters['sigma']\n eps = parameters['eps']\n ls_maxiter = parameters['line_search_maxiter']\n\n # Problem\n problem = cast_problem(problem)\n self.problem = problem\n \n # Linsolver\n self.linsolver = new_linsolver(parameters['linsolver'],'symmetric')\n\n # Reset\n self.reset()\n\n # Checks\n if not np.all(problem.l <= problem.u):\n raise OptSolverError_NoInterior(self)\n\n # Constants\n self.A = problem.A\n self.AT = problem.A.T\n self.b = problem.b\n self.u = problem.u+feastol/10.\n self.l = problem.l-feastol/10.\n self.n = problem.get_num_primal_variables()\n self.m1 = problem.get_num_linear_equality_constraints()\n self.m2 = problem.get_num_nonlinear_equality_constraints()\n self.e = np.ones(self.n)\n self.I = eye(self.n,format='coo')\n self.Omm1 = coo_matrix((self.m1,self.m1))\n self.Omm2 = coo_matrix((self.m2,self.m2))\n\n # Initial primal\n if problem.x is None:\n self.x = (self.u + self.l)/2.\n else:\n self.x = 
np.maximum(np.minimum(problem.x,problem.u),problem.l)\n\n # Initial duals\n if problem.lam is None:\n self.lam = np.zeros(problem.get_num_linear_equality_constraints())\n else:\n self.lam = problem.lam.copy()\n if problem.nu is None:\n self.nu = np.zeros(problem.get_num_nonlinear_equality_constraints())\n else:\n self.nu = problem.nu.copy()\n self.mu = np.minimum(1./(self.u-self.x), 1.)\n self.pi = np.minimum(1./(self.x-self.l), 1.)\n\n # Init vector\n self.y = np.hstack((self.x,self.lam,self.nu,self.mu,self.pi))\n\n # Average violation of complementarity slackness\n self.eta_mu = (np.dot(self.mu,self.u-self.x)/self.x.size) if self.x.size else 0.\n self.eta_pi = (np.dot(self.pi,self.x-self.l)/self.x.size) if self.x.size else 0.\n\n # Objective scaling\n fdata = self.func(self.y)\n self.obj_sca = np.maximum(norminf(problem.gphi)/10.,1.)\n fdata = self.func(self.y)\n\n # Header\n if not quiet:\n print('\\nSolver: inlp')\n print('------------')\n \n # Outer\n s = 0.\n self.k = 0\n while True:\n\n # Average violation of complementarity slackness\n self.eta_mu = (np.dot(self.mu,self.u-self.x)/self.x.size) if self.x.size else 0.\n self.eta_pi = (np.dot(self.pi,self.x-self.l)/self.x.size) if self.x.size else 0.\n \n # Init eval\n fdata = self.func(self.y)\n\n # Target\n tau = sigma*norminf(fdata.GradF)\n \n # Header\n if not quiet:\n if self.k > 0:\n print('')\n print('{0:^3s}'.format('iter'),end=' ')\n print('{0:^9s}'.format('phi'),end=' ')\n print('{0:^9s}'.format('pres'),end=' ')\n print('{0:^9s}'.format('dres'),end=' ')\n print('{0:^9s}'.format('gmax'),end=' ')\n print('{0:^8s}'.format('cu'),end=' ')\n print('{0:^8s}'.format('cl'),end=' ')\n print('{0:^8s}'.format('alpha'))\n \n # Inner\n while True:\n \n # Eval\n fdata = self.func(self.y)\n pres = norminf(np.hstack((fdata.rp1,fdata.rp2)))\n dres = norminf(np.hstack((fdata.rd,fdata.ru,fdata.rl)))\n gmax = norminf(fdata.GradF) \n compu = norminf(self.mu*(self.u-self.x))\n compl = norminf(self.pi*(self.x-self.l))\n phi 
= problem.phi\n \n # Show progress\n if not quiet:\n print('{0:^3d}'.format(self.k),end=' ')\n print('{0:^9.2e}'.format(phi),end=' ')\n print('{0:^9.2e}'.format(pres),end=' ')\n print('{0:^9.2e}'.format(dres),end=' ')\n print('{0:^9.2e}'.format(gmax),end=' ')\n print('{0:^8.1e}'.format(compu),end=' ')\n print('{0:^8.1e}'.format(compl),end=' ')\n print('{0:^8.1e}'.format(s))\n\n # Done\n if self.k > 0 and pres < feastol and dres < optol and sigma*np.maximum(self.eta_mu,self.eta_pi) < optol:\n self.set_status(self.STATUS_SOLVED)\n self.set_error_msg('')\n return\n \n # Done\n if gmax < tau: \n break\n\n # Done \n if pres < feastol and dres < optol and np.maximum(compu,compl) < optol:\n break\n\n # Maxiters\n if self.k >= maxiter:\n raise OptSolverError_MaxIters(self)\n \n # Search direction\n ux = self.u-self.x\n xl = self.x-self.l\n D1 = spdiags(self.mu/ux,0,self.n,self.n,format='coo')\n D2 = spdiags(self.pi/xl,0,self.n,self.n,format='coo')\n fbar = np.hstack((-fdata.rd+fdata.ru/ux-fdata.rl/xl,fdata.rp1,fdata.rp2))\n Hbar = coo_matrix((np.concatenate((problem.Hphi.data/self.obj_sca,\n problem.H_combined.data,\n D1.data,\n D2.data)),\n (np.concatenate((problem.Hphi.row,\n problem.H_combined.row,\n D1.row,\n D2.row)),\n np.concatenate((problem.Hphi.col,\n problem.H_combined.col,\n D1.col,\n D2.col)))))\n Jbar = bmat([[Hbar,None,None],\n [-self.A,self.Omm1,None],\n [-problem.J,None,self.Omm2]],\n format='coo')\n try:\n if not self.linsolver.is_analyzed():\n self.linsolver.analyze(Jbar)\n pbar = self.linsolver.factorize_and_solve(Jbar,fbar)\n except RuntimeError:\n raise OptSolverError_BadLinSystem(self)\n px = pbar[:self.n]\n pmu = (-fdata.ru + self.mu*px)/ux\n ppi = (-fdata.rl - self.pi*px)/xl\n p = np.hstack((pbar,pmu,ppi))\n\n # Steplength bounds\n indices = px > 0\n s1 = np.min(np.hstack(((self.u-self.x)[indices]/px[indices],np.inf)))\n indices = px < 0\n s2 = np.min(np.hstack(((self.l-self.x)[indices]/px[indices],np.inf)))\n indices = pmu < 0\n s3 = 
np.min(np.hstack((-self.mu[indices]/pmu[indices],np.inf)))\n indices = ppi < 0\n s4 = np.min(np.hstack((-self.pi[indices]/ppi[indices],np.inf)))\n smax = (1.-eps)*np.min([s1,s2,s3,s4])\n spmax = (1.-eps)*np.min([s1,s2])\n sdmax = (1.-eps)*np.min([s3,s4])\n \n # Line search\n try:\n s, fdata = self.line_search(self.y, p, fdata.F, fdata.GradF, self.func, smax=smax, maxiter=ls_maxiter)\n\n # Update point\n self.y += s*p\n self.x, self.lam, self.nu, self.mu, self.pi = self.extract_components(self.y)\n \n except OptSolverError_LineSearch:\n sp = np.minimum(1., spmax)\n sd = np.minimum(1., sdmax)\n s = np.minimum(sp,sd)\n\n # Update point\n self.x += sp*px\n self.lam += sd*pbar[self.x.size:self.x.size+self.lam.size]\n self.nu += sd*pbar[self.x.size+self.lam.size:]\n self.mu += sd*pmu\n self.pi += sd*ppi\n self.y = np.hstack((self.x,self.lam,self.nu,self.mu,self.pi))\n\n # Update iters\n self.k += 1 \n\n # Check\n try:\n assert(np.all(self.x < self.u))\n assert(np.all(self.x > self.l))\n assert(np.all(self.mu > 0))\n assert(np.all(self.pi > 0))\n except AssertionError:\n raise OptSolverError_Infeasibility(self)\n\n # Update iters\n self.k += 1"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncopies all files inside src into dest while formatting the contents of the files into the output.", "response": "def format_files(src: str, dest: str, **fmt_vars: str) -> None:\n \"\"\"Copies all files inside src into dest while formatting the contents\n of the files into the output.\n\n For example, a file with the following contents:\n\n {foo} bar {baz}\n\n and the vars {'foo': 'herp', 'baz': 'derp'}\n\n will end up in the output as\n\n herp bar derp\n :param text src: Source directory.\n :param text dest: Destination directory.\n :param dict fmt_vars: Vars to format into the files.\n \"\"\"\n assert os.path.exists(src)\n assert os.path.exists(dest)\n # Only at the root. 
Could be made more complicated and recursive later\n for filename in os.listdir(src):\n if filename.endswith(EXCLUDED_EXTENSIONS):\n continue\n # Flat directory structure\n elif not os.path.isfile(os.path.join(src, filename)):\n continue\n with open(os.path.join(src, filename)) as f:\n output_contents = f.read().format(**fmt_vars)\n with open(os.path.join(dest, filename), 'w') as file_obj:\n file_obj.write(output_contents)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nanalyze the structure of A and runs the analysis process.", "response": "def analyze(self,A):\n \"\"\"\n Analyzes structure of A.\n\n Parameters\n ----------\n \n A : matrix \n For symmetric systems, should contain only lower diagonal part.\n \"\"\"\n\n A = coo_matrix(A)\n \n self.mumps.set_shape(A.shape[0])\n self.mumps.set_centralized_assembled_rows_cols(A.row+1,A.col+1)\n self.mumps.run(job=1)\n\n self.analyzed = True"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfactorize A. Parameters ---------- A : matrix For symmetric systems, should contain only lower diagonal part.", "response": "def factorize(self,A):\n \"\"\"\n Factorizes A.\n\n Parameters\n ----------\n A : matrix\n For symmetric systems, should contain only lower diagonal part.\n \"\"\"\n\n A = coo_matrix(A)\n\n self.mumps.set_centralized_assembled_values(A.data)\n self.mumps.run(job=2)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsolving system Ax = b. 
getLogicalEntryPoint", "response": "def solve(self,b):\n \"\"\"\n Solves system Ax=b.\n \n Parameters\n ----------\n b : ndarray\n \n Returns\n -------\n x : ndarray\n \"\"\"\n\n x = b.copy()\n self.mumps.set_rhs(x)\n self.mumps.run(job=3)\n\n return x"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfactorize A and sovles Ax = b.", "response": "def factorize_and_solve(self,A,b):\n \"\"\"\n Factorizes A and sovles Ax=b.\n\n Parameters\n ----------\n A : matrix\n b : ndarray\n\n Returns\n -------\n x : ndarray\n \"\"\"\n\n A = coo_matrix(A)\n\n x = b.copy()\n self.mumps.set_centralized_assembled_values(A.data)\n self.mumps.set_rhs(x)\n self.mumps.run(job=5)\n\n return x"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef set_centralized_sparse(self, A):\n if self.myid != 0:\n return\n\n A = A.tocoo()\n n = A.shape[0]\n assert A.shape == (n, n), \"Expected a square matrix.\"\n self.set_shape(n)\n self.set_centralized_assembled(A.row+1, A.col+1, A.data)", "response": "Set the assembled matrix on processor 0."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef set_centralized_assembled(self, irn, jcn, a):\n self.set_centralized_assembled_rows_cols(irn, jcn)\n self.set_centralized_assembled_values(a)", "response": "Set the assembled matrix on processor 0."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets the assembled rows and columns on processor 0.", "response": "def set_centralized_assembled_rows_cols(self, irn, jcn):\n \"\"\"Set assembled matrix indices on processor 0.\n\n The row and column indices (irn & jcn) should be one based.\n \"\"\"\n if self.myid != 0:\n return\n assert irn.size == jcn.size\n self._refs.update(irn=irn, jcn=jcn)\n self.id.nz = irn.size\n self.id.irn = self.cast_array(irn)\n self.id.jcn = self.cast_array(jcn)"} {"SOURCE": 
"codesearchnet", "instruction": "Write a Python 3 function for\nsetting the assembled matrix values on processor 0.", "response": "def set_centralized_assembled_values(self, a):\n \"\"\"Set assembled matrix values on processor 0.\"\"\"\n if self.myid != 0:\n return\n assert a.size == self.id.nz\n self._refs.update(a=a)\n self.id.a = self.cast_array(a)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsetting the distributed assembled matrix.", "response": "def set_distributed_assembled(self, irn_loc, jcn_loc, a_loc):\n \"\"\"Set the distributed assembled matrix.\n\n Distributed assembled matrices require setting icntl(18) != 0.\n \"\"\"\n self.set_distributed_assembled_rows_cols(irn_loc, jcn_loc)\n self.set_distributed_assembled_values(a_loc)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_distributed_assembled_rows_cols(self, irn_loc, jcn_loc):\n assert irn_loc.size == jcn_loc.size\n\n self._refs.update(irn_loc=irn_loc, jcn_loc=jcn_loc)\n self.id.nz_loc = irn_loc.size\n self.id.irn_loc = self.cast_array(irn_loc)\n self.id.jcn_loc = self.cast_array(jcn_loc)", "response": "Set the distributed assembled matrix row & column numbers."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nset the distributed assembled matrix values.", "response": "def set_distributed_assembled_values(self, a_loc):\n \"\"\"Set the distributed assembled matrix values.\n\n Distributed assembled matrices require setting icntl(18) != 0.\n \"\"\"\n assert a_loc.size == self._refs['irn_loc'].size\n self._refs.update(a_loc=a_loc)\n self.id.a_loc = self.cast_array(a_loc)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the right hand side. This matrix will be modified in place.", "response": "def set_rhs(self, rhs):\n \"\"\"Set the right hand side. 
This matrix will be modified in place.\"\"\"\n assert rhs.size == self.id.n\n self._refs.update(rhs=rhs)\n self.id.rhs = self.cast_array(rhs)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ndeleting the MUMPS context and release all array references.", "response": "def destroy(self):\n \"\"\"Delete the MUMPS context and release all array references.\"\"\"\n if self.id is not None and self._mumps_c is not None:\n self.id.job = -2 # JOB_END\n self._mumps_c(self.id)\n self.id = None\n self._refs = None"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncall MUMPS checking for errors in the return code.", "response": "def mumps(self):\n \"\"\"Call MUMPS, checking for errors in the return code.\n\n The desired job should have already been set using `ctx.set_job(...)`.\n As a convenience, you may wish to call `ctx.run(job=...)` which sets\n the job and calls MUMPS.\n \"\"\"\n self._mumps_c(self.id)\n if self.id.infog[0] < 0:\n raise RuntimeError(\"MUMPS error: %d\" % self.id.infog[0])"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nmodify the data flow origin state and key at the same time to modify data flow origin state and key.", "response": "def modify_origin(self, from_state, from_key):\n \"\"\"Set both from_state and from_key at the same time to modify data flow origin\n\n :param str from_state: State id of the origin state\n :param int from_key: Data port id of the origin port\n :raises exceptions.ValueError: If parameters have wrong types or the new data flow is not valid\n \"\"\"\n if not isinstance(from_state, string_types):\n raise ValueError(\"Invalid data flow origin port: from_state must be a string\")\n if not isinstance(from_key, int):\n raise ValueError(\"Invalid data flow origin port: from_key must be of type int\")\n\n old_from_state = self.from_state\n old_from_key = self.from_key\n self._from_state = from_state\n self._from_key = from_key\n\n valid, 
message = self._check_validity()\n if not valid:\n self._from_state = old_from_state\n self._from_key = old_from_key\n raise ValueError(\"The data flow origin could not be changed: {0}\".format(message))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef modify_target(self, to_state, to_key):\n if not isinstance(to_state, string_types):\n raise ValueError(\"Invalid data flow target port: from_state must be a string\")\n if not isinstance(to_key, int):\n raise ValueError(\"Invalid data flow target port: from_outcome must be of type int\")\n\n old_to_state = self.to_state\n old_to_key = self.to_key\n self._to_state = to_state\n self._to_key = to_key\n\n valid, message = self._check_validity()\n if not valid:\n self._to_state = old_to_state\n self._to_key = old_to_key\n raise ValueError(\"The data flow target could not be changed: {0}\".format(message))", "response": "Modify the state id and key of the data flow target."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nregisters callback methods for triggered actions", "response": "def register_actions(self, shortcut_manager):\n \"\"\"Register callback methods for triggered actions\n\n :param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager: Shortcut Manager Object holding mappings\n between shortcuts and actions.\n \"\"\"\n shortcut_manager.add_callback_for_action(\"copy\", self._copy)\n shortcut_manager.add_callback_for_action(\"paste\", self._paste)\n shortcut_manager.add_callback_for_action(\"cut\", self._cut)\n shortcut_manager.add_callback_for_action(\"undo\", self._undo)\n shortcut_manager.add_callback_for_action(\"redo\", self._redo)\n shortcut_manager.add_callback_for_action(\"apply\", self._apply)\n shortcut_manager.add_callback_for_action(\"open_external_editor\", self._open_external_editor)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\napply checks and 
adjustments of the TextBuffer and TextView after every change in buffer. The method re-apply the tag (style) for the buffer. It avoids changes while editable-property set to False which are caused by a bug in the GtkSourceView2. GtkSourceView2 is the default used TextView widget here. The text buffer is reset after every change to last stored source-text by a respective work around which suspends any generation of undo items and avoids a recursive call of the method set_enabled by observing its while_in_set_enabled flag. :param TextBuffer source: :return:", "response": "def code_changed(self, source):\n \"\"\" Apply checks and adjustments of the TextBuffer and TextView after every change in buffer.\n\n The method re-apply the tag (style) for the buffer. It avoids changes while editable-property set to False\n which are caused by a bug in the GtkSourceView2. GtkSourceView2 is the default used TextView widget here.\n The text buffer is reset after every change to last stored source-text by a respective work around which\n suspends any generation of undo items and avoids a recursive call of the method set_enabled by observing\n its while_in_set_enabled flag.\n\n :param TextBuffer source:\n :return:\n \"\"\"\n\n # work around to avoid changes at all (e.g. 
by enter-key) if text view property editable is False\n # TODO if SourceView3 is used in future check if this can be skipped\n if not self.view.textview.get_editable() and not self.view.while_in_set_enabled:\n if hasattr(self.view.get_buffer(), 'begin_not_undoable_action'):\n self.view.get_buffer().begin_not_undoable_action()\n self.view.set_enabled(False, self.source_text)\n if hasattr(self.view.get_buffer(), 'end_not_undoable_action'):\n self.view.get_buffer().end_not_undoable_action()\n\n if self.view:\n self.view.apply_tag('default')"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntriggers when the Apply - Shortcut is clicked.", "response": "def apply_clicked(self, button):\n \"\"\"Triggered when the Apply-Shortcut in the editor is triggered.\n\n \"\"\"\n if isinstance(self.model.state, LibraryState):\n return\n\n self.set_script_text(self.view.get_text())"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_enabled(self, on, text=None):\n self.while_in_set_enabled = True\n\n # Apply color scheme by set text 'workaround' (with current buffer source)\n self.set_text(self.get_text()) if text is None else self.set_text(text)\n\n if on:\n self.textview.set_editable(True)\n self.apply_tag('default')\n else:\n self.apply_tag('deactivated')\n self.textview.set_editable(on)\n self.while_in_set_enabled = False", "response": "Set the default input or deactivated style scheme by set text workaround."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef post_init(setup_config):\n logger.info(\"Run post-initiation hook of {} plugin.\".format(__file__.split(os.path.sep)[-2]))\n\n from . import core_template_observer\n # Example 1: initiate observer some elements of the execution engine\n core_template_observer.ExecutionEngineObserver()\n\n # Example 2: initiate observer execution status\n core_template_observer.ExecutionStatusObserver()\n\n from . 
import gtkmvc_template_observer\n # Example 3: gtkmvc3 general modification observer\n # initiate observer of root_state model-object which already implements a power full recursive notification pattern\n gtkmvc_template_observer.RootStateModificationObserver()\n\n # Example 4: gtkmvc3 meta signal observer\n gtkmvc_template_observer.MetaSignalModificationObserver()", "response": "This function is called by the plugin after the plugin is initialized."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nremoves all models of states that have a state model with parent relation in the list The function filters the list of models, so that for no model in the list, one of it (grand-)parents is also in the list. E.g. if the input models consists of a hierarchy state with two of its child states, the resulting list only contains the hierarchy state. :param set models: The set of selected models :return: The reduced set of selected models :rtype: set", "response": "def reduce_to_parent_states(models):\n \"\"\"Remove all models of states that have a state model with parent relation in the list\n\n The function filters the list of models, so that for no model in the list, one of it (grand-)parents is also in\n the list. E.g. 
if the input models consists of a hierarchy state with two of its child states, the resulting list\n only contains the hierarchy state.\n\n :param set models: The set of selected models\n :return: The reduced set of selected models\n :rtype: set\n \"\"\"\n models = set(models) # Ensure that models is a set and that we do not operate on the parameter itself\n models_to_remove = set()\n # check all models\n for model in models:\n parent_m = model.parent\n # check if any (grand-)parent is already in the selection, if so, remove the child\n while parent_m is not None:\n if parent_m in models:\n models_to_remove.add(model)\n break\n parent_m = parent_m.parent\n for model in models_to_remove:\n models.remove(model)\n if models_to_remove:\n logger.debug(\"The selection has been reduced, as it may not contain elements whose children are also selected\")\n return models"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef extend_selection():\n from rafcon.gui.singleton import main_window_controller\n currently_pressed_keys = main_window_controller.currently_pressed_keys if main_window_controller else set()\n if any(key in currently_pressed_keys for key in [constants.EXTEND_SELECTION_KEY,\n constants.EXTEND_SELECTION_KEY_ALT]):\n return True\n return False", "response": "Checks if the selection is to be extended"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nchecking types of passed models for correctness and in case raise exception", "response": "def _check_model_types(self, models):\n \"\"\" Check types of passed models for correctness and in case raise exception\n\n :rtype: set\n :returns: set of models that are valid for the class\"\"\"\n if not hasattr(models, \"__iter__\"):\n models = {models}\n if not all([isinstance(model, (AbstractStateModel, StateElementModel)) for model in models]):\n raise TypeError(\"The selection supports only models with base class AbstractStateModel or 
\"\n \"StateElementModel, see handed elements {0}\".format(models))\n return models if isinstance(models, set) else set(models)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd the passed models to the selection", "response": "def add(self, models):\n \"\"\" Adds the passed model(s) to the selection\"\"\"\n if models is None:\n return\n\n models = self._check_model_types(models)\n self._selected.update(models)\n self._selected = reduce_to_parent_states(self._selected)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nremoves the passed model from the selection", "response": "def remove(self, models):\n \"\"\" Removed the passed model(s) from the selection\"\"\"\n models = self._check_model_types(models)\n for model in models:\n if model in self._selected:\n self._selected.remove(model)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsetting the selection to the passed model s", "response": "def set(self, models):\n \"\"\" Sets the selection to the passed model(s) \"\"\"\n # Do not add None values to selection\n if models is None:\n models = set()\n\n models = self._check_model_types(models)\n if len(models) > 1:\n models = reduce_to_parent_states(models)\n\n self._selected = set(models)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nhandle the selection for a specific core_class elements of a specific TreeStore.", "response": "def handle_prepared_selection_of_core_class_elements(self, core_class, models):\n \"\"\"Handles the selection for TreeStore widgets maintaining lists of a specific `core_class` elements\n\n If widgets hold a TreeStore with elements of a specific `core_class`, the local selection of that element\n type is handled by that widget. 
This method is called to integrate the local selection with the overall\n selection of the state machine.\n\n If no modifier key (indicating to extend the selection) is pressed, the state machine selection is set to the\n passed selection. If the selection is to be extended, the state machine collection will consist of the widget\n selection plus all previously selected elements not having the core class `core_class`.\n\n :param State | StateElement core_class: The core class of the elements the widget handles\n :param models: The list of models that are currently being selected locally\n \"\"\"\n if extend_selection():\n self._selected.difference_update(self.get_selected_elements_of_core_class(core_class))\n else:\n self._selected.clear()\n\n models = self._check_model_types(models)\n if len(models) > 1:\n models = reduce_to_parent_states(models)\n\n self._selected.update(models)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nhandles the new selection for generic widgets.", "response": "def handle_new_selection(self, models):\n \"\"\"Handles the selection for generic widgets\n\n This is a helper method for generic widgets that want to modify the selection. 
These widgets can pass a list\n of newly selected (or clicked on) models.\n\n The method looks at the previous selection, the passed models and the list of pressed (modifier) keys:\n\n * If no modifier key is pressed, the previous selection is cleared and the new selection is set to the passed\n models\n * If the extend-selection modifier key is pressed, elements of `models` that are _not_ in the previous\n selection are selected, those that are in the previous selection are deselected\n\n :param models: The list of models that are newly selected/clicked on\n \"\"\"\n models = self._check_model_types(models)\n\n if extend_selection():\n already_selected_elements = models & self._selected\n newly_selected_elements = models - self._selected\n self._selected.difference_update(already_selected_elements)\n self._selected.update(newly_selected_elements)\n else:\n self._selected = models\n self._selected = reduce_to_parent_states(self._selected)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef focus(self, model):\n if model is None:\n del self.focus\n return\n\n self._check_model_types(model)\n self.add(model)\n focus_msg = FocusSignalMsg(model, self._focus)\n self._focus = model\n self._selected.add(model)\n self._selected = reduce_to_parent_states(self._selected)\n self.focus_signal.emit(focus_msg)", "response": "Sets the passed model as focused element\n "} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update_core_element_lists(self):\n def get_selected_elements_of_core_class(core_class):\n return set(element for element in self._selected if isinstance(element.core_element, core_class))\n self._states = get_selected_elements_of_core_class(State)\n self._transitions = get_selected_elements_of_core_class(Transition)\n self._data_flows = get_selected_elements_of_core_class(DataFlow)\n self._input_data_ports = 
get_selected_elements_of_core_class(InputDataPort)\n self._output_data_ports = get_selected_elements_of_core_class(OutputDataPort)\n self._scoped_variables = get_selected_elements_of_core_class(ScopedVariable)\n self._outcomes = get_selected_elements_of_core_class(Outcome)", "response": "Maintains inner lists of selected elements with a specific core element class"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_selected_elements_of_core_class(self, core_element_type):\n if core_element_type is Outcome:\n return self.outcomes\n elif core_element_type is InputDataPort:\n return self.input_data_ports\n elif core_element_type is OutputDataPort:\n return self.output_data_ports\n elif core_element_type is ScopedVariable:\n return self.scoped_variables\n elif core_element_type is Transition:\n return self.transitions\n elif core_element_type is DataFlow:\n return self.data_flows\n elif core_element_type is State:\n return self.states\n raise RuntimeError(\"Invalid core element type: \" + core_element_type)", "response": "Returns all selected elements of the specified core element type as state element class"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck whether the given model is selected by the user.", "response": "def is_selected(self, model):\n \"\"\"Checks whether the given model is selected\n\n :param model:\n :return: True if the model is within the selection, False else\n :rtype: bool\n \"\"\"\n if model is None:\n return len(self._selected) == 0\n return model in self._selected"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef execute_command_with_path_in_process(command, path, shell=False, cwd=None, logger=None):\n if logger is None:\n logger = _logger\n logger.debug(\"Opening path with command: {0} {1}\".format(command, path))\n # This splits the command in a matter so that the command gets 
called in a separate shell and thus\n # does not lock the window.\n args = shlex.split('{0} \"{1}\"'.format(command, path))\n try:\n subprocess.Popen(args, shell=shell, cwd=cwd)\n return True\n except OSError as e:\n logger.error('The operating system raised an error: {}'.format(e))\n return False", "response": "Executes a specific command in a separate process with a path as argument."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nexecutes a specific command in a separate process", "response": "def execute_command_in_process(command, shell=False, cwd=None, logger=None):\n \"\"\" Executes a specific command in a separate process\n\n :param command: the command to be executed\n :param bool shell: Whether to use a shell\n :param str cwd: The working directory of the command\n :param logger: optional logger instance which can be handed from other module\n :return: None\n \"\"\"\n if logger is None:\n logger = _logger\n logger.debug(\"Run shell command: {0}\".format(command))\n try:\n subprocess.Popen(command, shell=shell, cwd=cwd)\n return True\n except OSError as e:\n logger.error('The operating system raised an error: {}'.format(e))\n return False"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nadding models for each child state of the state", "response": "def _load_child_state_models(self, load_meta_data):\n \"\"\"Adds models for each child state of the state\n\n :param bool load_meta_data: Whether to load the meta data of the child state\n \"\"\"\n self.states = {}\n # Create model for each child class\n child_states = self.state.states\n for child_state in child_states.values():\n # Create hierarchy\n model_class = get_state_model_class_for_state(child_state)\n if model_class is not None:\n self._add_model(self.states, child_state, model_class, child_state.state_id, load_meta_data)\n else:\n logger.error(\"Unknown state type '{type:s}'. 
Cannot create model.\".format(type=type(child_state)))"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _load_scoped_variable_models(self):\n self.scoped_variables = []\n for scoped_variable in self.state.scoped_variables.values():\n self._add_model(self.scoped_variables, scoped_variable, ScopedVariableModel)", "response": "Adds models for each scoped variable in the state"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding models for each data flow in the state", "response": "def _load_data_flow_models(self):\n \"\"\" Adds models for each data flow of the state \"\"\"\n self.data_flows = []\n for data_flow in self.state.data_flows.values():\n self._add_model(self.data_flows, data_flow, DataFlowModel)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _load_transition_models(self):\n self.transitions = []\n for transition in self.state.transitions.values():\n self._add_model(self.transitions, transition, TransitionModel)", "response": "Loads the transition models for each state"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nprepare the model for destruction.", "response": "def prepare_destruction(self, recursive=True):\n \"\"\"Prepares the model for destruction\n\n Recursively un-registers all observers and removes references to child models. 
Extends the destroy method of\n the base class by child elements of a container state.\n \"\"\"\n # logger.verbose(\"Prepare destruction container state ...\")\n if recursive:\n for scoped_variable in self.scoped_variables:\n scoped_variable.prepare_destruction()\n for connection in self.transitions[:] + self.data_flows[:]:\n connection.prepare_destruction()\n for state in self.states.values():\n state.prepare_destruction(recursive)\n del self.scoped_variables[:]\n del self.transitions[:]\n del self.data_flows[:]\n self.states.clear()\n self.scoped_variables = None\n self.transitions = None\n self.data_flows = None\n self.states = None\n super(ContainerStateModel, self).prepare_destruction(recursive)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the scoped variable model for the given data port id", "response": "def get_scoped_variable_m(self, data_port_id):\n \"\"\"Returns the scoped variable model for the given data port id\n\n :param data_port_id: The data port id to search for\n :return: The model of the scoped variable with the given id\n \"\"\"\n for scoped_variable_m in self.scoped_variables:\n if scoped_variable_m.scoped_variable.data_port_id == data_port_id:\n return scoped_variable_m\n return None"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsearches and returns the model of a data port", "response": "def get_data_port_m(self, data_port_id):\n \"\"\"Searches and returns the model of a data port of a given state\n\n The method searches a port with the given id in the data ports of the given state model. 
If the state model\n is a container state, not only the input and output data ports are looked at, but also the scoped variables.\n\n :param data_port_id: The data port id to be searched\n :return: The model of the data port or None if it is not found\n \"\"\"\n\n for scoped_var_m in self.scoped_variables:\n if scoped_var_m.scoped_variable.data_port_id == data_port_id:\n return scoped_var_m\n\n return StateModel.get_data_port_m(self, data_port_id)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_transition_m(self, transition_id):\n for transition_m in self.transitions:\n if transition_m.transition.transition_id == transition_id:\n return transition_m\n return None", "response": "Searches and returns the transition model with the given id"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsearches and returns the data flow model with the given id", "response": "def get_data_flow_m(self, data_flow_id):\n \"\"\"Searches and return the data flow model with the given in the given container state model\n\n :param data_flow_id: The data flow id to be searched\n :return: The model of the data flow or None if it is not found\n \"\"\"\n for data_flow_m in self.data_flows:\n if data_flow_m.data_flow.data_flow_id == data_flow_id:\n return data_flow_m\n return None"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef store_meta_data(self, copy_path=None):\n super(ContainerStateModel, self).store_meta_data(copy_path)\n for state_key, state in self.states.items():\n state.store_meta_data(copy_path)", "response": "Store meta data of container states to the filesystem\n "} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndismiss current meta data and copy meta data from given state model", "response": "def copy_meta_data_from_state_m(self, source_state_m):\n \"\"\"Dismiss 
current meta data and copy meta data from given state model\n\n In addition to the state model method, also the meta data of container states is copied. Then, the meta data\n of child states are recursively copied.\n\n :param source_state_m: State model to load the meta data from\n \"\"\"\n for scoped_variable_m in self.scoped_variables:\n source_scoped_variable_m = source_state_m.get_scoped_variable_m(\n scoped_variable_m.scoped_variable.data_port_id)\n scoped_variable_m.meta = deepcopy(source_scoped_variable_m.meta)\n\n for transition_m in self.transitions:\n source_transition_m = source_state_m.get_transition_m(transition_m.transition.transition_id)\n transition_m.meta = deepcopy(source_transition_m.meta)\n\n for data_flow_m in self.data_flows:\n source_data_flow_m = source_state_m.get_data_flow_m(data_flow_m.data_flow.data_flow_id)\n data_flow_m.meta = deepcopy(source_data_flow_m.meta)\n\n for state_key, state in self.states.items():\n state.copy_meta_data_from_state_m(source_state_m.states[state_key])\n\n super(ContainerStateModel, self).copy_meta_data_from_state_m(source_state_m)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _generate_element_meta_data(self, meta_data):\n super(ContainerStateModel, self)._generate_element_meta_data(meta_data)\n for transition_m in self.transitions:\n self._copy_element_meta_data_to_meta_file_data(meta_data, transition_m, \"transition\",\n transition_m.transition.transition_id)\n for data_flow_m in self.data_flows:\n self._copy_element_meta_data_to_meta_file_data(meta_data, data_flow_m, \"data_flow\",\n data_flow_m.data_flow.data_flow_id)\n for scoped_variable_m in self.scoped_variables:\n self._copy_element_meta_data_to_meta_file_data(meta_data, scoped_variable_m, \"scoped_variable\",\n scoped_variable_m.scoped_variable.data_port_id)", "response": "Generate meta data for the container state elements and add it to the given meta data dictionary."} {"SOURCE": 
"codesearchnet", "instruction": "Create a Python 3 function to\nshow a user dialog for a specific folder selection", "response": "def open_folder(query, default_path=None):\n \"\"\"Shows a user dialog for folder selection\n \n A dialog is opened with the prompt `query`. The current path is set to the last path that was opened/created. The \n roots of all libraries is added to the list of shortcut folders.\n \n :param str query: Prompt asking the user for a specific folder\n :param str default_path: Path to use if user does not specify one \n :return: Path selected by the user or `default_path` if no path was specified or None if none of the paths is valid\n :rtype: str\n \"\"\"\n from gi.repository import Gtk\n from os.path import expanduser, pathsep, dirname, isdir\n from rafcon.gui.singleton import main_window_controller\n from rafcon.gui.runtime_config import global_runtime_config\n last_path = global_runtime_config.get_config_value('LAST_PATH_OPEN_SAVE', \"\")\n selected_filename = None\n if last_path and isdir(last_path):\n selected_filename = last_path.split(pathsep)[-1]\n last_path = dirname(last_path)\n else:\n last_path = expanduser('~')\n\n dialog = Gtk.FileChooserDialog(query,\n None,\n Gtk.FileChooserAction.SELECT_FOLDER,\n (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,\n Gtk.STOCK_OPEN, Gtk.ResponseType.OK))\n # Allows confirming with Enter and double-click\n dialog.set_default_response(Gtk.ResponseType.OK)\n if main_window_controller:\n dialog.set_transient_for(main_window_controller.view.get_top_widget())\n dialog.set_current_folder(last_path)\n if selected_filename is not None:\n dialog.select_filename(selected_filename)\n\n dialog.set_show_hidden(False)\n\n # Add library roots to list of shortcut folders\n add_library_root_path_to_shortcut_folders_of_dialog(dialog)\n\n response = dialog.run()\n\n if response != Gtk.ResponseType.OK:\n dialog.destroy()\n if default_path and os.path.isdir(default_path):\n return default_path\n return None\n\n path = 
dialog.get_filename()\n dialog.destroy()\n\n if os.path.isdir(path):\n global_runtime_config.set_config_value('LAST_PATH_OPEN_SAVE', path)\n return path\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nshows a user dialog to create a new folder.", "response": "def create_folder(query, default_name=None, default_path=None):\n \"\"\"Shows a user dialog for folder creation\n \n A dialog is opened with the prompt `query`. The current path is set to the last path that was opened/created. The \n roots of all libraries is added to the list of shortcut folders.\n \n :param str query: Prompt asking the user for a specific folder\n :param str default_name: Default name of the folder to be created \n :param str default_path: Path in which the folder is created if the user doesn't specify a path \n :return: Path created by the user or `default_path`\\`default_name` if no path was specified or None if none of the\n paths is valid\n :rtype: str\n \"\"\"\n from gi.repository import Gtk\n from os.path import expanduser, dirname, join, exists, isdir\n from rafcon.core.storage.storage import STATEMACHINE_FILE\n from rafcon.gui.singleton import main_window_controller\n from rafcon.gui.runtime_config import global_runtime_config\n last_path = global_runtime_config.get_config_value('LAST_PATH_OPEN_SAVE', \"\")\n\n if last_path and isdir(last_path) and not exists(join(last_path, STATEMACHINE_FILE)):\n pass\n elif last_path:\n last_path = dirname(last_path)\n else:\n last_path = expanduser('~')\n\n dialog = Gtk.FileChooserDialog(query,\n None,\n Gtk.FileChooserAction.CREATE_FOLDER,\n (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,\n Gtk.STOCK_SAVE, Gtk.ResponseType.OK))\n # Allows confirming with Enter and double-click\n dialog.set_default_response(Gtk.ResponseType.OK)\n if main_window_controller:\n dialog.set_transient_for(main_window_controller.view.get_top_widget())\n dialog.set_current_folder(last_path)\n if default_name:\n 
dialog.set_current_name(default_name)\n dialog.set_show_hidden(False)\n\n # Add library roots to list of shortcut folders\n add_library_root_path_to_shortcut_folders_of_dialog(dialog)\n\n response = dialog.run()\n\n if response != Gtk.ResponseType.OK:\n dialog.destroy()\n if default_path and default_name:\n default = os.path.join(default_path, default_name)\n if os.path.isdir(default):\n return default\n return None\n\n path = dialog.get_filename()\n dialog.destroy()\n\n if os.path.isdir(path):\n global_runtime_config.set_config_value('LAST_PATH_OPEN_SAVE', path)\n return path\n return None"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a new state - machine when the user clicks on the + next to the tabs", "response": "def add_state_machine(widget, event=None):\n \"\"\"Create a new state-machine when the user clicks on the '+' next to the tabs\"\"\"\n logger.debug(\"Creating new state-machine...\")\n root_state = HierarchyState(\"new root state\")\n state_machine = StateMachine(root_state)\n rafcon.core.singleton.state_machine_manager.add_state_machine(state_machine)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncalling when the View was registered", "response": "def register_view(self, view):\n \"\"\"Called when the View was registered\"\"\"\n super(StateMachinesEditorController, self).register_view(view)\n self.view['notebook'].connect('switch-page', self.on_switch_page)\n\n # Add all already open state machines\n for state_machine in self.model.state_machines.values():\n self.add_graphical_state_machine_editor(state_machine)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef register_actions(self, shortcut_manager):\n shortcut_manager.add_callback_for_action('close', self.on_close_shortcut)\n\n # Call register_action of parent in order to register actions for child controllers\n super(StateMachinesEditorController, 
self).register_actions(shortcut_manager)", "response": "Register callback methods fot triggered actions."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef close_state_machine(self, widget, page_number, event=None):\n page = widget.get_nth_page(page_number)\n for tab_info in self.tabs.values():\n if tab_info['page'] is page:\n state_machine_m = tab_info['state_machine_m']\n self.on_close_clicked(event, state_machine_m, None, force=False)\n return", "response": "Triggered when the close button in the tab is clicked"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef on_close_shortcut(self, *args):\n state_machine_m = self.model.get_selected_state_machine_model()\n if state_machine_m is None:\n return\n self.on_close_clicked(None, state_machine_m, None, force=False)", "response": "Close selected state machine"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_graphical_state_machine_editor(self, state_machine_m):\n assert isinstance(state_machine_m, StateMachineModel)\n\n sm_id = state_machine_m.state_machine.state_machine_id\n logger.debug(\"Create new graphical editor for state machine with id %s\" % str(sm_id))\n\n graphical_editor_view = GraphicalEditorGaphasView(state_machine_m)\n graphical_editor_ctrl = GraphicalEditorGaphasController(state_machine_m, graphical_editor_view)\n\n self.add_controller(sm_id, graphical_editor_ctrl)\n\n tab, tab_label = create_tab_header('', self.on_close_clicked, self.on_mouse_right_click,\n state_machine_m, 'refused')\n set_tab_label_texts(tab_label, state_machine_m, state_machine_m.state_machine.marked_dirty)\n\n page = graphical_editor_view['main_frame']\n\n self.view.notebook.append_page(page, tab)\n self.view.notebook.set_tab_reorderable(page, True)\n page.show_all()\n\n self.tabs[sm_id] = {'page': page,\n 'state_machine_m': 
state_machine_m,\n 'file_system_path': state_machine_m.state_machine.file_system_path,\n 'marked_dirty': state_machine_m.state_machine.marked_dirty,\n 'root_state_name': state_machine_m.state_machine.root_state.name}\n\n self.observe_model(state_machine_m)\n graphical_editor_view.show()\n self.view.notebook.show()\n self.last_focused_state_machine_ids.append(sm_id)", "response": "Add a new state machine to the state machine view"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef notification_selected_sm_changed(self, model, prop_name, info):\n selected_state_machine_id = self.model.selected_state_machine_id\n if selected_state_machine_id is None:\n return\n\n page_id = self.get_page_num(selected_state_machine_id)\n\n # to retrieve the current tab colors\n number_of_pages = self.view[\"notebook\"].get_n_pages()\n old_label_colors = list(range(number_of_pages))\n for p in range(number_of_pages):\n page = self.view[\"notebook\"].get_nth_page(p)\n label = self.view[\"notebook\"].get_tab_label(page).get_child().get_children()[0]\n\n # old_label_colors[p] = label.get_style().fg[Gtk.StateType.NORMAL]\n old_label_colors[p] = label.get_style_context().get_color(Gtk.StateType.NORMAL)\n\n if not self.view.notebook.get_current_page() == page_id:\n self.view.notebook.set_current_page(page_id)\n\n # set the old colors\n for p in range(number_of_pages):\n page = self.view[\"notebook\"].get_nth_page(p)\n label = self.view[\"notebook\"].get_tab_label(page).get_child().get_children()[0]\n # Gtk TODO\n style = label.get_style_context()", "response": "When a new state machine is selected make sure the tab is open"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the tab label if needed because system path root state name or marked_dirty flag changed .", "response": "def update_state_machine_tab_label(self, state_machine_m):\n \"\"\" Updates tab label if needed because 
system path, root state name or marked_dirty flag changed\n\n :param StateMachineModel state_machine_m: State machine model that has changed\n :return:\n \"\"\"\n sm_id = state_machine_m.state_machine.state_machine_id\n if sm_id in self.tabs:\n sm = state_machine_m.state_machine\n # create new tab label if tab label properties are not up to date\n if not self.tabs[sm_id]['marked_dirty'] == sm.marked_dirty or \\\n not self.tabs[sm_id]['file_system_path'] == sm.file_system_path or \\\n not self.tabs[sm_id]['root_state_name'] == sm.root_state.name:\n\n label = self.view[\"notebook\"].get_tab_label(self.tabs[sm_id][\"page\"]).get_child().get_children()[0]\n set_tab_label_texts(label, state_machine_m, unsaved_changes=sm.marked_dirty)\n\n self.tabs[sm_id]['file_system_path'] = sm.file_system_path\n self.tabs[sm_id]['marked_dirty'] = sm.marked_dirty\n self.tabs[sm_id]['root_state_name'] = sm.root_state.name\n else:\n logger.warning(\"State machine '{0}' tab label can not be updated there is no tab.\".format(sm_id))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ntrigger when the close button of a state machine tab is clicked", "response": "def on_close_clicked(self, event, state_machine_m, result, force=False):\n \"\"\"Triggered when the close button of a state machine tab is clicked\n\n Closes state machine if it is saved. 
Otherwise gives the user the option to 'Close without Saving' or to 'Cancel\n the Close Operation'\n\n :param state_machine_m: The selected state machine model.\n \"\"\"\n from rafcon.core.singleton import state_machine_execution_engine, state_machine_manager\n force = True if event is not None and hasattr(event, 'state') \\\n and event.get_state() & Gdk.ModifierType.SHIFT_MASK \\\n and event.get_state() & Gdk.ModifierType.CONTROL_MASK else force\n\n def remove_state_machine_m():\n state_machine_id = state_machine_m.state_machine.state_machine_id\n if state_machine_id in self.model.state_machine_manager.state_machines:\n self.model.state_machine_manager.remove_state_machine(state_machine_id)\n\n def push_sm_running_dialog():\n\n message_string = \"The state machine is still running. Are you sure you want to close?\"\n dialog = RAFCONButtonDialog(message_string, [\"Stop and close\", \"Cancel\"],\n message_type=Gtk.MessageType.QUESTION, parent=self.get_root_window())\n response_id = dialog.run()\n dialog.destroy()\n if response_id == 1:\n logger.debug(\"State machine execution is being stopped\")\n state_machine_execution_engine.stop()\n state_machine_execution_engine.join()\n # wait for gui is needed; otherwise the signals related to the execution engine cannot\n # be processed properly by the state machine under destruction\n rafcon.gui.utils.wait_for_gui()\n remove_state_machine_m()\n return True\n elif response_id == 2:\n logger.debug(\"State machine execution will keep running\")\n return False\n\n def push_sm_dirty_dialog():\n\n sm_id = state_machine_m.state_machine.state_machine_id\n root_state_name = state_machine_m.root_state.state.name\n message_string = \"There are unsaved changes in the state machine '{0}' with id {1}. 
Do you want to close \" \\\n \"the state machine anyway?\".format(root_state_name, sm_id)\n dialog = RAFCONButtonDialog(message_string, [\"Close without saving\", \"Cancel\"],\n message_type=Gtk.MessageType.QUESTION, parent=self.get_root_window())\n response_id = dialog.run()\n dialog.destroy()\n if response_id == 1: # Close without saving pressed\n remove_state_machine_m()\n return True\n else:\n logger.debug(\"Closing of state machine canceled\")\n return False\n\n # sm running\n if not state_machine_execution_engine.finished_or_stopped() and \\\n state_machine_manager.active_state_machine_id == state_machine_m.state_machine.state_machine_id:\n return push_sm_running_dialog()\n # close is forced -> sm not saved\n elif force:\n remove_state_machine_m()\n return True\n # sm dirty -> save sm request dialog\n elif state_machine_m.state_machine.marked_dirty:\n return push_sm_dirty_dialog()\n else:\n remove_state_machine_m()\n return True"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef close_all_pages(self):\n state_machine_m_list = [tab['state_machine_m'] for tab in self.tabs.values()]\n for state_machine_m in state_machine_m_list:\n self.on_close_clicked(None, state_machine_m, None, force=True)", "response": "Closes all tabs of the state machines editor."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef refresh_state_machines(self, state_machine_ids):\n # remember current selected state machine id\n currently_selected_sm_id = None\n if self.model.get_selected_state_machine_model():\n currently_selected_sm_id = self.model.get_selected_state_machine_model().state_machine.state_machine_id\n\n # create a dictionary from state machine id to state machine path and one for tab page number for recovery\n state_machine_path_by_sm_id = {}\n page_num_by_sm_id = {}\n for sm_id, sm in self.model.state_machine_manager.state_machines.items():\n # the sm.base_path is only None if the 
state machine has never been loaded or saved before\n if sm_id in state_machine_ids and sm.file_system_path is not None:\n state_machine_path_by_sm_id[sm_id] = sm.file_system_path\n page_num_by_sm_id[sm_id] = self.get_page_num(sm_id)\n\n # close all state machine in list and remember if one was not closed\n for sm_id in state_machine_ids:\n was_closed = self.on_close_clicked(None, self.model.state_machines[sm_id], None, force=True)\n if not was_closed and sm_id in page_num_by_sm_id:\n logger.info(\"State machine with id {0} will not be re-open because was not closed.\".format(sm_id))\n del state_machine_path_by_sm_id[sm_id]\n del page_num_by_sm_id[sm_id]\n\n # reload state machines from file system\n try:\n self.model.state_machine_manager.open_state_machines(state_machine_path_by_sm_id)\n except AttributeError as e:\n logger.warning(\"Not all state machines were re-open because {0}\".format(e))\n import rafcon.gui.utils\n rafcon.gui.utils.wait_for_gui() # TODO check again this is needed to secure that all sm-models are generated\n\n # recover tab arrangement\n self.rearrange_state_machines(page_num_by_sm_id)\n\n # recover initial selected state machine and case handling if now state machine is open anymore\n if currently_selected_sm_id:\n # case if only unsaved state machines are open\n if currently_selected_sm_id in self.model.state_machine_manager.state_machines:\n self.set_active_state_machine(currently_selected_sm_id)", "response": "Refreshes the state machine tabs with the given list of state machines."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef refresh_all_state_machines(self):\n self.refresh_state_machines(list(self.model.state_machine_manager.state_machines.keys()))", "response": "Refreshes all state machines tabs"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nswitch the state machine execution engine for the main window controller listens to.", 
"response": "def switch_state_machine_execution_engine(self, new_state_machine_execution_engine):\n \"\"\"\n Switch the state machine execution engine the main window controller listens to.\n :param new_state_machine_execution_engine: the new state machine execution engine for this controller\n :return:\n \"\"\"\n # relieve old one\n self.relieve_model(self.state_machine_execution_model)\n\n # register new\n self.state_machine_execution_model = new_state_machine_execution_engine\n self.observe_model(self.state_machine_execution_model)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nmarking all state machines with execution - running style class.", "response": "def execution_engine_model_changed(self, model, prop_name, info):\n \"\"\"High light active state machine. \"\"\"\n\n notebook = self.view['notebook']\n active_state_machine_id = self.model.state_machine_manager.active_state_machine_id\n\n if active_state_machine_id is None:\n # un-mark all state machine that are marked with execution-running style class\n for tab in self.tabs.values():\n label = notebook.get_tab_label(tab['page']).get_child().get_children()[0]\n if label.get_style_context().has_class(constants.execution_running_style_class):\n label.get_style_context().remove_class(constants.execution_running_style_class)\n else:\n # mark active state machine with execution-running style class\n page = self.get_page_for_state_machine_id(active_state_machine_id)\n if page:\n label = notebook.get_tab_label(page).get_child().get_children()[0]\n label.get_style_context().add_class(constants.execution_running_style_class)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a tuple that holds the state as yaml - strings and its meta data in a dictionary.", "response": "def get_state_tuple(state, state_m=None):\n \"\"\" Generates a tuple that holds the state as yaml-strings and its meta data in a dictionary.\n The tuple consists of:\n [0] 
json_str for state,\n [1] dict of child_state tuples,\n [2] dict of model_meta-data of self and elements\n [3] path of state in state machine\n [4] script_text\n [5] file system path\n [6] semantic data\n # states-meta - [state-, transitions-, data_flows-, outcomes-, inputs-, outputs-, scopes, states-meta]\n\n :param rafcon.core.states.state.State state: The state that should be stored\n :return: state_tuple tuple\n \"\"\"\n state_str = json.dumps(state, cls=JSONObjectEncoder,\n indent=4, check_circular=False, sort_keys=True)\n\n state_tuples_dict = {}\n if isinstance(state, ContainerState):\n # print(state.states, \"\\n\")\n for child_state_id, child_state in state.states.items():\n # print(\"child_state: %s\" % child_state_id, child_state, \"\\n\")\n state_tuples_dict[child_state_id] = get_state_tuple(child_state)\n\n state_meta_dict = {} if state_m is None else get_state_element_meta(state_m)\n\n script_content = state.script.script if isinstance(state, ExecutionState) else None\n\n state_tuple = (state_str, state_tuples_dict, state_meta_dict, state.get_path(), script_content,\n state.file_system_path, copy.deepcopy(state.semantic_data))\n\n return state_tuple"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef meta_dump_or_deepcopy(meta):\n if DEBUG_META_REFERENCES: # debug copy\n from rafcon.gui.helpers.meta_data import meta_data_reference_check\n meta_data_reference_check(meta)\n return copy.deepcopy(meta)", "response": "Function to observe meta data vivi - dict copy process and to debug it at one point"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef undo(self):\n # logger.verbose(\"#H# STATE_MACHINE_UNDO STARTED\")\n state = self.state_machine.root_state\n\n self.set_root_state_to_version(state, self.before_storage)", "response": "General Undo that takes all elements in the parent and\n and sets the state to the root state."} {"SOURCE": "codesearchnet", 
"instruction": "Implement a function in Python 3 to\nget the ImageSurface object if possible and return it.", "response": "def get_cached_image(self, width, height, zoom, parameters=None, clear=False):\n \"\"\"Get ImageSurface object, if possible, cached\n\n The method checks whether the image was already rendered. This is done by comparing the passed size and\n parameters with those of the last image. If they are equal, the cached image is returned. Otherwise,\n a new ImageSurface with the specified dimensions is created and returned.\n :param width: The width of the image\n :param height: The height of the image\n :param zoom: The current scale/zoom factor\n :param parameters: The parameters used for the image\n :param clear: If True, the cache is emptied, thus the image won't be retrieved from cache\n :returns: The flag is True when the image is retrieved from the cache, otherwise False; The cached image\n surface or a blank one with the desired size; The zoom parameter when the image was stored\n :rtype: bool, ImageSurface, float\n \"\"\"\n global MAX_ALLOWED_AREA\n if not parameters:\n parameters = {}\n\n if self.__compare_parameters(width, height, zoom, parameters) and not clear:\n return True, self.__image, self.__zoom\n\n # Restrict image surface size to prevent excessive use of memory\n while True:\n try:\n self.__limiting_multiplicator = 1\n area = width * zoom * self.__zoom_multiplicator * height * zoom * self.__zoom_multiplicator\n if area > MAX_ALLOWED_AREA:\n self.__limiting_multiplicator = sqrt(MAX_ALLOWED_AREA / area)\n\n image = ImageSurface(self.__format, int(ceil(width * zoom * self.multiplicator)),\n int(ceil(height * zoom * self.multiplicator)))\n break # If we reach this point, the area was successfully allocated and we can break the loop\n except Error:\n MAX_ALLOWED_AREA *= 0.8\n\n self.__set_cached_image(image, width, height, zoom, parameters)\n return False, self.__image, zoom"} {"SOURCE": "codesearchnet", "instruction": "Can you write a 
function in Python 3 where it\ndraws a cached image on the context", "response": "def copy_image_to_context(self, context, position, rotation=0, zoom=None):\n \"\"\"Draw a cached image on the context\n\n :param context: The Cairo context to draw on\n :param position: The position od the image\n \"\"\"\n if not zoom:\n zoom = self.__zoom\n zoom_multiplicator = zoom * self.multiplicator\n context.save()\n context.scale(1. / zoom_multiplicator, 1. / zoom_multiplicator)\n\n image_position = round(position[0] * zoom_multiplicator), round(position[1] * zoom_multiplicator)\n context.translate(*image_position)\n context.rotate(rotation)\n context.set_source_surface(self.__image, 0, 0)\n\n context.paint()\n context.restore()"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a temporary cairo context for the image surface", "response": "def get_context_for_image(self, zoom):\n \"\"\"Creates a temporary cairo context for the image surface\n\n :param zoom: The current scaling factor\n :return: Cairo context to draw on\n \"\"\"\n cairo_context = Context(self.__image)\n cairo_context.scale(zoom * self.multiplicator, zoom * self.multiplicator)\n return cairo_context"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef __compare_parameters(self, width, height, zoom, parameters):\n\n # Deactivated caching\n if not global_gui_config.get_config_value('ENABLE_CACHING', True):\n return False\n\n # Empty cache\n if not self.__image:\n return False\n\n # Changed image size\n if self.__width != width or self.__height != height:\n return False\n\n # Current zoom greater then prepared zoom\n if zoom > self.__zoom * self.__zoom_multiplicator:\n return False\n\n # Current zoom much smaller than prepared zoom, causes high memory usage and imperfect anti-aliasing\n if zoom < self.__zoom / self.__zoom_multiplicator:\n return False\n\n # Changed drawing parameter\n for key in parameters:\n try:\n if key not 
in self.__last_parameters or self.__last_parameters[key] != parameters[key]:\n return False\n except (AttributeError, ValueError):\n # Some values cannot be compared and raise an exception on comparison (e.g. numpy.ndarray). In this\n # case, just return False and do not cache.\n try:\n # Catch at least the ndarray-case, as this could occure relatively often\n import numpy\n if isinstance(self.__last_parameters[key], numpy.ndarray):\n return numpy.array_equal(self.__last_parameters[key], parameters[key])\n except ImportError:\n return False\n return False\n\n return True", "response": "Compare the parameters of a cached image with the current image size and the current zoom and the parameters used for the image."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef post_setup_plugins(parser_result):\n if not isinstance(parser_result, dict):\n parser_result = vars(parser_result)\n plugins.run_post_inits(parser_result)", "response": "Calls the post init hubs\n "} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nensures that the environmental variable RAFCON_LIB_PATH is existent and sets up the internal version of the object.", "response": "def setup_environment():\n \"\"\"Ensures that the environmental variable RAFCON_LIB_PATH is existent\n \"\"\"\n try:\n from gi.repository import GLib\n user_data_folder = GLib.get_user_data_dir()\n except ImportError:\n user_data_folder = join(os.path.expanduser(\"~\"), \".local\", \"share\")\n rafcon_root_path = dirname(realpath(rafcon.__file__))\n user_library_folder = join(user_data_folder, \"rafcon\", \"libraries\")\n\n # The RAFCON_LIB_PATH points to a path with common RAFCON libraries\n # If the env variable is not set, we have to determine it. 
In the future, this should always be\n # ~/.local/share/rafcon/libraries, but for backward compatibility, also a relative RAFCON path is supported\n if not os.environ.get('RAFCON_LIB_PATH', None):\n if exists(user_library_folder):\n os.environ['RAFCON_LIB_PATH'] = user_library_folder\n else:\n os.environ['RAFCON_LIB_PATH'] = join(dirname(dirname(rafcon_root_path)), 'share', 'libraries')\n\n # Install dummy _ builtin function in case i18.setup_l10n() is not called\n if sys.version_info >= (3,):\n import builtins as builtins23\n else:\n import __builtin__ as builtins23\n if \"_\" not in builtins23.__dict__:\n builtins23.__dict__[\"_\"] = lambda s: s"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsetting up the argument parser with the required arguments", "response": "def setup_argument_parser():\n \"\"\"Sets up teh parser with the required arguments\n\n :return: The parser object\n \"\"\"\n default_config_path = filesystem.get_default_config_path()\n filesystem.create_path(default_config_path)\n\n parser = core_singletons.argument_parser\n parser.add_argument('-o', '--open', type=parse_state_machine_path, dest='state_machine_path', metavar='path',\n nargs='+', help=\"specify directories of state-machines that shall be opened. The path must \"\n \"contain a statemachine.json file\")\n parser.add_argument('-c', '--config', type=config_path, metavar='path', dest='config_path',\n default=default_config_path, nargs='?', const=default_config_path,\n help=\"path to the configuration file config.yaml. Use 'None' to prevent the generation of \"\n \"a config file and use the default configuration. Default: {0}\".format(default_config_path))\n parser.add_argument('-r', '--remote', action='store_true', help=\"remote control mode\")\n parser.add_argument('-s', '--start_state_path', metavar='path', dest='start_state_path', default=None, nargs='?',\n help=\"path within a state machine to the state that should be launched. 
The state path \"\n \"consists of state ids (e.g. QPOXGD/YVWJKZ whereof QPOXGD is the root state and YVWJKZ \"\n \"it's child state to start from).\")\n return parser"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nload the core configuration from the specified path and uses its content for further setup", "response": "def setup_configuration(config_path):\n \"\"\"Loads the core configuration from the specified path and uses its content for further setup\n\n :param config_path: Path to the core config file\n \"\"\"\n if config_path is not None:\n config_path, config_file = filesystem.separate_folder_path_and_file_name(config_path)\n global_config.load(config_file=config_file, path=config_path)\n else:\n global_config.load(path=config_path)\n\n # Initialize libraries\n core_singletons.library_manager.initialize()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nexecute the specified state machine", "response": "def open_state_machine(state_machine_path):\n \"\"\"Executes the specified state machine\n\n :param str state_machine_path: The file path to the state machine\n :return StateMachine: The loaded state machine\n \"\"\"\n sm = storage.load_state_machine_from_path(state_machine_path)\n core_singletons.state_machine_manager.add_state_machine(sm)\n\n return sm"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef wait_for_state_machine_finished(state_machine):\n global _user_abort\n\n from rafcon.core.states.execution_state import ExecutionState\n if not isinstance(state_machine.root_state, ExecutionState):\n while len(state_machine.execution_histories[0]) < 1:\n time.sleep(0.1)\n else:\n time.sleep(0.5)\n\n while state_machine.root_state.state_execution_status is not StateExecutionStatus.INACTIVE:\n try:\n state_machine.root_state.concurrency_queue.get(timeout=1)\n # this check triggers if the state machine could not be stopped in the signal handler\n if 
_user_abort:\n return\n except Empty:\n pass\n # no logger output here to make it easier for the parser\n logger.verbose(\"RAFCON live signal\")", "response": "wait for a state machine to finish its execution\n "} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nstop the reactor if the state machine has finished.", "response": "def stop_reactor_on_state_machine_finish(state_machine):\n \"\"\" Wait for a state machine to be finished and stops the reactor\n\n :param state_machine: the state machine to synchronize with\n \"\"\"\n wait_for_state_machine_finished(state_machine)\n from twisted.internet import reactor\n if reactor.running:\n plugins.run_hook(\"pre_destruction\")\n reactor.callFromThread(reactor.stop)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\nasync def wait_until_ready(\n self, timeout: Optional[float] = None, no_raise: bool = False\n ) -> bool:\n \"\"\"\n Waits for the underlying node to become ready.\n\n If no_raise is set, returns false when a timeout occurs instead of propogating TimeoutError.\n A timeout of None means to wait indefinitely.\n \"\"\"\n if self.node.ready.is_set():\n return True\n\n try:\n return await self.node.wait_until_ready(timeout=timeout)\n except asyncio.TimeoutError:\n if no_raise:\n return False\n else:\n raise", "response": "Waits until the underlying node is ready."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nconnects to the voice channel associated with this Player.", "response": "async def connect(self):\n \"\"\"\n Connects to the voice channel associated with this Player.\n \"\"\"\n await self.node.join_voice_channel(self.channel.guild.id, self.channel.id)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\nasync def move_to(self, channel: discord.VoiceChannel):\n if channel.guild != self.channel.guild:\n raise 
TypeError(\"Cannot move to a different guild.\")\n\n self.channel = channel\n await self.connect()", "response": "Moves this player to a voice channel."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndisconnect this player from its voice channel.", "response": "async def disconnect(self, requested=True):\n \"\"\"\n Disconnects this player from it's voice channel.\n \"\"\"\n if self.state == PlayerState.DISCONNECTING:\n return\n\n await self.update_state(PlayerState.DISCONNECTING)\n if not requested:\n log.debug(\n f\"Forcing player disconnect for guild {self.channel.guild.id}\"\n f\" due to player manager request.\"\n )\n\n guild_id = self.channel.guild.id\n voice_ws = self.node.get_voice_ws(guild_id)\n\n if not voice_ws.closed:\n await voice_ws.voice_state(guild_id, None)\n\n await self.node.destroy_guild(guild_id)\n await self.close()\n\n self.manager.remove_player(self)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\nasync def handle_event(self, event: \"node.LavalinkEvents\", extra):\n if event == LavalinkEvents.TRACK_END:\n if extra == TrackEndReason.FINISHED:\n await self.play()\n else:\n self._is_playing = False", "response": "Handles various events related to a given event."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\nasync def handle_player_update(self, state: \"node.PlayerState\"):\n if state.position > self.position:\n self._is_playing = True\n self.position = state.position", "response": "Handles player updates from lavalink."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a track to the queue.", "response": "def add(self, requester: discord.User, track: Track):\n \"\"\"\n Adds a track to the queue.\n\n Parameters\n ----------\n requester : discord.User\n User who requested the track.\n track : Track\n Result from any of the lavalink track search methods.\n \"\"\"\n 
track.requester = requester\n self.queue.append(track)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nstarts playback from lavalink.", "response": "async def play(self):\n \"\"\"\n Starts playback from lavalink.\n \"\"\"\n if self.repeat and self.current is not None:\n self.queue.append(self.current)\n\n self.current = None\n self.position = 0\n self._paused = False\n\n if not self.queue:\n await self.stop()\n else:\n self._is_playing = True\n if self.shuffle:\n track = self.queue.pop(randrange(len(self.queue)))\n else:\n track = self.queue.pop(0)\n\n self.current = track\n log.debug(\"Assigned current.\")\n await self.node.play(self.channel.guild.id, track)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\nasync def stop(self):\n await self.node.stop(self.channel.guild.id)\n self.queue = []\n self.current = None\n self.position = 0\n self._paused = False", "response": "Stops playback from lavalink."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\npause the current song.", "response": "async def pause(self, pause: bool = True):\n \"\"\"\n Pauses the current song.\n\n Parameters\n ----------\n pause : bool\n Set to ``False`` to resume.\n \"\"\"\n self._paused = pause\n await self.node.pause(self.channel.guild.id, pause)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\nasync def set_volume(self, volume: int):\n self._volume = max(min(volume, 150), 0)\n await self.node.volume(self.channel.guild.id, self.volume)", "response": "Sets the volume of the current object."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nseek to a position.", "response": "async def seek(self, position: int):\n \"\"\"\n If the track allows it, seeks to a position.\n\n Parameters\n ----------\n position : int\n Between 0 and track length.\n \"\"\"\n if 
self.current.seekable:\n position = max(min(position, self.current.length), 0)\n await self.node.seek(self.channel.guild.id, position)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate a new Player object for the given channel.", "response": "async def create_player(self, channel: discord.VoiceChannel) -> Player:\n \"\"\"\n Connects to a discord voice channel.\n\n This function is safe to repeatedly call as it will return an existing\n player if there is one.\n\n Parameters\n ----------\n channel\n\n Returns\n -------\n Player\n The created Player object.\n \"\"\"\n if self._already_in_guild(channel):\n p = self.get_player(channel.guild.id)\n await p.move_to(channel)\n else:\n p = Player(self, channel)\n await p.connect()\n self._player_dict[channel.guild.id] = p\n await self.refresh_player_state(p)\n return p"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_player(self, guild_id: int) -> Player:\n if guild_id in self._player_dict:\n return self._player_dict[guild_id]\n raise KeyError(\"No such player for that guild.\")", "response": "Gets a Player object from a guild ID."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef resource_filename(package_or_requirement, resource_name):\n if pkg_resources.resource_exists(package_or_requirement, resource_name):\n return pkg_resources.resource_filename(package_or_requirement, resource_name)\n\n path = _search_in_share_folders(package_or_requirement, resource_name)\n\n if path:\n return path\n\n raise RuntimeError(\"Resource {} not found in {}\".format(package_or_requirement, resource_name))", "response": "Similar to pkg_resources. 
resource_filename but also looks in a predefined list of paths to find the resource in which the resource is found via pkg_resources\n ."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn True if the resource exists in the resource tree otherwise False.", "response": "def resource_exists(package_or_requirement, resource_name):\n \"\"\"\n Similar to pkg_resources.resource_exists but if the resource it not found via pkg_resources\n it also looks in a predefined list of paths in order to find the resource\n\n :param package_or_requirement: the module in which the resource resides\n :param resource_name: the name of the resource\n :return: a flag if the file exists\n :rtype: bool\n \"\"\"\n if pkg_resources.resource_exists(package_or_requirement, resource_name):\n return True\n\n path = _search_in_share_folders(package_or_requirement, resource_name)\n\n return True if path else False"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef resource_string(package_or_requirement, resource_name):\n with open(resource_filename(package_or_requirement, resource_name), 'r') as resource_file:\n return resource_file.read()", "response": "Returns the content of a resource in the order they were found"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef load_plugins():\n plugins = os.environ.get('RAFCON_PLUGIN_PATH', '')\n plugin_list = set(plugins.split(os.pathsep))\n global plugin_dict\n for plugin_path in plugin_list:\n if not plugin_path:\n continue\n plugin_path = os.path.expandvars(os.path.expanduser(plugin_path)).strip()\n if not os.path.exists(plugin_path):\n logger.error(\"The specified plugin path does not exist: {}\".format(plugin_path))\n continue\n dir_name, plugin_name = os.path.split(plugin_path.rstrip('/'))\n logger.info(\"Found plugin '{}' at {}\".format(plugin_name, plugin_path))\n sys.path.insert(0, dir_name)\n if plugin_name in 
plugin_dict:\n logger.error(\"Plugin '{}' already loaded\".format(plugin_name))\n else:\n try:\n module = importlib.import_module(plugin_name)\n plugin_dict[plugin_name] = module\n logger.info(\"Successfully loaded plugin '{}'\".format(plugin_name))\n except ImportError as e:\n logger.error(\"Could not import plugin '{}': {}\\n{}\".format(plugin_name, e, str(traceback.format_exc())))", "response": "Loads all plugins specified in the RAFCON_PLUGIN_PATH environment variable and stores them in the global plugin_dict."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrun the passed hook on all registered plugins ArcGIS plugins", "response": "def run_hook(hook_name, *args, **kwargs):\n \"\"\"Runs the passed hook on all registered plugins\n\n The function checks, whether the hook is available in the plugin.\n\n :param hook_name: Name of the hook, corresponds to the function name being called\n :param args: Arguments\n :param kwargs: Keyword arguments\n \"\"\"\n for module in plugin_dict.values():\n if hasattr(module, \"hooks\") and callable(getattr(module.hooks, hook_name, None)):\n getattr(module.hooks, hook_name)(*args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndetermine the model class required for holding a given state instance", "response": "def get_state_model_class_for_state(state):\n \"\"\"Determines the model required for the given state class\n\n :param state: Instance of a state (ExecutionState, BarrierConcurrencyState, ...)\n :return: The model class required for holding such a state instance\n \"\"\"\n from rafcon.gui.models.state import StateModel\n from rafcon.gui.models.container_state import ContainerStateModel\n from rafcon.gui.models.library_state import LibraryStateModel\n if isinstance(state, ContainerState):\n return ContainerStateModel\n elif isinstance(state, LibraryState):\n return LibraryStateModel\n elif isinstance(state, State):\n return StateModel\n else:\n 
logger.warning(\"There is not model for state of type {0} {1}\".format(type(state), state))\n return None"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update_is_start(self):\n self.is_start = self.state.is_root_state or \\\n self.parent is None or \\\n isinstance(self.parent.state, LibraryState) or \\\n self.state.state_id == self.state.parent.start_state_id", "response": "Updates the is_start property of the state\n initCause"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\npreparing the model for destruction.", "response": "def prepare_destruction(self, recursive=True):\n \"\"\"Prepares the model for destruction\n\n Recursively un-registers all observers and removes references to child models\n \"\"\"\n if self.state is None:\n logger.verbose(\"Multiple calls of prepare destruction for {0}\".format(self))\n self.destruction_signal.emit()\n try:\n self.unregister_observer(self)\n except KeyError: # Might happen if the observer was already unregistered\n logger.verbose(\"Observer already unregistered!\")\n pass\n if recursive:\n if self.income:\n self.income.prepare_destruction()\n for port in self.input_data_ports[:] + self.output_data_ports[:] + self.outcomes[:]:\n port.prepare_destruction()\n del self.input_data_ports[:]\n del self.output_data_ports[:]\n del self.outcomes[:]\n self.state = None\n self.input_data_ports = None\n self.output_data_ports = None\n self.income = None\n self.outcomes = None\n # History TODO: these are needed by the modification history\n # self.action_signal = None\n # self.meta_signal = None\n # self.destruction_signal = None\n self.observe = None\n super(AbstractStateModel, self).prepare_destruction()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_state_machine_m(self, two_factor_check=True):\n from rafcon.gui.singleton import state_machine_manager_model\n state_machine = 
self.state.get_state_machine()\n if state_machine:\n if state_machine.state_machine_id in state_machine_manager_model.state_machines:\n sm_m = state_machine_manager_model.state_machines[state_machine.state_machine_id]\n if not two_factor_check or sm_m.get_state_model_by_path(self.state.get_path()) is self:\n return sm_m\n else:\n logger.debug(\"State model requesting its state machine model parent seems to be obsolete. \"\n \"This is a hint to duplicated models and dirty coding\")\n\n return None", "response": "Get the respective state machine model"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the input data port model for the given data port id", "response": "def get_input_data_port_m(self, data_port_id):\n \"\"\"Returns the input data port model for the given data port id\n\n :param data_port_id: The data port id to search for\n :return: The model of the data port with the given id\n \"\"\"\n for data_port_m in self.input_data_ports:\n if data_port_m.data_port.data_port_id == data_port_id:\n return data_port_m\n return None"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_output_data_port_m(self, data_port_id):\n for data_port_m in self.output_data_ports:\n if data_port_m.data_port.data_port_id == data_port_id:\n return data_port_m\n return None", "response": "Returns the output data port model for the given data port id"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nsearch and returns the model of a data port of a given state model", "response": "def get_data_port_m(self, data_port_id):\n \"\"\"Searches and returns the model of a data port of a given state\n\n The method searches a port with the given id in the data ports of the given state model. 
If the state model\n is a container state, not only the input and output data ports are looked at, but also the scoped variables.\n\n :param data_port_id: The data port id to be searched\n :return: The model of the data port or None if it is not found\n \"\"\"\n from itertools import chain\n data_ports_m = chain(self.input_data_ports, self.output_data_ports)\n for data_port_m in data_ports_m:\n if data_port_m.data_port.data_port_id == data_port_id:\n return data_port_m\n return None"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_outcome_m(self, outcome_id):\n for outcome_m in self.outcomes:\n if outcome_m.outcome.outcome_id == outcome_id:\n return outcome_m\n return False", "response": "Returns the outcome model with the given id"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef meta_changed(self, model, prop_name, info):\n msg = info.arg\n # print(\"meta_changed state: \", model, prop_name, info)\n if msg.notification is None:\n # Meta data of this state was changed, add information about notification to the signal message\n notification = Notification(model, prop_name, info)\n msg = msg._replace(notification=notification)\n # print(\"DONE0 \", msg)\n\n if self.parent is not None:\n # Notify parent about change of meta data\n info.arg = msg\n self.parent.meta_changed(model, prop_name, info)\n # print(\"DONE1 \", msg)\n elif not msg.change.startswith('sm_notification_'): # Prevent recursive call\n # If we are the root state, inform the state machine model by emitting our own meta signal.\n # To make the signal distinguishable for a change of meta data to our state, the change property of\n # the message is prepended with 'sm_notification_'\n msg = msg._replace(change='sm_notification_' + msg.change)\n self.meta_signal.emit(msg)\n # print(\"DONE2 \", msg)\n else:\n # print(\"DONE3 NOTHING\")\n pass", "response": "This method 
notifies the parent state about changes made to the meta data of the current state of the object."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nload the meta data of the state model from the file system.", "response": "def load_meta_data(self, path=None):\n \"\"\"Load meta data of state model from the file system\n\n The meta data of the state model is loaded from the file system and stored in the meta property of the model.\n Existing meta data is removed. Also the meta data of all state elements (data ports, outcomes,\n etc) are loaded, as those stored in the same file as the meta data of the state.\n\n This is either called on the __init__ of a new state model or if a state model for a container state is created,\n which then calls load_meta_data for all its children.\n\n :param str path: Optional file system path to the meta data file. If not given, the path will be derived from\n the state's path on the filesystem\n :return: if meta data file was loaded True otherwise False\n :rtype: bool\n \"\"\"\n # TODO: for an Execution state this method is called for each hierarchy level again and again, still?? 
check it!\n # print(\"1AbstractState_load_meta_data: \", path, not path)\n if not path:\n path = self.state.file_system_path\n # print(\"2AbstractState_load_meta_data: \", path)\n if path is None:\n self.meta = Vividict({})\n return False\n path_meta_data = os.path.join(path, storage.FILE_NAME_META_DATA)\n\n # TODO: Should be removed with next minor release\n if not os.path.exists(path_meta_data):\n logger.debug(\"Because meta data was not found in {0} use backup option {1}\"\n \"\".format(path_meta_data, os.path.join(path, storage.FILE_NAME_META_DATA_OLD)))\n path_meta_data = os.path.join(path, storage.FILE_NAME_META_DATA_OLD)\n # TODO use the following logger message to debug meta data load process and to avoid maybe repetitive loads\n # if not os.path.exists(path_meta_data):\n # logger.info(\"path not found {0}\".format(path_meta_data))\n\n try:\n # print(\"try to load meta data from {0} for state {1}\".format(path_meta_data, self.state))\n tmp_meta = storage.load_data_file(path_meta_data)\n except ValueError as e:\n # if no element which is newly generated log a warning\n # if os.path.exists(os.path.dirname(path)):\n # logger.debug(\"Because '{1}' meta data of {0} was not loaded properly.\".format(self, e))\n if not path.startswith(constants.RAFCON_TEMP_PATH_STORAGE) and not os.path.exists(os.path.dirname(path)):\n logger.debug(\"Because '{1}' meta data of {0} was not loaded properly.\".format(self, e))\n tmp_meta = {}\n\n # JSON returns a dict, which must be converted to a Vividict\n tmp_meta = Vividict(tmp_meta)\n\n if tmp_meta:\n self._parse_for_element_meta_data(tmp_meta)\n # assign the meta data to the state\n self.meta = tmp_meta\n self.meta_signal.emit(MetaSignalMsg(\"load_meta_data\", \"all\", True))\n return True\n else:\n # print(\"nothing to parse\", tmp_meta)\n return False"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nstoring the meta data of the state model to the file system.", "response": "def store_meta_data(self, 
copy_path=None):\n \"\"\"Save meta data of state model to the file system\n\n This method generates a dictionary of the meta data of the state together with the meta data of all state\n elements (data ports, outcomes, etc.) and stores it on the filesystem.\n Secure that the store meta data method is called after storing the core data otherwise the last_stored_path is\n maybe wrong or None.\n The copy path is considered to be a state machine file system path but not the current one but e.g.\n of a as copy saved state machine. The meta data will be stored in respective relative state folder in the state\n machine hierarchy. This folder has to exist.\n Dues the core elements of the state machine has to be stored first.\n\n :param str copy_path: Optional copy path if meta data is not stored to the file system path of state machine\n \"\"\"\n if copy_path:\n meta_file_path_json = os.path.join(copy_path, self.state.get_storage_path(), storage.FILE_NAME_META_DATA)\n else:\n if self.state.file_system_path is None:\n logger.error(\"Meta data of {0} can be stored temporary arbitrary but by default first after the \"\n \"respective state was stored and a file system path is set.\".format(self))\n return\n meta_file_path_json = os.path.join(self.state.file_system_path, storage.FILE_NAME_META_DATA)\n meta_data = deepcopy(self.meta)\n self._generate_element_meta_data(meta_data)\n storage_utils.write_dict_to_json(meta_data, meta_file_path_json)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ndismiss current meta data and copy meta data from given state model", "response": "def copy_meta_data_from_state_m(self, source_state_m):\n \"\"\"Dismiss current meta data and copy meta data from given state model\n\n The meta data of the given state model is used as meta data for this state. Also the meta data of all state\n elements (data ports, outcomes, etc.) 
is overwritten with the meta data of the elements of the given state.\n\n :param source_state_m: State model to load the meta data from\n \"\"\"\n\n self.meta = deepcopy(source_state_m.meta)\n\n for input_data_port_m in self.input_data_ports:\n source_data_port_m = source_state_m.get_input_data_port_m(input_data_port_m.data_port.data_port_id)\n input_data_port_m.meta = deepcopy(source_data_port_m.meta)\n for output_data_port_m in self.output_data_ports:\n source_data_port_m = source_state_m.get_output_data_port_m(output_data_port_m.data_port.data_port_id)\n output_data_port_m.meta = deepcopy(source_data_port_m.meta)\n for outcome_m in self.outcomes:\n source_outcome_m = source_state_m.get_outcome_m(outcome_m.outcome.outcome_id)\n outcome_m.meta = deepcopy(source_outcome_m.meta)\n self.income.meta = deepcopy(source_state_m.income.meta)\n\n self.meta_signal.emit(MetaSignalMsg(\"copy_state_m\", \"all\", True))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nload meta data for state elements in the state meta data file.", "response": "def _parse_for_element_meta_data(self, meta_data):\n \"\"\"Load meta data for state elements\n\n The meta data of the state meta data file also contains the meta data for state elements (data ports,\n outcomes, etc). This method parses the loaded meta data for each state element model. 
The meta data of the\n elements is removed from the passed dictionary.\n\n :param meta_data: Dictionary of loaded meta data\n \"\"\"\n # print(\"_parse meta data\", meta_data)\n for data_port_m in self.input_data_ports:\n self._copy_element_meta_data_from_meta_file_data(meta_data, data_port_m, \"input_data_port\",\n data_port_m.data_port.data_port_id)\n for data_port_m in self.output_data_ports:\n self._copy_element_meta_data_from_meta_file_data(meta_data, data_port_m, \"output_data_port\",\n data_port_m.data_port.data_port_id)\n for outcome_m in self.outcomes:\n self._copy_element_meta_data_from_meta_file_data(meta_data, outcome_m, \"outcome\",\n outcome_m.outcome.outcome_id)\n if \"income\" in meta_data:\n if \"gui\" in meta_data and \"editor_gaphas\" in meta_data[\"gui\"] and \\\n \"income\" in meta_data[\"gui\"][\"editor_gaphas\"]: # chain necessary to prevent key generation\n del meta_data[\"gui\"][\"editor_gaphas\"][\"income\"]\n elif \"gui\" in meta_data and \"editor_gaphas\" in meta_data[\"gui\"] and \\\n \"income\" in meta_data[\"gui\"][\"editor_gaphas\"]: # chain necessary to prevent key generation in meta data\n meta_data[\"income\"][\"gui\"][\"editor_gaphas\"] = meta_data[\"gui\"][\"editor_gaphas\"][\"income\"]\n del meta_data[\"gui\"][\"editor_gaphas\"][\"income\"]\n self._copy_element_meta_data_from_meta_file_data(meta_data, self.income, \"income\", \"\")"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _copy_element_meta_data_from_meta_file_data(meta_data, element_m, element_name, element_id):\n meta_data_element_id = element_name + str(element_id)\n meta_data_element = meta_data[meta_data_element_id]\n # print(meta_data_element_id, element_m, meta_data_element)\n element_m.meta = meta_data_element\n del meta_data[meta_data_element_id]", "response": "Helper method to copy the meta data of the given element from the given meta data dictionary."} {"SOURCE": "codesearchnet", "instruction": 
"Write a Python 3 script for\ngenerating meta data for state elements and add it to the given meta data dictionary.", "response": "def _generate_element_meta_data(self, meta_data):\n \"\"\"Generate meta data for state elements and add it to the given dictionary\n\n This method retrieves the meta data of the state elements (data ports, outcomes, etc) and adds it to the\n given meta data dictionary.\n\n :param meta_data: Dictionary of meta data\n \"\"\"\n for data_port_m in self.input_data_ports:\n self._copy_element_meta_data_to_meta_file_data(meta_data, data_port_m, \"input_data_port\",\n data_port_m.data_port.data_port_id)\n for data_port_m in self.output_data_ports:\n self._copy_element_meta_data_to_meta_file_data(meta_data, data_port_m, \"output_data_port\",\n data_port_m.data_port.data_port_id)\n for outcome_m in self.outcomes:\n self._copy_element_meta_data_to_meta_file_data(meta_data, outcome_m, \"outcome\",\n outcome_m.outcome.outcome_id)\n\n self._copy_element_meta_data_to_meta_file_data(meta_data, self.income, \"income\", \"\")"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef save_file_data(self, path):\n sm = self.model.state.get_state_machine()\n if sm.marked_dirty and not self.saved_initial:\n try:\n # Save the file before opening it to update the applied changes. 
Use option create_full_path=True\n # to assure that temporary state_machines' script files are saved to\n # (their path doesnt exist when not saved)\n filesystem.write_file(os.path.join(path, storage.SCRIPT_FILE),\n self.view.get_text(), create_full_path=True)\n except IOError as e:\n # Only happens if the file doesnt exist yet and would be written to the temp folder.\n # The method write_file doesnt create the path\n logger.error('The operating system raised an error: {}'.format(e))\n self.saved_initial = True", "response": "Saves the file data to the given path."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nload the script file and sets the script text.", "response": "def load_and_set_file_content(self, file_system_path):\n \"\"\" Implements the abstract method of the ExternalEditor class.\n \"\"\"\n content = filesystem.read_file(file_system_path, storage.SCRIPT_FILE)\n if content is not None:\n self.set_script_text(content)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ntrigger when the Apply button in source editor is clicked.", "response": "def apply_clicked(self, button):\n \"\"\"Triggered when the Apply button in the source editor is clicked.\n\n \"\"\"\n if isinstance(self.model.state, LibraryState):\n logger.warning(\"It is not allowed to modify libraries.\")\n self.view.set_text(\"\")\n return\n\n # Ugly workaround to give user at least some feedback about the parser\n # Without the loop, this function would block the GTK main loop and the log message would appear after the\n # function has finished\n # TODO: run parser in separate thread\n while Gtk.events_pending():\n Gtk.main_iteration_do(False)\n\n # get script\n current_text = self.view.get_text()\n\n # Directly apply script if linter was deactivated\n if not self.view['pylint_check_button'].get_active():\n self.set_script_text(current_text)\n return\n\n logger.debug(\"Parsing execute script...\")\n with open(self.tmp_file, 
\"w\") as text_file:\n text_file.write(current_text)\n\n # clear astroid module cache, see http://stackoverflow.com/questions/22241435/pylint-discard-cached-file-state\n MANAGER.astroid_cache.clear()\n lint_config_file = resource_filename(rafcon.__name__, \"pylintrc\")\n args = [\"--rcfile={}\".format(lint_config_file)] # put your own here\n with contextlib.closing(StringIO()) as dummy_buffer:\n json_report = JSONReporter(dummy_buffer.getvalue())\n try:\n lint.Run([self.tmp_file] + args, reporter=json_report, exit=False)\n except:\n logger.exception(\"Could not run linter to check script\")\n os.remove(self.tmp_file)\n\n if json_report.messages:\n def on_message_dialog_response_signal(widget, response_id):\n if response_id == 1:\n self.set_script_text(current_text)\n else:\n logger.debug(\"The script was not saved\")\n widget.destroy()\n\n message_string = \"Are you sure that you want to save this file?\\n\\nThe following errors were found:\"\n\n line = None\n for message in json_report.messages:\n (error_string, line) = self.format_error_string(message)\n message_string += \"\\n\\n\" + error_string\n\n # focus line of error\n if line:\n tbuffer = self.view.get_buffer()\n start_iter = tbuffer.get_start_iter()\n start_iter.set_line(int(line)-1)\n tbuffer.place_cursor(start_iter)\n message_string += \"\\n\\nThe line was focused in the source editor.\"\n self.view.scroll_to_cursor_onscreen()\n\n # select state to show source editor\n sm_m = state_machine_manager_model.get_state_machine_model(self.model)\n if sm_m.selection.get_selected_state() is not self.model:\n sm_m.selection.set(self.model)\n\n dialog = RAFCONButtonDialog(message_string, [\"Save with errors\", \"Do not save\"],\n on_message_dialog_response_signal,\n message_type=Gtk.MessageType.WARNING, parent=self.get_root_window())\n result = dialog.run()\n else:\n self.set_script_text(current_text)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef 
split_text(text_to_split):\n assert isinstance(text_to_split, string_types)\n try:\n time, rest = text_to_split.split(': ', 1)\n source, message = rest.split(':', 1)\n except ValueError:\n time = source = \"\"\n message = text_to_split\n return time.strip(), source.strip(), message.strip()", "response": "Split the debug text into two parts Time Log Name Debug message"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nupdates auto scroll mode.", "response": "def update_auto_scroll_mode(self):\n \"\"\" Register or un-register signals for follow mode \"\"\"\n if self._enables['CONSOLE_FOLLOW_LOGGING']:\n if self._auto_scroll_handler_id is None:\n self._auto_scroll_handler_id = self.text_view.connect(\"size-allocate\", self._auto_scroll)\n else:\n if self._auto_scroll_handler_id is not None:\n self.text_view.disconnect(self._auto_scroll_handler_id)\n self._auto_scroll_handler_id = None"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nscroll to the end of the text view", "response": "def _auto_scroll(self, *args):\n \"\"\" Scroll to the end of the text view \"\"\"\n adj = self['scrollable'].get_vadjustment()\n adj.set_value(adj.get_upper() - adj.get_page_size())"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_line_number_next_to_cursor_with_string_within(self, s):\n line_number, _ = self.get_cursor_position()\n text_buffer = self.text_view.get_buffer()\n line_iter = text_buffer.get_iter_at_line(line_number)\n\n # find closest before line with string within\n before_line_number = None\n while line_iter.backward_line():\n if s in self.get_text_of_line(line_iter):\n before_line_number = line_iter.get_line()\n break\n\n # find closest after line with string within\n after_line_number = None\n while line_iter.forward_line():\n if s in self.get_text_of_line(line_iter):\n after_line_number = line_iter.get_line()\n break\n\n # take closest one to current 
position\n if after_line_number is not None and before_line_number is None:\n return after_line_number, after_line_number - line_number\n elif before_line_number is not None and after_line_number is None:\n return before_line_number, line_number - before_line_number\n elif after_line_number is not None and before_line_number is not None:\n after_distance = after_line_number - line_number\n before_distance = line_number - before_line_number\n if after_distance < before_distance:\n return after_line_number, after_distance\n else:\n return before_line_number, before_distance\n else:\n return None, None", "response": "Find the line number next occurrence of a string with respect to the cursor position."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets a global variable in the local variable cache", "response": "def set_variable(self, key, value, per_reference=False, access_key=None, data_type=None):\n \"\"\"Sets a global variable\n\n :param key: the key of the global variable to be set\n :param value: the new value of the global variable\n :param per_reference: a flag to decide if the variable should be stored per reference or per value\n :param access_key: if the variable was explicitly locked with the rafcon.state lock_variable\n :raises exceptions.RuntimeError: if a wrong access key is passed\n \"\"\"\n key = str(key) # Ensure that we have the same string type for all keys (under Python2 and 3!)\n if self.variable_exist(key):\n if data_type is None:\n data_type = self.__global_variable_type_dictionary[key]\n else:\n if data_type is None:\n data_type = type(None)\n assert isinstance(data_type, type)\n self.check_value_and_type(value, data_type)\n\n with self.__global_lock:\n unlock = True\n if self.variable_exist(key):\n if self.is_locked(key) and self.__access_keys[key] != access_key:\n raise RuntimeError(\"Wrong access key for accessing global variable\")\n elif self.is_locked(key):\n unlock = False\n else:\n 
access_key = self.lock_variable(key, block=True)\n else:\n self.__variable_locks[key] = Lock()\n access_key = self.lock_variable(key, block=True)\n\n # --- variable locked\n if per_reference:\n self.__global_variable_dictionary[key] = value\n self.__global_variable_type_dictionary[key] = data_type\n self.__variable_references[key] = True\n else:\n self.__global_variable_dictionary[key] = copy.deepcopy(value)\n self.__global_variable_type_dictionary[key] = data_type\n self.__variable_references[key] = False\n # --- release variable\n\n if unlock:\n self.unlock_variable(key, access_key)\n\n logger.debug(\"Global variable '{}' was set to value '{}' with type '{}'\".format(key, value, data_type.__name__))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_variable(self, key, per_reference=None, access_key=None, default=None):\n key = str(key)\n if self.variable_exist(key):\n unlock = True\n if self.is_locked(key):\n if self.__access_keys[key] == access_key:\n unlock = False\n else:\n if not access_key:\n access_key = self.lock_variable(key, block=True)\n else:\n raise RuntimeError(\"Wrong access key for accessing global variable\")\n else:\n access_key = self.lock_variable(key, block=True)\n\n # --- variable locked\n if self.variable_can_be_referenced(key):\n if per_reference or per_reference is None:\n return_value = self.__global_variable_dictionary[key]\n else:\n return_value = copy.deepcopy(self.__global_variable_dictionary[key])\n else:\n if per_reference:\n self.unlock_variable(key, access_key)\n raise RuntimeError(\"Variable cannot be accessed by reference\")\n else:\n return_value = copy.deepcopy(self.__global_variable_dictionary[key])\n # --- release variable\n\n if unlock:\n self.unlock_variable(key, access_key)\n return return_value\n else:\n # logger.warning(\"Global variable '{0}' not existing, returning default value\".format(key))\n return default", "response": "Fetches the value of a global 
variable in the local variable cache."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nchecks whether the value of the variable can be returned by reference by reference", "response": "def variable_can_be_referenced(self, key):\n \"\"\"Checks whether the value of the variable can be returned by reference\n\n :param str key: Name of the variable\n :return: True if value of variable can be returned by reference, False else\n \"\"\"\n key = str(key)\n return key in self.__variable_references and self.__variable_references[key]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef delete_variable(self, key):\n key = str(key)\n if self.is_locked(key):\n raise RuntimeError(\"Global variable is locked\")\n\n with self.__global_lock:\n if key in self.__global_variable_dictionary:\n access_key = self.lock_variable(key, block=True)\n del self.__global_variable_dictionary[key]\n self.unlock_variable(key, access_key)\n del self.__variable_locks[key]\n del self.__variable_references[key]\n else:\n raise AttributeError(\"Global variable %s does not exist!\" % str(key))\n\n logger.debug(\"Global variable %s was deleted!\" % str(key))", "response": "Deletes a global variable in the local variable cache."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nlocking a global variable in blocking mode", "response": "def lock_variable(self, key, block=False):\n \"\"\"Locks a global variable\n\n :param key: the key of the global variable to be locked\n :param block: a flag to specify if to wait for locking the variable in blocking mode\n \"\"\"\n key = str(key)\n # watch out for releasing the __dictionary_lock properly\n try:\n if key in self.__variable_locks:\n # acquire without arguments is blocking\n lock_successful = self.__variable_locks[key].acquire(False)\n if lock_successful or block:\n if (not lock_successful) and block: # case: lock could not be acquired => 
wait for it as block=True\n duration = 0.\n loop_time = 0.1\n while not self.__variable_locks[key].acquire(False):\n time.sleep(loop_time)\n duration += loop_time\n if int(duration*10) % 20 == 0:\n # while loops informs the user about long locked variables\n logger.verbose(\"Variable '{2}' is locked and thread {0} waits already {1} seconds to \"\n \"access it.\".format(currentThread(), duration, key))\n access_key = global_variable_id_generator()\n self.__access_keys[key] = access_key\n return access_key\n else:\n logger.warning(\"Global variable {} already locked\".format(str(key)))\n return False\n else:\n logger.error(\"Global variable key {} does not exist\".format(str(key)))\n return False\n except Exception as e:\n logger.error(\"Exception thrown: {}\".format(str(e)))\n return False"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef unlock_variable(self, key, access_key, force=False):\n key = str(key)\n if self.__access_keys[key] == access_key or force:\n if key in self.__variable_locks:\n if self.is_locked(key):\n self.__variable_locks[key].release()\n return True\n else:\n logger.error(\"Global variable {} is not locked, thus cannot unlock it\".format(str(key)))\n return False\n else:\n raise AttributeError(\"Global variable %s does not exist!\" % str(key))\n else:\n raise RuntimeError(\"Wrong access key for accessing global variable\")", "response": "Unlocks a global variable with the given key."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_locked_variable(self, key, access_key, value):\n return self.set_variable(key, value, per_reference=False, access_key=access_key)", "response": "Set an already locked global variable"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the value of a global variable that is already locked by the given key", "response": "def get_locked_variable(self, key, 
access_key):\n \"\"\"Returns the value of an global variable that is already locked\n\n :param key: the key of the global variable\n :param access_key: the access_key to the global variable that is already locked\n \"\"\"\n return self.get_variable(key, per_reference=False, access_key=access_key)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef is_locked(self, key):\n key = str(key)\n if key in self.__variable_locks:\n return self.__variable_locks[key].locked()\n return False", "response": "Returns the status of the lock of a global variable"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_all_keys_starting_with(self, start_key):\n start_key = str(start_key)\n output_list = []\n if len(self.__global_variable_dictionary) == 0:\n return output_list\n for g_key in self.__global_variable_dictionary.keys():\n # string comparison\n if g_key and start_key in g_key:\n output_list.append(g_key)\n return output_list", "response": "Returns all keys which start with a certain pattern defined in start_key."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef global_variable_dictionary(self):\n dict_copy = {}\n for key, value in self.__global_variable_dictionary.items():\n if key in self.__variable_references and self.__variable_references[key]:\n dict_copy[key] = value\n else:\n dict_copy[key] = copy.deepcopy(value)\n\n return dict_copy", "response": "Property for the _global_variable_dictionary field"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nchecks if a given value is of a specific type", "response": "def check_value_and_type(value, data_type):\n \"\"\"Checks if a given value is of a specific type\n\n :param value: the value to check\n :param data_type: the type to be checked upon\n :return:\n \"\"\"\n if value is not 
None and data_type is not type(None):\n # if not isinstance(value, data_type):\n if not type_inherits_of_type(data_type, type(value)):\n raise TypeError(\n \"Value: '{0}' is not of data type: '{1}', value type: {2}\".format(value, data_type, type(value)))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef glue(self, pos):\n # Distance between border of rectangle and point\n # Equation from http://stackoverflow.com/a/18157551/3568069\n dx = max(self.point.x - self.width / 2. - pos[0], 0, pos[0] - (self.point.x + self.width / 2.))\n dy = max(self.point.y - self.height / 2. - pos[1], 0, pos[1] - (self.point.y + self.height / 2.))\n dist = sqrt(dx*dx + dy*dy)\n return self.point, dist", "response": "Calculates the distance between the given position and the port\n "} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef format_time(time):\n h, r = divmod(time / 1000, 3600)\n m, s = divmod(r, 60)\n\n return \"%02d:%02d:%02d\" % (h, m, s)", "response": "Formats the given time into HHMMSS format."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef __destroy(self):\n if self.parent:\n self.parent.remove_controller(self)\n else:\n self.destroy()", "response": "Remove controller from parent controller and destroy it self."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef register_view(self, view):\n super(PreferencesWindowController, self).register_view(view)\n self.view['add_library_button'].connect('clicked', self._on_add_library)\n self.view[\"remove_library_button\"].connect('clicked', self._on_remove_library)\n\n self.view['config_tree_view'].connect(\"button_press_event\", self._on_row_clicked_trigger_toggle_of_boolean,\n self.core_config_model, self.core_list_store)\n self.view['gui_tree_view'].connect(\"button_press_event\", self._on_row_clicked_trigger_toggle_of_boolean,\n 
self.gui_config_model, self.gui_list_store)\n\n self.view['core_config_value_renderer'].set_property('editable', True)\n self.view['core_config_value_renderer'].connect('edited', self._on_config_value_changed, self.core_config_model,\n self.core_list_store)\n\n self.view['gui_config_value_renderer'].set_property('editable', True)\n self.view['gui_config_value_renderer'].connect('edited', self._on_config_value_changed, self.gui_config_model,\n self.gui_list_store)\n\n self.view['shortcut_config_value_renderer'].set_property('editable', True)\n self.view['shortcut_config_value_renderer'].connect('edited', self._on_shortcut_changed)\n\n self.view['library_config_key_renderer'].set_property('editable', True)\n self.view['library_config_key_renderer'].connect('edited', self._on_library_name_changed)\n\n self.view['library_config_value_renderer'].set_property('editable', True)\n self.view['library_config_value_renderer'].connect('edited', self._on_library_path_changed)\n\n self.view['config_tree_view'].set_model(self.core_list_store)\n self.view['library_tree_view'].set_model(self.library_list_store)\n self.view['gui_tree_view'].set_model(self.gui_list_store)\n self.view['shortcut_tree_view'].set_model(self.shortcut_list_store)\n\n self.view['apply_button'].connect(\"clicked\", self._on_apply_button_clicked)\n self.view['ok_button'].connect('clicked', self._on_ok_button_clicked)\n self.view['cancel_button'].connect('clicked', self._on_cancel_button_clicked)\n\n self.view['import_button'].connect(\"clicked\", self._on_import_config)\n self.view['export_button'].connect('clicked', self._on_export_config)\n\n self.view['preferences_window'].connect('delete_event', self._on_delete_event)\n\n self.update_all()", "response": "Called when the View was registered"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nhandling changes in config values", "response": "def _handle_config_update(self, config_m, config_key):\n \"\"\"Handles changes in config 
values\n\n The method ensure that the correct list stores are updated with the new values.\n\n\n :param ConfigModel config_m: The config model that has been changed\n :param config_key: The config key who's value has been changed\n :return:\n \"\"\"\n if config_key == \"LIBRARY_PATHS\":\n self.update_libraries_list_store()\n if config_key == \"SHORTCUTS\":\n self.update_shortcut_settings()\n else:\n self.update_config_value(config_m, config_key)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef update_all(self):\n self.update_path_labels()\n self.update_core_config_list_store()\n self.update_gui_config_list_store()\n self.update_libraries_list_store()\n self.update_shortcut_settings()\n self.check_for_preliminary_config()", "response": "Shorthand method to update all collection information"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef check_for_preliminary_config(self):\n if any([self.model.preliminary_config, self.gui_config_model.preliminary_config]):\n self.view['apply_button'].set_sensitive(True)\n else:\n self.view['apply_button'].set_sensitive(False)", "response": "Activates the Apply button if there are preliminary changes\n "} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates labels showing config paths", "response": "def update_path_labels(self):\n \"\"\"Update labels showing config paths\n \"\"\"\n self.view['core_label'].set_text(\"Core Config Path: \" + str(self.core_config_model.config.config_file_path))\n self.view['gui_label'].set_text(\"GUI Config Path: \" + str(self.gui_config_model.config.config_file_path))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update_config_value(self, config_m, config_key):\n config_value = config_m.get_current_config_value(config_key)\n if config_m is self.core_config_model:\n 
list_store = self.core_list_store\n elif config_m is self.gui_config_model:\n list_store = self.gui_list_store\n else:\n return\n self._update_list_store_entry(list_store, config_key, config_value)", "response": "Updates the corresponding list store entry for a changed config value"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _update_list_store_entry(self, list_store, config_key, config_value):\n for row_num, row in enumerate(list_store):\n if row[self.KEY_STORAGE_ID] == config_key:\n row[self.VALUE_STORAGE_ID] = str(config_value)\n row[self.TOGGLE_VALUE_STORAGE_ID] = config_value\n return row_num", "response": "Helper method to update a list store entry"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _update_list_store(config_m, list_store, ignore_keys=None):\n ignore_keys = [] if ignore_keys is None else ignore_keys\n list_store.clear()\n for config_key in sorted(config_m.config.keys):\n if config_key in ignore_keys:\n continue\n config_value = config_m.get_current_config_value(config_key)\n # (config_key, text, text_visible, toggle_activatable, toggle_visible, text_editable, toggle_state)\n if isinstance(config_value, bool):\n list_store.append((str(config_key), str(config_value), False, True, True, False, config_value))\n else:\n list_store.append((str(config_key), str(config_value), True, False, False, True, config_value))", "response": "Generic method to update list store for a given config model"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef update_libraries_list_store(self):\n self.library_list_store.clear()\n libraries = self.core_config_model.get_current_config_value(\"LIBRARY_PATHS\", use_preliminary=True, default={})\n library_names = sorted(libraries.keys())\n for library_name in library_names:\n library_path = libraries[library_name]\n 
self.library_list_store.append((library_name, library_path))", "response": "Updates the list store for the libraries that are currently loaded."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the list store for the shortcuts", "response": "def update_shortcut_settings(self):\n \"\"\"Creates the list store for the shortcuts\n \"\"\"\n self.shortcut_list_store.clear()\n shortcuts = self.gui_config_model.get_current_config_value(\"SHORTCUTS\", use_preliminary=True, default={})\n actions = sorted(shortcuts.keys())\n for action in actions:\n keys = shortcuts[action]\n self.shortcut_list_store.append((str(action), str(keys)))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _select_row_by_column_value(tree_view, list_store, column, value):\n for row_num, iter_elem in enumerate(list_store):\n if iter_elem[column] == value:\n tree_view.set_cursor(row_num)\n return row_num", "response": "Helper method to select a tree view row by column value"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _on_add_library(self, *event):\n self.view['library_tree_view'].grab_focus()\n if react_to_event(self.view, self.view['library_tree_view'], event):\n temp_library_name = \"\" % self._lib_counter\n self._lib_counter += 1\n library_config = self.core_config_model.get_current_config_value(\"LIBRARY_PATHS\", use_preliminary=True,\n default={})\n library_config[temp_library_name] = \"\"\n self.core_config_model.set_preliminary_config_value(\"LIBRARY_PATHS\", library_config)\n self._select_row_by_column_value(self.view['library_tree_view'], self.library_list_store,\n self.KEY_STORAGE_ID, temp_library_name)\n return True", "response": "Callback method handling the addition of a new library"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef 
_on_remove_library(self, *event):\n self.view['library_tree_view'].grab_focus()\n if react_to_event(self.view, self.view['library_tree_view'], event):\n path = self.view[\"library_tree_view\"].get_cursor()[0]\n if path is not None:\n library_name = self.library_list_store[int(path[0])][0]\n library_config = self.core_config_model.get_current_config_value(\"LIBRARY_PATHS\", use_preliminary=True,\n default={})\n del library_config[library_name]\n self.core_config_model.set_preliminary_config_value(\"LIBRARY_PATHS\", library_config)\n if len(self.library_list_store) > 0:\n self.view['library_tree_view'].set_cursor(min(path[0], len(self.library_list_store) - 1))\n return True", "response": "Callback method handling the removal of an existing library"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _on_checkbox_toggled(self, renderer, path, config_m, config_list_store):\n config_key = config_list_store[int(path)][self.KEY_STORAGE_ID]\n config_value = bool(config_list_store[int(path)][self.TOGGLE_VALUE_STORAGE_ID])\n config_value ^= True\n config_m.set_preliminary_config_value(config_key, config_value)", "response": "Callback method that is called when a checkbox is toggled"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\napplies the configurations and closes the window", "response": "def _on_ok_button_clicked(self, *args):\n \"\"\"OK button clicked: Applies the configurations and closes the window\n \"\"\"\n if self.core_config_model.preliminary_config or self.gui_config_model.preliminary_config:\n self._on_apply_button_clicked()\n self.__destroy()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndismiss preliminary config and close the window", "response": "def _on_cancel_button_clicked(self, *args):\n \"\"\"Cancel button clicked: Dismiss preliminary config and close the window\n \"\"\"\n self.core_config_model.preliminary_config.clear()\n 
self.gui_config_model.preliminary_config.clear()\n self.__destroy()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _on_apply_button_clicked(self, *args):\n refresh_required = self.core_config_model.apply_preliminary_config()\n refresh_required |= self.gui_config_model.apply_preliminary_config()\n\n if not self.gui_config_model.config.get_config_value(\"SESSION_RESTORE_ENABLED\"):\n import rafcon.gui.backup.session as backup_session\n logger.info(\"Removing current session\")\n backup_session.reset_session()\n if refresh_required:\n from rafcon.gui.singleton import main_window_controller\n main_window_controller.get_controller('menu_bar_controller').on_refresh_all_activate(None, None)\n self._popup_message()", "response": "Apply button clicked: Apply the configuration"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _on_shortcut_changed(self, renderer, path, new_shortcuts):\n action = self.shortcut_list_store[int(path)][self.KEY_STORAGE_ID]\n old_shortcuts = self.gui_config_model.get_current_config_value(\"SHORTCUTS\", use_preliminary=True)[action]\n\n from ast import literal_eval\n try:\n new_shortcuts = literal_eval(new_shortcuts)\n if not isinstance(new_shortcuts, list) and \\\n not all([isinstance(shortcut, string_types) for shortcut in new_shortcuts]):\n raise ValueError()\n except (ValueError, SyntaxError):\n logger.warning(\"Shortcuts must be a list of strings\")\n new_shortcuts = old_shortcuts\n\n shortcuts = self.gui_config_model.get_current_config_value(\"SHORTCUTS\", use_preliminary=True, default={})\n shortcuts[action] = new_shortcuts\n self.gui_config_model.set_preliminary_config_value(\"SHORTCUTS\", shortcuts)\n self._select_row_by_column_value(self.view['shortcut_tree_view'], self.shortcut_list_store,\n self.KEY_STORAGE_ID, action)", "response": "Callback for when a new entry in the list store has changed"} {"SOURCE": 
"codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _on_config_value_changed(self, renderer, path, new_value, config_m, list_store):\n config_key = list_store[int(path)][self.KEY_STORAGE_ID]\n old_value = config_m.get_current_config_value(config_key, use_preliminary=True)\n\n if old_value == new_value:\n return\n\n # Try to maintain the correct data type, which is extracted from the old value\n if isinstance(old_value, bool):\n if new_value in [\"True\", \"true\"]:\n new_value = True\n elif new_value in [\"False\", \"false\"]:\n new_value = False\n else:\n logger.warning(\"'{}' must be a boolean value\".format(config_key))\n new_value = old_value\n elif isinstance(old_value, (int, float)):\n try:\n new_value = int(new_value)\n except ValueError:\n try:\n new_value = float(new_value)\n except ValueError:\n logger.warning(\"'{}' must be a numeric value\".format(config_key))\n new_value = old_value\n\n config_m.set_preliminary_config_value(config_key, new_value)", "response": "Callback for when a config value is changed"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _config_chooser_dialog(self, title_text, description):\n dialog = Gtk.Dialog(title_text, self.view[\"preferences_window\"],\n flags=0, buttons=\n (Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT,\n Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT))\n label = Gtk.Label(label=description)\n label.set_padding(xpad=10, ypad=10)\n dialog.vbox.pack_start(label, True, True, 0)\n label.show()\n self._gui_checkbox = Gtk.CheckButton(label=\"GUI Config\")\n dialog.vbox.pack_start(self._gui_checkbox, True, True, 0)\n self._gui_checkbox.show()\n self._core_checkbox = Gtk.CheckButton(label=\"Core Config\")\n self._core_checkbox.show()\n dialog.vbox.pack_start(self._core_checkbox, True, True, 0)\n response = dialog.run()\n dialog.destroy()\n return response", "response": "Function that opens a dialog to select which config shall be 
exported"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nchecking if an object is inside the values of a dict.", "response": "def check_if_dict_contains_object_reference_in_values(object_to_check, dict_to_check):\n \"\"\" Method to check if an object is inside the values of a dict.\n A simple object_to_check in dict_to_check.values() does not work as it uses the __eq__ function of the object\n and not the object reference.\n\n :param object_to_check: The target object.\n :param dict_to_check: The dict to search in.\n :return:\n \"\"\"\n for key, value in dict_to_check.items():\n if object_to_check is value:\n return True\n return False"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef update_data_flows(model, data_flow_dict, tree_dict_combos):\n data_flow_dict['internal'] = {}\n data_flow_dict['external'] = {}\n tree_dict_combos['internal'] = {}\n tree_dict_combos['external'] = {}\n\n # free input ports and scopes are real to_keys and real states\n [free_to_port_internal, from_ports_internal] = find_free_keys(model)\n [free_to_port_external, from_ports_external] = find_free_keys(model.parent)\n\n def take_from_dict(from_dict, key):\n if key in from_dict:\n return from_dict[key]\n else:\n logger.warning(\"Key '%s' is not in %s\" % (key, from_dict))\n pass\n\n # from_state, to_key, to_state, to_key, external\n if isinstance(model, ContainerStateModel):\n for data_flow in list(model.state.data_flows.values()): # model.data_flow_list_store:\n\n # TREE STORE LABEL\n # check if from Self_state\n if data_flow.from_state == model.state.state_id:\n from_state = model.state\n from_state_label = 'self.' + model.state.name + '.' + data_flow.from_state\n else:\n if take_from_dict(model.state.states, data_flow.from_state):\n from_state = take_from_dict(model.state.states, data_flow.from_state)\n from_state_label = from_state.name + '.' 
+ data_flow.from_state\n else:\n # print(data_flow.from_state, data_flow.from_key, data_flow.to_state, data_flow.to_key)\n logger.warning(\"DO break in ctrl/data_flow.py -1\")\n break\n # check if to Self_state\n if data_flow.to_state == model.state.state_id:\n to_state = model.state\n to_state_label = 'self.' + model.state.name + '.' + data_flow.to_state\n else:\n if take_from_dict(model.state.states, data_flow.to_state):\n to_state = take_from_dict(model.state.states, data_flow.to_state)\n to_state_label = to_state.name + '.' + data_flow.to_state\n else:\n # print(data_flow.from_state, data_flow.from_key, data_flow.to_state, data_flow.to_key)\n logger.warning(\"DO break in ctrl/data_flow.py 0\")\n break\n\n from_key_port = from_state.get_data_port_by_id(data_flow.from_key)\n from_key_label = ''\n if from_key_port is not None:\n from_key_label = PORT_TYPE_TAG.get(type(from_key_port), 'None') + '.' + \\\n from_key_port.data_type.__name__ + '.' + \\\n from_key_port.name\n\n to_key_port = to_state.get_data_port_by_id(data_flow.to_key)\n # to_key_label = ''\n if to_key_port is not None:\n to_key_label = PORT_TYPE_TAG.get(type(to_key_port), 'None') + '.' + \\\n (to_key_port.data_type.__name__ or 'None') + '.' + \\\n to_key_port.name\n data_flow_dict['internal'][data_flow.data_flow_id] = {'from_state': from_state_label,\n 'from_key': from_key_label,\n 'to_state': to_state_label,\n 'to_key': to_key_label}\n\n # ALL INTERNAL COMBOS\n from_states_store = Gtk.ListStore(GObject.TYPE_STRING)\n to_states_store = Gtk.ListStore(GObject.TYPE_STRING)\n if isinstance(model, ContainerStateModel):\n if model.state.state_id in free_to_port_internal or model.state.state_id == data_flow.to_state:\n to_states_store.append(['self.' + model.state.name + '.' + model.state.state_id])\n if model.state.state_id in from_ports_internal or model.state.state_id == data_flow.from_state:\n from_states_store.append(['self.' + model.state.name + '.' 
+ model.state.state_id])\n for state_model in list(model.states.values()):\n if state_model.state.state_id in free_to_port_internal or \\\n state_model.state.state_id == data_flow.to_state:\n to_states_store.append([state_model.state.name + '.' + state_model.state.state_id])\n if state_model.state.state_id in from_ports_internal or \\\n state_model.state.state_id == data_flow.from_state:\n from_states_store.append([state_model.state.name + '.' + state_model.state.state_id])\n\n from_keys_store = Gtk.ListStore(GObject.TYPE_STRING)\n if model.state.state_id == data_flow.from_state:\n # print(\"input_ports\", model.state.input_data_ports)\n # print(type(model))\n if isinstance(model, ContainerStateModel):\n # print(\"scoped_variables\", model.state.scoped_variables)\n combined_ports = {}\n combined_ports.update(model.state.scoped_variables)\n combined_ports.update(model.state.input_data_ports)\n get_key_combos(combined_ports, from_keys_store, data_flow.from_key)\n else:\n get_key_combos(model.state.input_data_ports, from_keys_store, data_flow.from_key)\n else:\n # print(\"output_ports\", model.states[data_flow.from_state].state.output_data_ports)\n get_key_combos(model.state.states[data_flow.from_state].output_data_ports,\n from_keys_store, data_flow.from_key)\n\n to_keys_store = Gtk.ListStore(GObject.TYPE_STRING)\n if model.state.state_id == data_flow.to_state:\n # print(\"output_ports\", model.state.output_data_ports)\n # print(type(model))\n if isinstance(model, ContainerStateModel):\n # print(\"scoped_variables\", model.state.scoped_variables)\n combined_ports = {}\n combined_ports.update(model.state.scoped_variables)\n combined_ports.update(model.state.output_data_ports)\n get_key_combos(combined_ports, to_keys_store, data_flow.to_key)\n else:\n get_key_combos(model.state.output_data_ports, to_keys_store, data_flow.to_key)\n else:\n # print(\"input_ports\", model.states[data_flow.to_state].state.input_data_ports)\n 
get_key_combos(model.state.states[data_flow.to_state].input_data_ports\n , to_keys_store, data_flow.to_key)\n tree_dict_combos['internal'][data_flow.data_flow_id] = {'from_state': from_states_store,\n 'from_key': from_keys_store,\n 'to_state': to_states_store,\n 'to_key': to_keys_store}\n # print(\"internal\", data_flow_dict['internal'][data_flow.data_flow_id])\n\n if not model.state.is_root_state:\n for data_flow in list(model.parent.state.data_flows.values()): # model.parent.data_flow_list_store:\n\n # TREE STORE LABEL\n # check if from Self_state\n if model.state.state_id == data_flow.from_state:\n from_state = model.state\n from_state_label = 'self.' + model.state.name + '.' + data_flow.from_state\n else:\n if model.parent.state.state_id == data_flow.from_state:\n from_state = model.parent.state\n from_state_label = 'parent.' + model.parent.state.name + '.' + data_flow.from_state\n else:\n if take_from_dict(model.parent.state.states, data_flow.from_state):\n from_state = take_from_dict(model.parent.state.states, data_flow.from_state)\n from_state_label = from_state.name + '.' + data_flow.from_state\n else:\n # print(\"#\", data_flow.from_state, data_flow.from_key, data_flow.to_state, data_flow.to_key)\n logger.warning(\"DO break in ctrl/data_flow.py 1\")\n break\n\n # check if to Self_state\n if model.state.state_id == data_flow.to_state:\n to_state = model.state\n to_state_label = 'self.' + model.state.name + '.' + data_flow.to_state\n else:\n if model.parent.state.state_id == data_flow.to_state:\n to_state = model.parent.state\n to_state_label = 'parent.' + model.parent.state.name + '.' + data_flow.to_state\n else:\n if take_from_dict(model.parent.state.states, data_flow.to_state):\n to_state = take_from_dict(model.parent.state.states, data_flow.to_state)\n to_state_label = to_state.name + '.' 
+ data_flow.to_state\n else:\n # print(\"##\", data_flow.from_state, data_flow.from_key, data_flow.to_state, data_flow.to_key)\n logger.warning(\"DO break in ctrl/data_flow.py 2\")\n break\n if model.state.state_id in [data_flow.from_state, data_flow.to_state]:\n from_key_port = from_state.get_data_port_by_id(data_flow.from_key)\n if from_key_port is None:\n continue\n from_key_label = PORT_TYPE_TAG.get(type(from_key_port), 'None') + '.' + \\\n from_key_port.data_type.__name__ + '.' + \\\n from_key_port.name\n to_key_port = to_state.get_data_port_by_id(data_flow.to_key)\n if to_key_port is None:\n continue\n to_key_label = PORT_TYPE_TAG.get(type(to_key_port), 'None') + '.' + \\\n to_key_port.data_type.__name__ + '.' + \\\n to_key_port.name\n data_flow_dict['external'][data_flow.data_flow_id] = {'from_state': from_state_label,\n 'from_key': from_key_label,\n 'to_state': to_state_label,\n 'to_key': to_key_label}\n\n # ALL EXTERNAL COMBOS\n if model.state.state_id in [data_flow.from_state, data_flow.to_state]:\n\n # only self-state\n from_states_store = Gtk.ListStore(GObject.TYPE_STRING)\n for state_id in list(from_ports_external.keys()):\n if model.parent.state.state_id == state_id:\n state_model = model.parent\n else:\n state_model = model.parent.states[state_id]\n if state_model.state.state_id == model.state.state_id:\n from_states_store.append(['self.' + state_model.state.name + '.' + state_model.state.state_id])\n else:\n from_states_store.append([state_model.state.name + '.' + state_model.state.state_id])\n # from_states_store.append(['self.' + model.state.name + '.' 
+ model.state.state_id])\n\n # only outports of self\n from_keys_store = Gtk.ListStore(GObject.TYPE_STRING)\n if model.parent.state.state_id == data_flow.from_state:\n # print(\"output_ports\", model.parent.states[data_flow.from_state].state.output_data_ports)\n combined_ports = {}\n combined_ports.update(model.parent.state.input_data_ports)\n combined_ports.update(model.parent.state.scoped_variables)\n get_key_combos(combined_ports, from_keys_store, data_flow.to_key)\n elif data_flow.from_state in [state_m.state.state_id for state_m in list(model.parent.states.values())]:\n get_key_combos(model.parent.state.states[data_flow.from_state].output_data_ports,\n from_keys_store, data_flow.to_key)\n else:\n logger.error(\n \"---------------- FAILURE %s ------------- external from_state PARENT or STATES\" % model.state.state_id)\n\n # all states and parent-state\n to_states_store = Gtk.ListStore(GObject.TYPE_STRING)\n for state_id in list(free_to_port_external.keys()):\n if model.parent.state.state_id == state_id:\n state_model = model.parent\n else:\n state_model = model.parent.states[state_id]\n if state_model.state.state_id == model.state.state_id:\n to_states_store.append(['self.' + state_model.state.name + '.' + state_model.state.state_id])\n else:\n to_states_store.append([state_model.state.name + '.' + state_model.state.state_id])\n\n # all keys of actual to-state\n to_keys_store = Gtk.ListStore(GObject.TYPE_STRING)\n if get_state_model(model.parent, data_flow.to_state):\n to_state_model = get_state_model(model.parent, data_flow.to_state)\n from_state_model = get_state_model(model.parent, data_flow.to_state)\n act_from_key_port = from_state_model.state.get_data_port_by_id(data_flow.from_key)\n act_to_key_port = to_state_model.state.get_data_port_by_id(data_flow.to_key)\n\n # first actual port\n to_keys_store.append([PORT_TYPE_TAG.get(type(act_to_key_port), 'None') + '.#' +\n str(act_to_key_port.data_port_id) + '.' +\n act_to_key_port.data_type.__name__ + '.' 
+\n act_to_key_port.name])\n\n # second all other possible ones\n if to_state_model.state is model.state.parent:\n possible_port_ids = list(to_state_model.state.output_data_ports.keys()) + list(to_state_model.state.scoped_variables.keys())\n else:\n possible_port_ids = list(to_state_model.state.input_data_ports.keys())\n for port_id in possible_port_ids:\n port = to_state_model.state.get_data_port_by_id(port_id)\n # to_state = get_state_model(model.parent, data_flow.to_state).state\n if not (PORT_TYPE_TAG.get(type(act_from_key_port), 'None') == 'SV' and port.data_port_id == data_flow.from_key)\\\n and port is not act_to_key_port:\n to_keys_store.append([PORT_TYPE_TAG.get(type(port), 'None') + '.#' +\n str(port.data_port_id) + '.' +\n port.data_type.__name__ + '.' +\n port.name])\n\n tree_dict_combos['external'][data_flow.data_flow_id] = {'from_state': from_states_store,\n 'from_key': from_keys_store,\n 'to_state': to_states_store,\n 'to_key': to_keys_store}\n # print(\"external\", data_flow_dict['external'][data_flow.data_flow_id])\n\n # print(\"ALL SCANNED: \", data_flow_dict['internal'].keys(), data_flow_dict['external'].keys(), \\)\n # tree_dict_combos['internal'].keys(), tree_dict_combos['external'].keys()\n return free_to_port_internal, free_to_port_external, from_ports_internal, from_ports_external", "response": "Updates the data flow dictionary and combo dictionary of the widget according handed model."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef register_view(self, view):\n super(StateDataFlowsListController, self).register_view(view)\n\n def cell_text(column, cell_renderer, model, iter, container_model):\n df_id = model.get_value(iter, self.ID_STORAGE_ID)\n in_external = 'external' if model.get_value(iter, self.IS_EXTERNAL_STORAGE_ID) else 'internal'\n\n if column.get_title() == 'Source State':\n cell_renderer.set_property(\"model\", self.tree_dict_combos[in_external][df_id]['from_state'])\n 
cell_renderer.set_property(\"text-column\", 0)\n cell_renderer.set_property(\"has-entry\", False)\n elif column.get_title() == 'Source Port':\n cell_renderer.set_property(\"model\", self.tree_dict_combos[in_external][df_id]['from_key'])\n cell_renderer.set_property(\"text-column\", 0)\n cell_renderer.set_property(\"has-entry\", False)\n elif column.get_title() == 'Target State':\n cell_renderer.set_property(\"model\", self.tree_dict_combos[in_external][df_id]['to_state'])\n cell_renderer.set_property(\"text-column\", 0)\n cell_renderer.set_property(\"has-entry\", False)\n elif column.get_title() == 'Target Port':\n cell_renderer.set_property(\"model\", self.tree_dict_combos[in_external][df_id]['to_key'])\n cell_renderer.set_property(\"text-column\", 0)\n cell_renderer.set_property(\"has-entry\", False)\n else:\n logger.warning(\"Column has no cell_data_func %s %s\" % (column.get_name(), column.get_title()))\n\n view['from_state_col'].set_cell_data_func(view['from_state_combo'], cell_text, self.model)\n view['to_state_col'].set_cell_data_func(view['to_state_combo'], cell_text, self.model)\n view['from_key_col'].set_cell_data_func(view['from_key_combo'], cell_text, self.model)\n view['to_key_col'].set_cell_data_func(view['to_key_combo'], cell_text, self.model)\n\n if self.model.state.get_next_upper_library_root_state():\n view['from_state_combo'].set_property(\"editable\", False)\n view['from_key_combo'].set_property(\"editable\", False)\n view['to_state_combo'].set_property(\"editable\", False)\n view['to_key_combo'].set_property(\"editable\", False)\n else:\n self.connect_signal(view['from_state_combo'], \"edited\", self.on_combo_changed_from_state)\n self.connect_signal(view['from_key_combo'], \"edited\", self.on_combo_changed_from_key)\n self.connect_signal(view['to_state_combo'], \"edited\", self.on_combo_changed_to_state)\n self.connect_signal(view['to_key_combo'], \"edited\", self.on_combo_changed_to_key)\n\n self.tree_view.connect(\"grab-focus\", 
self.on_focus)\n self.update(initiator='\"register view\"')", "response": "Called when the View was registered"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nremove respective core element of handed data flow model", "response": "def remove_core_element(self, model):\n \"\"\"Remove respective core element of handed data flow model\n\n :param DataFlowModel model: Data Flow model which core element should be removed\n :return:\n \"\"\"\n assert model.data_flow.parent is self.model.state or model.data_flow.parent is self.model.parent.state\n gui_helper_state_machine.delete_core_element_of_model(model)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncalling when the view was registered", "response": "def register_view(self, view):\n \"\"\"Called when the View was registered\n\n Can be used e.g. to connect signals. Here, the destroy signal is connected to close the application\n \"\"\"\n super(StateDataFlowsEditorController, self).register_view(view)\n view['add_d_button'].connect('clicked', self.df_list_ctrl.on_add)\n view['remove_d_button'].connect('clicked', self.df_list_ctrl.on_remove)\n view['connected_to_d_checkbutton'].connect('toggled', self.toggled_button, 'data_flows_external')\n view['internal_d_checkbutton'].connect('toggled', self.toggled_button, 'data_flows_internal')\n\n if isinstance(self.model.state, LibraryState):\n view['internal_d_checkbutton'].set_sensitive(False)\n view['internal_d_checkbutton'].set_active(False)\n\n if self.model.parent is not None and isinstance(self.model.parent.state, LibraryState) or \\\n self.model.state.get_next_upper_library_root_state():\n view['add_d_button'].set_sensitive(False)\n view['remove_d_button'].set_sensitive(False)\n\n if self.model.state.is_root_state:\n self.df_list_ctrl.view_dict['data_flows_external'] = False\n view['connected_to_d_checkbutton'].set_active(False)\n\n if not isinstance(self.model, ContainerStateModel):\n 
self.df_list_ctrl.view_dict['data_flows_internal'] = False\n view['internal_d_checkbutton'].set_active(False)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nstarts the execution of the root state.", "response": "def start(self):\n \"\"\"Starts the execution of the root state.\n \"\"\"\n # load default input data for the state\n self._root_state.input_data = self._root_state.get_default_input_values_for_state(self._root_state)\n self._root_state.output_data = self._root_state.create_output_dictionary_for_state(self._root_state)\n new_execution_history = self._add_new_execution_history()\n new_execution_history.push_state_machine_start_history_item(self, run_id_generator())\n self._root_state.start(new_execution_history)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef join(self):\n self._root_state.join()\n # execution finished, close execution history log file (if present)\n if len(self._execution_histories) > 0:\n if self._execution_histories[-1].execution_history_storage is not None:\n set_read_and_writable_for_all = global_config.get_config_value(\"EXECUTION_LOG_SET_READ_AND_WRITABLE_FOR_ALL\", False)\n self._execution_histories[-1].execution_history_storage.close(set_read_and_writable_for_all)\n from rafcon.core.states.state import StateExecutionStatus\n self._root_state.state_execution_status = StateExecutionStatus.INACTIVE", "response": "Wait for root state to finish execution"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_port_area(self, view):\n state_v = self.parent\n center = self.handle.pos\n margin = self.port_side_size / 4.\n if self.side in [SnappedSide.LEFT, SnappedSide.RIGHT]:\n height, width = self.port_size\n else:\n width, height = self.port_size\n upper_left = center[0] - width / 2 - margin, center[1] - height / 2 - margin\n lower_right = center[0] + width / 2 + margin, center[1] + 
height / 2 + margin\n port_upper_left = view.get_matrix_i2v(state_v).transform_point(*upper_left)\n port_lower_right = view.get_matrix_i2v(state_v).transform_point(*lower_right)\n size = port_lower_right[0] - port_upper_left[0], port_lower_right[1] - port_upper_left[1]\n return port_upper_left[0], port_upper_left[1], size[0], size[1]", "response": "Calculates the drawing area affected by the port."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndraw the simple state port of a simple state.", "response": "def _draw_simple_state_port(self, context, direction, color, transparency):\n \"\"\"Draw the port of a simple state (ExecutionState, LibraryState)\n\n Connector for execution states can only be connected to the outside. Thus the connector fills the whole\n border of the state.\n\n :param context: Cairo context\n :param direction: The direction the port is pointing to\n :param color: Desired color of the port\n :param transparency: The level of transparency\n \"\"\"\n c = context\n\n width, height = self.port_size\n c.set_line_width(self.port_side_size * 0.03 * self._port_image_cache.multiplicator)\n\n # Save/restore context, as we move and rotate the connector to the desired pose\n c.save()\n c.rel_move_to(self.port_side_size / 2., self.port_side_size / 2.)\n PortView._rotate_context(c, direction)\n PortView._draw_single_connector(c, width, height)\n c.restore()\n\n # Colorize the generated connector path\n if self.connected:\n c.set_source_rgba(*gap_draw_helper.get_col_rgba(color, transparency))\n else:\n c.set_source_rgb(*gui_config.gtk_colors['PORT_UNCONNECTED'].to_floats())\n c.fill_preserve()\n c.set_source_rgba(*gap_draw_helper.get_col_rgba(color, transparency))\n c.stroke()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _draw_container_state_port(self, context, direction, color, transparency):\n c = context\n\n width, height = self.port_size\n 
c.set_line_width(self.port_side_size / constants.BORDER_WIDTH_OUTLINE_WIDTH_FACTOR *\n self._port_image_cache.multiplicator)\n\n # Save/restore context, as we move and rotate the connector to the desired pose\n cur_point = c.get_current_point()\n c.save()\n c.rel_move_to(self.port_side_size / 2., self.port_side_size / 2.)\n PortView._rotate_context(c, direction)\n PortView._draw_inner_connector(c, width, height)\n c.restore()\n\n if self.connected_incoming:\n c.set_source_rgba(*gap_draw_helper.get_col_rgba(color, transparency))\n else:\n c.set_source_rgb(*gui_config.gtk_colors['PORT_UNCONNECTED'].to_floats())\n c.fill_preserve()\n c.set_source_rgba(*gap_draw_helper.get_col_rgba(color, transparency))\n c.stroke()\n\n c.move_to(*cur_point)\n c.save()\n c.rel_move_to(self.port_side_size / 2., self.port_side_size / 2.)\n PortView._rotate_context(c, direction)\n PortView._draw_outer_connector(c, width, height)\n c.restore()\n\n if self.connected_outgoing:\n c.set_source_rgba(*gap_draw_helper.get_col_rgba(color, transparency))\n else:\n c.set_source_rgb(*gui_config.gtk_colors['PORT_UNCONNECTED'].to_floats())\n c.fill_preserve()\n c.set_source_rgba(*gap_draw_helper.get_col_rgba(color, transparency))\n c.stroke()", "response": "Draw the port of a container state."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndraw the connector for execution states in a single context.", "response": "def _draw_single_connector(context, width, height):\n \"\"\"Draw the connector for execution states\n\n Connector for execution states can only be connected to the outside. 
Thus the connector fills the whole\n border of the state.\n\n :param context: Cairo context\n :param float port_size: The side length of the port\n \"\"\"\n c = context\n # Current pos is center\n # Arrow is drawn upright\n\n arrow_height = height / 2.0\n\n # First move to bottom left corner\n c.rel_move_to(-width / 2., height / 2.)\n # Draw line to bottom right corner\n c.rel_line_to(width, 0)\n # Draw line to upper right corner\n c.rel_line_to(0, -(height - arrow_height))\n # Draw line to center top (arrow)\n c.rel_line_to(-width / 2., -arrow_height)\n # Draw line to upper left corner\n c.rel_line_to(-width / 2., arrow_height)\n # Draw line back to the origin (lower left corner)\n c.close_path()"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndraw the connector for container states", "response": "def _draw_inner_connector(context, width, height):\n \"\"\"Draw the connector for container states\n\n Connector for container states can be connected from the inside and the outside. Thus the connector is split\n in two parts: A rectangle on the inside and an arrow on the outside. This methods draws the inner rectangle.\n\n :param context: Cairo context\n :param float port_size: The side length of the port\n \"\"\"\n c = context\n # Current pos is center\n # Arrow is drawn upright\n\n gap = height / 6.\n connector_height = (height - gap) / 2.\n\n # First move to bottom left corner\n c.rel_move_to(-width / 2., height / 2.)\n\n # Draw inner connector (rectangle)\n c.rel_line_to(width, 0)\n c.rel_line_to(0, -connector_height)\n c.rel_line_to(-width, 0)\n c.close_path()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ndrawing the outer connector for container states.", "response": "def _draw_outer_connector(context, width, height):\n \"\"\"Draw the outer connector for container states\n\n Connector for container states can be connected from the inside and the outside. 
Thus the connector is split\n in two parts: A rectangle on the inside and an arrow on the outside. This method draws the outer arrow.\n\n :param context: Cairo context\n :param float port_size: The side length of the port\n \"\"\"\n c = context\n # Current pos is center\n # Arrow is drawn upright\n\n arrow_height = height / 2.5\n gap = height / 6.\n connector_height = (height - gap) / 2.\n\n # Move to bottom left corner of outer connector\n c.rel_move_to(-width / 2., -gap / 2.)\n\n # Draw line to bottom right corner\n c.rel_line_to(width, 0)\n # Draw line to upper right corner\n c.rel_line_to(0, -(connector_height - arrow_height))\n # Draw line to center top (arrow)\n c.rel_line_to(-width / 2., -arrow_height)\n # Draw line to upper left corner\n c.rel_line_to(-width / 2., arrow_height)\n # Draw line back to the origin (lower left corner)\n c.close_path()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _draw_rectangle(context, width, height):\n c = context\n # First move to upper left corner\n c.rel_move_to(-width / 2., -height / 2.)\n # Draw closed rectangle\n c.rel_line_to(width, 0)\n c.rel_line_to(0, height)\n c.rel_line_to(-width, 0)\n c.close_path()", "response": "Draw a rectangle of the current size"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nmoves the current position to position and rotates the context according to direction", "response": "def _rotate_context(context, direction):\n \"\"\"Moves the current position to 'position' and rotates the context according to 'direction'\n\n :param context: Cairo context\n :param direction: Direction enum\n \"\"\"\n if direction is Direction.UP:\n pass\n elif direction is Direction.RIGHT:\n context.rotate(deg2rad(90))\n elif direction is Direction.DOWN:\n context.rotate(deg2rad(180))\n elif direction is Direction.LEFT:\n context.rotate(deg2rad(-90))"} {"SOURCE": "codesearchnet", "instruction": "Given the 
following Python 3 function, write the documentation\ndef draw_name(self, context, transparency, only_calculate_size=False):\n c = context\n cairo_context = c\n if isinstance(c, CairoBoundingBoxContext):\n cairo_context = c._cairo\n # c.set_antialias(Antialias.GOOD)\n\n side_length = self.port_side_size\n\n layout = PangoCairo.create_layout(cairo_context)\n font_name = constants.INTERFACE_FONT\n font_size = gap_draw_helper.FONT_SIZE\n font = FontDescription(font_name + \" \" + str(font_size))\n layout.set_font_description(font)\n layout.set_text(self.name, -1)\n\n ink_extents, logical_extents = layout.get_extents()\n extents = [extent / float(SCALE) for extent in [logical_extents.x, logical_extents.y,\n logical_extents.width, logical_extents.height]]\n real_name_size = extents[2], extents[3]\n desired_height = side_length * 0.75\n scale_factor = real_name_size[1] / desired_height\n\n # Determine the size of the text, increase the width to have more margin left and right of the text\n margin = side_length / 4.\n name_size = real_name_size[0] / scale_factor, desired_height\n name_size_with_margin = name_size[0] + margin * 2, name_size[1] + margin * 2\n\n # Only the size is required, stop here\n if only_calculate_size:\n return name_size_with_margin\n\n # Current position is the center of the port rectangle\n c.save()\n if self.side is SnappedSide.RIGHT or self.side is SnappedSide.LEFT:\n c.rotate(deg2rad(-90))\n c.rel_move_to(-name_size[0] / 2, -name_size[1] / 2)\n c.scale(1. / scale_factor, 1. 
/ scale_factor)\n c.rel_move_to(-extents[0], -extents[1])\n\n c.set_source_rgba(*gap_draw_helper.get_col_rgba(self.text_color, transparency))\n PangoCairo.update_layout(cairo_context, layout)\n PangoCairo.show_layout(cairo_context, layout)\n c.restore()\n\n return name_size_with_margin", "response": "Draws the name of the port."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ndraws the rectangle path for the port.", "response": "def _draw_rectangle_path(self, context, width, height, only_get_extents=False):\n \"\"\"Draws the rectangle path for the port\n\n The rectangle is correctly rotated. Height therefore refers to the border thickness and width to the length\n of the port.\n\n :param context: The context to draw on\n :param float width: The width of the rectangle\n :param float height: The height of the rectangle\n \"\"\"\n c = context\n\n # Current position is the center of the rectangle\n c.save()\n if self.side is SnappedSide.LEFT or self.side is SnappedSide.RIGHT:\n c.rotate(deg2rad(90))\n c.rel_move_to(-width / 2., - height / 2.)\n c.rel_line_to(width, 0)\n c.rel_line_to(0, height)\n c.rel_line_to(-width, 0)\n c.close_path()\n c.restore()\n\n if only_get_extents:\n extents = c.path_extents()\n c.new_path()\n return extents"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _get_port_center_position(self, width):\n x, y = self.pos.x.value, self.pos.y.value\n if self.side is SnappedSide.TOP or self.side is SnappedSide.BOTTOM:\n if x - width / 2. < 0:\n x = width / 2\n elif x + width / 2. > self.parent.width:\n x = self.parent.width - width / 2.\n else:\n if y - width / 2. < 0:\n y = width / 2\n elif y + width / 2. 
> self.parent.height:\n y = self.parent.height - width / 2.\n return x, y", "response": "Calculates the center position of the port rectangle."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef register_actions(self, shortcut_manager):\n shortcut_manager.add_callback_for_action('rename', self.rename_selected_state)\n super(StatesEditorController, self).register_actions(shortcut_manager)", "response": "Register callback methods for triggered actions\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the state model of the currently open tab", "response": "def get_current_state_m(self):\n \"\"\"Returns the state model of the currently open tab\"\"\"\n page_id = self.view.notebook.get_current_page()\n if page_id == -1:\n return None\n page = self.view.notebook.get_nth_page(page_id)\n state_identifier = self.get_state_identifier_for_page(page)\n return self.tabs[state_identifier]['state_m']"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef state_machine_manager_notification(self, model, property, info):\n if self.current_state_machine_m is not None:\n selection = self.current_state_machine_m.selection\n if len(selection.states) > 0:\n self.activate_state_tab(selection.get_selected_state())", "response": "Triggered whenever a new state machine is created or selected."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef clean_up_tabs(self):\n tabs_to_close = []\n for state_identifier, tab_dict in list(self.tabs.items()):\n if tab_dict['sm_id'] not in self.model.state_machine_manager.state_machines:\n tabs_to_close.append(state_identifier)\n for state_identifier, tab_dict in list(self.closed_tabs.items()):\n if tab_dict['sm_id'] not in self.model.state_machine_manager.state_machines:\n tabs_to_close.append(state_identifier)\n for state_identifier in 
tabs_to_close:\n self.close_page(state_identifier, delete=True)", "response": "Method remove state - tabs for those no state machine exists anymore."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nobserves all open state machines and their root states", "response": "def state_machines_set_notification(self, model, prop_name, info):\n \"\"\"Observe all open state machines and their root states\n \"\"\"\n if info['method_name'] == '__setitem__':\n state_machine_m = info.args[1]\n self.observe_model(state_machine_m)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nrelives models of closed state machine", "response": "def state_machines_del_notification(self, model, prop_name, info):\n \"\"\"Relive models of closed state machine\n \"\"\"\n if info['method_name'] == '__delitem__':\n state_machine_m = info[\"result\"]\n try:\n self.relieve_model(state_machine_m)\n except KeyError:\n pass\n self.clean_up_tabs()"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_state_editor(self, state_m):\n state_identifier = self.get_state_identifier(state_m)\n\n if state_identifier in self.closed_tabs:\n state_editor_ctrl = self.closed_tabs[state_identifier]['controller']\n state_editor_view = state_editor_ctrl.view\n handler_id = self.closed_tabs[state_identifier]['source_code_changed_handler_id']\n source_code_view_is_dirty = self.closed_tabs[state_identifier]['source_code_view_is_dirty']\n del self.closed_tabs[state_identifier] # pages not in self.closed_tabs and self.tabs at the same time\n else:\n state_editor_view = StateEditorView()\n if isinstance(state_m, LibraryStateModel):\n state_editor_view['main_notebook_1'].set_current_page(\n state_editor_view['main_notebook_1'].page_num(state_editor_view.page_dict[\"Data Linkage\"]))\n state_editor_ctrl = StateEditorController(state_m, state_editor_view)\n 
self.add_controller(state_identifier, state_editor_ctrl)\n if state_editor_ctrl.get_controller('source_ctrl') and state_m.state.get_next_upper_library_root_state() is None:\n # observe changed to set the mark dirty flag\n handler_id = state_editor_view.source_view.get_buffer().connect('changed', self.script_text_changed,\n state_m)\n self.view.get_top_widget().connect('draw', state_editor_view.source_view.on_draw)\n else:\n handler_id = None\n source_code_view_is_dirty = False\n\n (tab, inner_label, sticky_button) = create_tab_header('', self.on_tab_close_clicked,\n self.on_toggle_sticky_clicked, state_m)\n set_tab_label_texts(inner_label, state_m, source_code_view_is_dirty)\n\n state_editor_view.get_top_widget().title_label = inner_label\n state_editor_view.get_top_widget().sticky_button = sticky_button\n\n page_content = state_editor_view.get_top_widget()\n page_id = self.view.notebook.prepend_page(page_content, tab)\n page = self.view.notebook.get_nth_page(page_id)\n self.view.notebook.set_tab_reorderable(page, True)\n page.show_all()\n\n self.view.notebook.show()\n self.tabs[state_identifier] = {'page': page, 'state_m': state_m,\n 'controller': state_editor_ctrl, 'sm_id': self.model.selected_state_machine_id,\n 'is_sticky': False,\n 'source_code_view_is_dirty': source_code_view_is_dirty,\n 'source_code_changed_handler_id': handler_id}\n return page_id", "response": "Triggered whenever a state is selected."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate the gui elements according text buffer changes", "response": "def script_text_changed(self, text_buffer, state_m):\n \"\"\" Update gui elements according text buffer changes\n\n Checks if the dirty flag needs to be set and the tab label to be updated.\n\n :param TextBuffer text_buffer: Text buffer of the edited script\n :param rafcon.gui.models.state.StateModel state_m: The state model related to the text buffer\n :return:\n \"\"\"\n\n state_identifier = 
self.get_state_identifier(state_m)\n if state_identifier in self.tabs:\n tab_list = self.tabs\n elif state_identifier in self.closed_tabs:\n tab_list = self.closed_tabs\n else:\n logger.warning('It was tried to check a source script of a state with no state-editor')\n return\n if tab_list[state_identifier]['controller'].get_controller('source_ctrl') is None:\n logger.warning('It was tried to check a source script of a state with no source-editor')\n return\n current_text = tab_list[state_identifier]['controller'].get_controller('source_ctrl').view.get_text()\n old_is_dirty = tab_list[state_identifier]['source_code_view_is_dirty']\n source_script_state_m = state_m.state_copy if isinstance(state_m, LibraryStateModel) else state_m\n # remove next two lines and tab is also set dirty for source scripts inside of a LibraryState (maybe in future)\n if isinstance(state_m, LibraryStateModel) or state_m.state.get_next_upper_library_root_state() is not None:\n return\n if source_script_state_m.state.script_text == current_text:\n tab_list[state_identifier]['source_code_view_is_dirty'] = False\n else:\n tab_list[state_identifier]['source_code_view_is_dirty'] = True\n if old_is_dirty is not tab_list[state_identifier]['source_code_view_is_dirty']:\n self.update_tab_label(source_script_state_m)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ndestroying the desired page and disconnects the page from signals and removes interconnection to parent - controller or observables.", "response": "def destroy_page(self, tab_dict):\n \"\"\" Destroys desired page\n\n Disconnects the page from signals and removes interconnection to parent-controller or observables.\n\n :param tab_dict: Tab-dictionary that holds all necessary information of a page and state-editor.\n \"\"\"\n # logger.info(\"destroy page %s\" % tab_dict['controller'].model.state.get_path())\n if tab_dict['source_code_changed_handler_id'] is not None:\n handler_id = 
tab_dict['source_code_changed_handler_id']\n if tab_dict['controller'].view.source_view.get_buffer().handler_is_connected(handler_id):\n tab_dict['controller'].view.source_view.get_buffer().disconnect(handler_id)\n else:\n logger.warning(\"Source code changed handler of state {0} was already removed.\".format(tab_dict['state_m']))\n self.remove_controller(tab_dict['controller'])"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nclose the desired page with the specified state_identifier.", "response": "def close_page(self, state_identifier, delete=True):\n \"\"\"Closes the desired page\n\n The page belonging to the state with the specified state_identifier is closed. If the deletion flag is set to\n False, the controller of the page is stored for later usage.\n\n :param state_identifier: Identifier of the page's state\n :param delete: Whether to delete the controller (deletion is necessary if teh state is deleted)\n \"\"\"\n # delete old controller references\n if delete and state_identifier in self.closed_tabs:\n self.destroy_page(self.closed_tabs[state_identifier])\n del self.closed_tabs[state_identifier]\n\n # check for open page of state\n if state_identifier in self.tabs:\n page_to_close = self.tabs[state_identifier]['page']\n current_page_id = self.view.notebook.page_num(page_to_close)\n if not delete:\n self.closed_tabs[state_identifier] = self.tabs[state_identifier]\n else:\n self.destroy_page(self.tabs[state_identifier])\n del self.tabs[state_identifier]\n # Finally remove the page, this triggers the callback handles on_switch_page\n self.view.notebook.remove_page(current_page_id)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the identifier and page of a given state model", "response": "def find_page_of_state_m(self, state_m):\n \"\"\"Return the identifier and page of a given state model\n\n :param state_m: The state model to be searched\n :return: page containing the state and 
the state_identifier\n \"\"\"\n for state_identifier, page_info in list(self.tabs.items()):\n if page_info['state_m'] is state_m:\n return page_info['page'], state_identifier\n return None, None"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef on_tab_close_clicked(self, event, state_m):\n [page, state_identifier] = self.find_page_of_state_m(state_m)\n if page:\n self.close_page(state_identifier, delete=False)", "response": "Triggered when the states - editor close button is clicked\n Closes the tab."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef close_all_pages(self):\n states_to_be_closed = []\n for state_identifier in self.tabs:\n states_to_be_closed.append(state_identifier)\n for state_identifier in states_to_be_closed:\n self.close_page(state_identifier, delete=False)", "response": "Closes all tabs of the states editor"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nclose all tabs of the states editor for a specific sm_id", "response": "def close_pages_for_specific_sm_id(self, sm_id):\n \"\"\"Closes all tabs of the states editor for a specific sm_id\"\"\"\n states_to_be_closed = []\n for state_identifier in self.tabs:\n state_m = self.tabs[state_identifier][\"state_m\"]\n if state_m.state.get_state_machine().state_machine_id == sm_id:\n states_to_be_closed.append(state_identifier)\n for state_identifier in states_to_be_closed:\n self.close_page(state_identifier, delete=False)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nupdates state selection when the active tab was changed", "response": "def on_switch_page(self, notebook, page_pointer, page_num, user_param1=None):\n \"\"\"Update state selection when the active tab was changed\n \"\"\"\n page = notebook.get_nth_page(page_num)\n\n # find state of selected tab\n for tab_info in list(self.tabs.values()):\n 
if tab_info['page'] is page:\n state_m = tab_info['state_m']\n sm_id = state_m.state.get_state_machine().state_machine_id\n selected_state_m = self.current_state_machine_m.selection.get_selected_state()\n\n # If the state of the selected tab is not in the selection, set it there\n if selected_state_m is not state_m and sm_id in self.model.state_machine_manager.state_machines:\n self.model.selected_state_machine_id = sm_id\n self.current_state_machine_m.selection.set(state_m)\n return"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nopens the tab for the given state model.", "response": "def activate_state_tab(self, state_m):\n \"\"\"Opens the tab for the specified state model\n\n The tab with the given state model is opened or set to foreground.\n\n :param state_m: The desired state model (the selected state)\n \"\"\"\n # The current shown state differs from the desired one\n current_state_m = self.get_current_state_m()\n if current_state_m is not state_m:\n state_identifier = self.get_state_identifier(state_m)\n\n # The desired state is not open, yet\n if state_identifier not in self.tabs:\n # add tab for desired state\n page_id = self.add_state_editor(state_m)\n self.view.notebook.set_current_page(page_id)\n # bring tab for desired state into foreground\n else:\n page = self.tabs[state_identifier]['page']\n page_id = self.view.notebook.page_num(page)\n self.view.notebook.set_current_page(page_id)\n\n self.keep_only_sticked_and_selected_tabs()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef keep_only_sticked_and_selected_tabs(self):\n # Only if the user didn't deactivate this behaviour\n if not global_gui_config.get_config_value('KEEP_ONLY_STICKY_STATES_OPEN', True):\n return\n\n page_id = self.view.notebook.get_current_page()\n # No tabs are open\n if page_id == -1:\n return\n\n page = self.view.notebook.get_nth_page(page_id)\n 
current_state_identifier = self.get_state_identifier_for_page(page)\n\n states_to_be_closed = []\n # Iterate over all tabs\n for state_identifier, tab_info in list(self.tabs.items()):\n # If the tab is currently open, keep it open\n if current_state_identifier == state_identifier:\n continue\n # If the tab is sticky, keep it open\n if tab_info['is_sticky']:\n continue\n # Otherwise close it\n states_to_be_closed.append(state_identifier)\n\n for state_identifier in states_to_be_closed:\n self.close_page(state_identifier, delete=False)", "response": "Keep only sticked and selected tabs"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nchecking whether the name of a state was changed and change the tab label accordingly.", "response": "def notify_state_name_change(self, model, prop_name, info):\n \"\"\"Checks whether the name of a state was changed and change the tab label accordingly\n \"\"\"\n # avoid updates or checks because of execution status updates\n if is_execution_status_update_notification_from_state_machine_model(prop_name, info):\n return\n\n overview = NotificationOverview(info, False, self.__class__.__name__)\n changed_model = overview['model'][-1]\n method_name = overview['method_name'][-1]\n if isinstance(changed_model, AbstractStateModel) and method_name in ['name', 'script_text']:\n self.update_tab_label(changed_model)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef update_tab_label(self, state_m):\n state_identifier = self.get_state_identifier(state_m)\n if state_identifier not in self.tabs and state_identifier not in self.closed_tabs:\n return\n tab_info = self.tabs[state_identifier] if state_identifier in self.tabs else self.closed_tabs[state_identifier]\n page = tab_info['page']\n set_tab_label_texts(page.title_label, state_m, tab_info['source_code_view_is_dirty'])", "response": "Update all tab labels that are currently open for the given 
state_m."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the state identifier for a given page", "response": "def get_state_identifier_for_page(self, page):\n \"\"\"Return the state identifier for a given page\n \"\"\"\n for identifier, page_info in list(self.tabs.items()):\n if page_info[\"page\"] is page: # reference comparison on purpose\n return identifier"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef rename_selected_state(self, key_value, modifier_mask):\n selection = self.current_state_machine_m.selection\n if len(selection.states) == 1 and len(selection) == 1:\n selected_state = selection.get_selected_state()\n self.activate_state_tab(selected_state)\n _, state_identifier = self.find_page_of_state_m(selected_state)\n state_controller = self.tabs[state_identifier]['controller']\n state_controller.rename()", "response": "Callback method for shortcut action rename_selected_state"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef generate_semantic_data_key(used_semantic_keys):\n semantic_data_id_counter = -1\n while True:\n semantic_data_id_counter += 1\n if \"semantic data key \" + str(semantic_data_id_counter) not in used_semantic_keys:\n break\n return \"semantic data key \" + str(semantic_data_id_counter)", "response": "Generate a new and unique semantic data key."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ngenerate a new state id for a state.", "response": "def state_id_generator(size=STATE_ID_LENGTH, chars=string.ascii_uppercase, used_state_ids=None):\n \"\"\" Create a new and unique state id\n\n Generates an id for a state. It randomly samples from random ascii uppercase letters size times\n and concatenates them. 
If the id already exists it draws a new one.\n\n :param size: the length of the generated keys\n :param chars: the set of characters a sample draws from\n :param list used_state_ids: Handed list of ids already in use\n :rtype: str\n :return: new_state_id\n \"\"\"\n new_state_id = ''.join(random.choice(chars) for x in range(size))\n while used_state_ids is not None and new_state_id in used_state_ids:\n new_state_id = ''.join(random.choice(chars) for x in range(size))\n return new_state_id"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngenerating a new id for a global variable.", "response": "def global_variable_id_generator(size=10, chars=string.ascii_uppercase):\n \"\"\" Create a new and unique global variable id\n\n Generates an id for a global variable. It randomly samples from random ascii uppercase letters size times\n and concatenates them. If the id already exists it draws a new one.\n\n :param size: the length of the generated keys\n :param chars: the set of characters a sample draws from\n \"\"\"\n new_global_variable_id = ''.join(random.choice(chars) for x in range(size))\n while new_global_variable_id in used_global_variable_ids:\n new_global_variable_id = ''.join(random.choice(chars) for x in range(size))\n used_global_variable_ids.append(new_global_variable_id)\n return new_global_variable_id"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef set(self):\n glColor4f(self.r, self.g, self.b, self.a)", "response": "Set the color as current OpenGL color"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nconfiguring viewport This method is called when the widget is resized or a redraw is triggered.", "response": "def _configure(self, *args):\n \"\"\"Configure viewport\n\n This method is called when the widget is resized or something triggers a redraw. 
The method configures the\n view to show all elements in an orthogonal perspective.\n \"\"\"\n # Obtain a reference to the OpenGL drawable\n # and rendering context.\n gldrawable = self.get_gl_drawable()\n glcontext = self.get_gl_context()\n\n # logger.debug(\"configure\")\n # OpenGL begin\n if not gldrawable or not gldrawable.gl_begin(glcontext):\n return False\n\n # Draw on the full viewport\n glViewport(0, 0, self.get_allocation().width, self.get_allocation().height)\n\n glMatrixMode(GL_PROJECTION)\n glLoadIdentity()\n\n # Orthogonal view with correct aspect ratio\n self._apply_orthogonal_view()\n\n glMatrixMode(GL_MODELVIEW)\n glLoadIdentity()\n\n # OpenGL end\n gldrawable.gl_end()\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncalculate the ratio between pixel and OpenGL s view coordinates.", "response": "def pixel_to_size_ratio(self):\n \"\"\"Calculates the ratio between pixel and OpenGL distances\n\n OpenGL keeps its own coordinate system. 
This method can be used to transform between pixel and OpenGL\n coordinates.\n\n :return: pixel/size ratio\n \"\"\"\n left, right, _, _ = self.get_view_coordinates()\n width = right - left\n display_width = self.get_allocation().width\n return display_width / float(width)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nexposing the drawing routine to the current rendering context.", "response": "def expose_init(self, *args):\n \"\"\"Process the drawing routine\n \"\"\"\n # Obtain a reference to the OpenGL drawable\n # and rendering context.\n gldrawable = self.get_gl_drawable()\n glcontext = self.get_gl_context()\n\n # OpenGL begin\n if not gldrawable or not gldrawable.gl_begin(glcontext):\n return False\n\n # logger.debug(\"expose_init\")\n\n # Reset buffers\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)\n\n # Prepare name stack\n glInitNames()\n glPushName(0)\n self.name_counter = 1\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndraws a state with the given properties This method is called by the controller to draw the specified (container) state. 
:param name: Name of the state :param pos: position (x, y) of the state :param size: size (width, height) of the state :param outcomes: outcomes of the state (list with outcome objects) :param input_ports_m: input ports of the state :param output_ports_m: output ports of the state :param selected: whether to display the state as selected :param active: in which color to display the state :param depth: The z layer :return: The OpenGL id and the positions of teh outcomes (as dictionary with outcome id as key)", "response": "def draw_state(self, name, pos, size, outcomes=None, input_ports_m=None, output_ports_m=None, selected=False,\n active=False, depth=0):\n \"\"\"Draw a state with the given properties\n\n This method is called by the controller to draw the specified (container) state.\n\n :param name: Name of the state\n :param pos: position (x, y) of the state\n :param size: size (width, height) of the state\n :param outcomes: outcomes of the state (list with outcome objects)\n :param input_ports_m: input ports of the state\n :param output_ports_m: output ports of the state\n :param selected: whether to display the state as selected\n :param active: in which color to display the state\n :param depth: The z layer\n :return: The OpenGL id and the positions of teh outcomes (as dictionary with outcome id as key)\n \"\"\"\n if not outcomes:\n outcomes = []\n if not input_ports_m:\n input_ports_m = []\n if not output_ports_m:\n output_ports_m = []\n # \"Generate\" unique ID for each object\n opengl_id = self.name_counter\n self.name_counter += 1\n glPushName(opengl_id)\n self._set_closest_stroke_width(1.5)\n\n width = size[0]\n height = size[1]\n\n # First draw the face of the rectangular, then the outline\n fill_color = self.state_color\n border_color = self.border_color\n\n from rafcon.core.states.state import StateExecutionStatus\n border_width = min(size) / 10.\n if active is StateExecutionStatus.ACTIVE:\n fill_color = self.state_active_color\n elif active is 
StateExecutionStatus.EXECUTE_CHILDREN:\n fill_color = self.state_child_active_color\n elif active is StateExecutionStatus.WAIT_FOR_NEXT_STATE:\n fill_color = self.state_waiting_for_next_state_color\n\n if selected:\n border_width *= 2\n border_color = self.border_selected_color\n\n self._draw_rect(pos[0], pos[0] + width, pos[1] - height, pos[1], depth, border_width,\n fill_color, border_color)\n\n # Put the name of the state in the upper left corner of the state\n margin = min(size) / 12.\n font_size = min(size) / 8.\n name = self._shorten_string(name, font_size, width - 2 * margin)\n self._write_string(name, pos[0] + margin, pos[1] - margin, font_size, self.state_name_color, True,\n False, depth=depth + 0.01)\n\n resize_length = min(width, height) / 8.\n p1 = (pos[0] + width, pos[1] - height)\n p2 = (p1[0] - resize_length, p1[1])\n p3 = (p1[0], p1[1] + resize_length)\n self._draw_triangle(p1, p2, p3, depth + 0.01, border_width, fill_color, border_color)\n\n # Draw outcomes as circle on the right side of the state\n # Every state has at least the default outcomes \"aborted\" and \"preempted\"\n num_outcomes = max(0, len(outcomes))\n if num_outcomes < 2:\n # logger.warning(\"Expecting at least 2 outcomes, found {num:d}\".format(num=num_outcomes))\n pass\n else:\n num_outcomes -= 2\n i = 0\n outcome_pos = {}\n outcome_radius = min(min(height, width) / 23., min(height, width) / (2. 
* num_outcomes + 3))\n for key in outcomes:\n # Color of outcome is defined by its type, \"aborted\", \"preempted\" or else\n outcome_name = outcomes[key].name\n color = self.outcome_plain_color\n\n # Distribute outcomes (as circles) on the right edge of the state\n outcome_x = pos[0] + width\n outcome_y = pos[1] - height / (num_outcomes + 1) * (i + 1)\n\n if outcome_name in [\"aborted\", \"preempted\"]:\n step = min(size) / 10.\n outcome_y = pos[1]\n if outcome_name == \"aborted\":\n outcome_x -= step\n color = self.outcome_aborted_color\n else:\n outcome_x -= 3 * step\n color = self.outcome_preempted_color\n else:\n outcome_font_size = font_size * 0.5\n max_outcome_name_width = width / 2.\n outcome_name = self._shorten_string(outcome_name, outcome_font_size, max_outcome_name_width)\n outcome_name_pos_x = outcome_x - margin\n outcome_name_pos_y = outcome_y + outcome_font_size * 0.65\n self._write_string(outcome_name, outcome_name_pos_x, outcome_name_pos_y, outcome_font_size,\n self.state_name_color, align_right=True, depth=depth+0.1)\n i += 1\n\n color.set()\n outcome_pos[key] = (outcome_x, outcome_y)\n\n self._draw_circle(outcome_x, outcome_y, outcome_radius, depth + 0.1, fill_color=color)\n\n # Draw \"income\" as a half circle of the left center\n income_pos = (pos[0], pos[1] - height / 2)\n self._draw_circle(income_pos[0], income_pos[1], outcome_radius, depth + 0.1, fill_color=self.income_color,\n from_angle=1.5 * pi, to_angle=0.5 * pi)\n\n # Draw input and output data ports\n port_radius = margin / 4.\n num_ports = len(input_ports_m) + len(output_ports_m)\n if num_ports > 0:\n max_name_width = 0\n margin = min(size) / 10.0\n max_allowed_name_width = 0.7 * width - margin\n str_height = height / 12.0\n\n # Determine the maximum width of all port labels\n for ports in [input_ports_m, output_ports_m]:\n for port_m in ports:\n str_width = self._string_width(port_m.data_port.name, str_height)\n if str_width > max_name_width:\n max_name_width = str_width\n\n 
#fill_color = self.port_color if not selected else self.state_selected_color\n\n port_width = min(max_name_width, max_allowed_name_width)\n port_pos_left_x = pos[0] + (width - port_width - margin) / 2\n port_pos_right_x = port_pos_left_x + port_width + margin\n port_pos_bottom_y = pos[1] - height - num_ports * (str_height + margin)\n self._draw_rect(port_pos_left_x, port_pos_right_x, port_pos_bottom_y, pos[1] - height, depth, border_width,\n fill_color, border_color)\n\n def draw_port(port_m, num, is_input):\n\n port_name = self._shorten_string(port_m.data_port.name, str_height, port_width)\n\n string_pos_x = port_pos_left_x + margin / 2.\n if not is_input:\n string_pos_x += port_width\n string_pos_y = pos[1] - height - margin / 2. - num * (str_height + margin)\n self._write_string(port_name, string_pos_x, string_pos_y,\n str_height, self.port_name_color, False, not is_input, depth + 0.01)\n\n circle_pos_x = port_pos_left_x if is_input else port_pos_right_x\n circle_pos_y = string_pos_y - margin / 2.\n self._draw_circle(circle_pos_x, circle_pos_y, port_radius, depth + 0.02, stroke_width=margin / 5.,\n border_color=self.port_name_color, fill_color=self.port_connector_fill_color)\n return circle_pos_x, circle_pos_y\n\n output_num = 0\n for port_m in input_ports_m:\n con_pos_x, con_pos_y = draw_port(port_m, output_num, True)\n port_m.temp['gui']['editor']['outer_connector_radius'] = port_radius\n port_m.temp['gui']['editor']['outer_connector_pos'] = (con_pos_x, con_pos_y)\n output_num += 1\n\n for port_m in output_ports_m:\n con_pos_x, con_pos_y = draw_port(port_m, output_num, False)\n port_m.temp['gui']['editor']['outer_connector_radius'] = port_radius\n port_m.temp['gui']['editor']['outer_connector_pos'] = (con_pos_x, con_pos_y)\n output_num += 1\n\n glPopName()\n return opengl_id, income_pos, outcome_pos, outcome_radius, resize_length"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndraw a state with the given properties 
This method is called by the controller to draw the specified transition. :param tuple from_pos: Starting position :param tuple to_pos: Ending position :param float width: A measure for the width of a transition line :param list waypoints: A list of optional waypoints to connect in between :param bool selected: Whether the transition shell be shown as active/selected :param float depth: The Z layer :return: The OpenGL id of the transition :rtype: int", "response": "def draw_transition(self, from_pos, to_pos, width, waypoints=None, selected=False, depth=0):\n \"\"\"Draw a state with the given properties\n\n This method is called by the controller to draw the specified transition.\n\n :param tuple from_pos: Starting position\n :param tuple to_pos: Ending position\n :param float width: A measure for the width of a transition line\n :param list waypoints: A list of optional waypoints to connect in between\n :param bool selected: Whether the transition shell be shown as active/selected\n :param float depth: The Z layer\n :return: The OpenGL id of the transition\n :rtype: int\n \"\"\"\n if not waypoints:\n waypoints = []\n # \"Generate\" unique ID for each object\n id = self.name_counter\n self.name_counter += 1\n\n glPushName(id)\n self._set_closest_stroke_width(width)\n\n color = self.transition_color if not selected else self.transition_selected_color\n color.set()\n\n points = [from_pos]\n points.extend(waypoints)\n\n last_p = to_pos # Transition endpoint\n sec_last_p = points[len(points) - 1] # Point before endpoint\n # Calculate max possible arrow length\n length = min(width, dist(sec_last_p, last_p) / 2.)\n mid, p2, p3 = self._calculate_arrow_points(last_p, sec_last_p, length)\n self._draw_triangle(last_p, p2, p3, depth, fill_color=color)\n\n points.append(mid)\n\n # Draw the transitions as simple straight line connecting start- way- and endpoints\n glBegin(GL_LINE_STRIP)\n for point in points:\n glVertex3f(point[0], point[1], depth)\n glEnd()\n\n 
self._set_closest_stroke_width(width / 1.5)\n for waypoint in waypoints:\n self._draw_circle(waypoint[0], waypoint[1], width / 6., depth + 1, fill_color=color)\n\n glPopName()\n\n return id"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _write_string(self, string, pos_x, pos_y, height, color, bold=False, align_right=False, depth=0.):\n stroke_width = height / 8.\n if bold:\n stroke_width = height / 5.\n color.set()\n self._set_closest_stroke_width(stroke_width)\n glMatrixMode(GL_MODELVIEW)\n glPushMatrix()\n pos_y -= height\n if not align_right:\n glTranslatef(pos_x, pos_y, depth)\n else:\n width = self._string_width(string, height)\n glTranslatef(pos_x - width, pos_y, depth)\n font_height = 119.5 # According to https://www.opengl.org/resources/libraries/glut/spec3/node78.html\n scale_factor = height / font_height\n glScalef(scale_factor, scale_factor, scale_factor)\n for c in string:\n # glTranslatef(0, 0, 0)\n glutStrokeCharacter(GLUT_STROKE_ROMAN, ord(c))\n # width = glutStrokeWidth(GLUT_STROKE_ROMAN, ord(c))\n\n glPopMatrix()", "response": "Write a string with a simple OpenGL method."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef prepare_selection(self, pos_x, pos_y, width, height):\n glSelectBuffer(self.name_counter * 6)\n viewport = glGetInteger(GL_VIEWPORT)\n\n glMatrixMode(GL_PROJECTION)\n glPushMatrix()\n\n glRenderMode(GL_SELECT)\n\n glLoadIdentity()\n\n if width < 1:\n width = 1\n if height < 1:\n height = 1\n pos_x += width / 2.\n pos_y += height / 2.\n # The system y axis is inverse to the OpenGL y axis\n gluPickMatrix(pos_x, viewport[3] - pos_y + viewport[1], width, height, viewport)\n\n self._apply_orthogonal_view()", "response": "Prepares the selection rendering for the object being clicked on."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef find_selection():\n hits 
= glRenderMode(GL_RENDER)\n\n glMatrixMode(GL_PROJECTION)\n glPopMatrix()\n glMatrixMode(GL_MODELVIEW)\n\n return hits", "response": "Finds the selected ids of the selected objects"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _set_closest_stroke_width(self, width):\n\n # Adapt line width to zooming level\n width *= self.pixel_to_size_ratio() / 6.\n stroke_width_range = glGetFloatv(GL_LINE_WIDTH_RANGE)\n stroke_width_granularity = glGetFloatv(GL_LINE_WIDTH_GRANULARITY)\n\n if width < stroke_width_range[0]:\n glLineWidth(stroke_width_range[0])\n return\n if width > stroke_width_range[1]:\n glLineWidth(stroke_width_range[1])\n return\n glLineWidth(round(width / stroke_width_granularity) * stroke_width_granularity)", "response": "Sets the line width to the closest supported one\n "} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _draw_circle(self, pos_x, pos_y, radius, depth, stroke_width=1., fill_color=None, border_color=None,\n from_angle=0., to_angle=2 * pi):\n \"\"\"Draws a circle\n\n Draws a circle with a line segment a desired position with desired size.\n\n :param float pos_x: Center x position\n :param float pos_y: Center y position\n :param float depth: The Z layer\n :param float radius: Radius of the circle\n \"\"\"\n\n visible = False\n # Check whether circle center is in the viewport\n if not self.point_outside_view((pos_x, pos_y)):\n visible = True\n # Check whether at least on point on the border of the circle is within the viewport\n if not visible:\n for i in range(0, 8):\n angle = 2 * pi / 8. 
* i\n x = pos_x + cos(angle) * radius\n y = pos_y + sin(angle) * radius\n if not self.point_outside_view((x, y)):\n visible = True\n break\n if not visible:\n return False\n\n angle_sum = to_angle - from_angle\n if angle_sum < 0:\n angle_sum = float(to_angle + 2 * pi - from_angle)\n segments = self.pixel_to_size_ratio() * radius * 1.5\n segments = max(4, segments)\n segments = int(round(segments * angle_sum / (2. * pi)))\n\n types = []\n if fill_color is not None:\n types.append(GL_POLYGON)\n if border_color is not None:\n types.append(GL_LINE_LOOP)\n\n for type in types:\n if type == GL_POLYGON:\n fill_color.set()\n else:\n self._set_closest_stroke_width(stroke_width)\n border_color.set()\n glBegin(type)\n angle = from_angle\n for i in range(0, segments):\n x = pos_x + cos(angle) * radius\n y = pos_y + sin(angle) * radius\n glVertex3f(x, y, depth)\n angle += angle_sum / (segments - 1)\n if angle > 2 * pi:\n angle -= 2 * pi\n if i == segments - 2:\n angle = to_angle\n glEnd()\n\n return True", "response": "Draws a circle with a desired position with desired size."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _apply_orthogonal_view(self):\n left, right, bottom, top = self.get_view_coordinates()\n glOrtho(left, right, bottom, top, -10, 0)", "response": "Apply orthogonal view to the current view"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncluster the time series with the same number of hours and cluster_weights.", "response": "def tsam_cluster(timeseries_df, typical_periods=10, how='daily'):\n \"\"\"\n Parameters\n ----------\n df : pd.DataFrame\n DataFrame with timeseries to cluster\n\n Returns\n -------\n timeseries : pd.DataFrame\n Clustered timeseries\n \"\"\"\n\n if how == 'daily':\n hours = 24\n if how == 'weekly':\n hours = 168\n\n aggregation = tsam.TimeSeriesAggregation(\n timeseries_df,\n noTypicalPeriods=typical_periods,\n 
rescaleClusterPeriods=False,\n hoursPerPeriod=hours,\n clusterMethod='hierarchical')\n\n timeseries = aggregation.createTypicalPeriods()\n cluster_weights = aggregation.clusterPeriodNoOccur\n\n # get the medoids/ the clusterCenterIndices\n clusterCenterIndices = aggregation.clusterCenterIndices\n\n # get all index for every hour of that day of the clusterCenterIndices\n start = []\n # get the first hour of the clusterCenterIndices (days start with 0)\n for i in clusterCenterIndices:\n start.append(i * hours)\n\n # get a list with all hours belonging to the clusterCenterIndices\n nrhours = []\n for j in start:\n nrhours.append(j)\n x = 1\n while x < hours:\n j = j + 1\n nrhours.append(j)\n x = x + 1\n\n # get the origial Datetimeindex\n dates = timeseries_df.iloc[nrhours].index\n\n return timeseries, cluster_weights, dates, hours"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the snapshots weights and dataframes based on the original data in the network and the medoids created by clustering .", "response": "def update_data_frames(network, cluster_weights, dates, hours):\n \"\"\" Updates the snapshots, snapshots weights and the dataframes based on\n the original data in the network and the medoids created by clustering\n these original data.\n\n Parameters\n -----------\n network : pyPSA network object\n cluster_weights: dictionary\n dates: Datetimeindex\n\n\n Returns\n -------\n network\n\n \"\"\"\n network.snapshot_weightings = network.snapshot_weightings.loc[dates]\n network.snapshots = network.snapshot_weightings.index\n\n # set new snapshot weights from cluster_weights\n snapshot_weightings = []\n for i in cluster_weights.values():\n x = 0\n while x < hours:\n snapshot_weightings.append(i)\n x += 1\n for i in range(len(network.snapshot_weightings)):\n network.snapshot_weightings[i] = snapshot_weightings[i]\n\n # put the snapshot in the right order\n network.snapshots.sort_values()\n 
network.snapshot_weightings.sort_index()\n\n return network"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ninstalls necessary GUI resources By default, RAFCON should be installed via `setup.py` (`pip install rafcon`). Thereby, all resources are being installed. However, if this is not the case, one can set the `RAFCON_CHECK_INSTALLATION` env variable to `True`. Then, the installation will be performed before starting the GUI.", "response": "def setup_installation():\n \"\"\"Install necessary GUI resources\n \n By default, RAFCON should be installed via `setup.py` (`pip install rafcon`). Thereby, all resources are being \n installed. However, if this is not the case, one can set the `RAFCON_CHECK_INSTALLATION` env variable to `True`. \n Then, the installation will be performed before starting the GUI. \n \"\"\"\n if os.environ.get(\"RAFCON_CHECK_INSTALLATION\", False) == \"True\":\n rafcon_root = os.path.dirname(rafcon.__file__)\n installation.assets_folder = os.path.join(rafcon_root, 'gui', 'assets')\n installation.share_folder = os.path.join(os.path.dirname(os.path.dirname(rafcon_root)), 'share')\n installation.install_fonts(logger, restart=True)\n installation.install_gtk_source_view_styles(logger)\n installation.install_libraries(logger, overwrite=False)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting up the argument parser with the required arguments", "response": "def setup_argument_parser():\n \"\"\"Sets up teh parser with the required arguments\n\n :return: The parser object\n \"\"\"\n default_config_path = filesystem.get_default_config_path()\n filesystem.create_path(default_config_path)\n\n parser = core_singletons.argument_parser\n parser.add_argument('-n', '--new', action='store_true', help=_(\"whether to create a new state-machine\"))\n parser.add_argument('-o', '--open', action='store', nargs='*', type=parse_state_machine_path,\n dest='state_machine_paths', 
metavar='path',\n help=_(\"specify directories of state-machines that shall be opened. Paths must contain a \"\n \"statemachine.json file\"))\n parser.add_argument('-c', '--config', action='store', type=config_path, metavar='path', dest='config_path',\n default=default_config_path, nargs='?', const=default_config_path,\n help=_(\"path to the configuration file config.yaml. Use 'None' to prevent the generation of a \"\n \"config file and use the default configuration. Default: {0}\"\n \"\").format(default_config_path))\n parser.add_argument('-g', '--gui_config', action='store', type=config_path, metavar='path', dest='gui_config_path',\n default=default_config_path, nargs='?', const=default_config_path,\n help=_(\"path to the configuration file gui_config.yaml. \"\n \"Use 'None' to prevent the generation of a config file and use the default \"\n \"configuration. Default: {0}\").format(default_config_path))\n parser.add_argument('-ss', '--start_state_machine', dest='start_state_machine_flag', action='store_true',\n help=_(\"a flag to specify if the first state machine of -o should be started after opening\"))\n parser.add_argument('-s', '--start_state_path', metavar='path', dest='start_state_path', default=None, nargs='?',\n help=_(\"path within a state machine to the state that should be launched which consists of \"\n \"state ids e.g. 
QPOXGD/YVWJKZ where QPOXGD is the root state and YVWJKZ its child states\"\n \" to start from.\"))\n parser.add_argument('-q', '--quit', dest='quit_flag', action='store_true',\n help=_(\"a flag to specify if the gui should quit after launching a state machine\"))\n return parser"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncall when the View was registered", "response": "def register_view(self, view):\n \"\"\"Called when the View was registered\"\"\"\n super(ScopedVariableListController, self).register_view(view)\n\n view['name_col'].add_attribute(view['name_text'], 'text', self.NAME_STORAGE_ID)\n if not isinstance(self.model.state, LibraryState) and self.model.state.get_next_upper_library_root_state() is None:\n view['name_text'].set_property(\"editable\", True)\n view['data_type_col'].add_attribute(view['data_type_text'], 'text', self.DATA_TYPE_NAME_STORAGE_ID)\n if not isinstance(self.model.state, LibraryState) and self.model.state.get_next_upper_library_root_state() is None:\n view['data_type_text'].set_property(\"editable\", True)\n if isinstance(view, ScopedVariablesListView):\n view['default_value_col'].add_attribute(view['default_value_text'], 'text', self.DEFAULT_VALUE_STORAGE_ID)\n if not isinstance(self.model.state, LibraryState) and self.model.state.get_next_upper_library_root_state() is None:\n view['default_value_text'].set_property(\"editable\", True)\n self._apply_value_on_edited_and_focus_out(view['default_value_text'],\n self.apply_new_scoped_variable_default_value)\n\n self._apply_value_on_edited_and_focus_out(view['name_text'], self.apply_new_scoped_variable_name)\n self._apply_value_on_edited_and_focus_out(view['data_type_text'], self.apply_new_scoped_variable_type)\n\n if isinstance(self.model, ContainerStateModel):\n self.reload_scoped_variables_list_store()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef register_actions(self, 
shortcut_manager):\n shortcut_manager.add_callback_for_action(\"delete\", self.remove_action_callback)\n shortcut_manager.add_callback_for_action(\"add\", self.add_action_callback)\n shortcut_manager.add_callback_for_action(\"copy\", self.copy_action_callback)\n shortcut_manager.add_callback_for_action(\"cut\", self.cut_action_callback)\n shortcut_manager.add_callback_for_action(\"paste\", self.paste_action_callback)", "response": "Register callback methods for triggered actions\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef paste_action_callback(self, *event):\n if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:\n if not global_clipboard.model_copies[\"scoped_variables\"] and \\\n (global_clipboard.model_copies[\"input_data_ports\"] or\n global_clipboard.model_copies[\"output_data_ports\"]):\n global_clipboard.paste(self.model, limited=['scoped_variables'], convert=True)\n else:\n global_clipboard.paste(self.model, limited=['scoped_variables'])\n return True", "response": "Callback method for paste action"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate a new scoped variable with default values", "response": "def on_add(self, widget, data=None):\n \"\"\"Create a new scoped variable with default values\"\"\"\n if isinstance(self.model, ContainerStateModel):\n try:\n scoped_var_ids = gui_helper_state_machine.add_scoped_variable_to_selected_states(selected_states=[self.model])\n if scoped_var_ids:\n self.select_entry(scoped_var_ids[self.model.state])\n except ValueError as e:\n logger.warning(\"The scoped variable couldn't be added: {0}\".format(e))\n return False\n\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nremoves respective core element of handed scoped variable model", "response": "def remove_core_element(self, model):\n \"\"\"Remove respective core 
element of handed scoped variable model\n\n :param ScopedVariableModel model: Scoped variable model which core element should be removed\n :return:\n \"\"\"\n assert model.scoped_variable.parent is self.model.state\n gui_helper_state_machine.delete_core_element_of_model(model)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\napplies the new name of the scoped variable defined by path to the new name.", "response": "def apply_new_scoped_variable_name(self, path, new_name):\n \"\"\"Applies the new name of the scoped variable defined by path\n\n :param str path: The path identifying the edited variable\n :param str new_name: New name\n \"\"\"\n data_port_id = self.list_store[path][self.ID_STORAGE_ID]\n try:\n if self.model.state.scoped_variables[data_port_id].name != new_name:\n self.model.state.scoped_variables[data_port_id].name = new_name\n except TypeError as e:\n logger.error(\"Error while changing port name: {0}\".format(e))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef apply_new_scoped_variable_type(self, path, new_variable_type_str):\n data_port_id = self.list_store[path][self.ID_STORAGE_ID]\n try:\n if self.model.state.scoped_variables[data_port_id].data_type.__name__ != new_variable_type_str:\n self.model.state.scoped_variables[data_port_id].change_data_type(new_variable_type_str)\n except ValueError as e:\n logger.error(\"Error while changing data type: {0}\".format(e))", "response": "Applies the new data type of the scoped variable defined by path"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\napply the new default value of the scoped variable defined by path", "response": "def apply_new_scoped_variable_default_value(self, path, new_default_value_str):\n \"\"\"Applies the new default value of the scoped variable defined by path\n\n :param str path: The path identifying the edited variable\n :param str new_default_value_str: New default 
value as string\n \"\"\"\n data_port_id = self.get_list_store_row_from_cursor_selection()[self.ID_STORAGE_ID]\n try:\n if str(self.model.state.scoped_variables[data_port_id].default_value) != new_default_value_str:\n self.model.state.scoped_variables[data_port_id].default_value = new_default_value_str\n except (TypeError, AttributeError) as e:\n logger.error(\"Error while changing default value: {0}\".format(e))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreload the scoped variable list store from the data port models", "response": "def reload_scoped_variables_list_store(self):\n \"\"\"Reloads the scoped variable list store from the data port models\"\"\"\n\n if isinstance(self.model, ContainerStateModel):\n tmp = self.get_new_list_store()\n for sv_model in self.model.scoped_variables:\n data_type = sv_model.scoped_variable.data_type\n # get name of type (e.g. ndarray)\n data_type_name = data_type.__name__\n # get module of type, e.g. numpy\n data_type_module = data_type.__module__\n # if the type is not a builtin type, also show the module\n if data_type_module not in ['__builtin__', 'builtins']:\n data_type_name = data_type_module + '.' 
+ data_type_name\n tmp.append([sv_model.scoped_variable.name, data_type_name,\n str(sv_model.scoped_variable.default_value), sv_model.scoped_variable.data_port_id,\n sv_model])\n tms = Gtk.TreeModelSort(model=tmp)\n tms.set_sort_column_id(0, Gtk.SortType.ASCENDING)\n tms.set_sort_func(0, compare_variables)\n tms.sort_column_changed()\n tmp = tms\n self.list_store.clear()\n for elem in tmp:\n self.list_store.append(elem[:])\n else:\n raise RuntimeError(\"The reload_scoped_variables_list_store function should be never called for \"\n \"a non Container State Model\")"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nstore reference backup information for all open tabs into runtime config", "response": "def store_session():\n \"\"\" Stores reference backup information for all open tabs into runtime config\n\n The backup of never stored tabs (state machines) and not stored state machine changes will be triggered a last\n time to secure data lose.\n \"\"\"\n from rafcon.gui.singleton import state_machine_manager_model, global_runtime_config\n from rafcon.gui.models.auto_backup import AutoBackupModel\n from rafcon.gui.models import AbstractStateModel\n from rafcon.gui.singleton import main_window_controller\n # check if there are dirty state machines -> use backup file structure maybe it is already stored\n for sm_m in state_machine_manager_model.state_machines.values():\n if sm_m.auto_backup:\n if sm_m.state_machine.marked_dirty:\n sm_m.auto_backup.perform_temp_storage()\n else:\n # generate a backup\n sm_m.auto_backup = AutoBackupModel(sm_m)\n\n # collect order of tab state machine ids from state machines editor and find selected state machine page number\n state_machines_editor_ctrl = main_window_controller.get_controller('state_machines_editor_ctrl')\n number_of_pages = state_machines_editor_ctrl.view['notebook'].get_n_pages()\n selected_page_number = None\n list_of_tab_meta = []\n for page_number in range(number_of_pages):\n page = 
state_machines_editor_ctrl.view['notebook'].get_nth_page(page_number)\n sm_id = state_machines_editor_ctrl.get_state_machine_id_for_page(page)\n if sm_id == state_machine_manager_model.selected_state_machine_id:\n selected_page_number = page_number\n\n # backup state machine selection\n selection_of_sm = []\n for model in state_machine_manager_model.state_machines[sm_id].selection.get_all():\n if isinstance(model, AbstractStateModel):\n # TODO extend to full range of selection -> see core_identifier action-module\n selection_of_sm.append(model.state.get_path())\n\n list_of_tab_meta.append({'backup_meta': state_machine_manager_model.state_machines[sm_id].auto_backup.meta,\n 'selection': selection_of_sm})\n\n # store final state machine backup meta data to backup session tabs and selection for the next run\n global_runtime_config.set_config_value('open_tabs', list_of_tab_meta)\n global_runtime_config.set_config_value('selected_state_machine_page_number', selected_page_number)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef restore_session_from_runtime_config():\n # TODO add a dirty lock for a crashed rafcon instance also into backup session feature\n # TODO in case a dialog is needed to give the user control\n # TODO combine this and auto-backup in one structure/controller/observer\n from rafcon.gui.singleton import state_machine_manager_model, global_runtime_config, global_gui_config\n from rafcon.gui.models.auto_backup import recover_state_machine_from_backup\n from rafcon.gui.singleton import main_window_controller\n # check if session storage exists\n open_tabs = global_runtime_config.get_config_value('open_tabs', None)\n if open_tabs is None:\n logger.info(\"No session found for recovery\")\n return\n\n # load and restore state machines like they were opened before\n open_sm = []\n for idx, tab_meta_dict in enumerate(open_tabs):\n start_time = time.time()\n backup_meta_dict = tab_meta_dict['backup_meta']\n 
from_backup_path = None\n open_sm.append(None)\n # TODO do this decision before storing or maybe store the last stored time in the auto backup?!\n # pick folder name dependent on time, and backup meta data existence\n # problem is that the backup time is maybe not the best choice\n if 'last_backup' in backup_meta_dict:\n last_backup_time = storage_utils.get_float_time_for_string(backup_meta_dict['last_backup']['time'])\n if 'last_saved' in backup_meta_dict:\n last_save_time = storage_utils.get_float_time_for_string(backup_meta_dict['last_saved']['time'])\n backup_marked_dirty = backup_meta_dict['last_backup']['marked_dirty']\n if last_backup_time > last_save_time and backup_marked_dirty:\n from_backup_path = backup_meta_dict['last_backup']['file_system_path']\n else:\n from_backup_path = backup_meta_dict['last_backup']['file_system_path']\n elif 'last_saved' in backup_meta_dict:\n # print(\"### open last saved\", sm_meta_dict['last_saved']['file_system_path'])\n pass\n else:\n logger.error(\"A tab was stored into session storage dictionary {0} without any recovery path\"\n \"\".format(backup_meta_dict))\n continue\n\n # check in case that the backup folder is valid or use last saved path\n if from_backup_path is not None and not os.path.isdir(from_backup_path):\n logger.warning(\"The backup of tab {0} from backup path {1} was not possible. 
\"\n \"The last saved path will be used for recovery, which could result is loss of changes.\"\n \"\".format(idx, from_backup_path))\n from_backup_path = None\n\n # open state machine\n if from_backup_path is not None:\n # open state machine, recover mark dirty flags, cleans dirty lock files\n logger.info(\"Restoring from backup {0}\".format(from_backup_path))\n state_machine_m = recover_state_machine_from_backup(from_backup_path)\n else:\n if 'last_saved' not in backup_meta_dict or backup_meta_dict['last_saved']['file_system_path'] is None:\n continue\n path = backup_meta_dict['last_saved']['file_system_path']\n if not os.path.isdir(path):\n logger.warning(\"The tab can not be open. The backup of tab {0} from common path {1} was not \"\n \"possible.\".format(idx, path))\n continue\n # logger.info(\"backup from last saved\", path, sm_meta_dict)\n state_machine = storage.load_state_machine_from_path(path)\n state_machine_manager_model.state_machine_manager.add_state_machine(state_machine)\n wait_for_gui()\n state_machine_m = state_machine_manager_model.state_machines[state_machine.state_machine_id]\n\n duration = time.time() - start_time\n stat = state_machine_m.state_machine.root_state.get_states_statistics(0)\n logger.info(\"It took {0:.3}s to restore {1} states with {2} hierarchy levels.\".format(duration, stat[0], stat[1]))\n\n open_sm[idx] = state_machine_m\n\n global_runtime_config.extend_recently_opened_by_current_open_state_machines()\n\n wait_for_gui()\n\n # restore all state machine selections separate to avoid states-editor and state editor creation problems\n for idx, tab_meta_dict in enumerate(open_tabs):\n state_machine_m = open_sm[idx]\n if state_machine_m is None: # state machine could not be open\n return\n\n # restore state machine selection\n selected_model_set = []\n for core_element_identifier in tab_meta_dict['selection']:\n selected_model_set.append(state_machine_m.get_state_model_by_path(core_element_identifier))\n 
state_machine_m.selection.set(selected_model_set)\n\n # restore backup-ed tab selection\n selected_page_number = global_runtime_config.get_config_value('selected_state_machine_page_number', None)\n if selected_page_number is not None:\n selected_state_machine_page_number = selected_page_number\n if selected_state_machine_page_number is None:\n return\n state_machines_editor_ctrl = main_window_controller.get_controller('state_machines_editor_ctrl')\n if not state_machines_editor_ctrl.view['notebook'].get_n_pages() >= selected_page_number:\n logger.warning(\"Page id {0} does not exist so session restore can not re-create selection.\"\n \"\".format(selected_page_number))\n return\n notebook = state_machines_editor_ctrl.view['notebook']\n page = state_machines_editor_ctrl.on_switch_page(notebook, None, selected_page_number)\n selected_sm_id = state_machine_manager_model.selected_state_machine_id\n if not selected_sm_id == state_machines_editor_ctrl.get_state_machine_id_for_page(page):\n logger.warning(\"Selection of page was not set correctly so session restore can not re-create selection.\")\n return", "response": "Restore stored tabs from runtime config"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_logging_level(level_name, level_num, method_name=None):\n if not method_name:\n method_name = level_name.lower()\n\n if hasattr(logging, level_name):\n raise AttributeError('{} already defined in logging module'.format(level_name))\n if hasattr(logging, method_name):\n raise AttributeError('{} already defined in logging module'.format(method_name))\n if hasattr(logging.getLoggerClass(), method_name):\n raise AttributeError('{} already defined in logger class'.format(method_name))\n\n # This method was inspired by the answers to Stack Overflow post\n # http://stackoverflow.com/q/2183233/2988730, especially\n # http://stackoverflow.com/a/13638084/2988730\n def log_for_level(self, message, *args, **kwargs):\n if 
self.isEnabledFor(level_num):\n self._log(level_num, message, args, **kwargs)\n def log_to_root(message, *args, **kwargs):\n logging.log(level_num, message, *args, **kwargs)\n\n logging.addLevelName(level_num, level_name)\n setattr(logging, level_name, level_num)\n setattr(logging.getLoggerClass(), method_name, log_for_level)\n setattr(logging, method_name, log_to_root)", "response": "Add a new logging level to the logging module and the currently configured logging class."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_logger(name):\n if name in existing_loggers:\n return existing_loggers[name]\n\n # Ensure that all logger are within the RAFCON root namespace\n namespace = name if name.startswith(rafcon_root + \".\") else rafcon_root + \".\" + name\n logger = logging.getLogger(namespace)\n logger.propagate = True\n existing_loggers[name] = logger\n\n return logger", "response": "Returns a logger object with the given name"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nopens a state machine from respective file system path.", "response": "def open_state_machine(path=None, recent_opened_notification=False):\n \"\"\" Open a state machine from respective file system path\n\n :param str path: file system path to the state machine\n :param bool recent_opened_notification: flags that indicates that this call also should update recently open\n\n :rtype rafcon.core.state_machine.StateMachine\n :return: opened state machine\n \"\"\"\n start_time = time.time()\n if path is None:\n if interface.open_folder_func is None:\n logger.error(\"No function defined for opening a folder\")\n return\n load_path = interface.open_folder_func(\"Please choose the folder of the state machine\")\n if load_path is None:\n return\n else:\n load_path = path\n\n if state_machine_manager.is_state_machine_open(load_path):\n logger.info(\"State machine already open. 
Select state machine instance from path {0}.\".format(load_path))\n sm = state_machine_manager.get_open_state_machine_of_file_system_path(load_path)\n gui_helper_state.gui_singletons.state_machine_manager_model.selected_state_machine_id = sm.state_machine_id\n return state_machine_manager.get_open_state_machine_of_file_system_path(load_path)\n\n state_machine = None\n try:\n state_machine = storage.load_state_machine_from_path(load_path)\n state_machine_manager.add_state_machine(state_machine)\n if recent_opened_notification:\n global_runtime_config.update_recently_opened_state_machines_with(state_machine)\n duration = time.time() - start_time\n stat = state_machine.root_state.get_states_statistics(0)\n logger.info(\"It took {0:.2}s to load {1} states with {2} hierarchy levels.\".format(duration, stat[0], stat[1]))\n except (AttributeError, ValueError, IOError) as e:\n logger.error('Error while trying to open state machine: {0}'.format(e))\n\n return state_machine"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef save_state_machine(delete_old_state_machine=False, recent_opened_notification=False, as_copy=False, copy_path=None):\n\n state_machine_manager_model = rafcon.gui.singleton.state_machine_manager_model\n states_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('states_editor_ctrl')\n\n state_machine_m = state_machine_manager_model.get_selected_state_machine_model()\n if state_machine_m is None:\n logger.warning(\"Can not 'save state machine' because no state machine is selected.\")\n return False\n\n previous_path = state_machine_m.state_machine.file_system_path\n previous_marked_dirty = state_machine_m.state_machine.marked_dirty\n all_tabs = list(states_editor_ctrl.tabs.values())\n all_tabs.extend(states_editor_ctrl.closed_tabs.values())\n dirty_source_editor_ctrls = [tab_dict['controller'].get_controller('source_ctrl') for tab_dict in all_tabs if\n 
tab_dict['source_code_view_is_dirty'] is True and\n tab_dict['state_m'].state.get_state_machine().state_machine_id ==\n state_machine_m.state_machine.state_machine_id]\n\n for dirty_source_editor_ctrl in dirty_source_editor_ctrls:\n state = dirty_source_editor_ctrl.model.state\n message_string = \"The source code of the state '{}' (path: {}) has not been applied yet and would \" \\\n \"therefore not be saved.\\n\\nDo you want to apply the changes now?\" \\\n \"\".format(state.name, state.get_path())\n if global_gui_config.get_config_value(\"AUTO_APPLY_SOURCE_CODE_CHANGES\", False):\n dirty_source_editor_ctrl.apply_clicked(None)\n else:\n dialog = RAFCONButtonDialog(message_string, [\"Apply\", \"Ignore changes\"],\n message_type=Gtk.MessageType.WARNING, parent=states_editor_ctrl.get_root_window())\n response_id = dialog.run()\n state = dirty_source_editor_ctrl.model.state\n if response_id == 1: # Apply changes\n logger.debug(\"Applying source code changes of state '{}'\".format(state.name))\n dirty_source_editor_ctrl.apply_clicked(None)\n\n elif response_id == 2: # Ignore changes\n logger.debug(\"Ignoring source code changes of state '{}'\".format(state.name))\n else:\n logger.warning(\"Response id: {} is not considered\".format(response_id))\n return False\n dialog.destroy()\n\n save_path = state_machine_m.state_machine.file_system_path\n if not as_copy and save_path is None or as_copy and copy_path is None:\n if not save_state_machine_as(as_copy=as_copy):\n return False\n return True\n\n logger.debug(\"Saving state machine to {0}\".format(save_path))\n\n state_machine_m = state_machine_manager_model.get_selected_state_machine_model()\n sm_path = state_machine_m.state_machine.file_system_path\n\n storage.save_state_machine_to_path(state_machine_m.state_machine, copy_path if as_copy else sm_path,\n delete_old_state_machine=delete_old_state_machine, as_copy=as_copy)\n if recent_opened_notification:\n 
global_runtime_config.update_recently_opened_state_machines_with(state_machine_m.state_machine)\n state_machine_m.store_meta_data(copy_path=copy_path if as_copy else None)\n logger.debug(\"Saved state machine and its meta data.\")\n library_manager_model.state_machine_was_stored(state_machine_m, previous_path)\n return True", "response": "Save selected state machine"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef save_state_machine_as(path=None, recent_opened_notification=False, as_copy=False):\n\n state_machine_manager_model = rafcon.gui.singleton.state_machine_manager_model\n selected_state_machine_model = state_machine_manager_model.get_selected_state_machine_model()\n if selected_state_machine_model is None:\n logger.warning(\"Can not 'save state machine as' because no state machine is selected.\")\n return False\n\n if path is None:\n if interface.create_folder_func is None:\n logger.error(\"No function defined for creating a folder\")\n return False\n folder_name = selected_state_machine_model.state_machine.root_state.name\n path = interface.create_folder_func(\"Please choose a root folder and a folder name for the state-machine. 
\"\n \"The default folder name is the name of the root state.\",\n format_default_folder_name(folder_name))\n if path is None:\n logger.warning(\"No valid path specified\")\n return False\n\n previous_path = selected_state_machine_model.state_machine.file_system_path\n if not as_copy:\n marked_dirty = selected_state_machine_model.state_machine.marked_dirty\n recent_opened_notification = recent_opened_notification and (not previous_path == path or marked_dirty)\n selected_state_machine_model.state_machine.file_system_path = path\n\n result = save_state_machine(delete_old_state_machine=True,\n recent_opened_notification=recent_opened_notification,\n as_copy=as_copy, copy_path=path)\n library_manager_model.state_machine_was_stored(selected_state_machine_model, previous_path)\n return result", "response": "Saves the selected state machine to path"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nsave selected state as separate state machine", "response": "def save_selected_state_as():\n \"\"\"Save selected state as separate state machine\n\n :return True if successfully stored, False if the storing process was canceled or stopped by condition fail\n :rtype bool:\n :raises exceptions.ValueError: If dialog response ids are out of bounds\n \"\"\"\n\n state_machine_manager_model = rafcon.gui.singleton.state_machine_manager_model\n selection = state_machine_manager_model.get_selected_state_machine_model().selection\n selected_state = selection.get_selected_state()\n state_machine_id = state_machine_manager_model.get_selected_state_machine_model().state_machine.state_machine_id\n if len(selection.states) == 1:\n state_m = copy.copy(selected_state)\n sm_m = StateMachineModel(StateMachine(root_state=state_m.state))\n sm_m.root_state = state_m\n path = interface.create_folder_func(\"Please choose a root folder and a folder name for the state-machine your \"\n \"state is saved in. 
The default folder name is the name of state.\",\n format_default_folder_name(selected_state.state.name))\n if path:\n storage.save_state_machine_to_path(sm_m.state_machine, base_path=path)\n sm_m.store_meta_data()\n else:\n logger.warning(\"No valid path specified\")\n return False\n\n def open_as_state_machine_saved_state_as_separate_state_machine():\n logger.debug(\"Open state machine.\")\n try:\n open_state_machine(path=path, recent_opened_notification=True)\n except (ValueError, IOError) as e:\n logger.error('Error while trying to open state machine: {0}'.format(e))\n\n # check if state machine is in library path\n root_window = rafcon.gui.singleton.main_window_controller.get_root_window()\n if library_manager.is_os_path_within_library_root_paths(path):\n\n _library_path, _library_name = \\\n library_manager.get_library_path_and_name_for_os_path(sm_m.state_machine.file_system_path)\n overwrote_old_lib = library_manager.is_library_in_libraries(_library_path, _library_name)\n\n message_string = \"You stored your state machine in a path that is within the library root paths. 
\" \\\n \"Thereby your state machine can be used as a library state.\\n\\n\"\\\n \"Do you want to:\"\n\n table_header = [\"Option\", \"Description\"]\n table_data = [(True, \"Substitute the original state by this new library state.\"),\n (True, \"Open the newly created library state machine.\")]\n if overwrote_old_lib:\n table_data.append((False, \"Refresh all open state machines, as an already existing library was \"\n \"overwritten.\"))\n\n dialog = RAFCONCheckBoxTableDialog(message_string,\n button_texts=(\"Apply\", \"Cancel\"),\n table_header=table_header, table_data=table_data,\n message_type=Gtk.MessageType.QUESTION,\n parent=root_window,\n width=800, standalone=False)\n response_id = dialog.run()\n if response_id == 1: # Apply pressed\n\n if overwrote_old_lib and dialog.list_store[2][0]: # refresh all open state machine selected\n logger.debug(\"Refresh all is triggered.\")\n refresh_all()\n else: # if not all was refreshed at least the libraries are refreshed\n logger.debug(\"Library refresh is triggered.\")\n refresh_libraries()\n\n if dialog.list_store[0][0]: # Substitute saved state with Library selected\n logger.debug(\"Substitute saved state with Library.\")\n if dialog.list_store[0][0] or dialog.list_store[0][1]:\n refresh_libraries()\n state_machine_manager_model.selected_state_machine_id = state_machine_id\n [library_path, library_name] = library_manager.get_library_path_and_name_for_os_path(path)\n state = library_manager.get_library_instance(library_path, library_name)\n try:\n substitute_selected_state(state, as_template=False)\n except ValueError as e:\n logger.error('Error while trying to open state machine: {0}'.format(e))\n if dialog.list_store[1][0]: # Open as state machine saved state as separate state machine selected\n open_as_state_machine_saved_state_as_separate_state_machine()\n elif response_id in [2, -4]: # Cancel or Close pressed\n pass\n else:\n raise ValueError(\"Response id: {} is not considered\".format(response_id))\n 
dialog.destroy()\n else:\n # Offer to open saved state machine dialog\n message_string = \"Should the newly created state machine be opened?\"\n dialog = RAFCONButtonDialog(message_string, [\"Open\", \"Do not open\"],\n message_type=Gtk.MessageType.QUESTION,\n parent=root_window)\n response_id = dialog.run()\n if response_id == 1: # Apply pressed\n open_as_state_machine_saved_state_as_separate_state_machine()\n elif response_id in [2, -4]: # Cancel or Close pressed\n pass\n else:\n raise ValueError(\"Response id: {} is not considered\".format(response_id))\n dialog.destroy()\n\n return True\n else:\n logger.warning(\"Multiple states can not be saved as state machine directly. Group them before.\")\n return False"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef is_state_machine_stopped_to_proceed(selected_sm_id=None, root_window=None):\n # check if the/a state machine is still running\n if not state_machine_execution_engine.finished_or_stopped():\n if selected_sm_id is None or selected_sm_id == state_machine_manager.active_state_machine_id:\n\n message_string = \"A state machine is still running. 
This state machine can only be refreshed\" \\\n \"when not longer running.\"\n dialog = RAFCONButtonDialog(message_string, [\"Stop execution and refresh\",\n \"Keep running and do not refresh\"],\n message_type=Gtk.MessageType.QUESTION,\n parent=root_window)\n response_id = dialog.run()\n state_machine_stopped = False\n if response_id == 1:\n state_machine_execution_engine.stop()\n state_machine_stopped = True\n elif response_id == 2:\n logger.debug(\"State machine will stay running and no refresh will be performed!\")\n dialog.destroy()\n\n return state_machine_stopped\n return True", "response": "Checks if a specific state machine is stopped and in case request user by dialog window how to proceed\n "} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef refresh_selected_state_machine():\n\n selected_sm_id = rafcon.gui.singleton.state_machine_manager_model.selected_state_machine_id\n selected_sm = state_machine_manager.state_machines[selected_sm_id]\n state_machines_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('state_machines_editor_ctrl')\n states_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('states_editor_ctrl')\n\n if not is_state_machine_stopped_to_proceed(selected_sm_id, state_machines_editor_ctrl.get_root_window()):\n return\n\n # check if the a dirty flag is still set\n all_tabs = list(states_editor_ctrl.tabs.values())\n all_tabs.extend(states_editor_ctrl.closed_tabs.values())\n dirty_source_editor = [tab_dict['controller'] for tab_dict in all_tabs if\n tab_dict['source_code_view_is_dirty'] is True]\n if selected_sm.marked_dirty or dirty_source_editor:\n\n message_string = \"Are you sure you want to reload the currently selected state machine?\\n\\n\" \\\n \"The following elements have been modified and not saved. 
\" \\\n \"These changes will get lost:\"\n message_string = \"%s\\n* State machine #%s and name '%s'\" % (\n message_string, str(selected_sm_id), selected_sm.root_state.name)\n for ctrl in dirty_source_editor:\n if ctrl.model.state.get_state_machine().state_machine_id == selected_sm_id:\n message_string = \"%s\\n* Source code of state with name '%s' and path '%s'\" % (\n message_string, ctrl.model.state.name, ctrl.model.state.get_path())\n dialog = RAFCONButtonDialog(message_string, [\"Reload anyway\", \"Cancel\"],\n message_type=Gtk.MessageType.WARNING, parent=states_editor_ctrl.get_root_window())\n response_id = dialog.run()\n dialog.destroy()\n if response_id == 1: # Reload anyway\n pass\n else:\n logger.debug(\"Refresh of selected state machine canceled\")\n return\n\n library_manager.clean_loaded_libraries()\n refresh_libraries()\n states_editor_ctrl.close_pages_for_specific_sm_id(selected_sm_id)\n state_machines_editor_ctrl.refresh_state_machine_by_id(selected_sm_id)", "response": "Reloads the currently selected state machine."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef refresh_all(force=False):\n state_machines_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('state_machines_editor_ctrl')\n states_editor_ctrl = rafcon.gui.singleton.main_window_controller.get_controller('states_editor_ctrl')\n\n if force:\n pass # no checks direct refresh\n else:\n\n # check if a state machine is still running\n if not is_state_machine_stopped_to_proceed(root_window=states_editor_ctrl.get_root_window()):\n return\n\n # check if the a dirty flag is still set\n all_tabs = list(states_editor_ctrl.tabs.values())\n all_tabs.extend(states_editor_ctrl.closed_tabs.values())\n dirty_source_editor = [tab_dict['controller'] for tab_dict in all_tabs if\n tab_dict['source_code_view_is_dirty'] is True]\n if state_machine_manager.has_dirty_state_machine() or dirty_source_editor:\n\n 
message_string = \"Are you sure you want to reload the libraries and all state machines?\\n\\n\" \\\n \"The following elements have been modified and not saved. \" \\\n \"These changes will get lost:\"\n for sm_id, sm in state_machine_manager.state_machines.items():\n if sm.marked_dirty:\n message_string = \"%s\\n* State machine #%s and name '%s'\" % (\n message_string, str(sm_id), sm.root_state.name)\n for ctrl in dirty_source_editor:\n message_string = \"%s\\n* Source code of state with name '%s' and path '%s'\" % (\n message_string, ctrl.model.state.name, ctrl.model.state.get_path())\n dialog = RAFCONButtonDialog(message_string, [\"Reload anyway\", \"Cancel\"],\n message_type=Gtk.MessageType.WARNING, parent=states_editor_ctrl.get_root_window())\n response_id = dialog.run()\n dialog.destroy()\n if response_id == 1: # Reload anyway\n pass\n else:\n logger.debug(\"Refresh canceled\")\n return\n\n library_manager.clean_loaded_libraries()\n refresh_libraries()\n states_editor_ctrl.close_all_pages()\n state_machines_editor_ctrl.refresh_all_state_machines()", "response": "Remove all libraries and state machines and reloads them freshly from the file system"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef delete_core_element_of_model(model, raise_exceptions=False, recursive=True, destroy=True, force=False):\n if isinstance(model, AbstractStateModel) and model.state.is_root_state:\n logger.warning(\"Deletion is not allowed. {0} is root state of state machine.\".format(model.core_element))\n return False\n state_m = model.parent\n if state_m is None:\n msg = \"Model has no parent from which it could be deleted from\"\n if raise_exceptions:\n raise ValueError(msg)\n logger.error(msg)\n return False\n if is_selection_inside_of_library_state(selected_elements=[model]):\n logger.warning(\"Deletion is not allowed. 
Element {0} is inside of a library.\".format(model.core_element))\n return False\n assert isinstance(state_m, StateModel)\n\n state = state_m.state\n core_element = model.core_element\n\n try:\n if core_element in state:\n state.remove(core_element, recursive=recursive, destroy=destroy, force=force)\n return True\n return False\n except (AttributeError, ValueError) as e:\n if raise_exceptions:\n raise\n logger.error(\"The model '{}' for core element '{}' could not be deleted: {}\".format(model, core_element, e))\n return False", "response": "Deletes the respective core element of handed model of state machine."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndelete all respective core elements for the given models.", "response": "def delete_core_elements_of_models(models, raise_exceptions=True, recursive=True, destroy=True, force=False):\n \"\"\"Deletes all respective core elements for the given models\n\n Calls the :func:`delete_core_element_of_model` for all given models.\n\n :param models: A single model or a list of models of respective core element to be deleted\n :param bool raise_exceptions: Whether to raise exceptions or log error messages in case of an error\n :param bool destroy: Access the destroy flag of the core remove methods\n :return: The number of models that were successfully deleted\n \"\"\"\n # If only one model is given, make a list out of it\n if not hasattr(models, '__iter__'):\n models = [models]\n return sum(delete_core_element_of_model(model, raise_exceptions, recursive=recursive, destroy=destroy, force=force)\n for model in models)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef is_selection_inside_of_library_state(state_machine_m=None, selected_elements=None):\n if state_machine_m is None:\n state_machine_m = rafcon.gui.singleton.state_machine_manager_model.get_selected_state_machine_model()\n if state_machine_m is None and 
selected_elements is None:\n return False\n selection_in_lib = []\n selected_elements = state_machine_m.selection.get_all() if selected_elements is None else selected_elements\n for model in selected_elements:\n # check if model is element of child state or the root state (or its scoped variables) of a LibraryState\n state_m = model if isinstance(model.core_element, State) else model.parent\n selection_in_lib.append(state_m.state.get_next_upper_library_root_state() is not None)\n # check if model is part of the shell (io-port or outcome) of a LibraryState\n if not isinstance(model.core_element, State) and isinstance(state_m, LibraryStateModel):\n selection_in_lib.append(True)\n if any(selection_in_lib):\n return True\n return False", "response": "Checks if selected elements are inside of library state"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntriggers when the Add State button is pressed.", "response": "def add_new_state(state_machine_m, state_type):\n \"\"\"Triggered when shortcut keys for adding a new state are pressed, or Menu Bar \"Edit, Add State\" is clicked.\n\n Adds a new state only if the parent state (selected state) is a container state, and if the graphical editor or\n the state machine tree are in focus.\n \"\"\"\n assert isinstance(state_machine_m, StateMachineModel)\n\n if state_type not in list(StateType):\n state_type = StateType.EXECUTION\n\n if len(state_machine_m.selection.states) != 1:\n logger.warning(\"Please select exactly one desired parent state, before adding a new state\")\n return\n state_m = state_machine_m.selection.get_selected_state()\n if is_selection_inside_of_library_state(selected_elements=[state_m]):\n logger.warning(\"Add new state is not performed because selected target state is inside of a library state.\")\n return\n\n if isinstance(state_m, StateModel):\n return gui_helper_state.add_state(state_m, state_type)\n else:\n logger.warning(\"Add new state is not performed because 
target state indication has to be a {1} not {0}\"\n \"\".format(state_m.__class__.__name__, StateModel.__name__))\n\n # TODO this code can not be reached -> recover again? -> e.g. feature select transition add's state to parent\n if isinstance(state_m, (TransitionModel, DataFlowModel)) or \\\n isinstance(state_m, (DataPortModel, OutcomeModel)) and isinstance(state_m.parent, ContainerStateModel):\n return gui_helper_state.add_state(state_m.parent, state_type)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef insert_state_into_selected_state(state, as_template=False):\n smm_m = rafcon.gui.singleton.state_machine_manager_model\n\n if not isinstance(state, State):\n logger.warning(\"A state is needed to be insert not {0}\".format(state))\n return False\n\n if not smm_m.selected_state_machine_id:\n logger.warning(\"Please select a container state within a state machine first\")\n return False\n\n selection = smm_m.state_machines[smm_m.selected_state_machine_id].selection\n if len(selection.states) > 1:\n logger.warning(\"Please select exactly one state for the insertion\")\n return False\n\n if len(selection.states) == 0:\n logger.warning(\"Please select a state for the insertion\")\n return False\n\n if is_selection_inside_of_library_state(selected_elements=[selection.get_selected_state()]):\n logger.warning(\"State is not insert because target state is inside of a library state.\")\n return False\n\n gui_helper_state.insert_state_as(selection.get_selected_state(), state, as_template)\n\n return True", "response": "Adds a state into the selected state"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef substitute_selected_state(state, as_template=False, keep_name=False):\n # print(\"substitute_selected_state\", state, as_template)\n assert isinstance(state, State)\n from rafcon.core.states.barrier_concurrency_state import DeciderState\n if isinstance(state, 
DeciderState):\n raise ValueError(\"State of type DeciderState can not be substituted.\")\n\n smm_m = rafcon.gui.singleton.state_machine_manager_model\n if not smm_m.selected_state_machine_id:\n logger.error(\"Selected state machine can not be found, please select a state within a state machine first.\")\n return False\n\n selection = smm_m.state_machines[smm_m.selected_state_machine_id].selection\n selected_state_m = selection.get_selected_state()\n if len(selection.states) != 1:\n logger.error(\"Please select exactly one state for the substitution\")\n return False\n if is_selection_inside_of_library_state(selected_elements=[selected_state_m]):\n logger.warning(\"Substitute is not performed because target state is inside of a library state.\")\n return\n\n gui_helper_state.substitute_state_as(selected_state_m, state, as_template, keep_name)\n\n return True", "response": "Substitute the selected state with the handed state"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef construct_partial_network(self, cluster, scenario):\n\n #Create an empty network\n partial_network = Network()\n\n # find all lines that have at least one bus inside the cluster\n busflags = (self.buses['cluster'] == cluster)\n\n def is_bus_in_cluster(conn):\n return busflags[conn]\n\n # Copy configurations to new network\n partial_network.snapshots = self.original_network.snapshots\n partial_network.snapshot_weightings = (self.original_network\n .snapshot_weightings)\n partial_network.carriers = self.original_network.carriers\n\n # Collect all connectors that have some node inside the cluster\n\n external_buses = pd.DataFrame()\n\n line_types = ['lines', 'links', 'transformers']\n for line_type in line_types:\n # Copy all lines that reside entirely inside the cluster ...\n setattr(partial_network, line_type,\n filter_internal_connector(\n getattr(self.original_network, line_type),\n is_bus_in_cluster))\n\n # ... 
and their time series\n # TODO: These are all time series, not just the ones from lines\n # residing entirely in side the cluster.\n # Is this a problem?\n setattr(partial_network, line_type + '_t',\n getattr(self.original_network, line_type + '_t'))\n\n # Copy all lines whose `bus0` lies within the cluster\n left_external_connectors = filter_left_external_connector(\n getattr(self.original_network, line_type),\n is_bus_in_cluster)\n\n if not left_external_connectors.empty:\n f = lambda x: self.idx_prefix + self.clustering.busmap.loc[x]\n ca_option = pd.get_option('mode.chained_assignment')\n pd.set_option('mode.chained_assignment', None)\n left_external_connectors.loc[:, 'bus0'] = (\n left_external_connectors.loc[:, 'bus0'].apply(f))\n pd.set_option('mode.chained_assignment', ca_option)\n external_buses = pd.concat((external_buses,\n left_external_connectors.bus0))\n\n # Copy all lines whose `bus1` lies within the cluster\n right_external_connectors = filter_right_external_connector(\n getattr(self.original_network, line_type),\n is_bus_in_cluster)\n if not right_external_connectors.empty:\n f = lambda x: self.idx_prefix + self.clustering.busmap.loc[x]\n ca_option = pd.get_option('mode.chained_assignment')\n pd.set_option('mode.chained_assignment', None)\n right_external_connectors.loc[:, 'bus1'] = (\n right_external_connectors.loc[:, 'bus1'].apply(f))\n pd.set_option('mode.chained_assignment', ca_option)\n external_buses = pd.concat((external_buses,\n right_external_connectors.bus1))\n\n # Collect all buses that are contained in or somehow connected to the\n # cluster\n\n buses_in_lines = self.buses[busflags].index\n\n bus_types = ['loads', 'generators', 'stores', 'storage_units',\n 'shunt_impedances']\n\n # Copy all values that are part of the cluster\n partial_network.buses = self.original_network.buses[\n self.original_network.buses.index.isin(buses_in_lines)]\n\n # Collect all buses that are external, but connected to the cluster ...\n externals_to_insert = 
self.clustered_network.buses[\n self.clustered_network.buses.index.isin(\n map(lambda x: x[0][len(self.idx_prefix):],\n external_buses.values))]\n\n # ... prefix them to avoid name clashes with buses from the original\n # network ...\n self.reindex_with_prefix(externals_to_insert)\n\n # .. and insert them as well as their time series\n partial_network.buses = (partial_network.buses\n .append(externals_to_insert))\n partial_network.buses_t = self.original_network.buses_t\n\n # TODO: Rename `bustype` to on_bus_type\n for bustype in bus_types:\n # Copy loads, generators, ... from original network to network copy\n setattr(partial_network, bustype,\n filter_buses(getattr(self.original_network, bustype),\n buses_in_lines))\n\n # Collect on-bus components from external, connected clusters\n buses_to_insert = filter_buses(\n getattr(self.clustered_network, bustype),\n map(lambda x: x[0][len(self.idx_prefix):],\n external_buses.values))\n\n # Prefix their external bindings\n buses_to_insert.loc[:, 'bus'] = (\n self.idx_prefix +\n buses_to_insert.loc[:, 'bus'])\n\n setattr(partial_network, bustype,\n getattr(partial_network, bustype).append(buses_to_insert))\n\n # Also copy their time series\n setattr(partial_network,\n bustype + '_t',\n getattr(self.original_network, bustype + '_t'))\n # Note: The code above copies more than necessary, because it\n # copies every time series for `bustype` from the original\n # network and not only the subset belonging to the partial\n # network. 
The commented code below tries to filter the time\n # series accordingly, but there must be bug somewhere because\n # using it, the time series in the clusters and sums of the\n # time series after disaggregation don't match up.\n \"\"\"\n series = getattr(self.original_network, bustype + '_t')\n partial_series = type(series)()\n for s in series:\n partial_series[s] = series[s].loc[\n :,\n getattr(partial_network, bustype)\n .index.intersection(series[s].columns)]\n setattr(partial_network, bustype + '_t', partial_series)\n \"\"\"\n\n # Just a simple sanity check\n # TODO: Remove when sure that disaggregation will not go insane anymore\n for line_type in line_types:\n assert (getattr(partial_network, line_type).bus0.isin(\n partial_network.buses.index).all())\n assert (getattr(partial_network, line_type).bus1.isin(\n partial_network.buses.index).all())\n\n return partial_network, external_buses", "response": "Construct a partial network that has been merged into a single cluster."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef solve(self, scenario, solver):\n clusters = set(self.clustering.busmap.values)\n n = len(clusters)\n self.stats = {'clusters': pd.DataFrame(\n index=sorted(clusters),\n columns=[\"decompose\", \"spread\", \"transfer\"])}\n profile = cProfile.Profile()\n for i, cluster in enumerate(sorted(clusters)):\n print('---')\n print('Decompose cluster %s (%d/%d)' % (cluster, i+1, n))\n profile.enable()\n t = time.time()\n partial_network, externals = self.construct_partial_network(\n cluster,\n scenario)\n profile.disable()\n self.stats['clusters'].loc[cluster, 'decompose'] = time.time() - t\n print('Decomposed in ',\n self.stats['clusters'].loc[cluster, 'decompose'])\n t = time.time()\n profile.enable()\n self.solve_partial_network(cluster, partial_network, scenario,\n solver)\n profile.disable()\n self.stats['clusters'].loc[cluster, 'spread'] = time.time() - t\n print('Result 
distributed in ',\n self.stats['clusters'].loc[cluster, 'spread'])\n profile.enable()\n t = time.time()\n self.transfer_results(partial_network, externals)\n profile.disable()\n self.stats['clusters'].loc[cluster, 'transfer'] = time.time() - t\n print('Results transferred in ',\n self.stats['clusters'].loc[cluster, 'transfer'])\n\n profile.enable()\n t = time.time()\n print('---')\n fs = (mc(\"sum\"), mc(\"sum\"))\n for bt, ts in (\n ('generators', {'p': fs, 'q': fs}),\n ('storage_units', {'p': fs, 'state_of_charge': fs, 'q': fs})):\n print(\"Attribute sums, {}, clustered - disaggregated:\" .format(bt))\n cnb = getattr(self.clustered_network, bt)\n onb = getattr(self.original_network, bt)\n print(\"{:>{}}: {}\".format('p_nom_opt', 4 + len('state_of_charge'),\n reduce(lambda x, f: f(x), fs[:-1], cnb['p_nom_opt'])\n -\n reduce(lambda x, f: f(x), fs[:-1], onb['p_nom_opt'])))\n\n print(\"Series sums, {}, clustered - disaggregated:\" .format(bt))\n cnb = getattr(self.clustered_network, bt + '_t')\n onb = getattr(self.original_network, bt + '_t')\n for s in ts:\n print(\"{:>{}}: {}\".format(s, 4 + len('state_of_charge'),\n reduce(lambda x, f: f(x), ts[s], cnb[s])\n -\n reduce(lambda x, f: f(x), ts[s], onb[s])))\n profile.disable()\n self.stats['check'] = time.time() - t\n print('Checks computed in ', self.stats['check'])", "response": "Solve each cluster into separate units and try to optimize them separately"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef modify_origin(self, from_state, from_outcome):\n if not (from_state is None and from_outcome is None):\n if not isinstance(from_state, string_types):\n raise ValueError(\"Invalid transition origin port: from_state must be a string\")\n if not isinstance(from_outcome, int):\n raise ValueError(\"Invalid transition origin port: from_outcome must be of type int\")\n\n old_from_state = self.from_state\n old_from_outcome = self.from_outcome\n self._from_state = from_state\n 
self._from_outcome = from_outcome\n\n valid, message = self._check_validity()\n if not valid:\n self._from_state = old_from_state\n self._from_outcome = old_from_outcome\n raise ValueError(\"The transition origin could not be changed: {0}\".format(message))", "response": "Modify the state of the transition origin at the same time."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef modify_target(self, to_state, to_outcome=None):\n if not (to_state is None and (to_outcome is not int and to_outcome is not None)):\n if not isinstance(to_state, string_types):\n raise ValueError(\"Invalid transition target port: to_state must be a string\")\n if not isinstance(to_outcome, int) and to_outcome is not None:\n raise ValueError(\"Invalid transition target port: to_outcome must be of type int or None (if to_state \"\n \"is of type str)\")\n\n old_to_state = self.to_state\n old_to_outcome = self.to_outcome\n self._to_state = to_state\n self._to_outcome = to_outcome\n\n valid, message = self._check_validity()\n if not valid:\n self._to_state = old_to_state\n self._to_outcome = old_to_outcome\n raise ValueError(\"The transition target could not be changed: {0}\".format(message))", "response": "Modify the target state and outcome of a transition."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncalling when the view was registered", "response": "def register_view(self, view):\n \"\"\"Called when the View was registered\"\"\"\n ExtendedController.register_view(self, view) # no super to avoid sm based selection initialization\n view['name_text'].set_property('editable', True)\n view['value_text'].set_property('editable', True)\n view['type_text'].set_property('editable', True)\n\n self.tree_view.connect('key-press-event', self.tree_view_keypress_callback)\n self._apply_value_on_edited_and_focus_out(view['name_text'], self.apply_new_global_variable_name)\n 
self._apply_value_on_edited_and_focus_out(view['value_text'], self.apply_new_global_variable_value)\n self._apply_value_on_edited_and_focus_out(view['type_text'], self.apply_new_global_variable_type)\n view['new_global_variable_button'].connect('clicked', self.on_add)\n view['delete_global_variable_button'].connect('clicked', self.on_remove)\n view['lock_global_variable_button'].connect('clicked', self.on_lock)\n view['unlock_global_variable_button'].connect('clicked', self.on_unlock)\n self._tree_selection.set_mode(Gtk.SelectionMode.MULTIPLE)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef global_variable_is_editable(self, gv_name, intro_message='edit'):\n if self.model.global_variable_manager.is_locked(gv_name):\n logger.error(\"{1} of global variable '{0}' is not possible, as it is locked\".format(gv_name, intro_message))\n return False\n return True", "response": "Check whether global variable is editable"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ntriggering when the add button is clicked.", "response": "def on_add(self, widget, data=None):\n \"\"\"Create a global variable with default value and select its row\n\n Triggered when the add button in the global variables tab is clicked.\n \"\"\"\n gv_name = \"new_global_%s\" % self.global_variable_counter\n self.global_variable_counter += 1\n try:\n self.model.global_variable_manager.set_variable(gv_name, None)\n except (RuntimeError, AttributeError, TypeError) as e:\n logger.warning(\"Addition of new global variable '{0}' failed: {1}\".format(gv_name, e))\n self.select_entry(gv_name)\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef on_lock(self, widget, data=None):\n path_list = None\n if self.view is not None:\n model, path_list = self.tree_view.get_selection().get_selected_rows()\n models = 
[self.list_store[path][self.MODEL_STORAGE_ID] for path in path_list] if path_list else []\n if models:\n if len(models) > 1:\n self._logger.warning(\"Please select only one element to be locked.\")\n try:\n self.model.global_variable_manager.lock_variable(models[0])\n except AttributeError as e:\n self._logger.warning(\"The respective core element of {1}.list_store couldn't be locked. -> {0}\"\n \"\".format(e, self.__class__.__name__))\n return True\n else:\n self._logger.warning(\"Please select an element to be locked.\")", "response": "Locks respective selected core element"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nremove the respective core element of handed global variable name", "response": "def remove_core_element(self, model):\n \"\"\"Remove respective core element of handed global variable name\n\n :param str model: String that is the key/gv_name of core element which should be removed\n :return:\n \"\"\"\n gv_name = model\n if self.global_variable_is_editable(gv_name, \"Deletion\"):\n try:\n self.model.global_variable_manager.delete_variable(gv_name)\n except AttributeError as e:\n logger.warning(\"The respective global variable '{1}' couldn't be removed. 
-> {0}\"\n \"\".format(e, model))"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\napply the new global variable name according handed string", "response": "def apply_new_global_variable_name(self, path, new_gv_name):\n \"\"\"Change global variable name/key according handed string\n\n Updates the global variable name only if different and already in list store.\n\n :param path: The path identifying the edited global variable tree view row, can be str, int or tuple.\n :param str new_gv_name: New global variable name\n \"\"\"\n gv_name = self.list_store[path][self.NAME_STORAGE_ID]\n if gv_name == new_gv_name or not self.global_variable_is_editable(gv_name, 'Name change'):\n return\n\n data_value = self.model.global_variable_manager.get_representation(gv_name)\n data_type = self.model.global_variable_manager.get_data_type(gv_name)\n\n try:\n self.model.global_variable_manager.delete_variable(gv_name)\n self.model.global_variable_manager.set_variable(new_gv_name, data_value, data_type=data_type)\n gv_name = new_gv_name\n except (AttributeError, RuntimeError, TypeError) as e:\n logger.warning(\"Can not apply new name '{0}'\".format(e))\n self.update_global_variables_list_store()\n self.select_entry(gv_name)\n\n # informing the tab key feature handler function about the changed core element id\n if hasattr(self.tree_view_keypress_callback.__func__, \"core_element_id\"):\n self.tree_view_keypress_callback.__func__.core_element_id = gv_name"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\napply the new value according handed string to the global variable tree view row.", "response": "def apply_new_global_variable_value(self, path, new_value_as_string):\n \"\"\"Change global variable value according handed string\n\n Updates the global variable value only if new value string is different to old representation.\n\n :param path: The path identifying the edited global variable tree view row, can be str, int or tuple.\n 
:param str new_value_as_string: New global variable value as string\n \"\"\"\n if self.list_store[path][self.DATA_TYPE_AS_STRING_STORAGE_ID] == new_value_as_string:\n return\n gv_name = self.list_store[path][self.NAME_STORAGE_ID]\n if not self.global_variable_is_editable(gv_name, 'Change of value'):\n return\n data_type = self.model.global_variable_manager.get_data_type(gv_name)\n old_value = self.model.global_variable_manager.get_representation(gv_name)\n\n # preserve type especially if type=NoneType\n if issubclass(data_type, (type(old_value), type(None))):\n old_type = data_type\n if issubclass(data_type, type(None)):\n old_type = type(old_value)\n logger.debug(\"Trying to parse '{}' to type '{}' of old global variable value '{}'\".format(\n new_value_as_string, old_type.__name__, old_value))\n try:\n new_value = type_helpers.convert_string_value_to_type_value(new_value_as_string, old_type)\n except (AttributeError, ValueError) as e:\n if issubclass(data_type, type(None)):\n new_value = new_value_as_string\n logger.warning(\"New value '{}' stored as string, previous value '{}' of global variable '{}' was \"\n \"of type '{}'\".format(new_value, old_value, gv_name, type(old_value).__name__))\n else:\n logger.warning(\"Restoring old value of global variable '{}': {}\".format(gv_name, e))\n return\n else:\n logger.error(\"Global variable '{}' with inconsistent value data type '{}' and data type '{}'\".format(\n gv_name, [type(old_value).__name__, type(None).__name__], data_type.__name__))\n return\n\n try:\n self.model.global_variable_manager.set_variable(gv_name, new_value, data_type=data_type)\n except (RuntimeError, AttributeError, TypeError) as e:\n logger.error(\"Error while setting global variable '{0}' to value '{1}' -> Exception: {2}\".format(\n gv_name, new_value, e))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef apply_new_global_variable_type(self, path, new_data_type_as_string):\n 
if self.list_store[path][self.DATA_TYPE_AS_STRING_STORAGE_ID] == new_data_type_as_string:\n return\n gv_name = self.list_store[path][self.NAME_STORAGE_ID]\n if not self.global_variable_is_editable(gv_name, 'Type change'):\n return\n old_value = self.model.global_variable_manager.get_representation(gv_name)\n\n # check if valid data type string\n try:\n new_data_type = type_helpers.convert_string_to_type(new_data_type_as_string)\n except (AttributeError, ValueError) as e:\n logger.error(\"Could not change data type to '{0}': {1}\".format(new_data_type_as_string, e))\n return\n assert isinstance(new_data_type, type)\n\n # convert old value\n if issubclass(new_data_type, type(None)):\n new_value = old_value\n else: # new_data_type in [str, float, int, list, dict, tuple, bool]:\n try:\n new_value = new_data_type(old_value)\n except (ValueError, TypeError) as e:\n new_value = new_data_type()\n logger.warning(\"Old value '{}' of global variable '{}' could not be parsed to new type '{}' and is \"\n \"therefore resetted: {}\".format(old_value, gv_name, new_data_type.__name__, e))\n\n # set value in global variable manager\n try:\n self.model.global_variable_manager.set_variable(gv_name, new_value, data_type=new_data_type)\n except (ValueError, RuntimeError, TypeError) as e:\n logger.error(\"Could not set new value unexpected failure '{0}' to value '{1}' -> Exception: {2}\"\n \"\".format(gv_name, new_value, e))", "response": "Applies the new data type according handed string to the global variable tree view row."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nhandle GTKMVC3 notification from global variable manager", "response": "def assign_notification_from_gvm(self, model, prop_name, info):\n \"\"\"Handles gtkmvc3 notification from global variable manager\n\n Calls update of whole list store in case new variable was added. 
Avoids to run updates without reasonable change.\n Holds tree store and updates row elements if is-locked or global variable value changes.\n \"\"\"\n\n if info['method_name'] in ['set_locked_variable'] or info['result'] is Exception:\n return\n\n if info['method_name'] in ['lock_variable', 'unlock_variable']:\n key = info.kwargs.get('key', info.args[1]) if len(info.args) > 1 else info.kwargs['key']\n if key in self.list_store_iterators:\n gv_row_path = self.list_store.get_path(self.list_store_iterators[key])\n self.list_store[gv_row_path][self.IS_LOCKED_AS_STRING_STORAGE_ID] = \\\n str(self.model.global_variable_manager.is_locked(key))\n elif info['method_name'] in ['set_variable', 'delete_variable']:\n if info['method_name'] == 'set_variable':\n key = info.kwargs.get('key', info.args[1]) if len(info.args) > 1 else info.kwargs['key']\n if key in self.list_store_iterators:\n gv_row_path = self.list_store.get_path(self.list_store_iterators[key])\n self.list_store[gv_row_path][self.VALUE_AS_STRING_STORAGE_ID] = \\\n str(self.model.global_variable_manager.get_representation(key))\n self.list_store[gv_row_path][self.DATA_TYPE_AS_STRING_STORAGE_ID] = \\\n self.model.global_variable_manager.get_data_type(key).__name__\n return\n self.update_global_variables_list_store()\n else:\n logger.warning('Notification that is not handled')"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate the global variable list store with the current values of all global variables.", "response": "def update_global_variables_list_store(self):\n \"\"\"Updates the global variable list store\n\n Triggered after creation or deletion of a variable has taken place.\n \"\"\"\n # logger.info(\"update\")\n self.list_store_iterators = {}\n self.list_store.clear()\n keys = self.model.global_variable_manager.get_all_keys()\n keys.sort()\n for key in keys:\n iter = self.list_store.append([key,\n self.model.global_variable_manager.get_data_type(key).__name__,\n 
str(self.model.global_variable_manager.get_representation(key)),\n str(self.model.global_variable_manager.is_locked(key)),\n ])\n self.list_store_iterators[key] = iter"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef convert_libraries_in_path(config_path, lib_path, target_path=None):\n for lib in os.listdir(lib_path):\n if os.path.isdir(os.path.join(lib_path, lib)) and not '.' == lib[0]:\n if os.path.exists(os.path.join(os.path.join(lib_path, lib), \"statemachine.yaml\")) or \\\n os.path.exists(os.path.join(os.path.join(lib_path, lib), \"statemachine.json\")):\n if not target_path:\n convert(config_path, os.path.join(lib_path, lib))\n else:\n convert(config_path, os.path.join(lib_path, lib), os.path.join(target_path, lib))\n else:\n if not target_path:\n convert_libraries_in_path(config_path, os.path.join(lib_path, lib))\n else:\n convert_libraries_in_path(config_path, os.path.join(lib_path, lib), os.path.join(target_path, lib))\n else:\n if os.path.isdir(os.path.join(lib_path, lib)) and '.' == lib[0]:\n logger.debug(\"lib_root_path/lib_path .*-folder are ignored if within lib_path, \"\n \"e.g. 
-> {0} -> full path is {1}\".format(lib, os.path.join(lib_path, lib)))", "response": "This function resaves all libraries found at the spcified path"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_view_for_id(self, view_class, element_id, parent_item=None):\n from rafcon.gui.mygaphas.items.state import StateView\n from rafcon.gui.mygaphas.items.connection import DataFlowView, TransitionView\n if parent_item is None:\n items = self.get_all_items()\n else:\n items = self.get_children(parent_item)\n for item in items:\n if view_class is StateView and isinstance(item, StateView) and item.model.state.state_id == element_id:\n return item\n if view_class is TransitionView and isinstance(item, TransitionView) and \\\n item.model.transition.transition_id == element_id:\n return item\n if view_class is DataFlowView and isinstance(item, DataFlowView) and \\\n item.model.data_flow.data_flow_id == element_id:\n return item\n return None", "response": "Searches and returns the View for the given id and type"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nupdates canvas and handle all events in the gtk queue", "response": "def wait_for_update(self, trigger_update=False):\n \"\"\"Update canvas and handle all events in the gtk queue\n\n :param bool trigger_update: Whether to call update_now() or not\n \"\"\"\n if trigger_update:\n self.update_now()\n\n from gi.repository import Gtk\n from gi.repository import GLib\n from threading import Event\n event = Event()\n\n # Handle all events from gaphas, but not from gtkmvc3\n # Make use of the priority, which is higher for gaphas then for gtkmvc3\n def priority_handled(event):\n event.set()\n priority = (GLib.PRIORITY_HIGH_IDLE + GLib.PRIORITY_DEFAULT_IDLE) / 2\n # idle_add is necessary here, as we do not want to block the user from interacting with the GUI\n # while gaphas is redrawing\n GLib.idle_add(priority_handled, event, 
priority=priority)\n while not event.is_set():\n Gtk.main_iteration()"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn two delegating variables. Each variable should contain a value attribute with the real value. Each variable should contain a value attribute with the real value. Each variable should contain a value attribute with the real value.", "response": "def _get_value(self):\n \"\"\"\n Return two delegating variables. Each variable should contain\n a value attribute with the real value.\n \"\"\"\n x, y = self._point.x, self._point.y\n self._px, self._py = self._item_point.canvas.get_matrix_i2i(self._item_point,\n self._item_target).transform_point(x, y)\n return self._px, self._py"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nconvert a string into a type or class.", "response": "def convert_string_to_type(string_value):\n \"\"\"Converts a string into a type or class\n\n :param string_value: the string to be converted, e.g. \"int\"\n :return: The type derived from string_value, e.g. 
int\n \"\"\"\n # If the parameter is already a type, return it\n if string_value in ['None', type(None).__name__]:\n return type(None)\n if isinstance(string_value, type) or isclass(string_value):\n return string_value\n\n # Get object associated with string\n # First check whether we are having a built in type (int, str, etc)\n if sys.version_info >= (3,):\n import builtins as builtins23\n else:\n import __builtin__ as builtins23\n if hasattr(builtins23, string_value):\n obj = getattr(builtins23, string_value)\n if type(obj) is type:\n return obj\n # If not, try to locate the module\n try:\n obj = locate(string_value)\n except ErrorDuringImport as e:\n raise ValueError(\"Unknown type '{0}'\".format(e))\n\n # Check whether object is a type\n if type(obj) is type:\n return locate(string_value)\n\n # Check whether object is a class\n if isclass(obj):\n return obj\n\n # Raise error if none is the case\n raise ValueError(\"Unknown type '{0}'\".format(string_value))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef type_inherits_of_type(inheriting_type, base_type):\n assert isinstance(inheriting_type, type) or isclass(inheriting_type)\n assert isinstance(base_type, type) or isclass(base_type)\n\n if inheriting_type == base_type:\n return True\n else:\n if len(inheriting_type.__bases__) != 1:\n return False\n return type_inherits_of_type(inheriting_type.__bases__[0], base_type)", "response": "Checks whether inheriting_type inherits from base_type."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef clear_results_db(session):\n '''Used to clear the result tables in the OEDB. 
Caution!\n This deletes EVERY RESULT SET!'''\n\n from egoio.db_tables.model_draft import EgoGridPfHvResultBus as BusResult,\\\n EgoGridPfHvResultBusT as BusTResult,\\\n EgoGridPfHvResultStorage as StorageResult,\\\n EgoGridPfHvResultStorageT as StorageTResult,\\\n EgoGridPfHvResultGenerator as GeneratorResult,\\\n EgoGridPfHvResultGeneratorT as GeneratorTResult,\\\n EgoGridPfHvResultLine as LineResult,\\\n EgoGridPfHvResultLineT as LineTResult,\\\n EgoGridPfHvResultLoad as LoadResult,\\\n EgoGridPfHvResultLoadT as LoadTResult,\\\n EgoGridPfHvResultTransformer as TransformerResult,\\\n EgoGridPfHvResultTransformerT as TransformerTResult,\\\n EgoGridPfHvResultMeta as ResultMeta\n print('Are you sure that you want to clear all results in the OEDB?')\n choice = ''\n while choice not in ['y', 'n']:\n choice = input('(y/n): ')\n if choice == 'y':\n print('Are you sure?')\n choice2 = ''\n while choice2 not in ['y', 'n']:\n choice2 = input('(y/n): ')\n if choice2 == 'y':\n print('Deleting all results...')\n session.query(BusResult).delete()\n session.query(BusTResult).delete()\n session.query(StorageResult).delete()\n session.query(StorageTResult).delete()\n session.query(GeneratorResult).delete()\n session.query(GeneratorTResult).delete()\n session.query(LoadResult).delete()\n session.query(LoadTResult).delete()\n session.query(LineResult).delete()\n session.query(LineTResult).delete()\n session.query(TransformerResult).delete()\n session.query(TransformerTResult).delete()\n session.query(ResultMeta).delete()\n session.commit()\n else:\n print('Deleting aborted!')\n else:\n print('Deleting aborted!')", "response": "This function deletes all the tables in the database."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef results_to_oedb(session, network, args, grid='hv', safe_results=False):\n # Update generator_ids when k_means clustering to get integer ids\n if args['network_clustering_kmeans'] != False:\n 
new_index=pd.DataFrame(index = network.generators.index)\n new_index['new']=range(len(network.generators))\n\n for col in (network.generators_t):\n if not network.generators_t[col].empty:\n network.generators_t[col].columns =\\\n new_index.new[network.generators_t[col].columns]\n\n network.generators.index = range(len(network.generators))\n \n # moved this here to prevent error when not using the mv-schema\n import datetime\n if grid.lower() == 'mv':\n print('MV currently not implemented')\n elif grid.lower() == 'hv':\n from egoio.db_tables.model_draft import\\\n EgoGridPfHvResultBus as BusResult,\\\n EgoGridPfHvResultBusT as BusTResult,\\\n EgoGridPfHvResultStorage as StorageResult,\\\n EgoGridPfHvResultStorageT as StorageTResult,\\\n EgoGridPfHvResultGenerator as GeneratorResult,\\\n EgoGridPfHvResultGeneratorT as GeneratorTResult,\\\n EgoGridPfHvResultLine as LineResult,\\\n EgoGridPfHvResultLineT as LineTResult,\\\n EgoGridPfHvResultLoad as LoadResult,\\\n EgoGridPfHvResultLoadT as LoadTResult,\\\n EgoGridPfHvResultTransformer as TransformerResult,\\\n EgoGridPfHvResultTransformerT as TransformerTResult,\\\n EgoGridPfHvResultMeta as ResultMeta,\\\n EgoGridPfHvSource as Source\n else:\n print('Please enter mv or hv!')\n\n print('Uploading results to db...')\n # get last result id and get new one\n last_res_id = session.query(func.max(ResultMeta.result_id)).scalar()\n if last_res_id == None:\n new_res_id = 1\n else:\n new_res_id = last_res_id + 1\n\n # result meta data\n res_meta = ResultMeta()\n meta_misc = []\n for arg, value in args.items():\n if arg not in dir(res_meta) and arg not in ['db', 'lpfile',\n 'results', 'export']:\n meta_misc.append([arg, str(value)])\n\n res_meta.result_id = new_res_id\n res_meta.scn_name = args['scn_name']\n res_meta.calc_date = datetime.datetime.now()\n res_meta.user_name = args['user_name']\n res_meta.method = args['method']\n res_meta.start_snapshot = args['start_snapshot']\n res_meta.end_snapshot = args['end_snapshot']\n 
res_meta.safe_results = safe_results\n res_meta.snapshots = network.snapshots.tolist()\n res_meta.solver = args['solver']\n res_meta.settings = meta_misc\n\n session.add(res_meta)\n session.commit()\n\n # get source_id\n sources = pd.read_sql(session.query(Source).statement, session.bind)\n for gen in network.generators.index:\n if network.generators.carrier[gen] not in sources.name.values:\n new_source = Source()\n new_source.source_id = session.query(\n func.max(Source.source_id)).scalar()+1\n new_source.name = network.generators.carrier[gen]\n session.add(new_source)\n session.commit()\n sources = pd.read_sql(\n session.query(Source).statement, session.bind)\n try:\n old_source_id = int(\n sources.source_id[\n sources.name == network.generators.carrier[gen]])\n network.generators.set_value(gen, 'source', int(old_source_id))\n except:\n print(\n 'Source ' + network.generators.carrier[gen] + \n ' is not in the source table!')\n for stor in network.storage_units.index:\n if network.storage_units.carrier[stor] not in sources.name.values:\n new_source = Source()\n new_source.source_id = session.query(\n func.max(Source.source_id)).scalar()+1\n new_source.name = network.storage_units.carrier[stor]\n session.add(new_source)\n session.commit()\n sources = pd.read_sql(\n session.query(Source).statement, session.bind)\n try:\n old_source_id = int(\n sources.source_id[\n sources.name == network.storage_units.carrier[stor]])\n network.storage_units.set_value(stor, 'source', int(old_source_id))\n except:\n print(\n 'Source ' + network.storage_units.carrier[stor] +\n ' is not in the source table!')\n\n whereismyindex = {BusResult: network.buses.index,\n LoadResult: network.loads.index,\n LineResult: network.lines.index,\n TransformerResult: network.transformers.index,\n StorageResult: network.storage_units.index,\n GeneratorResult: network.generators.index,\n BusTResult: network.buses.index,\n LoadTResult: network.loads.index,\n LineTResult: network.lines.index,\n 
TransformerTResult: network.transformers.index,\n StorageTResult: network.storage_units.index,\n GeneratorTResult: network.generators.index}\n\n whereismydata = {BusResult: network.buses,\n LoadResult: network.loads,\n LineResult: network.lines,\n TransformerResult: network.transformers,\n StorageResult: network.storage_units,\n GeneratorResult: network.generators,\n BusTResult: network.buses_t,\n LoadTResult: network.loads_t,\n LineTResult: network.lines_t,\n TransformerTResult: network.transformers_t,\n StorageTResult: network.storage_units_t,\n GeneratorTResult: network.generators_t}\n\n new_to_old_name = {'p_min_pu_fixed': 'p_min_pu',\n 'p_max_pu_fixed': 'p_max_pu',\n 'dispatch': 'former_dispatch',\n 'current_type': 'carrier',\n 'soc_cyclic': 'cyclic_state_of_charge',\n 'soc_initial': 'state_of_charge_initial'}\n\n ormclasses = [BusResult, LoadResult, LineResult, TransformerResult, \n GeneratorResult, StorageResult, BusTResult, LoadTResult, \n LineTResult, TransformerTResult, GeneratorTResult, \n StorageTResult]\n\n for ormclass in ormclasses:\n for index in whereismyindex[ormclass]:\n myinstance = ormclass()\n columns = ormclass.__table__.columns.keys()\n columns.remove('result_id')\n myinstance.result_id = new_res_id\n for col in columns:\n if '_id' in col:\n class_id_name = col\n else:\n continue\n setattr(myinstance, class_id_name, index)\n columns.remove(class_id_name)\n\n if str(ormclass)[:-2].endswith('T'):\n for col in columns:\n if col == 'soc_set':\n try:\n setattr(myinstance, col, getattr(\n whereismydata[ormclass], \n 'state_of_charge_set')[index].tolist())\n except:\n pass\n else:\n try:\n setattr(myinstance, col, getattr(\n whereismydata[ormclass], col)[index].tolist())\n except:\n pass\n session.add(myinstance)\n\n else:\n for col in columns:\n if col in new_to_old_name:\n if col == 'soc_cyclic':\n try:\n setattr(myinstance, col, bool(\n whereismydata[ormclass].loc[index,\n new_to_old_name[col]]))\n except:\n pass\n elif 'Storage' in 
str(ormclass) and col == 'dispatch':\n try:\n setattr(myinstance, col,\n whereismydata[ormclass].loc[index, col])\n except:\n pass\n else:\n try:\n setattr(\n myinstance, col, whereismydata[ormclass].\\\n loc[index, new_to_old_name[col]])\n except:\n pass\n elif col in ['s_nom_extendable', 'p_nom_extendable']:\n try:\n setattr(myinstance, col, bool(\n whereismydata[ormclass].loc[index, col]))\n except:\n pass\n else:\n try:\n setattr(myinstance, col,\n whereismydata[ormclass].loc[index, col])\n except:\n pass\n session.add(myinstance)\n\n session.commit()\n print('Upload finished!')\n\n return", "response": "Return results obtained from PyPSA to Oedb"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef run_sql_script(conn, scriptname='results_md2grid.sql'):\n\n script_dir = os.path.abspath(\n os.path.join(os.path.dirname(__file__), 'sql_scripts'))\n script_str = open(os.path.join(script_dir, scriptname)).read()\n conn.execution_options(autocommit=True).execute(script_str)\n\n return", "response": "This function runs the sql script in the folder sql_scripts"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef extension (network, session, version, scn_extension, start_snapshot, \n end_snapshot, **kwargs):\n \"\"\"\n Function that adds an additional network to the existing network container. \n The new network can include every PyPSA-component (e.g. buses, lines, links). \n To connect it to the existing network, transformers are needed. \n \n All components and its timeseries of the additional scenario need to be inserted in the fitting 'model_draft.ego_grid_pf_hv_extension_' table. \n The scn_name in the tables have to be labled with 'extension_' + scn_name (e.g. 
'extension_nep2035').\n\n Until now, the tables include three additional scenarios:\n 'nep2035_confirmed': all new lines and needed transformers planed in the 'Netzentwicklungsplan 2035' (NEP2035) that have been confirmed by the Bundesnetzagentur (BNetzA)\n\n 'nep2035_b2': all new lines and needed transformers planned in the NEP 2035 in the scenario 2035 B2\n\n 'BE_NO_NEP 2035': DC-lines and transformers to connect the upcomming electrical-neighbours Belgium and Norway \n Generation, loads and its timeseries in Belgium and Norway for scenario 'NEP 2035'\n\n\n Parameters\n -----\n network : The existing network container (e.g. scenario 'NEP 2035')\n session : session-data\n overlay_scn_name : Name of the additional scenario (WITHOUT 'extension_')\n start_snapshot, end_snapshot: Simulation time\n\n Returns\n ------\n network : Network container including existing and additional network\n\n \"\"\"\n \n if version is None:\n ormcls_prefix = 'EgoGridPfHvExtension'\n else:\n ormcls_prefix = 'EgoPfHvExtension'\n \n # Adding overlay-network to existing network\n scenario = NetworkScenario(session,\n version = version,\n prefix=ormcls_prefix,\n method=kwargs.get('method', 'lopf'),\n start_snapshot=start_snapshot,\n end_snapshot=end_snapshot,\n scn_name='extension_' + scn_extension)\n\n network = scenario.build_network(network)\n\n # Allow lossless links to conduct bidirectional\n network.links.loc[network.links.efficiency == 1.0, 'p_min_pu'] = -1\n\n # Set coordinates for new buses\n extension_buses = network.buses[network.buses.scn_name ==\n 'extension_' + scn_extension]\n for idx, row in extension_buses.iterrows():\n wkt_geom = to_shape(row['geom'])\n network.buses.loc[idx, 'x'] = wkt_geom.x\n network.buses.loc[idx, 'y'] = wkt_geom.y\n \n return network", "response": "Function that adds an additional network to the existing network container."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nfunction that removes components in a decommissioning 
scenario from the network container.", "response": "def decommissioning(network, session, args, **kwargs):\n \"\"\"\n Function that removes components in a decommissioning-scenario from\n the existing network container.\n Currently, only lines can be decommissioned.\n \n All components of the decommissioning scenario need to be inserted in\n the fitting 'model_draft.ego_grid_pf_hv_extension_' table. \n The scn_name in the tables have to be labled with 'decommissioning_' \n + scn_name (e.g. 'decommissioning_nep2035'). \n \n \n Parameters\n -----\n network : The existing network container (e.g. scenario 'NEP 2035')\n session : session-data\n overlay_scn_name : Name of the decommissioning scenario\n\n\n Returns\n ------\n network : Network container including decommissioning\n \n \"\"\" \n\n if args['gridversion'] == None: \n ormclass = getattr(import_module('egoio.db_tables.model_draft'),\n 'EgoGridPfHvExtensionLine')\n else:\n ormclass = getattr(import_module('egoio.db_tables.grid'),\n 'EgoPfHvExtensionLine')\n\n query = session.query(ormclass).filter(\n ormclass.scn_name == 'decommissioning_' + \n args['scn_decommissioning'])\n\n df_decommisionning = pd.read_sql(query.statement,\n session.bind,\n index_col='line_id')\n df_decommisionning.index = df_decommisionning.index.astype(str)\n\n for idx, row in network.lines.iterrows():\n if (row['s_nom_min'] !=0) & (\n row['scn_name'] =='extension_' + args['scn_decommissioning']):\n v_nom_dec = df_decommisionning['v_nom'][(\n df_decommisionning.project == row['project']) & (\n df_decommisionning.project_id == row['project_id'])]\n\n if (v_nom_dec == 110).any():\n network.lines.s_nom_min[network.lines.index == idx]\\\n = args['branch_capacity_factor']['HV'] *\\\n network.lines.s_nom_min\n\n else:\n network.lines.s_nom_min[network.lines.index == idx] =\\\n args['branch_capacity_factor']['eHV'] *\\\n network.lines.s_nom_min\n\n ### Drop decommissioning-lines from existing network\n network.lines = 
network.lines[~network.lines.index.isin(\n df_decommisionning.index)]\n\n return network"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef distance(x0, x1, y0, y1):\n # Calculate square of the distance between two points (Pythagoras)\n distance = (x1.values- x0.values)*(x1.values- x0.values)\\\n + (y1.values- y0.values)*(y1.values- y0.values)\n return distance", "response": "Function that calculates the square of the distance between two points."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef calc_nearest_point(bus1, network):\n\n bus1_index = network.buses.index[network.buses.index == bus1]\n\n forbidden_buses = np.append(\n bus1_index.values, network.lines.bus1[\n network.lines.bus0 == bus1].values)\n\n forbidden_buses = np.append(\n forbidden_buses, network.lines.bus0[network.lines.bus1 == bus1].values)\n\n forbidden_buses = np.append(\n forbidden_buses, network.links.bus0[network.links.bus1 == bus1].values)\n\n forbidden_buses = np.append(\n forbidden_buses, network.links.bus1[network.links.bus0 == bus1].values)\n\n x0 = network.buses.x[network.buses.index.isin(bus1_index)]\n\n y0 = network.buses.y[network.buses.index.isin(bus1_index)]\n\n comparable_buses = network.buses[~network.buses.index.isin(\n forbidden_buses)]\n\n x1 = comparable_buses.x\n\n y1 = comparable_buses.y\n\n distance = (x1.values - x0.values)*(x1.values - x0.values) + \\\n (y1.values - y0.values)*(y1.values - y0.values)\n\n min_distance = distance.min()\n\n bus0 = comparable_buses[(((x1.values - x0.values)*(x1.values - x0.values\n ) + (y1.values - y0.values)*(y1.values - y0.values)) == min_distance)]\n bus0 = bus0.index[bus0.index == bus0.index.max()]\n bus0 = ''.join(bus0.values)\n\n return bus0", "response": "Function that finds the geographical nearest point in a network from a given bus."} {"SOURCE": "codesearchnet", "instruction": "Here you have 
a function in Python 3, explain what it does\ndef map_ormclass(self, name):\n\n try:\n self._mapped[name] = getattr(self._pkg, self._prefix + name)\n\n except AttributeError:\n print('Warning: Relation %s does not exist.' % name)", "response": "Populate the _mapped attribute with orm class name."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nconstruct a DatetimeIndex with the queried temporal resolution start - and end - snapshot.", "response": "def configure_timeindex(self):\n \"\"\" Construct a DateTimeIndex with the queried temporal resolution,\n start- and end_snapshot. \"\"\"\n\n try:\n\n ormclass = self._mapped['TempResolution']\n if self.version:\n tr = self.session.query(ormclass).filter(\n ormclass.temp_id == self.temp_id).filter(\n ormclass.version == self.version).one()\n else:\n tr = self.session.query(ormclass).filter(\n ormclass.temp_id == self.temp_id).one()\n\n except (KeyError, NoResultFound):\n print('temp_id %s does not exist.' % self.temp_id)\n\n timeindex = pd.DatetimeIndex(start=tr.start_time,\n periods=tr.timesteps,\n freq=tr.resolution)\n\n self.timeindex = timeindex[self.start_snapshot - 1: self.end_snapshot]\n \"\"\" pandas.tseries.index.DateTimeIndex :\n Index of snapshots or timesteps. \"\"\""} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nfetches DataFrame with component data from filtered table data.", "response": "def fetch_by_relname(self, name):\n \"\"\" Construct DataFrame with component data from filtered table data.\n\n Parameters\n ----------\n name : str\n Component name.\n\n Returns\n -------\n pd.DataFrame\n Component data.\n \"\"\"\n\n ormclass = self._mapped[name]\n query = self.session.query(ormclass)\n\n if name != carr_ormclass:\n\n query = query.filter(\n ormclass.scn_name == self.scn_name)\n\n if self.version:\n query = query.filter(ormclass.version == self.version)\n\n # TODO: Naming is not consistent. 
Change in database required.\n if name == 'Transformer':\n name = 'Trafo'\n\n df = pd.read_sql(query.statement,\n self.session.bind,\n index_col=name.lower() + '_id')\n if name == 'Link':\n df['bus0'] = df.bus0.astype(int)\n df['bus1'] = df.bus1.astype(int)\n\n if 'source' in df:\n df.source = df.source.map(self.id_to_source())\n\n return df"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nfetch series data from filtered by component name and column.", "response": "def series_fetch_by_relname(self, name, column):\n \"\"\" Construct DataFrame with component timeseries data from filtered\n table data.\n\n Parameters\n ----------\n name : str\n Component name.\n column : str\n Component field with timevarying data.\n\n Returns\n -------\n pd.DataFrame\n Component data.\n \"\"\"\n\n ormclass = self._mapped[name]\n\n # TODO: This is implemented in a not very robust way.\n id_column = re.findall(r'[A-Z][^A-Z]*', name)[0] + '_' + 'id'\n id_column = id_column.lower()\n\n query = self.session.query(\n getattr(ormclass, id_column),\n getattr(ormclass, column)[self.start_snapshot: self.end_snapshot].\n label(column)).filter(and_(\n ormclass.scn_name == self.scn_name,\n ormclass.temp_id == self.temp_id))\n\n if self.version:\n query = query.filter(ormclass.version == self.version)\n\n df = pd.io.sql.read_sql(query.statement,\n self.session.bind,\n columns=[column],\n index_col=id_column)\n\n df.index = df.index.astype(str)\n\n # change of format to fit pypsa\n df = df[column].apply(pd.Series).transpose()\n\n try:\n assert not df.empty\n df.index = self.timeindex\n except AssertionError:\n print(\"No data for %s in column %s.\" % (name, column))\n\n return df"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef build_network(self, network=None, *args, **kwargs):\n # TODO: build_network takes care of divergences in database design and\n # future PyPSA changes from PyPSA's v0.6 on. 
This concept should be\n # replaced, when the oedb has a revision system in place, because\n # sometime this will break!!!\n\n if network != None:\n network = network\n\n else:\n network = pypsa.Network()\n network.set_snapshots(self.timeindex)\n\n timevarying_override = False\n\n if pypsa.__version__ == '0.11.0':\n old_to_new_name = {'Generator':\n {'p_min_pu_fixed': 'p_min_pu',\n 'p_max_pu_fixed': 'p_max_pu',\n 'source': 'carrier',\n 'dispatch': 'former_dispatch'},\n 'Bus':\n {'current_type': 'carrier'},\n 'Transformer':\n {'trafo_id': 'transformer_id'},\n 'Storage':\n {'p_min_pu_fixed': 'p_min_pu',\n 'p_max_pu_fixed': 'p_max_pu',\n 'soc_cyclic': 'cyclic_state_of_charge',\n 'soc_initial': 'state_of_charge_initial',\n 'source': 'carrier'}}\n\n timevarying_override = True\n\n else:\n\n old_to_new_name = {'Storage':\n {'soc_cyclic': 'cyclic_state_of_charge',\n 'soc_initial': 'state_of_charge_initial'}}\n\n for comp, comp_t_dict in self.config.items():\n\n # TODO: This is confusing, should be fixed in db\n pypsa_comp_name = 'StorageUnit' if comp == 'Storage' else comp\n\n df = self.fetch_by_relname(comp)\n\n if comp in old_to_new_name:\n\n tmp = old_to_new_name[comp]\n df.rename(columns=tmp, inplace=True)\n\n network.import_components_from_dataframe(df, pypsa_comp_name)\n\n if comp_t_dict:\n\n for comp_t, columns in comp_t_dict.items():\n\n for col in columns:\n\n df_series = self.series_fetch_by_relname(comp_t, col)\n\n # TODO: VMagPuSet is not implemented.\n if timevarying_override and comp == 'Generator' \\\n and not df_series.empty:\n idx = df[df.former_dispatch == 'flexible'].index\n idx = [i for i in idx if i in df_series.columns]\n df_series.drop(idx, axis=1, inplace=True)\n\n try:\n\n pypsa.io.import_series_from_dataframe(\n network,\n df_series,\n pypsa_comp_name,\n col)\n\n except (ValueError, AttributeError):\n print(\"Series %s of component %s could not be \"\n \"imported\" % (col, pypsa_comp_name))\n\n # populate carrier attribute in PyPSA network\n 
network.import_components_from_dataframe(\n self.fetch_by_relname(carr_ormclass), 'Carrier')\n\n self.network = network\n\n return network", "response": "Build a PyPSA Network object."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef run(self):\n logger.debug(\"Starting execution of {0}{1}\".format(self, \" (backwards)\" if self.backward_execution else \"\"))\n self.setup_run()\n\n try:\n concurrency_history_item = self.setup_forward_or_backward_execution()\n concurrency_queue = self.start_child_states(concurrency_history_item)\n\n #######################################################\n # wait for the first threads to finish\n #######################################################\n finished_thread_id = concurrency_queue.get()\n finisher_state = self.states[finished_thread_id]\n finisher_state.join()\n\n # preempt all child states\n if not self.backward_execution:\n for state_id, state in self.states.items():\n state.recursively_preempt_states()\n # join all states\n for history_index, state in enumerate(self.states.values()):\n self.join_state(state, history_index, concurrency_history_item)\n self.add_state_execution_output_to_scoped_data(state.output_data, state)\n self.update_scoped_variables_with_output_dictionary(state.output_data, state)\n\n # add the data of the first state now to overwrite data of the preempted states\n self.add_state_execution_output_to_scoped_data(finisher_state.output_data, finisher_state)\n self.update_scoped_variables_with_output_dictionary(finisher_state.output_data, finisher_state)\n #######################################################\n # handle backward execution case\n #######################################################\n if self.states[finished_thread_id].backward_execution:\n return self.finalize_backward_execution()\n\n else:\n self.backward_execution = False\n\n #######################################################\n # handle no transition\n 
#######################################################\n transition = self.get_transition_for_outcome(self.states[finished_thread_id],\n self.states[finished_thread_id].final_outcome)\n if transition is None:\n # final outcome is set here\n transition = self.handle_no_transition(self.states[finished_thread_id])\n # it the transition is still None, then the state was preempted or aborted, in this case return\n if transition is None:\n self.output_data[\"error\"] = RuntimeError(\"state aborted\")\n else:\n if 'error' in self.states[finished_thread_id].output_data:\n self.output_data[\"error\"] = self.states[finished_thread_id].output_data['error']\n self.final_outcome = self.outcomes[transition.to_outcome]\n\n return self.finalize_concurrency_state(self.final_outcome)\n\n except Exception as e:\n logger.error(\"{0} had an internal error: {1}\\n{2}\".format(self, str(e), str(traceback.format_exc())))\n self.output_data[\"error\"] = e\n self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE\n return self.finalize(Outcome(-1, \"aborted\"))", "response": "This method starts the concurrency state execution process"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _check_transition_validity(self, check_transition):\n valid, message = super(PreemptiveConcurrencyState, self)._check_transition_validity(check_transition)\n if not valid:\n return False, message\n\n # Only transitions to the parent state are allowed\n\n if check_transition.to_state != self.state_id:\n return False, \"Only transitions to the parent state are allowed\"\n\n return True, message", "response": "Check validity of a transition of BarrierConcurrencyState."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nprepares the model for destruction Unregisters itself from the state machine and the root state machine and the modification history.", "response": "def 
prepare_destruction(self):\n \"\"\"Prepares the model for destruction\n\n Un-registers itself as observer from the state machine and the root state\n \"\"\"\n try:\n self.relieve_model(self.state_machine_model)\n assert self.__buffered_root_state_model is self.state_machine_model.root_state\n self.relieve_model(self.__buffered_root_state_model)\n self.state_machine_model = None\n self.__buffered_root_state_model = None\n self.modifications.prepare_destruction()\n except KeyError: # Might happen if the observer was already unregistered\n pass\n if self.active_action:\n try:\n self.active_action.prepare_destruction()\n except Exception as e:\n logger.exception(\"The modification history has had left over an active-action and \"\n \"could not destroy it {0}.\".format(e))\n self.active_action = None"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nrecover a specific version of the all_time_history element by doing several undos and redos.", "response": "def recover_specific_version(self, pointer_on_version_to_recover):\n \"\"\" Recovers a specific version of the all_time_history element by doing several undos and redos.\n\n :param pointer_on_version_to_recover: the id of the list element which is to recover\n :return:\n \"\"\"\n # search for traceable path -> list of action to undo and list of action to redo\n logger.info(\"Going to history status #{0}\".format(pointer_on_version_to_recover))\n undo_redo_list = self.modifications.get_undo_redo_list_from_active_trail_history_item_to_version_id(pointer_on_version_to_recover)\n logger.debug(\"Multiple undo and redo to reach modification history element of version {0} \"\n \"-> undo-redo-list is: {1}\".format(pointer_on_version_to_recover, undo_redo_list))\n # logger.debug(\"acquire lock 1 - for multiple action {0}\".format(self.modifications.trail_pointer))\n self.state_machine_model.storage_lock.acquire()\n # logger.debug(\"acquired lock 1 - for multiple action 
{0}\".format(self.modifications.trail_pointer))\n for elem in undo_redo_list:\n if elem[1] == 'undo':\n # do undo\n self._undo(elem[0])\n else:\n # do redo\n self._redo(elem[0])\n\n self.modifications.reorganize_trail_history_for_version_id(pointer_on_version_to_recover)\n self.change_count += 1\n # logger.debug(\"release lock 1 - for multiple action {0}\".format(self.modifications.trail_pointer))\n self.state_machine_model.storage_lock.release()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nperform fast search from currently active branch to specific version_id and collect all recovery steps.", "response": "def get_undo_redo_list_from_active_trail_history_item_to_version_id(self, version_id):\n \"\"\"Perform fast search from currently active branch to specific version_id and collect all recovery steps.\n \"\"\"\n all_trail_action = [a.version_id for a in self.single_trail_history() if a is not None]\n all_active_action = self.get_all_active_actions()\n undo_redo_list = []\n _undo_redo_list = []\n\n intermediate_version_id = version_id\n if self.with_verbose:\n logger.verbose(\"Version_id : {0} in\".format(intermediate_version_id))\n logger.verbose(\"Active actions: {0} in: {1}\".format(all_active_action,\n intermediate_version_id in all_active_action))\n logger.verbose(\"Trail actions : {0} in: {1}\".format(all_trail_action,\n intermediate_version_id in all_trail_action))\n\n if intermediate_version_id not in all_trail_action:\n # get undo to come from version_id to trail_action\n while intermediate_version_id not in all_trail_action:\n _undo_redo_list.insert(0, (intermediate_version_id, 'redo'))\n intermediate_version_id = self.all_time_history[intermediate_version_id].prev_id\n intermediate_goal_version_id = intermediate_version_id\n else:\n intermediate_goal_version_id = version_id\n intermediate_version_id = self.trail_history[self.trail_pointer].version_id\n if self.with_verbose:\n logger.verbose(\"Version_id : {0} 
{1}\".format(intermediate_goal_version_id, intermediate_version_id))\n logger.verbose(\"Active actions: {0} in: {1}\".format(all_active_action,\n intermediate_version_id in all_active_action))\n logger.verbose(\"Trail actions : {0} in: {1}\".format(all_trail_action,\n intermediate_version_id in all_trail_action))\n\n # collect undo and redo on trail\n if intermediate_goal_version_id in all_active_action:\n # collect needed undo to reach intermediate version\n while not intermediate_version_id == intermediate_goal_version_id:\n undo_redo_list.append((intermediate_version_id, 'undo'))\n intermediate_version_id = self.all_time_history[intermediate_version_id].prev_id\n\n elif intermediate_goal_version_id in all_trail_action:\n # collect needed redo to reach intermediate version\n while not intermediate_version_id == intermediate_goal_version_id:\n intermediate_version_id = self.all_time_history[intermediate_version_id].next_id\n undo_redo_list.append((intermediate_version_id, 'redo'))\n\n for elem in _undo_redo_list:\n undo_redo_list.append(elem)\n\n return undo_redo_list"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadjusting the position of a GTK Pane to a value stored in the runtime config file.", "response": "def set_pane_position(self, config_id):\n \"\"\"Adjusts the position of a GTK Pane to a value stored in the runtime config file. 
If there was no value\n stored, the pane's position is set to a default value.\n\n :param config_id: The pane identifier saved in the runtime config file\n \"\"\"\n default_pos = constants.DEFAULT_PANE_POS[config_id]\n position = global_runtime_config.get_config_value(config_id, default_pos)\n pane_id = constants.PANE_ID[config_id]\n self.view[pane_id].set_position(position)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nhighlighting buttons according actual execution status.", "response": "def model_changed(self, model, prop_name, info):\n \"\"\" Highlight buttons according actual execution status. Furthermore it triggers the label redraw of the active\n state machine.\n \"\"\"\n\n # TODO: find nice solution\n # this in only required if the GUI is terminated via Ctrl+C signal\n if not self.view:\n # this means that the main window is currently under destruction\n return\n\n execution_engine = rafcon.core.singleton.state_machine_execution_engine\n label_string = str(execution_engine.status.execution_mode)\n label_string = label_string.replace(\"STATE_MACHINE_EXECUTION_STATUS.\", \"\")\n self.view['execution_status_label'].set_text(label_string)\n\n current_execution_mode = execution_engine.status.execution_mode\n if current_execution_mode is StateMachineExecutionStatus.STARTED:\n self.view['step_buttons'].hide()\n self._set_single_button_active('button_start_shortcut')\n elif current_execution_mode is StateMachineExecutionStatus.PAUSED:\n self.view['step_buttons'].hide()\n self._set_single_button_active('button_pause_shortcut')\n elif execution_engine.finished_or_stopped():\n self.view['step_buttons'].hide()\n self._set_single_button_active('button_stop_shortcut')\n else: # all step modes\n self.view['step_buttons'].show()\n self._set_single_button_active('button_step_mode_shortcut')"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nput the focus on the given child controller.", "response": "def 
focus_notebook_page_of_controller(self, controller):\n \"\"\"Puts the focus on the given child controller\n\n The method implements focus request of the notebooks in left side-bar of the main window. Thereby it is the\n master-function of focus pattern of the notebooks in left side-bar.\n\n Actual pattern is:\n * Execution-History is put to focus any time requested (request occur at the moment when the state-machine\n is started and stopped.\n * Modification-History one time focused while and one time after execution if requested.\n\n :param controller The controller which request to be focused.\n \"\"\"\n # TODO think about to may substitute Controller- by View-objects it is may the better design\n if controller not in self.get_child_controllers():\n return\n # logger.info(\"focus controller {0}\".format(controller))\n if not self.modification_history_was_focused and isinstance(controller, ModificationHistoryTreeController) and \\\n self.view is not None:\n self.view.bring_tab_to_the_top('history')\n self.modification_history_was_focused = True\n\n if self.view is not None and isinstance(controller, ExecutionHistoryTreeController):\n self.view.bring_tab_to_the_top('execution_history')\n self.modification_history_was_focused = False"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ntrigger whenever a left - bar notebook tab is changed.", "response": "def on_notebook_tab_switch(self, notebook, page, page_num, title_label, window, notebook_identifier):\n \"\"\"Triggered whenever a left-bar notebook tab is changed.\n\n Updates the title of the corresponding notebook and updates the title of the left-bar window in case un-docked.\n\n :param notebook: The GTK notebook where a tab-change occurred\n :param page_num: The page number of the currently-selected tab\n :param title_label: The label holding the notebook's title\n :param window: The left-bar window, for which the title should be changed\n :param notebook_identifier: A string identifying 
whether the notebook is the upper or the lower one\n \"\"\"\n title = gui_helper_label.set_notebook_title(notebook, page_num, title_label)\n window.reset_title(title, notebook_identifier)\n self.on_switch_page_check_collapse_button(notebook, page_num)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _on_key_press(self, widget, event):\n self.currently_pressed_keys.add(event.keyval)\n if event.keyval in [Gdk.KEY_Tab, Gdk.KEY_ISO_Left_Tab] and event.state & Gdk.ModifierType.CONTROL_MASK:\n self.toggle_sidebars()", "response": "Updates the currently pressed keys\n "} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nsave the current configuration of windows panes and state machines to the runtime config file before RAFCON is closed.", "response": "def prepare_destruction(self):\n \"\"\"Saves current configuration of windows and panes to the runtime config file, before RAFCON is closed.\"\"\"\n plugins.run_hook(\"pre_destruction\")\n\n logger.debug(\"Saving runtime config to {0}\".format(global_runtime_config.config_file_path))\n\n # store pane last positions\n for key, widget_name in constants.PANE_ID.items():\n global_runtime_config.store_widget_properties(self.view[widget_name], key.replace('_POS', ''))\n\n # store hidden or undocked widget flags correctly -> make them independent for restoring\n for window_key in constants.UNDOCKABLE_WINDOW_KEYS:\n hidden = False\n if not global_runtime_config.get_config_value(window_key + \"_WINDOW_UNDOCKED\"):\n hidden = getattr(self, window_key.lower() + '_hidden')\n global_runtime_config.set_config_value(window_key + '_HIDDEN', hidden)\n\n global_runtime_config.save_configuration()\n \n # state-editor will relieve it's model => it won't observe the state machine manager any more\n self.get_controller('states_editor_ctrl').prepare_destruction() # avoid new state editor TODO tbd (deleted)\n 
rafcon.core.singleton.state_machine_manager.delete_all_state_machines()\n rafcon.core.singleton.library_manager.prepare_destruction()\n\n # gtkmvc installs a global glade custom handler that holds a reference to the last created View class,\n # preventing it from being destructed. By installing a dummy callback handler, after all views have been\n # created, the old handler is being removed and with it the reference, allowing all Views to be destructed.\n\n # Gtk TODO: check if necessary and search for replacement\n # try:\n # from gtk import glade\n # def dummy(*args, **kwargs):\n # pass\n # glade.set_custom_handler(dummy)\n # except ImportError:\n # pass\n\n # Recursively destroys the main window\n self.destroy()\n from rafcon.gui.clipboard import global_clipboard\n global_clipboard.destroy()\n gui_singletons.main_window_controller = None"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef setup_forward_or_backward_execution(self):\n if self.backward_execution:\n # pop the return item of this concurrency state to get the correct scoped data\n last_history_item = self.execution_history.pop_last_item()\n assert isinstance(last_history_item, ReturnItem)\n self.scoped_data = last_history_item.scoped_data\n # get the concurrency item for the children execution historys\n concurrency_history_item = self.execution_history.get_last_history_item()\n assert isinstance(concurrency_history_item, ConcurrencyItem)\n\n else: # forward_execution\n self.execution_history.push_call_history_item(self, CallType.CONTAINER, self, self.input_data)\n concurrency_history_item = self.execution_history.push_concurrency_history_item(self, len(self.states))\n return concurrency_history_item", "response": "Sets up the execution of the concurrency states dependent on if the state is executed forward of backward."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef 
finalize_concurrency_state(self, outcome):\n final_outcome = outcome\n self.write_output_data()\n self.check_output_data_type()\n self.execution_history.push_return_history_item(self, CallType.CONTAINER, self, self.output_data)\n self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE\n\n singleton.state_machine_execution_engine._modify_run_to_states(self)\n\n if self.preempted:\n final_outcome = Outcome(-2, \"preempted\")\n return self.finalize(final_outcome)", "response": "This function is called by the concurrency state machine when the concurrency state is complete."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _head_length(self, port):\n if not port:\n return 0.\n parent_state_v = self.get_parent_state_v()\n if parent_state_v is port.parent: # port of connection's parent state\n return port.port_size[1]\n return max(port.port_size[1] * 1.5, self._calc_line_width() / 1.3)", "response": "Return the length of the head of the port."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncalls when the View was registered", "response": "def register_view(self, view):\n \"\"\"Called when the View was registered\"\"\"\n super(DataPortListController, self).register_view(view)\n\n view['name_col'].add_attribute(view['name_text'], 'text', self.NAME_STORAGE_ID)\n view['data_type_col'].add_attribute(view['data_type_text'], 'text', self.DATA_TYPE_NAME_STORAGE_ID)\n if not isinstance(self.model.state, LibraryState) and self.model.state.get_next_upper_library_root_state() is None:\n view['name_text'].set_property(\"editable\", True)\n view['data_type_text'].set_property(\"editable\", True)\n\n # in the linkage overview the the default value is not shown\n if isinstance(view, InputPortsListView) or isinstance(view, OutputPortsListView):\n view['default_value_col'].add_attribute(view['default_value_text'], 'text', self.DEFAULT_VALUE_STORAGE_ID)\n 
self._apply_value_on_edited_and_focus_out(view['default_value_text'], self._apply_new_data_port_default_value)\n view['default_value_col'].set_cell_data_func(view['default_value_text'],\n self._default_value_cell_data_func)\n if isinstance(self.model.state, LibraryState):\n view['default_value_col'].set_title(\"Used value\")\n if self.model.state.get_next_upper_library_root_state() is None: # never enabled means it is disabled\n view['default_value_text'].set_property(\"editable\", True)\n\n self._apply_value_on_edited_and_focus_out(view['name_text'], self._apply_new_data_port_name)\n self._apply_value_on_edited_and_focus_out(view['data_type_text'], self._apply_new_data_port_type)\n\n if isinstance(self.model.state, LibraryState):\n view['use_runtime_value_toggle'] = Gtk.CellRendererToggle()\n view['use_runtime_value_col'] = Gtk.TreeViewColumn(\"Use Runtime Value\")\n view['use_runtime_value_col'].set_property(\"sizing\", Gtk.TreeViewColumnSizing.AUTOSIZE)\n view.get_top_widget().append_column(view['use_runtime_value_col'])\n view['use_runtime_value_col'].pack_start(view['use_runtime_value_toggle'], True)\n view['use_runtime_value_col'].add_attribute(view['use_runtime_value_toggle'], 'active',\n self.USE_RUNTIME_VALUE_STORAGE_ID)\n if self.model.state.get_next_upper_library_root_state() is None:\n view['use_runtime_value_toggle'].set_property(\"activatable\", True)\n view['use_runtime_value_toggle'].connect(\"toggled\", self.on_use_runtime_value_toggled)\n\n self._reload_data_port_list_store()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ntries to set the use runtime value flag to the newly entered one", "response": "def on_use_runtime_value_toggled(self, widget, path):\n \"\"\"Try to set the use runtime value flag to the newly entered one\n \"\"\"\n try:\n data_port_id = self.list_store[path][self.ID_STORAGE_ID]\n self.toggle_runtime_value_usage(data_port_id)\n except TypeError as e:\n logger.exception(\"Error 
while trying to change the use_runtime_value flag\")"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _default_value_cell_data_func(self, tree_view_column, cell, model, iter, data=None):\n if isinstance(self.model.state, LibraryState):\n use_runtime_value = model.get_value(iter, self.USE_RUNTIME_VALUE_STORAGE_ID)\n if use_runtime_value:\n cell.set_property(\"editable\", True)\n cell.set_property('text', model.get_value(iter, self.RUNTIME_VALUE_STORAGE_ID))\n cell.set_property('foreground', \"white\")\n else:\n cell.set_property(\"editable\", False)\n cell.set_property('text', model.get_value(iter, self.DEFAULT_VALUE_STORAGE_ID))\n cell.set_property('foreground', \"dark grey\")\n\n return", "response": "Function set renderer properties for every single cell independently"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreloads the input data port list store from the data port models", "response": "def _reload_data_port_list_store(self):\n \"\"\"Reloads the input data port list store from the data port models\"\"\"\n\n tmp = self._get_new_list_store()\n for data_port_m in self.data_port_model_list:\n data_port_id = data_port_m.data_port.data_port_id\n data_type = data_port_m.data_port.data_type\n # get name of type (e.g. ndarray)\n data_type_name = data_type.__name__\n # get module of type, e.g. numpy\n data_type_module = data_type.__module__\n # if the type is not a builtin type, also show the module\n if data_type_module not in ['__builtin__', 'builtins']:\n data_type_name = data_type_module + '.' 
+ data_type_name\n if data_port_m.data_port.default_value is None:\n default_value = \"None\"\n else:\n default_value = data_port_m.data_port.default_value\n\n if not isinstance(self.model.state, LibraryState):\n tmp.append([data_port_m.data_port.name, data_type_name, str(default_value), data_port_id,\n None, None, data_port_m])\n else:\n use_runtime_value, runtime_value = self.get_data_port_runtime_configuration(data_port_id)\n tmp.append([data_port_m.data_port.name,\n data_type_name,\n str(default_value),\n data_port_id,\n bool(use_runtime_value),\n str(runtime_value),\n data_port_m,\n ])\n\n tms = Gtk.TreeModelSort(model=tmp)\n tms.set_sort_column_id(0, Gtk.SortType.ASCENDING)\n tms.set_sort_func(0, compare_variables)\n tms.sort_column_changed()\n tmp = tms\n self.list_store.clear()\n for elem in tmp:\n self.list_store.append(elem[:])"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\napplying the new name of the data port defined by path to the state data port.", "response": "def _apply_new_data_port_name(self, path, new_name):\n \"\"\"Applies the new name of the data port defined by path\n\n :param str path: The path identifying the edited data port\n :param str new_name: New name\n \"\"\"\n try:\n data_port_id = self.list_store[path][self.ID_STORAGE_ID]\n if self.state_data_port_dict[data_port_id].name != new_name:\n self.state_data_port_dict[data_port_id].name = new_name\n except (TypeError, ValueError) as e:\n logger.exception(\"Error while trying to change data port name\")"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\napplying the new data type of the data port defined by path", "response": "def _apply_new_data_port_type(self, path, new_data_type_str):\n \"\"\"Applies the new data type of the data port defined by path\n\n :param str path: The path identifying the edited data port\n :param str new_data_type_str: New data type as str\n \"\"\"\n try:\n data_port_id = 
self.list_store[path][self.ID_STORAGE_ID]\n if self.state_data_port_dict[data_port_id].data_type.__name__ != new_data_type_str:\n self.state_data_port_dict[data_port_id].change_data_type(new_data_type_str)\n except ValueError as e:\n logger.exception(\"Error while changing data type\")"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\napply the new default value of the data port defined by path", "response": "def _apply_new_data_port_default_value(self, path, new_default_value_str):\n \"\"\"Applies the new default value of the data port defined by path\n\n :param str path: The path identifying the edited variable\n :param str new_default_value_str: New default value as string\n \"\"\"\n try:\n data_port_id = self.list_store[path][self.ID_STORAGE_ID]\n if isinstance(self.model.state, LibraryState):\n # this always has to be true, as the runtime value column can only be edited\n # if the use_runtime_value flag is True\n if self.list_store[path][self.USE_RUNTIME_VALUE_STORAGE_ID]:\n self.set_data_port_runtime_value(data_port_id, new_default_value_str)\n else:\n if str(self.state_data_port_dict[data_port_id].default_value) != new_default_value_str:\n self.state_data_port_dict[data_port_id].default_value = new_default_value_str\n except (TypeError, AttributeError) as e:\n logger.exception(\"Error while changing default value\")"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _data_ports_changed(self, model):\n if not isinstance(model, AbstractStateModel):\n return\n # store port selection\n path_list = None\n if self.view is not None:\n model, path_list = self.tree_view.get_selection().get_selected_rows()\n selected_data_port_ids = [self.list_store[path[0]][self.ID_STORAGE_ID] for path in path_list] if path_list else []\n self._reload_data_port_list_store()\n # recover port selection\n if selected_data_port_ids:\n [self.select_entry(selected_data_port_id, False) for 
selected_data_port_id in selected_data_port_ids]", "response": "Reload list store and reminds selection when the model was changed"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nhandles cases for the library runtime values", "response": "def runtime_values_changed(self, model, prop_name, info):\n \"\"\"Handle cases for the library runtime values\"\"\"\n if (\"_input_runtime_value\" in info.method_name or\n info.method_name in ['use_runtime_value_input_data_ports',\n 'input_data_port_runtime_values']) and \\\n self.model is model:\n self._data_ports_changed(model)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nadd a new data port with default values and select it", "response": "def add_new_data_port(self):\n \"\"\"Add a new port with default values and select it\"\"\"\n try:\n new_data_port_ids = gui_helper_state_machine.add_data_port_to_selected_states('OUTPUT', int, [self.model])\n if new_data_port_ids:\n self.select_entry(new_data_port_ids[self.model.state])\n except ValueError:\n pass"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef add_callback_for_action(self, action, callback):\n if hasattr(callback, '__call__'): # Is the callback really a function?\n if action not in self.__action_to_callbacks:\n self.__action_to_callbacks[action] = []\n self.__action_to_callbacks[action].append(callback)\n\n controller = None\n try:\n controller = callback.__self__\n except AttributeError:\n try:\n # Needed when callback was wrapped using functools.partial\n controller = callback.func.__self__\n except AttributeError:\n pass\n\n if controller:\n if controller not in self.__controller_action_callbacks:\n self.__controller_action_callbacks[controller] = {}\n if action not in self.__controller_action_callbacks[controller]:\n self.__controller_action_callbacks[controller][action] = []\n 
self.__controller_action_callbacks[controller][action].append(callback)\n\n return True", "response": "Adds a callback function to an action."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef remove_callback_for_action(self, action, callback):\n if action in self.__action_to_callbacks:\n if callback in self.__action_to_callbacks[action]:\n self.__action_to_callbacks[action].remove(callback)", "response": "Remove a callback for a specific action"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef trigger_action(self, action, key_value, modifier_mask):\n res = False\n if action in self.__action_to_callbacks:\n for callback_function in self.__action_to_callbacks[action]:\n try:\n ret = callback_function(key_value, modifier_mask)\n # If at least one controller returns True, the whole result becomes True\n res |= (False if ret is None else ret)\n except Exception as e:\n logger.exception('Exception while calling callback methods for action \"{0}\": {1}'.format(action, e))\n return res", "response": "Calls the appropriate callback function(s) for the given action and returns a boolean indicating whether a callback was triggered."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef check_info_on_no_update_flags(self, info):\n # TODO that could need a second clean up\n # avoid updates because of state destruction\n if 'before' in info and info['method_name'] == \"remove_state\":\n if info.instance is self.model.state:\n self.no_update_state_destruction = True\n else:\n # if the state it self is removed lock the widget to never run updates and relieve all models\n removed_state_id = info.args[1] if len(info.args) > 1 else info.kwargs['state_id']\n if removed_state_id == self.model.state.state_id or \\\n not self.model.state.is_root_state and removed_state_id == self.model.parent.state.state_id:\n 
self.no_update_self_or_parent_state_destruction = True\n self.relieve_all_models()\n\n elif 'after' in info and info['method_name'] == \"remove_state\":\n if info.instance.state_id == self.model.state.state_id:\n self.no_update_state_destruction = False\n\n # reduce NotificationOverview generations by the fact that after could cause False and before could cause True\n if not self.no_update_state_destruction and not self.no_update_self_or_parent_state_destruction and \\\n (not self.no_update and 'before' in info or 'after' in info and self.no_update):\n return\n overview = NotificationOverview(info, False, self.__class__.__name__)\n\n # The method causing the change raised an exception, thus nothing was changed and updates are allowed\n if 'after' in info and isinstance(overview['result'][-1], Exception):\n self.no_update = False\n self.no_update_state_destruction = False\n # self.no_update_self_or_parent_state_destruction = False\n return\n\n if overview['method_name'][-1] in ['group_states', 'ungroup_state', \"change_state_type\",\n \"change_root_state_type\"]:\n instance_is_self = self.model.state is overview['instance'][-1]\n instance_is_parent = self.model.parent and self.model.parent.state is overview['instance'][-1]\n instance_is_parent_parent = self.model.parent and self.model.parent.parent and self.model.parent.parent.state is overview['instance'][-1]\n # print(\"no update flag: \", True if 'before' in info and (instance_is_self or instance_is_parent or instance_is_parent_parent) else False)\n if instance_is_self or instance_is_parent or instance_is_parent_parent:\n self.no_update = True if 'before' in info else False\n\n if overview['prop_name'][-1] == 'state' and overview['method_name'][-1] in [\"change_state_type\"] and \\\n self.model.get_state_machine_m() is not None:\n changed_model = self.model.get_state_machine_m().get_state_model_by_path(overview['args'][-1][1].get_path())\n if changed_model not in self._model_observed:\n 
self.observe_model(changed_model)", "response": "Stop updates while multi - actions are allowed"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef before_notification_state_machine_observation_control(self, model, prop_name, info):\n if is_execution_status_update_notification_from_state_machine_model(prop_name, info):\n return\n # do not update while multi-actions\n self.check_info_on_no_update_flags(info)", "response": "Check for multi - actions and set respective no update flags."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef store_value(self, name, value, parameters=None):\n if not isinstance(parameters, dict):\n raise TypeError(\"parameters must be a dict\")\n hash = self._parameter_hash(parameters)\n if name not in self._cache:\n self._cache[name] = {}\n self._cache[name][hash.hexdigest()] = value", "response": "Stores the value of a certain variable with name with the given value and parameters."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_value(self, name, parameters=None):\n if not isinstance(parameters, dict):\n raise TypeError(\"parameters must a dict\")\n if name not in self._cache:\n return None\n hash = self._parameter_hash(parameters)\n hashdigest = hash.hexdigest()\n return self._cache[name].get(hashdigest, None)", "response": "Return the value of a cached variable if applicable\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _normalize_number_values(self, parameters):\n for key, value in parameters.items():\n if isinstance(value, (int, float)):\n parameters[key] = str(Decimal(value).normalize(self._context))", "response": "Assures equal precision for all number values"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it 
does\ndef register_view(self, view):\n super(AbstractTreeViewController, self).register_view(view)\n self.signal_handlers.append((self._tree_selection,\n self._tree_selection.connect('changed', self.selection_changed)))\n # self.handler_ids.append((self.tree_view,\n # self.tree_view.connect('key-press-event', self.tree_view_keypress_callback)))\n self.tree_view.connect('key-release-event', self.on_key_release_event)\n self.tree_view.connect('button-release-event', self.tree_view_keypress_callback)\n # key press is needed for tab motion but needs to be registered already here TODO why?\n self.tree_view.connect('key-press-event', self.tree_view_keypress_callback)\n self._tree_selection.set_mode(Gtk.SelectionMode.MULTIPLE)\n self.update_selection_sm_prior()", "response": "Register callbacks for button press events and selection changed"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting actual tree selection object and all respective models of selected rows", "response": "def get_view_selection(self):\n \"\"\"Get actual tree selection object and all respective models of selected rows\"\"\"\n if not self.MODEL_STORAGE_ID:\n return None, None\n\n # avoid selection requests on empty tree views -> case warnings in gtk3\n if len(self.store) == 0:\n paths = []\n else:\n model, paths = self._tree_selection.get_selected_rows()\n\n # get all related models for selection from respective tree store field\n selected_model_list = []\n for path in paths:\n model = self.store[path][self.MODEL_STORAGE_ID]\n selected_model_list.append(model)\n return self._tree_selection, selected_model_list"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_selections(self):\n sm_selection, sm_filtered_selected_model_set = self.get_state_machine_selection()\n tree_selection, selected_model_list = self.get_view_selection()\n return tree_selection, selected_model_list, sm_selection, 
sm_filtered_selected_model_set", "response": "Get current model selection status in state machine selection and view selection of the widget"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nnotifies tree view about state machine selection", "response": "def state_machine_selection_changed(self, state_machine_m, signal_name, signal_msg):\n \"\"\"Notify tree view about state machine selection\"\"\"\n if self.CORE_ELEMENT_CLASS in signal_msg.arg.affected_core_element_classes:\n self.update_selection_sm_prior()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_action_callback(self, *event):\n if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:\n self.on_add(None)\n return True", "response": "Callback method for add action"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef cut_action_callback(self, *event):\n if react_to_event(self.view, self.tree_view, event) and self.active_entry_widget is None:\n sm_selection, sm_selected_model_list = self.get_state_machine_selection()\n # only list specific elements are cut by widget\n if sm_selection is not None:\n sm_selection.set(sm_selected_model_list)\n global_clipboard.cut(sm_selection)\n return True", "response": "Callback method for cut action"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _apply_value_on_edited_and_focus_out(self, renderer, apply_method):\n assert isinstance(renderer, Gtk.CellRenderer)\n\n def remove_all_handler(renderer):\n \"\"\"Remove all handler for given renderer and its editable\n\n :param renderer: Renderer of the respective column which is edit by a entry widget, at the moment\n \"\"\"\n def remove_handler(widget, data_name):\n \"\"\"Remove handler from given widget\n\n :param Gtk.Widget widget: Widget from which a handler is 
to be removed\n :param data_name: Name of the data of the widget in which the handler id is stored\n \"\"\"\n handler_id = getattr(widget, data_name)\n if widget.handler_is_connected(handler_id):\n widget.disconnect(handler_id)\n\n editable = getattr(renderer, \"editable\")\n remove_handler(editable, \"focus_out_handler_id\")\n remove_handler(editable, \"cursor_move_handler_id\")\n remove_handler(editable, \"insert_at_cursor_handler_id\")\n remove_handler(editable, \"entry_widget_expose_event_handler_id\")\n remove_handler(renderer, \"editing_cancelled_handler_id\")\n\n def on_focus_out(entry, event):\n \"\"\"Applies the changes to the entry\n\n :param Gtk.Entry entry: The entry that was focused out\n :param Gtk.Event event: Event object with information about the event\n \"\"\"\n renderer.remove_all_handler(renderer)\n if renderer.ctrl.get_path() is None:\n return\n # Originally we had to use idle_add to prevent core dumps:\n # https://mail.gnome.org/archives/gtk-perl-list/2005-September/msg00143.html\n # Gtk TODO: use idle_add if there are problems\n apply_method(renderer.ctrl.get_path(), entry.get_text())\n\n def on_cursor_move_in_entry_widget(entry, step, count, extend_selection):\n \"\"\"Trigger scroll bar adjustments according active entry widgets cursor change\n \"\"\"\n self.tree_view_keypress_callback(entry, None)\n\n def on_editing_started(renderer, editable, path):\n \"\"\"Connects the a handler for the focus-out-event of the current editable\n\n :param Gtk.CellRendererText renderer: The cell renderer who's editing was started\n :param Gtk.CellEditable editable: interface for editing the current TreeView cell\n :param str path: the path identifying the edited cell\n \"\"\"\n # secure scrollbar adjustments on active cell\n ctrl = renderer.ctrl\n [path, focus_column] = ctrl.tree_view.get_cursor()\n if path:\n ctrl.tree_view.scroll_to_cell(path, ctrl.widget_columns[ctrl.widget_columns.index(focus_column)],\n use_align=False)\n\n editing_cancelled_handler_id 
= renderer.connect('editing-canceled', on_editing_canceled)\n focus_out_handler_id = editable.connect('focus-out-event', on_focus_out)\n cursor_move_handler_id = editable.connect('move-cursor', on_cursor_move_in_entry_widget)\n insert_at_cursor_handler_id = editable.connect(\"insert-at-cursor\", on_cursor_move_in_entry_widget)\n entry_widget_expose_event_handler_id = editable.connect(\"draw\", self.on_entry_widget_draw_event)\n # Store reference to editable and signal handler ids for later access when removing the handlers\n # see https://gitlab.gnome.org/GNOME/accerciser/commit/036689e70304a9e98ce31238dfad2432ad4c78ea\n # originally with renderer.set_data()\n # renderer.set_data(\"editable\", editable)\n setattr(renderer, \"editable\", editable)\n setattr(renderer, \"editing_cancelled_handler_id\", editing_cancelled_handler_id)\n # editable\n setattr(editable, \"focus_out_handler_id\", focus_out_handler_id)\n setattr(editable, \"cursor_move_handler_id\", cursor_move_handler_id)\n setattr(editable, \"insert_at_cursor_handler_id\", insert_at_cursor_handler_id)\n setattr(editable, \"entry_widget_expose_event_handler_id\", entry_widget_expose_event_handler_id)\n ctrl.active_entry_widget = editable\n\n def on_edited(renderer, path, new_value_str):\n \"\"\"Calls the apply method with the new value\n\n :param Gtk.CellRendererText renderer: The cell renderer that was edited\n :param str path: The path string of the renderer\n :param str new_value_str: The new value as string\n \"\"\"\n renderer.remove_all_handler(renderer)\n apply_method(path, new_value_str)\n renderer.ctrl.active_entry_widget = None\n\n def on_editing_canceled(renderer):\n \"\"\"Disconnects the focus-out-event handler of cancelled editable\n\n :param Gtk.CellRendererText renderer: The cell renderer who's editing was cancelled\n \"\"\"\n remove_all_handler(renderer)\n renderer.ctrl.active_entry_widget = None\n\n renderer.remove_all_handler = remove_all_handler\n renderer.ctrl = self\n # 
renderer.connect('editing-started', on_editing_started)\n # renderer.connect('edited', on_edited)\n self.__attached_renderers.append(renderer)\n self.connect_signal(renderer, 'editing-started', on_editing_started)\n self.connect_signal(renderer, 'edited', on_edited)", "response": "Set up the renderer to apply changes when loosing focus out"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef tree_view_keypress_callback(self, widget, event):\n current_row_path, current_focused_column = self.tree_view.get_cursor()\n # print(current_row_path, current_focused_column)\n if isinstance(widget, Gtk.TreeView) and not self.active_entry_widget: # avoid jumps for active entry widget\n pass\n # cursor motion/selection changes (e.g. also by button release event)\n # if current_row_path is not None and len(current_row_path) == 1 and isinstance(current_row_path[0], int):\n # self.tree_view.scroll_to_cell(current_row_path[0], current_focused_column, use_align=True)\n # # else:\n # # self._logger.debug(\"A ListViewController aspects a current_row_path of dimension 1 with integer but\"\n # # \" it is {0} and column is {1}\".format(current_row_path, current_focused_column))\n elif isinstance(widget, Gtk.Entry) and self.view.scrollbar_widget is not None:\n # calculate the position of the scrollbar to be always centered with the entry widget cursor\n # TODO check how to get sufficient the scroll-offset in the entry widget -> some times zero when not\n # TODO the scrollbar is one step behind cursor -> so jump from pos1 to end works not perfect\n entry_widget_scroll_offset, entry_widget_cursor_position, entry_widget_text_length = \\\n widget.get_properties(*[\"scroll-offset\", \"cursor-position\", \"text-length\"])\n cell_rect_of_entry_widget = widget.get_allocation()\n\n horizontal_scroll_bar = self.view.scrollbar_widget.get_hscrollbar()\n # entry_widget_text_length must be greater than zero otherwise DevisionByZero 
Exception\n if horizontal_scroll_bar is not None and float(entry_widget_text_length) > 0:\n adjustment = horizontal_scroll_bar.get_adjustment()\n layout_pixel_width = widget.get_layout().get_pixel_size()[0]\n # print(\"rel_pos pices\", cell_rect_of_entry_widget.x,)\n # int(layout_pixel_width*float(entry_widget_cursor_position)/float(entry_widget_text_length))\n rel_pos = cell_rect_of_entry_widget.x - entry_widget_scroll_offset + \\\n int(layout_pixel_width*float(entry_widget_cursor_position)/float(entry_widget_text_length))\n\n # optimize rel_pos for better user support\n bounds = widget.get_selection_bounds()\n if bounds and bounds[1] - bounds[0] == len(widget.get_text()):\n # if text is fully selected stay in front as far as possible\n rel_pos = cell_rect_of_entry_widget.x\n if self._horizontal_scrollbar_stay_in_front_if_possible():\n return True\n else:\n # try to stay long at the beginning of the columns if the columns fully fit in\n rel_space = adjustment.get_page_size() - cell_rect_of_entry_widget.x\n if cell_rect_of_entry_widget.x + widget.get_layout().get_pixel_size()[0] < \\\n adjustment.get_page_size():\n rel_pos = 0.\n elif rel_space and rel_pos <= rel_space:\n # accelerate the showing of the first columns\n rel_pos = rel_pos + rel_pos*3.*(rel_pos - rel_space)/adjustment.get_page_size()\n rel_pos = 0. 
if rel_pos <= 0 else rel_pos\n else:\n # and jump to the end of the scroller space if close to the upper limit\n rel_pos = adjustment.get_upper() if rel_pos + 2*entry_widget_scroll_offset > adjustment.get_upper() else rel_pos\n self._put_horizontal_scrollbar_onto_rel_pos(rel_pos)", "response": "This method is called by the key press event handler to adapt the tree view and controller behavior according to the key release events."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef register_view(self, view):\n super(ListViewController, self).register_view(view)\n self.tree_view.connect('button_press_event', self.mouse_click)", "response": "Register callbacks for button press events and selection changed"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nremove respective selected core elements and select the next one", "response": "def on_remove(self, widget, data=None):\n \"\"\"Removes respective selected core elements and select the next one\"\"\"\n\n path_list = None\n if self.view is not None:\n model, path_list = self.tree_view.get_selection().get_selected_rows()\n old_path = self.get_path()\n models = [self.list_store[path][self.MODEL_STORAGE_ID] for path in path_list] if path_list else []\n if models:\n try:\n self.remove_core_elements(models)\n except AttributeError as e:\n self._logger.warning(\"The respective core element of {1}.list_store couldn't be removed. 
-> {0}\"\n \"\".format(e, self.__class__.__name__))\n if len(self.list_store) > 0:\n self.tree_view.set_cursor(min(old_path[0], len(self.list_store) - 1))\n return True\n else:\n self._logger.warning(\"Please select an element to be removed.\")"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nimplementing mouse click events explicit iloilo", "response": "def mouse_click(self, widget, event=None):\n \"\"\"Implements shift- and control-key handling features for mouse button press events explicit\n\n The method is implements a fully defined mouse pattern to use shift- and control-key for multi-selection in a\n TreeView and a ListStore as model. It avoid problems caused by special renderer types like the text combo\n renderer by stopping the callback handler to continue with notifications.\n\n :param Gtk.Object widget: Object which is the source of the event\n :param Gtk.Event event: Event generated by mouse click\n :rtype: bool\n \"\"\"\n\n if event.type == Gdk.EventType.BUTTON_PRESS:\n pthinfo = self.tree_view.get_path_at_pos(int(event.x), int(event.y))\n\n if not bool(event.get_state() & Gdk.ModifierType.CONTROL_MASK) \\\n and not bool(event.get_state() & Gdk.ModifierType.SHIFT_MASK) and \\\n event.type == Gdk.EventType.BUTTON_PRESS and event.get_button()[1] == 3:\n if pthinfo is not None:\n model, paths = self._tree_selection.get_selected_rows()\n # print(paths)\n if pthinfo[0] not in paths:\n # self._logger.info(\"force single selection for right click\")\n self.tree_view.set_cursor(pthinfo[0])\n self._last_path_selection = pthinfo[0]\n else:\n # self._logger.info(\"single- or multi-selection for right click\")\n pass\n self.on_right_click_menu()\n return True\n\n if (bool(event.get_state() & Gdk.ModifierType.CONTROL_MASK) or \\\n bool(event.get_state() & Gdk.ModifierType.SHIFT_MASK)) and \\\n event.type == Gdk.EventType.BUTTON_PRESS and event.get_button()[1] == 3:\n return True\n\n if not bool(event.get_state() & 
Gdk.ModifierType.SHIFT_MASK) and event.get_button()[1] == 1:\n if pthinfo is not None:\n # self._logger.info(\"last select row {}\".format(pthinfo[0]))\n self._last_path_selection = pthinfo[0]\n # else:\n # self._logger.info(\"deselect rows\")\n # self.tree_selection.unselect_all()\n\n if bool(event.get_state() & Gdk.ModifierType.SHIFT_MASK) and event.get_button()[1] == 1:\n # self._logger.info(\"SHIFT adjust selection range\")\n model, paths = self._tree_selection.get_selected_rows()\n # print(model, paths, pthinfo[0])\n if paths and pthinfo and pthinfo[0]:\n if self._last_path_selection[0] <= pthinfo[0][0]:\n new_row_ids_selected = list(range(self._last_path_selection[0], pthinfo[0][0]+1))\n else:\n new_row_ids_selected = list(range(self._last_path_selection[0], pthinfo[0][0]-1, -1))\n # self._logger.info(\"range to select {0}, {1}\".format(new_row_ids_selected, model))\n self._tree_selection.unselect_all()\n for path in new_row_ids_selected:\n self._tree_selection.select_path(path)\n return True\n else:\n # self._logger.info(\"nothing selected {}\".format(model))\n if pthinfo and pthinfo[0]:\n self._last_path_selection = pthinfo[0]\n\n if bool(event.get_state() & Gdk.ModifierType.CONTROL_MASK) and event.get_button()[1] == 1:\n # self._logger.info(\"CONTROL adjust selection range\")\n model, paths = self._tree_selection.get_selected_rows()\n # print(model, paths, pthinfo[0])\n if paths and pthinfo and pthinfo[0]:\n if pthinfo[0] in paths:\n self._tree_selection.unselect_path(pthinfo[0])\n else:\n self._tree_selection.select_path(pthinfo[0])\n return True\n elif pthinfo and pthinfo[0]:\n self._tree_selection.select_path(pthinfo[0])\n return True\n\n elif event.type == Gdk.EventType._2BUTTON_PRESS:\n self._handle_double_click(event)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndoubles click with left mouse button focuses the element", "response": "def _handle_double_click(self, event):\n \"\"\" Double click with left 
mouse button focuses the element\"\"\"\n if event.get_button()[1] == 1: # Left mouse button\n path_info = self.tree_view.get_path_at_pos(int(event.x), int(event.y))\n if path_info: # Valid entry was clicked on\n path = path_info[0]\n iter = self.list_store.get_iter(path)\n model = self.list_store.get_value(iter, self.MODEL_STORAGE_ID)\n selection = self.model.get_state_machine_m().selection\n selection.focus = model"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates state machine prior update of tree selection", "response": "def update_selection_sm_prior(self):\n \"\"\"State machine prior update of tree selection\"\"\"\n if self._do_selection_update:\n return\n self._do_selection_update = True\n tree_selection, selected_model_list, sm_selection, sm_selected_model_list = self.get_selections()\n if tree_selection is not None:\n for path, row in enumerate(self.list_store):\n model = row[self.MODEL_STORAGE_ID]\n if model not in sm_selected_model_list and model in selected_model_list:\n tree_selection.unselect_path(Gtk.TreePath.new_from_indices([path]))\n if model in sm_selected_model_list and model not in selected_model_list:\n tree_selection.select_path(Gtk.TreePath.new_from_indices([path]))\n\n self._do_selection_update = False"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nupdates tree view prior of state machine selection", "response": "def update_selection_self_prior(self):\n \"\"\"Tree view prior update of state machine selection\"\"\"\n if self._do_selection_update:\n return\n self._do_selection_update = True\n tree_selection, selected_model_list, sm_selection, sm_selected_model_list = self.get_selections()\n if isinstance(sm_selection, Selection):\n sm_selection.handle_prepared_selection_of_core_class_elements(self.CORE_ELEMENT_CLASS, selected_model_list)\n self._do_selection_update = False"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in 
Python 3 that\nselects the row entry belonging to the given core_element_id by cursor or tree selection", "response": "def select_entry(self, core_element_id, by_cursor=True):\n \"\"\"Selects the row entry belonging to the given core_element_id by cursor or tree selection\"\"\"\n for row_num, element_row in enumerate(self.list_store):\n # Compare data port ids\n if element_row[self.ID_STORAGE_ID] == core_element_id:\n if by_cursor:\n self.tree_view.set_cursor(row_num)\n else:\n self.tree_view.get_selection().select_path((row_num, ))\n break"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_path_for_core_element(self, core_element_id):\n for row_num, element_row in enumerate(self.list_store):\n # Compare data port ids\n if element_row[self.ID_STORAGE_ID] == core_element_id:\n return row_num,", "response": "Get the path to the row representing the core element described by handed core_element_id"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef tree_view_keypress_callback(self, widget, event):\n # self._logger.info(\"key_value: \" + str(event.keyval if event is not None else ''))\n if event and \"GDK_KEY_PRESS\" == event.type.value_name \\\n and (event.keyval == Gdk.KEY_Tab or event.keyval == Gdk.KEY_ISO_Left_Tab):\n [path, focus_column] = self.tree_view.get_cursor()\n if not path:\n return False\n self.tree_view_keypress_callback.__func__.core_element_id = self.store[path][self.ID_STORAGE_ID]\n\n # finish active edit process\n if self.active_entry_widget is not None:\n text = self.active_entry_widget.get_buffer().get_text()\n if focus_column in self.widget_columns:\n focus_column.get_cells()[0].emit('edited', path[0], text)\n\n # row could be updated by other call_backs caused by emitting 'edited' signal but selection stays an editable neighbor\n path = self.get_path_for_core_element(self.tree_view_keypress_callback.__func__.core_element_id)\n if 
event.keyval == Gdk.KEY_Tab:\n # logger.info(\"move right\")\n direction = +1\n else:\n # logger.info(\"move left\")\n direction = -1\n\n # get next row_id for focus\n if direction < 0 and focus_column is self.widget_columns[0] \\\n or direction > 0 and focus_column is self.widget_columns[-1]:\n if direction < 0 < path[0] or direction > 0 and not path[0] + 1 > len(self.store):\n next_row = path[0] + direction\n else:\n return False\n else:\n next_row = path[0]\n # get next column_id for focus\n focus_column_id = self.widget_columns.index(focus_column)\n if focus_column_id is not None:\n # search all columns for next editable cell renderer\n next_focus_column_id = 0\n for index in range(len(self.tree_view.get_model())):\n test_id = focus_column_id + direction * index + direction\n next_focus_column_id = test_id % len(self.widget_columns)\n if test_id > len(self.widget_columns) - 1 or test_id < 0:\n next_row = path[0] + direction\n if next_row < 0 or next_row > len(self.tree_view.get_model()) - 1:\n return False\n\n if self.widget_columns[next_focus_column_id].get_cells()[0].get_property('editable'):\n break\n else:\n return False\n\n del self.tree_view_keypress_callback.__func__.core_element_id\n # self._logger.info(\"self.tree_view.scroll_to_cell(next_row={0}, self.widget_columns[{1}] , use_align={2})\"\n # \"\".format(next_row, next_focus_column_id, False))\n # self.tree_view.scroll_to_cell(next_row, self.widget_columns[next_focus_column_id], use_align=False)\n self.tree_view.set_cursor_on_cell(Gtk.TreePath.new_from_indices([next_row]), self.widget_columns[\n next_focus_column_id], focus_cell=None, start_editing=True)\n return True\n else:\n super(ListViewController, self).tree_view_keypress_callback(widget, event)", "response": "This method is called by the key press event handler for the tree view. 
It is used to create a new list item and edit process."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef iter_tree_with_handed_function(self, function, *function_args):\n def iter_all_children(row_iter, function, function_args):\n if isinstance(row_iter, Gtk.TreeIter):\n function(row_iter, *function_args)\n for n in reversed(range(self.tree_store.iter_n_children(row_iter))):\n child_iter = self.tree_store.iter_nth_child(row_iter, n)\n iter_all_children(child_iter, function, function_args)\n else:\n self._logger.warning(\"Iter has to be TreeIter -> handed argument is: {0}\".format(row_iter))\n\n # iter on root level of tree\n next_iter = self.tree_store.get_iter_first()\n while next_iter:\n iter_all_children(next_iter, function, function_args)\n next_iter = self.tree_store.iter_next(next_iter)", "response": "Iterate tree view with condition check function"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate tree selection prior condition of tree model row", "response": "def update_selection_sm_prior_condition(self, state_row_iter, selected_model_list, sm_selected_model_list):\n \"\"\"State machine prior update of tree selection for one tree model row\"\"\"\n selected_path = self.tree_store.get_path(state_row_iter)\n tree_model_row = self.tree_store[selected_path]\n model = tree_model_row[self.MODEL_STORAGE_ID]\n\n if model not in sm_selected_model_list and model in selected_model_list:\n self._tree_selection.unselect_iter(state_row_iter)\n elif model in sm_selected_model_list and model not in selected_model_list:\n self.tree_view.expand_to_path(selected_path)\n self._tree_selection.select_iter(state_row_iter)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_selection_self_prior_condition(self, state_row_iter, sm_selected_model_set, selected_model_list):\n selected_path = 
self.tree_store.get_path(state_row_iter)\n tree_model_row = self.tree_store[selected_path]\n model = tree_model_row[self.MODEL_STORAGE_ID]\n\n if model in sm_selected_model_set and model not in selected_model_list:\n sm_selected_model_set.remove(model)\n elif model not in sm_selected_model_set and model in selected_model_list:\n sm_selected_model_set.add(model)", "response": "Tree view prior update of one model in the state machine selection"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nupdates tree view prior of state machine selection", "response": "def update_selection_self_prior(self):\n \"\"\"Tree view prior update of state machine selection\"\"\"\n if self._do_selection_update:\n return\n self._do_selection_update = True\n tree_selection, selected_model_list, sm_selection, sm_selected_model_set = self.get_selections()\n if isinstance(sm_selection, Selection):\n # current sm_selected_model_set will be updated and hand it back\n self.iter_tree_with_handed_function(self.update_selection_self_prior_condition,\n sm_selected_model_set, selected_model_list)\n sm_selection.handle_prepared_selection_of_core_class_elements(self.CORE_ELEMENT_CLASS, sm_selected_model_set)\n # TODO check if we can solve the difference that occurs e.g. while complex actions?, or same state paths!\n # -> models in selection for core element not in the tree the function iter tree + condition tolerates this\n if not set(selected_model_list) == sm_selected_model_set:\n self._logger.verbose(\"Difference between tree view selection: \\n{0} \\nand state machine selection: \"\n \"\\n{1}\".format(set(selected_model_list), sm_selected_model_set))\n\n # TODO check why sometimes not consistent with sm selection. 
e.g while modification history test\n if self.check_selection_consistency(sm_check=False):\n self.update_selection_sm_prior()\n self._do_selection_update = False"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef update_selection_sm_prior(self):\n if self._do_selection_update:\n return\n self._do_selection_update = True\n tree_selection, selected_model_list, sm_selection, sm_selected_model_list = self.get_selections()\n if tree_selection is not None:\n # self._logger.info(\"SM SELECTION IS: {2}\\n{0}, \\n{1}\".format(selected_model_list, sm_selected_model_list,\n # tree_selection.get_mode()))\n self.iter_tree_with_handed_function(self.update_selection_sm_prior_condition,\n selected_model_list, sm_selected_model_list)\n self.check_selection_consistency()\n self._do_selection_update = False", "response": "State machine prior update of tree selection"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nselects the row entry belonging to the given core_element_id by cursor or tree selection", "response": "def select_entry(self, core_element_id, by_cursor=True):\n \"\"\"Selects the row entry belonging to the given core_element_id by cursor or tree selection\"\"\"\n path = self.get_path_for_core_element(core_element_id)\n if path:\n if by_cursor:\n self.tree_view.set_cursor(path)\n else:\n self.tree_view.get_selection().select_path(path)\n else:\n self._logger.warning(\"Path not valid: {0} (by_cursor {1})\".format(str(core_element_id), str(by_cursor)))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck whether the passed variable is a tuple with two floats or integers", "response": "def contains_geometric_info(var):\n \"\"\" Check whether the passed variable is a tuple with two floats or integers \"\"\"\n return isinstance(var, tuple) and len(var) == 2 and all(isinstance(val, (int, float)) for val in var)"} {"SOURCE": "codesearchnet", 
"instruction": "Create a Python 3 function to\ngenerate default meta data for a child state according its parent state model", "response": "def generate_default_state_meta_data(parent_state_m, canvas=None, num_child_state=None, gaphas_editor=True):\n \"\"\"Generate default meta data for a child state according its parent state\n\n The function could work with the handed num_child_state if all child state are not drawn, till now.\n The method checks if the parents meta data is consistent in canvas state view and model if a canvas instance is \n handed. \n\n :param rafcon.gui.models.container_state.ContainerStateModel parent_state_m: Model of the state were the child \n should be drawn into\n :param rafcon.gui.mygaphas.canvas.MyCanvas canvas: canvas instance the state will be drawn into\n :param int num_child_state: Number of child states already drawn in canvas parent state view\n :return child relative pos (tuple) in parent and it size (tuple)\n \"\"\"\n parent_size = parent_state_m.get_meta_data_editor()['size']\n if not contains_geometric_info(parent_size):\n raise ValueError(\"Invalid state size: {}\".format(parent_size))\n\n # use handed number of child states and otherwise take number of child states from parent state model\n num_child_state = len(parent_state_m.states) if num_child_state is None else num_child_state\n\n # check if respective canvas is handed if meta data of parent canvas view is consistent with model\n if canvas is not None:\n parent_state_v = canvas.get_view_for_model(parent_state_m)\n if not (parent_state_v.width, parent_state_v.height) == parent_size:\n logger.warning(\"Size meta data of model {0} is different to gaphas {1}\"\n \"\".format((parent_state_v.width, parent_state_v.height), parent_size))\n\n # Calculate default positions for the child states\n # Make the inset from the top left corner\n parent_state_width, parent_state_height = parent_size\n new_state_side_size = min(parent_state_width * 0.2, parent_state_height * 0.2)\n\n 
child_width = new_state_side_size\n child_height = new_state_side_size\n child_size = (child_width, child_height)\n child_spacing = max(child_size) * 1.2\n\n parent_margin = cal_margin(parent_size)\n # print(\"parent size\", parent_size, parent_margin)\n max_cols = (parent_state_width - 2*parent_margin) // child_spacing\n (row, col) = divmod(num_child_state, max_cols)\n\n max_rows = (parent_state_height - 2*parent_margin - 0.5*child_spacing) // (1.5*child_spacing)\n (overlapping, row) = divmod(row, max_rows)\n\n overlapping_step = 0.5*parent_margin\n max_overlaps_x = (parent_state_width - 2*parent_margin - child_width -\n (parent_margin + (max_cols - 1) * child_spacing + child_spacing - child_width)) // overlapping_step\n max_overlaps_y = (parent_state_height - 2*parent_margin - child_height -\n child_spacing * (1.5 * (max_rows - 1) + 1)) // overlapping_step\n # handle case of less space TODO check again not perfect, maybe that can be done more simple\n max_overlaps_x = 0 if max_overlaps_x < 0 else max_overlaps_x\n max_overlaps_y = 0 if max_overlaps_y < 0 else max_overlaps_y\n max_overlaps = min(max_overlaps_x, max_overlaps_y) + 1\n overlapping = divmod(overlapping, max_overlaps)[1]\n\n child_rel_pos_x = parent_margin + col * child_spacing + child_spacing - child_width + overlapping*overlapping_step\n child_rel_pos_y = child_spacing * (1.5 * row + 1.) 
+ overlapping*overlapping_step\n return (child_rel_pos_x, child_rel_pos_y), (new_state_side_size, new_state_side_size)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_boundary_clearance(left, right, top, bottom, frame, clearance=0.1):\n\n # print(\"old boundary\", left, right, top, bottom)\n width = right - left\n width = frame['size'][0] if width < frame['size'][0] else width\n left -= 0.5 * clearance * width\n left = 0 if left < 0 else left\n right += 0.5 * clearance * width\n\n height = bottom - top\n height = frame['size'][1] if height < frame['size'][1] else height\n top -= 0.5 * clearance * height\n top = 0 if top < 0 else top\n bottom += 0.5 * clearance * height\n # print(\"new boundary\", left, right, top, bottom)\n return left, right, top, bottom", "response": "Increase the space between left and right and top and bottom and the function increase the space between left and right and between top and bottom and between left and right parameters."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting boundaries of all models in a dictionary.", "response": "def get_boundaries_of_elements_in_dict(models_dict, clearance=0.):\n \"\"\" Get boundaries of all handed models\n\n The function checks all model meta data positions to increase boundary starting with a state or scoped variables.\n It is finally iterated over all states, data and logical port models and linkage if sufficient for respective\n graphical editor. At the end a clearance is added to the boundary if needed e.g. 
to secure size for opengl.\n\n :param models_dict: dict of all handed models\n :return: tuple of left, right, top and bottom value\n \"\"\"\n\n # Determine initial outer coordinates\n right = 0.\n bottom = 0.\n if 'states' in models_dict and models_dict['states']:\n left = list(models_dict['states'].items())[0][1].get_meta_data_editor()['rel_pos'][0]\n top = list(models_dict['states'].items())[0][1].get_meta_data_editor()['rel_pos'][1]\n elif 'scoped_variables' in models_dict and models_dict['scoped_variables']:\n left = list(models_dict['scoped_variables'].items())[0][1].get_meta_data_editor()['inner_rel_pos'][0]\n top = list(models_dict['scoped_variables'].items())[0][1].get_meta_data_editor()['inner_rel_pos'][1]\n else:\n all_ports = list(models_dict['input_data_ports'].values()) + list(models_dict['output_data_ports'].values()) + \\\n list(models_dict['scoped_variables'].values()) + list(models_dict['outcomes'].values())\n if len(set([port_m.core_element.parent for port_m in all_ports])) == 1:\n logger.info(\"Only one parent {0} {1}\".format(all_ports[0].core_element.parent, all_ports[0].parent.get_meta_data_editor()))\n if all_ports:\n left = all_ports[0].parent.get_meta_data_editor()['rel_pos'][0]\n top = all_ports[0].parent.get_meta_data_editor()['rel_pos'][1]\n else:\n raise ValueError(\"Get boundary method does not aspects all list elements empty in dictionary. 
{0}\"\n \"\".format(models_dict))\n\n def cal_max(max_x, max_y, rel_pos, size):\n max_x = size[0] + rel_pos[0] if size[0] + rel_pos[0] > max_x else max_x\n max_y = rel_pos[1] + size[1] if rel_pos[1] + size[1] > max_y else max_y\n return max_x, max_y\n\n def cal_min(min_x, min_y, rel_pos, size):\n min_x = rel_pos[0] if rel_pos[0] < min_x else min_x\n min_y = rel_pos[1] if rel_pos[1] < min_y else min_y\n return min_x, min_y\n\n # Finally take into account all relevant elements in models_dict\n # -> states, scoped variables (maybe input- and output- data ports) and transitions and data flows are relevant\n parts = ['states', 'transitions', 'data_flows']\n for key in parts:\n elems_dict = models_dict[key]\n rel_positions = []\n for model in elems_dict.values():\n _size = (0., 0.)\n if key == 'states':\n rel_positions = [model.get_meta_data_editor()['rel_pos']]\n _size = model.get_meta_data_editor()['size']\n # print(key, rel_positions, _size)\n elif key in ['scoped_variables', 'input_data_ports', 'output_data_ports']:\n rel_positions = [model.get_meta_data_editor()['inner_rel_pos']]\n # TODO check to take the ports size into account\n # print(key, rel_positions, _size)\n elif key in ['transitions', 'data_flows']:\n if key is \"data_flows\":\n # take into account the meta data positions of opengl if there is some (always in opengl format)\n rel_positions = mirror_waypoints(deepcopy(model.get_meta_data_editor()))['waypoints']\n else:\n rel_positions = model.get_meta_data_editor()['waypoints']\n # print(key, rel_positions, _size, model.meta)\n\n for rel_position in rel_positions:\n # check for empty fields and ignore them at this point\n if not contains_geometric_info(rel_position):\n continue\n right, bottom = cal_max(right, bottom, rel_position, _size)\n left, top = cal_min(left, top, rel_position, _size)\n # print(\"new edges:\", left, right, top, bottom, key)\n\n # increase of boundary results into bigger estimated size and finally stronger reduction of original 
element sizes\n left, right, top, bottom = add_boundary_clearance(left, right, top, bottom, {'size': (0., 0.)}, clearance)\n return left, right, top, bottom"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef cal_frame_according_boundaries(left, right, top, bottom, parent_size, gaphas_editor=True, group=True):\n # print(\"parent_size ->\", parent_size)\n margin = cal_margin(parent_size)\n # Add margin and ensure that the upper left corner is within the state\n if group:\n # frame of grouped state\n rel_pos = max(left - margin, 0), max(top - margin, 0)\n # Add margin and ensure that the lower right corner is within the state\n size = (min(right - left + 2 * margin, parent_size[0] - rel_pos[0]),\n min(bottom - top + 2 * margin, parent_size[1] - rel_pos[1]))\n else:\n # frame inside of state\n # rel_pos = max(margin, 0), max(margin, 0)\n rel_pos = left, top\n size = right - left, bottom - top\n\n return margin, rel_pos, size", "response": "Generate margin relative position and size handed boundary parameter and parent size"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds relative position offset to all handed models in dict", "response": "def offset_rel_pos_of_all_models_in_dict(models_dict, pos_offset, gaphas_editor=True):\n \"\"\" Add position offset to all handed models in dict\"\"\"\n # print(\"\\n\", \"#\"*30, \"offset models\", pos_offset, \"#\"*30)\n # Update relative position of states within the container in order to maintain their absolute position\n for child_state_m in models_dict['states'].values():\n old_rel_pos = child_state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['rel_pos']\n # print(\"old_rel_pos\", old_rel_pos, child_state_m)\n child_state_m.set_meta_data_editor('rel_pos', add_pos(old_rel_pos, pos_offset), from_gaphas=gaphas_editor)\n # print(\"new_rel_pos\", child_state_m.get_meta_data_editor(for_gaphas=gaphas_editor), 
child_state_m)\n\n # Do the same for scoped variable\n if not gaphas_editor:\n for scoped_variable_m in models_dict['scoped_variables'].values():\n old_rel_pos = scoped_variable_m.get_meta_data_editor(for_gaphas=gaphas_editor)['inner_rel_pos']\n scoped_variable_m.set_meta_data_editor('inner_rel_pos', add_pos(old_rel_pos, pos_offset), gaphas_editor)\n\n # Do the same for all connections (transitions and data flows)\n connection_models = list(models_dict['transitions'].values()) + list(models_dict['data_flows'].values())\n for connection_m in connection_models:\n old_waypoints = connection_m.get_meta_data_editor(for_gaphas=gaphas_editor)['waypoints']\n new_waypoints = []\n for waypoint in old_waypoints:\n from rafcon.gui.models.data_flow import DataFlowModel\n if isinstance(connection_m, DataFlowModel) and gaphas_editor:\n new_waypoints.append(add_pos(waypoint, (pos_offset[0], -pos_offset[1])))\n else:\n new_waypoints.append(add_pos(waypoint, pos_offset))\n connection_m.set_meta_data_editor('waypoints', new_waypoints, from_gaphas=gaphas_editor)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nscales the ports of the library model accordingly relative to the state_copy meta size.", "response": "def scale_library_ports_meta_data(state_m, gaphas_editor=True):\n \"\"\"Scale the ports of library model accordingly relative to state_copy meta size.\n \n The function assumes that the meta data of ports of the state_copy of the library was copied to \n respective elements in the library and that those was not adjusted before.\n \"\"\"\n if state_m.meta_data_was_scaled:\n return\n state_m.income.set_meta_data_editor('rel_pos', state_m.state_copy.income.get_meta_data_editor()['rel_pos'])\n # print(\"scale_library_ports_meta_data \", state_m.get_meta_data_editor()['size'], \\)\n # state_m.state_copy.get_meta_data_editor()['size']\n factor = divide_two_vectors(state_m.get_meta_data_editor()['size'],\n 
state_m.state_copy.get_meta_data_editor()['size'])\n\n # print(\"scale_library_ports_meta_data -> resize_state_port_meta\", factor)\n if contains_geometric_info(factor):\n resize_state_port_meta(state_m, factor, True)\n state_m.meta_data_was_scaled = True\n else:\n logger.info(\"Skip resize of library ports meta data {0}\".format(state_m))"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nscales the meta data of the content of a LibraryStateModel to fit the current size of the LibraryStateModel.", "response": "def scale_library_content(library_state_m, gaphas_editor=True):\n \"\"\"Scales the meta data of the content of a LibraryState\n \n The contents of the `LibraryStateModel` `library_state_m` (i.e., the `state_copy` and all it children/state \n elements) to fit the current size of the `LibraryStateModel`.\n \n :param LibraryStateModel library_state_m: The library who's content is to be resized \n :param bool gaphas_editor: Whether to use the meta data for the GraphicalEditor using gaphas (default: True) \n \"\"\"\n assert isinstance(library_state_m, LibraryStateModel)\n # For library states with an ExecutionState as state_copy, scaling does not make sense\n if not isinstance(library_state_m.state_copy, ContainerStateModel):\n return\n\n # use same size for state copy and put rel_pos to zero\n library_meta = library_state_m.get_meta_data_editor(gaphas_editor)\n state_copy_meta = library_state_m.state_copy.set_meta_data_editor('size', library_meta['size'], gaphas_editor)\n library_state_m.state_copy.set_meta_data_editor('rel_pos', (0., 0.), from_gaphas=gaphas_editor)\n\n # work around that gaphas draws in state_copy coordinates (which is not shown) -> reduce state copy size\n if gaphas_editor:\n library_state_margin = cal_margin(state_copy_meta['size'])\n state_copy_size = subtract_pos(state_copy_meta['size'], (2*library_state_margin, 2*library_state_margin))\n library_state_m.state_copy.set_meta_data_editor('size', 
state_copy_size, gaphas_editor)\n\n # if meta data has empty fields put default data on state meta data\n if model_has_empty_meta(library_state_m.state_copy) and \\\n not put_default_meta_data_on_state_m_recursively(library_state_m.state_copy, library_state_m,\n only_child_states=True):\n return\n\n # prepare resize by collecting all state elements in the models_dict\n # do resize in respect to state copy\n # (opengl same size as library state and in case of gaphas reduced by library state margin)\n models_dict = {'state': library_state_m.state_copy}\n for state_element_key in library_state_m.state_copy.state.state_element_attrs:\n state_element_list = getattr(library_state_m.state_copy, state_element_key)\n # Some models are hold in a gtkmvc3.support.wrappers.ObsListWrapper, not a list\n if hasattr(state_element_list, 'keys'):\n state_element_list = state_element_list.values()\n models_dict[state_element_key] = {elem.core_element.core_element_id: elem for elem in state_element_list}\n\n # perform final resize\n resize_factor = (1., 1.)\n try:\n if not models_dict['states'] and (not models_dict['scoped_variables'] or gaphas_editor):\n logger.info(\"Skip scaling for empty root state {0}.\".format(library_state_m.state))\n else:\n resize_factor = scale_meta_data_according_state(models_dict, fill_up=True)\n except:\n logger.exception(\"Scale library content of {0} cause a problem.\".format(library_state_m.state))\n finally:\n resize_income_of_state_m(library_state_m.state_copy, resize_factor, gaphas_editor)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef resize_state_port_meta(state_m, factor, gaphas_editor=True):\n # print(\"scale ports\", factor, state_m, gaphas_editor)\n if not gaphas_editor and isinstance(state_m, ContainerStateModel):\n port_models = state_m.input_data_ports[:] + state_m.output_data_ports[:] + state_m.scoped_variables[:]\n else:\n port_models = state_m.input_data_ports[:] + 
state_m.output_data_ports[:] + state_m.outcomes[:]\n port_models += state_m.scoped_variables[:] if isinstance(state_m, ContainerStateModel) else []\n _resize_port_models_list(port_models, 'rel_pos' if gaphas_editor else 'inner_rel_pos', factor, gaphas_editor)\n\n resize_income_of_state_m(state_m, factor, gaphas_editor)", "response": "Resize data and logical ports relative positions"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef resize_state_meta(state_m, factor, gaphas_editor=True):\n # print(\"START RESIZE OF STATE\", state_m.get_meta_data_editor(for_gaphas=gaphas_editor), state_m)\n old_rel_pos = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['rel_pos']\n # print(\"old_rel_pos state\", old_rel_pos, state_m.core_element)\n state_m.set_meta_data_editor('rel_pos', mult_two_vectors(factor, old_rel_pos), from_gaphas=gaphas_editor)\n # print(\"new_rel_pos state\", state_m.get_meta_data_editor(for_gaphas=gaphas_editor), state_m.core_element)\n\n # print(\"resize factor\", factor, state_m, state_m.meta)\n old_size = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['size']\n # print(\"old_size\", old_size, type(old_size))\n state_m.set_meta_data_editor('size', mult_two_vectors(factor, old_size), from_gaphas=gaphas_editor)\n # print(\"new_size\", state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['size'])\n if gaphas_editor:\n old_rel_pos = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['name']['rel_pos']\n state_m.set_meta_data_editor('name.rel_pos', mult_two_vectors(factor, old_rel_pos), from_gaphas=gaphas_editor)\n old_size = state_m.get_meta_data_editor(for_gaphas=gaphas_editor)['name']['size']\n state_m.set_meta_data_editor('name.size', mult_two_vectors(factor, old_size), from_gaphas=gaphas_editor)\n\n if isinstance(state_m, LibraryStateModel):\n # print(\"LIBRARY\", state_m)\n if gaphas_editor and state_m.state_copy_initialized:\n if state_m.meta_data_was_scaled:\n 
resize_state_port_meta(state_m, factor, gaphas_editor)\n else:\n scale_library_ports_meta_data(state_m, gaphas_editor)\n\n if state_m.state_copy_initialized:\n resize_state_meta(state_m.state_copy, factor, gaphas_editor)\n # print(\"END LIBRARY RESIZE\")\n else:\n # print(\"resize_state_meta -> resize_state_port_meta\")\n resize_state_port_meta(state_m, factor, gaphas_editor)\n if isinstance(state_m, ContainerStateModel):\n _resize_connection_models_list(state_m.transitions[:] + state_m.data_flows[:], factor, gaphas_editor)\n for child_state_m in state_m.states.values():\n resize_state_meta(child_state_m, factor, gaphas_editor)", "response": "Resize state meta data recursively what includes also LibraryStateModels meta data and its internal state_copy\n "} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef offset_rel_pos_of_models_meta_data_according_parent_state(models_dict):\n old_parent_rel_pos = models_dict['state'].get_meta_data_editor()['rel_pos']\n offset_rel_pos_of_all_models_in_dict(models_dict, pos_offset=old_parent_rel_pos)\n\n return True", "response": "Offset meta data of state elements according the area used indicated by the state meta data."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef scale_meta_data_according_states(models_dict):\n left, right, top, bottom = get_boundaries_of_elements_in_dict(models_dict=models_dict)\n\n parent_size = models_dict['state'].parent.get_meta_data_editor()['size']\n _, rel_pos, size = cal_frame_according_boundaries(left, right, top, bottom, parent_size)\n\n # Set size and position of new container state\n models_dict['state'].set_meta_data_editor('rel_pos', rel_pos)\n models_dict['state'].set_meta_data_editor('size', size)\n offset = mult_two_vectors((-1., -1.), rel_pos)\n offset_rel_pos_of_all_models_in_dict(models_dict, offset)\n\n return True", "response": "Scale meta data according 
states of group states."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nscales meta - data according to state.", "response": "def scale_meta_data_according_state(models_dict, rel_pos=None, as_template=False, fill_up=False):\n # TODO Documentation needed....\n \"\"\" Scale elements in dict to fit into dict state key element.\n\n If elements are already small enough no resize is performed.\n \"\"\"\n # TODO check about positions of input-data- and output-data- or scoped variable-ports is needed\n # TODO adjustments of data ports positions are not sufficient -> origin state size is maybe needed for that\n # TODO consistency check on boundary and scale parameter for every if else case\n\n if 'states' in models_dict or 'scoped_variables' in models_dict:\n left, right, top, bottom = get_boundaries_of_elements_in_dict(models_dict=models_dict)\n parent_size = models_dict['state'].get_meta_data_editor()['size']\n margin, old_rel_pos, size = cal_frame_according_boundaries(left, right, top, bottom, parent_size, group=False)\n automatic_mode = True if rel_pos is None else False\n clearance = 0.2 if rel_pos is None else 0.\n rel_pos = (margin, margin) if rel_pos is None else rel_pos\n # rel_pos = (0., 0.) 
if fill_up else rel_pos\n clearance_scale = 1 + clearance\n\n assert parent_size[0] > rel_pos[0]\n assert parent_size[1] > rel_pos[1]\n # print(\"edges:\", left, right, top, bottom)\n # print(\"margin:\", margin, \"rel_pos:\", rel_pos, \"old_rel_pos\", old_rel_pos, \"size:\", size)\n\n parent_width, parent_height = parent_size\n\n boundary_width, boundary_height = size\n # print(\"parent width: {0}, parent_height: {1}\".format(parent_width, parent_height))\n # print(\"boundary width: {0}, boundary_height: {1}\".format(boundary_width, boundary_height))\n\n # no site scale\n if parent_width - rel_pos[0] > boundary_width * clearance_scale and \\\n parent_height - rel_pos[1] > boundary_height * clearance_scale and not fill_up:\n if automatic_mode:\n resize_factor = 1.\n boundary_width_in_parent = boundary_width*resize_factor\n # print(boundary_width, resize_factor, boundary_width*resize_factor, boundary_height*resize_factor + margin*2, parent_height)\n # print(\"left over width: \", parent_width - boundary_width_in_parent - rel_pos[0] - margin)\n width_pos_offset_to_middle = (parent_width - boundary_width_in_parent - rel_pos[0] - margin)/2.\n rel_pos = add_pos(rel_pos, (width_pos_offset_to_middle, 0.))\n boundary_height_in_parent = boundary_height*resize_factor\n # print(\"left over height: \", parent_height - boundary_height_in_parent - rel_pos[0] - margin)\n height_pos_offset_to_middle = (parent_height - boundary_height_in_parent - rel_pos[1] - margin)/2.\n rel_pos = add_pos(rel_pos, (0., height_pos_offset_to_middle))\n offset = subtract_pos((0., 0.), subtract_pos(old_rel_pos, rel_pos))\n offset_rel_pos_of_all_models_in_dict(models_dict, offset)\n return 1, 1 # (1, 1), offset, rel_pos\n # smallest site scale\n else:\n # increase of boundary or clearance results into bigger estimated size and finally stronger\n # reduction of original element sizes\n left, right, top, bottom = get_boundaries_of_elements_in_dict(models_dict=models_dict, clearance=clearance)\n 
parent_size = models_dict['state'].get_meta_data_editor()['size']\n margin, old_rel_pos, size = cal_frame_according_boundaries(left, right, top, bottom, parent_size, group=False)\n\n rel_pos = (margin, margin) if rel_pos is None else rel_pos\n assert parent_size[0] > rel_pos[0]\n assert parent_size[1] > rel_pos[1]\n # print(\"edges:\", left, right, top, bottom)\n # print(\"margin:\", margin, \"rel_pos:\", rel_pos, \"old_rel_pos\", old_rel_pos, \"size:\", size)\n\n parent_width, parent_height = parent_size\n\n boundary_width, boundary_height = size\n if (parent_height - rel_pos[1] - margin)/boundary_height < \\\n (parent_width - rel_pos[0] - margin)/boundary_width:\n # print(\"#2\"*20, 1, \"#\"*20, rel_pos)\n resize_factor = (parent_height - rel_pos[1] - margin)/boundary_height\n boundary_width_in_parent = boundary_width*resize_factor\n # print(boundary_width, resize_factor, boundary_width*resize_factor)\n # print(\"left over width: \", parent_width - boundary_width_in_parent - rel_pos[0] - margin)\n width_pos_offset_to_middle = (parent_width - boundary_width_in_parent - rel_pos[0] - margin)/2.\n rel_pos = add_pos(rel_pos, (width_pos_offset_to_middle, 0.))\n # print(resize_factor, rel_pos)\n else:\n # print(\"#2\"*20, 2, \"#\"*20, rel_pos)\n resize_factor = (parent_width - rel_pos[0] - margin)/boundary_width\n boundary_height_in_parent = boundary_height*resize_factor\n # print(boundary_width, resize_factor, boundary_width*resize_factor)\n # print(\"left over height: \", parent_height - boundary_height_in_parent - rel_pos[0] - margin)\n height_pos_offset_to_middle = (parent_height - boundary_height_in_parent - rel_pos[1] - margin)/2.\n rel_pos = add_pos(rel_pos, (0., height_pos_offset_to_middle))\n # print(resize_factor, rel_pos)\n frame = {'rel_pos': rel_pos, 'size': mult_two_vectors((resize_factor, resize_factor), size)}\n # print(models_dict['state'].get_meta_data_editor()['rel_pos'], \\)\n # parent_size, 
models_dict['state'].parent.get_meta_data_editor()['size']\n # print(\"frame\", frame, resize_factor)\n # # rel_pos = mult_two_vectors((1, -1), frame['rel_pos'])\n offset = subtract_pos((0., 0.), old_rel_pos)\n offset_rel_pos_of_all_models_in_dict(models_dict, offset)\n\n resize_of_all_models_in_dict(models_dict, (resize_factor, resize_factor))\n\n offset_rel_pos_of_all_models_in_dict(models_dict, frame['rel_pos'])\n # scale_meta_data_according_frame(models_dict, frame)\n return resize_factor, resize_factor # (resize_factor, resize_factor) , offset, frame['rel_pos']\n else:\n return 1, 1"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_closest_sibling_state(state_m, from_logical_port=None):\n if not state_m.parent:\n logger.warning(\"A state can not have a closest sibling state if it has not parent as {0}\".format(state_m))\n return\n\n margin = cal_margin(state_m.parent.get_meta_data_editor()['size'])\n pos = state_m.get_meta_data_editor()['rel_pos']\n size = state_m.get_meta_data_editor()['size'] # otherwise measure from reference state itself\n if from_logical_port in [\"outcome\", \"income\"]:\n size = (margin, margin)\n if from_logical_port == \"outcome\":\n outcomes_m = [outcome_m for outcome_m in state_m.outcomes if outcome_m.outcome.outcome_id >= 0]\n free_outcomes_m = [oc_m for oc_m in outcomes_m\n if not state_m.state.parent.get_transition_for_outcome(state_m.state, oc_m.outcome)]\n if free_outcomes_m:\n outcome_m = free_outcomes_m[0]\n else:\n outcome_m = outcomes_m[0]\n pos = add_pos(pos, outcome_m.get_meta_data_editor()['rel_pos'])\n elif from_logical_port == \"income\":\n pos = add_pos(pos, state_m.income.get_meta_data_editor()['rel_pos'])\n\n min_distance = None\n for sibling_state_m in state_m.parent.states.values():\n if sibling_state_m is state_m:\n continue\n\n sibling_pos = sibling_state_m.get_meta_data_editor()['rel_pos']\n sibling_size = sibling_state_m.get_meta_data_editor()['size']\n\n 
distance = geometry.cal_dist_between_2_coord_frame_aligned_boxes(pos, size, sibling_pos, sibling_size)\n\n if not min_distance or min_distance[0] > distance:\n min_distance = (distance, sibling_state_m)\n\n return min_distance", "response": "Calculate the closest sibling state from optional logical port of handed state model"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncollect non empty list dict to create arguments for action signal and msg and logger messages", "response": "def get_action_arguments(self, target_state_m):\n \"\"\" Collect argument attributes for action signal\n\n Use non empty list dict to create arguments for action signal msg and logger messages. The action parent model\n can be different then the target state model because logical and data port changes also may influence the\n linkage, see action-module (undo/redo).\n\n :param rafcon.gui.models.abstract_state.AbstractStateModel target_state_m: State model of target of action\n :return: dict with lists of elements part of the action, action parent model\n \"\"\"\n non_empty_lists_dict = {key: elems for key, elems in self.model_copies.items() if elems}\n port_attrs = ['input_data_ports', 'output_data_ports', 'scoped_variables', 'outcomes']\n port_is_pasted = any([key in non_empty_lists_dict for key in port_attrs])\n return non_empty_lists_dict, target_state_m.parent if target_state_m.parent and port_is_pasted else target_state_m"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncopy all selected items to the clipboard using smart selection adaptation by default", "response": "def copy(self, selection, smart_selection_adaption=True):\n \"\"\" Copy all selected items to the clipboard using smart selection adaptation by default\n \n :param selection: the current selection\n :param bool smart_selection_adaption: flag to enable smart selection adaptation mode\n :return:\n \"\"\"\n assert isinstance(selection, Selection)\n 
self.__create_core_and_model_object_copies(selection, smart_selection_adaption)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef cut(self, selection, smart_selection_adaption=False):\n\n assert isinstance(selection, Selection)\n\n import rafcon.gui.helpers.state_machine as gui_helper_state_machine\n if gui_helper_state_machine.is_selection_inside_of_library_state(selected_elements=selection.get_all()):\n logger.warning(\"Cut is not performed because elements inside of a library state are selected.\")\n return\n selection_dict_of_copied_models, parent_m = self.__create_core_and_model_object_copies(\n selection, smart_selection_adaption)\n\n non_empty_lists_dict, action_parent_m = self.get_action_arguments(parent_m if parent_m else None)\n\n action_parent_m.action_signal.emit(ActionSignalMsg(action='cut', origin='clipboard',\n action_parent_m=action_parent_m,\n affected_models=[], after=False,\n kwargs={'remove': non_empty_lists_dict}))\n\n for models in selection_dict_of_copied_models.values():\n gui_helper_state_machine.delete_core_elements_of_models(models, destroy=True,\n recursive=True, force=False)\n affected_models = [model for models in non_empty_lists_dict.values() for model in models]\n action_parent_m.action_signal.emit(ActionSignalMsg(action='cut', origin='clipboard',\n action_parent_m=action_parent_m,\n affected_models=affected_models, after=True))", "response": "Cuts all selected items and copy them to the clipboard using smart selection adaptation by default"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef paste(self, target_state_m, cursor_position=None, limited=None, convert=False):\n self.reset_clipboard_mapping_dicts()\n if not isinstance(target_state_m, StateModel):\n logger.warning(\"Paste is not performed because target state indication has to be a StateModel not {0}\"\n \"\".format(target_state_m.__class__.__name__))\n 
return\n if target_state_m.state.get_next_upper_library_root_state() is not None:\n logger.warning(\"Paste is not performed because selected target state is inside of a library state.\")\n return\n\n element_m_copy_lists = self.model_copies\n self.prepare_new_copy() # threaded in future -> important that the copy is prepared here!!!\n # use non empty list dict to create arguments for action signal msg and logger messages\n dict_of_non_empty_lists_of_model_copies, action_parent_m = self.get_action_arguments(target_state_m)\n action_parent_m.action_signal.emit(ActionSignalMsg(action='paste', origin='clipboard',\n action_parent_m=action_parent_m,\n affected_models=[], after=False,\n kwargs={'insert': dict_of_non_empty_lists_of_model_copies,\n 'convert': convert,\n 'limited': limited}))\n self.state_id_mapping_dict[self.copy_parent_state_id] = target_state_m.state.state_id\n\n # prepare list of lists to copy for limited or converted paste of objects\n target_state_element_attrs = target_state_m.state.state_element_attrs\n if limited and all([state_element_attr in target_state_element_attrs for state_element_attr in limited]):\n if len(limited) == 1 and limited[0] in ['input_data_ports', 'output_data_ports', 'scoped_variables'] and convert:\n combined_list = element_m_copy_lists['input_data_ports'] + element_m_copy_lists['output_data_ports'] + \\\n element_m_copy_lists['scoped_variables']\n for state_element_attr in ['input_data_ports', 'output_data_ports', 'scoped_variables']:\n element_m_copy_lists[state_element_attr] = combined_list\n state_element_attrs_to_insert = limited\n else:\n state_element_attrs_to_insert = target_state_element_attrs\n\n # check list order and put transitions and data flows to the end\n for state_element_attr in ['transitions', 'data_flows']:\n if state_element_attr in state_element_attrs_to_insert:\n state_element_attrs_to_insert.remove(state_element_attr)\n state_element_attrs_to_insert.append(state_element_attr)\n\n def 
insert_elements_from_model_copies_list(model_list, state_element_name):\n \"\"\" Insert/add all core elements of model_list into the target_state_m\n\n The function returns a list of pairs of models (new and original models) because the target_state_m for\n some insert operations still generates a new model.\n :param list model_list: list of models\n :param str state_element_name: appendix string to \"_insert_*\" to get the attribute of respective methods in\n clipboard-class.\n :return: list of pairs of models (new and original models)\n :rtype: list[tuple]\n \"\"\"\n new_and_copied_models = []\n for orig_element_m_copy in model_list:\n try:\n # hold orig_element_m_copy related to newly generated model for debugging reasons\n # (its doubt that ids are fully correct, meta data is considered to be alright now)\n insert_function = getattr(self, '_insert_{0}'.format(state_element_name)) # e.g. self._insert_state\n new_element_m = insert_function(target_state_m, orig_element_m_copy)\n new_and_copied_models.append((new_element_m, orig_element_m_copy))\n except (ValueError, AttributeError, TypeError) as e:\n logger.warning(\"While inserting a {0} a failure was detected, exception: {1}.\"\n \"\".format(state_element_name, e))\n return new_and_copied_models\n\n # insert all lists and their elements into target state\n # insert_dict hold lists of pairs of models -> new (maybe generated by parent model) and original copy\n insert_dict = dict()\n for state_element_attr in state_element_attrs_to_insert:\n state_element_name = state_element_attr[:-1] # e.g. 
\"states\" => \"state\", \"outcomes\" => \"outcome\"\n insert_dict[state_element_attr] = \\\n insert_elements_from_model_copies_list(element_m_copy_lists[state_element_attr],\n state_element_name)\n\n # move meta data from original copied model to newly insert models and resize them to fit into target_state_m\n models_dict = {'state': target_state_m}\n for state_element_attr, state_elements in insert_dict.items():\n models_dict[state_element_attr] = {}\n for new_state_element_m, copied_state_element_m in state_elements:\n new_core_element_id = new_state_element_m.core_element.core_element_id\n models_dict[state_element_attr][new_core_element_id] = new_state_element_m\n\n affected_models = []\n for key, state_elements in insert_dict.items():\n if key == 'state':\n continue\n for new_state_element_m, copied_state_element_m in state_elements:\n affected_models.append(new_state_element_m)\n\n # commented parts are here for later use to detect empty meta data fields and debug those\n if all([all([not gui_helpers_meta_data.model_has_empty_meta(state_element_m) for state_element_m, _ in elems_list])\n if isinstance(elems_list, list) else gui_helpers_meta_data.model_has_empty_meta(elems_list)\n for elems_list in insert_dict.values()]) or \\\n len(dict_of_non_empty_lists_of_model_copies) == 1 and 'states' in dict_of_non_empty_lists_of_model_copies:\n try:\n gui_helpers_meta_data.scale_meta_data_according_state(models_dict)\n except:\n logger.exception(\"Scale of pasted content {0} cause a problems.\".format(models_dict))\n else:\n # TODO this should become a warning in the future or the meta module has to handle the empty data fields\n logger.info(\"Paste miss meta to scale. {0}\".format(affected_models))\n\n if not affected_models:\n logger.warning(\"Paste with no effect. 
No elements pasted from {0}\".format(dict_of_non_empty_lists_of_model_copies))\n action_parent_m.action_signal.emit(ActionSignalMsg(action='paste', origin='clipboard',\n action_parent_m=action_parent_m,\n affected_models=affected_models, after=True))\n return insert_dict", "response": "Paste objects to target state."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nresets the clipboard so that old elements do not pollute the new selection that is copied into the the clipboard.", "response": "def reset_clipboard(self):\n \"\"\" Resets the clipboard, so that old elements do not pollute the new selection that is copied into the\n clipboard.\n\n :return:\n \"\"\"\n # reset selections\n for state_element_attr in ContainerState.state_element_attrs:\n self.model_copies[state_element_attr] = []\n\n # reset parent state_id the copied elements are taken from\n self.copy_parent_state_id = None\n\n self.reset_clipboard_mapping_dicts()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nfinding and reduce selection to one parent state.", "response": "def do_selection_reduction_to_one_parent(selection):\n \"\"\" Find and reduce selection to one parent state.\n\n :param selection:\n :return: state model which is parent of selection or None if root state\n \"\"\"\n\n all_models_selected = selection.get_all()\n # check if all elements selected are on one hierarchy level -> TODO or in future are parts of sibling?!\n # if not take the state with the most siblings as the copy root\n parent_m_count_dict = {}\n for model in all_models_selected:\n parent_m_count_dict[model.parent] = parent_m_count_dict[model.parent] + 1 if model.parent in parent_m_count_dict else 1\n parent_m = None\n current_count_parent = 0\n for possible_parent_m, count in parent_m_count_dict.items():\n parent_m = possible_parent_m if current_count_parent < count else parent_m\n # if root no parent exist and only on model can be selected\n if 
len(selection.states) == 1 and selection.get_selected_state().state.is_root_state:\n parent_m = None\n # kick all selection except root_state\n if len(all_models_selected) > 1:\n selection.set(selection.get_selected_state())\n if parent_m is not None:\n # check and reduce selection\n for model in all_models_selected:\n if model.parent is not parent_m:\n selection.remove(model)\n\n return parent_m"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreducing and extend transition and data flow element selection if already enclosed by selection The smart selection adaptation checks and ignores directly data flows and transitions which are selected without selected related origin or targets elements. Additional the linkage (data flows and transitions) if those origins and targets are covered by the selected elements is added to the selection. Thereby the selection it self is manipulated to provide direct feedback to the user. :param selection: :param parent_m: :return:", "response": "def do_smart_selection_adaption(selection, parent_m):\n \"\"\" Reduce and extend transition and data flow element selection if already enclosed by selection\n\n The smart selection adaptation checks and ignores directly data flows and transitions which are selected\n without selected related origin or targets elements. 
Additional the linkage (data flows and transitions)\n if those origins and targets are covered by the selected elements is added to the selection.\n Thereby the selection it self is manipulated to provide direct feedback to the user.\n\n :param selection:\n :param parent_m:\n :return:\n \"\"\"\n\n def get_ports_related_to_data_flow(data_flow):\n from_port = data_flow.parent.get_data_port(data_flow.from_state, data_flow.from_key)\n to_port = data_flow.parent.get_data_port(data_flow.to_state, data_flow.to_key)\n return from_port, to_port\n\n def get_states_related_to_transition(transition):\n if transition.from_state == transition.parent.state_id or transition.from_state is None:\n from_state = transition.parent\n else:\n from_state = transition.parent.states[transition.from_state]\n if transition.to_state == transition.parent.state_id:\n to_state = transition.parent\n else:\n to_state = transition.parent.states[transition.to_state]\n if transition.to_outcome in transition.parent.outcomes:\n to_outcome = transition.parent.outcomes[transition.to_outcome]\n else:\n to_outcome = transition.to_outcome\n return from_state, to_state, to_outcome\n\n # reduce linkage selection by not fully by selection covered linkage\n possible_states = [state_m.state for state_m in selection.states]\n possible_outcomes = [outcome_m.outcome for outcome_m in selection.outcomes]\n for data_flow_m in selection.data_flows:\n from_port, to_port = get_ports_related_to_data_flow(data_flow_m.data_flow)\n if from_port.parent not in possible_states or to_port not in possible_states:\n selection.remove(data_flow_m)\n for transition_m in selection.transitions:\n from_state, to_state, to_oc = get_states_related_to_transition(transition_m.transition)\n if from_state not in possible_states or (to_state not in possible_states and to_oc not in possible_outcomes):\n selection.remove(transition_m)\n\n # extend linkage selection by fully by selected element enclosed linkage\n if parent_m and 
isinstance(parent_m.state, ContainerState):\n state_ids = [state.state_id for state in possible_states]\n port_ids = [sv_m.scoped_variable.data_port_id for sv_m in selection.scoped_variables] + \\\n [ip_m.data_port.data_port_id for ip_m in selection.input_data_ports] + \\\n [op_m.data_port.data_port_id for op_m in selection.output_data_ports]\n ports = [sv_m.scoped_variable for sv_m in selection.scoped_variables] + \\\n [ip_m.data_port for ip_m in selection.input_data_ports] + \\\n [op_m.data_port for op_m in selection.output_data_ports]\n related_transitions, related_data_flows = \\\n parent_m.state.related_linkage_states_and_scoped_variables(state_ids, port_ids)\n # extend by selected states or a port and a state enclosed data flows\n for data_flow in related_data_flows['enclosed']:\n data_flow_m = parent_m.get_data_flow_m(data_flow.data_flow_id)\n if data_flow_m not in selection.data_flows:\n selection.add(data_flow_m)\n # extend by selected ports enclosed data flows\n for data_flow_id, data_flow in parent_m.state.data_flows.items():\n from_port, to_port = get_ports_related_to_data_flow(data_flow)\n if from_port in ports and to_port in ports:\n selection.add(parent_m.get_data_flow_m(data_flow_id))\n # extend by selected states enclosed transitions\n for transition in related_transitions['enclosed']:\n transition_m = parent_m.get_transition_m(transition.transition_id)\n if transition_m not in selection.transitions:\n selection.add(transition_m)\n # extend by selected state and outcome enclosed transitions\n for transition_id, transition in parent_m.state.transitions.items():\n from_state, to_state, to_oc = get_states_related_to_transition(transition)\n if from_state in possible_states and to_oc in possible_outcomes:\n selection.add(parent_m.get_transition_m(transition_id))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef __create_core_and_model_object_copies(self, selection, smart_selection_adaption):\n\n 
all_models_selected = selection.get_all()\n if not all_models_selected:\n logger.warning(\"Nothing to copy because state machine selection is empty.\")\n return\n\n parent_m = self.do_selection_reduction_to_one_parent(selection)\n self.copy_parent_state_id = parent_m.state.state_id if parent_m else None\n\n if smart_selection_adaption:\n self.do_smart_selection_adaption(selection, parent_m)\n\n # store all lists of selection\n selected_models_dict = {}\n for state_element_attr in ContainerState.state_element_attrs:\n selected_models_dict[state_element_attr] = list(getattr(selection, state_element_attr))\n\n # delete old models\n self.destroy_all_models_in_dict(self.model_copies)\n # copy all selected elements\n self.model_copies = deepcopy(selected_models_dict)\n\n new_content_of_clipboard = ', '.join([\"{0} {1}\".format(len(elems), key if len(elems) > 1 else key[:-1])\n for key, elems in self.model_copies.items() if elems])\n logger.info(\"The new content is {0}\".format(new_content_of_clipboard.replace('_', ' ')))\n\n return selected_models_dict, parent_m", "response": "Create a dictionary of all models and model objects that are copied and the model objects of the selected elements."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef destroy_all_models_in_dict(target_dict):\n if target_dict:\n for model_list in target_dict.values():\n if isinstance(model_list, (list, tuple)):\n for model in model_list:\n model.prepare_destruction()\n if model._parent:\n model._parent = None\n else:\n raise Exception(\"wrong data in clipboard\")", "response": "Method runs the prepare destruction method of all models in a dict."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef destroy(self):\n self.destroy_all_models_in_dict(self.model_copies)\n self.model_copies = None\n self.copy_parent_state_id = None\n self.outcome_id_mapping_dict = None\n 
self.port_id_mapping_dict = None\n self.state_id_mapping_dict = None", "response": "Destroys the clipboard by relieving all model references."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef extend_extents(extents, factor=1.1):\n width = extents[2] - extents[0]\n height = extents[3] - extents[1]\n add_width = (factor - 1) * width\n add_height = (factor - 1) * height\n x1 = extents[0] - add_width / 2\n x2 = extents[2] + add_width / 2\n y1 = extents[1] - add_height / 2\n y2 = extents[3] + add_height / 2\n return x1, y1, x2, y2", "response": "Extend a bounding box by a given factor"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef calc_rel_pos_to_parent(canvas, item, handle):\n from gaphas.item import NW\n\n if isinstance(item, ConnectionView):\n return item.canvas.get_matrix_i2i(item, item.parent).transform_point(*handle.pos)\n\n parent = canvas.get_parent(item)\n if parent:\n return item.canvas.get_matrix_i2i(item, parent).transform_point(*handle.pos)\n else:\n return item.canvas.get_matrix_i2c(item).transform_point(*item.handles()[NW].pos)", "response": "This method calculates the relative position of the given item s handle to its parent."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef assert_exactly_one_true(bool_list):\n assert isinstance(bool_list, list)\n counter = 0\n for item in bool_list:\n if item:\n counter += 1\n return counter == 1", "response": "This method asserts that only one value of the provided list is True."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_port_for_handle(handle, state):\n from rafcon.gui.mygaphas.items.state import StateView\n if isinstance(state, StateView):\n if state.income.handle == handle:\n return state.income\n else:\n for outcome in state.outcomes:\n if outcome.handle == handle:\n return outcome\n 
for input in state.inputs:\n if input.handle == handle:\n return input\n for output in state.outputs:\n if output.handle == handle:\n return output\n for scoped in state.scoped_variables:\n if scoped.handle == handle:\n return scoped", "response": "Looks for and returns the PortView to the given handle in the provided state"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create_new_connection(from_port, to_port):\n from rafcon.gui.mygaphas.items.ports import ScopedVariablePortView, LogicPortView, DataPortView\n\n if isinstance(from_port, LogicPortView) and isinstance(to_port, LogicPortView):\n return add_transition_to_state(from_port, to_port)\n elif isinstance(from_port, (DataPortView, ScopedVariablePortView)) and \\\n isinstance(to_port, (DataPortView, ScopedVariablePortView)):\n return add_data_flow_to_state(from_port, to_port)\n # Both ports are not None\n elif from_port and to_port:\n logger.error(\"Connection of non-compatible ports: {0} and {1}\".format(type(from_port), type(to_port)))\n\n return False", "response": "Checks the type of connection and creates a new connection if it is not already added"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_data_flow_to_state(from_port, to_port):\n from rafcon.gui.mygaphas.items.ports import InputPortView, OutputPortView, ScopedVariablePortView\n from rafcon.gui.models.container_state import ContainerStateModel\n\n from_state_v = from_port.parent\n to_state_v = to_port.parent\n\n from_state_m = from_state_v.model\n to_state_m = to_state_v.model\n\n from_state_id = from_state_m.state.state_id\n to_state_id = to_state_m.state.state_id\n\n from_port_id = from_port.port_id\n to_port_id = to_port.port_id\n\n if not isinstance(from_port, (InputPortView, OutputPortView, ScopedVariablePortView)) or \\\n not isinstance(from_port, (InputPortView, OutputPortView, ScopedVariablePortView)):\n 
logger.error(\"Data flows only exist between data ports (input, output, scope). Given: {0} and {1}\".format(type(\n from_port), type(to_port)))\n return False\n\n responsible_parent_m = None\n\n # from parent to child\n if isinstance(from_state_m, ContainerStateModel) and \\\n check_if_dict_contains_object_reference_in_values(to_state_m.state, from_state_m.state.states):\n responsible_parent_m = from_state_m\n # from child to parent\n elif isinstance(to_state_m, ContainerStateModel) and \\\n check_if_dict_contains_object_reference_in_values(from_state_m.state, to_state_m.state.states):\n responsible_parent_m = to_state_m\n # from parent to parent\n elif isinstance(from_state_m, ContainerStateModel) and from_state_m.state is to_state_m.state:\n responsible_parent_m = from_state_m # == to_state_m\n # from child to child\n elif (not from_state_m.state.is_root_state) and (not to_state_m.state.is_root_state) \\\n and from_state_m.state is not to_state_m.state \\\n and from_state_m.parent.state.state_id and to_state_m.parent.state.state_id:\n responsible_parent_m = from_state_m.parent\n\n if not isinstance(responsible_parent_m, ContainerStateModel):\n logger.error(\"Data flows only exist in container states (e.g. 
hierarchy states)\")\n return False\n\n try:\n responsible_parent_m.state.add_data_flow(from_state_id, from_port_id, to_state_id, to_port_id)\n return True\n except (ValueError, AttributeError, TypeError) as e:\n logger.error(\"Data flow couldn't be added: {0}\".format(e))\n return False", "response": "This method adds a data flow between two state models."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_transition_to_state(from_port, to_port):\n from rafcon.gui.mygaphas.items.ports import IncomeView, OutcomeView\n\n from_state_v = from_port.parent\n to_state_v = to_port.parent\n\n from_state_m = from_state_v.model\n to_state_m = to_state_v.model\n\n # Gather necessary information to create transition\n from_state_id = from_state_m.state.state_id\n to_state_id = to_state_m.state.state_id\n\n responsible_parent_m = None\n\n # Start transition\n if isinstance(from_port, IncomeView):\n from_state_id = None\n from_outcome_id = None\n responsible_parent_m = from_state_m\n # Transition from parent income to child income\n if isinstance(to_port, IncomeView):\n to_outcome_id = None\n # Transition from parent income to parent outcome\n elif isinstance(to_port, OutcomeView):\n to_outcome_id = to_port.outcome_id\n elif isinstance(from_port, OutcomeView):\n from_outcome_id = from_port.outcome_id\n # Transition from child outcome to child income\n if isinstance(to_port, IncomeView):\n responsible_parent_m = from_state_m.parent\n to_outcome_id = None\n # Transition from child outcome to parent outcome\n elif isinstance(to_port, OutcomeView):\n responsible_parent_m = to_state_m\n to_outcome_id = to_port.outcome_id\n else:\n raise ValueError(\"Invalid port type\")\n\n from rafcon.gui.models.container_state import ContainerStateModel\n if not responsible_parent_m:\n logger.error(\"Transitions only exist between incomes and outcomes. 
Given: {0} and {1}\".format(type(\n from_port), type(to_port)))\n return False\n elif not isinstance(responsible_parent_m, ContainerStateModel):\n logger.error(\"Transitions only exist in container states (e.g. hierarchy states)\")\n return False\n\n try:\n t_id = responsible_parent_m.state.add_transition(from_state_id, from_outcome_id, to_state_id, to_outcome_id)\n if from_state_id == to_state_id:\n gui_helper_meta_data.insert_self_transition_meta_data(responsible_parent_m.states[from_state_id], t_id,\n combined_action=True)\n return True\n except (ValueError, AttributeError, TypeError) as e:\n logger.error(\"Transition couldn't be added: {0}\".format(e))\n return False", "response": "This method adds a transition from one state to another."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_relative_positions_of_waypoints(transition_v):\n handles_list = transition_v.handles()\n rel_pos_list = []\n for handle in handles_list:\n if handle in transition_v.end_handles(include_waypoints=True):\n continue\n rel_pos = transition_v.canvas.get_matrix_i2i(transition_v, transition_v.parent).transform_point(*handle.pos)\n rel_pos_list.append(rel_pos)\n return rel_pos_list", "response": "This method returns all relative positions of waypoints in a transition_v."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update_meta_data_for_transition_waypoints(graphical_editor_view, transition_v, last_waypoint_list, publish=True):\n\n from rafcon.gui.mygaphas.items.connection import TransitionView\n assert isinstance(transition_v, TransitionView)\n\n transition_m = transition_v.model\n waypoint_list = get_relative_positions_of_waypoints(transition_v)\n if waypoint_list != last_waypoint_list:\n transition_m.set_meta_data_editor('waypoints', waypoint_list)\n if publish:\n graphical_editor_view.emit('meta_data_changed', transition_m, \"waypoints\", False)", "response": "This 
method updates the meta data of the transition waypoints if they changed."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update_meta_data_for_name_view(graphical_editor_view, name_v, publish=True):\n from gaphas.item import NW\n\n rel_pos = calc_rel_pos_to_parent(graphical_editor_view.editor.canvas, name_v, name_v.handles()[NW])\n\n state_v = graphical_editor_view.editor.canvas.get_parent(name_v)\n state_v.model.set_meta_data_editor('name.size', (name_v.width, name_v.height))\n state_v.model.set_meta_data_editor('name.rel_pos', rel_pos)\n\n if publish:\n graphical_editor_view.emit('meta_data_changed', state_v.model, \"name_size\", False)", "response": "This method updates the meta data of a name view."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_meta_data_for_state_view(graphical_editor_view, state_v, affects_children=False, publish=True):\n from gaphas.item import NW\n\n # Update all port meta data to match with new position and size of parent\n update_meta_data_for_port(graphical_editor_view, state_v, None)\n\n if affects_children:\n update_meta_data_for_name_view(graphical_editor_view, state_v.name_view, publish=False)\n for transition_v in state_v.get_transitions():\n update_meta_data_for_transition_waypoints(graphical_editor_view, transition_v, None, publish=False)\n for child_state_v in state_v.child_state_views():\n update_meta_data_for_state_view(graphical_editor_view, child_state_v, True, publish=False)\n\n rel_pos = calc_rel_pos_to_parent(graphical_editor_view.editor.canvas, state_v, state_v.handles()[NW])\n\n state_v.model.set_meta_data_editor('size', (state_v.width, state_v.height))\n state_v.model.set_meta_data_editor('rel_pos', rel_pos)\n\n if publish:\n graphical_editor_view.emit('meta_data_changed', state_v.model, \"size\", affects_children)", "response": "This method updates the meta data of a state view. 
It updates all ports and transitions and child state views."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nremove all children and then the StateView itself.", "response": "def remove(self):\n \"\"\"Remove recursively all children and then the StateView itself\n \"\"\"\n self.canvas.get_first_view().unselect_item(self)\n\n for child in self.canvas.get_children(self)[:]:\n child.remove()\n\n self.remove_income()\n for outcome_v in self.outcomes[:]:\n self.remove_outcome(outcome_v)\n for input_port_v in self.inputs[:]:\n self.remove_input_port(input_port_v)\n for output_port_v in self.outputs[:]:\n self.remove_output_port(output_port_v)\n for scoped_variable_port_v in self.scoped_variables[:]:\n self.remove_scoped_variable(scoped_variable_port_v)\n\n self.remove_keep_rect_within_constraint_from_parent()\n for constraint in self._constraints[:]:\n self.canvas.solver.remove_constraint(constraint)\n self._constraints.remove(constraint)\n self.canvas.remove(self)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef set_enable_flag_keep_rect_within_constraints(self, enable):\n for child_state_v in self.child_state_views():\n self.keep_rect_constraints[child_state_v].enable = enable\n child_state_v.keep_rect_constraints[child_state_v._name_view].enable = enable", "response": "Enable or disable the KeepRectangleWithinConstraint for all child states."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nchecking if the state is a library with the show_content flag set", "response": "def show_content(self, with_content=False):\n \"\"\"Checks if the state is a library with the `show_content` flag set\n\n :param with_content: If this parameter is `True`, the method return only True if the library represents a\n ContainerState\n :return: Whether the content of a library state is shown\n \"\"\"\n if isinstance(self.model, LibraryStateModel) and 
self.model.show_content():\n return not with_content or isinstance(self.model.state_copy, ContainerStateModel)\n return False"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _calculate_port_pos_on_line(self, port_num, side_length, port_width=None):\n if port_width is None:\n port_width = 2 * self.border_width\n border_size = self.border_width\n pos = 0.5 * border_size + port_num * port_width\n outermost_pos = max(side_length / 2., side_length - 0.5 * border_size - port_width)\n pos = min(pos, outermost_pos)\n return pos", "response": "Calculate the position of a port on a line."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef write_dict_to_yaml(dictionary, path, **kwargs):\n with open(path, 'w') as f:\n yaml.dump(dictionary, f, indent=4, **kwargs)", "response": "Writes a dictionary to a yaml file"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef load_dict_from_yaml(path):\n f = file(path, 'r')\n dictionary = yaml.load(f)\n f.close()\n return dictionary", "response": "Loads a dictionary from a yaml file"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef write_dict_to_json(dictionary, path, **kwargs):\n result_string = json.dumps(dictionary, cls=JSONObjectEncoder,\n indent=4, check_circular=False, sort_keys=True, **kwargs)\n with open(path, 'w') as f:\n # We cannot write directly to the file, as otherwise the 'encode' method wouldn't be called\n f.write(result_string)", "response": "Writes a dictionary to a json file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef load_objects_from_json(path, as_dict=False):\n f = open(path, 'r')\n if as_dict:\n result = json.load(f)\n else:\n result = json.load(f, cls=JSONObjectDecoder, substitute_modules=substitute_modules)\n 
f.close()\n return result", "response": "Loads a dictionary from a json file."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef solve_for(self, var=None):\n if not self.enable:\n return\n\n margin = self.margin_method()\n\n def parent_width():\n return self.parent_se[0].value - self.parent_nw[0].value\n\n def parent_height():\n return self.parent_se[1].value - self.parent_nw[1].value\n\n def child_width():\n child_width = self.child_se[0].value - self.child_nw[0].value\n if child_width > parent_width() - 2 * margin:\n child_width = parent_width() - 2 * margin\n return max(self.child.min_width, child_width)\n\n def child_height():\n child_height = self.child_se[1].value - self.child_nw[1].value\n if child_height > parent_height() - 2 * margin:\n child_height = parent_height() - 2 * margin\n return max(self.child.min_height, child_height)\n\n updated = False\n # Left edge (west)\n if self.parent_nw[0].value > self.child_nw[0].value - margin + EPSILON:\n width = child_width()\n _update(self.child_nw[0], self.parent_nw[0].value + margin)\n _update(self.child_se[0], self.child_nw[0].value + width)\n updated = True\n # Right edge (east)\n elif self.parent_se[0].value < self.child_se[0].value + margin - EPSILON:\n width = child_width()\n _update(self.child_se[0], self.parent_se[0].value - margin)\n _update(self.child_nw[0], self.child_se[0].value - width)\n updated = True\n # Upper edge (north)\n if self.parent_nw[1].value > self.child_nw[1].value - margin + EPSILON:\n height = child_height()\n _update(self.child_nw[1], self.parent_nw[1].value + margin)\n _update(self.child_se[1], self.child_nw[1].value + height)\n updated = True\n # Lower edge (south)\n elif self.parent_se[1].value < self.child_se[1].value + margin - EPSILON:\n height = child_height()\n _update(self.child_se[1], self.parent_se[1].value - margin)\n _update(self.child_nw[1], self.child_se[1].value - height)\n updated = True\n\n from 
rafcon.gui.mygaphas.items.state import StateView\n if updated and isinstance(self.child, StateView):\n self.child.update_minimum_size_of_children()", "response": "Solve the child for a specific variable."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef solve_for(self, var=None):\n margin = self.margin_method()\n if self.parent_nw[0].value > self.child[0].value - margin:\n _update(self.child[0], self.parent_nw[0].value + margin)\n # Right edge (east)\n if self.parent_se[0].value < self.child[0].value + margin:\n _update(self.child[0], self.parent_se[0].value - margin)\n # Upper edge (north)\n if self.parent_nw[1].value > self.child[1].value - margin:\n _update(self.child[1], self.parent_nw[1].value + margin)\n # Lower edge (south)\n if self.parent_se[1].value < self.child[1].value + margin:\n _update(self.child[1], self.parent_se[1].value - margin)", "response": "Solve for the child of the log entry."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate the initial position of the port.", "response": "def update_port_side(self):\n \"\"\"Updates the initial position of the port\n\n The port side is ignored but calculated from the port position. 
Then the port position is limited to the four\n side lines of the state.\n \"\"\"\n from rafcon.utils.geometry import point_left_of_line\n\n p = (self._initial_pos.x, self._initial_pos.y)\n nw_x, nw_y, se_x, se_y = self.get_adjusted_border_positions()\n if point_left_of_line(p, (nw_x, nw_y), (se_x, se_y)): # upper right triangle of state\n if point_left_of_line(p, (nw_x, se_y), (se_x, nw_y)): # upper quarter triangle of state\n self._port.side = SnappedSide.TOP\n self.limit_pos(p[0], se_x, nw_x)\n else: # right quarter triangle of state\n self._port.side = SnappedSide.RIGHT\n self.limit_pos(p[1], se_y, nw_y)\n else: # lower left triangle of state\n if point_left_of_line(p, (nw_x, se_y), (se_x, nw_y)): # left quarter triangle of state\n self._port.side = SnappedSide.LEFT\n self.limit_pos(p[1], se_y, nw_y)\n else: # lower quarter triangle of state\n self._port.side = SnappedSide.BOTTOM\n self.limit_pos(p[0], se_x, nw_x)\n\n self.set_nearest_border()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsolving the port movement and keep it aligned with the binding rect.", "response": "def _solve(self):\n \"\"\"\n Calculates the correct position of the port and keeps it aligned with the binding rect\n \"\"\"\n\n # As the size of the containing state may has changed we need to update the distance to the border\n self.update_distance_to_border()\n px, py = self._point\n nw_x, nw_y, se_x, se_y = self.get_adjusted_border_positions()\n\n # If the port is located in one of the corners it is possible to move in two directions\n if ((self._initial_pos.x == nw_x and self._initial_pos.y == nw_y) or\n (self._initial_pos.x == se_x and self._initial_pos.y == nw_y) or\n (self._initial_pos.x == se_x and self._initial_pos.y == se_y) or\n (self._initial_pos.x == nw_x and self._initial_pos.y == se_y)):\n self.limit_pos(px, se_x, nw_x)\n self.limit_pos(py, se_y, nw_y)\n # If port movement starts at LEFT position, keep X position at place and move Y\n elif 
self._initial_pos.x == nw_x:\n _update(px, nw_x)\n self.limit_pos(py, se_y, nw_y)\n self._port.side = SnappedSide.LEFT\n # If port movement starts at TOP position, keep Y position at place and move X\n elif self._initial_pos.y == nw_y:\n _update(py, nw_y)\n self.limit_pos(px, se_x, nw_x)\n self._port.side = SnappedSide.TOP\n # If port movement starts at RIGHT position, keep X position at place and move Y\n elif self._initial_pos.x == se_x:\n _update(px, se_x)\n self.limit_pos(py, se_y, nw_y)\n self._port.side = SnappedSide.RIGHT\n # If port movement starts at BOTTOM position, keep Y position at place and move X\n elif self._initial_pos.y == se_y:\n _update(py, se_y)\n self.limit_pos(px, se_x, nw_x)\n self._port.side = SnappedSide.BOTTOM\n # If containing state has been resized, snap ports accordingly to border\n else:\n self.set_nearest_border()\n\n # Update initial position for next reference\n _update(self._initial_pos.x, deepcopy(px.value))\n _update(self._initial_pos.y, deepcopy(py.value))"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nlimits the position p to stay inside containing state", "response": "def limit_pos(p, se_pos, nw_pos):\n \"\"\"\n Limits position p to stay inside containing state\n :param p: Position to limit\n :param se_pos: Bottom/Right boundary\n :param nw_pos: Top/Left boundary\n :return:\n \"\"\"\n if p > se_pos:\n _update(p, se_pos)\n elif p < nw_pos:\n _update(p, nw_pos)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_adjusted_border_positions(self):\n nw_x, nw_y = self._rect[0]\n se_x, se_y = self._rect[1]\n\n nw_x += self._distance_to_border\n nw_y += self._distance_to_border\n se_x -= self._distance_to_border\n se_y -= self._distance_to_border\n\n return nw_x, nw_y, se_x, se_y", "response": "Calculates the positions to limit the port movement to\n "} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 
function to\nsnap the port to the correct side upon state size change", "response": "def set_nearest_border(self):\n \"\"\"Snaps the port to the correct side upon state size change\n \"\"\"\n px, py = self._point\n nw_x, nw_y, se_x, se_y = self.get_adjusted_border_positions()\n\n if self._port.side == SnappedSide.RIGHT:\n _update(px, se_x)\n elif self._port.side == SnappedSide.BOTTOM:\n _update(py, se_y)\n elif self._port.side == SnappedSide.LEFT:\n _update(px, nw_x)\n elif self._port.side == SnappedSide.TOP:\n _update(py, nw_y)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef remove_obsolete_folders(states, path):\n elements_in_folder = os.listdir(path)\n # find all state folder elements in system path\n state_folders_in_file_system = []\n for folder_name in elements_in_folder:\n if os.path.exists(os.path.join(path, folder_name, FILE_NAME_CORE_DATA)) or \\\n os.path.exists(os.path.join(path, folder_name, FILE_NAME_CORE_DATA_OLD)):\n state_folders_in_file_system.append(folder_name)\n\n # remove elements used by existing states and storage format\n for state in states:\n storage_folder_for_state = get_storage_id_for_state(state)\n if storage_folder_for_state in state_folders_in_file_system:\n state_folders_in_file_system.remove(storage_folder_for_state)\n\n # remove the remaining state folders\n for folder_name in state_folders_in_file_system:\n shutil.rmtree(os.path.join(path, folder_name))", "response": "Removes obsolete state machine folders"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef clean_path_from_deprecated_naming(base_path):\n def warning_logger_message(insert_string):\n not_allowed_characters = \"'\" + \"', '\".join(REPLACED_CHARACTERS_FOR_NO_OS_LIMITATION.keys()) + \"'\"\n logger.warning(\"Deprecated {2} in {0}. 
Please avoid to use the following characters {1}.\"\n \"\".format(base_path, not_allowed_characters, insert_string))\n from rafcon.core.singleton import library_manager\n if library_manager.is_os_path_within_library_root_paths(base_path):\n library_path, library_name = library_manager.get_library_path_and_name_for_os_path(base_path)\n clean_library_path = clean_path(library_path)\n clean_library_name = clean_path(library_name)\n if library_name != clean_library_name or library_path != clean_library_path:\n warning_logger_message(\"library path\")\n library_root_key = library_manager._get_library_root_key_for_os_path(base_path)\n library_root_path = library_manager._library_root_paths[library_root_key]\n clean_base_path = os.path.join(library_root_path, clean_library_path, clean_library_name)\n else:\n path_elements = base_path.split(os.path.sep)\n state_machine_folder_name = base_path.split(os.path.sep)[-1]\n path_elements[-1] = clean_path(state_machine_folder_name)\n if not state_machine_folder_name == path_elements[-1]:\n warning_logger_message(\"state machine folder name\")\n clean_base_path = os.path.sep.join(path_elements)\n return clean_base_path", "response": "Checks if the base path includes deprecated characters and returns corrected version\n "} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef clean_path(base_path):\n path_elements = base_path.split(os.path.sep)\n reduced_path_elements = [clean_path_element(elem, max_length=255) for elem in path_elements]\n if not all(path_elements[i] == elem for i, elem in enumerate(reduced_path_elements)):\n # logger.info(\"State machine storage path is reduced\")\n base_path = os.path.sep.join(reduced_path_elements)\n return base_path", "response": "This function cleans a file system path in terms of removing all not allowed characters of each path element."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it 
does\ndef save_state_machine_to_path(state_machine, base_path, delete_old_state_machine=False, as_copy=False):\n # warns the user in the logger when using deprecated names\n clean_path_from_deprecated_naming(base_path)\n\n state_machine.acquire_modification_lock()\n try:\n root_state = state_machine.root_state\n\n # clean old path first\n if delete_old_state_machine:\n if os.path.exists(base_path):\n shutil.rmtree(base_path)\n\n # Ensure that path is existing\n if not os.path.exists(base_path):\n os.makedirs(base_path)\n\n old_update_time = state_machine.last_update\n state_machine.last_update = storage_utils.get_current_time_string()\n state_machine_dict = state_machine.to_dict()\n storage_utils.write_dict_to_json(state_machine_dict, os.path.join(base_path, STATEMACHINE_FILE))\n\n # set the file_system_path of the state machine\n if not as_copy:\n state_machine.file_system_path = copy.copy(base_path)\n else:\n state_machine.last_update = old_update_time\n\n # add root state recursively\n remove_obsolete_folders([root_state], base_path)\n save_state_recursively(root_state, base_path, \"\", as_copy)\n\n if state_machine.marked_dirty and not as_copy:\n state_machine.marked_dirty = False\n logger.debug(\"State machine with id {0} was saved at {1}\".format(state_machine.state_machine_id, base_path))\n except Exception:\n raise\n finally:\n state_machine.release_modification_lock()", "response": "Saves a state machine to a file system."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef save_script_file_for_state_and_source_path(state, state_path_full, as_copy=False):\n from rafcon.core.states.execution_state import ExecutionState\n if isinstance(state, ExecutionState):\n source_script_file = os.path.join(state.script.path, state.script.filename)\n destination_script_file = os.path.join(state_path_full, SCRIPT_FILE)\n\n try:\n write_file(destination_script_file, state.script_text)\n except Exception:\n 
logger.exception(\"Storing of script file failed: {0} -> {1}\".format(state.get_path(),\n destination_script_file))\n raise\n\n if not source_script_file == destination_script_file and not as_copy:\n state.script.filename = SCRIPT_FILE\n state.script.path = state_path_full", "response": "Saves the script file for a state and the source path."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nsave the semantic data for the state to a separate json file.", "response": "def save_semantic_data_for_state(state, state_path_full):\n \"\"\"Saves the semantic data in a separate json file.\n\n :param state: The state of which the script file should be saved\n :param str state_path_full: The path to the file system storage location of the state\n \"\"\"\n\n destination_script_file = os.path.join(state_path_full, SEMANTIC_DATA_FILE)\n\n try:\n storage_utils.write_dict_to_json(state.semantic_data, destination_script_file)\n except IOError:\n logger.exception(\"Storing of semantic data for state {0} failed! 
Destination path: {1}\".\n format(state.get_path(), destination_script_file))\n raise"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef save_state_recursively(state, base_path, parent_path, as_copy=False):\n from rafcon.core.states.execution_state import ExecutionState\n from rafcon.core.states.container_state import ContainerState\n\n state_path = os.path.join(parent_path, get_storage_id_for_state(state))\n state_path_full = os.path.join(base_path, state_path)\n if not os.path.exists(state_path_full):\n os.makedirs(state_path_full)\n\n storage_utils.write_dict_to_json(state, os.path.join(state_path_full, FILE_NAME_CORE_DATA))\n if not as_copy:\n state.file_system_path = state_path_full\n\n if isinstance(state, ExecutionState):\n save_script_file_for_state_and_source_path(state, state_path_full, as_copy)\n\n save_semantic_data_for_state(state, state_path_full)\n\n # create yaml files for all children\n if isinstance(state, ContainerState):\n remove_obsolete_folders(state.states.values(), os.path.join(base_path, state_path))\n for state in state.states.values():\n save_state_recursively(state, base_path, state_path, as_copy)", "response": "Recursively saves a state to a json file"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef load_state_machine_from_path(base_path, state_machine_id=None):\n logger.debug(\"Loading state machine from path {0}...\".format(base_path))\n\n state_machine_file_path = os.path.join(base_path, STATEMACHINE_FILE)\n state_machine_file_path_old = os.path.join(base_path, STATEMACHINE_FILE_OLD)\n\n # was the root state specified as state machine base_path to load from?\n if not os.path.exists(state_machine_file_path) and not os.path.exists(state_machine_file_path_old):\n\n # catch the case that a state machine root file is handed\n if os.path.exists(base_path) and os.path.isfile(base_path):\n base_path = 
os.path.dirname(base_path)\n state_machine_file_path = os.path.join(base_path, STATEMACHINE_FILE)\n state_machine_file_path_old = os.path.join(base_path, STATEMACHINE_FILE_OLD)\n\n if not os.path.exists(state_machine_file_path) and not os.path.exists(state_machine_file_path_old):\n raise ValueError(\"Provided path doesn't contain a valid state machine: {0}\".format(base_path))\n\n state_machine_dict = storage_utils.load_objects_from_json(state_machine_file_path)\n if 'used_rafcon_version' in state_machine_dict:\n previously_used_rafcon_version = StrictVersion(state_machine_dict['used_rafcon_version']).version\n active_rafcon_version = StrictVersion(rafcon.__version__).version\n\n rafcon_newer_than_sm_version = \"You are trying to load a state machine that was stored with an older \" \\\n \"version of RAFCON ({0}) than the one you are using ({1}).\".format(\n state_machine_dict['used_rafcon_version'], rafcon.__version__)\n rafcon_older_than_sm_version = \"You are trying to load a state machine that was stored with an newer \" \\\n \"version of RAFCON ({0}) than the one you are using ({1}).\".format(\n state_machine_dict['used_rafcon_version'], rafcon.__version__)\n note_about_possible_incompatibility = \"The state machine will be loaded with no guarantee of success.\"\n\n if active_rafcon_version[0] > previously_used_rafcon_version[0]:\n # this is the default case\n # for a list of breaking changes please see: doc/breaking_changes.rst\n # logger.warning(rafcon_newer_than_sm_version)\n # logger.warning(note_about_possible_incompatibility)\n pass\n if active_rafcon_version[0] == previously_used_rafcon_version[0]:\n if active_rafcon_version[1] > previously_used_rafcon_version[1]:\n # this is the default case\n # for a list of breaking changes please see: doc/breaking_changes.rst\n # logger.info(rafcon_newer_than_sm_version)\n # logger.info(note_about_possible_incompatibility)\n pass\n elif active_rafcon_version[1] == previously_used_rafcon_version[1]:\n # Major and 
minor version of RAFCON and the state machine match\n # It should be safe to load the state machine, as the patch level does not change the format\n pass\n else:\n logger.warning(rafcon_older_than_sm_version)\n logger.warning(note_about_possible_incompatibility)\n else:\n logger.warning(rafcon_older_than_sm_version)\n logger.warning(note_about_possible_incompatibility)\n\n state_machine = StateMachine.from_dict(state_machine_dict, state_machine_id)\n if \"root_state_storage_id\" not in state_machine_dict:\n root_state_storage_id = state_machine_dict['root_state_id']\n state_machine.supports_saving_state_names = False\n else:\n root_state_storage_id = state_machine_dict['root_state_storage_id']\n\n root_state_path = os.path.join(base_path, root_state_storage_id)\n state_machine.file_system_path = base_path\n dirty_states = []\n state_machine.root_state = load_state_recursively(parent=state_machine, state_path=root_state_path,\n dirty_states=dirty_states)\n if len(dirty_states) > 0:\n state_machine.marked_dirty = True\n else:\n state_machine.marked_dirty = False\n\n hierarchy_level = 0\n number_of_states, hierarchy_level = state_machine.root_state.get_states_statistics(hierarchy_level)\n logger.debug(\"Loaded state machine ({1}) has {0} states. 
(Max hierarchy level {2})\".format(\n number_of_states, base_path, hierarchy_level))\n logger.debug(\"Loaded state machine ({1}) has {0} transitions.\".format(\n state_machine.root_state.get_number_of_transitions(), base_path))\n\n return state_machine", "response": "Loads a state machine from the given path."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef load_state_recursively(parent, state_path=None, dirty_states=[]):\n from rafcon.core.states.execution_state import ExecutionState\n from rafcon.core.states.container_state import ContainerState\n from rafcon.core.states.hierarchy_state import HierarchyState\n\n path_core_data = os.path.join(state_path, FILE_NAME_CORE_DATA)\n\n logger.debug(\"Load state recursively: {0}\".format(str(state_path)))\n\n # TODO: Should be removed with next minor release\n if not os.path.exists(path_core_data):\n path_core_data = os.path.join(state_path, FILE_NAME_CORE_DATA_OLD)\n\n try:\n state_info = load_data_file(path_core_data)\n except ValueError as e:\n logger.exception(\"Error while loading state data: {0}\".format(e))\n return\n except LibraryNotFoundException as e:\n logger.error(\"Library could not be loaded: {0}\\n\"\n \"Skipping library and continuing loading the state machine\".format(e))\n state_info = storage_utils.load_objects_from_json(path_core_data, as_dict=True)\n state_id = state_info[\"state_id\"]\n dummy_state = HierarchyState(LIBRARY_NOT_FOUND_DUMMY_STATE_NAME, state_id=state_id)\n # set parent of dummy state\n if isinstance(parent, ContainerState):\n parent.add_state(dummy_state, storage_load=True)\n else:\n dummy_state.parent = parent\n return dummy_state\n\n # Transitions and data flows are not added when loading a state, as also states are not added.\n # We have to wait until the child states are loaded, before adding transitions and data flows, as otherwise the\n # validity checks for transitions and data flows would fail\n if not 
isinstance(state_info, tuple):\n state = state_info\n else:\n state = state_info[0]\n transitions = state_info[1]\n data_flows = state_info[2]\n\n # set parent of state\n if parent is not None and isinstance(parent, ContainerState):\n parent.add_state(state, storage_load=True)\n else:\n state.parent = parent\n\n # read script file if an execution state\n if isinstance(state, ExecutionState):\n script_text = read_file(state_path, state.script.filename)\n state.script_text = script_text\n\n # load semantic data\n try:\n semantic_data = load_data_file(os.path.join(state_path, SEMANTIC_DATA_FILE))\n state.semantic_data = semantic_data\n except Exception as e:\n # semantic data file does not have to be there\n pass\n\n one_of_my_child_states_not_found = False\n\n # load child states\n for p in os.listdir(state_path):\n child_state_path = os.path.join(state_path, p)\n if os.path.isdir(child_state_path):\n child_state = load_state_recursively(state, child_state_path, dirty_states)\n if child_state.name is LIBRARY_NOT_FOUND_DUMMY_STATE_NAME:\n one_of_my_child_states_not_found = True\n\n if one_of_my_child_states_not_found:\n # omit adding transitions and data flows in this case\n pass\n else:\n # Now we can add transitions and data flows, as all child states were added\n if isinstance(state_info, tuple):\n state.transitions = transitions\n state.data_flows = data_flows\n\n state.file_system_path = state_path\n\n if state.marked_dirty:\n dirty_states.append(state)\n\n return state", "response": "Recursively loads the state of a state machine."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef load_data_file(path_of_file):\n if os.path.exists(path_of_file):\n return storage_utils.load_objects_from_json(path_of_file)\n raise ValueError(\"Data file not found: {0}\".format(path_of_file))", "response": "Loads the content of a file by using json. 
load.\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef limit_text_max_length(text, max_length, separator='_'):\n if max_length is not None:\n if isinstance(text, string_types) and len(text) > max_length:\n max_length = int(max_length)\n half_length = float(max_length - 1) / 2\n return text[:int(math.ceil(half_length))] + separator + text[-int(math.floor(half_length)):]\n return text", "response": "Limit the length of a string to a maximum length."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef clean_path_element(text, max_length=None, separator='_'):\n elements_to_replace = REPLACED_CHARACTERS_FOR_NO_OS_LIMITATION\n for elem, replace_with in elements_to_replace.items():\n text = text.replace(elem, replace_with)\n if max_length is not None:\n text = limit_text_max_length(text, max_length, separator)\n return text", "response": "Replace characters that conflict with a free OS choice when in a file system path."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nlimit the text to be a path element.", "response": "def limit_text_to_be_path_element(text, max_length=None, separator='_'):\n \"\"\" Replace characters that are not in the valid character set of RAFCON.\n\n :param text: the string to be cleaned\n :param max_length: the maximum length of the output string\n :param separator: the separator used for rafcon.core.storage.storage.limit_text_max_length\n :return:\n \"\"\"\n # TODO: Should there not only be one method i.e. 
either this one or \"clean_path_element\"\n elements_to_replace = {' ': '_', '*': '_'}\n for elem, replace_with in elements_to_replace.items():\n text = text.replace(elem, replace_with)\n text = re.sub('[^a-zA-Z0-9-_]', '', text)\n if max_length is not None:\n text = limit_text_max_length(text, max_length, separator)\n return text"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_storage_id_for_state(state):\n if global_config.get_config_value('STORAGE_PATH_WITH_STATE_NAME'):\n max_length = global_config.get_config_value('MAX_LENGTH_FOR_STATE_NAME_IN_STORAGE_PATH')\n\n max_length_of_state_name_in_folder_name = 255 - len(ID_NAME_DELIMITER + state.state_id)\n # TODO: should we allow \"None\" in config file?\n if max_length is None or max_length == \"None\" or max_length > max_length_of_state_name_in_folder_name:\n if max_length_of_state_name_in_folder_name < len(state.name):\n logger.info(\"The storage folder name is forced to be maximal 255 characters in length.\")\n max_length = max_length_of_state_name_in_folder_name\n\n return limit_text_to_be_path_element(state.name, max_length) + ID_NAME_DELIMITER + state.state_id\n else:\n return state.state_id", "response": "Calculates the storage id for a state"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef pane_position_check(self):\n text_buffer = self.get_buffer()\n\n # not needed if the right side bar is un-docked\n from rafcon.gui.singleton import main_window_controller\n\n if main_window_controller is None or main_window_controller.view is None:\n return\n from rafcon.gui.runtime_config import global_runtime_config\n if global_runtime_config.get_config_value('RIGHT_BAR_WINDOW_UNDOCKED'):\n return\n\n # move the pane left if the cursor is to far right and the pane position is less then 440 from its max position\n button_container_min_width = 
self.button_container_min_width\n width_of_all = button_container_min_width + self.tab_width\n text_view_width = button_container_min_width - self.line_numbers_width\n min_line_string_length = float(button_container_min_width)/float(self.source_view_character_size)\n\n current_pane_pos = main_window_controller.view['right_h_pane'].get_property('position')\n max_position = main_window_controller.view['right_h_pane'].get_property('max_position')\n pane_rel_pos = main_window_controller.view['right_h_pane'].get_property('max_position') - current_pane_pos\n if pane_rel_pos >= width_of_all + self.line_numbers_width:\n pass\n else:\n cursor_line_offset = text_buffer.get_iter_at_offset(text_buffer.props.cursor_position).get_line_offset()\n needed_rel_pos = text_view_width/min_line_string_length*cursor_line_offset \\\n + self.tab_width + self.line_numbers_width\n needed_rel_pos = min(width_of_all, needed_rel_pos)\n if pane_rel_pos >= needed_rel_pos:\n pass\n else:\n main_window_controller.view['right_h_pane'].set_property('position', max_position - needed_rel_pos)\n spacer_width = int(width_of_all + self.line_numbers_width - needed_rel_pos)\n self.spacer_frame.set_size_request(width=spacer_width, height=-1)", "response": "Updates the right bar pane position if needed"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef push_call_history_item(self, state, call_type, state_for_scoped_data, input_data=None):\n last_history_item = self.get_last_history_item()\n from rafcon.core.states.library_state import LibraryState # delayed imported on purpose\n if isinstance(state_for_scoped_data, LibraryState):\n state_for_scoped_data = state_for_scoped_data.state_copy\n return_item = CallItem(state, last_history_item, call_type, state_for_scoped_data, input_data,\n state.run_id)\n return self._push_item(last_history_item, return_item)", "response": "Adds a new call - history - item to the list of items that can be executed by the state 
machine."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef push_return_history_item(self, state, call_type, state_for_scoped_data, output_data=None):\n last_history_item = self.get_last_history_item()\n from rafcon.core.states.library_state import LibraryState # delayed imported on purpose\n if isinstance(state_for_scoped_data, LibraryState):\n state_for_scoped_data = state_for_scoped_data.state_copy\n return_item = ReturnItem(state, last_history_item, call_type, state_for_scoped_data, output_data,\n state.run_id)\n return self._push_item(last_history_item, return_item)", "response": "Adds a new return - history - item to the list of return - history items"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef push_concurrency_history_item(self, state, number_concurrent_threads):\n last_history_item = self.get_last_history_item()\n return_item = ConcurrencyItem(state, self.get_last_history_item(),\n number_concurrent_threads, state.run_id,\n self.execution_history_storage)\n return self._push_item(last_history_item, return_item)", "response": "Adds a new concurrency - history - item to the list of concurrency - history items that are currently in the execution history."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_hash_from_dict(obj_hash, object_):\n if isinstance(object_, Hashable):\n object_.update_hash(obj_hash)\n elif isinstance(object_, (list, set, tuple)):\n if isinstance(object_, set): # A set is not ordered\n object_ = sorted(object_)\n for element in object_:\n Hashable.update_hash_from_dict(obj_hash, element)\n elif isinstance(object_, dict):\n for key in sorted(object_.keys()): # A dict is not ordered\n Hashable.update_hash_from_dict(obj_hash, key)\n Hashable.update_hash_from_dict(obj_hash, object_[key])\n else:\n 
obj_hash.update(Hashable.get_object_hash_string(object_))", "response": "Updates an existing hash object with another Hashable list set tuple dict or stringifyable object."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate a hash with the immutable data fields of the object", "response": "def mutable_hash(self, obj_hash=None):\n \"\"\"Creates a hash with the (im)mutable data fields of the object\n\n Example:\n >>> my_obj = type(\"MyDerivedClass\", (Hashable,), { \"update_hash\": lambda self, h: h.update(\"RAFCON\") })()\n >>> my_obj_hash = my_obj.mutable_hash()\n >>> print('Hash: ' + my_obj_hash.hexdigest())\n Hash: c8b2e32dcb31c5282e4b9dbc6a9975b65bf59cd80a7cee66d195e320484df5c6\n\n :param obj_hash: The hash object (see Python hashlib)\n :return: The updated hash object\n \"\"\"\n if obj_hash is None:\n obj_hash = hashlib.sha256()\n self.update_hash(obj_hash)\n return obj_hash"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef emit(self, record):\n try:\n # Shorten the source name of the record (remove rafcon.)\n if sys.version_info >= (2, 7):\n record.__setattr__(\"name\", record.name.replace(\"rafcon.\", \"\"))\n msg = self.format(record)\n fs = \"%s\"\n try:\n ufs = u'%s'\n try:\n entry = ufs % msg\n except UnicodeEncodeError:\n entry = fs % msg\n except UnicodeError:\n entry = fs % msg\n\n for logging_view in self._logging_views.values():\n logging_view.print_message(entry, record.levelno)\n except (KeyboardInterrupt, SystemExit):\n raise\n except:\n self.handleError(record)", "response": "Logs a new record to all logging views."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the meta data editor for the specified editor.", "response": "def get_meta_data_editor(self, for_gaphas=True):\n \"\"\"Returns the editor for the specified editor\n\n This method should be used instead of accessing the meta data of an editor directly. 
It return the meta data\n of the editor available (with priority to the one specified by `for_gaphas`) and converts it if needed.\n\n :param bool for_gaphas: True (default) if the meta data is required for gaphas, False if for OpenGL\n :return: Meta data for the editor\n :rtype: Vividict\n \"\"\"\n meta_gaphas = self.meta['gui']['editor_gaphas']\n meta_opengl = self.meta['gui']['editor_opengl']\n assert isinstance(meta_gaphas, Vividict) and isinstance(meta_opengl, Vividict)\n\n # Use meta data of editor with more keys (typically one of the editors has zero keys)\n # TODO check if the magic length condition in the next line can be improved (consistent behavior getter/setter?)\n parental_conversion_from_opengl = self._parent and self._parent().temp['conversion_from_opengl']\n from_gaphas = len(meta_gaphas) > len(meta_opengl) or (len(meta_gaphas) == len(meta_opengl) and for_gaphas and\n not parental_conversion_from_opengl)\n # Convert meta data if meta data target and origin differ\n if from_gaphas and not for_gaphas:\n self.meta['gui']['editor_opengl'] = self._meta_data_editor_gaphas2opengl(meta_gaphas)\n elif not from_gaphas and for_gaphas:\n self.meta['gui']['editor_gaphas'] = self._meta_data_editor_opengl2gaphas(meta_opengl)\n\n # only keep meta data for one editor\n del self.meta['gui']['editor_opengl' if for_gaphas else 'editor_gaphas']\n return self.meta['gui']['editor_gaphas'] if for_gaphas else self.meta['gui']['editor_opengl']"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_meta_data_editor(self, key, meta_data, from_gaphas=True):\n self.do_convert_meta_data_if_no_data(from_gaphas)\n meta_gui = self.meta['gui']\n meta_gui = meta_gui['editor_gaphas'] if from_gaphas else meta_gui['editor_opengl']\n\n key_path = key.split('.')\n for key in key_path:\n if isinstance(meta_gui, list):\n meta_gui[int(key)] = meta_data\n break\n if key == key_path[-1]:\n meta_gui[key] = meta_data\n else:\n meta_gui = 
meta_gui[key]\n\n return self.get_meta_data_editor(for_gaphas=from_gaphas)", "response": "Sets the meta data for a specific key of the desired editor."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef meta_data_hash(self, obj_hash=None):\n if obj_hash is None:\n obj_hash = hashlib.sha256()\n self.update_meta_data_hash(obj_hash)\n return obj_hash", "response": "Creates a hash with the meta data of the model\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nprepare the model for destruction.", "response": "def prepare_destruction(self):\n \"\"\"Prepares the model for destruction\n\n \"\"\"\n self._Observer__PROP_TO_METHS.clear()\n self._Observer__METH_TO_PROPS.clear()\n self._Observer__PAT_TO_METHS.clear()\n self._Observer__METH_TO_PAT.clear()\n self._Observer__PAT_METH_TO_KWARGS.clear()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_col_rgba(color, transparency=None, opacity=None):\n r, g, b = color.red, color.green, color.blue\n # Convert from 0-6535 to 0-1\n r /= 65535.\n g /= 65535.\n b /= 65535.\n\n if transparency is not None or opacity is None:\n transparency = 0 if transparency is None else transparency # default value\n if transparency < 0 or transparency > 1:\n raise ValueError(\"Transparency must be between 0 and 1\")\n alpha = 1 - transparency\n else:\n if opacity < 0 or opacity > 1:\n raise ValueError(\"Opacity must be between 0 and 1\")\n alpha = opacity\n\n return r, g, b, alpha", "response": "This class converts a Gdk. 
Color into its r g b parts and adds an alpha according to needs\nAppsApps."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_side_length_of_resize_handle(view, item):\n from rafcon.gui.mygaphas.items.state import StateView, NameView\n if isinstance(item, StateView):\n return item.border_width * view.get_zoom_factor() / 1.5\n elif isinstance(item, NameView):\n return item.parent.border_width * view.get_zoom_factor() / 2.5\n return 0", "response": "Calculate the side length of a resize handle"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndraws label of scoped variable This method draws the label of a scoped variable connected to a data port. This is represented by drawing a bigger label where the top part is filled and the bottom part isn't. :param context: Draw Context :param Gdk.Color color: Color to draw the label in (border and background fill color) :param name_size: Size of the name labels (scoped variable and port name) combined :param handle_pos: Position of port which label is connected to :param port_side: Side on which the label should be drawn :param port_side_size: Size of port (to have a relative size) :param draw_connection_to_port: Whether there should be a line connecting the label to the port :return: Rotation Angle (to rotate names accordingly), X-Position of name labels start point, Y-Position of name labels start point", "response": "def draw_connected_scoped_label(context, color, name_size, handle_pos, port_side, port_side_size,\n draw_connection_to_port=False):\n \"\"\"Draw label of scoped variable\n\n This method draws the label of a scoped variable connected to a data port. 
This is represented by drawing a bigger\n label where the top part is filled and the bottom part isn't.\n\n :param context: Draw Context\n :param Gdk.Color color: Color to draw the label in (border and background fill color)\n :param name_size: Size of the name labels (scoped variable and port name) combined\n :param handle_pos: Position of port which label is connected to\n :param port_side: Side on which the label should be drawn\n :param port_side_size: Size of port (to have a relative size)\n :param draw_connection_to_port: Whether there should be a line connecting the label to the port\n :return: Rotation Angle (to rotate names accordingly), X-Position of name labels start point, Y-Position of name\n labels start point\n \"\"\"\n c = context.cairo\n c.set_line_width(port_side_size * .03)\n\n c.set_source_rgb(*color.to_floats())\n\n rot_angle = .0\n move_x = 0.\n move_y = 0.\n\n if port_side is SnappedSide.RIGHT:\n move_x = handle_pos.x + 2 * port_side_size\n move_y = handle_pos.y - name_size[1] / 2.\n\n c.move_to(move_x + name_size[0], move_y + name_size[1] / 2.)\n c.line_to(move_x + name_size[0], move_y)\n c.line_to(move_x, move_y)\n c.line_to(handle_pos.x + port_side_size, handle_pos.y)\n c.fill_preserve()\n c.stroke()\n if draw_connection_to_port:\n c.line_to(handle_pos.x + port_side_size / 2., handle_pos.y)\n c.line_to(handle_pos.x + port_side_size, handle_pos.y)\n else:\n c.move_to(handle_pos.x + port_side_size, handle_pos.y)\n c.line_to(move_x, move_y + name_size[1])\n c.line_to(move_x + name_size[0], move_y + name_size[1])\n c.line_to(move_x + name_size[0], move_y + name_size[1] / 2.)\n elif port_side is SnappedSide.BOTTOM:\n move_x = handle_pos.x + name_size[1] / 2.\n move_y = handle_pos.y + 2 * port_side_size\n rot_angle = pi / 2.\n\n c.move_to(move_x - name_size[1] / 2., move_y + name_size[0])\n c.line_to(move_x, move_y + name_size[0])\n c.line_to(move_x, move_y)\n c.line_to(handle_pos.x, move_y - port_side_size)\n c.fill_preserve()\n c.stroke()\n if 
draw_connection_to_port:\n c.line_to(handle_pos.x, handle_pos.y + port_side_size / 2.)\n c.line_to(handle_pos.x, move_y - port_side_size)\n else:\n c.move_to(handle_pos.x, move_y - port_side_size)\n c.line_to(move_x - name_size[1], move_y)\n c.line_to(move_x - name_size[1], move_y + name_size[0])\n c.line_to(move_x - name_size[1] / 2., move_y + name_size[0])\n elif port_side is SnappedSide.LEFT:\n move_x = handle_pos.x - 2 * port_side_size - name_size[0]\n move_y = handle_pos.y - name_size[1] / 2.\n\n c.move_to(move_x, move_y + name_size[1] / 2.)\n c.line_to(move_x, move_y)\n c.line_to(move_x + name_size[0], move_y)\n c.line_to(handle_pos.x - port_side_size, move_y + name_size[1] / 2.)\n c.fill_preserve()\n c.stroke()\n if draw_connection_to_port:\n c.line_to(handle_pos.x - port_side_size / 2., handle_pos.y)\n c.line_to(handle_pos.x - port_side_size, handle_pos.y)\n else:\n c.move_to(handle_pos.x - port_side_size, move_y + name_size[1] / 2.)\n c.line_to(move_x + name_size[0], move_y + name_size[1])\n c.line_to(move_x, move_y + name_size[1])\n c.line_to(move_x, move_y + name_size[1] / 2.)\n elif port_side is SnappedSide.TOP:\n move_x = handle_pos.x - name_size[1] / 2.\n move_y = handle_pos.y - 2 * port_side_size\n rot_angle = -pi / 2.\n\n c.move_to(move_x + name_size[1] / 2., move_y - name_size[0])\n c.line_to(move_x, move_y - name_size[0])\n c.line_to(move_x, move_y)\n c.line_to(handle_pos.x, move_y + port_side_size)\n c.fill_preserve()\n c.stroke()\n if draw_connection_to_port:\n c.line_to(handle_pos.x, handle_pos.y - port_side_size / 2.)\n c.line_to(handle_pos.x, move_y + port_side_size)\n else:\n c.move_to(handle_pos.x, move_y + port_side_size)\n c.line_to(move_x + name_size[1], move_y)\n c.line_to(move_x + name_size[1], move_y - name_size[0])\n c.line_to(move_x + name_size[1] / 2., move_y - name_size[0])\n c.stroke()\n\n return rot_angle, move_x, move_y"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef 
draw_port_label(context, port, transparency, fill, label_position, show_additional_value=False,\n additional_value=None, only_extent_calculations=False):\n \"\"\"Draws a normal label indicating the port name.\n\n :param context: Draw Context\n :param port: The PortView\n :param transparency: Transparency of the text\n :param fill: Whether the label should be filled or not\n :param label_position: Side on which the label should be drawn\n :param show_additional_value: Whether to show an additional value (for data ports)\n :param additional_value: The additional value to be shown\n :param only_extent_calculations: Calculate only the extends and do not actually draw\n \"\"\"\n c = context\n cairo_context = c\n if isinstance(c, CairoBoundingBoxContext):\n cairo_context = c._cairo\n\n # Gtk TODO\n # c.set_antialias(Antialias.GOOD)\n\n text = port.name\n label_color = get_col_rgba(port.fill_color, transparency)\n text_color = port.text_color\n port_height = port.port_size[1]\n\n port_position = c.get_current_point()\n\n layout = PangoCairo.create_layout(cairo_context)\n layout.set_text(text, -1)\n\n font_name = constants.INTERFACE_FONT\n font = FontDescription(font_name + \" \" + str(FONT_SIZE))\n layout.set_font_description(font)\n\n ink_extents, logical_extents = layout.get_extents()\n extents = [extent / float(SCALE) for extent in [logical_extents.x, logical_extents.y,\n logical_extents.width, logical_extents.height]]\n real_text_size = extents[2], extents[3]\n desired_height = port_height\n scale_factor = real_text_size[1] / desired_height\n\n # margin is the distance between the text and the border line\n margin = desired_height / 2.5\n arrow_height = desired_height\n # The real_text_size dimensions are rotated by 90 deg compared to the label, as the label is drawn upright\n text_size = desired_height, real_text_size[0] / scale_factor,\n text_size_with_margin = text_size[0] + 2 * margin, text_size[1] + 2 * margin + arrow_height\n port_distance = desired_height\n 
port_offset = desired_height / 2.\n\n if label_position is SnappedSide.RIGHT:\n label_angle = deg2rad(-90)\n text_angle = 0\n elif label_position is SnappedSide.BOTTOM:\n label_angle = 0\n text_angle = deg2rad(-90)\n elif label_position is SnappedSide.LEFT:\n label_angle = deg2rad(90)\n text_angle = 0\n else: # label_position is SnappedSide.TOP:\n label_angle = deg2rad(180)\n text_angle = deg2rad(90)\n\n # Draw (filled) outline of label\n c.move_to(*port_position)\n c.save()\n c.rotate(label_angle)\n draw_label_path(c, text_size_with_margin[0], text_size_with_margin[1], arrow_height, port_distance, port_offset)\n c.restore()\n\n c.set_line_width(port_height * .03)\n c.set_source_rgba(*label_color)\n label_extents = c.stroke_extents()\n if label_extents[0] == 0:\n label_extents = c.fill_extents()\n\n if only_extent_calculations:\n c.new_path()\n else:\n if fill:\n c.fill_preserve()\n c.stroke()\n\n # Move to the upper left corner of the desired text position\n c.save()\n c.move_to(*port_position)\n c.rotate(label_angle)\n c.rel_move_to(0, port_distance + arrow_height + 2 * margin)\n c.scale(1. / scale_factor, 1. / scale_factor)\n c.rel_move_to(-real_text_size[1] / 2 - extents[1], real_text_size[0] - extents[0])\n c.restore()\n\n # Show text in correct orientation\n c.save()\n c.rotate(text_angle)\n c.scale(1. / scale_factor, 1. 
/ scale_factor)\n # Correction for labels positioned right: as the text is mirrored, the anchor point must be moved\n if label_position is SnappedSide.RIGHT:\n c.rel_move_to(-real_text_size[0], -real_text_size[1])\n c.set_source_rgba(*get_col_rgba(text_color, transparency))\n PangoCairo.update_layout(cairo_context, layout)\n PangoCairo.show_layout(cairo_context, layout)\n c.restore()\n\n if show_additional_value:\n value_text = limit_value_string_length(additional_value)\n value_layout = PangoCairo.create_layout(cairo_context)\n value_layout.set_text(value_text, -1)\n value_layout.set_font_description(font)\n\n ink_extents, logical_extents = value_layout.get_extents()\n extents = [extent / float(SCALE) for extent in [logical_extents.x, logical_extents.y,\n logical_extents.width, logical_extents.height]]\n value_text_size = extents[2], real_text_size[1]\n\n # Move to the upper left corner of the additional value box\n c.save()\n c.move_to(*port_position)\n c.rotate(label_angle)\n c.rel_move_to(-text_size_with_margin[0] / 2., text_size_with_margin[1] + port_distance)\n # Draw rectangular path\n c.rel_line_to(text_size_with_margin[0], 0)\n c.rel_line_to(0, value_text_size[0] / scale_factor + 2 * margin)\n c.rel_line_to(-text_size_with_margin[0], 0)\n c.close_path()\n c.restore()\n\n value_extents = c.stroke_extents()\n\n if only_extent_calculations:\n c.new_path()\n else:\n # Draw filled outline\n c.set_source_rgba(*get_col_rgba(gui_config.gtk_colors['DATA_VALUE_BACKGROUND']))\n c.fill_preserve()\n c.set_source_rgb(*gui_config.gtk_colors['BLACK'].to_floats())\n c.stroke()\n\n # Move to the upper left corner of the desired text position\n c.save()\n c.move_to(*port_position)\n c.rotate(label_angle)\n c.rel_move_to(0, margin + text_size_with_margin[1] + port_distance)\n c.scale(1. / scale_factor, 1. 
/ scale_factor)\n c.rel_move_to(-real_text_size[1] / 2., value_text_size[0])\n c.restore()\n\n # Show text in correct orientation\n c.save()\n c.rotate(text_angle)\n c.scale(1. / scale_factor, 1. / scale_factor)\n # Correction for labels positioned right: as the text is mirrored, the anchor point must be moved\n if label_position is SnappedSide.RIGHT:\n c.rel_move_to(-value_text_size[0] - margin * scale_factor, -real_text_size[1])\n c.set_source_rgba(*get_col_rgba(gui_config.gtk_colors['SCOPED_VARIABLE_TEXT']))\n PangoCairo.update_layout(cairo_context, value_layout)\n PangoCairo.show_layout(cairo_context, value_layout)\n c.restore()\n\n label_extents = min(label_extents[0], value_extents[0]), min(label_extents[1], value_extents[1]), \\\n max(label_extents[2], value_extents[2]), max(label_extents[3], value_extents[3])\n\n return label_extents", "response": "Draws a normal label indicating the port name."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndraws the path for an upright label on the Cairo context.", "response": "def draw_label_path(context, width, height, arrow_height, distance_to_port, port_offset):\n \"\"\"Draws the path for an upright label\n\n :param context: The Cairo context\n :param float width: Width of the label\n :param float height: Height of the label\n :param float distance_to_port: Distance to the port related to the label\n :param float port_offset: Distance from the port center to its border\n :param bool draw_connection_to_port: Whether to draw a line from the tip of the label to the port\n \"\"\"\n c = context\n # The current point is the port position\n\n # Mover to outer border of state\n c.rel_move_to(0, port_offset)\n # Draw line to arrow tip of label\n c.rel_line_to(0, distance_to_port)\n\n # Line to upper left corner\n c.rel_line_to(-width / 2., arrow_height)\n # Line to lower left corner\n c.rel_line_to(0, height - arrow_height)\n # Line to lower right corner\n c.rel_line_to(width, 0)\n 
# Line to upper right corner\n c.rel_line_to(0, -(height - arrow_height))\n # Line to center top (tip of label)\n c.rel_line_to(-width / 2., -arrow_height)\n # Close path\n c.close_path()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef prepare_destruction(self, recursive=True):\n self.destruction_signal.emit()\n try:\n self.unregister_observer(self)\n except KeyError: # Might happen if the observer was already unregistered\n pass\n if recursive:\n\n if self.state_copy:\n self.state_copy.prepare_destruction(recursive)\n self.state_copy = None\n else:\n if self.state_copy_initialized:\n logger.verbose(\"Multiple calls of prepare destruction for {0}\".format(self))\n\n # The next lines are commented because not needed and create problems if used why it is an open to-do\n # for port in self.input_data_ports[:] + self.output_data_ports[:] + self.outcomes[:]:\n # if port.core_element is not None:\n # # TODO setting data ports None in a Library state cause gtkmvc3 attribute getter problems why?\n # port.prepare_destruction()\n\n del self.input_data_ports[:]\n del self.output_data_ports[:]\n del self.outcomes[:]\n self.state = None", "response": "Prepares the model for destruction."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _load_input_data_port_models(self):\n if not self.state_copy_initialized:\n return\n self.input_data_ports = []\n for input_data_port_m in self.state_copy.input_data_ports:\n new_ip_m = deepcopy(input_data_port_m)\n new_ip_m.parent = self\n new_ip_m.data_port = input_data_port_m.data_port\n self.input_data_ports.append(new_ip_m)", "response": "Reloads the input data port models directly from the state copy"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreloading the output data port models directly from the state copy", "response": "def _load_output_data_port_models(self):\n 
\"\"\"Reloads the output data port models directly from the the state\"\"\"\n if not self.state_copy_initialized:\n return\n self.output_data_ports = []\n for output_data_port_m in self.state_copy.output_data_ports:\n new_op_m = deepcopy(output_data_port_m)\n new_op_m.parent = self\n new_op_m.data_port = output_data_port_m.data_port\n self.output_data_ports.append(new_op_m)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreloads the income model directly from the state copy", "response": "def _load_income_model(self):\n \"\"\"Reloads the income model directly from the state\"\"\"\n if not self.state_copy_initialized:\n return\n self.income = None\n income_m = deepcopy(self.state_copy.income)\n income_m.parent = self\n income_m.income = income_m.income\n self.income = income_m"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreloads the outcome models directly from the state copy", "response": "def _load_outcome_models(self):\n \"\"\"Reloads the outcome models directly from the state\"\"\"\n if not self.state_copy_initialized:\n return\n self.outcomes = []\n for outcome_m in self.state_copy.outcomes:\n new_oc_m = deepcopy(outcome_m)\n new_oc_m.parent = self\n new_oc_m.outcome = outcome_m.outcome\n self.outcomes.append(new_oc_m)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nchecking if content of library is to be shown", "response": "def show_content(self):\n \"\"\"Check if content of library is to be shown\n \n Content is shown, if the uppermost state's meta flag \"show_content\" is True and the library hierarchy depth\n (up to MAX_VISIBLE_LIBRARY_HIERARCHY level) is not to high.\n \n :return: Whether the content is to be shown\n :rtype: bool\n \"\"\"\n current_hierarchy_depth = self.state.library_hierarchy_depth\n max_hierarchy_depth = global_gui_config.get_config_value(\"MAX_VISIBLE_LIBRARY_HIERARCHY\", 2)\n if current_hierarchy_depth >= 
max_hierarchy_depth:\n return False\n if current_hierarchy_depth > 1:\n uppermost_lib_state = self.state.get_uppermost_library_root_state().parent\n uppermost_lib_state_m = self.get_state_machine_m().get_state_model_by_path(uppermost_lib_state.get_path())\n else:\n uppermost_lib_state_m = self\n uppermost_lib_meta = uppermost_lib_state_m.meta\n return False if 'show_content' not in uppermost_lib_meta['gui'] else uppermost_lib_meta['gui']['show_content']"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef prepare_destruction(self):\n if self.core_element is None:\n logger.verbose(\"Multiple calls of prepare destruction for {0}\".format(self))\n self.destruction_signal.emit()\n try:\n self.unregister_observer(self)\n except KeyError: # Might happen if the observer was already unregistered\n pass\n super(StateElementModel, self).prepare_destruction()", "response": "Prepares the model for destruction. Unregisters the model from observing itself."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef model_changed(self, model, prop_name, info):\n if self.parent is not None:\n self.parent.model_changed(model, prop_name, info)", "response": "This method notifies the parent state about changes made to the state element\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef meta_changed(self, model, prop_name, info):\n if self.parent is not None:\n msg = info.arg\n # Add information about notification to the signal message\n notification = Notification(model, prop_name, info)\n msg = msg._replace(notification=notification)\n info.arg = msg\n self.parent.meta_changed(model, prop_name, info)", "response": "This method notifies the parent state about changes made to the meta data\n "} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function 
doing\ndef on_config_value_changed(self, config_m, prop_name, info):\n config_key = info['args'][1]\n\n if config_key in [\"EXECUTION_TICKER_ENABLED\"]:\n self.check_configuration()", "response": "Callback when a config value has been changed"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef disable(self):\n\n self.ticker_text_label.hide()\n if self.current_observed_sm_m:\n self.stop_sm_m_observation(self.current_observed_sm_m)", "response": "Relieve all state machines that have no active execution and hide the widget"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef on_state_execution_status_changed_after(self, model, prop_name, info):\n from rafcon.gui.utils.notification_overview import NotificationOverview\n from rafcon.core.states.state import State\n\n def name_and_next_state(state):\n assert isinstance(state, State)\n if state.is_root_state_of_library:\n return state.parent.parent, state.parent.name\n else:\n return state.parent, state.name\n\n def create_path(state, n=3, separator='/'):\n next_parent, name = name_and_next_state(state)\n path = separator + name\n n -= 1\n while n > 0 and isinstance(next_parent, State):\n next_parent, name = name_and_next_state(next_parent)\n path = separator + name + path\n n -= 1\n if isinstance(next_parent, State):\n path = separator + '..' 
+ path\n return path\n\n if 'kwargs' in info and 'method_name' in info['kwargs']:\n overview = NotificationOverview(info)\n if overview['method_name'][-1] == 'state_execution_status':\n active_state = overview['model'][-1].state\n assert isinstance(active_state, State)\n\n path_depth = rafcon.gui.singleton.global_gui_config.get_config_value(\"EXECUTION_TICKER_PATH_DEPTH\", 3)\n\n message = self._fix_text_of_label + create_path(active_state, path_depth)\n if rafcon.gui.singleton.main_window_controller.view is not None:\n self.ticker_text_label.set_text(message)\n else:\n logger.warn(\"Not initialized yet\")", "response": "Show current execution status in the widget"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef register_observer(self):\n self.execution_engine.add_observer(self, \"start\", notify_before_function=self.on_start)\n self.execution_engine.add_observer(self, \"pause\", notify_before_function=self.on_pause)\n self.execution_engine.add_observer(self, \"stop\", notify_before_function=self.on_stop)", "response": "Register all observable which are of interest\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef register_states_of_state_machine(self, state_machine):\n root = state_machine.root_state\n root.add_observer(self, \"state_execution_status\",\n notify_after_function=self.on_state_execution_status_changed_after)\n self.recursively_register_child_states(root)", "response": "This function registers all states of state machine."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef recursively_register_child_states(self, state):\n self.logger.info(\"Execution status observer add new state {}\".format(state))\n if isinstance(state, ContainerState):\n state.add_observer(self, \"add_state\",\n notify_after_function=self.on_add_state)\n for state in list(state.states.values()):\n 
self.recursively_register_child_states(state)\n state.add_observer(self, \"state_execution_status\",\n notify_after_function=self.on_state_execution_status_changed_after)\n\n if isinstance(state, LibraryState):\n self.recursively_register_child_states(state.state_copy)\n state.add_observer(self, \"state_execution_status\",\n notify_after_function=self.on_state_execution_status_changed_after)", "response": "A function tha registers recursively all child states of a state"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef on_add_state_machine_after(self, observable, return_value, args):\n self.logger.info(\"Execution status observer register new state machine sm_id: {}\".format(args[1].state_machine_id))\n self.register_states_of_state_machine(args[1])", "response": "This method is called when a state machine is added to the state machine manager"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef on_state_execution_status_changed_after(self, observable, return_value, args):\n self.logger.info(\"Execution status has changed for state '{0}' to status: {1}\"\n \"\".format(observable.get_path(by_name=True), observable.state_execution_status))", "response": "This function is called when the state machine execution status of a state machine has changed after the state machine execution status of a state machine is created."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef run(self):\n\n try:\n self._initialize_hierarchy()\n while self.child_state is not self:\n # print(\"hs1\", self.name)\n self.handling_execution_mode = True\n execution_mode = singleton.state_machine_execution_engine.handle_execution_mode(self, self.child_state)\n\n # in the case of starting the sm from a specific state not the transitions define the logic flow\n # but the the execution_engine.run_to_states; thus, do not alter the next state in this case\n 
if not self._start_state_modified:\n # check if e.g. the state machine was paused and the next state was modified (e.g. removed)\n self.check_if_child_state_was_modified()\n\n self.handling_execution_mode = False\n if self.state_execution_status is not StateExecutionStatus.EXECUTE_CHILDREN:\n self.state_execution_status = StateExecutionStatus.EXECUTE_CHILDREN\n\n # print(\"hs2\", self.name)\n\n self.backward_execution = False\n if self.preempted:\n if self.last_transition and self.last_transition.from_outcome == -2:\n logger.debug(\"Execute preemption handling for '{0}'\".format(self.child_state))\n else:\n break\n elif execution_mode == StateMachineExecutionStatus.BACKWARD:\n break_loop = self._handle_backward_execution_before_child_execution()\n if break_loop:\n break\n # This is only the case if this hierarchy-state is started in backward mode,\n # but the user directly switches to the forward execution mode\n if self.child_state is None:\n break\n\n # print(\"hs3\", self.name)\n\n self._execute_current_child()\n\n if self.backward_execution:\n # print(\"hs4\", self.name)\n break_loop = self._handle_backward_execution_after_child_execution()\n if break_loop:\n # print(\"hs4.1\", self.name)\n break\n else:\n # print(\"hs5\", self.name)\n break_loop = self._handle_forward_execution_after_child_execution()\n if break_loop:\n break\n # print(\"hs6\", self.name)\n return self._finalize_hierarchy()\n\n except Exception as e:\n logger.error(\"{0} had an internal error: {1}\\n{2}\".format(self, str(e), str(traceback.format_exc())))\n self.output_data[\"error\"] = e\n self.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE\n self.child_state = None\n self.last_child = None\n return self.finalize(Outcome(-1, \"aborted\"))", "response": "This function executes all the child states and stores the output data for the next state."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets up all data after receiving a backward 
execution step from the execution engine", "response": "def _handle_backward_execution_before_child_execution(self):\n \"\"\" Sets up all data after receiving a backward execution step from the execution engine\n :return: a flag to indicate if normal child state execution should abort\n \"\"\"\n self.backward_execution = True\n last_history_item = self.execution_history.pop_last_item()\n if last_history_item.state_reference is self:\n # if the the next child_state in the history is self exit this hierarchy-state\n if self.child_state:\n # do not set the last state to inactive before executing the new one\n self.child_state.state_execution_status = StateExecutionStatus.INACTIVE\n return True\n assert isinstance(last_history_item, ReturnItem)\n self.scoped_data = last_history_item.scoped_data\n self.child_state = last_history_item.state_reference\n return False"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _execute_current_child(self):\n\n self.child_state.input_data = self.get_inputs_for_state(self.child_state)\n self.child_state.output_data = self.create_output_dictionary_for_state(self.child_state)\n\n # process data of last state\n if self.last_error:\n self.child_state.input_data['error'] = copy.deepcopy(self.last_error)\n self.last_error = None\n if self.last_child:\n # do not set the last state to inactive before executing the new one\n self.last_child.state_execution_status = StateExecutionStatus.INACTIVE\n\n self.child_state.generate_run_id()\n if not self.backward_execution: # only add history item if it is not a backward execution\n self.execution_history.push_call_history_item(\n self.child_state, CallType.EXECUTE, self, self.child_state.input_data)\n self.child_state.start(self.execution_history, backward_execution=self.backward_execution,\n generate_run_id=False)\n\n self.child_state.join()\n\n # this line is important to indicate the parent the current execution status\n # it 
may also change during the execution of an hierarchy state\n # e.g. a hierarchy state may be started in forward execution mode but can leave in backward execution mode\n # print(self.child_state)\n # print(self.child_state.backward_execution)\n self.backward_execution = self.child_state.backward_execution\n # for h in self.execution_history._history_items:\n # print(h)\n\n if self.preempted:\n if self.backward_execution:\n # this is the case if the user backward step through its state machine and stops it\n # as preemption behaviour in backward mode is not defined, set the state to forward mode\n # to ensure clean state machine shutdown\n self.backward_execution = False\n\n # set last_error and self.last_child\n if self.child_state.final_outcome is not None: # final outcome can be None if only one state in a\n # hierarchy state is executed and immediately backward executed\n if self.child_state.final_outcome.outcome_id == -1: # if the child_state aborted save the error\n self.last_error = \"\"\n if 'error' in self.child_state.output_data:\n self.last_error = self.child_state.output_data['error']\n self.last_child = self.child_state", "response": "This function processes all data for a child state and executes it."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _handle_backward_execution_after_child_execution(self):\n self.child_state.state_execution_status = StateExecutionStatus.WAIT_FOR_NEXT_STATE\n # the item popped now from the history will be a CallItem and will contain the scoped data,\n # that was valid before executing the child_state\n last_history_item = self.execution_history.pop_last_item()\n assert isinstance(last_history_item, CallItem)\n # copy the scoped_data of the history from the point before the child_state was executed\n self.scoped_data = last_history_item.scoped_data\n\n # this is a look-ahead step to directly leave this hierarchy-state if the last child_state\n # was 
executed; this leads to the backward and forward execution of a hierarchy child_state\n # having the exact same number of steps\n last_history_item = self.execution_history.get_last_history_item()\n if last_history_item.state_reference is self:\n last_history_item = self.execution_history.pop_last_item()\n assert isinstance(last_history_item, CallItem)\n self.scoped_data = last_history_item.scoped_data\n self.child_state.state_execution_status = StateExecutionStatus.INACTIVE\n return True\n return False", "response": "Cleanup the former child state execution and prepare for the next state execution in the backward execution case."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nrotate tab labels of a given GTK Notebook container by 90 degrees and makes them detachable.", "response": "def rotate_and_detach_tab_labels(self):\n \"\"\"Rotates tab labels of a given notebook by 90 degrees and makes them detachable.\n\n :param notebook: GTK Notebook container, whose tab labels are to be rotated and made detachable\n \"\"\"\n icons = {'Libraries': constants.SIGN_LIB, 'States Tree': constants.ICON_TREE,\n 'Global Variables': constants.ICON_GLOB, 'Modification History': constants.ICON_HIST,\n 'Execution History': constants.ICON_EHIST, 'network': constants.ICON_NET}\n for notebook in self.left_bar_notebooks:\n for i in range(notebook.get_n_pages()):\n child = notebook.get_nth_page(i)\n tab_label = notebook.get_tab_label(child)\n tab_label_text = tab_label.get_text()\n notebook.set_tab_label(child, gui_helper_label.create_tab_header_label(tab_label_text, icons))\n notebook.set_tab_reorderable(child, True)\n notebook.set_tab_detachable(child, True)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nfinding tab with label tab_label in list of notebooks and set it to the current page.", "response": "def bring_tab_to_the_top(self, tab_label):\n \"\"\"Find tab with label tab_label in list of notebooks and set it to the current 
page.\n\n :param tab_label: String containing the label of the tab to be focused\n \"\"\"\n found = False\n for notebook in self.left_bar_notebooks:\n for i in range(notebook.get_n_pages()):\n if gui_helper_label.get_notebook_tab_title(notebook, i) == gui_helper_label.get_widget_title(tab_label):\n found = True\n break\n if found:\n notebook.set_current_page(i)\n break"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_transitions_from_selected_state_to_parent():\n task_string = \"create transition\"\n sub_task_string = \"to parent state\"\n\n selected_state_m, msg = get_selected_single_state_model_and_check_for_its_parent()\n if selected_state_m is None:\n logger.warning(\"Can not {0} {1}: {2}\".format(task_string, sub_task_string, msg))\n return\n logger.debug(\"Check to {0} {1} ...\".format(task_string, sub_task_string))\n state = selected_state_m.state\n parent_state = state.parent\n\n # find all possible from outcomes\n from_outcomes = get_all_outcomes_except_of_abort_and_preempt(state)\n\n # find lowest valid outcome id\n possible_oc_ids = [oc_id for oc_id in state.parent.outcomes.keys() if oc_id >= 0]\n possible_oc_ids.sort()\n to_outcome = state.parent.outcomes[possible_oc_ids[0]]\n\n oc_connected_to_parent = [oc for oc in from_outcomes if is_outcome_connect_to_state(oc, parent_state.state_id)]\n oc_not_connected = [oc for oc in from_outcomes if not state.parent.get_transition_for_outcome(state, oc)]\n if all(oc in oc_connected_to_parent for oc in from_outcomes):\n logger.info(\"Remove transition {0} because all outcomes are connected to it.\".format(sub_task_string))\n for from_outcome in oc_connected_to_parent:\n transition = parent_state.get_transition_for_outcome(state, from_outcome)\n parent_state.remove(transition)\n elif oc_not_connected:\n logger.debug(\"Create transition {0} ... 
\".format(sub_task_string))\n for from_outcome in from_outcomes:\n parent_state.add_transition(state.state_id, from_outcome.outcome_id,\n parent_state.state_id, to_outcome.outcome_id)\n else:\n if remove_transitions_if_target_is_the_same(from_outcomes):\n logger.info(\"Removed transitions origin from outcomes of selected state {0}\"\n \"because all point to the same target.\".format(sub_task_string))\n return add_transitions_from_selected_state_to_parent()\n logger.info(\"Will not create transition {0}: Not clear situation of connected transitions.\"\n \"There will be no transitions to other states be touched.\".format(sub_task_string))\n\n return True", "response": "Generates the default success transition of a state to its parent success"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngenerates the outcome transitions from outcomes with positive outcome_id to the closest next state", "response": "def add_transitions_to_closest_sibling_state_from_selected_state():\n \"\"\" Generates the outcome transitions from outcomes with positive outcome_id to the closest next state\n\n :return:\n \"\"\"\n task_string = \"create transition\"\n sub_task_string = \"to closest sibling state\"\n selected_state_m, msg = get_selected_single_state_model_and_check_for_its_parent()\n if selected_state_m is None:\n logger.warning(\"Can not {0} {1}: {2}\".format(task_string, sub_task_string, msg))\n return\n logger.debug(\"Check to {0} {1} ...\".format(task_string, sub_task_string))\n state = selected_state_m.state\n parent_state = state.parent\n\n # find closest other state to connect to -> to_state\n closest_sibling_state_tuple = gui_helper_meta_data.get_closest_sibling_state(selected_state_m, 'outcome')\n if closest_sibling_state_tuple is None:\n logger.info(\"Can not {0} {1}: There is no other sibling state.\".format(task_string, sub_task_string))\n return\n distance, sibling_state_m = closest_sibling_state_tuple\n to_state = 
sibling_state_m.state\n\n # find all possible from outcomes\n from_outcomes = get_all_outcomes_except_of_abort_and_preempt(state)\n\n from_oc_not_connected = [oc for oc in from_outcomes if not state.parent.get_transition_for_outcome(state, oc)]\n # all ports not connected connect to next state income\n if from_oc_not_connected:\n logger.debug(\"Create transition {0} ...\".format(sub_task_string))\n for from_outcome in from_oc_not_connected:\n parent_state.add_transition(state.state_id, from_outcome.outcome_id, to_state.state_id, None)\n # no transitions are removed if not all connected to the same other state\n else:\n target = remove_transitions_if_target_is_the_same(from_outcomes)\n if target:\n target_state_id, _ = target\n if not target_state_id == to_state.state_id:\n logger.info(\"Removed transitions from outcomes {0} \"\n \"because all point to the same target.\".format(sub_task_string.replace('closest ', '')))\n add_transitions_to_closest_sibling_state_from_selected_state()\n else:\n logger.info(\"Removed transitions from outcomes {0} \"\n \"because all point to the same target.\".format(sub_task_string))\n return True\n logger.info(\"Will not {0} {1}: Not clear situation of connected transitions.\"\n \"There will be no transitions to other states be touched.\".format(task_string, sub_task_string))\n\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds coordinates to nodes based on provided geom", "response": "def add_coordinates(network):\n \"\"\"\n Add coordinates to nodes based on provided geom\n\n Parameters\n ----------\n network : PyPSA network container\n\n Returns\n -------\n Altered PyPSA network container ready for plotting\n \"\"\"\n for idx, row in network.buses.iterrows():\n wkt_geom = to_shape(row['geom'])\n network.buses.loc[idx, 'x'] = wkt_geom.x\n network.buses.loc[idx, 'y'] = wkt_geom.y\n\n return network"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, 
explain what it does\ndef plot_line_loading(\n network,\n timesteps=range(1,2),\n filename=None,\n boundaries=[],\n arrows=False):\n \"\"\"\n Plots line loading as a colored heatmap.\n\n Line loading is displayed as relative to nominal capacity in %.\n \n Parameters\n ----------\n network : PyPSA network container\n Holds topology of grid including results from powerflow analysis\n timesteps : range \n Defines which timesteps are considered. If more than one, an \n average line loading is calculated.\n filename : str\n Specify filename\n If not given, figure will be show directly\n boundaries : list\n If given, the colorbar is fixed to a given min and max value\n arrows : bool\n If True, the direction of the power flows is displayed as \n arrows.\n \"\"\"\n # TODO: replace p0 by max(p0,p1) and analogously for q0\n # TODO: implement for all given snapshots\n\n # calculate relative line loading as S/S_nom\n # with S = sqrt(P^2 + Q^2)\n cmap = plt.cm.jet\n array_line = [['Line'] * len(network.lines), network.lines.index]\n array_link = [['Link'] * len(network.links), network.links.index]\n \n if network.lines_t.q0.empty:\n\n loading_lines = pd.Series((network.lines_t.p0.mul(\n network.snapshot_weightings, axis=0).loc[network.snapshots[\n timesteps]].abs().sum() / (network.lines.s_nom_opt)).data,\n index=array_line)\n\n else:\n loading_lines = pd.Series(((network.lines_t.p0.mul(\n network.snapshot_weightings, axis=0)\\\n .loc[network.snapshots[timesteps]].abs().sum() ** 2 +\\\n network.lines_t.q0.mul(\n network.snapshot_weightings, axis=0)\\\n .loc[network.snapshots[timesteps]].abs().sum() ** 2).\\\n apply(sqrt) / (network.lines.s_nom_opt)).data, index =\n array_line)\n\n # Aviod covering of bidirectional links\n network.links['linked_to'] = 0\n for i, row in network.links.iterrows():\n if not (network.links.index[(network.links.bus0 == row['bus1']) &\n (network.links.bus1 == row['bus0']) &\n (network.links.length == row['length']\n )]).empty:\n\n l = 
network.links.index[(network.links.bus0 == row['bus1']) &\n (network.links.bus1 == row['bus0']) &\n (network.links.length == row['length'])]\n\n network.links.set_value(i, 'linked_to',l.values[0])\n\n network.links.linked_to = network.links.linked_to.astype(str)\n link_load = network.links_t.p0[network.links.index[\n network.links.linked_to == '0']]\n\n for i, row in network.links[network.links.linked_to != '0'].iterrows():\n load = pd.DataFrame(index = network.links_t.p0.index,\n columns = ['to', 'from'])\n load['to'] = network.links_t.p0[row['linked_to']]\n load['from'] = network.links_t.p0[i]\n link_load[i] = load.abs().max(axis = 1)\n\n loading_links = pd.Series((link_load.mul(\n network.snapshot_weightings, axis=0).loc[network.snapshots[\n timesteps]].abs().sum()[network.links.index] / (\n network.links.p_nom_opt)).data, index=array_link).dropna()\n\n load_links_rel = (loading_links/ \n network.snapshot_weightings\\\n [network.snapshots[timesteps]].sum())* 100\n \n load_lines_rel = (loading_lines / network.snapshot_weightings\\\n [network.snapshots[timesteps]].sum()) * 100\n\n loading = load_lines_rel.append(load_links_rel)\n\n ll = network.plot(line_colors=loading, line_cmap=cmap,\n title=\"Line loading\", line_widths=0.55)\n # add colorbar, note mappable sliced from ll by [1]\n\n if not boundaries:\n v = np.linspace(min(loading), max(loading), 101)\n boundaries = [min(loading), max(loading)]\n \n else:\n v = np.linspace(boundaries[0], boundaries[1], 101)\n \n cb = plt.colorbar(ll[1], boundaries=v,\n ticks=v[0:101:10])\n cb_Link = plt.colorbar(ll[2], boundaries=v,\n ticks=v[0:101:10])\n\n cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n \n cb_Link.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n \n cb_Link.remove()\n \n cb.set_label('Line loading in %')\n\n if arrows:\n ax = plt.axes()\n path = ll[1].get_segments()\n x_coords_lines = np.zeros([len(path)])\n cmap = cmap\n colors = cmap(ll[1].get_array() / 100)\n for i in range(0, len(path)):\n 
x_coords_lines[i] = network.buses.loc[str(\n network.lines.iloc[i, 2]), 'x']\n color = colors[i]\n if (x_coords_lines[i] == path[i][0][0] and load_lines_rel[i] >= 0):\n arrowprops = dict(arrowstyle=\"->\", color=color)\n else:\n arrowprops = dict(arrowstyle=\"<-\", color=color)\n ax.annotate(\n \"\",\n xy=abs(\n (path[i][0] - path[i][1]) * 0.51 - path[i][0]),\n xytext=abs(\n (path[i][0] - path[i][1]) * 0.49 - path[i][0]),\n arrowprops=arrowprops,\n size=10)\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()", "response": "Plots the line loading of the given time series."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef plot_line_loading_diff(networkA, networkB, timestep=0):\n\n # new colormap to make sure 0% difference has the same color in every plot\n def shiftedColorMap(\n cmap,\n start=0,\n midpoint=0.5,\n stop=1.0,\n name='shiftedcmap'):\n '''\n Function to offset the \"center\" of a colormap. Useful for\n data with a negative min and positive max and you want the\n middle of the colormap's dynamic range to be at zero\n\n Input\n -----\n cmap : The matplotlib colormap to be altered\n start : Offset from lowest point in the colormap's range.\n Defaults to 0.0 (no lower ofset). Should be between\n 0.0 and `midpoint`.\n midpoint : The new center of the colormap. Defaults to\n 0.5 (no shift). Should be between 0.0 and 1.0. In\n general, this should be 1 - vmax/(vmax + abs(vmin))\n For example if your data range from -15.0 to +5.0 and\n you want the center of the colormap at 0.0, `midpoint`\n should be set to 1 - 5/(5 + 15)) or 0.75\n stop : Offset from highets point in the colormap's range.\n Defaults to 1.0 (no upper ofset). 
Should be between\n `midpoint` and 1.0.\n '''\n cdict = {\n 'red': [],\n 'green': [],\n 'blue': [],\n 'alpha': []\n }\n\n # regular index to compute the colors\n reg_index = np.linspace(start, stop, 257)\n\n # shifted index to match the data\n shift_index = np.hstack([\n np.linspace(0.0, midpoint, 128, endpoint=False),\n np.linspace(midpoint, 1.0, 129, endpoint=True)\n ])\n\n for ri, si in zip(reg_index, shift_index):\n r, g, b, a = cmap(ri)\n\n cdict['red'].append((si, r, r))\n cdict['green'].append((si, g, g))\n cdict['blue'].append((si, b, b))\n cdict['alpha'].append((si, a, a))\n\n newcmap = matplotlib.colors.LinearSegmentedColormap(name, cdict)\n plt.register_cmap(cmap=newcmap)\n\n return newcmap\n\n # calculate difference in loading between both networks\n loading_switches = abs(\n networkA.lines_t.p0.mul(networkA.snapshot_weightings, axis=0).\\\n loc[networkA.snapshots[timestep]].to_frame())\n loading_switches.columns = ['switch']\n loading_noswitches = abs(\n networkB.lines_t.p0.mul(networkB.snapshot_weightings, axis=0).\\\n loc[networkB.snapshots[timestep]].to_frame())\n loading_noswitches.columns = ['noswitch']\n diff_network = loading_switches.join(loading_noswitches)\n diff_network['noswitch'] = diff_network['noswitch'].fillna(\n diff_network['switch'])\n diff_network[networkA.snapshots[timestep]] \\\n = diff_network['switch'] - diff_network['noswitch']\n\n # get switches\n new_buses = pd.Series(index=networkA.buses.index.values)\n new_buses.loc[set(networkA.buses.index.values) -\n set(networkB.buses.index.values)] = 0.1\n new_buses = new_buses.fillna(0)\n\n # plot network with difference in loading and shifted colormap\n loading = (diff_network.loc[:, networkA.snapshots[timestep]] /\n (networkA.lines.s_nom)) * 100\n midpoint = 1 - max(loading) / (max(loading) + abs(min(loading)))\n shifted_cmap = shiftedColorMap(\n plt.cm.jet, midpoint=midpoint, name='shifted')\n ll = networkA.plot(line_colors=loading, line_cmap=shifted_cmap,\n title=\"Line loading\", 
bus_sizes=new_buses,\n bus_colors='blue', line_widths=0.55)\n\n cb = plt.colorbar(ll[1])\n cb.set_label('Difference in line loading in % of s_nom')", "response": "Plot difference in line loading between two networks."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nplots relative or absolute network extension of AC - and DC - lines.", "response": "def network_expansion(network, method = 'rel', ext_min=0.1,\n ext_width=False, filename=None, boundaries=[]):\n \"\"\"Plot relative or absolute network extension of AC- and DC-lines.\n \n Parameters\n ----------\n network: PyPSA network container\n Holds topology of grid including results from powerflow analysis\n method: str\n Choose 'rel' for extension relative to s_nom and 'abs' for \n absolute extensions. \n ext_min: float\n Choose minimum relative line extension shown in plot in p.u..\n ext_width: float or bool\n Choose if line_width respects line extension. Turn off with 'False' or\n set linear factor to decremise extension line_width.\n filename: str or None\n Save figure in this direction\n boundaries: array\n Set boundaries of heatmap axis\n \n \"\"\"\n\n cmap = plt.cm.jet\n\n overlay_network = network.copy()\n overlay_network.lines = overlay_network.lines[\n overlay_network.lines.s_nom_extendable & ((\n overlay_network.lines.s_nom_opt -\n overlay_network.lines.s_nom_min) /\n overlay_network.lines.s_nom >= ext_min)]\n overlay_network.links = overlay_network.links[\n overlay_network.links.p_nom_extendable & ((\n overlay_network.links.p_nom_opt -\n overlay_network.links.p_nom_min)/\n overlay_network.links.p_nom >= ext_min)]\n\n for i, row in overlay_network.links.iterrows():\n linked = overlay_network.links[(row['bus1'] ==\n overlay_network.links.bus0) & (\n row['bus0'] == overlay_network.links.bus1)]\n if not linked.empty:\n if row['p_nom_opt'] < linked.p_nom_opt.values[0]:\n overlay_network.links.p_nom_opt[i] = linked.p_nom_opt.values[0]\n\n array_line = [['Line'] * 
len(overlay_network.lines),\n overlay_network.lines.index]\n \n array_link = [['Link'] * len(overlay_network.links),\n overlay_network.links.index]\n \n if method == 'rel':\n\n extension_lines = pd.Series((100 *\n (overlay_network.lines.s_nom_opt -\n overlay_network.lines.s_nom_min) /\n overlay_network.lines.s_nom).data,\n index=array_line)\n\n extension_links = pd.Series((100 *\n (overlay_network.links.p_nom_opt -\n overlay_network.links.p_nom_min)/\n (overlay_network.links.p_nom)).data,\n index=array_link)\n if method == 'abs':\n extension_lines = pd.Series(\n (overlay_network.lines.s_nom_opt -\n overlay_network.lines.s_nom_min).data,\n index=array_line)\n\n extension_links = pd.Series(\n (overlay_network.links.p_nom_opt -\n overlay_network.links.p_nom_min).data,\n index=array_link)\n \n\n extension = extension_lines.append(extension_links)\n\n # Plot whole network in backgroud of plot\n network.plot(\n line_colors=pd.Series(\"grey\", index = [['Line'] * len(\n network.lines), network.lines.index]).append(\n pd.Series(\"grey\", index = [['Link'] * len(network.links),\n network.links.index])),\n bus_sizes=0,\n line_widths=pd.Series(0.5, index = [['Line'] * len(network.lines),\n network.lines.index]).append(\n pd.Series(0.55, index = [['Link'] * len(network.links),\n network.links.index])))\n\n if not ext_width:\n line_widths= pd.Series(0.8, index = array_line).append(\n pd.Series(0.8, index = array_link))\n \n else: \n line_widths= 0.5 + (extension / ext_width)\n \n ll = overlay_network.plot(\n line_colors=extension,\n line_cmap=cmap,\n bus_sizes=0,\n title=\"Optimized AC- and DC-line expansion\",\n line_widths=line_widths) \n\n if not boundaries:\n v = np.linspace(min(extension), max(extension), 101)\n boundaries = [min(extension), max(extension)]\n \n else:\n v = np.linspace(boundaries[0], boundaries[1], 101)\n \n if not extension_links.empty:\n cb_Link = plt.colorbar(ll[2], boundaries=v,\n ticks=v[0:101:10])\n cb_Link.set_clim(vmin=boundaries[0], 
vmax=boundaries[1])\n \n cb_Link.remove()\n \n cb = plt.colorbar(ll[1], boundaries=v,\n ticks=v[0:101:10], fraction=0.046, pad=0.04)\n \n cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n \n if method == 'rel':\n cb.set_label('line expansion relative to s_nom in %')\n if method == 'abs':\n cb.set_label('line expansion in MW')\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nplotting relative network expansion derivation of AC - and DC - lines.", "response": "def network_expansion_diff (networkA, networkB, filename=None, boundaries=[]):\n \"\"\"Plot relative network expansion derivation of AC- and DC-lines.\n \n Parameters\n ----------\n networkA: PyPSA network container\n Holds topology of grid including results from powerflow analysis\n networkB: PyPSA network container\n Holds topology of grid including results from powerflow analysis\n filename: str or None\n Save figure in this direction\n boundaries: array\n Set boundaries of heatmap axis\n \n \"\"\"\n\n cmap = plt.cm.jet\n \n array_line = [['Line'] * len(networkA.lines), networkA.lines.index]\n\n extension_lines = pd.Series(100 *\\\n ((networkA.lines.s_nom_opt - \\\n networkB.lines.s_nom_opt)/\\\n networkA.lines.s_nom_opt ).values,\\\n index=array_line)\n\n array_link = [['Link'] * len(networkA.links), networkA.links.index]\n\n extension_links = pd.Series(100 *\n ((networkA.links.p_nom_opt -\\\n networkB.links.p_nom_opt)/\\\n networkA.links.p_nom_opt).values,\\\n index=array_link)\n\n extension = extension_lines.append(extension_links)\n\n ll = networkA.plot(\n line_colors=extension,\n line_cmap=cmap,\n bus_sizes=0,\n title=\"Derivation of AC- and DC-line extension\",\n line_widths=2)\n\n if not boundaries:\n v = np.linspace(min(extension), max(extension), 101)\n boundaries = [min(extension).round(0), max(extension).round(0)]\n \n else:\n v = np.linspace(boundaries[0], boundaries[1], 101)\n \n if 
not extension_links.empty:\n cb_Link = plt.colorbar(ll[2], boundaries=v,\n ticks=v[0:101:10])\n cb_Link.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n \n cb_Link.remove()\n \n cb = plt.colorbar(ll[1], boundaries=v,\n ticks=v[0:101:10], fraction=0.046, pad=0.04)\n \n cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n cb.set_label('line extension derivation in %')\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nplot loading of lines in equivalten full load hours.", "response": "def full_load_hours(network, boundaries=[], filename=None, two_cb=False):\n \"\"\"Plot loading of lines in equivalten full load hours.\n \n Parameters\n ----------\n network: PyPSA network container\n Holds topology of grid including results from powerflow analysis\n filename: str or None\n Save figure in this direction\n boundaries: array\n Set boundaries of heatmap axis\n two_cb: bool\n Choose if an extra colorbar for DC-lines is plotted\n \n \"\"\"\n cmap = plt.cm.jet\n\n array_line = [['Line'] * len(network.lines), network.lines.index]\n\n load_lines = pd.Series(abs((network.lines_t.p0.mul(\n network.snapshot_weightings, axis=0).sum() /\n (network.lines.s_nom))).data, index=array_line)\n\n array_link = [['Link'] * len(network.links), network.links.index]\n\n load_links = pd.Series(abs((network.links_t.p0.mul(\n network.snapshot_weightings, axis=0).sum() /\n (network.links.p_nom))).data, index=array_link)\n\n load_hours = load_lines.append(load_links)\n\n ll = network.plot(line_colors=load_hours, line_cmap=cmap, bus_sizes=0,\n title=\"Full load-hours of lines\", line_widths=2)\n\n if not boundaries:\n cb = plt.colorbar(ll[1])\n cb_Link = plt.colorbar(ll[2])\n elif boundaries:\n v = np.linspace(boundaries[0], boundaries[1], 101)\n\n cb_Link = plt.colorbar(ll[2], boundaries=v,\n ticks=v[0:101:10])\n cb_Link.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n\n 
cb = plt.colorbar(ll[1], boundaries=v,\n ticks=v[0:101:10])\n cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n\n if two_cb:\n cb_Link.set_label('Number of full-load hours of DC-lines')\n cb.set_label('Number of full-load hours of AC-lines')\n\n else:\n cb.set_label('Number of full-load hours')\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nplotting maximal reactive line load.", "response": "def plot_q_flows(network):\n \"\"\"Plot maximal reactive line load. \n \n Parameters\n ----------\n network: PyPSA network container\n Holds topology of grid including results from powerflow analysis\n\n \"\"\"\n cmap_line = plt.cm.jet\n \n q_flows_max = abs(network.lines_t.q0.abs().max()/(network.lines.s_nom))\n \n ll = network.plot(line_colors = q_flows_max, line_cmap = cmap_line)\n boundaries = [min(q_flows_max), max(q_flows_max)]\n v = np.linspace(boundaries[0], boundaries[1], 101)\n\n cb = plt.colorbar(ll[1], boundaries=v,\n ticks=v[0:101:10])\n\n cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nplotting maximum loading of each line.", "response": "def max_load(network, boundaries=[], filename=None, two_cb=False):\n \n \"\"\"Plot maximum loading of each line. 
\n \n Parameters\n ----------\n network: PyPSA network container\n Holds topology of grid including results from powerflow analysis\n filename: str or None\n Save figure in this direction\n boundaries: array\n Set boundaries of heatmap axis\n two_cb: bool\n Choose if an extra colorbar for DC-lines is plotted\n \n \"\"\"\n\n cmap_line = plt.cm.jet\n cmap_link = plt.cm.jet\n array_line = [['Line'] * len(network.lines), network.lines.index]\n array_link = [['Link'] * len(network.links), network.links.index]\n\n if network.lines_t.q0.empty:\n load_lines = pd.Series((abs(network.lines_t.p0).max(\n ) / (network.lines.s_nom) * 100).data, index=array_line)\n\n else: load_lines = pd.Series(((network.lines_t.p0**2 +\n network.lines_t.q0 ** 2).max().apply(sqrt)/\n (network.lines.s_nom) * 100).data, index=array_line)\n \n load_links = pd.Series((abs(network.links_t.p0.max(\n ) / (network.links.p_nom)) * 100).data, index=array_link)\n\n max_load = load_lines.append(load_links)\n\n ll = network.plot(\n line_colors=max_load,\n line_cmap={\n 'Line': cmap_line,\n 'Link': cmap_link},\n bus_sizes=0,\n title=\"Maximum of line loading\",\n line_widths=2)\n\n if not boundaries:\n boundaries = [min(max_load), max(max_load)]\n\n v = np.linspace(boundaries[0], boundaries[1], 101)\n\n cb = plt.colorbar(ll[1], boundaries=v,\n ticks=v[0:101:10])\n\n cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n\n cb_Link = plt.colorbar(ll[2], boundaries=v,\n ticks=v[0:101:10])\n cb_Link.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n\n if two_cb:\n # cb_Link.set_label('Maximum load of DC-lines %')\n cb.set_label('Maximum load of AC-lines %')\n\n else:\n cb.set_label('Maximum load in %')\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nplots number of hours with line loading in selected range.", "response": "def load_hours(network, min_load=0.9, max_load=1, boundaries=[0, 
8760]):\n \n \"\"\"Plot number of hours with line loading in selected range. \n \n Parameters\n ----------\n network: PyPSA network container\n Holds topology of grid including results from powerflow analysis\n min_load: float\n Choose lower bound of relative load \n max_load: float\n Choose upper bound of relative load\n boundaries: array\n Set boundaries of heatmap axis \n \n \"\"\"\n \n cmap_line = plt.cm.jet\n cmap_link = plt.cm.jet\n array_line = [['Line'] * len(network.lines), network.lines.index]\n\n load_lines = pd.Series(((abs(network.lines_t.p0[(\n abs(network.lines_t.p0.mul(network.snapshot_weightings, axis=0)) / \n network.lines.s_nom_opt >= min_load) &\n (\n abs(network.lines_t.p0.mul(network.snapshot_weightings, axis=0)) / \n network.lines.s_nom_opt <= max_load)]) /\n abs(network.lines_t.p0[(\n abs(network.lines_t.p0) /\n network.lines.s_nom_opt >= min_load) &\n (abs(network.lines_t.p0) /\n network.lines.s_nom_opt <= max_load)]))\n .sum()).data, index=array_line)\n\n array_link = [['Link'] * len(network.links), network.links.index]\n\n load_links = pd.Series(((abs(network.links_t.p0[(\n abs(network.links_t.p0.mul(network.snapshot_weightings, axis=0)) / \n network.links.p_nom_opt >= min_load) &\n (\n abs(network.links_t.p0.mul(network.snapshot_weightings, axis=0)) / \n network.links.p_nom_opt <= max_load)]) /\n abs(network.links_t.p0[(\n abs(network.links_t.p0) /\n network.links.p_nom_opt >= min_load) &\n (abs(network.links_t.p0) /\n network.links.p_nom_opt <= max_load)]))\n .sum()).data, index=array_link)\n\n load_hours = load_lines.append(load_links)\n\n ll = network.plot(\n line_colors=load_hours,\n line_cmap={\n 'Line': cmap_line,\n 'Link': cmap_link},\n bus_sizes=0,\n title=\"Number of hours with more then 90% load\",\n line_widths=2)\n\n v1 = np.linspace(boundaries[0], boundaries[1], 101)\n v = np.linspace(boundaries[0], boundaries[1], 101)\n cb_Link = plt.colorbar(ll[2], boundaries=v1,\n ticks=v[0:101:10])\n cb_Link.set_clim(vmin=boundaries[0], 
vmax=boundaries[1])\n\n cb = plt.colorbar(ll[1], boundaries=v,\n ticks=v[0:101:10])\n cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n\n cb.set_label('Number of hours')"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nplots residual load summed of all exisiting buses.", "response": "def plot_residual_load(network):\n \"\"\" Plots residual load summed of all exisiting buses.\n\n Parameters\n ----------\n network : PyPSA network containter\n \"\"\"\n\n renewables = network.generators[\n network.generators.carrier.isin(['wind_onshore', 'wind_offshore', \n 'solar', 'run_of_river',\n 'wind'])]\n renewables_t = network.generators.p_nom[renewables.index] * \\\n network.generators_t.p_max_pu[renewables.index].mul(\n network.snapshot_weightings, axis=0)\n load = network.loads_t.p_set.mul(network.snapshot_weightings, axis=0).\\\n sum(axis=1)\n all_renew = renewables_t.sum(axis=1)\n residual_load = load - all_renew\n plot = residual_load.plot(\n title = 'Residual load',\n drawstyle='steps',\n lw=2,\n color='red',\n legend=False)\n plot.set_ylabel(\"MW\")\n # sorted curve\n sorted_residual_load = residual_load.sort_values(\n ascending=False).reset_index()\n plot1 = sorted_residual_load.plot(\n title='Sorted residual load',\n drawstyle='steps',\n lw=2,\n color='red',\n legend=False)\n plot1.set_ylabel(\"MW\")"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nplots the stacked sum of generators grouped by carrier type.", "response": "def plot_stacked_gen(network, bus=None, resolution='GW', filename=None):\n \"\"\"\n Plot stacked sum of generation grouped by carrier type\n\n\n Parameters\n ----------\n network : PyPSA network container\n bus: string\n Plot all generators at one specific bus. If none,\n sum is calulated for all buses\n resolution: string\n Unit for y-axis. 
Can be either GW/MW/KW\n\n Returns\n -------\n Plot\n \"\"\"\n if resolution == 'GW':\n reso_int = 1e3\n elif resolution == 'MW':\n reso_int = 1\n elif resolution == 'KW':\n reso_int = 0.001\n\n # sum for all buses\n if bus is None:\n p_by_carrier = pd.concat([network.generators_t.p[network.generators\n [network.generators.control != 'Slack'].index],\n network.generators_t.p.mul(\n network.snapshot_weightings, axis=0)\n [network.generators[network.generators.control == \n 'Slack'].index]\n .iloc[:, 0].apply(lambda x: x if x > 0 else 0)],\n axis=1)\\\n .groupby(network.generators.carrier, axis=1).sum()\n load = network.loads_t.p.sum(axis=1)\n if hasattr(network, 'foreign_trade'):\n trade_sum = network.foreign_trade.sum(axis=1)\n p_by_carrier['imports'] = trade_sum[trade_sum > 0]\n p_by_carrier['imports'] = p_by_carrier['imports'].fillna(0)\n # sum for a single bus\n elif bus is not None:\n filtered_gens = network.generators[network.generators['bus'] == bus]\n p_by_carrier = network.generators_t.p.mul(network.snapshot_weightings,\n axis=0).groupby(filtered_gens.carrier, axis=1).abs().sum()\n filtered_load = network.loads[network.loads['bus'] == bus]\n load = network.loads_t.p.mul(network.snapshot_weightings, axis=0)\\\n [filtered_load.index]\n\n colors = coloring()\n# TODO: column reordering based on available columns\n\n fig, ax = plt.subplots(1, 1)\n\n fig.set_size_inches(12, 6)\n colors = [colors[col] for col in p_by_carrier.columns]\n if len(colors) == 1:\n colors = colors[0]\n (p_by_carrier / reso_int).plot(kind=\"area\", ax=ax, linewidth=0,\n color=colors)\n (load / reso_int).plot(ax=ax, legend='load', lw=2, color='darkgrey',\n style='--')\n ax.legend(ncol=4, loc=\"upper left\")\n\n ax.set_ylabel(resolution)\n ax.set_xlabel(\"\")\n \n\n matplotlib.rcParams.update({'font.size': 22})\n\n\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the 
following Python 3 function\ndef plot_gen_diff(\n networkA,\n networkB,\n leave_out_carriers=[\n 'geothermal',\n 'oil',\n 'other_non_renewable',\n 'reservoir',\n 'waste']):\n \"\"\"\n Plot difference in generation between two networks grouped by carrier type\n\n\n Parameters\n ----------\n networkA : PyPSA network container with switches\n networkB : PyPSA network container without switches\n leave_out_carriers : list of carriers to leave out (default to all small\n carriers)\n\n Returns\n -------\n Plot\n \"\"\"\n def gen_by_c(network):\n gen = pd.concat([network.generators_t.p.mul(\n network.snapshot_weightings, axis=0)[network.generators\n [network.generators.control != 'Slack'].index],\n network.generators_t.p.mul(\n network.snapshot_weightings, axis=0)[network.generators\n [network. generators.control == 'Slack'].index]\n .iloc[:, 0].apply(lambda x: x if x > 0 else 0)],\n axis=1)\\\n .groupby(network.generators.carrier,axis=1).sum()\n return gen\n\n gen = gen_by_c(networkB)\n gen_switches = gen_by_c(networkA)\n diff = gen_switches - gen\n\n colors = coloring()\n diff.drop(leave_out_carriers, axis=1, inplace=True)\n colors = [colors[col] for col in diff.columns]\n\n plot = diff.plot(kind='line', color=colors, use_index=False)\n plot.legend(loc='upper left', ncol=5, prop={'size': 8})\n x = []\n for i in range(0, len(diff)):\n x.append(i)\n plt.xticks(x, x)\n plot.set_xlabel('Timesteps')\n plot.set_ylabel('Difference in Generation in MW')\n plot.set_title('Difference in Generation')\n plt.tight_layout()", "response": "Plot difference in generation between two networks grouped by carrier type and other non - renewable ones."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef plot_voltage(network, boundaries=[]):\n\n x = np.array(network.buses['x'])\n y = np.array(network.buses['y'])\n\n alpha = np.array(network.buses_t.v_mag_pu.loc[network.snapshots[0]])\n\n fig, ax = plt.subplots(1, 1)\n 
fig.set_size_inches(6, 4)\n cmap = plt.cm.jet\n if not boundaries:\n plt.hexbin(x, y, C=alpha, cmap=cmap, gridsize=100)\n cb = plt.colorbar()\n elif boundaries:\n v = np.linspace(boundaries[0], boundaries[1], 101)\n norm = matplotlib.colors.BoundaryNorm(v, cmap.N)\n plt.hexbin(x, y, C=alpha, cmap=cmap, gridsize=100, norm=norm)\n cb = plt.colorbar(boundaries=v, ticks=v[0:101:10], norm=norm)\n cb.set_clim(vmin=boundaries[0], vmax=boundaries[1])\n cb.set_label('Voltage Magnitude per unit of v_nom')\n\n network.plot(\n ax=ax, line_widths=pd.Series(0.5, network.lines.index), bus_sizes=0)\n plt.show()", "response": "Plot voltage at buses as hexbin"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nplot the curtailment of the selected carrier.", "response": "def curtailment(network, carrier='solar', filename=None):\n \"\"\"\n Plot curtailment of selected carrier\n\n\n Parameters\n ----------\n network : PyPSA network container\n Holds topology of grid including results from powerflow analysis\n carrier: str\n Plot curtailemt of this carrier\n filename: str or None\n Save figure in this direction\n \n\n Returns\n -------\n Plot\n \"\"\"\n p_by_carrier = network.generators_t.p.groupby\\\n (network.generators.carrier, axis=1).sum()\n capacity = network.generators.groupby(\"carrier\").sum().at[carrier, \"p_nom\"]\n p_available = network.generators_t.p_max_pu.multiply(\n network.generators[\"p_nom\"])\n p_available_by_carrier = p_available.groupby(\n network.generators.carrier, axis=1).sum()\n p_curtailed_by_carrier = p_available_by_carrier - p_by_carrier\n print(p_curtailed_by_carrier.sum())\n p_df = pd.DataFrame({carrier +\n \" available\": p_available_by_carrier[carrier],\n carrier +\n \" dispatched\": p_by_carrier[carrier], carrier +\n \" curtailed\": p_curtailed_by_carrier[carrier]})\n\n p_df[carrier + \" capacity\"] = capacity\n p_df[carrier + \" curtailed\"][p_df[carrier + \" curtailed\"] < 0.] 
= 0.\n\n fig, ax = plt.subplots(1, 1)\n fig.set_size_inches(12, 6)\n p_df[[carrier + \" dispatched\", carrier + \" curtailed\"]\n ].plot(kind=\"area\", ax=ax, linewidth=3)\n p_df[[carrier + \" available\", carrier + \" capacity\"]\n ].plot(ax=ax, linewidth=3)\n\n ax.set_xlabel(\"\")\n ax.set_ylabel(\"Power [MW]\")\n ax.set_ylim([0, capacity * 1.1])\n ax.legend()\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef storage_distribution(network, scaling=1, filename=None):\n\n stores = network.storage_units\n storage_distribution = network.storage_units.p_nom_opt[stores.index]\\\n .groupby(network.storage_units.bus)\\\n .sum().reindex(network.buses.index, fill_value=0.)\n\n fig, ax = plt.subplots(1, 1)\n fig.set_size_inches(6, 6)\n\n msd_max = storage_distribution.max()\n msd_median = storage_distribution[storage_distribution != 0].median()\n msd_min = storage_distribution[storage_distribution > 1].min()\n\n if msd_max != 0:\n LabelVal = int(log10(msd_max))\n else:\n LabelVal = 0\n if LabelVal < 0:\n LabelUnit = 'kW'\n msd_max, msd_median, msd_min = msd_max * \\\n 1000, msd_median * 1000, msd_min * 1000\n storage_distribution = storage_distribution * 1000\n elif LabelVal < 3:\n LabelUnit = 'MW'\n else:\n LabelUnit = 'GW'\n msd_max, msd_median, msd_min = msd_max / \\\n 1000, msd_median / 1000, msd_min / 1000\n storage_distribution = storage_distribution / 1000\n\n if sum(storage_distribution) == 0:\n network.plot(bus_sizes=0, ax=ax, title=\"No storages\")\n else:\n network.plot(\n bus_sizes=storage_distribution * scaling,\n ax=ax,\n line_widths=0.3,\n title=\"Storage distribution\")\n\n # Here we create a legend:\n # we'll plot empty lists with the desired size and label\n for area in [msd_max, msd_median, msd_min]:\n plt.scatter([], [], c='white', s=area * scaling,\n label='= ' + str(round(area, 0)) + LabelUnit + ' ')\n 
plt.legend(scatterpoints=1, labelspacing=1, title='Storage size')\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()", "response": "Plots storage distribution as circles on grid nodes."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef storage_expansion(network, basemap=True, scaling=1, filename=None):\n\n stores = network.storage_units[network.storage_units.carrier == \n 'extendable_storage']\n batteries = stores[stores.max_hours == 6]\n hydrogen = stores[stores.max_hours == 168]\n storage_distribution =\\\n network.storage_units.p_nom_opt[stores.index].groupby(\n network.storage_units.bus).sum().reindex(\n network.buses.index, fill_value=0.)\n battery_distribution =\\\n network.storage_units.p_nom_opt[batteries.index].groupby(\n network.storage_units.bus).sum().reindex(\n network.buses.index, fill_value=0.)\n hydrogen_distribution =\\\n network.storage_units.p_nom_opt[hydrogen.index].groupby(\n network.storage_units.bus).sum().reindex(\n network.buses.index, fill_value=0.)\n\n sbatt = network.storage_units.index[\n (network.storage_units.p_nom_opt > 1) & (\n network.storage_units.capital_cost > 10) & (\n network.storage_units.max_hours == 6)]\n shydr = network.storage_units.index[\n (network.storage_units.p_nom_opt > 1) & (\n network.storage_units.capital_cost > 10) & (\n network.storage_units.max_hours == 168)]\n\n fig, ax = plt.subplots(1, 1)\n fig.set_size_inches(6, 6)\n \n msd_max = storage_distribution.max()\n msd_max_bat = battery_distribution.max()\n msd_max_hyd = hydrogen_distribution.max()\n\n if msd_max != 0:\n LabelVal = int(log10(msd_max))\n else:\n LabelVal = 0\n if LabelVal < 0:\n LabelUnit = 'kW'\n msd_max, msd_max_bat, msd_max_hyd = msd_max * \\\n 1000, msd_max_bat * 1000, msd_max_hyd * 1000\n battery_distribution = battery_distribution * 1000\n hydrogen_distribution = hydrogen_distribution * 1000\n elif LabelVal < 3:\n LabelUnit = 'MW'\n else:\n LabelUnit = 'GW'\n 
msd_max, msd_max_bat, msd_max_hyd = msd_max / \\\n 1000, msd_max_bat / 1000, msd_max_hyd / 1000\n battery_distribution = battery_distribution / 1000\n hydrogen_distribution = hydrogen_distribution / 1000\n\n if network.storage_units.p_nom_opt[sbatt].sum() < 1 and\\\n network.storage_units.p_nom_opt[shydr].sum() < 1:\n print(\"No storage unit to plot\")\n elif network.storage_units.p_nom_opt[sbatt].sum() > 1 and\\\n network.storage_units.p_nom_opt[shydr].sum() < 1:\n network.plot(bus_sizes=battery_distribution * scaling, \n bus_colors='orangered', ax=ax, line_widths=0.3)\n elif network.storage_units.p_nom_opt[sbatt].sum() < 1 and\\\n network.storage_units.p_nom_opt[shydr].sum() > 1:\n network.plot(bus_sizes=hydrogen_distribution * scaling,\n bus_colors='teal', ax=ax, line_widths=0.3)\n else:\n network.plot(bus_sizes=battery_distribution * scaling,\n bus_colors='orangered', ax=ax, line_widths=0.3)\n network.plot(bus_sizes=hydrogen_distribution * scaling,\n bus_colors='teal', ax=ax, line_widths=0.3)\n\n if basemap and basemap_present:\n x = network.buses[\"x\"]\n y = network.buses[\"y\"]\n x1 = min(x)\n x2 = max(x)\n y1 = min(y)\n y2 = max(y)\n\n bmap = Basemap(resolution='l', epsg=network.srid, llcrnrlat=y1, urcrnrlat=y2, llcrnrlon=x1, urcrnrlon=x2, ax=ax)\n bmap.drawcountries()\n bmap.drawcoastlines()\n\n if msd_max_hyd !=0:\n plt.scatter([], [], c='teal', s=msd_max_hyd * scaling,\n label='= ' + str(round(msd_max_hyd, 0)) + LabelUnit + ' hydrogen storage')\n if msd_max_bat !=0:\n plt.scatter([], [], c='orangered', s=msd_max_bat * scaling,\n label='= ' + str(round(msd_max_bat, 0)) + LabelUnit + ' battery storage')\n plt.legend(scatterpoints=1, labelspacing=1, title='Storage size and technology', borderpad=1.3, loc=2)\n ax.set_title(\"Storage expansion\")\n\n \n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()\n\n return", "response": "Plots the storage expansion of the given network."} {"SOURCE": "codesearchnet", "instruction": 
"Implement a function in Python 3 to\ngenerate the difference between two sets of nodes in the generation distribution.", "response": "def gen_dist_diff(\n networkA,\n networkB,\n techs=None,\n snapshot=0,\n n_cols=3,\n gen_size=0.2,\n filename=None,\n buscmap=plt.cm.jet):\n \"\"\"\n Difference in generation distribution\n Green/Yellow/Red colors mean that the generation at a location\n is bigger with switches than without\n Blue colors mean that the generation at a location is smaller with switches\n than without\n \n Parameters\n ----------\n networkA : PyPSA network container\n Holds topology of grid with switches\n including results from powerflow analysis\n networkB : PyPSA network container\n Holds topology of grid without switches\n including results from powerflow analysis\n techs : dict\n type of technologies which shall be plotted\n snapshot : int\n snapshot\n n_cols : int\n number of columns of the plot\n gen_size : num\n size of generation bubbles at the buses\n filename : str\n Specify filename\n If not given, figure will be show directly\n \"\"\"\n if techs is None:\n techs = networkA.generators.carrier.unique()\n else:\n techs = techs\n\n n_graphs = len(techs)\n n_cols = n_cols\n\n if n_graphs % n_cols == 0:\n n_rows = n_graphs // n_cols\n else:\n n_rows = n_graphs // n_cols + 1\n\n fig, axes = plt.subplots(nrows=n_rows, ncols=n_cols)\n\n size = 4\n\n fig.set_size_inches(size * n_cols, size * n_rows)\n\n for i, tech in enumerate(techs):\n i_row = i // n_cols\n i_col = i % n_cols\n\n ax = axes[i_row, i_col]\n\n gensA = networkA.generators[networkA.generators.carrier == tech]\n gensB = networkB.generators[networkB.generators.carrier == tech]\n\n gen_distribution =\\\n networkA.generators_t.p.mul(networkA.snapshot_weightings, axis=0)\\\n [gensA.index].loc[networkA.snapshots[snapshot]].groupby(\n networkA.generators.bus).sum().reindex(\n networkA.buses.index, fill_value=0.) 
-\\\n networkB.generators_t.p.mul(networkB.snapshot_weightings, axis=0)\\\n [gensB.index].loc[networkB.snapshots[snapshot]].groupby(\n networkB.generators.bus).sum().reindex(\n networkB.buses.index, fill_value=0.)\n\n networkA.plot(\n ax=ax,\n bus_sizes=gen_size * abs(gen_distribution),\n bus_colors=gen_distribution,\n line_widths=0.1,\n bus_cmap=buscmap)\n\n ax.set_title(tech)\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef gen_dist(\n network,\n techs=None,\n snapshot=1,\n n_cols=3,\n gen_size=0.2,\n filename=None):\n \"\"\"\n Generation distribution\n\n Parameters\n ----------\n network : PyPSA network container\n Holds topology of grid including results from powerflow analysis\n techs : dict\n type of technologies which shall be plotted\n snapshot : int\n snapshot\n n_cols : int\n number of columns of the plot\n gen_size : num\n size of generation bubbles at the buses\n filename : str\n Specify filename\n If not given, figure will be show directly\n \"\"\"\n if techs is None:\n techs = network.generators.carrier.unique()\n else:\n techs = techs\n\n n_graphs = len(techs)\n n_cols = n_cols\n\n if n_graphs % n_cols == 0:\n n_rows = n_graphs // n_cols\n else:\n n_rows = n_graphs // n_cols + 1\n\n fig, axes = plt.subplots(nrows=n_rows, ncols=n_cols)\n\n size = 4\n\n fig.set_size_inches(size * n_cols, size * n_rows)\n\n for i, tech in enumerate(techs):\n i_row = i // n_cols\n i_col = i % n_cols\n\n ax = axes[i_row, i_col]\n\n gens = network.generators[network.generators.carrier == tech]\n gen_distribution = network.generators_t.p.mul(network.\n snapshot_weightings, axis=0)\\\n [gens.index].loc[network.snapshots[snapshot]].groupby(\n network.generators.bus).sum().reindex(\n network.buses.index, fill_value=0.)\n\n network.plot(\n ax=ax,\n bus_sizes=gen_size * gen_distribution,\n line_widths=0.1)\n\n ax.set_title(tech)\n 
if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()", "response": "Generate the distribution of the bus bubbles at a particular snapshot."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef nodal_gen_dispatch(\n network,\n networkB=None,\n techs=['wind_onshore', 'solar'],\n item='energy',\n direction=None,\n scaling=1,\n filename=None):\n \"\"\"\n Plot nodal dispatch or capacity. If networkB is given, difference in\n dispatch is plotted.\n \n Parameters\n ----------\n network : PyPSA network container\n Holds topology of grid including results from powerflow analysis\n networkB : PyPSA network container\n If given and item is 'energy', difference in dispatch between network \n and networkB is plotted. If item is 'capacity', networkB is ignored.\n default None\n techs : None or list, \n Techs to plot. If None, all techs are plotted.\n default ['wind_onshore', 'solar']\n item : str\n Specifies the plotted item. Options are 'energy' and 'capacity'.\n default 'energy'\n direction : str\n Only considered if networkB is given and item is 'energy'. Specifies\n the direction of change in dispatch between network and networkB.\n If 'positive', generation per tech which is higher in network than in \n networkB is plotted.\n If 'negative', generation per tech whcih is lower in network than \n in networkB is plotted.\n If 'absolute', total change per node is plotted. 
\n Green nodes have higher dispatch in network than in networkB.\n Red nodes have lower dispatch in network than in networkB.\n default None\n scaling : int\n Scaling to change plot sizes.\n default 1\n filename : path to folder\n \"\"\" \n \n if techs:\n gens = network.generators[network.generators.carrier.isin(techs)]\n elif techs is None:\n gens = network.generators\n techs = gens.carrier.unique()\n if item == 'capacity':\n dispatch = gens.p_nom.groupby([network.generators.bus, \n network.generators.carrier]).sum()\n elif item == 'energy':\n if networkB:\n dispatch_network =\\\n network.generators_t.p[gens.index].mul(\n network.snapshot_weightings, axis=0).groupby(\n [network.generators.bus, network.generators.carrier], \n axis=1).sum()\n dispatch_networkB =\\\n networkB.generators_t.p[gens.index].mul(\n networkB.snapshot_weightings, axis=0).groupby(\n [networkB.generators.bus, networkB.generators.carrier], \n axis=1).sum()\n dispatch = dispatch_network - dispatch_networkB\n \n if direction == 'positive':\n dispatch = dispatch[dispatch > 0].fillna(0)\n elif direction == 'negative':\n dispatch = dispatch[dispatch < 0].fillna(0)\n elif direction == 'absolute':\n pass\n else:\n return('No valid direction given.')\n dispatch = dispatch.sum()\n \n elif networkB is None:\n dispatch =\\\n network.generators_t.p[gens.index].mul(\n network.snapshot_weightings, axis=0).sum().groupby(\n [network.generators.bus, network.generators.carrier]).sum()\n \n fig, ax = plt.subplots(1, 1) \n scaling = 1/(max(abs(dispatch.groupby(level=0).sum())))*scaling\n if direction != 'absolute':\n colors = coloring()\n subcolors = {a: colors[a] for a in techs}\n dispatch = dispatch.abs() + 1e-9\n else:\n dispatch = dispatch.sum(level=0)\n colors = {s[0]: 'green' if s[1] > 0 else 'red' \n for s in dispatch.iteritems()}\n dispatch = dispatch.abs()\n subcolors = {'negative': 'red', 'positive': 'green'}\n \n network.plot(\n bus_sizes=dispatch * scaling,\n bus_colors=colors,\n line_widths=0.2,\n 
margin=0.01,\n ax=ax)\n\n fig.subplots_adjust(right=0.8)\n plt.subplots_adjust(wspace=0, hspace=0.001)\n\n patchList = []\n for key in subcolors:\n data_key = mpatches.Patch(color=subcolors[key], label=key)\n patchList.append(data_key)\n\n ax.legend(handles=patchList, loc='upper left')\n ax.autoscale()\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()\n\n return", "response": "Plot nodal dispatch or capacity."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef nodal_production_balance(\n network, \n snapshot='all', \n scaling=0.00001, \n filename=None):\n \"\"\"\n Plots the nodal difference between generation and consumption.\n \n Parameters\n ----------\n network : PyPSA network container\n Holds topology of grid including results from powerflow analysis\n snapshot : int or 'all'\n Snapshot to plot.\n default 'all'\n scaling : int\n Scaling to change plot sizes.\n default 0.0001\n filename : path to folder\n \n \"\"\"\n fig, ax = plt.subplots(1, 1)\n gen = network.generators_t.p.groupby(network.generators.bus, axis=1).sum()\n load = network.loads_t.p.groupby(network.loads.bus, axis=1).sum()\n \n if snapshot == 'all':\n diff = (gen - load).sum()\n else:\n timestep = network.snapshots[snapshot]\n diff = (gen - load).loc[timestep]\n \n colors = {s[0]: 'green' if s[1] > 0 else 'red' \n for s in diff.iteritems()}\n subcolors = {'Net Consumer': 'red', 'Net Producer': 'green'}\n diff = diff.abs()\n network.plot(\n bus_sizes=diff * scaling,\n bus_colors=colors,\n line_widths=0.2,\n margin=0.01,\n ax=ax)\n \n patchList = []\n for key in subcolors:\n data_key = mpatches.Patch(color=subcolors[key], label=key)\n patchList.append(data_key)\n\n ax.legend(handles=patchList, loc='upper left')\n ax.autoscale()\n if filename:\n plt.savefig(filename)\n plt.close()\n \n return", "response": "Plots the nodal difference between generation and consumption of a given node."} {"SOURCE": 
"codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef storage_p_soc(network, mean='1H', filename = None):\n\n sbatt = network.storage_units.index[(network.storage_units.p_nom_opt > 1)\n & (network.storage_units.capital_cost > 10) &\n (network.storage_units.max_hours == 6)]\n shydr = network.storage_units.index[(network.storage_units.p_nom_opt > 1) \n & (network.storage_units.capital_cost > 10) & \n (network.storage_units.max_hours == 168)]\n\n cap_batt = (network.storage_units.max_hours[sbatt] *\n network.storage_units.p_nom_opt[sbatt]).sum()\n cap_hydr = (network.storage_units.max_hours[shydr] * \n network.storage_units.p_nom_opt[shydr]).sum()\n\n fig, ax = plt.subplots(1, 1)\n\n if network.storage_units.p_nom_opt[sbatt].sum() < 1 and \\\n network.storage_units.p_nom_opt[shydr].sum() < 1:\n print(\"No storage unit to plot\")\n\n elif network.storage_units.p_nom_opt[sbatt].sum() > 1 and \\\n network.storage_units.p_nom_opt[shydr].sum() < 1:\n\n (network.storage_units_t.p[sbatt].resample(mean).mean().sum(axis=1) / \\\n network.storage_units.p_nom_opt[sbatt].sum()).plot(\n ax=ax, label=\"Battery dispatch\", color='orangered')\n # instantiate a second axes that shares the same x-axis\n ax2 = ax.twinx()\n ((network.storage_units_t.state_of_charge[sbatt].resample(mean).\\\n mean().sum(axis=1) / cap_batt)*100).plot(ax=ax2,\n label=\"Battery state of charge\", color='blue')\n elif network.storage_units.p_nom_opt[sbatt].sum() < 1 and\\\n network.storage_units.p_nom_opt[shydr].sum() > 1:\n (network.storage_units_t.p[shydr].resample(mean).mean().sum(axis=1) /\\\n network.storage_units.p_nom_opt[shydr].sum()).plot(\n ax=ax, label=\"Hydrogen dispatch\", color='teal')\n # instantiate a second axes that shares the same x-axis\n ax2 = ax.twinx()\n ((network.storage_units_t.state_of_charge[shydr].resample(mean).\\\n mean().sum(axis=1) / cap_hydr)*100).plot(\n ax=ax2, label=\"Hydrogen state of charge\", color='green')\n else:\n 
(network.storage_units_t.p[sbatt].resample(mean).mean().sum(axis=1) / \\\n network.storage_units.p_nom_opt[sbatt].sum()).plot(\n ax=ax, label=\"Battery dispatch\", color='orangered')\n \n (network.storage_units_t.p[shydr].resample(mean).mean().sum(axis=1) /\\\n network.storage_units.p_nom_opt[shydr].sum()).plot(\n ax=ax, label=\"Hydrogen dispatch\", color='teal') \n # instantiate a second axes that shares the same x-axis\n ax2 = ax.twinx()\n ((network.storage_units_t.state_of_charge[shydr].resample(mean).\\\n mean().sum(axis=1) / cap_hydr)*100).plot(\n ax=ax2, label=\"Hydrogen state of charge\", color='green')\n \n ((network.storage_units_t.state_of_charge[sbatt].resample(mean).\\\n mean().sum(axis=1) / cap_batt)*100).plot(\n ax=ax2, label=\"Battery state of charge\", color='blue')\n\n ax.set_xlabel(\"\")\n ax.set_ylabel(\"Storage dispatch in p.u. \\n <- charge - discharge ->\")\n ax2.set_ylabel(\"Storage state of charge in % \")\n ax2.set_ylim([0, 100])\n ax.set_ylim([-1,1])\n ax.legend(loc=2)\n ax2.legend(loc=1)\n ax.set_title(\"Storage dispatch and state of charge\")\n \n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()\n\n return", "response": "Plots the dispatch and state of charge of extendable storages."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nplotting the soc of extendable storages in a sorted order.", "response": "def storage_soc_sorted(network, filename = None):\n \"\"\"\n Plots the soc (state-pf-charge) of extendable storages\n \n Parameters\n ----------\n network : PyPSA network container\n Holds topology of grid including results from powerflow analysis\n\n filename : path to folder\n \n \"\"\"\n sbatt = network.storage_units.index[(network.storage_units.p_nom_opt>1) &\n (network.storage_units.capital_cost>10) & \n (network.storage_units.max_hours==6)]\n shydr = network.storage_units.index[(network.storage_units.p_nom_opt>1) &\n (network.storage_units.capital_cost>10)\n & 
(network.storage_units.max_hours==168)]\n\n cap_batt = (network.storage_units.max_hours[sbatt] * \n network.storage_units.p_nom_opt[sbatt]).sum()\n cap_hydr = (network.storage_units.max_hours[shydr] *\n network.storage_units.p_nom_opt[shydr]).sum()\n\n fig, ax = plt.subplots(1, 1)\n\n if network.storage_units.p_nom_opt[sbatt].sum() < 1 and \\\n network.storage_units.p_nom_opt[shydr].sum() < 1:\n print(\"No storage unit to plot\")\n elif network.storage_units.p_nom_opt[sbatt].sum() > 1 and \\\n network.storage_units.p_nom_opt[shydr].sum() < 1:\n (network.storage_units_t.p[sbatt].sum(axis=1).sort_values(\n ascending=False).reset_index() / \\\n network.storage_units.p_nom_opt[sbatt].sum())[0].plot(\n ax=ax, label=\"Battery storage\", color='orangered')\n elif network.storage_units.p_nom_opt[sbatt].sum() < 1 and \\\n network.storage_units.p_nom_opt[shydr].sum() > 1:\n (network.storage_units_t.p[shydr].sum(axis=1).sort_values(\n ascending=False).reset_index() / \\\n network.storage_units.p_nom_opt[shydr].sum())[0].plot(\n ax=ax, label=\"Hydrogen storage\", color='teal')\n else:\n (network.storage_units_t.p[sbatt].sum(axis=1).sort_values(\n ascending=False).reset_index() / \\\n network.storage_units.p_nom_opt[sbatt].sum())[0].plot(\n ax=ax, label=\"Battery storage\", color='orangered')\n (network.storage_units_t.p[shydr].sum(axis=1).sort_values(\n ascending=False).reset_index() / \\\n network.storage_units.p_nom_opt[shydr].sum())[0].plot(\n ax=ax, label=\"Hydrogen storage\", color='teal')\n\n ax.set_xlabel(\"\")\n ax.set_ylabel(\"Storage dispatch in p.u. 
\\n <- charge - discharge ->\")\n ax.set_ylim([-1.05,1.05])\n ax.legend()\n ax.set_title(\"Sorted duration curve of storage dispatch\")\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename,figsize=(3,4),bbox_inches='tight')\n plt.close()\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef register(self):\n # logger.debug(\"StateMachineEditionChangeHistory register state_machine old/new sm_id %s/%s\" %\n # (self.__my_selected_sm_id, self.model.selected_state_machine_id))\n\n # relieve old models\n if self.__my_selected_sm_id is not None: # no old models available\n self.relieve_model(self._selected_sm_model.history)\n\n if self.model.selected_state_machine_id is not None and global_gui_config.get_config_value('HISTORY_ENABLED'):\n\n # set own selected state machine id\n self.__my_selected_sm_id = self.model.selected_state_machine_id\n\n # observe new models\n self._selected_sm_model = self.model.state_machines[self.__my_selected_sm_id]\n if self._selected_sm_model.history:\n self.observe_model(self._selected_sm_model.history)\n self.update(None, None, None)\n else:\n logger.warning(\"The history is enabled but not generated {0}. 
{1}\"\n \"\".format(self._selected_sm_model.state_machine.state_machine_id,\n self._selected_sm_model.state_machine.file_system_path))\n else:\n if self.__my_selected_sm_id is not None:\n self.doing_update = True\n self.history_tree_store.clear()\n self.doing_update = False\n self.__my_selected_sm_id = None\n self._selected_sm_model = None", "response": "Registers the state machine that is observed for new selected states to the selected state machine."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nregisters callback methods for triggered actions", "response": "def register_actions(self, shortcut_manager):\n \"\"\"Register callback methods for triggered actions\n\n :param rafcon.gui.shortcut_manager.ShortcutManager shortcut_manager:\n \"\"\"\n shortcut_manager.add_callback_for_action(\"undo\", self.undo)\n shortcut_manager.add_callback_for_action(\"redo\", self.redo)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nundoing for selected state - machine.", "response": "def undo(self, key_value, modifier_mask):\n \"\"\"Undo for selected state-machine if no state-source-editor is open and focused in states-editor-controller.\n\n :return: True if a undo was performed, False if focus on source-editor.\n :rtype: bool\n \"\"\"\n # TODO re-organize as request to controller which holds source-editor-view or any parent to it\n for key, tab in gui_singletons.main_window_controller.get_controller('states_editor_ctrl').tabs.items():\n if tab['controller'].get_controller('source_ctrl') is not None and \\\n react_to_event(self.view, tab['controller'].get_controller('source_ctrl').view.textview,\n (key_value, modifier_mask)) or \\\n tab['controller'].get_controller('description_ctrl') is not None and \\\n react_to_event(self.view, tab['controller'].get_controller('description_ctrl').view.textview,\n (key_value, modifier_mask)):\n return False\n if self._selected_sm_model is not None:\n 
self._selected_sm_model.history.undo()\n return True\n else:\n logger.debug(\"Undo is not possible now as long as no state_machine is selected.\")"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef update(self, model, prop_name, info):\n # logger.debug(\"History changed %s\\n%s\\n%s\" % (model, prop_name, info))\n if self.parent is not None and hasattr(self.parent, \"focus_notebook_page_of_controller\"):\n # request focus -> which has not have to be satisfied\n self.parent.focus_notebook_page_of_controller(self)\n\n if self._selected_sm_model is None or self._selected_sm_model.history.fake or \\\n info is not None and info.method_name not in [\"insert_action\", \"undo\", \"redo\", \"reset\"]:\n return\n\n self.doing_update = True\n self.history_tree_store.clear()\n self.list_tree_iter = {}\n trail_actions = [action.version_id for action in self._selected_sm_model.history.modifications.single_trail_history()]\n\n def insert_this_action(action, parent_tree_item, init_branch=False):\n \"\"\" The function insert a action with respective arguments (e.g. 
method_name, instance) into a TreeStore.\n - final trail or tree specific gtk-tree-levels -> 'trail' has no levels as well as 'branch' without tree\n - defines which element is marked as active\n - generate branch labels with version-id\n \"\"\"\n model = action.before_overview['model'][-1]\n method_name = action.before_overview['method_name'][-1]\n instance = action.before_overview['instance'][-1]\n parameters = []\n tool_tip = None\n if action.before_overview['type'] == 'signal':\n if isinstance(action.before_overview['signal'][0], MetaSignalMsg):\n # logger.info(action.before_overview._overview_dict)\n parameters.append(str(action.meta))\n elif isinstance(action.before_overview['signal'][-1], ActionSignalMsg):\n if action.action_type not in ['paste', 'cut']:\n parameters.append(str(action.before_overview['signal'][-1].kwargs))\n else:\n logger.warning(\"no parameters defined for signal: {0}\".format(action.before_overview['signal']))\n # for index, signal in enumerate(action.before_overview['signal']):\n # print(\"\\n\", index, signal)\n else:\n for index, value in enumerate(action.before_overview['args'][-1]):\n if not index == 0:\n parameters.append(str(value))\n for name, value in action.before_overview['kwargs'][-1].items():\n parameters.append(\"{0}: {1}\".format(name, value))\n\n if hasattr(action, 'action_type') and action.action_type == \"script_text\" and hasattr(action, 'script_diff'):\n line = \"\"\n for elem in action.script_diff.split('\\n'):\n line = elem\n if not line.replace(' ', '') == '' and (line.startswith('+') or line.startswith('-')):\n break\n tool_tip = action.script_diff\n parameters = [line] # + \"\\n -> [hover for source script diff in tooltip.]\"]\n if hasattr(action, 'action_type') and action.action_type == \"description\" and hasattr(action, 'description_diff'):\n line = \"\"\n for elem in action.description_diff.split('\\n'):\n line = elem\n if not line.replace(' ', '') == '' and (line.startswith('+') or line.startswith('-')):\n 
break\n tool_tip = action.description_diff\n parameters = [line] # + \"\\n -> [hover for source script diff in tooltip.]\"]\n\n # find active actions in to be marked in view\n if self._mode == 'trail':\n active = len(self.history_tree_store) <= self._selected_sm_model.history.modifications.trail_pointer\n else:\n all_active = self._selected_sm_model.history.modifications.get_all_active_actions()\n active = action.version_id in all_active\n\n # generate label to mark branches\n version_label = action.version_id\n if init_branch:\n version_label = 'b.' + str(action.version_id)\n tool_tip = \"The element '{0}' starts a new branch of actions.\".format(version_label)\n\n tree_row_iter = self.new_change(model, str(method_name).replace('_', ' '), instance, info, version_label, active,\n parent_tree_item, ', '.join(parameters), tool_tip)\n self.list_tree_iter[action.version_id] = (tree_row_iter, parent_tree_item)\n return tree_row_iter\n\n def insert_all_next_actions(version_id, parent_tree_item=None):\n \"\"\" The function defines linkage of history-tree-elements in a gtk-tree-view to create a optimal overview.\n \"\"\"\n if len(self._selected_sm_model.history.modifications.all_time_history) == 1:\n return\n next_id = version_id\n while next_id is not None:\n # print(next_id, len(self._selected_sm_model.history.modifications.all_time_history))\n version_id = next_id\n history_tree_elem = self._selected_sm_model.history.modifications.all_time_history[next_id]\n next_id = history_tree_elem.next_id\n prev_tree_elem = None\n if history_tree_elem.prev_id is not None:\n prev_tree_elem = self._selected_sm_model.history.modifications.all_time_history[history_tree_elem.prev_id]\n action = history_tree_elem.action\n # logger.info(\"prev branch #{0} <-> {1}, trail_actions are {2}\".format(history_tree_elem.action.version_id, prev_tree_elem, trail_actions))\n in_trail = history_tree_elem.action.version_id in trail_actions\n if in_trail:\n # in trail parent is always None\n if 
prev_tree_elem is not None and prev_tree_elem.old_next_ids:\n child_tree_item = insert_this_action(action, None, init_branch=True)\n else:\n child_tree_item = insert_this_action(action, None)\n elif prev_tree_elem is not None and prev_tree_elem.old_next_ids:\n # branch and not trail -> level + 1 ... child of prev_id -> parent_iter is prev_id_iter\n prev_id_iter = self.list_tree_iter[prev_tree_elem.action.version_id][0]\n child_tree_item = insert_this_action(action, prev_id_iter, init_branch=True)\n elif prev_tree_elem is not None and not prev_tree_elem.old_next_ids:\n # no branch and not trail\n prev_prev_tree_elem = self._selected_sm_model.history.modifications.all_time_history[prev_tree_elem.prev_id]\n branch_limit_for_extra_ident_level = 1 if prev_tree_elem.prev_id in trail_actions else 0\n if len(prev_prev_tree_elem.old_next_ids) > branch_limit_for_extra_ident_level:\n # -> level + 1 as previous element, because to many branches -> so prev_id iter as parent_iter\n iter_of_prev_id = self.list_tree_iter[prev_tree_elem.action.version_id][0]\n child_tree_item = insert_this_action(action, iter_of_prev_id)\n else:\n # -> same level as previous element -> so same parent_iter as prev_id\n parent_iter_of_prev_id = self.list_tree_iter[prev_tree_elem.action.version_id][1]\n child_tree_item = insert_this_action(action, parent_iter_of_prev_id)\n else:\n logger.warning(\"relative level could not be found -> this should not happen\")\n child_tree_item = insert_this_action(action, parent_tree_item)\n\n if history_tree_elem.old_next_ids and self._mode == 'branch':\n old_next_ids = history_tree_elem.old_next_ids\n for old_next_id in old_next_ids:\n insert_all_next_actions(old_next_id, child_tree_item)\n\n insert_all_next_actions(version_id=0)\n\n # set selection of Tree\n if self._selected_sm_model.history.modifications.trail_pointer is not None and len(self.history_tree_store) > 1:\n searched_row_version_id = 
self._selected_sm_model.history.modifications.single_trail_history()[self._selected_sm_model.history.modifications.trail_pointer].version_id\n row_number = 0\n for action_entry in self.history_tree_store:\n # compare action.version_id\n if int(action_entry[1].split('.')[-1]) == searched_row_version_id:\n self.view['history_tree'].set_cursor(row_number)\n break\n row_number += 1\n\n # set colors of Tree\n # - is state full and all element which are open to be re-done gray\n self.doing_update = False", "response": "This method updates the history of the treeview."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef exception_message(self) -> Union[str, None]:\n if self.has_error:\n exception_data = self._raw.get(\"exception\", {})\n return exception_data.get(\"message\")\n return None", "response": "Return the exception message."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nexecutes a loadtracks request. Only works on Lavalink V3.", "response": "async def load_tracks(self, query) -> LoadResult:\n \"\"\"\n Executes a loadtracks request. Only works on Lavalink V3.\n\n Parameters\n ----------\n query : str\n\n Returns\n -------\n LoadResult\n \"\"\"\n self.__check_node_ready()\n url = self._uri + quote(str(query))\n\n data = await self._get(url)\n\n if isinstance(data, dict):\n return LoadResult(data)\n elif isinstance(data, list):\n modified_data = {\n \"loadType\": LoadType.V2_COMPAT,\n \"tracks\": data\n }\n return LoadResult(modified_data)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets tracks from lavalink.", "response": "async def get_tracks(self, query) -> Tuple[Track, ...]:\n \"\"\"\n Gets tracks from lavalink.\n\n Parameters\n ----------\n query : str\n\n Returns\n -------\n Tuple[Track, ...]\n \"\"\"\n if not self._warned:\n log.warn(\"get_tracks() is now deprecated. 
Please switch to using load_tracks().\")\n self._warned = True\n result = await self.load_tracks(query)\n return result.tracks"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a tuple which can be used for setup. py s data_files", "response": "def get_data_files_tuple(*rel_path, **kwargs):\n \"\"\"Return a tuple which can be used for setup.py's data_files\n\n :param tuple path: List of path elements pointing to a file or a directory of files\n :param dict kwargs: Set path_to_file to True is `path` points to a file\n :return: tuple of install directory and list of source files\n :rtype: tuple(str, [str])\n \"\"\"\n rel_path = os.path.join(*rel_path)\n target_path = os.path.join(\"share\", *rel_path.split(os.sep)[1:]) # remove source/ (package_dir)\n if \"path_to_file\" in kwargs and kwargs[\"path_to_file\"]:\n source_files = [rel_path]\n target_path = os.path.dirname(target_path)\n else:\n source_files = [os.path.join(rel_path, filename) for filename in os.listdir(rel_path)]\n return target_path, source_files"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding all files of the specified path to a list of data files compatible", "response": "def get_data_files_recursively(*rel_root_path, **kwargs):\n \"\"\" Adds all files of the specified path to a data_files compatible list\n\n :param tuple rel_root_path: List of path elements pointing to a directory of files\n :return: list of tuples of install directory and list of source files\n :rtype: list(tuple(str, [str]))\n \"\"\"\n result_list = list()\n rel_root_dir = os.path.join(*rel_root_path)\n share_target_root = os.path.join(\"share\", kwargs.get(\"share_target_root\", \"rafcon\"))\n distutils.log.debug(\"recursively generating data files for folder '{}' ...\".format(\n rel_root_dir))\n\n for dir_, _, files in os.walk(rel_root_dir):\n relative_directory = os.path.relpath(dir_, rel_root_dir)\n file_list = list()\n for fileName in files:\n rel_file_path = 
os.path.join(relative_directory, fileName)\n abs_file_path = os.path.join(rel_root_dir, rel_file_path)\n file_list.append(abs_file_path)\n if len(file_list) > 0:\n # this is a valid path in ~/.local folder: e.g. share/rafcon/libraries/generic/wait\n target_path = os.path.join(share_target_root, relative_directory)\n result_list.append((target_path, file_list))\n return result_list"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef generate_data_files():\n assets_folder = path.join('source', 'rafcon', 'gui', 'assets')\n share_folder = path.join(assets_folder, 'share')\n themes_folder = path.join(share_folder, 'themes', 'RAFCON')\n examples_folder = path.join('share', 'examples')\n libraries_folder = path.join('share', 'libraries')\n\n gui_data_files = [\n get_data_files_tuple(assets_folder, 'splashscreens'),\n get_data_files_tuple(assets_folder, 'fonts', 'FontAwesome'),\n get_data_files_tuple(assets_folder, 'fonts', 'Source Sans Pro'),\n get_data_files_tuple(themes_folder, 'gtk-3.0', 'gtk.css', path_to_file=True),\n get_data_files_tuple(themes_folder, 'gtk-3.0', 'gtk-dark.css', path_to_file=True),\n get_data_files_tuple(themes_folder, 'assets'),\n get_data_files_tuple(themes_folder, 'sass'),\n get_data_files_tuple(themes_folder, 'gtk-sourceview'),\n get_data_files_tuple(themes_folder, 'colors.json', path_to_file=True),\n get_data_files_tuple(themes_folder, 'colors-dark.json', path_to_file=True)\n ]\n # print(\"gui_data_files\", gui_data_files)\n\n icon_data_files = get_data_files_recursively(path.join(share_folder, 'icons'), share_target_root=\"icons\")\n # print(\"icon_data_files\", icon_data_files)\n\n locale_data_files = create_mo_files()\n # example tuple\n # locale_data_files = [('share/rafcon/locale/de/LC_MESSAGES', ['source/rafcon/locale/de/LC_MESSAGES/rafcon.mo'])]\n # print(\"locale_data_files\", locale_data_files)\n\n version_data_file = [(\"./\", [\"VERSION\"])]\n desktop_data_file = [(\"share/applications\", 
[path.join('share', 'applications', 'de.dlr.rm.RAFCON.desktop')])]\n\n examples_data_files = get_data_files_recursively(examples_folder, share_target_root=path.join(\"rafcon\", \"examples\"))\n libraries_data_files = get_data_files_recursively(libraries_folder, share_target_root=path.join(\"rafcon\",\n \"libraries\"))\n generated_data_files = gui_data_files + icon_data_files + locale_data_files + version_data_file + \\\n desktop_data_file + examples_data_files + libraries_data_files\n # for elem in generated_data_files:\n # print(elem)\n return generated_data_files", "response": "Generate the data_files list used in the setup function\n "} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef extendable(network, args, line_max):\n\n \"\"\"\n Function that sets selected components extendable\n\n 'network' for all lines, links and transformers\n 'german_network' for all lines, links and transformers located in Germany\n 'foreign_network' for all foreign lines, links and transformers\n 'transformers' for all transformers\n 'storages' for extendable storages\n 'overlay_network' for lines, links and trafos in extension scenerio(s)\n \n Parameters\n ----------\n network : :class:`pypsa.Network\n Overall container of PyPSA\n args : dict\n Arguments set in appl.py\n\n\n Returns\n -------\n network : :class:`pypsa.Network\n Overall container of PyPSA\n \"\"\"\n\n if 'network' in args['extendable']:\n network.lines.s_nom_extendable = True\n network.lines.s_nom_min = network.lines.s_nom\n \n if not line_max==None:\n network.lines.s_nom_max = line_max * network.lines.s_nom\n \n else:\n network.lines.s_nom_max = float(\"inf\")\n\n if not network.transformers.empty:\n network.transformers.s_nom_extendable = True\n network.transformers.s_nom_min = network.transformers.s_nom\n\n if not line_max==None:\n network.transformers.s_nom_max =\\\n line_max * network.transformers.s_nom\n \n else:\n network.transformers.s_nom_max = 
float(\"inf\")\n\n if not network.links.empty:\n network.links.p_nom_extendable = True\n network.links.p_nom_min = network.links.p_nom\n network.links.p_nom_max = float(\"inf\")\n if not line_max==None:\n network.links.p_nom_max=\\\n line_max * network.links.p_nom\n \n else:\n network.links.p_nom_max = float(\"inf\")\n\n network = set_line_costs(network, args)\n network = set_trafo_costs(network, args)\n\n if 'german_network' in args['extendable']:\n buses = network.buses[~network.buses.index.isin(\n buses_by_country(network).index)]\n network.lines.loc[(network.lines.bus0.isin(buses.index)) &\n (network.lines.bus1.isin(buses.index)),\n 's_nom_extendable'] = True\n network.lines.loc[(network.lines.bus0.isin(buses.index)) &\n (network.lines.bus1.isin(buses.index)),\n 's_nom_min'] = network.lines.s_nom\n network.lines.loc[(network.lines.bus0.isin(buses.index)) &\n (network.lines.bus1.isin(buses.index)),\n 's_nom_max'] = float(\"inf\")\n \n if not line_max==None:\n network.lines.loc[(network.lines.bus0.isin(buses.index)) &\n (network.lines.bus1.isin(buses.index)),\n 's_nom_max'] = line_max * network.lines.s_nom\n \n else:\n network.lines.loc[(network.lines.bus0.isin(buses.index)) &\n (network.lines.bus1.isin(buses.index)),\n 's_nom_max'] = float(\"inf\")\n\n if not network.transformers.empty:\n network.transformers.loc[network.transformers.bus0.isin(\n buses.index),'s_nom_extendable'] = True\n network.transformers.loc[network.transformers.bus0.isin(\n buses.index),'s_nom_min'] = network.transformers.s_nom\n \n if not line_max==None:\n network.transformers.loc[network.transformers.bus0.isin(\n buses.index),'s_nom_max'] = \\\n line_max * network.transformers.s_nom\n \n else:\n network.transformers.loc[network.transformers.bus0.isin(\n buses.index),'s_nom_max'] = float(\"inf\")\n\n if not network.links.empty:\n network.links.loc[(network.links.bus0.isin(buses.index)) &\n (network.links.bus1.isin(buses.index)),\n 'p_nom_extendable'] = True\n 
network.links.loc[(network.links.bus0.isin(buses.index)) &\n (network.links.bus1.isin(buses.index)),\n 'p_nom_min'] = network.links.p_nom\n\n if not line_max==None:\n network.links.loc[(network.links.bus0.isin(buses.index)) &\n (network.links.bus1.isin(buses.index)),\n 'p_nom_max'] = line_max * network.links.p_nom\n \n else:\n network.links.loc[(network.links.bus0.isin(buses.index)) &\n (network.links.bus1.isin(buses.index)),\n 'p_nom_max'] = float(\"inf\")\n \n network = set_line_costs(network, args)\n network = set_trafo_costs(network, args)\n\n if 'foreign_network' in args['extendable']:\n buses = network.buses[network.buses.index.isin(\n buses_by_country(network).index)]\n network.lines.loc[network.lines.bus0.isin(buses.index) |\n network.lines.bus1.isin(buses.index) ,\n 's_nom_extendable'] = True\n network.lines.loc[network.lines.bus0.isin(buses.index) |\n network.lines.bus1.isin(buses.index),\n 's_nom_min'] = network.lines.s_nom\n\n if not line_max==None:\n network.lines.loc[network.lines.bus0.isin(buses.index) |\n network.lines.bus1.isin(buses.index),\n 's_nom_max'] = line_max * network.lines.s_nom\n \n else:\n network.lines.loc[network.lines.bus0.isin(buses.index) |\n network.lines.bus1.isin(buses.index),\n 's_nom_max'] = float(\"inf\")\n \n if not network.transformers.empty:\n network.transformers.loc[network.transformers.bus0.isin(\n buses.index) | network.transformers.bus1.isin(\n buses.index) ,'s_nom_extendable'] = True\n network.transformers.loc[network.transformers.bus0.isin(\n buses.index) | network.transformers.bus1.isin(\n buses.index) ,'s_nom_min'] = network.transformers.s_nom\n\n if not line_max==None:\n network.transformers.loc[network.transformers.bus0.isin(\n buses.index) | network.transformers.bus1.isin(\n buses.index),'s_nom_max'] = \\\n line_max * network.transformers.s_nom\n \n else:\n network.transformers.loc[network.transformers.bus0.isin(\n buses.index) | network.transformers.bus1.isin(\n buses.index),'s_nom_max'] = float(\"inf\")\n\n 
if not network.links.empty:\n network.links.loc[(network.links.bus0.isin(buses.index)) |\n (network.links.bus1.isin(buses.index)),\n 'p_nom_extendable'] = True\n network.links.loc[(network.links.bus0.isin(buses.index)) |\n (network.links.bus1.isin(buses.index)),\n 'p_nom_min'] = network.links.p_nom\n \n if not line_max==None:\n network.links.loc[(network.links.bus0.isin(buses.index)) |\n (network.links.bus1.isin(buses.index)),\n 'p_nom_max'] = line_max * network.links.p_nom\n \n else:\n network.links.loc[(network.links.bus0.isin(buses.index)) |\n (network.links.bus1.isin(buses.index)),\n 'p_nom_max'] = float(\"inf\")\n \n network = set_line_costs(network, args)\n network = set_trafo_costs(network, args)\n\n if 'transformers' in args['extendable']:\n network.transformers.s_nom_extendable = True\n network.transformers.s_nom_min = network.transformers.s_nom\n network.transformers.s_nom_max = float(\"inf\")\n network = set_trafo_costs(network)\n\n if 'storages' in args['extendable'] or 'storage' in args['extendable']:\n if network.storage_units.\\\n carrier[network.\n storage_units.carrier ==\n 'extendable_storage'].any() == 'extendable_storage':\n network.storage_units.loc[network.storage_units.carrier ==\n 'extendable_storage',\n 'p_nom_extendable'] = True\n\n if 'generators' in args['extendable']:\n network.generators.p_nom_extendable = True\n network.generators.p_nom_min = network.generators.p_nom\n network.generators.p_nom_max = float(\"inf\")\n \n if 'foreign_storage' in args['extendable']:\n network.storage_units.p_nom_extendable[(network.storage_units.bus.isin(\n network.buses.index[network.buses.country_code != 'DE'])) & (\n network.storage_units.carrier.isin(\n ['battery_storage','hydrogen_storage'] ))] = True\n\n network.storage_units.loc[\n network.storage_units.p_nom_max.isnull(),'p_nom_max'] = \\\n network.storage_units.p_nom\n\n network.storage_units.loc[\n (network.storage_units.carrier == 'battery_storage'),\n 'capital_cost'] = 
network.storage_units.loc[(\n network.storage_units.carrier == 'extendable_storage') \n & (network.storage_units.max_hours == 6),'capital_cost'].max()\n\n network.storage_units.loc[ \n (network.storage_units.carrier == 'hydrogen_storage'),\n 'capital_cost'] = network.storage_units.loc[(\n network.storage_units.carrier == 'extendable_storage') &\n (network.storage_units.max_hours == 168),'capital_cost'].max()\n\n network.storage_units.loc[\n (network.storage_units.carrier == 'battery_storage'),\n 'marginal_cost'] = network.storage_units.loc[(\n network.storage_units.carrier == 'extendable_storage') \n & (network.storage_units.max_hours == 6),'marginal_cost'].max()\n\n network.storage_units.loc[\n (network.storage_units.carrier == 'hydrogen_storage'),\n 'marginal_cost'] = network.storage_units.loc[(\n network.storage_units.carrier == 'extendable_storage') &\n (network.storage_units.max_hours == 168),'marginal_cost'].max()\n\n\n # Extension settings for extension-NEP 2035 scenarios\n if 'NEP Zubaunetz' in args['extendable']:\n for i in range(len(args['scn_extension'])):\n network.lines.loc[(network.lines.project != 'EnLAG') & (\n network.lines.scn_name == 'extension_' + args['scn_extension'][i]),\n 's_nom_extendable'] = True\n\n network.transformers.loc[(\n network.transformers.project != 'EnLAG') & (\n network.transformers.scn_name == (\n 'extension_'+ args['scn_extension'][i])),\n 's_nom_extendable'] = True\n\n network.links.loc[network.links.scn_name == (\n 'extension_' + args['scn_extension'][i]\n ), 'p_nom_extendable'] = True\n\n if 'overlay_network' in args['extendable']:\n for i in range(len(args['scn_extension'])):\n network.lines.loc[network.lines.scn_name == (\n 'extension_' + args['scn_extension'][i]\n ), 's_nom_extendable'] = True\n \n network.lines.loc[network.lines.scn_name == (\n 'extension_' + args['scn_extension'][i]\n ), 's_nom_max'] = network.lines.s_nom[network.lines.scn_name == (\n 'extension_' + args['scn_extension'][i])]\n \n 
network.links.loc[network.links.scn_name == (\n 'extension_' + args['scn_extension'][i]\n ), 'p_nom_extendable'] = True\n \n network.transformers.loc[network.transformers.scn_name == (\n 'extension_' + args['scn_extension'][i]\n ), 's_nom_extendable'] = True\n \n network.lines.loc[network.lines.scn_name == (\n 'extension_' + args['scn_extension'][i]\n ), 'capital_cost'] = network.lines.capital_cost/\\\n args['branch_capacity_factor']['eHV'] \n\n if 'overlay_lines' in args['extendable']:\n for i in range(len(args['scn_extension'])):\n network.lines.loc[network.lines.scn_name == (\n 'extension_' + args['scn_extension'][i]\n ), 's_nom_extendable'] = True\n \n network.links.loc[network.links.scn_name == (\n 'extension_' + args['scn_extension'][i]\n ), 'p_nom_extendable'] = True\n \n network.lines.loc[network.lines.scn_name == (\n 'extension_' + args['scn_extension'][i]),\n 'capital_cost'] = network.lines.capital_cost + (2 * 14166)\n\n network.lines.s_nom_min[network.lines.s_nom_extendable == False] =\\\n network.lines.s_nom\n\n network.transformers.s_nom_min[network.transformers.s_nom_extendable == \\\n False] = network.transformers.s_nom\n \n network.lines.s_nom_max[network.lines.s_nom_extendable == False] =\\\n network.lines.s_nom\n\n network.transformers.s_nom_max[network.transformers.s_nom_extendable == \\\n False] = network.transformers.s_nom\n\n return network", "response": "Function that sets the extendable flag of the given PyPSA network."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nfunctioning that returns a new set of extension variables that are used to reduce number of extension variables.", "response": "def extension_preselection(network, args, method, days = 3):\n \n \"\"\"\n Function that preselects lines which are extended in snapshots leading to \n overloading to reduce number of extension variables. 
\n \n Parameters\n ----------\n network : :class:`pypsa.Network\n Overall container of PyPSA\n args : dict\n Arguments set in appl.py\n method: str\n Choose method of selection:\n 'extreme_situations' for remarkable timesteps \n (e.g. minimal residual load)\n 'snapshot_clustering' for snapshot clustering with number of days\n days: int\n Number of clustered days, only used when method = 'snapshot_clustering'\n\n Returns\n -------\n network : :class:`pypsa.Network\n Overall container of PyPSA\n \"\"\"\n\n weighting = network.snapshot_weightings\n\n if method == 'extreme_situations':\n snapshots = find_snapshots(network, 'residual load')\n snapshots = snapshots.append(find_snapshots(network, 'wind_onshore'))\n snapshots = snapshots.append(find_snapshots(network, 'solar'))\n snapshots = snapshots.drop_duplicates()\n snapshots = snapshots.sort_values()\n\n if method == 'snapshot_clustering':\n network_cluster = snapshot_clustering(network, how='daily', \n clusters=days)\n snapshots = network_cluster.snapshots\n network.snapshot_weightings = network_cluster.snapshot_weightings\n\n # Set all lines and trafos extendable in network\n network.lines.loc[:, 's_nom_extendable'] = True\n network.lines.loc[:, 's_nom_min'] = network.lines.s_nom\n network.lines.loc[:, 's_nom_max'] = np.inf\n \n network.links.loc[:, 'p_nom_extendable'] = True\n network.links.loc[:, 'p_nom_min'] = network.links.p_nom\n network.links.loc[:, 'p_nom_max'] = np.inf\n\n network.transformers.loc[:, 's_nom_extendable'] = True\n network.transformers.loc[:, 's_nom_min'] = network.transformers.s_nom\n network.transformers.loc[:, 's_nom_max'] = np.inf\n\n network = set_line_costs(network)\n network = set_trafo_costs(network)\n network = convert_capital_costs(network, 1, 1)\n extended_lines = network.lines.index[network.lines.s_nom_opt >\n network.lines.s_nom]\n extended_links = network.links.index[network.links.p_nom_opt >\n network.links.p_nom]\n\n x = time.time()\n for i in 
range(int(snapshots.value_counts().sum())):\n if i > 0:\n network.lopf(snapshots[i], solver_name=args['solver'])\n extended_lines = extended_lines.append(\n network.lines.index[network.lines.s_nom_opt >\n network.lines.s_nom])\n extended_lines = extended_lines.drop_duplicates()\n extended_links = extended_links.append(\n network.links.index[network.links.p_nom_opt >\n network.links.p_nom])\n extended_links = extended_links.drop_duplicates()\n\n print(\"Number of preselected lines: \", len(extended_lines))\n\n network.lines.loc[~network.lines.index.isin(extended_lines),\n 's_nom_extendable'] = False\n network.lines.loc[network.lines.s_nom_extendable, 's_nom_min']\\\n = network.lines.s_nom\n network.lines.loc[network.lines.s_nom_extendable, 's_nom_max']\\\n = np.inf\n \n network.links.loc[~network.links.index.isin(extended_links),\n 'p_nom_extendable'] = False\n network.links.loc[network.links.p_nom_extendable, 'p_nom_min']\\\n = network.links.p_nom\n network.links.loc[network.links.p_nom_extendable, 'p_nom_max']\\\n = np.inf\n\n network.snapshot_weightings = weighting\n network = set_line_costs(network)\n network = set_trafo_costs(network)\n network = convert_capital_costs(network, args['start_snapshot'],\n args['end_snapshot'])\n\n y = time.time()\n z1st = (y - x) / 60\n\n print(\"Time for first LOPF [min]:\", round(z1st, 2))\n\n return network"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef handle(self, event):\n # Allow to handle a subset of events while having a grabbed tool (between a button press & release event)\n suppressed_grabbed_tool = None\n if event.type in (Gdk.EventType.SCROLL, Gdk.EventType.KEY_PRESS, Gdk.EventType.KEY_RELEASE):\n suppressed_grabbed_tool = self._grabbed_tool\n self._grabbed_tool = None\n\n rt = super(ToolChain, self).handle(event)\n\n if suppressed_grabbed_tool:\n self._grabbed_tool = suppressed_grabbed_tool\n\n return rt", "response": "Handle the event by calling each 
tool until the event is handled or grabbed."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nfilters selection Filter items that cannot be moved and return the rest.", "response": "def movable_items(self):\n \"\"\"Filter selection\n\n Filter items of selection that cannot be moved (i.e. are not instances of `Item`) and return the rest.\n \"\"\"\n view = self.view\n\n if self._move_name_v:\n yield InMotion(self._item, view)\n else:\n selected_items = set(view.selected_items)\n for item in selected_items:\n if not isinstance(item, Item):\n continue\n yield InMotion(item, view)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef on_button_press(self, event):\n if event.get_button()[1] not in self._buttons:\n return False # Only handle events for registered buttons (left mouse clicks)\n\n if event.get_state()[1] & constants.RUBBERBAND_MODIFIER:\n return False # Mouse clicks with pressed shift key are handled in another tool\n\n # Special case: moving the NameView\n # This is only allowed, if the hovered item is a NameView and the Ctrl-key is pressed and the only selected\n # item is the parental StateView. 
In this case, the selection and _item will no longer be looked at,\n # but only _move_name_v\n self._item = self.get_item()\n if isinstance(self._item, NameView):\n selected_items = self.view.selected_items\n if event.get_state()[1] & Gdk.ModifierType.CONTROL_MASK and len(selected_items) == 1 and next(iter(selected_items)) is \\\n self._item.parent:\n self._move_name_v = True\n else:\n self._item = self._item.parent\n\n if not self._move_name_v:\n self._old_selection = self.view.selected_items\n if self._item not in self.view.selected_items:\n # When items are to be moved, a button-press should not cause any deselection.\n # However, the selection is stored, in case no move operation is performed.\n self.view.handle_new_selection(self._item)\n\n if not self.view.is_focus():\n self.view.grab_focus()\n\n return True", "response": "Handles the mouse button press event."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef on_button_release(self, event):\n affected_models = {}\n\n for inmotion in self._movable_items:\n inmotion.move((event.x, event.y))\n rel_pos = gap_helper.calc_rel_pos_to_parent(self.view.canvas, inmotion.item,\n inmotion.item.handles()[NW])\n if isinstance(inmotion.item, StateView):\n state_v = inmotion.item\n state_m = state_v.model\n self.view.canvas.request_update(state_v)\n if state_m.get_meta_data_editor()['rel_pos'] != rel_pos:\n state_m.set_meta_data_editor('rel_pos', rel_pos)\n affected_models[state_m] = (\"position\", True, state_v)\n elif isinstance(inmotion.item, NameView):\n state_v = inmotion.item\n state_m = self.view.canvas.get_parent(state_v).model\n self.view.canvas.request_update(state_v)\n if state_m.get_meta_data_editor()['name']['rel_pos'] != rel_pos:\n state_m.set_meta_data_editor('name.rel_pos', rel_pos)\n affected_models[state_m] = (\"name_position\", False, state_v)\n elif isinstance(inmotion.item, TransitionView):\n transition_v = inmotion.item\n transition_m = transition_v.model\n 
self.view.canvas.request_update(transition_v)\n current_waypoints = gap_helper.get_relative_positions_of_waypoints(transition_v)\n old_waypoints = transition_m.get_meta_data_editor()['waypoints']\n if current_waypoints != old_waypoints:\n transition_m.set_meta_data_editor('waypoints', current_waypoints)\n affected_models[transition_m] = (\"waypoints\", False, transition_v)\n\n if len(affected_models) == 1:\n model = next(iter(affected_models))\n change, affects_children, view = affected_models[model]\n self.view.graphical_editor.emit('meta_data_changed', model, change, affects_children)\n elif len(affected_models) > 1:\n # if more than one item has been moved, we need to call the meta_data_changed signal on a common parent\n common_parents = None\n for change, affects_children, view in affected_models.values():\n parents_of_view = set(self.view.canvas.get_ancestors(view))\n if common_parents is None:\n common_parents = parents_of_view\n else:\n common_parents = common_parents.intersection(parents_of_view)\n assert len(common_parents) > 0, \"The selected elements do not have common parent element\"\n for state_v in common_parents:\n # Find most nested state_v\n children_of_state_v = self.view.canvas.get_all_children(state_v)\n if any(common_parent in children_of_state_v for common_parent in common_parents):\n continue\n self.view.graphical_editor.emit('meta_data_changed', state_v.model, \"positions\", True)\n break\n\n if not affected_models and self._old_selection is not None:\n # The selection is handled differently depending on whether states were moved or not\n # If no move operation was performed, we reset the selection to that is was before the button-press event\n # and let the state machine selection handle the selection\n self.view.unselect_all()\n self.view.select_item(self._old_selection)\n self.view.handle_new_selection(self._item)\n\n self._move_name_v = False\n self._old_selection = None\n return super(MoveItemTool, self).on_button_release(event)", 
"response": "Write back the position and waypoint meta data changes for the moved items."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _filter_library_state(self, items):\n if not items:\n return items\n\n top_most_item = items[0]\n # If the hovered item is e.g. a connection, we need to get the parental state\n top_most_state_v = top_most_item if isinstance(top_most_item, StateView) else top_most_item.parent\n state = top_most_state_v.model.state\n\n global_gui_config = gui_helper_state_machine.global_gui_config\n if global_gui_config.get_config_value('STATE_SELECTION_INSIDE_LIBRARY_STATE_ENABLED'):\n # select the library state instead of the library_root_state because it is hidden\n if state.is_root_state_of_library:\n new_topmost_item = self.view.canvas.get_view_for_core_element(state.parent)\n return self.dismiss_upper_items(items, new_topmost_item)\n return items\n else:\n # Find state_copy of uppermost LibraryState\n library_root_state = state.get_uppermost_library_root_state()\n\n # If the hovered element is a child of a library, make the library the hovered_item\n if library_root_state:\n library_state = library_root_state.parent\n library_state_v = self.view.canvas.get_view_for_core_element(library_state)\n return self.dismiss_upper_items(items, library_state_v)\n return items", "response": "Filter out child elements of library state when they cannot be hovered"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _filter_hovered_items(self, items, event):\n items = self._filter_library_state(items)\n if not items:\n return items\n top_most_item = items[0]\n second_top_most_item = items[1] if len(items) > 1 else None\n\n # States/Names take precedence over connections if the connections are on the same hierarchy and if there is\n # a port beneath the cursor\n first_state_v = next(filter(lambda item: isinstance(item, (NameView, StateView)), items))\n first_state_v = 
first_state_v.parent if isinstance(first_state_v, NameView) else first_state_v\n if first_state_v:\n # There can be several connections above the state/name skip those and find the first non-connection-item\n for item in items:\n if isinstance(item, ConnectionView):\n # connection is on the same hierarchy level as the state/name, thus we dismiss it\n if self.view.canvas.get_parent(top_most_item) is not first_state_v:\n continue\n break\n\n # Connections are only dismissed, if there is a port beneath the cursor. Search for ports here:\n port_beneath_cursor = False\n state_ports = first_state_v.get_all_ports()\n position = self.view.get_matrix_v2i(first_state_v).transform_point(event.x, event.y)\n i2v_matrix = self.view.get_matrix_i2v(first_state_v)\n for port_v in state_ports:\n item_distance = port_v.port.glue(position)[1]\n view_distance = i2v_matrix.transform_distance(item_distance, 0)[0]\n if view_distance == 0:\n port_beneath_cursor = True\n break\n\n if port_beneath_cursor:\n items = self.dismiss_upper_items(items, item)\n top_most_item = items[0]\n second_top_most_item = items[1] if len(items) > 1 else None\n\n # NameView can only be hovered if it or its parent state is selected\n if isinstance(top_most_item, NameView):\n state_v = second_top_most_item # second item in the list must be the parent state of the NameView\n if state_v not in self.view.selected_items and top_most_item not in self.view.selected_items:\n items = items[1:]\n\n return items", "response": "Filter out items that cannot be hovered"}