{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck common Renku commands used in various situations.", "response": "def cli(ctx, path, renku_home, use_external_storage):\n \"\"\"Check common Renku commands used in various situations.\"\"\"\n ctx.obj = LocalClient(\n path=path,\n renku_home=renku_home,\n use_external_storage=use_external_storage,\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nupdates existing files by rerunning their outdated workflow.", "response": "def update(client, revision, no_output, siblings, paths):\n \"\"\"Update existing files by rerunning their outdated workflow.\"\"\"\n graph = Graph(client)\n outputs = graph.build(revision=revision, can_be_cwl=no_output, paths=paths)\n outputs = {node for node in outputs if graph.need_update(node)}\n\n if not outputs:\n click.secho(\n 'All files were generated from the latest inputs.', fg='green'\n )\n sys.exit(0)\n\n # Check or extend siblings of outputs.\n outputs = siblings(graph, outputs)\n output_paths = {node.path for node in outputs if _safe_path(node.path)}\n\n # Get all clean nodes.\n input_paths = {node.path for node in graph.nodes} - output_paths\n\n # Store the generated workflow used for updating paths.\n import yaml\n\n output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex)\n workflow = graph.ascwl(\n input_paths=input_paths,\n output_paths=output_paths,\n outputs=outputs,\n )\n\n # Make sure all inputs are pulled from a storage.\n client.pull_paths_from_storage(\n *(path for _, path in workflow.iter_input_files(client.workflow_path))\n )\n\n with output_file.open('w') as f:\n f.write(\n yaml.dump(\n ascwl(\n workflow,\n filter=lambda _, x: x is not None,\n basedir=client.workflow_path,\n ),\n default_flow_style=False\n )\n )\n\n from ._cwl import execute\n execute(client, output_file, output_paths=output_paths)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function 
that\nwrites Renku configuration to file.", "response": "def write_config(config, path, final=False):\n \"\"\"Write Renku configuration.\"\"\"\n with open(config_path(path, final=final), 'w+') as configfile:\n yaml.dump(config, configfile, default_flow_style=False)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef config_load(ctx, param, value):\n if ctx.obj is None:\n ctx.obj = {}\n\n ctx.obj['config_path'] = value\n ctx.obj['config'] = read_config(value)\n return value", "response": "Print application config path."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds config to function.", "response": "def with_config(f):\n \"\"\"Add config to function.\"\"\"\n # keep it.\n\n @click.pass_context\n def new_func(ctx, *args, **kwargs):\n # Invoked with custom config:\n if 'config' in kwargs:\n return ctx.invoke(f, *args, **kwargs)\n\n if ctx.obj is None:\n ctx.obj = {}\n\n config = ctx.obj['config']\n\n project_enabled = not ctx.obj.get('no_project', False)\n project_config_path = get_project_config_path()\n\n if project_enabled and project_config_path:\n project_config = read_config(project_config_path)\n config['project'] = project_config\n result = ctx.invoke(f, config, *args, **kwargs)\n project_config = config.pop('project', None)\n if project_config:\n if not project_config_path:\n raise RuntimeError('Invalid config update')\n write_config(project_config, path=project_config_path)\n write_config(config, path=ctx.obj['config_path'])\n if project_config is not None:\n config['project'] = project_config\n return result\n\n return update_wrapper(new_func, f)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nprint application config path.", "response": "def print_app_config_path(ctx, param, value):\n \"\"\"Print application config path.\"\"\"\n if not value or ctx.resilient_parsing:\n return\n 
click.echo(config_path(os.environ.get('RENKU_CONFIG')))\n ctx.exit()"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate new project configuration folder.", "response": "def create_project_config_path(\n path, mode=0o777, parents=False, exist_ok=False\n):\n \"\"\"Create new project configuration folder.\"\"\"\n # FIXME check default directory mode\n project_path = Path(path).absolute().joinpath(RENKU_HOME)\n project_path.mkdir(mode=mode, parents=parents, exist_ok=exist_ok)\n return str(project_path)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_project_config_path(path=None):\n project_path = Path(path or '.').absolute().joinpath(RENKU_HOME)\n if project_path.exists() and project_path.is_dir():\n return str(project_path)", "response": "Return project configuration folder if exist."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef find_project_config_path(path=None):\n path = Path(path) if path else Path.cwd()\n abspath = path.absolute()\n\n project_path = get_project_config_path(abspath)\n if project_path:\n return project_path\n\n for parent in abspath.parents:\n project_path = get_project_config_path(parent)\n if project_path:\n return project_path", "response": "Find project config path."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nyielding nodes from entities.", "response": "def _nodes(output, parent=None):\n \"\"\"Yield nodes from entities.\"\"\"\n # NOTE refactor so all outputs behave the same\n entity = getattr(output, 'entity', output)\n\n if isinstance(entity, Collection):\n for member in entity.members:\n if parent is not None:\n member = attr.evolve(member, parent=parent)\n yield from _nodes(member)\n yield output\n else:\n yield output"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a list of instances from a mapping.", 
"response": "def mapped(cls, key='id', **kwargs):\n \"\"\"Create list of instances from a mapping.\"\"\"\n kwargs.setdefault('metadata', {})\n kwargs['metadata']['jsonldPredicate'] = {'mapSubject': key}\n kwargs.setdefault('default', attr.Factory(list))\n\n def converter(value):\n \"\"\"Convert mapping to a list of instances.\"\"\"\n if isinstance(value, dict):\n result = []\n for k, v in iteritems(value):\n if not hasattr(cls, 'from_cwl'):\n vv = dict(v)\n vv[key] = k\n else:\n vv = attr.evolve(cls.from_cwl(v), **{key: k})\n result.append(vv)\n else:\n result = value\n\n def fix_keys(data):\n \"\"\"Fix names of keys.\"\"\"\n for a in fields(cls):\n a_name = a.name.rstrip('_')\n if a_name in data:\n yield a.name, data[a_name]\n\n return [\n cls(**{kk: vv\n for kk, vv in fix_keys(v)}) if not isinstance(v, cls) else v\n for v in result\n ]\n\n kwargs['converter'] = converter\n return attr.ib(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef ascwl(\n inst,\n recurse=True,\n filter=None,\n dict_factory=dict,\n retain_collection_types=False,\n basedir=None,\n):\n \"\"\"Return the ``attrs`` attribute values of *inst* as a dict.\n\n Support ``jsonldPredicate`` in a field metadata for generating\n mappings from lists.\n\n Adapted from ``attr._funcs``.\n \"\"\"\n attrs = fields(inst.__class__)\n rv = dict_factory()\n\n def convert_value(v):\n \"\"\"Convert special types.\"\"\"\n if isinstance(v, Path):\n v = str(v)\n return os.path.relpath(v, str(basedir)) if basedir else v\n return v\n\n for a in attrs:\n if a.name.startswith('__'):\n continue\n\n a_name = a.name.rstrip('_')\n v = getattr(inst, a.name)\n if filter is not None and not filter(a, v):\n continue\n if recurse is True:\n if has(v.__class__):\n rv[a_name] = ascwl(\n v,\n recurse=True,\n filter=filter,\n dict_factory=dict_factory,\n basedir=basedir,\n )\n\n elif isinstance(v, (tuple, list, set)):\n cf = v.__class__ if 
retain_collection_types is True else list\n rv[a_name] = cf([\n ascwl(\n i,\n recurse=True,\n filter=filter,\n dict_factory=dict_factory,\n basedir=basedir,\n ) if has(i.__class__) else i for i in v\n ])\n\n if 'jsonldPredicate' in a.metadata:\n k = a.metadata['jsonldPredicate'].get('mapSubject')\n if k:\n vv = dict_factory()\n for i in rv[a_name]:\n kk = i.pop(k)\n vv[kk] = i\n rv[a_name] = vv\n\n elif isinstance(v, dict):\n df = dict_factory\n rv[a_name] = df((\n ascwl(\n kk,\n dict_factory=df,\n basedir=basedir,\n ) if has(kk.__class__) else convert_value(kk),\n ascwl(\n vv,\n dict_factory=df,\n basedir=basedir,\n ) if has(vv.__class__) else vv\n ) for kk, vv in iteritems(v))\n else:\n rv[a_name] = convert_value(v)\n else:\n rv[a_name] = convert_value(v)\n\n if isinstance(inst, CWLClass):\n rv['class'] = inst.__class__.__name__\n\n return rv", "response": "Return the attrs attribute values of inst as a dict."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef from_cwl(cls, data, __reference__=None):\n class_name = data.get('class', None)\n cls = cls.registry.get(class_name, cls)\n\n if __reference__:\n with with_reference(__reference__):\n self = cls(\n **{k: v\n for k, v in iteritems(data) if k != 'class'}\n )\n else:\n self = cls(**{k: v for k, v in iteritems(data) if k != 'class'})\n return self", "response": "Return an instance from CWL data."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef from_yaml(cls, path):\n import yaml\n\n with path.open(mode='r') as fp:\n self = cls.from_cwl(yaml.safe_load(fp), __reference__=path)\n\n return self", "response": "Return an instance from a YAML file."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef template(client, force):\n import pkg_resources\n\n # create the templated files\n for tpl_file in CI_TEMPLATES:\n tpl_path = client.path 
/ tpl_file\n with pkg_resources.resource_stream(__name__, tpl_file) as tpl:\n content = tpl.read()\n\n if not force and tpl_path.exists():\n click.confirm(\n 'Do you want to override \"{tpl_file}\"'.format(\n tpl_file=tpl_file\n ),\n abort=True,\n )\n\n with tpl_path.open('wb') as dest:\n dest.write(content)", "response": "Render templated configuration files."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef rerun(client, run, job):\n from renku.models.provenance import ProcessRun\n activity = client.process_commmit()\n\n if not isinstance(activity, ProcessRun):\n click.secho('No tool was found.', fg='red', file=sys.stderr)\n return\n\n try:\n args = ['cwl-runner', activity.path]\n if job:\n job_file = tempfile.NamedTemporaryFile(\n suffix='.yml', dir=os.getcwd(), delete=False\n )\n args.append(job_file.name)\n\n with job_file as fp:\n yaml.dump(yaml.safe_load(job), stream=fp, encoding='utf-8')\n\n if run:\n return call(args, cwd=os.getcwd())\n finally:\n if job:\n os.unlink(job_file.name)", "response": "Re - run existing workflow or tool using CWL runner."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nshowing workflow inputs and exit.", "response": "def show_inputs(client, workflow):\n \"\"\"Show workflow inputs and exit.\"\"\"\n for input_ in workflow.inputs:\n click.echo(\n '{id}: {default}'.format(\n id=input_.id,\n default=_format_default(client, input_.default),\n )\n )\n sys.exit(0)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef rerun(client, revision, roots, siblings, inputs, paths):\n graph = Graph(client)\n outputs = graph.build(paths=paths, revision=revision)\n\n # Check or extend siblings of outputs.\n outputs = siblings(graph, outputs)\n output_paths = {node.path for node in outputs}\n\n # Normalize and check all starting paths.\n roots = {graph.normalize_path(root) for root in roots}\n assert not roots & 
output_paths, '--from colides with output paths'\n\n # Generate workflow and check inputs.\n # NOTE The workflow creation is done before opening a new file.\n workflow = inputs(\n client,\n graph.ascwl(\n input_paths=roots,\n output_paths=output_paths,\n outputs=outputs,\n )\n )\n\n # Make sure all inputs are pulled from a storage.\n client.pull_paths_from_storage(\n *(path for _, path in workflow.iter_input_files(client.workflow_path))\n )\n\n # Store the generated workflow used for updating paths.\n import yaml\n\n output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex)\n with output_file.open('w') as f:\n f.write(\n yaml.dump(\n ascwl(\n workflow,\n filter=lambda _, x: x is not None,\n basedir=client.workflow_path,\n ),\n default_flow_style=False\n )\n )\n\n # Execute the workflow and relocate all output files.\n from ._cwl import execute\n # FIXME get new output paths for edited tools\n # output_paths = {path for _, path in workflow.iter_output_files()}\n execute(\n client,\n output_file,\n output_paths=output_paths,\n )", "response": "Recreate files generated by a sequence of run commands."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nchecking if a path is available.", "response": "def path(ctx, paths):\n \"\"\"DEPRECATED: use 'renku storage pull'.\"\"\"\n click.secho('Use \"renku storage pull\" instead.', fg='red', err=True)\n ctx.exit(2)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ndetecting the URL of the Docker registry.", "response": "def detect_registry_url(client, auto_login=True):\n \"\"\"Return a URL of the Docker registry.\"\"\"\n repo = client.repo\n config = repo.config_reader()\n\n # Find registry URL in .git/config\n remote_url = None\n try:\n registry_url = config.get_value('renku', 'registry', None)\n except NoSectionError:\n registry_url = None\n\n remote_branch = repo.head.reference.tracking_branch()\n\n if remote_branch is not None:\n remote_name = 
remote_branch.remote_name\n config_section = 'renku \"{remote_name}\"'.format(\n remote_name=remote_name\n )\n try:\n registry_url = config.get_value(\n config_section, 'registry', registry_url\n )\n except NoSectionError:\n pass\n remote_url = repo.remotes[remote_name].url\n\n if registry_url:\n # Look in [renku] and [renku \"{remote_name}\"] for registry_url key.\n url = GitURL.parse(registry_url)\n elif remote_url:\n # Use URL based on remote configuration.\n url = GitURL.parse(remote_url)\n\n # Replace gitlab. with registry. unless running on gitlab.com.\n hostname_parts = url.hostname.split('.')\n if len(hostname_parts) > 2 and hostname_parts[0] == 'gitlab':\n hostname_parts = hostname_parts[1:]\n hostname = '.'.join(['registry'] + hostname_parts)\n url = attr.evolve(url, hostname=hostname)\n else:\n raise errors.ConfigurationError(\n 'Configure renku.repository_url or Git remote.'\n )\n\n if auto_login and url.username and url.password:\n try:\n subprocess.run([\n 'docker',\n 'login',\n url.hostname,\n '-u',\n url.username,\n '--password-stdin',\n ],\n check=True,\n input=url.password.encode('utf-8'))\n except subprocess.CalledProcessError:\n raise errors.AuthenticationError(\n 'Check configuration of password or token in the registry URL'\n )\n\n return url"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntracks work on a specific problem.", "response": "def run(client, outputs, no_output, success_codes, isolation, command_line):\n \"\"\"Tracking work on a specific problem.\"\"\"\n working_dir = client.repo.working_dir\n mapped_std = _mapped_std_streams(client.candidate_paths)\n factory = CommandLineToolFactory(\n command_line=command_line,\n directory=os.getcwd(),\n working_dir=working_dir,\n successCodes=success_codes,\n **{\n name: os.path.relpath(path, working_dir)\n for name, path in mapped_std.items()\n }\n )\n\n with client.with_workflow_storage() as wf:\n with factory.watch(\n client, no_output=no_output, 
outputs=outputs\n ) as tool:\n # Make sure all inputs are pulled from a storage.\n client.pull_paths_from_storage(\n *(\n path\n for _, path in tool.iter_input_files(client.workflow_path)\n )\n )\n\n returncode = call(\n factory.command_line,\n cwd=os.getcwd(),\n **{key: getattr(sys, key)\n for key in mapped_std.keys()},\n )\n\n if returncode not in (success_codes or {0}):\n raise errors.InvalidSuccessCode(\n returncode, success_codes=success_codes\n )\n\n sys.stdout.flush()\n sys.stderr.flush()\n\n wf.add_step(run=tool)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef log(client, revision, format, no_output, paths):\n graph = Graph(client)\n if not paths:\n start, is_range, stop = revision.partition('..')\n if not is_range:\n stop = start\n elif not stop:\n stop = 'HEAD'\n\n commit = client.repo.rev_parse(stop)\n paths = (\n str(client.path / item.a_path)\n for item in commit.diff(commit.parents or NULL_TREE)\n # if not item.deleted_file\n )\n\n # NOTE shall we warn when \"not no_output and not paths\"?\n graph.build(paths=paths, revision=revision, can_be_cwl=no_output)\n\n FORMATS[format](graph)", "response": "Show logs for a file."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef status(ctx, client, revision, no_output, path):\n graph = Graph(client)\n # TODO filter only paths = {graph.normalize_path(p) for p in path}\n status = graph.build_status(revision=revision, can_be_cwl=no_output)\n\n click.echo('On branch {0}'.format(client.repo.active_branch))\n if status['outdated']:\n click.echo(\n 'Files generated from newer inputs:\\n'\n ' (use \"renku log [...]\" to see the full lineage)\\n'\n ' (use \"renku update [...]\" to '\n 'generate the file from its latest inputs)\\n'\n )\n\n for filepath, stts in sorted(status['outdated'].items()):\n outdated = (\n ', '.join(\n '{0}#{1}'.format(\n click.style(\n graph._format_path(n.path), fg='blue', 
bold=True\n ),\n _format_sha1(graph, n),\n ) for n in stts\n if n.path and n.path not in status['outdated']\n )\n )\n\n click.echo(\n '\\t{0}: {1}'.format(\n click.style(\n graph._format_path(filepath), fg='red', bold=True\n ), outdated\n )\n )\n\n click.echo()\n\n else:\n click.secho(\n 'All files were generated from the latest inputs.', fg='green'\n )\n\n if status['multiple-versions']:\n click.echo(\n 'Input files used in different versions:\\n'\n ' (use \"renku log --revision \" to see a lineage '\n 'for the given revision)\\n'\n )\n\n for filepath, files in sorted(status['multiple-versions'].items()):\n # Do not show duplicated commits! (see #387)\n commits = {_format_sha1(graph, key) for key in files}\n click.echo(\n '\\t{0}: {1}'.format(\n click.style(\n graph._format_path(filepath), fg='blue', bold=True\n ), ', '.join(commits)\n )\n )\n\n click.echo()\n\n if status['deleted']:\n click.echo(\n 'Deleted files used to generate outputs:\\n'\n ' (use \"git show :\" to see the file content '\n 'for the given revision)\\n'\n )\n\n for filepath, node in status['deleted'].items():\n click.echo(\n '\\t{0}: {1}'.format(\n click.style(\n graph._format_path(filepath), fg='blue', bold=True\n ), _format_sha1(graph, node)\n )\n )\n\n click.echo()\n\n ctx.exit(1 if status['outdated'] else 0)", "response": "Show a status of the repository."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nvalidating a project name.", "response": "def validate_name(ctx, param, value):\n \"\"\"Validate a project name.\"\"\"\n if not value:\n value = os.path.basename(ctx.params['directory'].rstrip(os.path.sep))\n return value"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nstore directory as a new Git home.", "response": "def store_directory(ctx, param, value):\n \"\"\"Store directory as a new Git home.\"\"\"\n Path(value).mkdir(parents=True, exist_ok=True)\n set_git_home(value)\n return value"} {"SOURCE": "codesearchnet", 
"instruction": "Can you generate a brief explanation for the following Python 3 code\ndef init(ctx, client, directory, name, force, use_external_storage):\n if not client.use_external_storage:\n use_external_storage = False\n\n ctx.obj = client = attr.evolve(\n client,\n path=directory,\n use_external_storage=use_external_storage,\n )\n\n msg = 'Initialized empty project in {path}'\n branch_name = None\n\n stack = contextlib.ExitStack()\n\n if force and client.repo:\n msg = 'Initialized project in {path} (branch {branch_name})'\n merge_args = ['--no-ff', '-s', 'recursive', '-X', 'ours']\n\n try:\n commit = client.find_previous_commit(\n str(client.renku_metadata_path),\n )\n branch_name = 'renku/init/' + str(commit)\n except KeyError:\n from git import NULL_TREE\n commit = NULL_TREE\n branch_name = 'renku/init/root'\n merge_args.append('--allow-unrelated-histories')\n\n ctx.obj = client = stack.enter_context(\n client.worktree(\n branch_name=branch_name,\n commit=commit,\n merge_args=merge_args,\n )\n )\n\n try:\n with client.lock:\n path = client.init_repository(name=name, force=force)\n except FileExistsError:\n raise click.UsageError(\n 'Renku repository is not empty. 
'\n 'Please use --force flag to use the directory as Renku '\n 'repository.'\n )\n\n stack.enter_context(client.commit())\n\n with stack:\n # Install Git hooks.\n from .githooks import install\n ctx.invoke(install, force=force)\n\n # Create all necessary template files.\n from .runner import template\n ctx.invoke(template, force=force)\n\n click.echo(msg.format(path=path, branch_name=branch_name))", "response": "Initialize a Renku project."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nfind missing files listed in datasets.", "response": "def check_missing_files(client):\n \"\"\"Find missing files listed in datasets.\"\"\"\n missing = defaultdict(list)\n\n for path, dataset in client.datasets.items():\n for file in dataset.files:\n filepath = (path.parent / file)\n if not filepath.exists():\n missing[str(\n path.parent.relative_to(client.renku_datasets_path)\n )].append(\n os.path.normpath(str(filepath.relative_to(client.path)))\n )\n\n if not missing:\n return True\n\n click.secho(\n WARNING + 'There are missing files in datasets.'\n # '\\n (use \"renku dataset clean \" to clean them)'\n )\n\n for dataset, files in missing.items():\n click.secho(\n '\\n\\t' + click.style(dataset, fg='yellow') + ':\\n\\t ' +\n '\\n\\t '.join(click.style(path, fg='red') for path in files)\n )\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef from_http_exception(cls, e):\n assert isinstance(e, requests.exceptions.HTTPError)\n response = e.response\n try:\n message = response.json()['message']\n except (KeyError, ValueError):\n message = response.content.strip()\n\n raise cls(message)", "response": "Create an APIError from an HTTP exception."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck for expected_status_code and return the response object.", "response": "def return_or_raise(cls, response, expected_status_code):\n \"\"\"Check for 
``expected_status_code``.\"\"\"\n try:\n if response.status_code in expected_status_code:\n return response\n except TypeError:\n if response.status_code == expected_status_code:\n return response\n\n raise cls(response)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_git_home(path='.'):\n ctx = click.get_current_context(silent=True)\n if ctx and GIT_KEY in ctx.meta:\n return ctx.meta[GIT_KEY]\n\n from git import Repo\n return Repo(path, search_parent_directories=True).working_dir", "response": "Get Git path from the current context."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_git_isolation():\n ctx = click.get_current_context(silent=True)\n if ctx and GIT_ISOLATION in ctx.meta:\n return ctx.meta[GIT_ISOLATION]", "response": "Get Git isolation from the current context."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn an attr enabled class that can be used to create a new object.", "response": "def attrs(\n maybe_cls=None, type=None, context=None, translate=None, **attrs_kwargs\n):\n \"\"\"Wrap an attr enabled class.\"\"\"\n if isinstance(type, (list, tuple, set)):\n types = list(type)\n else:\n types = [type] if type is not None else []\n context = context or {}\n translate = translate or {}\n\n def wrap(cls):\n \"\"\"Decorate an attr enabled class.\"\"\"\n jsonld_cls = attr.s(cls, **attrs_kwargs)\n\n if not issubclass(jsonld_cls, JSONLDMixin):\n jsonld_cls = attr.s(\n make_type(cls.__name__, (jsonld_cls, JSONLDMixin), {}),\n **attrs_kwargs\n )\n\n # Merge types\n for subcls in jsonld_cls.mro():\n subtype = getattr(subcls, '_jsonld_type', None)\n if subtype:\n if isinstance(subtype, (tuple, list)):\n types.extend(subtype)\n else:\n types.append(subtype)\n\n for key, value in getattr(subcls, '_jsonld_context', {}).items():\n if key in context and context[key] != value:\n raise TypeError()\n 
context.setdefault(key, value)\n\n for a in attr.fields(jsonld_cls):\n key = a.name\n ctx = a.metadata.get(KEY)\n if ctx is None:\n continue\n\n if ':' in ctx:\n prefix, _ = ctx.split(':', 1)\n if prefix in context:\n context[key] = ctx\n continue\n\n if isinstance(ctx, dict) or ctx not in context:\n context[key] = ctx\n\n if KEY_CLS in a.metadata:\n merge_ctx = a.metadata[KEY_CLS]._jsonld_context\n for ctx_key, ctx_value in merge_ctx.items():\n context.setdefault(ctx_key, ctx_value)\n if context[ctx_key] != ctx_value:\n raise TypeError(\n 'Can not merge {0} and {1} because of {2}'.format(\n jsonld_cls, a.metadata[KEY_CLS], ctx_key\n )\n )\n\n jsonld_cls.__module__ = cls.__module__\n jsonld_cls._jsonld_type = types[0] if len(types) == 1 else list(\n sorted(set(types))\n )\n jsonld_cls._jsonld_context = context\n jsonld_cls._jsonld_translate = translate\n jsonld_cls._jsonld_fields = {\n a.name\n for a in attr.fields(jsonld_cls) if KEY in a.metadata\n }\n\n context_doc = '\\n'.join(\n ' ' + line for line in json.dumps(context, indent=2).split('\\n')\n )\n jsonld_cls.__doc__ = DOC_TPL.format(\n cls=cls,\n type=json.dumps(jsonld_cls._jsonld_type),\n context=context_doc,\n )\n\n # Register class for given JSON-LD @type\n try:\n type_ = ld.expand({\n '@type': jsonld_cls._jsonld_type,\n '@context': context\n })[0]['@type']\n if isinstance(type_, list):\n type_ = tuple(sorted(type_))\n except Exception:\n # FIXME make sure all classes have @id defined\n return jsonld_cls\n\n if type_ in jsonld_cls.__type_registry__:\n raise TypeError(\n 'Type {0!r} is already registered for class {1!r}.'.format(\n jsonld_cls._jsonld_type,\n jsonld_cls.__type_registry__[jsonld_cls._jsonld_type],\n )\n )\n jsonld_cls.__type_registry__[type_] = jsonld_cls\n return jsonld_cls\n\n if maybe_cls is None:\n return wrap\n return wrap(maybe_cls)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a new attribute with context.", "response": "def attrib(context=None, 
**kwargs):\n \"\"\"Create a new attribute with context.\"\"\"\n kwargs.setdefault('metadata', {})\n kwargs['metadata'][KEY] = context\n return attr.ib(**kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a function that creates a container attribute builder.", "response": "def _container_attrib_builder(name, container, mapper):\n \"\"\"Builder for container attributes.\"\"\"\n context = {'@container': '@{0}'.format(name)}\n\n def _attrib(type, **kwargs):\n \"\"\"Define a container attribute.\"\"\"\n kwargs.setdefault('metadata', {})\n kwargs['metadata'][KEY_CLS] = type\n kwargs['default'] = Factory(container)\n\n def _converter(value):\n \"\"\"Convert value to the given type.\"\"\"\n if isinstance(value, container):\n return mapper(type, value)\n elif value is None:\n return value\n raise ValueError(value)\n\n kwargs.setdefault('converter', _converter)\n context_ib = context.copy()\n context_ib.update(kwargs.pop('context', {}))\n return attrib(context=context_ib, **kwargs)\n\n return _attrib"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef asjsonld(\n inst,\n recurse=True,\n filter=None,\n dict_factory=dict,\n retain_collection_types=False,\n export_context=True,\n basedir=None,\n):\n \"\"\"Dump a JSON-LD class to the JSON with generated ``@context`` field.\"\"\"\n jsonld_fields = inst.__class__._jsonld_fields\n attrs = tuple(\n field\n for field in fields(inst.__class__) if field.name in jsonld_fields\n )\n rv = dict_factory()\n\n def convert_value(v):\n \"\"\"Convert special types.\"\"\"\n if isinstance(v, Path):\n v = str(v)\n return os.path.relpath(v, str(basedir)) if basedir else v\n return v\n\n for a in attrs:\n v = getattr(inst, a.name)\n\n # skip proxies\n if isinstance(v, weakref.ReferenceType):\n continue\n\n # do not export context for containers\n ec = export_context and KEY_CLS not in a.metadata\n\n if filter is not None and not filter(a, v):\n 
continue\n if recurse is True:\n if has(v.__class__):\n rv[a.name] = asjsonld(\n v,\n recurse=True,\n filter=filter,\n dict_factory=dict_factory,\n basedir=basedir,\n )\n elif isinstance(v, (tuple, list, set)):\n cf = v.__class__ if retain_collection_types is True else list\n rv[a.name] = cf([\n asjsonld(\n i,\n recurse=True,\n filter=filter,\n dict_factory=dict_factory,\n export_context=ec,\n basedir=basedir,\n ) if has(i.__class__) else i for i in v\n ])\n elif isinstance(v, dict):\n df = dict_factory\n rv[a.name] = df((\n asjsonld(\n kk,\n dict_factory=df,\n basedir=basedir,\n ) if has(kk.__class__) else convert_value(kk),\n asjsonld(\n vv,\n dict_factory=df,\n export_context=ec,\n basedir=basedir,\n ) if has(vv.__class__) else vv\n ) for kk, vv in iteritems(v))\n else:\n rv[a.name] = convert_value(v)\n else:\n rv[a.name] = convert_value(v)\n\n inst_cls = type(inst)\n\n if export_context:\n rv['@context'] = deepcopy(inst_cls._jsonld_context)\n\n if inst_cls._jsonld_type:\n rv['@type'] = inst_cls._jsonld_type\n return rv", "response": "Dump a JSON - LD class to the JSON with generated context field."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef from_jsonld(cls, data, __reference__=None, __source__=None):\n if isinstance(data, cls):\n return data\n\n if not isinstance(data, dict):\n raise ValueError(data)\n\n if '@type' in data:\n type_ = tuple(sorted(data['@type']))\n if type_ in cls.__type_registry__ and getattr(\n cls, '_jsonld_type', None\n ) != type_:\n new_cls = cls.__type_registry__[type_]\n if cls != new_cls:\n return new_cls.from_jsonld(data)\n\n if cls._jsonld_translate:\n data = ld.compact(data, {'@context': cls._jsonld_translate})\n data.pop('@context', None)\n\n data.setdefault('@context', cls._jsonld_context)\n\n if data['@context'] != cls._jsonld_context:\n compacted = ld.compact(data, {'@context': cls._jsonld_context})\n else:\n compacted = data\n\n # assert compacted['@type'] == cls._jsonld_type, 
'@type must be equal'\n # TODO update self(not cls)._jsonld_context with data['@context']\n fields = cls._jsonld_fields\n\n if __reference__:\n with with_reference(__reference__):\n self = cls(\n **{\n k.lstrip('_'): v\n for k, v in compacted.items() if k in fields\n }\n )\n else:\n self = cls(\n **{\n k.lstrip('_'): v\n for k, v in compacted.items() if k in fields\n }\n )\n if __source__:\n setattr(self, '__source__', __source__)\n return self", "response": "Instantiate a JSON - LD class from data."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn an instance from a YAML file.", "response": "def from_yaml(cls, path):\n \"\"\"Return an instance from a YAML file.\"\"\"\n import yaml\n\n with path.open(mode='r') as fp:\n source = yaml.safe_load(fp) or {}\n self = cls.from_jsonld(\n source,\n __reference__=path,\n __source__=deepcopy(source),\n )\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates JSON - LD with the original source data.", "response": "def asjsonld(self):\n \"\"\"Create JSON-LD with the original source data.\"\"\"\n source = {}\n if self.__source__:\n source.update(self.__source__)\n source.update(asjsonld(self))\n return source"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef to_yaml(self):\n import yaml\n\n with self.__reference__.open('w') as fp:\n yaml.dump(self.asjsonld(), fp, default_flow_style=False)", "response": "Store an instance to the referenced YAML file."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ntakes a GPG key and extracts the email addresses for it.", "response": "def addresses_for_key(gpg, key):\r\n \"\"\"\r\n Takes a key and extracts the email addresses for it.\r\n \"\"\"\r\n fingerprint = key[\"fingerprint\"]\r\n addresses = []\r\n for key in gpg.list_keys():\r\n if key[\"fingerprint\"] == fingerprint:\r\n 
addresses.extend([address.split(\"<\")[-1].strip(\">\")\r\n for address in key[\"uids\"] if address])\r\n return addresses"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nsend an email using the given recipients.", "response": "def send_mail(subject, body_text, addr_from, recipient_list,\r\n fail_silently=False, auth_user=None, auth_password=None,\r\n attachments=None, body_html=None, html_message=None,\r\n connection=None, headers=None):\r\n \"\"\"\r\n Sends a multipart email containing text and html versions which\r\n are encrypted for each recipient that has a valid gpg key\r\n installed.\r\n \"\"\"\r\n\r\n # Make sure only one HTML option is specified\r\n if body_html is not None and html_message is not None: # pragma: no cover\r\n raise ValueError(\"You cannot specify body_html and html_message at \"\r\n \"the same time. Please only use html_message.\")\r\n\r\n # Push users to update their code\r\n if body_html is not None: # pragma: no cover\r\n warn(\"Using body_html is deprecated; use the html_message argument \"\r\n \"instead. 
Please update your code.\", DeprecationWarning)\r\n html_message = body_html\r\n\r\n # Allow for a single address to be passed in.\r\n if isinstance(recipient_list, six.string_types):\r\n recipient_list = [recipient_list]\r\n\r\n connection = connection or get_connection(\r\n username=auth_user, password=auth_password,\r\n fail_silently=fail_silently)\r\n\r\n # Obtain a list of the recipients that have gpg keys installed.\r\n key_addresses = {}\r\n if USE_GNUPG:\r\n from email_extras.models import Address\r\n key_addresses = dict(Address.objects.filter(address__in=recipient_list)\r\n .values_list('address', 'use_asc'))\r\n # Create the gpg object.\r\n if key_addresses:\r\n gpg = GPG(gnupghome=GNUPG_HOME)\r\n if GNUPG_ENCODING is not None:\r\n gpg.encoding = GNUPG_ENCODING\r\n\r\n # Check if recipient has a gpg key installed\r\n def has_pgp_key(addr):\r\n return addr in key_addresses\r\n\r\n # Encrypts body if recipient has a gpg key installed.\r\n def encrypt_if_key(body, addr_list):\r\n if has_pgp_key(addr_list[0]):\r\n encrypted = gpg.encrypt(body, addr_list[0],\r\n always_trust=ALWAYS_TRUST)\r\n if encrypted == \"\" and body != \"\": # encryption failed\r\n raise EncryptionFailedError(\"Encrypting mail to %s failed.\",\r\n addr_list[0])\r\n return smart_text(encrypted)\r\n return body\r\n\r\n # Load attachments and create name/data tuples.\r\n attachments_parts = []\r\n if attachments is not None:\r\n for attachment in attachments:\r\n # Attachments can be pairs of name/data, or filesystem paths.\r\n if not hasattr(attachment, \"__iter__\"):\r\n with open(attachment, \"rb\") as f:\r\n attachments_parts.append((basename(attachment), f.read()))\r\n else:\r\n attachments_parts.append(attachment)\r\n\r\n # Send emails - encrypted emails needs to be sent individually, while\r\n # non-encrypted emails can be sent in one send. 
So the final list of\r\n # lists of addresses to send to looks like:\r\n # [[unencrypted1, unencrypted2, unencrypted3], [encrypted1], [encrypted2]]\r\n unencrypted = [addr for addr in recipient_list\r\n if addr not in key_addresses]\r\n unencrypted = [unencrypted] if unencrypted else unencrypted\r\n encrypted = [[addr] for addr in key_addresses]\r\n for addr_list in unencrypted + encrypted:\r\n msg = EmailMultiAlternatives(subject,\r\n encrypt_if_key(body_text, addr_list),\r\n addr_from, addr_list,\r\n connection=connection, headers=headers)\r\n if html_message is not None:\r\n if has_pgp_key(addr_list[0]):\r\n mimetype = \"application/gpg-encrypted\"\r\n else:\r\n mimetype = \"text/html\"\r\n msg.attach_alternative(encrypt_if_key(html_message, addr_list),\r\n mimetype)\r\n for parts in attachments_parts:\r\n name = parts[0]\r\n if key_addresses.get(addr_list[0]):\r\n name += \".asc\"\r\n msg.attach(name, encrypt_if_key(parts[1], addr_list))\r\n msg.send(fail_silently=fail_silently)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nsend an email using a template.", "response": "def send_mail_template(subject, template, addr_from, recipient_list,\r\n fail_silently=False, attachments=None, context=None,\r\n connection=None, headers=None):\r\n \"\"\"\r\n Send email rendering text and html versions for the specified\r\n template name using the context dictionary passed in.\r\n \"\"\"\r\n\r\n if context is None:\r\n context = {}\r\n\r\n # Loads a template passing in vars as context.\r\n def render(ext):\r\n name = \"email_extras/%s.%s\" % (template, ext)\r\n return loader.get_template(name).render(context)\r\n\r\n send_mail(subject, render(\"txt\"), addr_from, recipient_list,\r\n fail_silently=fail_silently, attachments=attachments,\r\n html_message=render(\"html\"), connection=connection,\r\n headers=headers)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef 
topological(nodes):\n order, enter, state = deque(), set(nodes), {}\n\n def dfs(node):\n \"\"\"Visit nodes in depth-first order.\"\"\"\n state[node] = GRAY\n for parent in nodes.get(node, ()):\n color = state.get(parent, None)\n if color == GRAY:\n raise ValueError('cycle')\n if color == BLACK:\n continue\n enter.discard(parent)\n dfs(parent)\n order.appendleft(node)\n state[node] = BLACK\n\n while enter:\n dfs(enter.pop())\n\n return order", "response": "Return nodes in a topological order."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nmoves files and check repository for potential problems.", "response": "def move(ctx, client, sources, destination):\n \"\"\"Move files and check repository for potential problems.\"\"\"\n from renku.api._git import _expand_directories\n\n dst = Path(destination)\n\n def fmt_path(path):\n \"\"\"Format path as relative to the client path.\"\"\"\n return str(Path(path).absolute().relative_to(client.path))\n\n files = {\n fmt_path(source): fmt_path(file_or_dir)\n for file_or_dir in sources\n for source in _expand_directories((file_or_dir, ))\n }\n\n def fmt_dst(path):\n \"\"\"Build a destination path for a source path.\"\"\"\n return str(dst / os.path.relpath(path, start=files[path]))\n\n destinations = {source: fmt_dst(source) for source in files}\n\n # 1. Check .gitignore.\n ignored = client.find_ignored_paths(*destinations.values())\n if ignored:\n click.echo(WARNING + 'Renamed files match .gitignore.\\n')\n if click.confirm(\n 'Do you want to edit \".gitignore\" now?', default=False\n ):\n click.edit(filename=str(client.path / '.gitignore'))\n\n # 2. 
Update dataset metadata files.\n with progressbar(\n client.datasets.items(),\n item_show_func=lambda item: str(item[1].short_id) if item else '',\n label='Updating dataset metadata',\n width=0,\n ) as bar:\n for (path, dataset) in bar:\n renames = {}\n\n for file in dataset.files:\n filepath = fmt_path(os.path.normpath(str(path.parent / file)))\n\n if filepath in files:\n renames[file] = os.path.relpath(\n destinations[filepath], start=str(path.parent)\n )\n\n if renames:\n dataset = dataset.rename_files(\n lambda key: renames.get(key, key)\n )\n\n dataset.to_yaml()\n\n # 3. Manage .gitattributes for external storage.\n tracked = tuple(\n path for path, attr in client.find_attr(*files).items()\n if attr.get('filter') == 'lfs'\n )\n client.untrack_paths_from_storage(*tracked)\n existing = client.find_attr(*tracked)\n if existing:\n click.echo(WARNING + 'There are custom .gitattributes.\\n')\n if click.confirm(\n 'Do you want to edit \".gitattributes\" now?', default=False\n ):\n click.edit(filename=str(client.path / '.gitattributes'))\n\n client.track_paths_in_storage(*(destinations[path] for path in tracked))\n\n # 4. 
Handle symlinks.\n dst.parent.mkdir(parents=True, exist_ok=True)\n\n for source, target in destinations.items():\n src = Path(source)\n if src.is_symlink():\n Path(target).parent.mkdir(parents=True, exist_ok=True)\n Path(target).symlink_to(\n os.path.relpath(\n str(src.resolve()), start=os.path.dirname(target)\n )\n )\n src.unlink()\n del files[source]\n\n # Finally move the files.\n final_sources = list(set(files.values()))\n if final_sources:\n run(['git', 'mv'] + final_sources + [destination], check=True)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef check_dataset_metadata(client):\n # Find pre 0.3.4 metadata files.\n old_metadata = list(_dataset_metadata_pre_0_3_4(client))\n\n if not old_metadata:\n return True\n\n click.secho(\n WARNING + 'There are metadata files in the old location.'\n '\\n (use \"renku migrate datasets\" to move them)\\n\\n\\t' + '\\n\\t'.join(\n click.style(str(path.relative_to(client.path)), fg='yellow')\n for path in old_metadata\n ) + '\\n'\n )\n\n return False", "response": "Check location of dataset metadata."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nshows siblings for given paths.", "response": "def siblings(client, revision, paths):\n \"\"\"Show siblings for given paths.\"\"\"\n graph = Graph(client)\n nodes = graph.build(paths=paths, revision=revision)\n siblings_ = set(nodes)\n for node in nodes:\n siblings_ |= graph.siblings(node)\n\n paths = {node.path for node in siblings_}\n for path in paths:\n click.echo(graph._format_path(path))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef inputs(ctx, client, revision, paths):\n from renku.models.provenance import ProcessRun\n\n graph = Graph(client)\n paths = set(paths)\n nodes = graph.build(revision=revision)\n\n commits = {node.commit for node in nodes}\n candidates = {(node.commit, node.path)\n for node in nodes if not 
paths or node.path in paths}\n\n input_paths = set()\n\n for commit in commits:\n activity = graph.activities[commit]\n\n if isinstance(activity, ProcessRun):\n for usage in activity.qualified_usage:\n for entity in usage.entity.entities:\n path = str((usage.client.path / entity.path).relative_to(\n client.path\n ))\n usage_key = (entity.commit, entity.path)\n\n if path not in input_paths and usage_key in candidates:\n input_paths.add(path)\n\n click.echo('\\n'.join(graph._format_path(path) for path in input_paths))\n ctx.exit(0 if not paths or len(input_paths) == len(paths) else 1)", "response": "Show inputs files in the repository."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nshowing output files in the repository.", "response": "def outputs(ctx, client, revision, paths):\n r\"\"\"Show output files in the repository.\n\n Files to show. If no files are given all output files are shown.\n \"\"\"\n graph = Graph(client)\n filter = graph.build(paths=paths, revision=revision)\n output_paths = graph.output_paths\n\n click.echo('\\n'.join(graph._format_path(path) for path in output_paths))\n\n if paths:\n if not output_paths:\n ctx.exit(1)\n\n from renku.models._datastructures import DirectoryTree\n tree = DirectoryTree.from_list(item.path for item in filter)\n\n for output in output_paths:\n if tree.get(output) is None:\n ctx.exit(1)\n return"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn list of valid context names.", "response": "def _context_names():\n \"\"\"Return list of valid context names.\"\"\"\n import inspect\n\n from renku.models import provenance\n from renku.models._jsonld import JSONLDMixin\n\n for name in dir(provenance):\n cls = getattr(provenance, name)\n if inspect.isclass(cls) and issubclass(cls, JSONLDMixin):\n yield name"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef print_context_names(ctx, param, value):\n if not 
value or ctx.resilient_parsing:\n return\n click.echo('\\n'.join(_context_names()))\n ctx.exit()", "response": "Print all possible types."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns JSON - LD string for given context name.", "response": "def _context_json(name):\n \"\"\"Return JSON-LD string for given context name.\"\"\"\n from renku.models import provenance\n\n cls = getattr(provenance, name)\n return {\n '@context': cls._jsonld_context,\n '@type': cls._jsonld_type,\n }"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef context(names):\n import json\n\n contexts = [_context_json(name) for name in set(names)]\n if contexts:\n click.echo(\n json.dumps(\n contexts[0] if len(contexts) == 1 else contexts,\n indent=2,\n )\n )", "response": "Show JSON - LD context for repository objects."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nlisting or manage workflows with subcommands.", "response": "def workflow(ctx, client):\n \"\"\"List or manage workflows with subcommands.\"\"\"\n if ctx.invoked_subcommand is None:\n from renku.models.refs import LinkReference\n\n names = defaultdict(list)\n for ref in LinkReference.iter_items(client, common_path='workflows'):\n names[ref.reference.name].append(ref.name)\n\n for path in client.workflow_path.glob('*.cwl'):\n click.echo(\n '{path}: {names}'.format(\n path=path.name,\n names=', '.join(\n click.style(_deref(name), fg='green')\n for name in names[path.name]\n ),\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndetect a workflow path if it is not passed.", "response": "def validate_path(ctx, param, value):\n \"\"\"Detect a workflow path if it is not passed.\"\"\"\n client = ctx.obj\n\n if value is None:\n from renku.models.provenance import ProcessRun\n activity = client.process_commit()\n\n if not isinstance(activity, ProcessRun):\n raise 
click.BadParameter('No tool was found.')\n\n return activity.path\n\n return value"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_name(client, name, path, force):\n from renku.models.refs import LinkReference\n LinkReference.create(client=client, name=_ref(name),\n force=force).set_reference(path)", "response": "Sets the name for remote path."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef rename(client, old, new, force):\n from renku.models.refs import LinkReference\n LinkReference(client=client, name=_ref(old)).rename(_ref(new), force=force)", "response": "Rename the workflow named old to new."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef remove(client, name):\n from renku.models.refs import LinkReference\n LinkReference(client=client, name=_ref(name)).delete()", "response": "Remove the remote named name."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating a workflow description for a file.", "response": "def create(client, output_file, revision, paths):\n \"\"\"Create a workflow description for a file.\"\"\"\n graph = Graph(client)\n outputs = graph.build(paths=paths, revision=revision)\n\n output_file.write(\n yaml.dump(\n ascwl(\n graph.ascwl(outputs=outputs),\n filter=lambda _, x: x is not None and x != [],\n basedir=os.path.dirname(getattr(output_file, 'name', '.')) or\n '.',\n ),\n default_flow_style=False\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef endpoint(ctx, config, verbose):\n if ctx.invoked_subcommand is None:\n # TODO default_endpoint = config.get('core', {}).get('default')\n for endpoint, values in config.get('endpoints', {}).items():\n # TODO is_default = default_endpoint == endpoint\n if not verbose:\n click.echo(endpoint)\n else:\n click.echo(\n 
'{endpoint}\\t{url}'.format(\n endpoint=endpoint, url=values.get('url', '')\n )\n )", "response": "Manage set of platform API endpoints."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _wrap_path_or_stream(method, mode): # noqa: D202\n\n def decorator(path_or_stream):\n \"\"\"Open the path if needed.\"\"\"\n if isinstance(path_or_stream, (str, Path)):\n return method(Path(path_or_stream).open(mode))\n return method(path_or_stream)\n\n return decorator", "response": "Wrap a method to open a path or stream at the end."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck your system and repository for potential problems.", "response": "def doctor(ctx, client):\n \"\"\"Check your system and repository for potential problems.\"\"\"\n click.secho('\\n'.join(textwrap.wrap(DOCTOR_INFO)) + '\\n', bold=True)\n\n from . import _checks\n\n is_ok = True\n for attr in _checks.__all__:\n is_ok &= getattr(_checks, attr)(client)\n\n if is_ok:\n click.secho('Everything seems to be ok.', fg='green')\n\n ctx.exit(0 if is_ok else 1)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate a new project.", "response": "def create(self, name=None, **kwargs):\n \"\"\"Create a new project.\n\n :param name: The name of the project.\n :returns: An instance of the newly create project.\n :rtype: renku.models.projects.Project\n \"\"\"\n data = self._client.api.create_project({'name': name})\n return self.Meta.model(data, client=self._client, collection=self)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef clean_key(self):\n key = self.cleaned_data[\"key\"]\n gpg = GPG(gnupghome=GNUPG_HOME)\n result = gpg.import_keys(key)\n if result.count == 0:\n raise forms.ValidationError(_(\"Invalid Key\"))\n return key", "response": "Validate the key contains an email address."} {"SOURCE": "codesearchnet", 
"instruction": "Can you tell what is the following Python 3 function doing\ndef from_list(cls, values):\n self = cls()\n for value in values:\n self.add(value)\n return self", "response": "Construct a tree from a list with paths."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a subtree if exists.", "response": "def get(self, value, default=None):\n \"\"\"Return a subtree if exists.\"\"\"\n path = value if isinstance(value, Path) else Path(str(value))\n subtree = self\n for part in path.parts:\n try:\n subtree = subtree[part]\n except KeyError:\n return default\n return subtree"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add(self, value):\n path = value if isinstance(value, Path) else Path(str(value))\n if path and path != path.parent:\n destination = self\n for part in path.parts:\n destination = destination.setdefault(part, DirectoryTree())", "response": "Create a safe directory from a value."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef default_endpoint_from_config(config, option=None):\n default_endpoint = config.get('core', {}).get('default')\n project_endpoint = config.get('project',\n {}).get('core',\n {}).get('default', default_endpoint)\n return Endpoint(\n option or project_endpoint or default_endpoint,\n default=default_endpoint,\n project=project_endpoint,\n option=option\n )", "response": "Return a default endpoint from a config object."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef password_prompt(ctx, param, value):\n if ctx.resilient_parsing:\n return\n\n if not value:\n if 'password_stdin' in ctx.params:\n with click.open_file('-') as fp:\n value = fp.read().strip('\\n')\n else:\n value = click.prompt('Password', hide_input=True)\n\n click.echo(value)\n return value", "response": 
"Prompt for password if password_stdin is not used."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ninstall completion for the current shell.", "response": "def install_completion(ctx, attr, value): # pragma: no cover\n \"\"\"Install completion for the current shell.\"\"\"\n import click_completion.core\n\n if not value or ctx.resilient_parsing:\n return value\n\n shell, path = click_completion.core.install()\n click.secho(\n '{0} completion installed in {1}'.format(shell, path), fg='green'\n )\n ctx.exit()"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn default endpoint if specified.", "response": "def default_endpoint(ctx, param, value):\n \"\"\"Return default endpoint if specified.\"\"\"\n if ctx.resilient_parsing:\n return\n\n config = ctx.obj['config']\n endpoint = default_endpoint_from_config(config, option=value)\n\n if endpoint is None:\n raise click.UsageError('No default endpoint found.')\n\n return endpoint"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncheck that all outputs have their siblings listed.", "response": "def check_siblings(graph, outputs):\n \"\"\"Check that all outputs have their siblings listed.\"\"\"\n siblings = set()\n for node in outputs:\n siblings |= graph.siblings(node)\n\n siblings = {node.path for node in siblings}\n missing = siblings - {node.path for node in outputs}\n\n if missing:\n msg = (\n 'Include the files above in the command '\n 'or use the --with-siblings option.'\n )\n raise click.ClickException(\n 'There are missing output siblings:\\n\\n'\n '\\t{0}\\n\\n{1}'.format(\n '\\n\\t'.join(click.style(path, fg='red') for path in missing),\n msg,\n ),\n )\n return outputs"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ninclude all missing siblings in graph.", "response": "def with_siblings(graph, outputs):\n \"\"\"Include all missing siblings.\"\"\"\n siblings = set()\n for 
node in outputs:\n siblings |= graph.siblings(node)\n return siblings"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ndisplaying pager only if it does not fit in one terminal screen.", "response": "def echo_via_pager(*args, **kwargs):\n \"\"\"Display pager only if it does not fit in one terminal screen.\n\n NOTE: The feature is available only on ``less``-based pager.\n \"\"\"\n try:\n restore = 'LESS' not in os.environ\n os.environ.setdefault('LESS', '-iXFR')\n click.echo_via_pager(*args, **kwargs)\n finally:\n if restore:\n os.environ.pop('LESS', None)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncheck if the first argument is an existing command.", "response": "def parse_args(self, ctx, args):\n \"\"\"Check if the first argument is an existing command.\"\"\"\n if args and args[0] in self.commands:\n args.insert(0, '')\n super(OptionalGroup, self).parse_args(ctx, args)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef execute(client, output_file, output_paths=None):\n output_paths = output_paths or set()\n\n import cwltool.factory\n from cwltool import workflow\n from cwltool.context import LoadingContext, RuntimeContext\n from cwltool.utils import visit_class\n\n def construct_tool_object(toolpath_object, *args, **kwargs):\n \"\"\"Fix missing locations.\"\"\"\n protocol = 'file://'\n\n def addLocation(d):\n if 'location' not in d and 'path' in d:\n d['location'] = protocol + d['path']\n\n visit_class(toolpath_object, ('File', 'Directory'), addLocation)\n return workflow.default_make_tool(toolpath_object, *args, **kwargs)\n\n argv = sys.argv\n sys.argv = ['cwltool']\n\n # Keep all environment variables.\n runtime_context = RuntimeContext(\n kwargs={\n 'rm_tmpdir': False,\n 'move_outputs': 'leave',\n 'preserve_entire_environment': True,\n }\n )\n loading_context = LoadingContext(\n kwargs={\n 
'construct_tool_object': construct_tool_object,\n }\n )\n\n factory = cwltool.factory.Factory(\n loading_context=loading_context,\n runtime_context=runtime_context,\n )\n process = factory.make(os.path.relpath(str(output_file)))\n outputs = process()\n\n sys.argv = argv\n\n # Move outputs to correct location in the repository.\n output_dirs = process.factory.executor.output_dirs\n\n def remove_prefix(location, prefix='file://'):\n if location.startswith(prefix):\n return location[len(prefix):]\n return location\n\n locations = {\n remove_prefix(output['location'])\n for output in outputs.values()\n }\n\n with progressbar(\n locations,\n label='Moving outputs',\n ) as bar:\n for location in bar:\n for output_dir in output_dirs:\n if location.startswith(output_dir):\n output_path = location[len(output_dir):].lstrip(\n os.path.sep\n )\n destination = client.path / output_path\n if destination.is_dir():\n shutil.rmtree(str(destination))\n destination = destination.parent\n shutil.move(location, str(destination))\n continue\n\n unchanged_paths = client.remove_unmodified(output_paths)\n if unchanged_paths:\n click.echo(\n 'Unchanged files:\\n\\n\\t{0}'.format(\n '\\n\\t'.join(\n click.style(path, fg='yellow') for path in unchanged_paths\n )\n )\n )", "response": "Execute the cwltool workflow using cwltool library."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef pull(client, revision, auto_login):\n registry_url = detect_registry_url(client, auto_login=auto_login)\n\n repo = client.repo\n sha = repo.rev_parse(revision).hexsha\n short_sha = repo.git.rev_parse(sha, short=7)\n\n image = '{registry}:{short_sha}'.format(\n registry=registry_url.image, short_sha=short_sha\n )\n\n result = subprocess.run(['docker', 'image', 'pull', image])\n if result.returncode != 0:\n raise click.ClickException(\n 'The image \"{image}\" was not pulled.\\n\\n'\n 'Push the repository to the server or build the image 
manually:\\n'\n '\\n\\tdocker build -t {image} .'.format(image=image)\n )", "response": "Pull an existing image from the project registry."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nformatting dataset files with a tabular output.", "response": "def tabular(client, records):\n \"\"\"Format dataset files with a tabular output.\n\n :param client: LocalClient instance.\n :param records: Filtered collection.\n \"\"\"\n from renku.models._tabulate import tabulate\n\n echo_via_pager(\n tabulate(\n records,\n headers=OrderedDict((\n ('added', None),\n ('authors_csv', 'authors'),\n ('dataset', None),\n ('full_path', 'path'),\n )),\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef jsonld(client, records):\n from renku.models._json import dumps\n from renku.models._jsonld import asjsonld\n\n data = [asjsonld(record) for record in records]\n echo_via_pager(dumps(data, indent=2))", "response": "Format dataset files as JSON - LD."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nhandle exceptions using Sentry.", "response": "def _handle_sentry(self):\n \"\"\"Handle exceptions using Sentry.\"\"\"\n from sentry_sdk import capture_exception, configure_scope\n from sentry_sdk.utils import capture_internal_exceptions\n\n with configure_scope() as scope:\n with capture_internal_exceptions():\n from git import Repo\n from renku.cli._git import get_git_home\n from renku.models.datasets import Author\n\n user = Author.from_git(Repo(get_git_home()))\n\n scope.user = {'name': user.name, 'email': user.email}\n\n event_id = capture_exception()\n click.echo(\n _BUG + 'Recorded in Sentry with ID: {0}\\n'.format(event_id),\n err=True,\n )\n raise"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _handle_github(self):\n value = click.prompt(\n _BUG + click.style(\n '1. 
Open an issue by typing \"open\";\\n',\n fg='green',\n ) + click.style(\n '2. Print human-readable information by typing '\n '\"print\";\\n',\n fg='yellow',\n ) + click.style(\n '3. See the full traceback without submitting details '\n '(default: \"ignore\").\\n\\n',\n fg='red',\n ) + 'Please select an action by typing its name',\n type=click.Choice([\n 'open',\n 'print',\n 'ignore',\n ], ),\n default='ignore',\n )\n getattr(self, '_process_' + value)()", "response": "Handle exception and submit it as GitHub issue."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _format_issue_url(self):\n query = urlencode({\n 'title': self._format_issue_title(),\n 'body': self._format_issue_body(),\n })\n return self.REPO_URL + self.ISSUE_SUFFIX + '?' + query", "response": "Format full issue URL."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _process_open(self):\n click.launch(self._format_issue_url())\n if not click.confirm('Did it work?', default=True):\n click.echo()\n self._process_print()\n click.secho(\n '\\nOpen the line manually and copy the text above\\n',\n fg='yellow'\n )\n click.secho(\n ' ' + self.REPO_URL + self.ISSUE_SUFFIX + '\\n', bold=True\n )", "response": "Open link in a browser."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\npasses client from the current context to the decorated command.", "response": "def pass_local_client(\n method=None,\n clean=None,\n up_to_date=None,\n commit=None,\n ignore_std_streams=True,\n lock=None,\n):\n \"\"\"Pass client from the current context to the decorated command.\"\"\"\n if method is None:\n return functools.partial(\n pass_local_client,\n clean=clean,\n up_to_date=up_to_date,\n commit=commit,\n ignore_std_streams=ignore_std_streams,\n lock=lock,\n )\n\n def new_func(*args, **kwargs):\n ctx = click.get_current_context()\n client = 
ctx.ensure_object(LocalClient)\n stack = contextlib.ExitStack()\n\n # Handle --isolation option:\n if get_git_isolation():\n client = stack.enter_context(client.worktree())\n\n transaction = client.transaction(\n clean=clean,\n up_to_date=up_to_date,\n commit=commit,\n ignore_std_streams=ignore_std_streams\n )\n stack.enter_context(transaction)\n\n if lock or (lock is None and commit):\n stack.enter_context(client.lock)\n\n with stack:\n result = ctx.invoke(method, client, *args, **kwargs)\n return result\n\n return functools.update_wrapper(new_func, method)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _mapped_std_streams(lookup_paths, streams=('stdin', 'stdout', 'stderr')):\n # FIXME add device number too\n standard_inos = {}\n for stream in streams:\n try:\n stream_stat = os.fstat(getattr(sys, stream).fileno())\n key = stream_stat.st_dev, stream_stat.st_ino\n standard_inos[key] = stream\n except Exception: # FIXME UnsupportedOperation\n pass\n # FIXME if not getattr(sys, stream).istty()\n\n def stream_inos(paths):\n \"\"\"Yield tuples with stats and path.\"\"\"\n for path in paths:\n try:\n stat = os.stat(path)\n key = (stat.st_dev, stat.st_ino)\n if key in standard_inos:\n yield standard_inos[key], path\n except FileNotFoundError: # pragma: no cover\n pass\n\n return dict(stream_inos(lookup_paths)) if standard_inos else {}", "response": "Get a mapping of standard streams to given paths."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _clean_streams(repo, mapped_streams):\n for stream_name in ('stdout', 'stderr'):\n stream = mapped_streams.get(stream_name)\n if not stream:\n continue\n\n path = os.path.relpath(stream, start=repo.working_dir)\n if (path, 0) not in repo.index.entries:\n os.remove(stream)\n else:\n blob = repo.index.entries[(path, 0)].to_blob(repo)\n with open(path, 'wb') as fp:\n 
fp.write(blob.data_stream.read())", "response": "Clean mapped standard streams."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _expand_directories(paths):\n for path in paths:\n path_ = Path(path)\n if path_.is_dir():\n for expanded in path_.rglob('*'):\n yield str(expanded)\n else:\n yield path", "response": "Expand directory with all files it contains."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(client, name):\n from renku.models.datasets import Author\n\n with client.with_dataset(name=name) as dataset:\n click.echo('Creating a dataset ... ', nl=False)\n author = Author.from_git(client.repo)\n if author not in dataset.authors:\n dataset.authors.append(author)\n\n click.secho('OK', fg='green')", "response": "Create an empty dataset in the current repo."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds data to a dataset.", "response": "def add(client, name, urls, link, relative_to, target, force):\n \"\"\"Add data to a dataset.\"\"\"\n try:\n with client.with_dataset(name=name) as dataset:\n target = target if target else None\n with progressbar(urls, label='Adding data to dataset') as bar:\n for url in bar:\n client.add_data_to_dataset(\n dataset,\n url,\n link=link,\n target=target,\n relative_to=relative_to,\n force=force,\n )\n except FileNotFoundError:\n raise BadParameter('Could not process {0}'.format(url))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nlist files in dataset.", "response": "def ls_files(client, names, authors, include, exclude, format):\n \"\"\"List files in dataset.\"\"\"\n records = _filter(\n client, names=names, authors=authors, include=include, exclude=exclude\n )\n\n DATASET_FILES_FORMATS[format](client, records)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef unlink(client, name, 
include, exclude, yes):\n dataset = client.load_dataset(name=name)\n records = _filter(\n client, names=[dataset.name], include=include, exclude=exclude\n )\n\n if not yes and records:\n prompt_text = (\n 'You are about to remove '\n 'following from \"{0}\" dataset.\\n'.format(dataset.name) +\n '\\n'.join([str(record.full_path)\n for record in records]) + '\\nDo you wish to continue?'\n )\n click.confirm(WARNING + prompt_text, abort=True)\n\n if records:\n for item in records:\n dataset.unlink_file(item.path)\n\n dataset.to_yaml()\n click.secho('OK', fg='green')", "response": "Remove matching files from a dataset."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nchecking if file matches one of include filters and not in exclude filters.", "response": "def _include_exclude(file_path, include=None, exclude=None):\n \"\"\"Check if file matches one of include filters and not in exclude filter.\n\n :param file_path: Path to the file.\n :param include: Tuple containing patterns to which include from result.\n :param exclude: Tuple containing patterns to which exclude from result.\n \"\"\"\n if exclude is not None and exclude:\n for pattern in exclude:\n if file_path.match(pattern):\n return False\n\n if include is not None and include:\n for pattern in include:\n if file_path.match(pattern):\n return True\n return False\n\n return True"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _filter(client, names=None, authors=None, include=None, exclude=None):\n if isinstance(authors, str):\n authors = set(authors.split(','))\n\n if isinstance(authors, list) or isinstance(authors, tuple):\n authors = set(authors)\n\n records = []\n for path_, dataset in client.datasets.items():\n if not names or dataset.name in names:\n for file_ in dataset.files.values():\n file_.dataset = dataset.name\n\n path_ = file_.full_path.relative_to(client.path)\n match = _include_exclude(path_, include, 
exclude)\n\n if authors:\n match = match and authors.issubset({\n author.name\n for author in file_.authors\n })\n\n if match:\n records.append(file_)\n\n return sorted(records, key=lambda file_: file_.added)", "response": "Filter dataset files by specified filters."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a tuple with config section and key.", "response": "def _split_section_and_key(key):\n \"\"\"Return a tuple with config section and key.\"\"\"\n parts = key.split('.')\n if len(parts) > 1:\n return 'renku \"{0}\"'.format(parts[0]), '.'.join(parts[1:])\n return 'renku', key"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef config(client, key, value):\n if value is None:\n cfg = client.repo.config_reader()\n click.echo(cfg.get_value(*_split_section_and_key(key)))\n else:\n with client.repo.config_writer() as cfg:\n section, config_key = _split_section_and_key(key)\n cfg.set_value(section, config_key, value)\n click.echo(value)", "response": "Get and set Renku repository and global options."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nchecks if a url points to a git repository.", "response": "def check_for_git_repo(url):\n \"\"\"Check if a url points to a git repository.\"\"\"\n u = parse.urlparse(url)\n is_git = False\n\n if os.path.splitext(u.path)[1] == '.git':\n is_git = True\n elif u.scheme in ('', 'file'):\n from git import InvalidGitRepositoryError, Repo\n\n try:\n Repo(u.path, search_parent_directories=True)\n is_git = True\n except InvalidGitRepositoryError:\n is_git = False\n return is_git"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nprinting RENKU environment variables.", "response": "def env(config, endpoint):\n \"\"\"Print RENKU environment variables.\n\n Run this command to configure your Renku client:\n\n $ eval \"$(renku env)\"\n\n \"\"\"\n access_token = 
config['endpoints'][endpoint]['token']['access_token']\n click.echo('export {0}={1}'.format('RENKU_ENDPOINT', endpoint))\n click.echo('export {0}={1}'.format('RENKU_ACCESS_TOKEN', access_token))\n click.echo('# Run this command to configure your Renku client:')\n click.echo('# eval \"$(renku env)\"')"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck if the current version of renku on PyPI is the latest version of renku on PyPI.", "response": "def check_version(ctx, param, value):\n \"\"\"Check for latest version of renku on PyPI.\"\"\"\n if ctx.resilient_parsing:\n return\n\n if not value and ctx.invoked_subcommand != 'run':\n ctx.call_on_close(_check_version)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nloads services configuration from oslo config object. It reads ServiceManager and Service configuration options from an oslo_config.ConfigOpts() object. Also It registers a ServiceManager hook to reload the configuration file on reload in the master process and in all children. And then when each child start or reload, the configuration options are logged if the oslo config option 'log_options' is True. On children, the configuration file is reloaded before the running the application reload method. Options currently supported on ServiceManager and Service: * graceful_shutdown_timeout :param service_manager: ServiceManager instance :type service_manager: cotyledon.ServiceManager :param conf: Oslo Config object :type conf: oslo_config.ConfigOpts() :param reload_method: reload or mutate the config files :type reload_method: str \"reload/mutate\"", "response": "def setup(service_manager, conf, reload_method=\"reload\"):\n \"\"\"Load services configuration from oslo config object.\n\n It reads ServiceManager and Service configuration options from an\n oslo_config.ConfigOpts() object. 
Also It registers a ServiceManager hook to\n reload the configuration file on reload in the master process and in all\n children. And then when each child start or reload, the configuration\n options are logged if the oslo config option 'log_options' is True.\n\n On children, the configuration file is reloaded before the running the\n application reload method.\n\n Options currently supported on ServiceManager and Service:\n * graceful_shutdown_timeout\n\n :param service_manager: ServiceManager instance\n :type service_manager: cotyledon.ServiceManager\n :param conf: Oslo Config object\n :type conf: oslo_config.ConfigOpts()\n :param reload_method: reload or mutate the config files\n :type reload_method: str \"reload/mutate\"\n \"\"\"\n conf.register_opts(service_opts)\n\n # Set cotyledon options from oslo config options\n _load_service_manager_options(service_manager, conf)\n\n def _service_manager_reload():\n _configfile_reload(conf, reload_method)\n _load_service_manager_options(service_manager, conf)\n\n if os.name != \"posix\":\n # NOTE(sileht): reloading can't be supported oslo.config is not pickle\n # But we don't care SIGHUP is not support on window\n return\n\n service_manager.register_hooks(\n on_new_worker=functools.partial(\n _new_worker_hook, conf, reload_method),\n on_reload=_service_manager_reload)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nregisters hooks on the master process.", "response": "def register_hooks(self, on_terminate=None, on_reload=None,\n on_new_worker=None, on_dead_worker=None):\n \"\"\"Register hook methods\n\n This can be callable multiple times to add more hooks, hooks are\n executed in added order. 
If a hook raised an exception, next hooks\n will be not executed.\n\n :param on_terminate: method called on SIGTERM\n :type on_terminate: callable()\n :param on_reload: method called on SIGHUP\n :type on_reload: callable()\n :param on_new_worker: method called in the child process when this one\n is ready\n :type on_new_worker: callable(service_id, worker_id, service_obj)\n :param on_new_worker: method called when a child died\n :type on_new_worker: callable(service_id, worker_id, exit_code)\n\n If window support is planned, hooks callable must support\n to be pickle.pickle(). See CPython multiprocessing module documentation\n for more detail.\n \"\"\"\n\n if on_terminate is not None:\n _utils.check_callable(on_terminate, 'on_terminate')\n self._hooks['terminate'].append(on_terminate)\n if on_reload is not None:\n _utils.check_callable(on_reload, 'on_reload')\n self._hooks['reload'].append(on_reload)\n if on_new_worker is not None:\n _utils.check_callable(on_new_worker, 'on_new_worker')\n self._hooks['new_worker'].append(on_new_worker)\n if on_dead_worker is not None:\n _utils.check_callable(on_dead_worker, 'on_dead_worker')\n self._hooks['dead_worker'].append(on_dead_worker)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nadd a new service to the ServiceManager.", "response": "def add(self, service, workers=1, args=None, kwargs=None):\n \"\"\"Add a new service to the ServiceManager\n\n :param service: callable that return an instance of :py:class:`Service`\n :type service: callable\n :param workers: number of processes/workers for this service\n :type workers: int\n :param args: additional positional arguments for this service\n :type args: tuple\n :param kwargs: additional keywoard arguments for this service\n :type kwargs: dict\n\n :return: a service id\n :rtype: uuid.uuid4\n \"\"\"\n _utils.check_callable(service, 'service')\n _utils.check_workers(workers, 1)\n service_id = uuid.uuid4()\n self._services[service_id] = 
_service.ServiceConfig(\n service_id, service, workers, args, kwargs)\n return service_id"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreconfiguring a service in ServiceManager with the given number of workers.", "response": "def reconfigure(self, service_id, workers):\n \"\"\"Reconfigure a service registered in ServiceManager\n\n :param service_id: the service id\n :type service_id: uuid.uuid4\n :param workers: number of processes/workers for this service\n :type workers: int\n :raises: ValueError\n \"\"\"\n try:\n sc = self._services[service_id]\n except KeyError:\n raise ValueError(\"%s service id doesn't exists\" % service_id)\n else:\n _utils.check_workers(workers, minimum=(1 - sc.workers))\n sc.workers = workers\n # Reset forktimes to respawn services quickly\n self._forktimes = []"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nstarts and supervise services workers", "response": "def run(self):\n \"\"\"Start and supervise services workers\n\n This method will start and supervise all children processes\n until the master process asked to shutdown by a SIGTERM.\n\n All spawned processes are part of the same unix process group.\n \"\"\"\n\n self._systemd_notify_once()\n self._child_supervisor = _utils.spawn(self._child_supervisor_thread)\n self._wait_forever()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _reload(self):\n self._run_hooks('reload')\n\n # Reset forktimes to respawn services quickly\n self._forktimes = []\n signal.signal(signal.SIGHUP, signal.SIG_IGN)\n os.killpg(0, signal.SIGHUP)\n signal.signal(signal.SIGHUP, self._signal_catcher)", "response": "reload all children\n posix only"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the last died worker information or None", "response": "def _get_last_worker_died(self):\n \"\"\"Return the last died worker information or 
None\"\"\"\n for service_id in list(self._running_services.keys()):\n # We copy the list to clean the orignal one\n processes = list(self._running_services[service_id].items())\n for process, worker_id in processes:\n if not process.is_alive():\n self._run_hooks('dead_worker', service_id, worker_id,\n process.exitcode)\n if process.exitcode < 0:\n sig = _utils.signal_to_name(process.exitcode)\n LOG.info('Child %(pid)d killed by signal %(sig)s',\n dict(pid=process.pid, sig=sig))\n else:\n LOG.info('Child %(pid)d exited with status %(code)d',\n dict(pid=process.pid, code=process.exitcode))\n del self._running_services[service_id][process]\n return service_id, worker_id"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsends notification once to Systemd that the service is ready.", "response": "def _systemd_notify_once():\n \"\"\"Send notification once to Systemd that service is ready.\n\n Systemd sets NOTIFY_SOCKET environment variable with the name of the\n socket listening for notifications from services.\n This method removes the NOTIFY_SOCKET environment variable to ensure\n notification is sent only once.\n \"\"\"\n\n notify_socket = os.getenv('NOTIFY_SOCKET')\n if notify_socket:\n if notify_socket.startswith('@'):\n # abstract namespace socket\n notify_socket = '\\0%s' % notify_socket[1:]\n sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)\n with contextlib.closing(sock):\n try:\n sock.connect(notify_socket)\n sock.sendall(b'READY=1')\n del os.environ['NOTIFY_SOCKET']\n except EnvironmentError:\n LOG.debug(\"Systemd notification failed\", exc_info=True)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef returns(returntype):\n returntype = T.TypeFactory(returntype)\n def _decorator(func):\n # @returns decorator\n if U.has_fun_prop(func, \"returntype\"):\n raise ValueError(\"Cannot set return type twice\")\n U.set_fun_prop(func, \"returntype\", 
returntype)\n return _wrap(func)\n return _decorator", "response": "A function decorator to specify return type of the function."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef paranoidconfig(**kwargs):\n def _decorator(func):\n for k,v in kwargs.items():\n Settings._set(k, v, function=func)\n return _wrap(func)\n return _decorator", "response": "A function decorator to set a local setting."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nensuring v is a valid Type.", "response": "def TypeFactory(v):\n \"\"\"Ensure `v` is a valid Type.\n\n This function is used to convert user-specified types into\n internal types for the verification engine. It allows Type\n subclasses, Type subclass instances, Python type, and user-defined\n classes to be passed. Returns an instance of the type of `v`.\n\n Users should never access this function directly.\n \"\"\"\n\n if v is None:\n return Nothing()\n elif issubclass(type(v), Type):\n return v\n elif issubclass(v, Type):\n return v()\n elif issubclass(type(v), type):\n return Generic(v)\n else:\n raise InvalidTypeError(\"Invalid type %s\" % v)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef profile_v3_to_proofs(profile, fqdn, refresh=False, address = None):\n\n proofs = []\n\n try:\n test = profile.items()\n except:\n return proofs\n\n if 'account' in profile:\n accounts = profile['account']\n else:\n return proofs\n\n for account in accounts:\n\n # skip if proof service is not supported\n if 'service' in account and account['service'].lower() not in SITES:\n continue\n\n if 'proofType' in account and account['proofType'] == \"http\":\n\n try:\n proof = {\"service\": account['service'],\n \"proof_url\": account['proofUrl'],\n \"identifier\": account['identifier'],\n \"valid\": False}\n\n if is_valid_proof(account['service'], account['identifier'],\n fqdn, account['proofUrl'], address = 
address):\n\n proof[\"valid\"] = True\n\n proofs.append(proof)\n except Exception as e:\n pass\n\n return proofs", "response": "Convert profile format v3 to proofs"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef has_fun_prop(f, k):\n if not hasattr(f, _FUN_PROPS):\n return False\n if not isinstance(getattr(f, _FUN_PROPS), dict):\n return False\n if k not in getattr(f, _FUN_PROPS).keys():\n return False\n return True", "response": "Test whether function f has property k."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_fun_prop(f, k):\n if not has_fun_prop(f, k):\n raise InternalError(\"Function %s has no property %s\" % (str(f), k))\n return getattr(f, _FUN_PROPS)[k]", "response": "Get the value of property k from function f."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets the value of property k to v in function f.", "response": "def set_fun_prop(f, k, v):\n \"\"\"Set the value of property `k` to be `v` in function `f`.\n\n We define properties as annotations added to a function throughout\n the process of defining a function for verification, e.g. the\n argument types. 
This sets function `f`'s property named `k` to be\n value `v`.\n\n Users should never access this function directly.\n \"\"\"\n if not hasattr(f, _FUN_PROPS):\n setattr(f, _FUN_PROPS, {})\n if not isinstance(getattr(f, _FUN_PROPS), dict):\n raise InternalError(\"Invalid properties dictionary for %s\" % str(f))\n getattr(f, _FUN_PROPS)[k] = v"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the name of the function f s keyword argument parameter if it exists otherwise None", "response": "def get_func_posargs_name(f):\n \"\"\"Returns the name of the function f's keyword argument parameter if it exists, otherwise None\"\"\"\n sigparams = inspect.signature(f).parameters\n for p in sigparams:\n if sigparams[p].kind == inspect.Parameter.VAR_POSITIONAL:\n return sigparams[p].name\n return None"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the name of the function f s keyword argument parameter if it exists otherwise None", "response": "def get_func_kwargs_name(f):\n \"\"\"Returns the name of the function f's keyword argument parameter if it exists, otherwise None\"\"\"\n sigparams = inspect.signature(f).parameters\n for p in sigparams:\n if sigparams[p].kind == inspect.Parameter.VAR_KEYWORD:\n return sigparams[p].name\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef set(**kwargs):\n for k,v in kwargs.items():\n Settings._set(k, v)", "response": "Set configuration parameters. 
\n\n Pass keyword arguments for the parameters you would like to set."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _set(name, value, function=None):\n\n if name not in Settings.__global_setting_values.keys():\n raise NameError(\"Invalid setting value\")\n if name in Settings.__validate_settings.keys():\n if not Settings.__validate_settings[name](value):\n raise ValueError(\"Invalid setting: %s = %s\" %\n (name, value))\n # Set the setting either globally (if no function is passed)\n # or else locally to the function (if a function is passed).\n if function:\n if not hasattr(function, Settings.FUNCTION_SETTINGS_NAME):\n setattr(function, Settings.FUNCTION_SETTINGS_NAME, {})\n # Test if this wraps something. TODO this will fail\n # for nested decorators. This also assumes that, if\n # there is a wrapped function (super wraps sub), that\n # if super doesn't have settings, then sup doesn't\n # either. (This assumption is valid for paranoid\n # decorators since it properly uses update_wrapper,\n # but may not be valid for other decorators.)\n if hasattr(function, \"__wrapped__\"):\n setattr(function.__wrapped__,\n Settings.FUNCTION_SETTINGS_NAME,\n getattr(function, Settings.FUNCTION_SETTINGS_NAME))\n getattr(function, Settings.FUNCTION_SETTINGS_NAME)[name] = value\n else:\n Settings.__global_setting_values[name] = value", "response": "Internal method to set a value for a specific config parameter."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets a setting. a name should be the name of the setting to look for.", "response": "def get(name, function=None):\n \"\"\"Get a setting.\n\n `name` should be the name of the setting to look for. 
If the\n optional argument `function` is passed, this will look for a\n value local to the function before retrieving the global\n value.\n \"\"\"\n if function is not None:\n if hasattr(function, Settings.FUNCTION_SETTINGS_NAME):\n if name in getattr(function, Settings.FUNCTION_SETTINGS_NAME):\n return getattr(function, Settings.FUNCTION_SETTINGS_NAME)[name]\n return Settings.__global_setting_values[name]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef timeit(method):\n\n def timed(*arguments, **kw):\n ts = time.time()\n result = method(*arguments, **kw)\n te = time.time()\n\n sys.stdout.write('Time: %r %2.2f sec\\n' % (method.__name__.strip(\"_\"), te - ts))\n sys.stdout.write('------------------------------------\\n')\n sys.stdout.flush()\n return result\n\n return timed", "response": "Decorator to compute the execution time of a function\n "} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreads the undirected network file and sets self. 
g", "response": "def __read_graph(self, network_filename):\n \"\"\"\n Read .ncol network file\n\n :param network_filename: complete path for the .ncol file\n :return: an undirected network\n \"\"\"\n self.g = nx.read_edgelist(network_filename, nodetype=int)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef execute(self):\n\n for n in self.g.nodes():\n self.g.node[n]['communities'] = [n]\n\n all_communities = {}\n\n for ego in tqdm.tqdm(nx.nodes(self.g), ncols=35, bar_format='Exec: {l_bar}{bar}'):\n\n ego_minus_ego = nx.ego_graph(self.g, ego, 1, False)\n community_to_nodes = self.__overlapping_label_propagation(ego_minus_ego, ego)\n\n # merging phase\n for c in community_to_nodes.keys():\n if len(community_to_nodes[c]) > self.min_community_size:\n actual_community = community_to_nodes[c]\n all_communities = self.__merge_communities(all_communities, actual_community)\n\n # write output on file\n if self.file_output:\n with open(self.file_output, \"w\") as out_file_com:\n for idc, c in enumerate(all_communities.keys()):\n out_file_com.write(\"%d\\t%s\\n\" % (idc, str(sorted(c))))\n\n return list(all_communities.keys())", "response": "Execute Demon algorithm\nResourceException"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_defined_enums(conn, schema):\n sql = \"\"\"\n SELECT\n pg_catalog.format_type(t.oid, NULL),\n ARRAY(SELECT enumlabel\n FROM pg_catalog.pg_enum\n WHERE enumtypid = t.oid)\n FROM pg_catalog.pg_type t\n LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace\n WHERE\n t.typtype = 'e'\n AND n.nspname = %s\n \"\"\"\n return {r[0]: frozenset(r[1]) for r in conn.execute(sql, (schema,))}", "response": "Returns a dict mapping PostgreSQL enumeration types to their set of their\nalues defined values."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_declared_enums(metadata, 
schema, default):\n types = set(column.type\n for table in metadata.tables.values()\n for column in table.columns\n if (isinstance(column.type, sqlalchemy.Enum) and\n schema == (column.type.schema or default)))\n return {t.name: frozenset(t.enums) for t in types}", "response": "Returns a dict mapping SQLAlchemy enumeration types to the set of their\n declared values."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef compare_enums(autogen_context, upgrade_ops, schema_names):\n to_add = set()\n for schema in schema_names:\n default = autogen_context.dialect.default_schema_name\n if schema is None:\n schema = default\n\n defined = get_defined_enums(autogen_context.connection, schema)\n declared = get_declared_enums(autogen_context.metadata, schema, default)\n for name, new_values in declared.items():\n old_values = defined.get(name)\n # Alembic will handle creation of the type in this migration, so\n # skip undefined names.\n if name in defined and new_values.difference(old_values):\n to_add.add((schema, name, old_values, new_values))\n\n for schema, name, old_values, new_values in sorted(to_add):\n op = SyncEnumValuesOp(schema, name, old_values, new_values)\n upgrade_ops.ops.append(op)", "response": "Compare the declared and declared Enums with the schema_names and generate SyncEnumValuesOp migrations."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef delete(self, key, **opts):\n key, store = self._expand_opts(key, opts)\n try:\n del store[key]\n except KeyError:\n pass", "response": "Remove a key from the cache."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef expire_at(self, key, expiry, **opts):\n key, store = self._expand_opts(key, opts)\n data = store.get(key)\n if data is not None:\n data = list(data)\n data[EXPIRY_INDEX] = expiry\n store[key] = tuple(data)\n else:\n raise KeyError(key)", "response": "Set the 
explicit unix expiry time of a key."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef expire(self, key, max_age, **opts):\n self.expire_at(key, time() + max_age, **opts)", "response": "Set the maximum age of a given key in seconds."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets the time - to - live of a given key ; None if not set.", "response": "def ttl(self, key, **opts):\n \"\"\"Get the time-to-live of a given key; None if not set.\"\"\"\n key, store = self._expand_opts(key, opts)\n if hasattr(store, 'ttl'):\n return store.ttl(key)\n data = store.get(key)\n if data is None:\n return None\n expiry = data[EXPIRY_INDEX]\n if expiry is not None:\n return max(0, expiry - time()) or None"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef exists(self, key, **opts):\n key, store = self._expand_opts(key, opts)\n data = store.get(key)\n # Note that we do not actually delete the thing here as the max_age\n # just for this call may have triggered a False.\n if not data or self._has_expired(data, opts):\n return False\n return True", "response": "Return if a key exists in the cache."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _destroy(self):\n self.unuse_region()\n\n if self._rlist is not None:\n # Actual client count, which doesn't include the reference kept by the manager, nor ours\n # as we are about to be deleted\n try:\n if len(self._rlist) == 0:\n # Free all resources associated with the mapped file\n self._manager._fdict.pop(self._rlist.path_or_fd())\n # END remove regions list from manager\n except (TypeError, KeyError):\n # sometimes, during shutdown, getrefcount is None. 
Its possible\n # to re-import it, however, its probably better to just ignore\n # this python problem (for now).\n # The next step is to get rid of the error prone getrefcount alltogether.\n pass", "response": "Destruction code to decrement counters"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _copy_from(self, rhs):\n self._manager = rhs._manager\n self._rlist = type(rhs._rlist)(rhs._rlist)\n self._region = rhs._region\n self._ofs = rhs._ofs\n self._size = rhs._size\n\n for region in self._rlist:\n region.increment_client_count()\n\n if self._region is not None:\n self._region.increment_client_count()", "response": "Copy all data from rhs into this instance handles usage count"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef use_region(self, offset=0, size=0, flags=0):\n need_region = True\n man = self._manager\n fsize = self._rlist.file_size()\n size = min(size or fsize, man.window_size() or fsize) # clamp size to window size\n\n if self._region is not None:\n if self._region.includes_ofs(offset):\n need_region = False\n else:\n self.unuse_region()\n # END handle existing region\n # END check existing region\n\n # offset too large ?\n if offset >= fsize:\n return self\n # END handle offset\n\n if need_region:\n self._region = man._obtain_region(self._rlist, offset, size, flags, False)\n self._region.increment_client_count()\n # END need region handling\n\n self._ofs = offset - self._region._b\n self._size = min(size, self._region.ofs_end() - offset)\n\n return self", "response": "Assure we point to a valid region for this file."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a buffer object which allows access to our memory region from our offset to the window size.", "response": "def buffer(self):\n \"\"\"Return a buffer object which allows access to our memory 
region from our offset\n to the window size. Please note that it might be smaller than you requested when calling use_region()\n\n **Note:** You can only obtain a buffer if this instance is_valid() !\n\n **Note:** buffers should not be cached passed the duration of your access as it will\n prevent resources from being freed even though they might not be accounted for anymore !\"\"\"\n return buffer(self._region.buffer(), self._ofs, self._size)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef includes_ofs(self, ofs):\n # unroll methods\n return (self._region._b + self._ofs) <= ofs < (self._region._b + self._ofs + self._size)", "response": "Returns True if the given absolute offset is contained in the cursors\n current region\n "} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef path(self):\n if isinstance(self._rlist.path_or_fd(), int):\n raise ValueError(\"Path queried although mapping was applied to a file descriptor\")\n # END handle type\n return self._rlist.path_or_fd()", "response": "Returns the path of the underlying mapped file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the file descriptor used to create the underlying mapping.", "response": "def fd(self):\n \"\"\":return: file descriptor used to create the underlying mapping.\n\n **Note:** it is not required to be valid anymore\n :raise ValueError: if the mapping was not created by a file descriptor\"\"\"\n if isinstance(self._rlist.path_or_fd(), string_types()):\n raise ValueError(\"File descriptor queried although mapping was generated from path\")\n # END handle type\n return self._rlist.path_or_fd()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _collect_lru_region(self, size):\n num_found = 0\n while (size == 0) or (self._memory_size + size > 
self._max_memory_size):\n lru_region = None\n lru_list = None\n for regions in self._fdict.values():\n for region in regions:\n # check client count - if it's 1, it's just us\n if (region.client_count() == 1 and\n (lru_region is None or region._uc < lru_region._uc)):\n lru_region = region\n lru_list = regions\n # END update lru_region\n # END for each region\n # END for each regions list\n\n if lru_region is None:\n break\n # END handle region not found\n\n num_found += 1\n del(lru_list[lru_list.index(lru_region)])\n lru_region.increment_client_count(-1)\n self._memory_size -= lru_region.size()\n self._handle_count -= 1\n # END while there is more memory to free\n return num_found", "response": "Collect the least - recently used region and remove the oldest one."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _obtain_region(self, a, offset, size, flags, is_recursive):\n if self._memory_size + size > self._max_memory_size:\n self._collect_lru_region(size)\n # END handle collection\n\n r = None\n if a:\n assert len(a) == 1\n r = a[0]\n else:\n try:\n r = self.MapRegionCls(a.path_or_fd(), 0, sys.maxsize, flags)\n except Exception:\n # apparently we are out of system resources or hit a limit\n # As many more operations are likely to fail in that condition (\n # like reading a file from disk, etc) we free up as much as possible\n # As this invalidates our insert position, we have to recurse here\n if is_recursive:\n # we already tried this, and still have no success in obtaining\n # a mapping. 
This is an exception, so we propagate it\n raise\n # END handle existing recursion\n self._collect_lru_region(0)\n return self._obtain_region(a, offset, size, flags, True)\n # END handle exceptions\n\n self._handle_count += 1\n self._memory_size += r.size()\n a.append(r)\n # END handle array\n\n assert r.includes_ofs(offset)\n return r", "response": "Helper method to obtain a new region from a list of regions."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate a new cursor pointing to the given path or file descriptor.", "response": "def make_cursor(self, path_or_fd):\n \"\"\"\n :return: a cursor pointing to the given path or file descriptor.\n It can be used to map new regions of the file into memory\n\n **Note:** if a file descriptor is given, it is assumed to be open and valid,\n but may be closed afterwards. To refer to the same file, you may reuse\n your existing file descriptor, but keep in mind that new windows can only\n be mapped as long as it stays valid. 
This is why the using actual file paths\n are preferred unless you plan to keep the file descriptor open.\n\n **Note:** file descriptors are problematic as they are not necessarily unique, as two\n different files opened and closed in succession might have the same file descriptor id.\n\n **Note:** Using file descriptors directly is faster once new windows are mapped as it\n prevents the file to be opened again just for the purpose of mapping it.\"\"\"\n regions = self._fdict.get(path_or_fd)\n if regions is None:\n regions = self.MapRegionListCls(path_or_fd)\n self._fdict[path_or_fd] = regions\n # END obtain region for path\n return self.WindowCursorCls(self, regions)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\namount of opened files in the system", "response": "def num_open_files(self):\n \"\"\"Amount of opened files in the system\"\"\"\n return reduce(lambda x, y: x + y, (1 for rlist in self._fdict.values() if len(rlist) > 0), 0)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef force_map_handle_removal_win(self, base_path):\n if sys.platform != 'win32':\n return\n # END early bailout\n\n num_closed = 0\n for path, rlist in self._fdict.items():\n if path.startswith(base_path):\n for region in rlist:\n region.release()\n num_closed += 1\n # END path matches\n # END for each path\n return num_closed", "response": "This function is used to enforce that a memory map is not closed when anybody has removed the file at the given base path."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nalign the given integer number to the closest page offset.", "response": "def align_to_mmap(num, round_up):\n \"\"\"\n Align the given integer number to the closest page offset, which usually is 4096 bytes.\n\n :param round_up: if True, the next higher multiple of page size is used, otherwise\n the lower page_size will be used (i.e. 
if True, 1 becomes 4096, otherwise it becomes 0)\n :return: num rounded to closest page\"\"\"\n res = (num // ALLOCATIONGRANULARITY) * ALLOCATIONGRANULARITY\n if round_up and (res != num):\n res += ALLOCATIONGRANULARITY\n # END handle size\n return res"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nassures the previous window area is contained in the new one", "response": "def align(self):\n \"\"\"Assures the previous window area is contained in the new one\"\"\"\n nofs = align_to_mmap(self.ofs, 0)\n self.size += self.ofs - nofs # keep size constant\n self.ofs = nofs\n self.size = align_to_mmap(self.size, 1)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef extend_left_to(self, window, max_size):\n rofs = self.ofs - window.ofs_end()\n nsize = rofs + self.size\n rofs -= nsize - min(nsize, max_size)\n self.ofs = self.ofs - rofs\n self.size += rofs", "response": "Adjust the offset to start where the given window on our left ends if possible and don t make yourself larger than max_size."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadjusting the size to make our window end where the right window begins and don t have a larger size than max_size", "response": "def extend_right_to(self, window, max_size):\n \"\"\"Adjust the size to make our window end where the right window begins, but don't\n get larger than max_size\"\"\"\n self.size = min(self.size + (window.ofs - self.ofs_end()), max_size)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn True if the given offset can be read in our mapped region False otherwise.", "response": "def includes_ofs(self, ofs):\n \"\"\":return: True if the given offset can be read in our mapped region\"\"\"\n return self._b <= ofs < self._b + self._size"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef 
increment_client_count(self, ofs = 1):\n self._uc += ofs\n assert self._uc > -1, \"Increments must match decrements, usage counter negative: %i\" % self._uc\n\n if self.client_count() == 0:\n self.release()\n return True\n else:\n return False", "response": "Increments the usage count by the given positive or negative offset."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns size of file we manager", "response": "def file_size(self):\n \"\"\":return: size of file we manager\"\"\"\n if self._file_size is None:\n if isinstance(self._path_or_fd, string_types()):\n self._file_size = os.stat(self._path_or_fd).st_size\n else:\n self._file_size = os.fstat(self._path_or_fd).st_size\n # END handle path type\n # END update file size\n return self._file_size"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncall this before the first use of this instance. The method was already called by the constructor in case sufficient information was provided. For more information no the parameters, see the __init__ method :param path: if cursor is None the existing one will be used. :return: True if the buffer can be used", "response": "def begin_access(self, cursor=None, offset=0, size=sys.maxsize, flags=0):\n \"\"\"Call this before the first use of this instance. 
The method was already\n called by the constructor in case sufficient information was provided.\n\n For more information no the parameters, see the __init__ method\n :param path: if cursor is None the existing one will be used.\n :return: True if the buffer can be used\"\"\"\n if cursor:\n self._c = cursor\n # END update our cursor\n\n # reuse existing cursors if possible\n if self._c is not None and self._c.is_associated():\n res = self._c.use_region(offset, size, flags).is_valid()\n if res:\n # if given size is too large or default, we computer a proper size\n # If its smaller, we assume the combination between offset and size\n # as chosen by the user is correct and use it !\n # If not, the user is in trouble.\n if size > self._c.file_size():\n size = self._c.file_size() - offset\n # END handle size\n self._size = size\n # END set size\n return res\n # END use our cursor\n return False"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn an argparse instance for this script.", "response": "def make_parser():\n \"\"\"\n Returns an argparse instance for this script.\n \"\"\"\n parser = argparse.ArgumentParser(description=\"generate HTML from crawler JSON\")\n parser.add_argument(\n \"--data-dir\", default=\"data\",\n help=u\"Directory containing JSON data from crawler [%(default)s]\"\n )\n parser.add_argument(\n \"--output-dir\", default=\"html\",\n help=u\"Directory to output the resulting HTML files [%(default)s]\"\n )\n return parser"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef main():\n parser = make_parser()\n args = parser.parse_args()\n data_dir = Path(args.data_dir).expand()\n if not data_dir.isdir(): # pylint: disable=no-value-for-parameter\n msg = u\"Data directory {dir} does not exist\".format(dir=args.data_dir)\n raise ValueError(msg)\n data_filenames = [name for name in data_dir.files() # pylint: disable=no-value-for-parameter\n if name.endswith(\".json\")]\n if not 
data_filenames:\n msg = u\"Data directory {dir} contains no JSON files\".format(dir=args.data_dir)\n raise ValueError(msg)\n output_dir = Path(args.output_dir).expand()\n output_dir.makedirs_p() # pylint: disable=no-value-for-parameter\n\n return render_html(data_dir, output_dir)", "response": "Validate script arguments and calls render_html function with them."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef wcag_refs(code):\n bits = code.split(\".\")\n for bit in reversed(bits):\n if WCAG_REFS_RE.match(bit):\n return bit.split(\",\")\n return []", "response": "Given a pa11y code return a list of the WCAG references."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef render_template(env, html_path, template_filename, context):\n template = env.get_template(template_filename)\n rendered_html = template.render(**context) # pylint: disable=no-member\n html_path.write_text(rendered_html, encoding='utf-8')", "response": "Render a template file into the given output location."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nrendering the HTML files for the current language.", "response": "def render_html(data_dir, output_dir):\n \"\"\"\n The main workhorse of this script. 
Finds all the JSON data files\n from pa11ycrawler, and transforms them into HTML files via Jinja2 templating.\n \"\"\"\n env = Environment(loader=PackageLoader('pa11ycrawler', 'templates'))\n env.globals[\"wcag_refs\"] = wcag_refs\n pages = []\n counter = collections.Counter()\n grouped_violations = collections.defaultdict(dict)\n\n # render detail templates\n for data_file in data_dir.files('*.json'):\n data = json.load(data_file.open())\n num_error, num_warning, num_notice = pa11y_counts(data['pa11y'])\n\n data[\"num_error\"] = num_error\n data[\"num_warning\"] = num_warning\n data[\"num_notice\"] = num_notice\n fname = data_file.namebase + \".html\"\n html_path = output_dir / fname\n render_template(env, html_path, 'detail.html', data)\n\n data[\"filename\"] = fname\n pages.append(data)\n\n for violation in data['pa11y']:\n violation_id = hashlib.md5(\n (violation['selector'] + violation['code']).encode('utf-8')\n ).hexdigest()\n\n if violation_id not in grouped_violations[violation['type']]:\n violation['pages'] = []\n grouped_violations[violation['type']][violation_id] = violation\n counter[violation['type']] += 1\n\n grouped_violations[violation['type']][violation_id]['pages'].append({\n 'url': data['url'],\n 'page_title': data['page_title']\n })\n\n def extract_nums(page):\n \"Used to sort pages by violation counts\"\n return (\n page[\"num_error\"],\n page[\"num_warning\"],\n page[\"num_notice\"],\n )\n\n index_path = output_dir / INDEX_TEMPLATE\n render_template(env, index_path, INDEX_TEMPLATE, {\n \"pages\": sorted(pages, key=extract_nums, reverse=True),\n \"num_error\": counter[\"error\"],\n \"num_warning\": counter[\"warning\"],\n \"num_notice\": counter[\"notice\"]\n })\n\n for violation_type in grouped_violations:\n unique_path = output_dir / u'{}s.html'.format(violation_type)\n render_template(env, unique_path, UNIQUE_TEMPLATE, {\n \"grouped_violations\": sorted(\n grouped_violations[violation_type].values(),\n key=lambda item: len(item['pages']),\n 
reverse=True\n ),\n \"current_type\": violation_type,\n \"violation_counts\": counter\n })"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef ignore_rules_for_url(spider, url):\n ignore_rules = getattr(spider, \"pa11y_ignore_rules\", {}) or {}\n return itertools.chain.from_iterable(\n rule_list\n for url_glob, rule_list\n in ignore_rules.items()\n if fnmatch.fnmatch(url, url_glob)\n )", "response": "Returns a list of ignore rules from the given spider that are relevant to the given URL."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef ignore_rule_matches_result(ignore_rule, pa11y_result):\n return all(\n fnmatch.fnmatch(pa11y_result.get(attr), ignore_rule.get(attr))\n for attr in ignore_rule.keys()\n )", "response": "Returns a boolean result of whether the given ignore rule matches the given pa11y result."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nload output from pa11y filtering out the ignored messages.", "response": "def load_pa11y_results(stdout, spider, url):\n \"\"\"\n Load output from pa11y, filtering out the ignored messages.\n The `stdout` parameter is a bytestring, not a unicode string.\n \"\"\"\n if not stdout:\n return []\n\n results = json.loads(stdout.decode('utf8'))\n\n ignore_rules = ignore_rules_for_url(spider, url)\n for rule in ignore_rules:\n results = [\n result for result in results\n if not ignore_rule_matches_result(rule, result)\n ]\n return results"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nwriting the pa11y config into a file.", "response": "def write_pa11y_config(item):\n \"\"\"\n The only way that pa11y will see the same page that scrapy sees\n is to make sure that pa11y requests the page with the same headers.\n However, the only way to configure request headers with pa11y is to\n write them into a config file.\n\n This 
function will create a config file, write the config into it,\n and return a reference to that file.\n \"\"\"\n config = {\n \"page\": {\n \"headers\": item[\"request_headers\"],\n },\n }\n config_file = tempfile.NamedTemporaryFile(\n mode=\"w\",\n prefix=\"pa11y-config-\",\n suffix=\".json\",\n delete=False\n )\n json.dump(config, config_file)\n config_file.close()\n return config_file"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nchecks if the expected title matches the A11y title.", "response": "def check_title_match(expected_title, pa11y_results, logger):\n \"\"\"\n Check if Scrapy reports any issue with the HTML element.\n If so, compare that <title> element to the title that we got in the\n A11yItem. If they don't match, something is screwy, and pa11y isn't\n parsing the page that we expect.\n \"\"\"\n if not pa11y_results:\n # no output from pa11y, nothing to check.\n return\n title_errs = [err for err in pa11y_results\n if err[\"context\"].startswith(\"<title\")]\n for err in title_errs:\n title_elmt = html.fragment_fromstring(err[\"context\"])\n # pa11ycrawler will elide the title, so grab whatever true\n # content we can from the output\n elided_title = title_elmt.text.strip()\n if elided_title.endswith(\"...\"):\n pa11y_title = elided_title[0:-4].strip()\n else:\n pa11y_title = elided_title\n\n # check that they match -- the elided version should be a substring\n # of the full version\n if pa11y_title not in expected_title:\n # whoa, something's screwy!\n msg = (\n u'Parser mismatch! 
'\n u'Scrapy saw full title \"{scrapy_title}\", '\n u'Pa11y saw elided title \"{elided_title}\".'\n ).format(\n scrapy_title=expected_title, elided_title=elided_title,\n )\n logger.error(msg)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef track_pa11y_stats(pa11y_results, spider):\n num_err, num_warn, num_notice = pa11y_counts(pa11y_results)\n stats = spider.crawler.stats\n stats.inc_value(\"pa11y/error\", count=num_err, spider=spider)\n stats.inc_value(\"pa11y/warning\", count=num_warn, spider=spider)\n stats.inc_value(\"pa11y/notice\", count=num_notice, spider=spider)", "response": "Track the number of pa11y errors warnings and notices."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nwriting the output from pa11y into a data file.", "response": "def write_pa11y_results(item, pa11y_results, data_dir):\n \"\"\"\n Write the output from pa11y into a data file.\n \"\"\"\n data = dict(item)\n data['pa11y'] = pa11y_results\n\n # it would be nice to use the URL as the filename,\n # but that gets complicated (long URLs, special characters, etc)\n # so we'll make the filename a hash of the URL instead,\n # and throw in the access time so that we can store the same URL\n # multiple times in this data directory\n hasher = hashlib.md5()\n hasher.update(item[\"url\"].encode('utf8'))\n hasher.update(item[\"accessed_at\"].isoformat().encode('utf8'))\n basename = hasher.hexdigest()\n filename = basename + \".json\"\n filepath = data_dir / filename\n data_dir.makedirs_p()\n text = json.dumps(data, cls=DateTimeEncoder)\n filepath.write_text(text)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nprocessing a single item from the Pa11y command line tool.", "response": "def process_item(self, item, spider):\n \"\"\"\n Use the Pa11y command line tool to get an a11y report.\n \"\"\"\n config_file = write_pa11y_config(item)\n args = [\n self.pa11y_path,\n item[\"url\"],\n 
'--config={file}'.format(file=config_file.name),\n ]\n for flag, value in self.cli_flags.items():\n args.append(\"--{flag}={value}\".format(flag=flag, value=value))\n\n retries_remaining = 3\n while retries_remaining:\n logline = \" \".join(args)\n if retries_remaining != 3:\n logline += u\" # (retry {num})\".format(num=3-retries_remaining)\n spider.logger.info(logline)\n\n proc = sp.Popen(\n args, shell=False,\n stdout=sp.PIPE, stderr=sp.PIPE,\n )\n stdout, stderr = proc.communicate()\n if proc.returncode in (0, 2):\n # `pa11y` ran successfully!\n # Return code 0 means no a11y errors.\n # Return code 2 means `pa11y` identified a11y errors.\n # Either way, we're done, so break out of the `while` loop\n break\n else:\n # `pa11y` did _not_ run successfully!\n # We sometimes get the error \"Truffler timed out\":\n # truffler is what accesses the web page for `pa11y1`.\n # https://www.npmjs.com/package/truffler\n # If this is the error, we can resolve it just by trying again,\n # so decrement the retries_remaining and start over.\n retries_remaining -= 1\n\n if retries_remaining == 0:\n raise DropItem(\n u\"Couldn't get pa11y results for {url}. Error:\\n{err}\".format(\n url=item['url'],\n err=stderr,\n )\n )\n\n pa11y_results = load_pa11y_results(stdout, spider, item['url'])\n check_title_match(item['page_title'], pa11y_results, spider.logger)\n track_pa11y_stats(pa11y_results, spider)\n os.remove(config_file.name)\n write_pa11y_results(item, pa11y_results, Path(spider.data_dir))\n return item"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn True if the given extension is one of the watched extensions", "response": "def watched_extension(extension):\n \"\"\"Return True if the given extension is one of the watched extensions\"\"\"\n for ext in hamlpy.VALID_EXTENSIONS:\n if extension.endswith('.' 
+ ext):\n return True\n return False"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef watch_folder():\n argv = sys.argv[1:] if len(sys.argv) > 1 else []\n args = arg_parser.parse_args(sys.argv[1:])\n compiler_args = {}\n \n input_folder = os.path.realpath(args.input_dir)\n if not args.output_dir:\n output_folder = input_folder\n else:\n output_folder = os.path.realpath(args.output_dir)\n \n if args.verbose:\n Options.VERBOSE = True\n print \"Watching {} at refresh interval {} seconds\".format(input_folder, args.refresh)\n \n if args.extension:\n Options.OUTPUT_EXT = args.extension\n \n if getattr(args, 'tags', False):\n hamlpynodes.TagNode.self_closing.update(args.tags)\n \n if args.input_extension:\n hamlpy.VALID_EXTENSIONS += args.input_extension\n \n if args.attr_wrapper:\n compiler_args['attr_wrapper'] = args.attr_wrapper\n \n if args.jinja:\n for k in ('ifchanged', 'ifequal', 'ifnotequal', 'autoescape', 'blocktrans',\n 'spaceless', 'comment', 'cache', 'localize', 'compress'):\n del hamlpynodes.TagNode.self_closing[k]\n \n hamlpynodes.TagNode.may_contain.pop(k, None)\n \n hamlpynodes.TagNode.self_closing.update({\n 'macro' : 'endmacro',\n 'call' : 'endcall',\n 'raw' : 'endraw'\n })\n \n hamlpynodes.TagNode.may_contain['for'] = 'else'\n \n while True:\n try:\n _watch_folder(input_folder, output_folder, compiler_args)\n time.sleep(args.refresh)\n except KeyboardInterrupt:\n # allow graceful exit (no stacktrace output)\n sys.exit(0)", "response": "Watches the folder and writes the new files to the output folder."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _watch_folder(folder, destination, compiler_args):\n for dirpath, dirnames, filenames in os.walk(folder):\n for filename in filenames:\n # Ignore filenames starting with \".#\" for Emacs compatibility\n if watched_extension(filename) and not filename.startswith('.#'):\n fullpath = 
os.path.join(dirpath, filename)\n subfolder = os.path.relpath(dirpath, folder)\n mtime = os.stat(fullpath).st_mtime\n \n # Create subfolders in target directory if they don't exist\n compiled_folder = os.path.join(destination, subfolder)\n if not os.path.exists(compiled_folder):\n os.makedirs(compiled_folder)\n \n compiled_path = _compiled_path(compiled_folder, filename)\n if (not fullpath in compiled or\n compiled[fullpath] < mtime or\n not os.path.isfile(compiled_path)):\n compile_file(fullpath, compiled_path, compiler_args)\n compiled[fullpath] = mtime", "response": "Compares modified timestamps against the compiled dict if necessary."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef is_sequence_start_page(self, url):\n return (\n len(url.path.segments) == 6 and\n url.path.segments[0] == 'courses' and\n url.path.segments[2] == 'courseware' and\n url.path.segments[5] == '1'\n )", "response": "Is this URL represent the first page in a section sequence? E. g. 
courseware section 1."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nprocess a single item.", "response": "def process_item(self, item, spider): # pylint: disable=unused-argument\n \"\"\"\n Stops processing item if we've already seen this URL before.\n \"\"\"\n url = self.clean_url(item[\"url\"])\n\n if self.is_sequence_start_page(url):\n url = url.parent\n\n if url in self.urls_seen:\n raise DropItem(u\"Dropping duplicate url {url}\".format(url=item[\"url\"]))\n else:\n self.urls_seen.add(url)\n return item"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef process_item(self, item, spider): # pylint: disable=unused-argument\n \"Check for DRF urls.\"\n url = URLObject(item[\"url\"])\n if url.path.startswith(\"/api/\"):\n raise DropItem(u\"Dropping DRF url {url}\".format(url=url))\n else:\n return item", "response": "Check for DRF urls."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef pa11y_counts(results):\n num_error = 0\n num_warning = 0\n num_notice = 0\n for result in results:\n if result['type'] == 'error':\n num_error += 1\n elif result['type'] == 'warning':\n num_warning += 1\n elif result['type'] == 'notice':\n num_notice += 1\n return num_error, num_warning, num_notice", "response": "Given a list of pa11y results return three integers num_error num_warning num_notice"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_csrf_token(response):\n cookie_headers = [\n h.decode('ascii') for h in response.headers.getlist(\"Set-Cookie\")\n ]\n if not cookie_headers:\n return None\n csrf_headers = [\n h for h in cookie_headers if h.startswith(\"csrftoken=\")\n ]\n if not csrf_headers:\n return None\n match = re.match(\"csrftoken=([^ ;]+);\", csrf_headers[-1])\n return match.group(1)", "response": "Extract the CSRF token out of the Set - Cookie header of 
a response."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nloading the pa11y ignore rules from the given file or URL.", "response": "def load_pa11y_ignore_rules(file=None, url=None): # pylint: disable=redefined-builtin\n \"\"\"\n Load the pa11y ignore rules from the given file or URL.\n \"\"\"\n if not file and not url:\n return None\n\n if file:\n file = Path(file)\n if not file.isfile():\n msg = (\n u\"pa11y_ignore_rules_file specified, but file does not exist! {file}\"\n ).format(file=file)\n raise ValueError(msg)\n return yaml.safe_load(file.text())\n\n # must be URL\n resp = requests.get(url)\n if not resp.ok:\n msg = (\n u\"pa11y_ignore_rules_url specified, but failed to fetch URL. status={status}\"\n ).format(status=resp.status_code)\n err = RuntimeError(msg)\n err.response = resp\n raise err\n return yaml.safe_load(resp.text)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nlog the error and return the ID of the object that was used to create the object.", "response": "def handle_error(self, failure):\n \"\"\"\n Provides basic error information for bad requests.\n If the error was an HttpError or DNSLookupError, it\n prints more specific information.\n \"\"\"\n self.logger.error(repr(failure))\n\n if failure.check(HttpError):\n response = failure.value.response\n self.logger.error(u'HttpError on %s', response.url)\n self.logger.error(u'HttpError Code: %s', response.status)\n if response.status in (401, 403):\n # If the error is from invalid login, tell the user\n self.logger.error(\n \"Credentials failed. 
Either add/update the current credentials \"\n \"or remove them to enable auto auth\"\n )\n elif failure.check(DNSLookupError):\n request = failure.request\n self.logger.error(u'DNSLookupError on %s', request.url)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nstart the spider requests.", "response": "def start_requests(self):\n \"\"\"\n Gets the spider started.\n If both `self.login_email` and `self.login_password` are set,\n this method generates a request to login with those credentials.\n Otherwise, this method generates a request to go to the \"auto auth\"\n page and get credentials from there. Either way, this method\n doesn't actually generate requests from `self.start_urls` -- that is\n handled by the `after_initial_login()` and `after_auto_auth()`\n methods.\n \"\"\"\n if self.single_url:\n port = urlparse(self.single_url).port\n if port:\n if port != self.port:\n self.port = port\n else:\n # No need for credentials\n yield scrapy.Request(\n self.single_url,\n callback=self.parse_item,\n errback=self.handle_error\n )\n return\n\n if self.login_email and self.login_password:\n login_url = (\n URLObject(\"http://\")\n .with_hostname(self.domain)\n .with_port(self.port)\n .with_path(LOGIN_HTML_PATH)\n )\n yield scrapy.Request(\n login_url,\n callback=self.after_initial_csrf,\n errback=self.handle_error\n )\n else:\n self.logger.info(\n \"email/password unset, fetching credentials via auto_auth\"\n )\n auth_url = (\n URLObject(\"http://\")\n .with_hostname(self.domain)\n .with_port(self.port)\n .with_path(AUTO_AUTH_PATH)\n .set_query_params(\n staff='true',\n course_id=self.course_key,\n )\n )\n # make sure to request a parseable JSON response\n headers = {\n b\"Accept\": b\"application/json\",\n }\n yield scrapy.Request(\n auth_url,\n headers=headers,\n callback=self.after_auto_auth,\n errback=self.handle_error\n )"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef 
after_initial_csrf(self, response):\n login_url = (\n URLObject(\"http://\")\n .with_hostname(self.domain)\n .with_port(self.port)\n .with_path(LOGIN_API_PATH)\n )\n credentials = {\n \"email\": self.login_email,\n \"password\": self.login_password,\n }\n headers = {\n b\"X-CSRFToken\": get_csrf_token(response),\n }\n yield scrapy.FormRequest(\n login_url,\n formdata=credentials,\n headers=headers,\n callback=self.after_initial_login,\n errback=self.handle_error\n )", "response": "This method is called by the crawler when the initial login is complete."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef after_auto_auth(self, response):\n result = json.loads(response.text)\n self.login_email = result[\"email\"]\n self.login_password = result[\"password\"]\n msg = (\n u\"Obtained credentials via auto_auth! email={email} password={password}\"\n ).format(**result)\n self.logger.info(msg)\n\n if self.single_url:\n yield scrapy.Request(\n self.single_url,\n callback=self.parse_item,\n errback=self.handle_error\n )\n else:\n for url in self.start_urls:\n yield scrapy.Request(\n url,\n callback=self.analyze_url_list,\n errback=self.handle_error\n )", "response": "This method is called when the crawler is started with auto auth. 
It parses the response from the crawler and saves the email and password combination."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nparse the response for the beginning url s for", "response": "def analyze_url_list(self, response):\n \"\"\"\n Parse JSON response for the beginning url(s) for\n the crawler.\n \"\"\"\n response = json.loads(response.text)\n for _, block in response['blocks'].items():\n for attribute in block:\n parsed = urlparse(block[attribute])\n # find urls in the JSON response\n if parsed.scheme and parsed.netloc:\n yield self.make_requests_from_url(block[attribute])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef parse_item(self, response):\n # if we got redirected to a login page, then login\n if URLObject(response.url).path == LOGIN_HTML_PATH:\n reqs = self.handle_unexpected_redirect_to_login_page(response)\n for req in reqs:\n yield req\n\n title = response.xpath(\"//title/text()\").extract_first()\n if title:\n title = title.strip()\n\n # `response.request.headers` is a dictionary where the key is the\n # header name, and the value is a *list*, containing one item,\n # which is the header value. We need to get rid of this list, and just\n # have key-value pairs. (This list probably exists in case the same\n # header is sent multiple times, but that's not happening in this case,\n # and the list construct is getting in the way.)\n #\n # We also need to convert bytes to ASCII. 
In practice, headers can\n # only contain ASCII characters: see\n # http://stackoverflow.com/questions/5423223/how-to-send-non-english-unicode-string-using-http-header\n request_headers = {key.decode('ascii'): value[0].decode('ascii')\n for key, value\n in response.request.headers.items()}\n item = A11yItem(\n url=response.url,\n request_headers=request_headers,\n accessed_at=datetime.utcnow(),\n page_title=title,\n )\n yield item", "response": "Parse a response from a page into a list of A11yItem objects."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef handle_unexpected_redirect_to_login_page(self, response):\n next_url = URLObject(response.url).query_dict.get(\"next\")\n login_url = (\n URLObject(\"http://\")\n .with_hostname(self.domain)\n .with_port(self.port)\n .with_path(LOGIN_API_PATH)\n )\n if next_url:\n login_url = login_url.set_query_param(\"next\", next_url)\n\n credentials = {\n \"email\": self.login_email,\n \"password\": self.login_password,\n }\n headers = {\n b\"X-CSRFToken\": get_csrf_token(response),\n }\n yield scrapy.FormRequest(\n login_url,\n formdata=credentials,\n headers=headers,\n callback=self.after_login,\n errback=self.handle_error\n )", "response": "This method is called when the crawler has unexpectedly logged out. 
It is called by the crawler to redirect the user to the login page."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nescaping quotes with a backslash except those inside a Django tag", "response": "def _escape_attribute_quotes(self, v):\n '''\n Escapes quotes with a backslash, except those inside a Django tag\n '''\n escaped = []\n inside_tag = False\n for i, _ in enumerate(v):\n if v[i:i + 2] == '{%':\n inside_tag = True\n elif v[i:i + 2] == '%}':\n inside_tag = False\n\n if v[i] == self.attr_wrapper and not inside_tag:\n escaped.append('\\\\')\n\n escaped.append(v[i])\n\n return ''.join(escaped)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_child(self, child):\n '''Add child node, and copy all options to it'''\n super(RootNode, self).add_child(child)\n child.attr_wrapper = self.attr_wrapper", "response": "Add child node and copy all options to it"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _render_before(self, element):\n '''Render opening tag and inline content'''\n start = [\"%s<%s\" % (self.spaces, element.tag)]\n if element.id:\n start.append(\" id=%s\" % self.element.attr_wrap(self.replace_inline_variables(element.id)))\n if element.classes:\n start.append(\" class=%s\" % self.element.attr_wrap(self.replace_inline_variables(element.classes)))\n if element.attributes:\n start.append(' ' + self.replace_inline_variables(element.attributes))\n\n content = self._render_inline_content(self.element.inline_content)\n\n if element.nuke_inner_whitespace and content:\n content = content.strip()\n\n if element.self_close and not content:\n start.append(\" />\")\n elif content:\n start.append(\">%s\" % (content))\n elif self.children:\n start.append(\">%s\" % (self.render_newlines()))\n else:\n start.append(\">\")\n return ''.join(start)", "response": "Render opening tag and inline content"} {"SOURCE": 
"codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_note(self, note):\n # determine whether to create a new note or update an existing one\n # Also need to add/remove key field to keep simplenote.py consistency\n if \"key\" in note:\n # Then already have a noteid we need to remove before passing to Simperium API\n noteid = note.pop(\"key\", None)\n else:\n # Adding a new note\n noteid = uuid.uuid4().hex\n\n\n # TODO: Set a ccid?\n # ccid = uuid.uuid4().hex\n if \"version\" in note:\n version = note.pop(\"version\", None)\n url = '%s/i/%s/v/%s?response=1' % (DATA_URL, noteid, version)\n else:\n url = '%s/i/%s?response=1' % (DATA_URL, noteid)\n\n # TODO: Could do with being consistent here. Everywhere else is Request(DATA_URL+params)\n note = self.__remove_simplenote_api_fields(note)\n request = Request(url, data=json.dumps(note).encode('utf-8'))\n request.add_header(self.header, self.get_token())\n request.add_header('Content-Type', 'application/json')\n\n response = \"\"\n try:\n response = urllib2.urlopen(request)\n except HTTPError as e:\n if e.code == 401:\n raise SimplenoteLoginFailed('Login to Simplenote API failed! 
Check Token.')\n else:\n return e, -1\n except IOError as e:\n return e, -1\n note = json.loads(response.read().decode('utf-8'))\n note = self.__add_simplenote_api_fields(note, noteid, int(response.info().get(\"X-Simperium-Version\")))\n return note, 0", "response": "Method to update a specific note object"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_note(self, note):\n\n if type(note) == str:\n return self.update_note({\"content\": note})\n elif (type(note) == dict) and \"content\" in note:\n return self.update_note(note)\n else:\n return \"No string or valid note.\", -1", "response": "Wrapper method to add a note to the internal cache"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef delete_note(self, note_id):\n # notes have to be trashed before deletion\n note, status = self.trash_note(note_id)\n if (status == -1):\n return note, status\n\n params = '/i/%s' % (str(note_id))\n request = Request(url=DATA_URL+params, method='DELETE')\n request.add_header(self.header, self.get_token())\n try:\n response = urllib2.urlopen(request)\n except IOError as e:\n return e, -1\n except HTTPError as e:\n if e.code == 401:\n raise SimplenoteLoginFailed('Login to Simplenote API failed! Check Token.')\n else:\n return e, -1\n return {}, 0", "response": "Method to permanently delete a note from the cache."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef authenticate_credentials(self, token):\n\n try:\n user_info = self.get_user_info(token)\n except UserInfoRetrievalFailed:\n msg = 'Failed to retrieve user info. 
Unable to authenticate.'\n logger.error(msg)\n raise exceptions.AuthenticationFailed(msg)\n\n user, __ = get_user_model().objects.get_or_create(username=user_info['username'], defaults=user_info)\n\n if not user.is_active:\n raise exceptions.AuthenticationFailed('User inactive or deleted.')\n\n return user, token", "response": "Validate the bearer token against the OAuth provider and return a User and access token."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_user_info(self, token):\n\n url = self.get_user_info_url()\n\n try:\n headers = {'Authorization': 'Bearer {}'.format(token)}\n response = requests.get(url, headers=headers)\n except requests.RequestException:\n logger.exception('Failed to retrieve user info due to a request exception.')\n raise UserInfoRetrievalFailed\n\n if response.status_code == 200:\n return self.process_user_info_response(response.json())\n else:\n msg = 'Failed to retrieve user info. Server [{server}] responded with status [{status}].'.format(\n server=url,\n status=response.status_code\n )\n raise UserInfoRetrievalFailed(msg)", "response": "Retrieves the user info from the OAuth provider."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef process_user_info_response(self, response):\n mapping = (\n ('username', 'preferred_username'),\n ('email', 'email'),\n ('last_name', 'family_name'),\n ('first_name', 'given_name'),\n )\n\n return {dest: response[source] for dest, source in mapping}", "response": "Process the user info response data."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nget or create an active user with the username contained in the payload.", "response": "def authenticate_credentials(self, payload):\n \"\"\"Get or create an active user with the username contained in the payload.\"\"\"\n username = payload.get('preferred_username') or 
payload.get('username')\n\n if username is None:\n raise exceptions.AuthenticationFailed('JWT must include a preferred_username or username claim!')\n else:\n try:\n user, __ = get_user_model().objects.get_or_create(username=username)\n attributes_updated = False\n for claim, attr in self.get_jwt_claim_attribute_map().items():\n payload_value = payload.get(claim)\n\n if getattr(user, attr) != payload_value and payload_value is not None:\n setattr(user, attr, payload_value)\n attributes_updated = True\n\n if attributes_updated:\n user.save()\n except:\n msg = 'User retrieval failed.'\n logger.exception(msg)\n raise exceptions.AuthenticationFailed(msg)\n\n return user"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning whether any class in iter_classes is a subclass of base_class.", "response": "def _includes_base_class(self, iter_classes, base_class):\n \"\"\"\n Returns whether any class in iter_class is a subclass of the given base_class.\n \"\"\"\n return any(\n issubclass(auth_class, base_class) for auth_class in iter_classes,\n )"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _add_missing_jwt_permission_classes(self, view_class):\n view_permissions = list(getattr(view_class, 'permission_classes', []))\n\n # Not all permissions are classes, some will be ConditionalPermission\n # objects from the rest_condition library. 
So we have to crawl all those\n # and expand them to see if our target classes are inside the\n # conditionals somewhere.\n permission_classes = []\n classes_to_add = []\n while view_permissions:\n permission = view_permissions.pop()\n if not hasattr(permission, 'perms_or_conds'):\n permission_classes.append(permission)\n else:\n for child in getattr(permission, 'perms_or_conds', []):\n view_permissions.append(child)\n\n for perm_class in self._required_permission_classes:\n if not self._includes_base_class(permission_classes, perm_class):\n log.warning(\n u\"The view %s allows Jwt Authentication but needs to include the %s permission class (adding it for you)\",\n view_class.__name__,\n perm_class.__name__,\n )\n classes_to_add.append(perm_class)\n\n if classes_to_add:\n view_class.permission_classes += tuple(classes_to_add)", "response": "Adds permissions classes that should not exist for Jwt based authentication."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _get_missing_cookie_message_and_metric(self, cookie_name):\n cookie_missing_message = '{} cookie is missing. 
JWT auth cookies will not be reconstituted.'.format(\n cookie_name\n )\n request_jwt_cookie = 'missing-{}'.format(cookie_name)\n return cookie_missing_message, request_jwt_cookie", "response": "Returns a message and metric value for the missing cookie."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef process_request(self, request):\n use_jwt_cookie_requested = request.META.get(USE_JWT_COOKIE_HEADER)\n header_payload_cookie = request.COOKIES.get(jwt_cookie_header_payload_name())\n signature_cookie = request.COOKIES.get(jwt_cookie_signature_name())\n\n if not use_jwt_cookie_requested:\n metric_value = 'not-requested'\n elif header_payload_cookie and signature_cookie:\n # Reconstitute JWT auth cookie if split cookies are available and jwt cookie\n # authentication was requested by the client.\n request.COOKIES[jwt_cookie_name()] = '{}{}{}'.format(\n header_payload_cookie,\n JWT_DELIMITER,\n signature_cookie,\n )\n metric_value = 'success'\n elif header_payload_cookie or signature_cookie:\n # Log unexpected case of only finding one cookie.\n if not header_payload_cookie:\n log_message, metric_value = self._get_missing_cookie_message_and_metric(\n jwt_cookie_header_payload_name()\n )\n if not signature_cookie:\n log_message, metric_value = self._get_missing_cookie_message_and_metric(\n jwt_cookie_signature_name()\n )\n log.warning(log_message)\n else:\n metric_value = 'missing-both'\n\n monitoring.set_custom_metric('request_jwt_cookie', metric_value)", "response": "Reconstitute the full JWT and add a new cookie on the request object."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef jwt_decode_handler(token):\n jwt_issuer = get_first_jwt_issuer()\n _verify_jwt_signature(token, jwt_issuer)\n decoded_token = _decode_and_verify_token(token, jwt_issuer)\n return _set_token_defaults(decoded_token)", "response": "Returns a handler that decodes a JSON Web 
Token."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _set_token_defaults(token):\n def _verify_version(jwt_version):\n supported_version = Version(\n settings.JWT_AUTH.get('JWT_SUPPORTED_VERSION', JwtTokenVersion.default_latest_supported)\n )\n if jwt_version.major > supported_version.major:\n logger.info('Token decode failed due to unsupported JWT version number [%s]', str(jwt_version))\n raise jwt.InvalidTokenError('JWT version number [%s] is unsupported', str(jwt_version))\n\n def _get_and_set_version(token):\n \"\"\"\n Tokens didn't always contain a version number so we\n default to a nominal starting number.\n \"\"\"\n if 'version' not in token:\n token['version'] = str(JwtTokenVersion.starting_version)\n\n return Version(token['version'])\n\n def _set_is_restricted(token):\n \"\"\"\n We can safely default to False since all \"restricted\" tokens\n created prior to the addition of the `is_restricted` flag were\n always created as expired tokens. 
Expired tokens would not\n validate and so would not get this far into the decoding process.\n # TODO: ARCH-166\n \"\"\"\n if 'is_restricted' not in token:\n token['is_restricted'] = False\n\n def _set_filters(token):\n \"\"\"\n We can safely default to an empty list of filters since\n previously created tokens were either \"restricted\" (always\n expired) or had full access.\n # TODO: ARCH-166\n \"\"\"\n if 'filters' not in token:\n token['filters'] = []\n\n token_version = _get_and_set_version(token)\n _verify_version(token_version)\n _set_is_restricted(token)\n _set_filters(token)\n return token", "response": "Sets the default values for the fields that were introduced since the token was created."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning a JWK Keyset containing all active keys that are configured for verifying signatures.", "response": "def _get_signing_jwk_key_set(jwt_issuer):\n \"\"\"\n Returns a JWK Keyset containing all active keys that are configured\n for verifying signatures.\n \"\"\"\n key_set = KEYS()\n\n # asymmetric keys\n signing_jwk_set = settings.JWT_AUTH.get('JWT_PUBLIC_SIGNING_JWK_SET')\n if signing_jwk_set:\n key_set.load_jwks(signing_jwk_set)\n\n # symmetric key\n key_set.add({'key': jwt_issuer['SECRET_KEY'], 'kty': 'oct'})\n\n return key_set"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef paginate_search_results(object_class, search_results, page_size, page):\n paginator = Paginator(search_results['results'], page_size)\n\n # This code is taken from within the GenericAPIView#paginate_queryset method.\n # It is common code, but\n try:\n page_number = paginator.validate_number(page)\n except InvalidPage:\n if page == 'last':\n page_number = paginator.num_pages\n else:\n raise Http404(\"Page is not 'last', nor can it be converted to an int.\")\n\n try:\n paged_results = paginator.page(page_number)\n except InvalidPage as exception:\n 
raise Http404(\n \"Invalid page {page_number}: {message}\".format(\n page_number=page_number,\n message=str(exception)\n )\n )\n\n search_queryset_pks = [item['data']['pk'] for item in paged_results.object_list]\n queryset = object_class.objects.filter(pk__in=search_queryset_pks)\n\n def ordered_objects(primary_key):\n \"\"\" Returns database object matching the search result object\"\"\"\n for obj in queryset:\n if obj.pk == primary_key:\n return obj\n\n # map over the search results and get a list of database objects in the same order\n object_results = list(map(ordered_objects, search_queryset_pks))\n paged_results.object_list = object_results\n\n return paged_results", "response": "Takes edX - search results and returns a Paginator object populated with model objects for that page."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_paginated_response(self, data):\n return Response({\n 'next': self.get_next_link(),\n 'previous': self.get_previous_link(),\n 'count': self.page.paginator.count,\n 'num_pages': self.page.paginator.num_pages,\n 'current_page': self.page.number,\n 'start': (self.page.number - 1) * self.get_page_size(self.request),\n 'results': data\n })", "response": "Annotate the response with pagination information."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_paginated_response(self, data):\n metadata = {\n 'next': self.get_next_link(),\n 'previous': self.get_previous_link(),\n 'count': self.get_result_count(),\n 'num_pages': self.get_num_pages(),\n }\n if isinstance(data, dict):\n if 'results' not in data:\n raise TypeError(u'Malformed result dict')\n data['pagination'] = metadata\n else:\n data = {\n 'results': data,\n 'pagination': metadata,\n }\n return Response(data)", "response": "Annotate the response with pagination information"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following 
Python 3 code does\ndef get_decoded_jwt(request):\n jwt_cookie = request.COOKIES.get(jwt_cookie_name(), None)\n\n if not jwt_cookie:\n return None\n return jwt_decode_handler(jwt_cookie)", "response": "Grab jwt from jwt cookie in request if possible. Returns a decoded jwt dict if it can be found. Returns None if the jwt is not found."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nauthenticating the user with a CSRF token.", "response": "def authenticate(self, request):\n \"\"\"Authenticate the user, requiring a logged-in account and CSRF.\n\n This is exactly the same as the `SessionAuthentication` implementation,\n with the `user.is_active` check removed.\n\n Args:\n request (HttpRequest)\n\n Returns:\n Tuple of `(user, token)`\n\n Raises:\n PermissionDenied: The CSRF token check failed.\n\n \"\"\"\n # Get the underlying HttpRequest object\n request = request._request # pylint: disable=protected-access\n user = getattr(request, 'user', None)\n\n # Unauthenticated, CSRF validation not required\n # This is where regular `SessionAuthentication` checks that the user is active.\n # We have removed that check in this implementation.\n # But we added a check to prevent anonymous users since we require a logged-in account.\n if not user or user.is_anonymous:\n return None\n\n self.enforce_csrf(request)\n\n # CSRF passed with authenticated user\n return (user, None)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef has_permission(self, request, view):\n course_key = CourseKey.from_string(view.kwargs.get('course_id'))\n jwt_filters = decode_jwt_filters(request.auth)\n for filter_type, filter_value in jwt_filters:\n if filter_type == 'content_org' and filter_value == course_key.org:\n return True\n log.warning(\n u\"Permission JwtHasContentOrgFilterForRequestedCourse: no filter found for %s.\",\n course_key.org,\n )\n return False", "response": "Check if the request has permission to view the one - 
item content org."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nchecks if the user in the request matches the user in the URL.", "response": "def has_permission(self, request, view):\n \"\"\"\n If the JWT has a user filter, verify that the filtered\n user value matches the user in the URL.\n \"\"\"\n user_filter = self._get_user_filter(request)\n if not user_filter:\n # no user filters are present in the token to limit access\n return True\n\n username_param = get_username_param(request)\n allowed = user_filter == username_param\n if not allowed:\n log.warning(\n u\"Permission JwtHasUserFilterForRequestedUser: user_filter %s doesn't match username %s.\",\n user_filter,\n username_param,\n )\n return allowed"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _get_deprecated_jwt_issuers():\n # If JWT_ISSUERS is not defined, attempt to return the deprecated settings.\n warnings.warn(\n \"'JWT_ISSUERS' list not defined, checking for deprecated settings.\",\n DeprecationWarning\n )\n\n return [\n {\n 'ISSUER': api_settings.JWT_ISSUER,\n 'SECRET_KEY': api_settings.JWT_SECRET_KEY,\n 'AUDIENCE': api_settings.JWT_AUDIENCE\n }\n ]", "response": "Internal helper to retrieve the deprecated set of JWT_ISSUER data from the JWT_AUTH configuration."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef process_response(self, request, response):\n self._set_request_auth_type_metric(request)\n self._set_request_user_agent_metrics(request)\n self._set_request_referer_metric(request)\n self._set_request_user_id_metric(request)\n\n return response", "response": "Add metrics for various details of the request."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _set_request_user_id_metric(self, request):\n if hasattr(request, 'user') and 
hasattr(request.user, 'id') and request.user.id:\n monitoring.set_custom_metric('request_user_id', request.user.id)", "response": "Add request_user_id metric to the monitoring object"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets the metric request_referer for the referer.", "response": "def _set_request_referer_metric(self, request):\n \"\"\"\n Add metric 'request_referer' for http referer.\n \"\"\"\n if 'HTTP_REFERER' in request.META and request.META['HTTP_REFERER']:\n monitoring.set_custom_metric('request_referer', request.META['HTTP_REFERER'])"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _set_request_user_agent_metrics(self, request):\n if 'HTTP_USER_AGENT' in request.META and request.META['HTTP_USER_AGENT']:\n user_agent = request.META['HTTP_USER_AGENT']\n monitoring.set_custom_metric('request_user_agent', user_agent)\n if user_agent:\n # Example agent string from edx-rest-api-client:\n # python-requests/2.9.1 edx-rest-api-client/1.7.2 ecommerce\n # See https://github.com/edx/edx-rest-api-client/commit/692903c30b157f7a4edabc2f53aae1742db3a019\n user_agent_parts = user_agent.split()\n if len(user_agent_parts) == 3 and user_agent_parts[1].startswith('edx-rest-api-client/'):\n monitoring.set_custom_metric('request_client_name', user_agent_parts[2])", "response": "Add metrics for user agent for python."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _set_request_auth_type_metric(self, request):\n if 'HTTP_AUTHORIZATION' in request.META and request.META['HTTP_AUTHORIZATION']:\n token_parts = request.META['HTTP_AUTHORIZATION'].split()\n # Example: \"JWT eyJhbGciO...\"\n if len(token_parts) == 2:\n auth_type = token_parts[0].lower() # 'jwt' or 'bearer' (for example)\n else:\n auth_type = 'other-token-type'\n elif not hasattr(request, 'user') or not request.user:\n auth_type = 'no-user'\n elif 
not request.user.is_authenticated:\n auth_type = 'unauthenticated'\n else:\n auth_type = 'session-or-unknown'\n monitoring.set_custom_metric('request_auth_type', auth_type)", "response": "Set the metric request_auth_type for the authentication type used."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nproducing a JSONRPC request.", "response": "def jsonrpc_request(method, identifier, params=None):\n \"\"\"Produce a JSONRPC request.\"\"\"\n return '{}\\r\\n'.format(json.dumps({\n 'id': identifier,\n 'method': method,\n 'params': params or {},\n 'jsonrpc': '2.0'\n })).encode()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nsend a JSONRPC request.", "response": "def request(self, method, params):\n \"\"\"Send a JSONRPC request.\"\"\"\n identifier = random.randint(1, 1000)\n self._transport.write(jsonrpc_request(method, identifier, params))\n self._buffer[identifier] = {'flag': asyncio.Event()}\n yield from self._buffer[identifier]['flag'].wait()\n result = self._buffer[identifier]['data']\n del self._buffer[identifier]['data']\n return result"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _do_connect(self):\n _, self._protocol = yield from self._loop.create_connection(\n lambda: SnapcastProtocol(self._callbacks), self._host, self._port)", "response": "Perform the connection to the server."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef delete_client(self, identifier):\n params = {'id': identifier}\n response = yield from self._transact(SERVER_DELETECLIENT, params)\n self.synchronize(response)", "response": "Delete a client from the server."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _request(self, method, identifier, key=None, value=None):\n params = {'id': identifier}\n if key is not None and value is not None:\n params[key] = value\n result = 
yield from self._transact(method, params)\n return result.get(key)", "response": "Perform request with identifier."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _on_group_mute(self, data):\n self._groups.get(data.get('id')).update_mute(data)", "response": "Handle group mute event."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nhandle group stream change.", "response": "def _on_group_stream_changed(self, data):\n \"\"\"Handle group stream change.\"\"\"\n self._groups.get(data.get('id')).update_stream(data)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _on_client_volume_changed(self, data):\n self._clients.get(data.get('id')).update_volume(data)", "response": "Handle client volume change."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _on_client_name_changed(self, data):\n self._clients.get(data.get('id')).update_name(data)", "response": "Handle client name changed."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nhandle client latency changed.", "response": "def _on_client_latency_changed(self, data):\n \"\"\"Handle client latency changed.\"\"\"\n self._clients.get(data.get('id')).update_latency(data)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nindicate readiness to receive stream.", "response": "def request_start(self):\n \"\"\" Indicate readiness to receive stream.\n\n This is a blocking call.\n \"\"\"\n self._queue.put(command_packet(CMD_START_STREAM))\n _LOGGER.info('Requesting stream')\n self._source.run()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _read_socket(self):\n while True:\n base_bytes = self._socket.recv(BASE_SIZE)\n base = basemessage.parse(base_bytes)\n payload_bytes = 
self._socket.recv(base.payload_length)\n self._handle_message(packet.parse(base_bytes + payload_bytes))", "response": "Read incoming messages from the socket."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nhandle messages from the server.", "response": "def _handle_message(self, data):\n \"\"\" Handle messages. \"\"\"\n if data.type == MSG_SERVER_SETTINGS:\n _LOGGER.info(data.payload)\n elif data.type == MSG_SAMPLE_FORMAT:\n _LOGGER.info(data.payload)\n self._connected = True\n elif data.type == MSG_TIME:\n if not self._buffered:\n _LOGGER.info('Buffering')\n elif data.type == MSG_HEADER:\n # Push to app source and start playing.\n _LOGGER.info(data.payload.codec.decode('ascii'))\n self._source.push(data.payload.header)\n self._source.play()\n elif data.type == MSG_WIRE_CHUNK:\n # Add chunks to play queue.\n self._buffer.put(data.payload.chunk)\n if self._buffer.qsize() > BUFFER_SIZE:\n self._buffered = True\n if self._buffer.empty():\n self._buffered = False"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _write_socket(self):\n while True:\n now = time.time()\n if self._connected and (self._last_sync + SYNC_AFTER) < now:\n self._queue.put(request_packet(MSG_TIME))\n self._last_sync = now\n if not self._queue.empty():\n self._socket.send(self._queue.get())", "response": "Pass messages from queue to socket."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nplaying buffer to app source.", "response": "def _play(self):\n \"\"\" Relay buffer to app source. 
\"\"\"\n while True:\n if self._buffered:\n self._source.push(self._buffer.get())"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting group mute status.", "response": "def set_muted(self, status):\n \"\"\"Set group mute status.\"\"\"\n self._group['muted'] = status\n yield from self._server.group_mute(self.identifier, status)\n _LOGGER.info('set muted to %s on %s', status, self.friendly_name)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting volume on all clients.", "response": "def set_volume(self, volume):\n \"\"\"Set volume.\"\"\"\n for data in self._group.get('clients'):\n client = self._server.client(data.get('id'))\n yield from client.set_volume(volume, update_group=False)\n client.update_volume({\n 'volume': {\n 'percent': volume,\n 'muted': client.muted\n }\n })\n _LOGGER.info('set volume to %s on clients in %s', volume, self.friendly_name)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds a client to the group.", "response": "def add_client(self, client_identifier):\n \"\"\"Add a client.\"\"\"\n if client_identifier in self.clients:\n _LOGGER.error('%s already in group %s', client_identifier, self.identifier)\n return\n new_clients = self.clients\n new_clients.append(client_identifier)\n yield from self._server.group_clients(self.identifier, new_clients)\n _LOGGER.info('added %s to %s', client_identifier, self.identifier)\n self._server.client(client_identifier).callback()\n self.callback()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef remove_client(self, client_identifier):\n new_clients = self.clients\n new_clients.remove(client_identifier)\n yield from self._server.group_clients(self.identifier, new_clients)\n _LOGGER.info('removed %s from %s', client_identifier, self.identifier)\n self._server.client(client_identifier).callback()\n self.callback()", "response": "Remove a client from the 
group."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef restore(self):\n if not self._snapshot:\n return\n yield from self.set_muted(self._snapshot['muted'])\n yield from self.set_volume(self._snapshot['volume'])\n yield from self.set_stream(self._snapshot['stream'])\n self.callback()\n _LOGGER.info('restored snapshot of state of %s', self.friendly_name)", "response": "Restore the state of the current instance."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nbuild a message from a single object.", "response": "def message(message_type, payload, payload_length):\n \"\"\" Build a message. \"\"\"\n return packet.build(\n Container(\n type=message_type,\n id=1,\n refer=0,\n sent=Container(\n secs=0,\n usecs=0\n ),\n recv=Container(\n secs=0,\n usecs=0\n ),\n payload_length=payload_length,\n payload=payload\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nbuilds a map message.", "response": "def map_helper(data):\n \"\"\" Build a map message. 
\"\"\"\n as_list = []\n length = 2\n for field, value in data.items():\n as_list.append(Container(field=bytes(field, ENCODING),\n value=bytes(value, ENCODING)))\n length += len(field) + len(value) + 4\n return (Container(\n num=len(as_list),\n map=as_list\n ), length)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef command_packet(cmd):\n return message('Command',\n Container(string_length=len(cmd),\n string=bytes(cmd, ENCODING)),\n len(cmd) + 2)", "response": "Build a command message."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef set_name(self, name):\n if not name:\n name = ''\n self._client['config']['name'] = name\n yield from self._server.client_name(self.identifier, name)", "response": "Set a client name."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsets client mute status.", "response": "def set_muted(self, status):\n \"\"\"Set client mute status.\"\"\"\n new_volume = self._client['config']['volume']\n new_volume['muted'] = status\n self._client['config']['volume']['muted'] = status\n yield from self._server.client_volume(self.identifier, new_volume)\n _LOGGER.info('set muted to %s on %s', status, self.friendly_name)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_volume(self, percent, update_group=True):\n if percent not in range(0, 101):\n raise ValueError('Volume percent out of range')\n new_volume = self._client['config']['volume']\n new_volume['percent'] = percent\n self._client['config']['volume']['percent'] = percent\n yield from self._server.client_volume(self.identifier, new_volume)\n if update_group:\n self._server.group(self.group.identifier).callback()\n _LOGGER.info('set volume to %s on %s', percent, self.friendly_name)", "response": "Set client volume percent."} {"SOURCE": "codesearchnet", 
"instruction": "How would you code a function in Python 3 to\nrestore the state of the current instance.", "response": "def restore(self):\n \"\"\"Restore snapshotted state.\"\"\"\n if not self._snapshot:\n return\n yield from self.set_name(self._snapshot['name'])\n yield from self.set_volume(self._snapshot['volume'])\n yield from self.set_muted(self._snapshot['muted'])\n yield from self.set_latency(self._snapshot['latency'])\n self.callback()\n _LOGGER.info('restored snapshot of state of %s', self.friendly_name)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\npushes a buffer into the source.", "response": "def push(self, buf):\n \"\"\" Push a buffer into the source. \"\"\"\n self._src.emit('push-buffer', Gst.Buffer.new_wrapped(buf))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a new server.", "response": "def create_server(loop, host, port=CONTROL_PORT, reconnect=False):\n \"\"\"Server factory.\"\"\"\n server = Snapserver(loop, host, port, reconnect)\n yield from server.start()\n return server"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _get_ordering(son):\n def fmt(field, direction):\n return '{0}{1}'.format({-1: '-', 1: '+'}[direction], field)\n\n if '$orderby' in son:\n return ', '.join(fmt(f, d) for f, d in son['$orderby'].items())", "response": "Helper function to extract formatted ordering from dict.\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nclean up stacktrace and remove all entries that are part of our stacktracing code.", "response": "def _tidy_stacktrace(stack):\n \"\"\"\n Clean up stacktrace and remove all entries that:\n 1. Are part of Django (except contrib apps)\n 2. Are part of SocketServer (used by Django's dev server)\n 3. 
Are the last entry (which is part of our stacktracing code)\n\n ``stack`` should be a list of frame tuples from ``inspect.stack()``\n \"\"\"\n django_path = os.path.realpath(os.path.dirname(django.__file__))\n django_path = os.path.normpath(os.path.join(django_path, '..'))\n socketserver_path = os.path.realpath(os.path.dirname(SocketServer.__file__))\n pymongo_path = os.path.realpath(os.path.dirname(pymongo.__file__))\n\n trace = []\n for frame, path, line_no, func_name, text in (f[:5] for f in stack):\n s_path = os.path.realpath(path)\n # Support hiding of frames -- used in various utilities that provide\n # inspection.\n if '__traceback_hide__' in frame.f_locals:\n continue\n if getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {}).get('HIDE_DJANGO_SQL', True) \\\n and django_path in s_path and not 'django/contrib' in s_path:\n continue\n if socketserver_path in s_path:\n continue\n if pymongo_path in s_path:\n continue\n if not text:\n text = ''\n else:\n text = (''.join(text)).strip()\n trace.append((path, line_no, func_name, text))\n return trace"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the class loader that spark uses.", "response": "def classloader(self):\n \"\"\"Returns the private class loader that spark uses.\n\n This is needed since jars added with --jars are not easily resolvable by py4j's classloader\n \"\"\"\n return self.jvm.org.apache.spark.util.Utils.getContextOrSparkClassLoader()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_java_container(self, package_name=None, object_name=None, java_class_instance=None):\n if package_name is not None:\n jcontainer = self.import_scala_package_object(package_name)\n elif object_name is not None:\n jcontainer = self.import_scala_object(object_name)\n elif java_class_instance is not None:\n jcontainer = java_class_instance\n else:\n raise RuntimeError(\"Expected one of package_name, object_name or 
java_class_instance\")\n return jcontainer", "response": "Convenience method to get the java container that houses methods we want to call a method on."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef wrap_function_cols(self, name, package_name=None, object_name=None, java_class_instance=None, doc=\"\"):\n def _(*cols):\n jcontainer = self.get_java_container(package_name=package_name, object_name=object_name, java_class_instance=java_class_instance)\n # Ensure that your argument is a column\n col_args = [col._jc if isinstance(col, Column) else _make_col(col)._jc for col in cols]\n function = getattr(jcontainer, name)\n args = col_args\n jc = function(*args)\n return Column(jc)\n _.__name__ = name\n _.__doc__ = doc\n return _", "response": "Utility method for wrapping a scala / java function that returns a spark sql Column."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nwrapping a scala Spark user defined function", "response": "def wrap_spark_sql_udf(self, name, package_name=None, object_name=None, java_class_instance=None, doc=\"\"):\n \"\"\"Wraps a scala/java spark user defined function \"\"\"\n def _(*cols):\n jcontainer = self.get_java_container(package_name=package_name, object_name=object_name, java_class_instance=java_class_instance)\n # Ensure that your argument is a column\n function = getattr(jcontainer, name)\n judf = function()\n jc = judf.apply(self.to_scala_seq([_to_java_column(c) for c in cols]))\n return Column(jc)\n _.__name__ = name\n _.__doc__ = doc\n return _"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _save_documentation(version, base_url=\"https://spark.apache.org/docs\"):\n target_dir = join(dirname(__file__), 'spylon', 'spark')\n with open(join(target_dir, \"spark_properties_{}.json\".format(version)), 'w') as fp:\n all_props = _fetch_documentation(version=version, base_url=base_url)\n 
all_props = sorted(all_props, key=lambda x: x[0])\n all_props_d = [{\"property\": p, \"default\": d, \"description\": desc} for p, d, desc in all_props]\n json.dump(all_props_d, fp, indent=2)", "response": "Write the spark property documentation to a file\n "} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _pretty_time_delta(td):\n seconds = td.total_seconds()\n sign_string = '-' if seconds < 0 else ''\n seconds = abs(int(seconds))\n days, seconds = divmod(seconds, 86400)\n hours, seconds = divmod(seconds, 3600)\n minutes, seconds = divmod(seconds, 60)\n d = dict(sign=sign_string, days=days, hours=hours, minutes=minutes, seconds=seconds)\n if days > 0:\n return '{sign}{days}d{hours:02d}h{minutes:02d}m:{seconds:02d}s'.format(**d)\n elif hours > 0:\n return '{sign}{hours:02d}h{minutes:02d}m:{seconds:02d}s'.format(**d)\n elif minutes > 0:\n return '{sign}{minutes:02d}m:{seconds:02d}s'.format(**d)\n else:\n return '{sign}{seconds:02d}s'.format(**d)", "response": "Returns a string representation of a time delta."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _format_stage_info(bar_width, stage_info, duration, timedelta_formatter=_pretty_time_delta):\n dur = timedelta_formatter(duration)\n percent = (stage_info.numCompletedTasks * bar_width) // stage_info.numTasks\n bar = [' '] * bar_width\n for i in range(bar_width):\n char = ' '\n if i < percent:\n char = '='\n if i == percent:\n char = '>'\n bar[i] = char\n bar = ''.join(bar)\n return \"[Stage {info.stageId}:{bar} \" \\\n \"({info.numCompletedTasks} + {info.numActiveTasks} / {info.numTasks} Dur: {dur}]\" \\\n .format(info=stage_info, dur=dur, bar=bar)", "response": "Formats the Spark stage progress."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef start(sc, timedelta_formatter=_pretty_time_delta, bar_width=20, sleep_time=0.5):\n global 
_printer_singleton\n\n if _printer_singleton is None:\n _printer_singleton = ProgressPrinter(sc, timedelta_formatter, bar_width, sleep_time)\n _printer_singleton.start()\n return _printer_singleton", "response": "Starts a new ProgressPrinter for the given SparkContext."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nrun the progress printing loop.", "response": "def run(self):\n \"\"\"Run the progress printing loop.\"\"\"\n last_status = ''\n # lambda is used to avoid http://bugs.python.org/issue30473 in py36\n start_times = defaultdict(lambda: datetime.datetime.now())\n max_stage_id = -1\n\n status = self.sc.statusTracker()\n while True:\n with self.condition:\n if self.sc._jsc is None or not self.alive:\n # End the thread\n self.paused = True\n break\n elif self.paused:\n # Pause the thread\n self.condition.wait()\n stage_ids = status.getActiveStageIds()\n progressbar_list = []\n # Only show first 3\n stage_counter = 0\n current_max_stage = max_stage_id\n for stage_id in stage_ids:\n stage_info = status.getStageInfo(stage_id)\n if stage_info and stage_info.numTasks > 0:\n # Set state variables used for flushing later\n current_max_stage = stage_id\n stage_counter += 1\n td = datetime.datetime.now() - start_times[stage_id]\n s = _format_stage_info(self.bar_width, stage_info, td, self.timedelta_formatter)\n progressbar_list.append(s)\n if stage_counter == 3:\n break\n\n # Ensure that when we get a new maximum stage id we print a \\n\n # to make the progress bar go on to the next line.\n if current_max_stage > max_stage_id:\n if last_status != '':\n sys.stderr.write(\"\\n\")\n sys.stderr.flush()\n max_stage_id = current_max_stage\n\n new_status = ' '.join(progressbar_list)\n if new_status != last_status:\n sys.stderr.write(\"\\r\" + new_status)\n sys.stderr.flush()\n last_status = new_status\n time.sleep(self.sleep_time)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating a conda environment inside 
the current sandbox.", "response": "def create_conda_env(sandbox_dir, env_name, dependencies, options=()):\n \"\"\"\n Create a conda environment inside the current sandbox for the given list of dependencies and options.\n\n Parameters\n ----------\n sandbox_dir : str\n env_name : str\n dependencies : list\n List of conda specs\n options\n List of additional options to pass to conda. Things like [\"-c\", \"conda-forge\"]\n\n Returns\n -------\n (env_dir, env_name)\n \"\"\"\n\n env_dir = os.path.join(sandbox_dir, env_name)\n cmdline = [\"conda\", \"create\", \"--yes\", \"--copy\", \"--quiet\", \"-p\", env_dir] + list(options) + dependencies\n\n log.info(\"Creating conda environment: \")\n log.info(\" command line: %s\", cmdline)\n subprocess.check_call(cmdline, stderr=subprocess.PIPE, stdout=subprocess.PIPE)\n log.debug(\"Environment created\")\n\n return env_dir, env_name"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncompress the directory and writes to its parent .", "response": "def archive_dir(env_dir):\n \"\"\"\n Compresses the directory and writes to its parent\n\n Parameters\n ----------\n env_dir : str\n\n Returns\n -------\n str\n \"\"\"\n output_filename = env_dir + \".zip\"\n log.info(\"Archiving conda environment: %s -> %s\", env_dir, output_filename)\n subprocess.check_call([\"zip\", \"-r\", \"-0\", \"-q\", output_filename, env_dir])\n return output_filename"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef prepare_pyspark_yarn_interactive(env_name, env_archive, spark_conf):\n\n from .launcher import SparkConfiguration\n\n assert isinstance(spark_conf, SparkConfiguration)\n\n yarn_python = os.path.join(\".\", \"CONDA\", env_name, \"bin\", \"python\")\n archives = env_archive + \"#CONDA\"\n\n new_spark_conf = copy.deepcopy(spark_conf)\n new_spark_conf.master = \"yarn\"\n new_spark_conf.deploy_mode = \"client\"\n new_spark_conf.archives = [archives]\n 
new_spark_conf.conf.set(\"spark.executorEnv.PYSPARK_PYTHON\", yarn_python)\n new_spark_conf._python_path = yarn_python\n\n env_update = {\n \"PYSPARK_DRIVER_PYTHON\": sys.executable,\n \"PYSPARK_PYTHON\": yarn_python\n }\n\n os.environ.update(env_update)\n\n return new_spark_conf", "response": "Prepare a Spark environment for interactive use."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef run_pyspark_yarn_cluster(env_dir, env_name, env_archive, args):\n env = dict(os.environ)\n yarn_python = os.path.join(\".\", \"CONDA\", env_name, \"bin\", \"python\")\n archives = env_archive + \"#CONDA\"\n\n prepend_args = [\n \"--master\", \"yarn\",\n \"--deploy-mode\", \"cluster\",\n \"--conf\", \"spark.yarn.appMasterEnv.PYSPARK_PYTHON={}\".format(yarn_python),\n \"--archives\", archives,\n ]\n\n env_update = {\n \"PYSPARK_PYTHON\": yarn_python\n }\n\n env.update(env_update)\n spark_submit = os.path.join(env[\"SPARK_HOME\"], \"bin\", \"spark-submit\")\n\n log.info(\"Running spark in YARN-client mode with added arguments\")\n log.info(\" args: %s\", pprint.pprint(prepend_args, indent=4))\n log.info(\" env: %s\", pprint.pprint(env_update, indent=4))\n # REPLACE our python process with another one\n subprocess.check_call([spark_submit] + prepend_args + args, env=env)", "response": "This function runs a spark - submit command on the given python environment and adds the given list of arguments to the spark command line options."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef launcher(deploy_mode, args, working_dir=\".\", cleanup=True):\n spark_args = args.copy()\n # Scan through the arguments to find --conda\n # TODO: make this optional, if not specified ignore all the python stuff\n # Is this double dash in front of conda env correct?\n i = spark_args.index(\"--conda-env\")\n # pop off the '--conda-env' portion and just drop it on the floor\n 
spark_args.pop(i)\n # Now pop off the actual conda env var passed to the launcher\n conda_env = spark_args.pop(i)\n cleanup_functions = []\n # What else could this possibly be other than a string here?\n assert isinstance(conda_env, str)\n func_kwargs = {'conda_env': conda_env,\n 'deploy_mode': deploy_mode,\n 'working_dir': working_dir,\n 'cleanup_functions': cleanup_functions}\n if conda_env.startswith(\"hdfs:/\"):\n # \"hadoop fs -ls\" can return URLs with only a single \"/\" after the \"hdfs:\" scheme\n env_name, env_dir, env_archive = _conda_from_hdfs(**func_kwargs)\n elif conda_env.endswith(\".zip\"):\n # We have a precreated conda environment around.\n env_name, env_dir, conda_env = _conda_from_zip(**func_kwargs)\n elif conda_env.endswith(\".yaml\"):\n # The case where we have to CREATE the environment ourselves\n env_name, env_dir, env_archive = _conda_from_yaml(**func_kwargs)\n else:\n raise NotImplementedError(\"Can only run launcher if your conda env is on hdfs (starts \"\n \"with 'hdfs:/', is already a zip (ends with '.zip'), or is \"\n \"coming from a yaml specification (ends with '.yaml' and \"\n \"conforms to the conda environment.yaml spec)\")\n del func_kwargs\n\n func_kwargs = dict(env_dir=env_dir, env_name=env_name, env_archive=env_archive, args=spark_args)\n funcs = {'client': run_pyspark_yarn_client, 'cluster': run_pyspark_yarn_cluster}\n try:\n funcs[deploy_mode](**func_kwargs)\n finally:\n if not cleanup:\n return\n # iterate over and call all cleanup functions\n for function in cleanup_functions:\n try:\n function()\n except:\n log.exception(\"Cleanup function %s failed\", function)", "response": "Initializes arguments and starts up pyspark with the correct deploy mode and environment."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef keyfilter(predicate, d, factory=dict):\n rv = factory()\n for k, v in iteritems(d):\n if predicate(k):\n rv[k] = v\n return rv", "response": "Filter items in 
dictionary by key."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef with_sql_context(application_name, conf=None):\n if conf is None:\n conf = default_configuration\n assert isinstance(conf, SparkConfiguration)\n\n sc = conf.spark_context(application_name)\n import pyspark.sql\n try:\n yield sc, pyspark.sql.SQLContext(sc)\n finally:\n sc.stop()", "response": "Context manager for a Spark context and SQLContext"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset a particular spark property by the string key name if it hasn t already been set.", "response": "def set_if_unset(self, key, value):\n \"\"\"Set a particular spark property by the string key name if it hasn't already been set.\n\n This method allows chaining so that i can provide a similar feel to the standard Scala way of setting\n multiple configurations\n\n Parameters\n ----------\n key : string\n value : string\n\n Returns\n -------\n self\n \"\"\"\n if key not in self._conf_dict:\n self.set(key, value)\n return self"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ninitialize the correct environment variables for spark", "response": "def _set_environment_variables(self):\n \"\"\"Initializes the correct environment variables for spark\"\"\"\n cmd = []\n\n # special case for driver JVM properties.\n self._set_launcher_property(\"driver-memory\", \"spark.driver.memory\")\n self._set_launcher_property(\"driver-library-path\", \"spark.driver.extraLibraryPath\")\n self._set_launcher_property(\"driver-class-path\", \"spark.driver.extraClassPath\")\n self._set_launcher_property(\"driver-java-options\", \"spark.driver.extraJavaOptions\")\n self._set_launcher_property(\"executor-memory\", \"spark.executor.memory\")\n self._set_launcher_property(\"executor-cores\", \"spark.executor.cores\")\n\n for key, val in self._spark_launcher_args.items():\n if val is None:\n continue\n val = 
list(as_iterable(val))\n if len(val):\n if key in self._boolean_args:\n cmd.append(\"--{key}\".format(key=key))\n else:\n sep = self._spark_launcher_arg_sep.get(key, ',')\n cmd.append('--{key} {val}'.format(key=key, val=sep.join(str(x) for x in val)))\n\n cmd += ['pyspark-shell']\n cmd_line = ' '.join(x for x in cmd if x)\n os.environ[\"PYSPARK_SUBMIT_ARGS\"] = cmd_line\n log.info(\"spark-submit arguments: %s\", cmd_line)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninitializing spark so that pyspark is importable.", "response": "def _init_spark(self):\n \"\"\"Initializes spark so that pyspark is importable. This also sets up the required environment variables\n \"\"\"\n global _SPARK_INITIALIZED\n spark_home = self.spark_home\n python_path = self._python_path\n\n if use_findspark:\n if _SPARK_INITIALIZED:\n if spark_home == os.environ[\"SPARK_HOME\"]:\n # matches with already initialized\n pass\n else:\n # findspark adds two path to the search path.\n sys.path.pop(0)\n sys.path.pop(0)\n findspark.init(spark_home=spark_home, edit_rc=False, edit_profile=False, python_path=python_path)\n else:\n findspark.init(spark_home=spark_home, edit_rc=False, edit_profile=False, python_path=python_path)\n\n _SPARK_INITIALIZED = True\n self._set_environment_variables()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef spark_context(self, application_name):\n\n # initialize the spark configuration\n self._init_spark()\n import pyspark\n import pyspark.sql\n\n # initialize conf\n spark_conf = pyspark.SparkConf()\n for k, v in self._spark_conf_helper._conf_dict.items():\n spark_conf.set(k, v)\n\n log.info(\"Starting SparkContext\")\n return pyspark.SparkContext(appName=application_name, conf=spark_conf)", "response": "Create a SparkContext given the parameters configured in this class."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a 
SparkContext object given the parameters configured in this class and application_name.", "response": "def sql_context(self, application_name):\n \"\"\"Create a spark context given the parameters configured in this class.\n\n The caller is responsible for calling ``.close`` on the resulting spark context\n\n Parameters\n ----------\n application_name : string\n\n Returns\n -------\n sc : SparkContext\n \"\"\"\n sc = self.spark_context(application_name)\n import pyspark\n sqlContext = pyspark.SQLContext(sc)\n return (sc, sqlContext)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef call(self, uri, params=None, token=None, risk_token=None, client_ip=None):\n\n headers = {'Content-Type': 'application/json',\n 'User-Agent': 'WePay Python SDK'}\n url = self.api_endpoint + uri\n\n if self.access_token or token:\n headers['Authorization'] = 'Bearer ' + \\\n (token if token else self.access_token)\n\n if self.api_version:\n headers['Api-Version'] = self.api_version\n\n if risk_token:\n headers['WePay-Risk-Token'] = risk_token\n\n if client_ip:\n headers['Client-IP'] = client_ip\n\n if params:\n params = json.dumps(params)\n\n try:\n response = self.requests.post(\n url, data=params, headers=headers,\n timeout=self.request_timeout)\n return response.json()\n except:\n if 400 <= response.status_code <= 599:\n raise Exception('Unknown error. Please contact support@wepay.com')", "response": "Calls wepay. 
com and returns the JSON response."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the URL to send the user to in order to get authorization.", "response": "def get_authorization_url(self, redirect_uri, client_id, options=None,\n scope=None):\n \"\"\"\n Returns a URL to send the user to in order to get authorization.\n After getting authorization the user will return to redirect_uri.\n Optionally, scope can be set to limit permissions, and the options\n dict can be loaded with any combination of state, user_name\n or user_email.\n\n :param str redirect_uri: The URI to redirect to after a authorization.\n :param str client_id: The client ID issued by WePay to your app.\n :keyword dict options: Allows for passing additional values to the\n authorize call, aside from scope, redirect_uri, and etc.\n :keyword str scope: A comma-separated string of permissions.\n \"\"\"\n if not options:\n options = {}\n if not scope:\n scope = \"manage_accounts,collect_payments,\" \\\n \"view_user,preapprove_payments,\" \\\n \"manage_subscriptions,send_money\"\n\n options['scope'] = scope\n options['redirect_uri'] = redirect_uri\n options['client_id'] = client_id\n\n return self.browser_endpoint + '/oauth2/authorize?' + \\\n urllib.urlencode(options)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_token(\n self, redirect_uri, client_id, client_secret,\n code, callback_uri=None):\n \"\"\"\n Calls wepay.com/v2/oauth2/token to get an access token. Sets the\n access_token for the WePay instance and returns the entire response\n as a dict. 
Should only be called after the user returns from being\n sent to get_authorization_url.\n\n :param str redirect_uri: The same URI specified in the\n :py:meth:`get_authorization_url` call that preceeded this.\n :param str client_id: The client ID issued by WePay to your app.\n :param str client_secret: The client secret issued by WePay\n to your app.\n :param str code: The code returned by :py:meth:`get_authorization_url`.\n :param str callback_uri: The callback_uri you want to receive IPNs for\n this user on.\n \"\"\"\n params = {\n 'redirect_uri': redirect_uri,\n 'client_id': client_id,\n 'client_secret': client_secret,\n 'code': code,\n }\n if callback_uri:\n params.update({'callback_uri': callback_uri})\n response = self.call('/oauth2/token', params)\n \n # The call to /oauth2/token should return an access_token\n # if the access_token was not returned, then an error occured\n # we need to raise this error, \n # otherwise this will die when trying to use the 'access_token' field\n if 'access_token' not in response:\n raise WePayError(response['error'], response['error_code'], response['error_description'])\n\n self.access_token = response['access_token']\n return response", "response": "Get an access token for the WePay instance."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates a blank figure.", "response": "def figure(bgcolor=(1,1,1), size=(1000,1000)):\n \"\"\"Create a blank figure.\n\n Parameters\n ----------\n bgcolor : (3,) float\n Color of the background with values in [0,1].\n size : (2,) int\n Width and height of the figure in pixels.\n \"\"\"\n Visualizer3D._scene = Scene(background_color=np.array(bgcolor))\n Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)\n Visualizer3D._init_size = np.array(size)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndisplays the current figure and enable interaction.", "response": "def 
show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):\n \"\"\"Display the current figure and enable interaction.\n\n Parameters\n ----------\n animate : bool\n Whether or not to animate the scene.\n axis : (3,) float or None\n If present, the animation will rotate about the given axis in world coordinates.\n Otherwise, the animation will rotate in azimuth.\n clf : bool\n If true, the Visualizer is cleared after showing the figure.\n kwargs : dict\n Other keyword arguments for the SceneViewer instance.\n \"\"\"\n x = SceneViewer(Visualizer3D._scene,\n size=Visualizer3D._init_size,\n animate=animate,\n animate_axis=axis,\n save_directory=Visualizer3D._save_directory,\n **kwargs)\n if x.save_directory:\n Visualizer3D._save_directory = x.save_directory\n if clf:\n Visualizer3D.clf()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):\n v = SceneViewer(Visualizer3D._scene,\n size=Visualizer3D._init_size,\n animate=(n_frames > 1),\n animate_axis=axis,\n max_frames=n_frames,\n **kwargs)\n\n if clf:\n Visualizer3D.clf()\n\n return v.saved_frames", "response": "Render frames from the viewer."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nsave frames from the viewer out to a file.", "response": "def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):\n \"\"\"Save frames from the viewer out to a file.\n\n Parameters\n ----------\n filename : str\n The filename in which to save the output image. If more than one frame,\n should have extension .gif.\n n_frames : int\n Number of frames to render. 
If more than one, the scene will animate.\n axis : (3,) float or None\n If present, the animation will rotate about the given axis in world coordinates.\n Otherwise, the animation will rotate in azimuth.\n clf : bool\n If true, the Visualizer is cleared after rendering the figure.\n kwargs : dict\n Other keyword arguments for the SceneViewer instance.\n \"\"\"\n if n_frames >1 and os.path.splitext(filename)[1] != '.gif':\n raise ValueError('Expected .gif file for multiple-frame save.')\n v = SceneViewer(Visualizer3D._scene,\n size=Visualizer3D._init_size,\n animate=(n_frames > 1),\n animate_axis=axis,\n max_frames=n_frames,\n **kwargs)\n data = [m.data for m in v.saved_frames]\n if len(data) > 1:\n imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)\n else:\n imageio.imwrite(filename, data[0])\n\n if clf:\n Visualizer3D.clf()"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):\n n_frames = framerate * time\n az = 2.0 * np.pi / n_frames\n Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,\n animate_rate=framerate, animate_az=az)\n if clf:\n Visualizer3D.clf()", "response": "Off - screen save a loop of 3D."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef clf():\n Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)\n Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)", "response": "Clear the current figure figure\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):\n if isinstance(points, BagOfPoints):\n if points.dim != 3:\n raise ValueError('BagOfPoints must 
have dimension 3xN!')\n else:\n if type(points) is not np.ndarray:\n raise ValueError('Points visualizer expects BagOfPoints or numpy array!')\n if len(points.shape) == 1:\n points = points[:,np.newaxis].T\n if len(points.shape) != 2 or points.shape[1] != 3:\n raise ValueError('Numpy array of points must have dimension (N,3)')\n frame = 'points'\n if T_points_world:\n frame = T_points_world.from_frame\n points = PointCloud(points.T, frame=frame)\n\n color = np.array(color)\n if subsample is not None:\n num_points = points.num_points\n points, inds = points.subsample(subsample, random=random)\n if color.shape[0] == num_points and color.shape[1] == 3:\n color = color[inds,:]\n\n # transform into world frame\n if points.frame != 'world':\n if T_points_world is None:\n T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')\n points_world = T_points_world * points\n else:\n points_world = points\n\n point_data = points_world.data\n if len(point_data.shape) == 1:\n point_data = point_data[:,np.newaxis]\n point_data = point_data.T\n\n mpcolor = color\n if len(color.shape) > 1:\n mpcolor = color[0]\n mp = MaterialProperties(\n color = np.array(mpcolor),\n k_a = 0.5,\n k_d = 0.3,\n k_s = 0.0,\n alpha = 10.0,\n smooth=True\n )\n\n # For each point, create a sphere of the specified color and size.\n sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])\n raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))\n raw_pose_data[3::4, :3] = point_data\n\n instcolor = None\n if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:\n instcolor = color\n obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)\n if name is None:\n name = str(uuid.uuid4())\n Visualizer3D._scene.add_object(name, obj)", "response": "Scatter a point cloud in pose T_points_world."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef mesh(mesh, 
T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),\n style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):\n \"\"\"Visualize a 3D triangular mesh.\n\n Parameters\n ----------\n mesh : trimesh.Trimesh\n The mesh to visualize.\n T_mesh_world : autolab_core.RigidTransform\n The pose of the mesh, specified as a transformation from mesh frame to world frame.\n style : str\n Triangular mesh style, either 'surface' or 'wireframe'.\n smooth : bool\n If true, the mesh is smoothed before rendering.\n color : 3-tuple\n Color tuple.\n name : str\n A name for the object to be added.\n \"\"\"\n if not isinstance(mesh, trimesh.Trimesh):\n raise ValueError('Must provide a trimesh.Trimesh object')\n\n mp = MaterialProperties(\n color = np.array(color),\n k_a = 0.5,\n k_d = 0.3,\n k_s = 0.1,\n alpha = 10.0,\n smooth=smooth,\n wireframe=(style == 'wireframe')\n )\n\n obj = SceneObject(mesh, T_mesh_world, mp)\n if name is None:\n name = str(uuid.uuid4())\n Visualizer3D._scene.add_object(name, obj)", "response": "Visualize a 3D triangular mesh."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nvisualize a mesh in a stable pose.", "response": "def mesh_stable_pose(mesh, T_obj_table,\n T_table_world=RigidTransform(from_frame='table', to_frame='world'),\n style='wireframe', smooth=False, color=(0.5,0.5,0.5),\n dim=0.15, plot_table=True, plot_com=False, name=None):\n \"\"\"Visualize a mesh in a stable pose.\n\n Parameters\n ----------\n mesh : trimesh.Trimesh\n The mesh to visualize.\n T_obj_table : autolab_core.RigidTransform\n Pose of object relative to table.\n T_table_world : autolab_core.RigidTransform\n Pose of table relative to world.\n style : str\n Triangular mesh style, either 'surface' or 'wireframe'.\n smooth : bool\n If true, the mesh is smoothed before rendering.\n color : 3-tuple\n Color tuple.\n dim : float\n The side-length for the table.\n plot_table : bool\n If true, a table is visualized as well.\n plot_com : bool\n If 
true, a ball is visualized at the object's center of mass.\n name : str\n A name for the object to be added.\n\n Returns\n -------\n autolab_core.RigidTransform\n The pose of the mesh in world frame.\n \"\"\"\n T_obj_table = T_obj_table.as_frames('obj', 'table')\n T_obj_world = T_table_world * T_obj_table\n\n Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)\n if plot_table:\n Visualizer3D.table(T_table_world, dim=dim)\n if plot_com:\n Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)\n return T_obj_world"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nplotting a 3D pose of the current species.", "response": "def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):\n \"\"\"Plot a 3D pose as a set of axes (x red, y green, z blue).\n\n Parameters\n ----------\n T_frame_world : autolab_core.RigidTransform\n The pose relative to world coordinates.\n alpha : float\n Length of plotted x,y,z axes.\n tube_radius : float\n Radius of plotted x,y,z axes.\n center_scale : float\n Radius of the pose's origin ball.\n \"\"\"\n R = T_frame_world.rotation\n t = T_frame_world.translation\n\n x_axis_tf = np.array([t, t + alpha * R[:,0]])\n y_axis_tf = np.array([t, t + alpha * R[:,1]])\n z_axis_tf = np.array([t, t + alpha * R[:,2]])\n\n Visualizer3D.points(t, color=(1,1,1), scale=center_scale)\n Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)\n Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)\n Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):\n\n table_vertices = np.array([[ dim, dim, 0],\n [ dim, -dim, 0],\n [-dim, dim, 0],\n [-dim, -dim, 0]]).astype('float')\n table_tris = np.array([[0, 1, 2], [1, 2, 
3]])\n table_mesh = trimesh.Trimesh(table_vertices, table_tris)\n table_mesh.apply_transform(T_table_world.matrix)\n Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)", "response": "Plot a 3D table mesh in 3D."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nplot a 3D curve through a set of 3D points using tubes.", "response": "def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):\n \"\"\"Plot a 3d curve through a set of points using tubes.\n\n Parameters\n ----------\n points : (n,3) float\n A series of 3D points that define a curve in space.\n color : (3,) float\n The color of the tube.\n tube_radius : float\n Radius of tube representing curve.\n n_components : int\n The number of edges in each polygon representing the tube.\n name : str\n A name for the object to be added.\n \"\"\"\n points = np.asanyarray(points)\n mp = MaterialProperties(\n color = np.array(color),\n k_a = 0.5,\n k_d = 0.3,\n k_s = 0.0,\n alpha = 10.0,\n smooth=True\n )\n\n # Generate circular polygon\n vec = np.array([0,1]) * tube_radius\n angle = np.pi * 2.0 / n_components\n rotmat = np.array([\n [np.cos(angle), -np.sin(angle)],\n [np.sin(angle), np.cos(angle)]\n ])\n perim = []\n for i in range(n_components):\n perim.append(vec)\n vec = np.dot(rotmat, vec)\n poly = Polygon(perim)\n\n # Sweep it out along the path\n mesh = trimesh.creation.sweep_polygon(poly, points)\n obj = SceneObject(mesh, material=mp)\n if name is None:\n name = str(uuid.uuid4())\n Visualizer3D._scene.add_object(name, obj)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef figure(size=(8,8), *args, **kwargs):\n return plt.figure(figsize=size, *args, **kwargs)", "response": "Creates a figure with the current version of the current version of the current version of the mayavi figure"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nshow the current 
figure.", "response": "def show(filename=None, *args, **kwargs):\n \"\"\" Show the current figure.\n\n Parameters\n ----------\n filename : :obj:`str`\n filename to save the image to, for auto-saving\n \"\"\"\n if filename is None:\n plt.show(*args, **kwargs)\n else:\n plt.savefig(filename, *args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ndisplays an image. Parameters ---------- image : :obj:`perception.Image` image to display auto_subplot : bool whether or not to automatically subplot for multi-channel images e.g. rgbd", "response": "def imshow(image, auto_subplot=False, **kwargs):\n \"\"\" Displays an image.\n Parameters\n ----------\n image : :obj:`perception.Image`\n image to display\n auto_subplot : bool\n whether or not to automatically subplot for multi-channel images e.g. rgbd\n \"\"\"\n if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):\n plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)\n elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):\n plt.imshow(image.data, **kwargs)\n elif isinstance(image, DepthImage):\n plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)\n elif isinstance(image, RgbdImage):\n if auto_subplot:\n plt.subplot(1,2,1)\n plt.imshow(image.color.data, **kwargs)\n plt.axis('off')\n plt.subplot(1,2,2)\n plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)\n else:\n plt.imshow(image.color.data, **kwargs)\n elif isinstance(image, GdImage):\n if auto_subplot:\n plt.subplot(1,2,1)\n plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)\n plt.axis('off')\n plt.subplot(1,2,2)\n plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)\n else:\n plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)\n plt.axis('off')"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndraw a box on the current plot.", "response": "def box(b, line_width=2, color='g', style='-'):\n \"\"\" Draws a box on the current 
plot.\n\n Parameters\n ----------\n b : :obj:`autolab_core.Box`\n box to draw\n line_width : int\n width of lines on side of box\n color : :obj:`str`\n color of box\n style : :obj:`str`\n style of lines to draw\n \"\"\"\n if not isinstance(b, Box):\n raise ValueError('Input must be of type Box')\n \n # get min pixels\n min_i = b.min_pt[1]\n min_j = b.min_pt[0]\n max_i = b.max_pt[1]\n max_j = b.max_pt[0]\n top_left = np.array([min_i, min_j])\n top_right = np.array([max_i, min_j])\n bottom_left = np.array([min_i, max_j])\n bottom_right = np.array([max_i, max_j])\n\n # create lines\n left = np.c_[top_left, bottom_left].T\n right = np.c_[top_right, bottom_right].T\n top = np.c_[top_left, top_right].T\n bottom = np.c_[bottom_left, bottom_right].T\n\n # plot lines\n plt.plot(left[:,0], left[:,1], linewidth=line_width, color=color, linestyle=style)\n plt.plot(right[:,0], right[:,1], linewidth=line_width, color=color, linestyle=style)\n plt.plot(top[:,0], top[:,1], linewidth=line_width, color=color, linestyle=style)\n plt.plot(bottom[:,0], bottom[:,1], linewidth=line_width, color=color, linestyle=style)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef contour(c, subsample=1, size=10, color='g'):\n if not isinstance(c, Contour):\n raise ValueError('Input must be of type Contour')\n \n for i in range(c.num_pixels)[0::subsample]:\n plt.scatter(c.boundary_pixels[i,1], c.boundary_pixels[i,0], s=size, c=color)", "response": "Draws a contour on the current plot by scattering points."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nplotting a 2D grasp with arrow and jaw style using matplotlib.", "response": "def grasp(grasp, width=None, color='r', arrow_len=4, arrow_head_len = 2, arrow_head_width = 3,\n arrow_width = 1, jaw_len=3, jaw_width = 1.0,\n grasp_center_size=1, grasp_center_thickness=2.5,\n grasp_center_style='+', grasp_axis_width=1,\n grasp_axis_style='--', 
line_width=1.0, alpha=50, show_center=True, show_axis=False, scale=1.0):\n \"\"\"\n Plots a 2D grasp with arrow and jaw style using matplotlib\n \n Parameters\n ----------\n grasp : :obj:`Grasp2D`\n 2D grasp to plot\n width : float\n width, in pixels, of the grasp (overrides Grasp2D.width_px)\n color : :obj:`str`\n color of plotted grasp\n arrow_len : float\n length of arrow body\n arrow_head_len : float\n length of arrow head\n arrow_head_width : float\n width of arrow head\n arrow_width : float\n width of arrow body\n jaw_len : float\n length of jaw line\n jaw_width : float\n line width of jaw line\n grasp_center_thickness : float\n thickness of grasp center\n grasp_center_style : :obj:`str`\n style of center of grasp\n grasp_axis_width : float\n line width of grasp axis\n grasp_axis_style : :obj:`str`\n style of grasp axis line\n show_center : bool\n whether or not to plot the grasp center\n show_axis : bool\n whether or not to plot the grasp axis\n \"\"\"\n # set vars for suction\n skip_jaws = False\n if not hasattr(grasp, 'width'):\n grasp_center_style = '.'\n grasp_center_size = 50\n plt.scatter(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, s=scale*grasp_center_size)\n\n if hasattr(grasp, 'orientation'):\n axis = np.array([np.cos(grasp.angle), np.sin(grasp.angle)])\n p = grasp.center.data + alpha * axis\n line = np.c_[grasp.center.data, p]\n plt.plot(line[0,:], line[1,:], color=color, linewidth=scale*grasp_axis_width)\n plt.scatter(p[0], p[1], c=color, marker=grasp_center_style, s=scale*grasp_center_size)\n return\n\n # plot grasp center\n if show_center:\n plt.plot(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, mew=scale*grasp_center_thickness, ms=scale*grasp_center_size)\n if skip_jaws:\n return\n \n # compute axis and jaw locations\n axis = grasp.axis\n width_px = width\n if width_px is None and hasattr(grasp, 'width_px'):\n width_px = grasp.width_px\n g1 = grasp.center.data - (float(width_px) / 2) * axis\n g2 = 
grasp.center.data + (float(width_px) / 2) * axis\n g1p = g1 - scale * arrow_len * axis # start location of grasp jaw 1\n g2p = g2 + scale * arrow_len * axis # start location of grasp jaw 2\n\n # plot grasp axis\n if show_axis:\n plt.plot([g1[0], g2[0]], [g1[1], g2[1]], color=color, linewidth=scale*grasp_axis_width, linestyle=grasp_axis_style)\n \n # direction of jaw line\n jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])\n \n # length of arrow\n alpha = scale*(arrow_len - arrow_head_len)\n \n # plot first jaw\n g1_line = np.c_[g1p, g1 - scale*arrow_head_len*axis].T\n plt.arrow(g1p[0], g1p[1], alpha*axis[0], alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)\n jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T\n\n plt.plot(jaw_line1[:,0], jaw_line1[:,1], linewidth=scale*jaw_width, c=color) \n\n # plot second jaw\n g2_line = np.c_[g2p, g2 + scale*arrow_head_len*axis].T\n plt.arrow(g2p[0], g2p[1], -alpha*axis[0], -alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)\n jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T\n plt.plot(jaw_line2[:,0], jaw_line2[:,1], linewidth=scale*jaw_width, c=color)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef all_subs(bounds):\n idx_list = []\n for i in range(len(bounds)):\n this_dim = bounds[i]\n lo,hi = this_dim[0],this_dim[1] # bounds for this dimension\n this_dim_idxs = range(lo,hi+1) # indexes for this dimension\n idx_list.append(this_dim_idxs)\n\n return idx2subs(idx_list)", "response": "given a list of tuples specifying the bounds of an array all_subs()\n returns a list of all the tuples of subscripts for that array"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef idx2subs(idx_list):\n if not idx_list:\n return [()]\n return [items + (item,) \n for items 
in idx2subs(idx_list[:-1]) for item in idx_list[-1]]", "response": "Given a list of index values for each dimension of an array returns a list of tuples of subscripts for all of the array elements specified by those index values."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngiving an expression expr denoting a list of values returns a list of values for that expression.", "response": "def array_values(expr):\n \"\"\"Given an expression expr denoting a list of values, array_values(expr) \n returns a list of values for that expression.\"\"\"\n if isinstance(expr, Array):\n return expr.get_elems(all_subs(expr._bounds))\n elif isinstance(expr, list):\n vals = [array_values(x) for x in expr]\n return flatten(vals)\n else:\n return [expr]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ngive an expression that denotes the set of elements that are to be accessed by the array_subscripts() returns a list of the elements denoted by expr.", "response": "def array_subscripts(expr):\n \"\"\"Given a subscript expression expr (i.e., an expression that denotes the\n set of elements of some array that are to be accessed), \n array_subscripts() returns a list of the elements denoted by expr.\"\"\"\n if isinstance(expr, Array):\n return all_subs(expr._bounds)\n elif isinstance(expr, list):\n subs = [subscripts(x) for x in expr]\n return flatten(subs)\n else:\n return [expr]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngiving a list of values in_list returns the list obtained by flattening the top - level elements of in_list", "response": "def flatten(in_list):\n \"\"\"given a list of values in_list, flatten returns the list obtained by \n flattening the top-level elements of in_list.\"\"\"\n out_list = []\n for val in in_list:\n if isinstance(val, list):\n out_list.extend(val)\n else:\n out_list.append(val)\n\n return out_list"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the 
following Python 3 code\ndef implied_loop_expr(expr, start, end, delta):\n if delta > 0:\n stop = end+1\n else:\n stop = end-1\n\n result_list = [expr(x) for x in range(start,stop,delta)]\n\n # return the flattened list of results\n return list(itertools.chain(result_list))", "response": "given the parameters of an implied loop expr returns a list of values of the lambda expression expr applied to \n successive values of the implied loop"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _mk_uninit_array(self, bounds):\n if len(bounds) == 0:\n raise For2PyError(\"Zero-length arrays current not handled!.\")\n\n this_dim = bounds[0]\n lo,hi = this_dim[0],this_dim[1]\n sz = hi-lo+1\n\n if len(bounds) == 1:\n return [None] * sz\n\n sub_array = self._mk_uninit_array(bounds[1:])\n this_array = [copy.deepcopy(sub_array) for i in range(sz)]\n\n return this_array", "response": "creates and returns an N - dimensional array of \n the size specified by the bounds with each element set to the value\n None."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _posn(self, bounds, idx):\n lo,hi = bounds[0],bounds[1]\n assert idx >= lo and idx <= hi, \\\n f\"Array index {idx} out of bounds: {bounds}\\n\"\n \n return idx-lo", "response": "returns the position in a 0 - based array corresponding to idx"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _access(self, subs, acc_type, val):\n if isinstance(subs, int): \n # if subs is just an integer, take it to be an index value.\n subs = (subs,)\n\n if len(subs) == 0:\n raise For2PyError(\"Zero-length arrays currently not handled.\")\n\n bounds = self._bounds\n sub_arr = self._values\n ndims = len(subs)\n for i in range(ndims):\n this_pos = self._posn(bounds[i], subs[i])\n\n if i == ndims-1:\n if acc_type == _GET_:\n return sub_arr[this_pos]\n else:\n 
sub_arr[this_pos] = val\n else:\n sub_arr = sub_arr[this_pos]", "response": "_access returns the value of the element specified by the subscript values subs."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nperforms string replacement from dict in a single pass.", "response": "def multiple_replace(d: Dict[str, str], text: str) -> str:\n \"\"\" Performs string replacement from dict in a single pass. Taken from\n https://www.oreilly.com/library/view/python-cookbook/0596001673/ch03s15.html\n \"\"\"\n # Create a regular expression from all of the dictionary keys\n regex = re.compile(\"|\".join(map(re.escape, d.keys())))\n\n # For each match, look up the corresponding value in the dictionary\n return regex.sub(lambda match: d[match.group(0)], text)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create_parameterized_CAG(input, output, filename=\"CAG_with_indicators_and_values.pdf\"):\n with open(input, \"rb\") as f:\n G = pickle.load(f)\n G.parameterize(year=2017, month=4)\n G.get_timeseries_values_for_indicators()\n with open(output, \"wb\") as f:\n pickle.dump(G, f)", "response": "Create a CAG with mapped and parameterized indicators."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting a set of all unique concepts in the INDRA statements.", "response": "def get_concepts(sts: List[Influence]) -> Set[str]:\n \"\"\" Get a set of all unique concepts in the list of INDRA statements. 
\"\"\"\n return set(flatMap(nameTuple, sts))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_valid_statements_for_modeling(sts: List[Influence]) -> List[Influence]:\n\n return [\n s\n for s in sts\n if is_grounded_statement(s)\n and (s.subj_delta[\"polarity\"] is not None)\n and (s.obj_delta[\"polarity\"] is not None)\n ]", "response": "Select INDRA statements that can be used to construct a Delphi model from a given list of INDRA statements."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nchecking if an Influence statement is grounded.", "response": "def _(s: Influence) -> bool:\n \"\"\" Check if an Influence statement is grounded \"\"\"\n return is_grounded(s.subj) and is_grounded(s.obj)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck if an Influence statement is grounded.", "response": "def is_grounded_statement(s: Influence) -> bool:\n \"\"\" Check if an Influence statement is grounded \"\"\"\n return is_grounded_concept(s.subj) and is_grounded_concept(s.obj)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck if a concept has a high grounding score.", "response": "def _(c: Concept, cutoff: float = 0.7) -> bool:\n \"\"\"Check if a concept has a high grounding score. \"\"\"\n\n return is_grounded(c) and (top_grounding_score(c) >= cutoff)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning true if subj and obj are grounded to the UN ontology.", "response": "def _(s: Influence, cutoff: float = 0.7) -> bool:\n \"\"\" Returns true if both subj and obj are grounded to the UN ontology. 
\"\"\"\n\n return all(map(lambda c: is_well_grounded(c, cutoff), s.agent_list()))"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncheck if a concept is grounded and has a high grounding score.", "response": "def is_well_grounded_concept(c: Concept, cutoff: float = 0.7) -> bool:\n \"\"\"Check if a concept has a high grounding score. \"\"\"\n\n return is_grounded(c) and (top_grounding_score(c) >= cutoff)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn true if subj and obj are grounded to the UN ontology.", "response": "def is_well_grounded_statement(s: Influence, cutoff: float = 0.7) -> bool:\n \"\"\" Returns true if both subj and obj are grounded to the UN ontology. \"\"\"\n\n return all(\n map(lambda c: is_well_grounded_concept(c, cutoff), s.agent_list())\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef is_grounded_to_name(c: Concept, name: str, cutoff=0.7) -> bool:\n return (top_grounding(c) == name) if is_well_grounded(c, cutoff) else False", "response": "Check if a concept is grounded to a given name."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef contains_relevant_concept(\n s: Influence, relevant_concepts: List[str], cutoff=0.7\n) -> bool:\n \"\"\" Returns true if a given Influence statement has a relevant concept, and\n false otherwise. 
\"\"\"\n\n return any(\n map(lambda c: contains_concept(s, c, cutoff=cutoff), relevant_concepts)\n )", "response": "Returns true if a given Influence statement has a relevant concept and False otherwise."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef top_grounding(c: Concept) -> str:\n return c.db_refs[\"UN\"][0][0] if \"UN\" in c.db_refs else c.name", "response": "Return the top - scoring grounding from the UN ontology."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a 2 - tuple consisting of the top groundings of the subj and obj of an Influence statement.", "response": "def nameTuple(s: Influence) -> Tuple[str, str]:\n \"\"\" Returns a 2-tuple consisting of the top groundings of the subj and obj\n of an Influence statement. \"\"\"\n return top_grounding(s.subj), top_grounding(s.obj)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef createNewICM():\n data = json.loads(request.data)\n G = AnalysisGraph.from_uncharted_json_serialized_dict(data)\n G.assemble_transition_model_from_gradable_adjectives()\n G.sample_from_prior()\n G.to_sql(app=current_app)\n _metadata = ICMMetadata.query.filter_by(id=G.id).first().deserialize()\n del _metadata[\"model_id\"]\n return jsonify(_metadata)", "response": "Create a new ICM"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef getICMByUUID(uuid: str):\n _metadata = ICMMetadata.query.filter_by(id=uuid).first().deserialize()\n del _metadata[\"model_id\"]\n return jsonify(_metadata)", "response": "Fetch an ICM by UUID"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn all ICM primitives", "response": "def getICMPrimitives(uuid: str):\n \"\"\" returns all ICM primitives (TODO - needs filter support)\"\"\"\n primitives = [\n p.deserialize()\n for p in 
CausalPrimitive.query.filter_by(model_id=uuid).all()\n ]\n for p in primitives:\n del p[\"model_id\"]\n return jsonify(primitives)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a list of Evidence objects for a causal primitive", "response": "def getEvidenceForID(uuid: str, prim_id: str):\n \"\"\" returns evidence for a causal primitive (needs pagination support)\"\"\"\n evidences = [\n evidence.deserialize()\n for evidence in Evidence.query.filter_by(\n causalrelationship_id=prim_id\n ).all()\n ]\n for evidence in evidences:\n del evidence[\"causalrelationship_id\"]\n\n return jsonify(evidences)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nexecute an experiment over the model", "response": "def createExperiment(uuid: str):\n \"\"\" Execute an experiment over the model\"\"\"\n data = request.get_json()\n G = DelphiModel.query.filter_by(id=uuid).first().model\n if os.environ.get(\"TRAVIS\") is not None:\n config_file=\"bmi_config.txt\"\n else:\n if not os.path.exists(\"/tmp/delphi\"):\n os.makedirs(\"/tmp/delphi\", exist_ok=True)\n config_file=\"/tmp/delphi/bmi_config.txt\"\n\n G.initialize(initialize_indicators = False, config_file=config_file)\n for n in G.nodes(data=True):\n rv = n[1][\"rv\"]\n rv.partial_t = 0.0\n for variable in data[\"interventions\"]:\n if n[1][\"id\"] == variable[\"id\"]:\n # TODO : Right now, we are only taking the first value in the\n # \"values\" list. Need to generalize this so that you can have\n # multiple interventions at different times.\n\n # TODO : The subtraction of 1 is a TEMPORARY PATCH to address\n # the mismatch in semantics between the ICM API and the Delphi\n # model. 
MUST FIX ASAP.\n rv.partial_t = variable[\"values\"][\"value\"][\"value\"] - 1\n for s0 in G.s0:\n s0[f\"\u2202({n[0]})/\u2202t\"] = rv.partial_t\n break\n\n id = str(uuid4())\n experiment = ForwardProjection(baseType=\"ForwardProjection\", id=id)\n db.session.add(experiment)\n db.session.commit()\n\n result = ForwardProjectionResult(id=id, baseType=\"ForwardProjectionResult\")\n db.session.add(result)\n db.session.commit()\n\n d = dateutil.parser.parse(data[\"projection\"][\"startTime\"])\n\n n_timesteps = data[\"projection\"][\"numSteps\"]\n\n for i in range(n_timesteps):\n if data[\"projection\"][\"stepSize\"] == \"MONTH\":\n d = d + relativedelta(months=1)\n elif data[\"projection\"][\"stepSize\"] == \"YEAR\":\n d = d + relativedelta(years=1)\n\n for n in G.nodes(data=True):\n CausalVariable.query.filter_by(\n id=n[1][\"id\"]\n ).first().lastUpdated = d.isoformat()\n result.results.append(\n {\n \"id\": n[1][\"id\"],\n \"baseline\": {\n \"active\": \"ACTIVE\",\n \"time\": d.isoformat(),\n \"value\": {\"baseType\": \"FloatValue\", \"value\": 1.0},\n },\n \"intervened\": {\n \"active\": \"ACTIVE\",\n \"time\": d.isoformat(),\n \"value\": {\n \"baseType\": \"FloatValue\",\n \"value\": np.median([s[n[0]] for s in G.s0]),\n },\n },\n }\n )\n\n G.update(update_indicators=False)\n\n # Hack for 12-month evaluation - have the partial derivative decay over\n # time to restore equilibrium\n\n tau = 1.0 # Time constant to control the rate of the decay\n for n in G.nodes(data=True):\n for variable in data[\"interventions\"]:\n if n[1][\"id\"] == variable[\"id\"]:\n rv = n[1][\"rv\"]\n for s0 in G.s0:\n s0[f\"\u2202({n[0]})/\u2202t\"] = rv.partial_t * exp(-tau*i)\n\n db.session.add(result)\n db.session.commit()\n\n return jsonify(\n {\n \"id\": experiment.id,\n \"message\": \"Forward projection sent successfully\",\n }\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nlists active (running or completed) experiments", "response": 
"def getExperiments(uuid: str):\n \"\"\" list active (running or completed) experiments\"\"\"\n return jsonify([x.deserialize() for x in Experiment.query.all()])"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate an HTML representation of INDRA statements to be manually inspected for validity.", "response": "def create_statement_inspection_table(sts: List[Influence]):\n \"\"\" Display an HTML representation of a table with INDRA statements to\n manually inspect for validity.\n\n Args:\n sts: A list of INDRA statements to be manually inspected for validity.\n \"\"\"\n\n columns = [\n \"un_groundings\",\n \"subj_polarity\",\n \"obj_polarity\",\n \"Sentence\",\n \"Source API\",\n ]\n\n polarity_to_str = lambda x: \"+\" if x == 1 else \"-\" if x == -1 else \"None\"\n l = []\n for s in sts:\n subj_un_grounding = s.subj.db_refs[\"UN\"][0][0].split(\"/\")[-1]\n obj_un_grounding = s.obj.db_refs[\"UN\"][0][0].split(\"/\")[-1]\n subj_polarity = s.subj_delta[\"polarity\"]\n obj_polarity = s.obj_delta[\"polarity\"]\n subj_adjectives = s.subj_delta[\"adjectives\"]\n for e in s.evidence:\n l.append(\n (\n (subj_un_grounding, obj_un_grounding),\n subj_polarity,\n obj_polarity,\n e.text,\n e.source_api,\n )\n )\n\n df = pd.DataFrame(l, columns=columns)\n df = df.pivot_table(index=[\"un_groundings\", \"Source API\", \"Sentence\"])\n\n def hover(hover_color=\"#ffff99\"):\n return dict(\n selector=\"tr:hover\",\n props=[(\"background-color\", \"%s\" % hover_color)],\n )\n\n styles = [\n hover(),\n dict(props=[(\"font-size\", \"100%\"), (\"font-family\", \"Gill Sans\")]),\n ]\n\n return df.style.set_table_styles(styles)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_python_shell():\n\n env = os.environ\n shell = \"shell\"\n program = os.path.basename(env[\"_\"])\n\n if \"jupyter-notebook\" in program:\n shell = \"jupyter-notebook\"\n elif \"JPY_PARENT_PID\" in env or \"ipython\" in program:\n shell 
= \"ipython\"\n if \"JPY_PARENT_PID\" in env:\n shell = \"ipython-notebook\"\n\n return shell", "response": "Determine python shell\n\n get_python_shell() returns\n\n 'shell' (started python on command line using \"python\")\n 'ipython' (started ipython on command line using \"ipython\")\n 'ipython-notebook' (e.g., running in Spyder or started with \"ipython qtconsole\")\n 'jupyter-notebook' (running in a Jupyter notebook)\n\n See also https://stackoverflow.com/a/37661854"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a CAG that examines the downstream effects of changes in precipitation.", "response": "def create_precipitation_centered_CAG(input, output):\n \"\"\" Get a CAG that examines the downstream effects of changes in precipitation. \"\"\"\n\n with open(input, \"rb\") as f:\n G = pickle.load(f)\n G = G.get_subgraph_for_concept(\n \"UN/events/weather/precipitation\", depth=2, reverse=False\n )\n G.prune(cutoff=2)\n\n # Manually correcting a bad CWMS extraction\n G.edges[\n \"UN/events/weather/precipitation\",\n \"UN/entities/human/infrastructure/transportation/road\",\n ][\"InfluenceStatements\"][0].obj_delta[\"polarity\"] = -1\n with open(output, \"wb\") as f:\n pickle.dump(G, f)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef index_modules(root) -> Dict:\n\n module_index_dict = {\n node[\"name\"]: (node.get(\"tag\"), index)\n for index, node in enumerate(root)\n if node.get(\"tag\") in (\"module\", \"program\", \"subroutine\")\n }\n\n return module_index_dict", "response": "Counts the number of modules in the Fortran file including the program\n file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nprint out the array declaration in a format of Array class object declaration.", "response": "def printArray(self, node, printState: PrintState):\n \"\"\" Prints out the array declaration in a format of Array class\n object declaration. 
'arrayName = Array(Type, [bounds])'\n \"\"\"\n if (\n self.nameMapper[node[\"name\"]] not in printState.definedVars\n and self.nameMapper[node[\"name\"]] not in printState.globalVars\n ):\n printState.definedVars += [self.nameMapper[node[\"name\"]]]\n assert int(node[\"count\"]) > 0\n printState.definedVars += [node[\"name\"]]\n\n varType = \"\"\n if node[\"type\"].upper() == \"INTEGER\":\n varType = \"int\"\n elif node[\"type\"].upper() in (\"DOUBLE\", \"REAL\"):\n varType = \"float\"\n elif node[\"type\"].upper() == \"CHARACTER\":\n varType = \"str\"\n elif node[\"isDevTypeVar\"]:\n varType = node[\"type\"].lower() + \"()\"\n\n assert varType != \"\"\n\n self.pyStrings.append(f\"{node['name']} = Array({varType}, [\")\n for i in range(0, int(node[\"count\"])):\n loBound = node[\"low\" + str(i + 1)]\n upBound = node[\"up\" + str(i + 1)]\n dimensions = f\"({loBound}, {upBound})\"\n if i < int(node[\"count\"]) - 1:\n self.pyStrings.append(f\"{dimensions}, \")\n else:\n self.pyStrings.append(f\"{dimensions}\")\n self.pyStrings.append(\"])\")\n\n if node[\"isDevTypeVar\"]:\n self.pyStrings.append(printState.sep)\n # This may require updating later when we have to deal with the\n # multi-dimensional derived type arrays\n upBound = node[\"up1\"]\n self.pyStrings.append(\n f\"for z in range(1, {upBound}+1):\" + printState.sep\n )\n self.pyStrings.append(\n f\" obj = {node['type']}()\" + printState.sep\n )\n self.pyStrings.append(\n f\" {node['name']}.set_(z, obj)\" + printState.sep\n )"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef draw_graph(G: nx.DiGraph, filename: str):\n A = to_agraph(G)\n A.graph_attr[\"rankdir\"] = \"LR\"\n A.draw(filename, prog=\"dot\")", "response": "Draw a networkx graph with Pygraphviz."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting all input nodes from a network.", "response": "def get_input_nodes(G: nx.DiGraph) -> List[str]:\n \"\"\" Get all 
input nodes from a network. \"\"\"\n return [n for n, d in G.in_degree() if d == 0]"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_output_nodes(G: nx.DiGraph) -> List[str]:\n return [n for n, d in G.out_degree() if d == 0]", "response": "Get all output nodes from a network."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets a networkx graph from a DOT file.", "response": "def nx_graph_from_dotfile(filename: str) -> nx.DiGraph:\n \"\"\" Get a networkx graph from a DOT file, and reverse the edges. \"\"\"\n return nx.DiGraph(read_dot(filename).reverse())"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\noutputting a networkx graph to a DOT file.", "response": "def to_dotfile(G: nx.DiGraph, filename: str):\n \"\"\" Output a networkx graph to a DOT file. \"\"\"\n A = to_agraph(G)\n A.write(filename)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_shared_nodes(G1: nx.DiGraph, G2: nx.DiGraph) -> List[str]:\n return list(set(G1.nodes()).intersection(set(G2.nodes())))", "response": "Get all the nodes that are common to both networks."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef line_is_comment(line: str) -> bool:\n\n if line[0] in \"cCdD*!\":\n return True\n\n llstr = line.strip()\n if len(llstr) == 0 or llstr[0] == \"!\":\n return True\n\n return False", "response": "Check if a line is a comment line."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef line_starts_subpgm(line: str) -> Tuple[bool, Optional[str]]:\n\n match = RE_SUB_START.match(line)\n if match != None:\n f_name = match.group(1)\n return (True, f_name)\n\n match = RE_FN_START.match(line)\n if match != None:\n f_name = match.group(1)\n return (True, f_name)\n\n return (False, None)", "response": 
"Determines if a line in the program starts a subprogram definition."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef program_unit_name(line:str) -> str:\n match = RE_PGM_UNIT_START.match(line)\n assert match != None\n return match.group(2)", "response": "Given a line that starts a program unit this function returns the name associated\n with that program unit."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef line_is_continuation(line: str) -> bool:\n\n llstr = line.lstrip()\n return len(llstr) > 0 and llstr[0] == \"&\"", "response": "Returns True iff line is a continuation line."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef line_is_executable(line: str) -> bool:\n\n if line_is_comment(line):\n return False\n\n if re.match(RE_TYPE_NAMES, line):\n return False\n\n for exp in EXECUTABLE_CODE_START:\n if re.match(exp, line) != None:\n return True\n\n return False", "response": "line_is_executable() returns True iff the line can start an anova facility executable statement in a program."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning an iterator that yields x followed by xs.", "response": "def prepend(x: T, xs: Iterable[T]) -> Iterator[T]:\n \"\"\" Prepend a value to an iterable.\n\n Parameters\n ----------\n x\n An element of type T.\n xs\n An iterable of elements of type T.\n\n Returns\n -------\n Iterator\n An iterator that yields *x* followed by elements of *xs*.\n\n Examples\n --------\n\n >>> from delphi.utils.fp import prepend\n >>> list(prepend(1, [2, 3]))\n [1, 2, 3]\n\n \"\"\"\n return chain([x], xs)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nappend a value to an iterable.", "response": "def append(x: T, xs: Iterable[T]) -> Iterator[T]:\n \"\"\" Append a value to an 
iterable.\n\n Parameters\n ----------\n x\n An element of type T.\n xs\n An iterable of elements of type T.\n\n Returns\n -------\n Iterator\n An iterator that yields elements of *xs*, then yields *x*.\n\n\n Examples\n --------\n >>> from delphi.utils.fp import append\n >>> list(append(1, [2, 3]))\n [2, 3, 1]\n\n \"\"\"\n\n return chain(xs, [x])"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nmake an iterator that returns accumulated results of a binary function f applied to elements of an iterable.", "response": "def scanl(f: Callable[[T, U], T], x: T, xs: Iterable[U]) -> Iterator[T]:\n \"\"\" Make an iterator that returns accumulated results of a binary function\n applied to elements of an iterable.\n\n .. math::\n scanl(f, x_0, [x_1, x_2, ...]) = [x_0, f(x_0, x_1), f(f(x_0, x_1), x_2), ...]\n\n Parameters\n ----------\n f\n A binary function of two arguments of type T.\n x\n An initializer element of type T.\n xs\n An iterable of elements of type T. \n\n Returns\n -------\n Iterator\n The iterator of accumulated results.\n\n\n Examples\n --------\n >>> from delphi.utils.fp import scanl\n >>> list(scanl(lambda x, y: x + y, 10, range(5)))\n [10, 10, 11, 13, 16, 20]\n\n \"\"\"\n\n return accumulate(prepend(x, xs), f)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef scanl1(f: Callable[[T, T], T], xs: Iterable[T]) -> Iterator[T]:\n return accumulate(xs, f)", "response": "Make an iterator that returns accumulated results of a binary function f applied to elements of an iterable xs."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the accumulated result of a binary function applied to elements of an iterable.", "response": "def foldl(f: Callable[[T, U], T], x: T, xs: Iterable[U]) -> T:\n \"\"\" Returns the accumulated result of a binary function applied to elements\n of an iterable.\n\n .. 
math::\n foldl(f, x_0, [x_1, x_2, x_3]) = f(f(f(f(x_0, x_1), x_2), x_3)\n\n\n Examples\n --------\n >>> from delphi.utils.fp import foldl\n >>> foldl(lambda x, y: x + y, 10, range(5))\n 20\n\n \"\"\"\n return reduce(f, xs, x)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns the accumulated result of a binary function applied to elements of an iterable.", "response": "def foldl1(f: Callable[[T, T], T], xs: Iterable[T]) -> T:\n \"\"\" Returns the accumulated result of a binary function applied to elements\n of an iterable.\n\n .. math::\n foldl1(f, [x_0, x_1, x_2, x_3]) = f(f(f(f(x_0, x_1), x_2), x_3)\n\n\n Examples\n --------\n >>> from delphi.utils.fp import foldl1\n >>> foldl1(lambda x, y: x + y, range(5))\n 10\n \"\"\"\n\n return reduce(f, xs)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef flatten(xs: Union[List, Tuple]) -> List:\n return (\n sum(map(flatten, xs), [])\n if (isinstance(xs, list) or isinstance(xs, tuple))\n else [xs]\n )", "response": "Flatten a nested list or tuple."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef iterate(f: Callable[[T], T], x: T) -> Iterator[T]:\n return scanl(lambda x, _: f(x), x, repeat(None))", "response": "Takes a function and returns the result of successive\n applications of a function to an element."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef ptake(n: int, xs: Iterable[T]) -> Iterable[T]:\n return tqdm(take(n, xs), total=n)", "response": "Take with a tqdm progress bar."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef ltake(n: int, xs: Iterable[T]) -> List[T]:\n return list(take(n, xs))", "response": "A lazy version of take."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef 
compose(*fs: Any) -> Callable:\n return foldl1(lambda f, g: lambda *x: f(g(*x)), fs)", "response": "Compose functions from left to right."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef rcompose(*fs: Any) -> Callable:\n return foldl1(lambda f, g: lambda *x: g(f(*x)), fs)", "response": "Compose functions from right to left."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef flatMap(f: Callable, xs: Iterable) -> List:\n return flatten(lmap(f, xs))", "response": "Map a function onto an iterable and flatten the result."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncollects data into fixed - length chunks or blocks.", "response": "def grouper(xs: Iterable, n: int, fillvalue=None):\n \"\"\"Collect data into fixed-length chunks or blocks.\n >>> from delphi.utils.fp import grouper\n >>> list(grouper('ABCDEFG', 3, 'x'))\n [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]\n \"\"\"\n args = [iter(xs)] * n\n return zip_longest(*args, fillvalue=fillvalue)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nprocess CliMIS crop production data.", "response": "def process_climis_crop_production_data(data_dir: str):\n \"\"\" Process CliMIS crop production data \"\"\"\n\n climis_crop_production_csvs = glob(\n \"{data_dir}/Climis South Sudan Crop Production Data/\"\n \"Crops_EstimatedProductionConsumptionBalance*.csv\"\n )\n state_county_df = pd.read_csv(\n f\"{data_dir}/ipc_data.csv\", skipinitialspace=True\n )\n combined_records = []\n\n for f in climis_crop_production_csvs:\n year = int(f.split(\"/\")[-1].split(\"_\")[2].split(\".\")[0])\n df = pd.read_csv(f).dropna()\n for i, r in df.iterrows():\n record = {\n \"Year\": year,\n \"Month\": None,\n \"Source\": \"CliMIS\",\n \"Country\": \"South Sudan\",\n }\n region = r[\"State/County\"].strip()\n\n if region.lower() in 
state_county_df[\"State\"].str.lower().values:\n record[\"State\"] = region\n record[\"County\"] = None\n else:\n potential_states = state_county_df.loc[\n state_county_df[\"County\"] == region\n ][\"State\"]\n record[\"State\"] = (\n potential_states.iloc[0]\n if len(potential_states) != 0\n else None\n )\n record[\"County\"] = region\n\n for field in r.index:\n if field != \"State/County\":\n if \"Net Cereal production\" in field:\n record[\"Variable\"] = \"Net Cereal Production\"\n record[\"Value\"] = r[field]\n if field.split()[-1].startswith(\"(\"):\n record[\"Unit\"] = field.split()[-1][1:-1].lower()\n else:\n record[\"Unit\"] = None\n\n combined_records.append(record)\n\n df = pd.DataFrame(combined_records)\n return df"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef process_climis_livestock_data(data_dir: str):\n\n records = []\n\n livestock_data_dir = f\"{data_dir}/Climis South Sudan Livestock Data\"\n\n for filename in glob(\n f\"{livestock_data_dir}/Livestock Body Condition/*2017.csv\"\n ):\n records += process_file_with_single_table(\n filename,\n lambda ind: f\"Percentage of {filename.split('_')[-3].lower()} with body condition {ind.lower()}\",\n lambda f: f.split(\"_\")[-2],\n )\n\n\n for filename in glob(\n f\"{livestock_data_dir}/Livestock Production/*2017.csv\"\n ):\n records += process_file_with_single_table(\n filename,\n lambda ind: \"Percentage of householding at least milking one of their livestocks\",\n lambda f: f.split(\"_\")[1],\n )\n\n disease_acronym_dict = {\n \"FMD\": \"Foot and Mouth Disease (FMD)\",\n \"LSD\": \"Lumpy Skin Disease (LSD)\",\n \"CBPP\": \"Contagious Bovine Pleuropneumonia (CBPP)\",\n \"CCPP\": \"Contagious Caprine Pleuropneumonia (CCPP)\",\n \"NC\": \"NC\",\n \"PPR\": \"Peste des Petits Ruminants (PPR)\",\n \"Others\": \"Other diseases\",\n }\n\n func = (\n lambda k, i: f\"Percentage of livestock with {disease_acronym_dict[k]} that are {i.lower().strip()}\"\n )\n 
livestock_disease_header_dict = {\n k: partial(func, k) for k in disease_acronym_dict\n }\n\n livestock_migration_header_dict = {\n \"Livestock migration\": lambda i: f\"Percentage of livestock migrating {i.split()[-1].lower()}\",\n \"Distance covered\": lambda i: \"Distance covered by migrating livestock\",\n \"Proportion of livestock that migrated\": lambda i: \"Percentage of livestock that migrated\",\n \"Migration normal at this time of the year\": lambda i: f\"Migration normal at this time of year, {i}\",\n \"Duration in months when the migrated animals are expected to be back after\": lambda i: \"Duration in months when the migrated animals are expected to be back after\",\n \"Reasons for livestock migration\": lambda i: f\"Percentage of livestock migrating due to {i.lower()}\",\n }\n\n def process_directory(dirname, header_dict):\n return pd.concat(\n [\n df\n for df in [\n process_file_with_multiple_tables(f, header_dict)\n for f in glob(f\"{livestock_data_dir}/{dirname}/*2017.csv\")\n ]\n if df is not None\n ]\n )\n\n func2 = (\n lambda k, i: f\"{k.replace('animals', i.lower()).replace('stock', 'stock of '+i.lower()).replace('animal', i.lower())}\"\n )\n livestock_ownership_headers = [\n \"Average current stock per household\",\n \"Average number of animals born per household during last 4 weeks\",\n \"Average number of animals acquired per household during last 4 weeks (dowry, purchase, gift)\",\n \"Average number of animals given out as bride price/gift per household during last 4 weeks per household\",\n \"Average number of animals sold per household during last 4 weeks household\",\n \"Average price of animal sold (SSP)\",\n \"Average number of animals exchanged for grain per household during last 4 weeks\",\n \"Average number of animals died/slaughtered/lost per household during last 4 weeks\",\n ]\n\n livestock_ownership_header_dict = {\n k: partial(func2, k) for k in livestock_ownership_headers\n }\n ownership_df = process_directory(\n \"Livestock 
Ownership\", livestock_ownership_header_dict\n )\n\n disease_df = process_directory(\n \"Livestock Diseases\", livestock_disease_header_dict\n )\n\n livestock_migration_df = process_directory(\n \"Livestock Migration\", livestock_migration_header_dict\n )\n\n livestock_pasture_header_dict = {\n \"Pasture condtion\": lambda i: f\"Percentage of livestock pasture in {i.lower()} condition\",\n \"Pasture condition compared to similar time in a normal year\": lambda i: f\"Percentage of livestock pasture in {i.lower()} condition compared to a similar time in a normal year\",\n \"Browse condition\": lambda i: f\"Percentage of livestock pasture in {i.lower()} browse condition\",\n \"Browse condition compared to similar time in a normal year\": lambda i: f\"Percentage of livestock pasture in {i.lower()} browse condition compared to a similar time in a normal year\",\n \"Presence of constraints in accessing forage\": lambda i: f\"Percentage reporting the {('presence' if i=='Yes' else 'absence')} of constraints in accessing forage\",\n \"Main forage constraints\": lambda i: f\"Percentage reporting {i.lower()} as the main forage constraint\",\n }\n livestock_pasture_df = process_directory(\n \"Livestock Pasture\", livestock_pasture_header_dict\n )\n livestock_water_sources_header_dict = {\n \"Main water sources\": lambda i: f\"Percentage of livestock whose main water source is {i.lower()}\",\n \"Number of days livestock have been watered in the last 7 days\": lambda i: f\"Number of days {i.lower()} have been watered in the last 7 days\",\n }\n\n livestock_water_sources_df = process_directory(\n \"Livestock Water Sources\", livestock_water_sources_header_dict\n )\n\n for filename in glob(f\"{livestock_data_dir}/Livestock Loss/*2017.csv\"):\n records += process_file_with_single_table(\n filename,\n lambda ind: f\"Percentage of {filename.split('_')[-3].lower()} loss accounted for by {ind.lower()}\",\n lambda f: f.split(\"_\")[-2],\n )\n\n\n for record in records:\n if 
isinstance(record[\"Value\"], str):\n record[\"Value\"] = record[\"Value\"].replace(\"%\", \"\")\n\n livestock_prices_df = pd.concat(\n [\n make_livestock_prices_table(f)\n for f in glob(\n f\"{livestock_data_dir}/Livestock Market Prices/*2017.csv\"\n )\n ]\n )\n\n climis_livestock_data_df = pd.concat(\n [\n pd.DataFrame(records),\n disease_df,\n ownership_df,\n livestock_prices_df,\n livestock_migration_df,\n livestock_pasture_df,\n livestock_water_sources_df,\n ]\n )\n return climis_livestock_data_df", "response": "Processes all files in CliMIS livestock data directory and returns a list of dictionaries containing all the relevant data."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef separate_trailing_comments(lines: List[str]) -> List[Tuple[int, str]]:\n\n enumerated_lines = list(enumerate(lines, 1))\n i = 0\n while i < len(enumerated_lines):\n (n, code_line) = enumerated_lines[i]\n if not line_is_comment(code_line):\n (code_part, comment_part) = split_trailing_comment(code_line)\n if comment_part is not None:\n enumerated_lines[i] = (n, comment_part)\n enumerated_lines.insert(i + 1, (n, code_part))\n i += 1\n\n return enumerated_lines", "response": "Separates trailing comments from the source code lines."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngiving a list of lines that are continued by a single line merge the lines that are continued by a single line.", "response": "def merge_continued_lines(lines):\n \"\"\"Given a list of numered Fortran source code lines, i.e., pairs of the\n form (n, code_line) where n is a line number and code_line is a line\n of code, merge_continued_lines() merges sequences of lines that are\n indicated to be continuation lines.\n \"\"\"\n\n # Before a continuation line L1 is merged with the line L0 before it (and\n # presumably the one L1 is continuing), ensure that L0 is not a comment.\n # If L0 is a comment, swap L0 and L1.\n chg = True\n while 
chg:\n chg = False\n i = 0\n while i < len(lines) - 1:\n ln0, ln1 = lines[i], lines[i + 1]\n if line_is_comment(ln0[1]) and line_is_continuation(ln1[1]):\n # swap the code portions of lines[i] and lines[i+1]\n lines[i], lines[i + 1] = (ln0[0], ln1[1]), (ln1[0], ln0[1])\n chg = True\n i += 1\n\n # Merge continuation lines\n chg = True\n while chg:\n chg = False\n i = 0\n while i < len(lines):\n line = lines[i]\n if line_is_continuation(line[1]):\n assert i > 0\n (prev_linenum, prev_line_code) = lines[i - 1]\n curr_line_code = line[1].lstrip()[\n 1:\n ] # remove continuation char\n merged_code = prev_line_code.rstrip() + curr_line_code.lstrip()\n\n lines[i - 1] = (prev_linenum, merged_code)\n lines.pop(i)\n chg = True\n i += 1\n\n return lines"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngives a line of code returns a string indicating what kind of code it is.", "response": "def type_of_line(line):\n \"\"\"Given a line of code, type_of_line() returns a string indicating\n what kind of code it is.\"\"\"\n\n if line_is_comment(line):\n return \"comment\"\n elif line_is_executable(line):\n return \"exec_stmt\"\n elif line_is_pgm_unit_end(line):\n return \"pgm_unit_end\"\n else:\n if line_is_pgm_unit_start(line):\n return \"pgm_unit_start\"\n else:\n return \"other\""} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ngive a list of numbered lines from a Fortran file where comments internal to subprogram bodies have been moved out into their own lines, extract_comments() extracts comments into a dictionary and replaces each comment internal to subprogram bodies with a marker statement. 
It returns a pair (code, comments) where code is a list of numbered lines with comments removed and marker statements (plus corresponding variable declarations) added; and comments is a dictionary mapping marker statement variables to the corresponding comments.", "response": "def extract_comments(\n lines: List[Tuple[int, str]]\n) -> Tuple[List[Tuple[int, str]], Dict[str, List[str]]]:\n \"\"\"Given a list of numbered lines from a Fortran file where comments\n internal to subprogram bodies have been moved out into their own lines,\n extract_comments() extracts comments into a dictionary and replaces\n each comment internal to subprogram bodies with a marker statement.\n It returns a pair (code, comments) where code is a list of numbered\n lines with comments removed and marker statements (plus corresponding\n variable declarations) added; and comments is a dictionary mapping\n marker statement variables to the corresponding comments.\"\"\"\n\n curr_comment = []\n curr_fn, prev_fn, curr_marker = None, None, None\n comments = OrderedDict()\n\n # curr_state refers to the state of the finite-state machine (see above)\n curr_state = \"outside\"\n\n comments[\"$file_head\"] = []\n comments[\"$file_foot\"] = []\n\n for i in range(len(lines)):\n (linenum, line) = lines[i]\n\n # determine what kind of line this is\n line_type = type_of_line(line)\n\n # process the line appropriately\n if curr_state == \"outside\":\n assert line_type in (\"comment\", \"pgm_unit_start\"), (\n line_type,\n line,\n )\n if line_type == \"comment\":\n curr_comment.append(line)\n lines[i] = (linenum, None)\n else:\n # line_type == \"pgm_unit_start\"\n pgm_unit_name = program_unit_name(line)\n comments[\"$file_head\"] = curr_comment\n\n if prev_fn is not None:\n comments[prev_fn][\"foot\"] = curr_comment\n\n prev_fn = curr_fn\n curr_fn = pgm_unit_name\n\n internal_comments = OrderedDict()\n\n comments[curr_fn] = init_comment_map(\n curr_comment, [], [], internal_comments\n )\n curr_comment = []\n\n 
elif curr_state == \"in_neck\":\n assert line_type in (\"comment\", \"exec_stmt\", \"other\")\n if line_type == \"comment\":\n curr_comment.append(line)\n lines[i] = (linenum, None)\n elif line_type == \"exec_stmt\":\n comments[curr_fn][\"neck\"] = curr_comment\n curr_comment = []\n else:\n pass # nothing to do -- continue\n\n elif curr_state == \"in_body\":\n assert line_type in (\n \"comment\",\n \"exec_stmt\",\n \"pgm_unit_end\",\n ), f\"[Line {linenum}]: {line}\"\n\n if line_type == \"comment\":\n # Ignore empty lines, which are technically comments but which\n # don't contribute any semantic content.\n if line != \"\\n\":\n marker_var = f\"{INTERNAL_COMMENT_PREFIX}_{linenum}\"\n marker_stmt = f\" {marker_var} = .True.\\n\"\n internal_comments[marker_var] = line\n lines[i] = (linenum, marker_stmt)\n else:\n pass # nothing to do -- continue\n\n # update the current state\n curr_state = TRANSITIONS[curr_state][line_type]\n\n # if there's a comment at the very end of the file, make it the foot\n # comment of curr_fn\n if curr_comment != [] and comments.get(curr_fn):\n comments[curr_fn][\"foot\"] = curr_comment\n comments[\"$file_foot\"] = curr_comment\n\n return (lines, comments)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ntaking a line and splits it into two parts code_part comment_part and code_part where comment_part is the trailing comment character.", "response": "def split_trailing_comment(line: str) -> str:\n \"\"\"Takes a line and splits it into two parts (code_part, comment_part)\n where code_part is the line up to but not including any trailing\n comment (the '!' comment character and subsequent characters\n to the end of the line), while comment_part is the trailing comment.\n\n Args:\n line: A line of Fortran source code.\n\n Returns:\n A pair (code_part, comment_part) where comment_part is the trailing\n comment. 
If the line does not contain any trailing comment, then\n comment_part is None.\n \"\"\"\n\n if line.find(\"!\") == -1:\n return (line, None)\n\n i = 0\n while i < len(line):\n if line[i] == \"'\":\n j = line.find(\"'\", i + 1)\n if j == -1:\n sys.stderr.write(\"WEIRD: unbalanced quote ': line = \" + line)\n return line\n else:\n i = j + 1\n elif line[i] == '\"':\n j = line.find('\"', i + 1)\n if j == -1:\n sys.stderr.write('WEIRD: unbalanced quote \": line = ' + line)\n return line\n else:\n i = j + 1\n elif line[i] == \"!\": # partial-line comment\n comment_part = line[i:]\n code_part = line[:i].rstrip() + \"\\n\"\n return (code_part, comment_part)\n else:\n i += 1\n\n return (line, None)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nprocess is the interface used by an earlier version of this preprocessor.", "response": "def process(inputLines: List[str]) -> str:\n \"\"\"process() provides the interface used by an earlier version of this\n preprocessor.\"\"\"\n lines = separate_trailing_comments(inputLines)\n merge_continued_lines(lines)\n (lines, comments) = extract_comments(lines)\n actual_lines = [\n line[1]\n for line in lines\n if line[1] is not None and \"i_g_n_o_r_e___m_e_\" not in line[1]\n ]\n return \"\".join(actual_lines)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nassign uuids to nodes and edges.", "response": "def assign_uuids_to_nodes_and_edges(self):\n \"\"\" Assign uuids to nodes and edges. \"\"\"\n for node in self.nodes(data=True):\n node[1][\"id\"] = str(uuid4())\n\n for edge in self.edges(data=True):\n edge[2][\"id\"] = str(uuid4())"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nconstruct an AnalysisGraph object from a pickle file containing a list of INDRA statements.", "response": "def from_statements_file(cls, file: str):\n \"\"\" Construct an AnalysisGraph object from a pickle file containing a\n list of INDRA statements. 
\"\"\"\n\n with open(file, \"rb\") as f:\n sts = pickle.load(f)\n\n return cls.from_statements(sts)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef from_statements(\n cls, sts: List[Influence], assign_default_polarities: bool = True\n ):\n \"\"\" Construct an AnalysisGraph object from a list of INDRA statements.\n Unknown polarities are set to positive by default.\n\n Args:\n sts: A list of INDRA Statements\n\n Returns:\n An AnalysisGraph instance constructed from a list of INDRA\n statements.\n \"\"\"\n\n _dict = {}\n for s in sts:\n if assign_default_polarities:\n for delta in deltas(s):\n if delta[\"polarity\"] is None:\n delta[\"polarity\"] = 1\n concepts = nameTuple(s)\n\n # Excluding self-loops for now:\n if concepts[0] != concepts[1]:\n if all(\n map(exists, (delta[\"polarity\"] for delta in deltas(s)))\n ):\n if concepts in _dict:\n _dict[concepts].append(s)\n else:\n _dict[concepts] = [s]\n\n edges = [\n (*concepts, {\"InfluenceStatements\": statements})\n for concepts, statements in _dict.items()\n ]\n return cls(edges)", "response": "Construct an AnalysisGraph object from a list of INDRA Statements."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef from_text(cls, text: str):\n\n eidosProcessor = process_text(text)\n return cls.from_statements(eidosProcessor.statements)", "response": "Construct an AnalysisGraph object from a text string."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconstructing an AnalysisGraph object from a file containing INDRA statements serialized by Uncharted s CauseMos webapp.", "response": "def from_uncharted_json_file(cls, file):\n \"\"\" Construct an AnalysisGraph object from a file containing INDRA\n statements serialized exported by Uncharted's CauseMos webapp.\n \"\"\"\n with open(file, \"r\") as f:\n _dict = json.load(f)\n return 
cls.from_uncharted_json_serialized_dict(_dict)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef from_uncharted_json_serialized_dict(\n cls, _dict, minimum_evidence_pieces_required: int = 1\n ):\n \"\"\" Construct an AnalysisGraph object from a dict of INDRA statements\n exported by Uncharted's CauseMos webapp. \"\"\"\n sts = _dict[\"statements\"]\n G = nx.DiGraph()\n for s in sts:\n if len(s[\"evidence\"]) >= minimum_evidence_pieces_required:\n subj, obj = s[\"subj\"], s[\"obj\"]\n if (\n subj[\"db_refs\"][\"concept\"] is not None\n and obj[\"db_refs\"][\"concept\"] is not None\n ):\n subj_name, obj_name = [\n \"/\".join(s[x][\"db_refs\"][\"concept\"].split(\"/\")[:])\n for x in [\"subj\", \"obj\"]\n ]\n G.add_edge(subj_name, obj_name)\n subj_delta = s[\"subj_delta\"]\n obj_delta = s[\"obj_delta\"]\n\n for delta in (subj_delta, obj_delta):\n # TODO : Ensure that all the statements provided by\n # Uncharted have unambiguous polarities.\n if delta[\"polarity\"] is None:\n delta[\"polarity\"] = 1\n\n influence_stmt = Influence(\n Concept(subj_name, db_refs=subj[\"db_refs\"]),\n Concept(obj_name, db_refs=obj[\"db_refs\"]),\n subj_delta=s[\"subj_delta\"],\n obj_delta=s[\"obj_delta\"],\n evidence=[\n INDRAEvidence(\n source_api=ev[\"source_api\"],\n annotations=ev[\"annotations\"],\n text=ev[\"text\"],\n epistemics=ev.get(\"epistemics\"),\n )\n for ev in s[\"evidence\"]\n ],\n )\n influence_sts = G.edges[subj_name, obj_name].get(\n \"InfluenceStatements\", []\n )\n influence_sts.append(influence_stmt)\n G.edges[subj_name, obj_name][\n \"InfluenceStatements\"\n ] = influence_sts\n\n for concept, indicator in _dict[\n \"concept_to_indicator_mapping\"\n ].items():\n if indicator is not None:\n indicator_source, indicator_name = (\n indicator[\"name\"].split(\"/\")[0],\n \"/\".join(indicator[\"name\"].split(\"/\")[1:]),\n )\n if concept in G:\n if G.nodes[concept].get(\"indicators\") is None:\n 
G.nodes[concept][\"indicators\"] = {}\n G.nodes[concept][\"indicators\"][indicator_name] = Indicator(\n indicator_name, indicator_source\n )\n\n self = cls(G)\n self.assign_uuids_to_nodes_and_edges()\n return self", "response": "Construct an AnalysisGraph object from a dictionary of INDRA statements."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nassembles transition model from gradient adjectives.", "response": "def assemble_transition_model_from_gradable_adjectives(self):\n \"\"\" Add probability distribution functions constructed from gradable\n adjective data to the edges of the analysis graph data structure.\n\n Args:\n adjective_data\n res\n \"\"\"\n\n df = pd.read_sql_table(\"gradableAdjectiveData\", con=engine)\n gb = df.groupby(\"adjective\")\n\n rs = gaussian_kde(\n flatMap(\n lambda g: gaussian_kde(get_respdevs(g[1]))\n .resample(self.res)[0]\n .tolist(),\n gb,\n )\n ).resample(self.res)[0]\n\n for edge in self.edges(data=True):\n edge[2][\"ConditionalProbability\"] = constructConditionalPDF(\n gb, rs, edge\n )\n edge[2][\"\u03b2s\"] = np.tan(\n edge[2][\"ConditionalProbability\"].resample(self.res)[0]\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsamples elements of the stochastic transition matrix from the prior state.", "response": "def sample_from_prior(self):\n \"\"\" Sample elements of the stochastic transition matrix from the prior\n distribution, based on gradable adjectives. 
\"\"\"\n\n # simple_path_dict caches the results of the graph traversal that finds\n # simple paths between pairs of nodes, so that it doesn't have to be\n # executed for every sampled transition matrix.\n\n node_pairs = list(permutations(self.nodes(), 2))\n simple_path_dict = {\n node_pair: [\n list(pairwise(path))\n for path in nx.all_simple_paths(self, *node_pair)\n ]\n for node_pair in node_pairs\n }\n\n self.transition_matrix_collection = []\n\n elements = self.get_latent_state_components()\n\n for i in range(self.res):\n A = pd.DataFrame(\n np.identity(2 * len(self)), index=elements, columns=elements\n )\n\n for node in self.nodes:\n A[f\"\u2202({node})/\u2202t\"][node] = self.\u0394t\n\n for node_pair in node_pairs:\n A[f\"\u2202({node_pair[0]})/\u2202t\"][node_pair[1]] = sum(\n np.prod(\n [\n self.edges[edge[0], edge[1]][\"\u03b2s\"][i]\n for edge in simple_path_edge_list\n ]\n )\n * self.\u0394t\n for simple_path_edge_list in simple_path_dict[node_pair]\n )\n self.transition_matrix_collection.append(A)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nsample observed state vector. This is the implementation of the Observed state vector function. This is the implementation of the Observed state vector function.", "response": "def sample_observed_state(self, s: pd.Series) -> Dict:\n \"\"\" Sample observed state vector. 
This is the implementation of the\n emission function.\n\n Args:\n s: Latent state vector.\n\n Returns:\n Observed state vector.\n \"\"\"\n\n return {\n n[0]: {\n i.name: np.random.normal(s[n[0]] * i.mean, i.stdev)\n for i in n[1][\"indicators\"].values()\n }\n for n in self.nodes(data=True)\n }"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nsample observed state sequences from the likelihood model given a collection of transition matrices.", "response": "def sample_from_likelihood(self, n_timesteps=10):\n \"\"\" Sample a collection of observed state sequences from the likelihood\n model given a collection of transition matrices.\n\n Args:\n n_timesteps: The number of timesteps for the sequences.\n \"\"\"\n\n self.latent_state_sequences = lmap(\n lambda A: ltake(\n n_timesteps,\n iterate(\n lambda s: pd.Series(A @ s.values, index=s.index), self.s0\n ),\n ),\n self.transition_matrix_collection,\n )\n\n self.observed_state_sequences = [\n [self.sample_observed_state(s) for s in latent_state_sequence]\n for latent_state_sequence in self.latent_state_sequences\n ]"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsampling a new transition matrix from the proposal distribution A.", "response": "def sample_from_proposal(self, A: pd.DataFrame) -> None:\n \"\"\" Sample a new transition matrix from the proposal distribution,\n given a current candidate transition matrix. 
In practice, this amounts\n to the in-place perturbation of an element of the transition matrix\n currently being used by the sampler.\n\n Args\n \"\"\"\n\n # Choose the element of A to perturb\n self.source, self.target, self.edge_dict = random.choice(\n list(self.edges(data=True))\n )\n self.original_value = A[f\"\u2202({self.source})/\u2202t\"][self.target]\n A[f\"\u2202({self.source})/\u2202t\"][self.target] += np.random.normal(scale=0.001)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_timeseries_values_for_indicators(\n self, resolution: str = \"month\", months: Iterable[int] = range(6, 9)\n ):\n \"\"\" Attach timeseries to indicators, for performing Bayesian inference.\n \"\"\"\n if resolution == \"month\":\n funcs = [\n partial(get_indicator_value, month=month) for month in months\n ]\n else:\n raise NotImplementedError(\n \"Currently, only the 'month' resolution is supported.\"\n )\n\n for n in self.nodes(data=True):\n for indicator in n[1][\"indicators\"].values():\n indicator.timeseries = [\n func(indicator, year=\"2017\")[0] for func in funcs\n ]\n if len(set(indicator.timeseries)) == 1:\n indicator.timeseries = None", "response": "Attach timeseries to indicators for performing Bayesian inference."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrun Bayesian inference - sample from the posterior distribution.", "response": "def sample_from_posterior(self, A: pd.DataFrame) -> None:\n \"\"\" Run Bayesian inference - sample from the posterior distribution.\"\"\"\n self.sample_from_proposal(A)\n self.set_latent_state_sequence(A)\n self.update_log_prior(A)\n self.update_log_likelihood()\n\n candidate_log_joint_probability = self.log_prior + self.log_likelihood\n\n delta_log_joint_probability = (\n candidate_log_joint_probability - self.log_joint_probability\n )\n\n acceptance_probability = min(1, np.exp(delta_log_joint_probability))\n if 
acceptance_probability > np.random.rand():\n self.update_log_joint_probability()\n else:\n A[f\"\u2202({self.source})/\u2202t\"][self.target] = self.original_value\n self.set_latent_state_sequence(A)\n self.update_log_likelihood()\n self.update_log_prior(A)\n self.update_log_joint_probability()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ninfers the transition matrix coefficient from data.", "response": "def infer_transition_matrix_coefficient_from_data(\n self,\n source: str,\n target: str,\n state: Optional[str] = None,\n crop: Optional[str] = None,\n ):\n \"\"\" Infer the distribution of a particular transition matrix\n coefficient from data.\n\n Args:\n source: The source of the edge corresponding to the matrix element\n to infer.\n target: The target of the edge corresponding to the matrix element\n to infer.\n state:\n The state in South Sudan for which the transition matrix\n coefficient should be calculated.\n crop:\n The crop for which the transition matrix coefficient should be\n calculated.\n \"\"\"\n rows = engine.execute(\n f\"select * from dssat where `Crop` like '{crop}'\"\n f\" and `State` like '{state}'\"\n )\n xs, ys = lzip(*[(r[\"Rainfall\"], r[\"Production\"]) for r in rows])\n xs_scaled, ys_scaled = xs / np.mean(xs), ys / np.mean(ys)\n p, V = np.polyfit(xs_scaled, ys_scaled, 1, cov=True)\n self.edges[source, target][\"\u03b2s\"] = np.random.normal(\n p[0], np.sqrt(V[0][0]), self.res\n )\n self.sample_from_prior()"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef create_bmi_config_file(self, filename: str = \"bmi_config.txt\") -> None:\n s0 = self.construct_default_initial_state()\n s0.to_csv(filename, index_label=\"variable\")", "response": "Create a BMI config file to initialize the model."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef 
default_update_function(self, n: Tuple[str, dict]) -> List[float]:\n return [\n self.transition_matrix_collection[i].loc[n[0]].values\n @ self.s0[i].values\n for i in range(self.res)\n ]", "response": "The default update function for a CAG node."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ninitialize the AnalysisGraph with a config file.", "response": "def initialize(\n self, config_file: str = \"bmi_config.txt\", initialize_indicators=True\n ):\n \"\"\" Initialize the executable AnalysisGraph with a config file.\n\n Args:\n config_file\n\n Returns:\n AnalysisGraph\n \"\"\"\n self.t = 0.0\n if not os.path.isfile(config_file):\n self.create_bmi_config_file(config_file)\n\n self.s0 = [\n pd.read_csv(\n config_file, index_col=0, header=None, error_bad_lines=False\n )[1]\n for _ in range(self.res)\n ]\n self.s0_original = self.s0[0].copy(deep=True)\n\n self.latent_state_vector = self.construct_default_initial_state()\n\n for n in self.nodes(data=True):\n rv = LatentVar(n[0])\n n[1][\"rv\"] = rv\n n[1][\"update_function\"] = self.default_update_function\n rv.dataset = [1.0 for _ in range(self.res)]\n rv.partial_t = self.s0[0][f\"\u2202({n[0]})/\u2202t\"]\n if initialize_indicators:\n for indicator in n[1][\"indicators\"].values():\n indicator.samples = np.random.normal(\n indicator.mean * np.array(n[1][\"rv\"].dataset),\n scale=0.01,\n )"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nadvance the model by one time step.", "response": "def update(self, \u03c4: float = 1.0, update_indicators=True, dampen=False):\n \"\"\" Advance the model by one time step. 
\"\"\"\n\n for n in self.nodes(data=True):\n n[1][\"next_state\"] = n[1][\"update_function\"](n)\n\n for n in self.nodes(data=True):\n n[1][\"rv\"].dataset = n[1][\"next_state\"]\n\n for n in self.nodes(data=True):\n for i in range(self.res):\n self.s0[i][n[0]] = n[1][\"rv\"].dataset[i]\n if dampen:\n self.s0[i][f\"\u2202({n[0]})/\u2202t\"] = self.s0_original[\n f\"\u2202({n[0]})/\u2202t\"\n ] * exp(-\u03c4 * self.t)\n if update_indicators:\n for indicator in n[1][\"indicators\"].values():\n indicator.samples = np.random.normal(\n indicator.mean * np.array(n[1][\"rv\"].dataset),\n scale=0.01,\n )\n\n self.t += self.\u0394t"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef export_node(self, n) -> Dict[str, Union[str, List[str]]]:\n node_dict = {\n \"name\": n[0],\n \"units\": _get_units(n[0]),\n \"dtype\": _get_dtype(n[0]),\n \"arguments\": list(self.predecessors(n[0])),\n }\n\n if not n[1].get(\"indicators\") is None:\n for indicator in n[1][\"indicators\"].values():\n if \"dataset\" in indicator.__dict__:\n del indicator.__dict__[\"dataset\"]\n\n node_dict[\"indicators\"] = [\n _process_datetime(indicator.__dict__)\n for indicator in n[1][\"indicators\"].values()\n ]\n else:\n node_dict[\"indicators\"] = None\n\n return node_dict", "response": "Returns a dict suitable for exporting to JSON."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef to_dict(self) -> Dict:\n return {\n \"name\": self.name,\n \"dateCreated\": str(self.dateCreated),\n \"variables\": lmap(\n lambda n: self.export_node(n), self.nodes(data=True)\n ),\n \"timeStep\": str(self.\u0394t),\n \"edge_data\": lmap(export_edge, self.edges(data=True)),\n }", "response": "Export the CAG to a dict that can be serialized to JSON."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nmapping each concept node in the AnalysisGraph instance to one or more tangible quantities known as 
indicators.", "response": "def map_concepts_to_indicators(\n self, n: int = 1, min_temporal_res: Optional[str] = None\n ):\n \"\"\" Map each concept node in the AnalysisGraph instance to one or more\n tangible quantities, known as 'indicators'.\n\n Args:\n n: Number of matches to keep\n min_temporal_res: Minimum temporal resolution that the indicators\n must have data for.\n \"\"\"\n\n for node in self.nodes(data=True):\n query_parts = [\n \"select Indicator from concept_to_indicator_mapping\",\n f\"where `Concept` like '{node[0]}'\",\n ]\n\n # TODO May need to delve into SQL/database stuff a bit more deeply\n # for this. Foreign keys perhaps?\n\n query = \" \".join(query_parts)\n results = engine.execute(query)\n\n if min_temporal_res is not None:\n if min_temporal_res not in [\"month\"]:\n raise ValueError(\"min_temporal_res must be 'month'\")\n\n vars_with_required_temporal_resolution = [\n r[0]\n for r in engine.execute(\n \"select distinct `Variable` from indicator where \"\n f\"`{min_temporal_res.capitalize()}` is not null\"\n )\n ]\n results = [\n r\n for r in results\n if r[0] in vars_with_required_temporal_resolution\n ]\n\n node[1][\"indicators\"] = {\n x: Indicator(x, \"MITRE12\")\n for x in [r[0] for r in take(n, results)]\n }"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef delete_nodes(self, nodes: Iterable[str]):\n for n in nodes:\n if self.has_node(n):\n self.remove_node(n)", "response": "Iterate over a set of nodes and remove the ones that are present in\n the graph."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef delete_node(self, node: str):\n if self.has_node(node):\n self.remove_node(node)", "response": "Removes a node from the graph if it is in the graph."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nremoves an edge from the graph if it is in the graph.", "response": 
"def delete_edge(self, source: str, target: str):\n \"\"\" Removes an edge if it is in the graph. \"\"\"\n if self.has_edge(source, target):\n self.remove_edge(source, target)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\niterate over a set of edges and remove the ones that are present in the graph.", "response": "def delete_edges(self, edges: Iterable[Tuple[str, str]]):\n \"\"\" Iterate over a set of edges and remove the ones that are present in\n the graph. \"\"\"\n for edge in edges:\n if self.has_edge(*edge):\n self.remove_edge(*edge)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef prune(self, cutoff: int = 2):\n\n # Remove redundant paths.\n for node_pair in tqdm(list(permutations(self.nodes(), 2))):\n paths = [\n list(pairwise(path))\n for path in nx.all_simple_paths(self, *node_pair, cutoff)\n ]\n if len(paths) > 1:\n for path in paths:\n if len(path) == 1:\n self.delete_edge(*path[0])\n if any(self.degree(n) == 0 for n in path[0]):\n self.add_edge(*path[0])\n break", "response": "Removes redundant paths from the CAG."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef merge_nodes(self, n1: str, n2: str, same_polarity: bool = True):\n\n for p in self.predecessors(n1):\n for st in self[p][n1][\"InfluenceStatements\"]:\n if not same_polarity:\n st.obj_delta[\"polarity\"] = -st.obj_delta[\"polarity\"]\n st.obj.db_refs[\"UN\"][0] = (n2, st.obj.db_refs[\"UN\"][0][1])\n\n if not self.has_edge(p, n2):\n self.add_edge(p, n2)\n self[p][n2][\"InfluenceStatements\"] = self[p][n1][\n \"InfluenceStatements\"\n ]\n\n else:\n self[p][n2][\"InfluenceStatements\"] += self[p][n1][\n \"InfluenceStatements\"\n ]\n\n for s in self.successors(n1):\n for st in self.edges[n1, s][\"InfluenceStatements\"]:\n if not same_polarity:\n st.subj_delta[\"polarity\"] = -st.subj_delta[\"polarity\"]\n st.subj.db_refs[\"UN\"][0] = (n2, 
st.subj.db_refs[\"UN\"][0][1])\n\n if not self.has_edge(n2, s):\n self.add_edge(n2, s)\n self[n2][s][\"InfluenceStatements\"] = self[n1][s][\n \"InfluenceStatements\"\n ]\n else:\n self[n2][s][\"InfluenceStatements\"] += self[n1][s][\n \"InfluenceStatements\"\n ]\n\n self.remove_node(n1)", "response": "Merge two nodes into one."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a new subgraph of the analysis graph for a single concept.", "response": "def get_subgraph_for_concept(\n self, concept: str, depth: int = 1, reverse: bool = False\n ):\n \"\"\" Returns a new subgraph of the analysis graph for a single concept.\n\n Args:\n concept: The concept that the subgraph will be centered around.\n depth: The depth to which the depth-first search must be performed.\n\n reverse: Sets the direction of causal influence flow to examine.\n Setting this to False (default) will search for upstream causal\n influences, and setting it to True will search for\n downstream causal influences.\n\n Returns:\n AnalysisGraph\n \"\"\"\n\n nodeset = {concept}\n\n if reverse:\n func = self.predecessors\n else:\n func = self.successors\n for i in range(depth):\n nodeset.update(\n chain.from_iterable([list(func(n)) for n in nodeset])\n )\n\n return AnalysisGraph(self.subgraph(nodeset).copy())"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns an AnalysisGraph comprised of simple paths between the source and target.", "response": "def get_subgraph_for_concept_pair(\n self, source: str, target: str, cutoff: Optional[int] = None\n ):\n \"\"\" Get subgraph comprised of simple paths between the source and the\n target.\n\n Args:\n source\n target\n cutoff\n \"\"\"\n paths = nx.all_simple_paths(self, source, target, cutoff=cutoff)\n return AnalysisGraph(self.subgraph(set(chain.from_iterable(paths))))"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 
function does\ndef get_subgraph_for_concept_pairs(\n self, concepts: List[str], cutoff: Optional[int] = None\n ):\n \"\"\" Get subgraph comprised of simple paths between the source and the\n target.\n\n Args:\n concepts\n cutoff\n \"\"\"\n path_generator = (\n nx.all_simple_paths(self, source, target, cutoff=cutoff)\n for source, target in permutations(concepts, 2)\n )\n paths = chain.from_iterable(path_generator)\n return AnalysisGraph(self.subgraph(set(chain.from_iterable(paths))))", "response": "Returns an AnalysisGraph comprised of simple paths between the source and target and the given concepts."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ninserting the model into the SQLite3 database associated with the ICM REST API.", "response": "def to_sql(self, app=None, last_known_value_date: Optional[date] = None):\n \"\"\" Inserts the model into the SQLite3 database associated with Delphi,\n for use with the ICM REST API. \"\"\"\n\n from delphi.apps.rest_api import create_app, db\n\n self.assign_uuids_to_nodes_and_edges()\n icm_metadata = ICMMetadata(\n id=self.id,\n created=self.dateCreated,\n estimatedNumberOfPrimitives=len(self.nodes) + len(self.edges),\n createdByUser_id=1,\n lastAccessedByUser_id=1,\n lastUpdatedByUser_id=1,\n )\n if last_known_value_date is None:\n today = date.today().isoformat()\n else:\n today = last_known_value_date.isoformat()\n default_latent_var_value = 1.0\n causal_primitives = []\n nodeset = {n.split(\"/\")[-1] for n in self.nodes}\n simplified_labels = len(nodeset) == len(self)\n for n in self.nodes(data=True):\n n[1][\"rv\"] = LatentVar(n[0])\n n[1][\"update_function\"] = self.default_update_function\n rv = n[1][\"rv\"]\n rv.dataset = [default_latent_var_value for _ in range(self.res)]\n\n causal_variable = CausalVariable(\n id=n[1][\"id\"],\n model_id=self.id,\n units=\"\",\n namespaces={},\n auxiliaryProperties=[],\n label=n[0].split(\"/\")[-1].replace(\"_\", \" \").capitalize()\n if 
simplified_labels\n else n[0],\n description=n[0],\n lastUpdated=today,\n confidence=1.0,\n lastKnownValue={\n \"active\": \"ACTIVE\",\n \"trend\": None,\n \"time\": today,\n \"value\": {\n \"baseType\": \"FloatValue\",\n \"value\": n[1][\"rv\"].dataset[0],\n },\n },\n range={\n \"baseType\": \"FloatRange\",\n \"range\": {\"min\": 0, \"max\": 5, \"step\": 0.1},\n },\n )\n causal_primitives.append(causal_variable)\n\n max_mean_betas = max(\n [abs(np.median(e[2][\"\u03b2s\"])) for e in self.edges(data=True)]\n )\n for e in self.edges(data=True):\n causal_relationship = CausalRelationship(\n id=e[2][\"id\"],\n namespaces={},\n source={\n \"id\": self.nodes[e[0]][\"id\"],\n \"baseType\": \"CausalVariable\",\n },\n target={\n \"id\": self.nodes[e[1]][\"id\"],\n \"baseType\": \"CausalVariable\",\n },\n model_id=self.id,\n auxiliaryProperties=[],\n lastUpdated=today,\n types=[\"causal\"],\n description=\"\",\n confidence=np.mean(\n [s.belief for s in e[2][\"InfluenceStatements\"]]\n ),\n label=\"\",\n strength=abs(np.median(e[2][\"\u03b2s\"]) / max_mean_betas),\n reinforcement=(\n True\n if np.mean(\n [\n stmt.subj_delta[\"polarity\"]\n * stmt.obj_delta[\"polarity\"]\n for stmt in e[2][\"InfluenceStatements\"]\n ]\n )\n > 0\n else False\n ),\n )\n causal_primitives.append(causal_relationship)\n evidences = []\n for edge in self.edges(data=True):\n for stmt in edge[2][\"InfluenceStatements\"]:\n for ev in stmt.evidence:\n evidence = Evidence(\n id=str(uuid4()),\n causalrelationship_id=edge[2][\"id\"],\n # TODO - make source and target appear in CauseEx HMI\n description=(ev.text),\n )\n evidences.append(evidence)\n\n if app is None:\n app = create_app()\n\n with app.app_context():\n db.drop_all()\n db.create_all()\n db.session.add(icm_metadata)\n db.session.add(DelphiModel(id=self.id, model=self))\n for causal_primitive in causal_primitives:\n db.session.add(causal_primitive)\n for evidence in evidences:\n db.session.add(evidence)\n db.session.commit()"} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef to_agraph(\n self,\n indicators: bool = False,\n indicator_values: bool = False,\n nodes_to_highlight=None,\n *args,\n **kwargs,\n ):\n \"\"\" Exports the CAG as a pygraphviz AGraph for visualization.\n\n Args:\n indicators: Whether to display indicators in the AGraph\n indicator_values: Whether to display indicator values in the AGraph\n nodes_to_highlight: Nodes to highlight in the AGraph.\n Returns:\n A PyGraphviz AGraph representation of the AnalysisGraph.\n \"\"\"\n\n from delphi.utils.misc import choose_font\n\n FONT = choose_font()\n A = nx.nx_agraph.to_agraph(self)\n A.graph_attr.update(\n {\n \"dpi\": 227,\n \"fontsize\": 20,\n \"rankdir\": kwargs.get(\"rankdir\", \"TB\"),\n \"fontname\": FONT,\n \"overlap\": \"scale\",\n \"splines\": True,\n }\n )\n\n A.node_attr.update(\n {\n \"shape\": \"rectangle\",\n \"color\": \"black\",\n # \"color\": \"#650021\",\n \"style\": \"rounded\",\n \"fontname\": FONT,\n }\n )\n\n nodes_with_indicators = [\n n\n for n in self.nodes(data=True)\n if n[1].get(\"indicators\") is not None\n ]\n\n n_max = max(\n [\n sum([len(s.evidence) for s in e[2][\"InfluenceStatements\"]])\n for e in self.edges(data=True)\n ]\n )\n\n nodeset = {n.split(\"/\")[-1] for n in self.nodes}\n\n simplified_labels = len(nodeset) == len(self)\n color_str = \"#650021\"\n for n in self.nodes(data=True):\n if kwargs.get(\"values\"):\n node_label = (\n n[0].capitalize().replace(\"_\", \" \")\n + \" (\"\n + str(np.mean(n[1][\"rv\"].dataset))\n + \")\"\n )\n else:\n node_label = (\n n[0].split(\"/\")[-1].replace(\"_\", \" \").capitalize()\n if simplified_labels\n else n[0]\n )\n A.add_node(n[0], label=node_label)\n\n if list(self.edges(data=True))[0][2].get(\"\u03b2s\") is not None:\n max_median_betas = max(\n [abs(np.median(e[2][\"\u03b2s\"])) for e in self.edges(data=True)]\n )\n\n for e in self.edges(data=True):\n # Calculate reinforcement 
(ad-hoc!)\n\n sts = e[2][\"InfluenceStatements\"]\n total_evidence_pieces = sum([len(s.evidence) for s in sts])\n reinforcement = (\n sum(\n [\n stmt.overall_polarity() * len(stmt.evidence)\n for stmt in sts\n ]\n )\n / total_evidence_pieces\n )\n opacity = total_evidence_pieces / n_max\n h = (opacity * 255).hex()\n\n if list(self.edges(data=True))[0][2].get(\"\u03b2s\") is not None:\n penwidth = 3 * abs(np.median(e[2][\"\u03b2s\"]) / max_median_betas)\n cmap = cm.Greens if reinforcement > 0 else cm.Reds\n c_str = (\n matplotlib.colors.rgb2hex(cmap(abs(reinforcement)))\n + h[4:6]\n )\n else:\n penwidth = 1\n c_str = \"black\"\n\n A.add_edge(e[0], e[1], color=c_str, penwidth=penwidth)\n\n # Drawing indicator variables\n\n if indicators:\n for n in nodes_with_indicators:\n for indicator_name, ind in n[1][\"indicators\"].items():\n node_label = _insert_line_breaks(\n ind.name.replace(\"_\", \" \"), 30\n )\n if indicator_values:\n if ind.unit is not None:\n units = f\" {ind.unit}\"\n else:\n units = \"\"\n\n if ind.mean is not None:\n ind_value = \"{:.2f}\".format(ind.mean)\n node_label = (\n f\"{node_label}\\n{ind_value} {ind.unit}\"\n f\"\\nSource: {ind.source}\"\n f\"\\nAggregation axes: {ind.aggaxes}\"\n f\"\\nAggregation method: {ind.aggregation_method}\"\n )\n\n A.add_node(\n indicator_name,\n style=\"rounded, filled\",\n fillcolor=\"lightblue\",\n label=node_label,\n )\n A.add_edge(n[0], indicator_name, color=\"royalblue4\")\n\n if nodes_to_highlight is not None:\n nodes = kwargs.pop(\"nodes_to_highlight\")\n if isinstance(nodes, list):\n for n in nodes:\n if n in A.nodes():\n A.add_node(n, fontcolor=\"royalblue\")\n elif isinstance(nodes, str):\n if n in A.nodes():\n A.add_node(nodes, fontcolor=\"royalblue\")\n\n if kwargs.get(\"graph_label\") is not None:\n A.graph_attr[\"label\"] = kwargs[\"graph_label\"]\n\n return A", "response": "Exports the CAG as a pygraphviz AGraph."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 
code\ndef dump(node, annotate_fields=True, include_attributes=False, indent=\" \"):\n\n def _format(node, level=0):\n if isinstance(node, ast.AST):\n fields = [(a, _format(b, level)) for a, b in ast.iter_fields(node)]\n if include_attributes and node._attributes:\n fields.extend(\n [\n (a, _format(getattr(node, a), level))\n for a in node._attributes\n ]\n )\n return \"\".join(\n [\n node.__class__.__name__,\n \"(\",\n \", \".join(\n (\"%s=%s\" % field for field in fields)\n if annotate_fields\n else (b for a, b in fields)\n ),\n \")\",\n ]\n )\n elif isinstance(node, list):\n lines = [\"[\"]\n lines.extend(\n (\n indent * (level + 2) + _format(x, level + 2) + \",\"\n for x in node\n )\n )\n if len(lines) > 1:\n lines.append(indent * (level + 1) + \"]\")\n else:\n lines[-1] += \"]\"\n return \"\\n\".join(lines)\n return repr(node)\n\n if not isinstance(node, ast.AST):\n raise TypeError(\"expected AST, got %r\" % node.__class__.__name__)\n return _format(node)", "response": "Return a formatted string of the tree in node."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a Python dict representing the PGM and its additional metadata for PGM output.", "response": "def create_pgm_dict(\n lambdaFile: str,\n asts: List,\n file_name: str,\n mode_mapper_dict: dict,\n save_file=False,\n) -> Dict:\n \"\"\" Create a Python dict representing the PGM, with additional metadata for\n JSON output. 
\"\"\"\n\n lambdaStrings = [\"import math\\n\\n\"]\n state = PGMState(lambdaStrings)\n generator = GrFNGenerator()\n generator.mode_mapper = mode_mapper_dict\n pgm = generator.genPgm(asts, state, {}, \"\")[0]\n if pgm.get(\"start\"):\n pgm[\"start\"] = pgm[\"start\"][0]\n else:\n pgm[\"start\"] = generator.function_defs[-1]\n\n pgm[\"source\"] = [[get_path(file_name, \"source\")]]\n\n # dateCreated stores the date and time on which the lambda and PGM file was created.\n # It is stored in YYYMMDD format\n pgm[\"dateCreated\"] = f\"{datetime.today().strftime('%Y%m%d')}\"\n\n with open(lambdaFile, \"w\") as f:\n f.write(\"\".join(lambdaStrings))\n\n # View the PGM file that will be used to build a scope tree\n if save_file:\n json.dump(pgm, open(file_name[:file_name.rfind(\".\")] + \".json\", \"w\"))\n\n return pgm"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nfilter preassembled statements according to certain rules.", "response": "def filter_and_process_statements(\n sts,\n grounding_score_cutoff: float = 0.8,\n belief_score_cutoff: float = 0.85,\n concepts_of_interest: List[str] = [],\n):\n\n \"\"\" Filter preassembled statements according to certain rules. 
\"\"\"\n filtered_sts = []\n counters = {}\n\n def update_counter(counter_name):\n if counter_name in counters:\n counters[counter_name] += 1\n else:\n counters[counter_name] = 1\n\n for s in tqdm(sts):\n\n update_counter(\"Original number of statements\")\n\n # Apply belief score threshold cutoff\n\n if not s.belief > belief_score_cutoff:\n continue\n update_counter(f\"Statements with belief score > {belief_score_cutoff}\")\n\n # Select statements with UN groundings\n\n if s.subj.db_refs.get(\"UN\") is None or s.obj.db_refs.get(\"UN\") is None:\n continue\n update_counter(\"Statements with UN groundings\")\n\n # Apply grounding score cutoffs\n\n if not all(\n x[1] > grounding_score_cutoff\n for x in (y.db_refs[\"UN\"][0] for y in (s.subj, s.obj))\n ):\n continue\n update_counter(\n f\"Statements with subj and obj grounding scores > {grounding_score_cutoff}\"\n )\n\n # Assign default polarities\n\n if s.subj_delta[\"polarity\"] is None:\n s.subj_delta[\"polarity\"] = 1\n if s.obj_delta[\"polarity\"] is None:\n s.obj_delta[\"polarity\"] = 1\n\n filtered_sts.append(s)\n\n for k, v in counters.items():\n print(f\"{k}: {v}\")\n\n return filtered_sts"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_CAG_with_indicators(input, output, filename=\"CAG_with_indicators.pdf\"):\n with open(input, \"rb\") as f:\n G = pickle.load(f)\n G.map_concepts_to_indicators(min_temporal_res=\"month\")\n G.set_indicator(\"UN/events/weather/precipitation\", \"Historical Average Total Daily Rainfall (Maize)\", \"DSSAT\")\n G.set_indicator(\"UN/events/human/agriculture/food_production\",\n \"Historical Production (Maize)\", \"DSSAT\")\n G.set_indicator(\"UN/entities/human/food/food_security\", \"IPC Phase Classification\", \"FEWSNET\")\n G.set_indicator(\"UN/entities/food_availability\", \"Production, Meat indigenous, total\", \"FAO\")\n G.set_indicator(\"UN/entities/human/financial/economic/market\", \"Inflation 
Rate\", \"ieconomics.com\")\n G.set_indicator(\"UN/events/human/death\", \"Battle-related deaths\", \"WDI\")\n with open(output, \"wb\") as f:\n pickle.dump(G, f)", "response": "Create a CAG with mapped indicators."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nexecute the GrFN over a particular set of inputs and returns the result.", "response": "def run(\n self,\n inputs: Dict[str, Union[float, Iterable]],\n torch_size: Optional[int] = None,\n ) -> Union[float, Iterable]:\n \"\"\"Executes the GrFN over a particular set of inputs and returns the\n result.\n\n Args:\n inputs: Input set where keys are the names of input nodes in the\n GrFN and each key points to a set of input values (or just one).\n\n Returns:\n A set of outputs from executing the GrFN, one for every set of\n inputs.\n \"\"\"\n # Set input values\n for i in self.inputs:\n self.nodes[i][\"value\"] = inputs[i]\n\n for func_set in self.function_sets:\n for func_name in func_set:\n lambda_fn = self.nodes[func_name][\"lambda_fn\"]\n output_node = list(self.successors(func_name))[0]\n\n signature = self.nodes[func_name][\"func_inputs\"]\n input_values = [self.nodes[n][\"value\"] for n in signature]\n res = lambda_fn(*input_values)\n\n if torch_size is not None and len(signature) == 0:\n self.nodes[output_node][\"value\"] = torch.tensor(\n [res] * torch_size, dtype=torch.double\n )\n else:\n self.nodes[output_node][\"value\"] = res\n\n # Return the output\n return self.nodes[self.output_node][\"value\"]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef to_CAG(self):\n\n G = nx.DiGraph()\n for (name, attrs) in self.nodes(data=True):\n if attrs[\"type\"] == \"variable\":\n for pred_fn in self.predecessors(name):\n if not any(\n fn_type in pred_fn\n for fn_type in (\"condition\", \"decision\")\n ):\n for pred_var in self.predecessors(pred_fn):\n G.add_node(\n self.nodes[pred_var][\"basename\"],\n 
**self.nodes[pred_var],\n )\n G.add_node(attrs[\"basename\"], **attrs)\n G.add_edge(\n self.nodes[pred_var][\"basename\"],\n attrs[\"basename\"],\n )\n if attrs[\"is_loop_index\"]:\n G.add_edge(attrs[\"basename\"], attrs[\"basename\"])\n\n return G", "response": "Export the current state of the variable to a Causal Analysis Graph."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nbuild a GrFN object from a JSON file.", "response": "def from_json_and_lambdas(cls, file: str, lambdas):\n \"\"\"Builds a GrFN from a JSON object.\n\n Args:\n cls: The class variable for object creation.\n file: Filename of a GrFN JSON file.\n\n Returns:\n type: A GroundedFunctionNetwork object.\n\n \"\"\"\n with open(file, \"r\") as f:\n data = json.load(f)\n\n return cls.from_dict(data, lambdas)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef from_dict(cls, data: Dict, lambdas):\n functions = {d[\"name\"]: d for d in data[\"functions\"]}\n G = nx.DiGraph()\n scope_tree = nx.DiGraph()\n\n def add_variable_node(\n basename: str, parent: str, index: int, is_loop_index: bool = False\n ):\n G.add_node(\n f\"{parent}::{basename}_{index}\",\n type=\"variable\",\n color=\"maroon\",\n parent=parent,\n label=f\"{basename}_{index}\",\n basename=basename,\n is_loop_index=is_loop_index,\n padding=15,\n value=None,\n )\n\n def find_correct_scope(cur_scope, var_name):\n if cur_scope.parent is None:\n return cur_scope.name, -1\n\n new_scope = cur_scope.parent\n\n if var_name in new_scope.variables:\n return new_scope.name, new_scope.variables[var_name]\n else:\n return find_correct_scope(new_scope, var_name)\n\n def process_container(\n scope, loop_index_variable: Optional[str] = None\n ):\n for stmt in scope.body:\n if \"name\" in stmt:\n stmt_type = functions[stmt[\"name\"]][\"type\"]\n if stmt_type in (\"assign\", \"condition\", \"decision\"):\n if stmt_type == \"assign\":\n if \"body\" in functions[stmt[\"name\"]]:\n 
stmt_type = \"literal\"\n\n output = stmt[\"output\"]\n scope.variables[output[\"variable\"]] = output[\"index\"]\n add_variable_node(\n output[\"variable\"], scope.name, output[\"index\"]\n )\n G.add_edge(\n stmt[\"name\"],\n f\"{scope.name}::{output['variable']}_{output['index']}\",\n )\n\n ordered_inputs = list()\n for input in stmt.get(\"input\", []):\n parent = scope.name\n index = input[\"index\"]\n base_name = input[\"variable\"]\n if (\n index == -1\n and base_name != loop_index_variable\n ):\n parent, index = find_correct_scope(\n scope, base_name\n )\n\n add_variable_node(\n base_name,\n parent,\n index,\n base_name == loop_index_variable,\n )\n node_name = f\"{parent}::{base_name}_{index}\"\n ordered_inputs.append(node_name)\n G.add_edge(node_name, stmt[\"name\"])\n\n G.add_node(\n stmt[\"name\"],\n type=\"function\",\n lambda_fn=getattr(lambdas, stmt[\"name\"]),\n func_inputs=ordered_inputs,\n shape=\"rectangle\",\n parent=scope.name,\n label=stmt_type[0].upper(),\n padding=10,\n )\n\n elif stmt_type == \"loop_plate\":\n index_variable = functions[stmt[\"name\"]][\n \"index_variable\"\n ]\n scope_tree.add_node(stmt[\"name\"], color=\"blue\")\n scope_tree.add_edge(scope.name, stmt[\"name\"])\n new_scope = ScopeNode(\n functions[stmt[\"name\"]], parent=scope\n )\n process_container(\n new_scope, loop_index_variable=index_variable\n )\n else:\n pass\n elif \"function\" in stmt and stmt[\"function\"] != \"print\":\n scope_tree.add_node(stmt[\"function\"], color=\"green\")\n scope_tree.add_edge(scope.name, stmt[\"function\"])\n new_scope = ScopeNode(\n functions[stmt[\"function\"]], parent=scope\n )\n process_container(new_scope)\n\n root = data[\"start\"]\n scope_tree.add_node(root, color=\"green\")\n cur_scope = ScopeNode(functions[root])\n process_container(cur_scope)\n return cls(G, scope_tree)", "response": "Builds a GrFN object from a set of extracted function data objects and a set of lambdas."} {"SOURCE": "codesearchnet", "instruction": "How would 
you explain what the following Python 3 function does\ndef from_python_file(\n cls, python_file, lambdas_path, json_filename: str, stem: str\n ):\n \"\"\"Builds GrFN object from Python file.\"\"\"\n with open(python_file, \"r\") as f:\n pySrc = f.read()\n return cls.from_python_src(pySrc, lambdas_path, json_filename, stem)", "response": "Builds GrFN object from Python file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef from_python_src(\n cls,\n pySrc,\n lambdas_path,\n json_filename: str,\n stem: str,\n save_file: bool = False,\n ):\n \"\"\"Builds GrFN object from Python source code.\"\"\"\n asts = [ast.parse(pySrc)]\n pgm_dict = genPGM.create_pgm_dict(\n lambdas_path,\n asts,\n json_filename,\n {\"FileName\": f\"{stem}.py\"}, # HACK\n )\n lambdas = importlib.__import__(stem + \"_lambdas\")\n return cls.from_dict(pgm_dict, lambdas)", "response": "Builds GrFN object from Python source code."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nbuild a GrFN object from a Fortran file.", "response": "def from_fortran_file(cls, fortran_file: str, tmpdir: str = \".\"):\n \"\"\"Builds GrFN object from a Fortran program.\"\"\"\n stem = Path(fortran_file).stem\n if tmpdir == \".\" and \"/\" in fortran_file:\n tmpdir = Path(fortran_file).parent\n preprocessed_fortran_file = f\"{tmpdir}/{stem}_preprocessed.f\"\n lambdas_path = f\"{tmpdir}/{stem}_lambdas.py\"\n json_filename = stem + \".json\"\n\n with open(fortran_file, \"r\") as f:\n inputLines = f.readlines()\n\n with open(preprocessed_fortran_file, \"w\") as f:\n f.write(preprocessor.process(inputLines))\n\n xml_string = sp.run(\n [\n \"java\",\n \"fortran.ofp.FrontEnd\",\n \"--class\",\n \"fortran.ofp.XMLPrinter\",\n \"--verbosity\",\n \"0\",\n preprocessed_fortran_file,\n ],\n stdout=sp.PIPE,\n ).stdout\n trees = [ET.fromstring(xml_string)]\n comments = get_comments.get_comments(preprocessed_fortran_file)\n 
os.remove(preprocessed_fortran_file)\n xml_to_json_translator = translate.XMLToJSONTranslator()\n outputDict = xml_to_json_translator.analyze(trees, comments)\n pySrc = pyTranslate.create_python_source_list(outputDict)[0][0]\n\n G = cls.from_python_src(pySrc, lambdas_path, json_filename, stem)\n return G"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates a GroundedFunctionNetwork instance from a string with raw Fortran source code.", "response": "def from_fortran_src(cls, fortran_src: str, dir: str = \".\"):\n \"\"\" Create a GroundedFunctionNetwork instance from a string with raw\n Fortran code.\n\n Args:\n fortran_src: A string with Fortran source code.\n dir: (Optional) - the directory in which the temporary Fortran file\n will be created (make sure you have write permission!) Defaults to\n the current directory.\n Returns:\n A GroundedFunctionNetwork instance\n \"\"\"\n import tempfile\n fp = tempfile.NamedTemporaryFile('w+t', delete=False, dir=dir)\n fp.writelines(fortran_src)\n fp.close()\n G = cls.from_fortran_file(fp.name, dir)\n os.remove(fp.name)\n return G"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef clear(self):\n for n in self.nodes():\n if self.nodes[n][\"type\"] == \"variable\":\n self.nodes[n][\"value\"] = None\n elif self.nodes[n][\"type\"] == \"function\":\n self.nodes[n][\"func_visited\"] = False", "response": "Clear variable nodes for next computation."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef to_FIB(self, other):\n\n if not isinstance(other, GroundedFunctionNetwork):\n raise TypeError(\n f\"Expected GroundedFunctionNetwork, but got {type(other)}\"\n )\n\n def shortname(var):\n return var[var.find(\"::\") + 2 : var.rfind(\"_\")]\n\n def shortname_vars(graph, shortname):\n return [v for v in graph.nodes() if shortname in v]\n\n this_var_nodes = [\n shortname(n)\n for 
(n, d) in self.nodes(data=True)\n if d[\"type\"] == \"variable\"\n ]\n other_var_nodes = [\n shortname(n)\n for (n, d) in other.nodes(data=True)\n if d[\"type\"] == \"variable\"\n ]\n\n shared_vars = set(this_var_nodes).intersection(set(other_var_nodes))\n full_shared_vars = {\n full_var\n for shared_var in shared_vars\n for full_var in shortname_vars(self, shared_var)\n }\n\n return ForwardInfluenceBlanket(self, full_shared_vars)", "response": "Creates a ForwardInfluenceBlanket object representing the intersection of this model with the input model."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nexport to a PyGraphviz AGraph object.", "response": "def to_agraph(self):\n \"\"\" Export to a PyGraphviz AGraph object. \"\"\"\n A = nx.nx_agraph.to_agraph(self)\n A.graph_attr.update(\n {\"dpi\": 227, \"fontsize\": 20, \"fontname\": \"Menlo\", \"rankdir\": \"TB\"}\n )\n A.node_attr.update({\"fontname\": \"Menlo\"})\n\n def build_tree(cluster_name, root_graph):\n subgraph_nodes = [\n n[0]\n for n in self.nodes(data=True)\n if n[1][\"parent\"] == cluster_name\n ]\n root_graph.add_nodes_from(subgraph_nodes)\n subgraph = root_graph.add_subgraph(\n subgraph_nodes,\n name=f\"cluster_{cluster_name}\",\n label=cluster_name,\n style=\"bold, rounded\",\n )\n for n in self.scope_tree.successors(cluster_name):\n build_tree(n, subgraph)\n\n root = [n for n, d in self.scope_tree.in_degree() if d == 0][0]\n build_tree(root, A)\n return A"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef to_CAG_agraph(self):\n CAG = self.to_CAG()\n A = nx.nx_agraph.to_agraph(CAG)\n A.graph_attr.update({\"dpi\": 227, \"fontsize\": 20, \"fontname\": \"Menlo\"})\n A.node_attr.update(\n {\n \"shape\": \"rectangle\",\n \"color\": \"#650021\",\n \"style\": \"rounded\",\n \"fontname\": \"Gill Sans\",\n }\n )\n A.edge_attr.update({\"color\": \"#650021\", \"arrowsize\": 0.5})\n return A", "response": "Returns a 
variable - only view of the GrFN object in the form of an AGraph."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nbuild a PyGraphviz AGraph object corresponding to a call graph of functions.", "response": "def to_call_agraph(self):\n \"\"\" Build a PyGraphviz AGraph object corresponding to a call graph of\n functions. \"\"\"\n\n A = nx.nx_agraph.to_agraph(self.call_graph)\n A.graph_attr.update({\"dpi\": 227, \"fontsize\": 20, \"fontname\": \"Menlo\"})\n A.node_attr.update(\n {\"shape\": \"rectangle\", \"color\": \"#650021\", \"style\": \"rounded\"}\n )\n A.edge_attr.update({\"color\": \"#650021\", \"arrowsize\": 0.5})\n return A"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nexecutes the FIB over a particular set of inputs and returns the result.", "response": "def run(\n self,\n inputs: Dict[str, Union[float, Iterable]],\n covers: Dict[str, Union[float, Iterable]],\n torch_size: Optional[int] = None,\n ) -> Union[float, Iterable]:\n \"\"\"Executes the FIB over a particular set of inputs and returns the\n result.\n Args:\n inputs: Input set where keys are the names of input nodes in the\n GrFN and each key points to a set of input values (or just one).\n Returns:\n A set of outputs from executing the GrFN, one for every set of\n inputs.\n \"\"\"\n # Abort run if covers does not match our expected cover set\n if len(covers) != len(self.cover_nodes):\n raise ValueError(\"Incorrect number of cover values.\")\n\n # Set the cover node values\n for node_name, val in covers.items():\n self.nodes[node_name][\"value\"] = val\n\n return super().run(inputs, torch_size)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef S2_surface(self, sizes, bounds, presets, covers, use_torch=False,\n num_samples = 10):\n \"\"\"Calculates the sensitivity surface of a GrFN for the two variables with\n the highest S2 index.\n\n Args:\n num_samples: Number of 
samples for sensitivity analysis.\n sizes: Tuple of (number of x inputs, number of y inputs).\n bounds: Set of bounds for GrFN inputs.\n presets: Set of standard values for GrFN inputs.\n\n Returns:\n Tuple:\n Tuple: The names of the two variables that were selected\n Tuple: The X, Y vectors of eval values\n Z: The numpy matrix of output evaluations\n\n \"\"\"\n args = self.inputs\n Si = self.sobol_analysis(\n num_samples,\n {\n \"num_vars\": len(args),\n \"names\": args,\n \"bounds\": [bounds[arg] for arg in args],\n },\n covers\n )\n S2 = Si[\"S2\"]\n (s2_max, v1, v2) = get_max_s2_sensitivity(S2)\n\n x_var = args[v1]\n y_var = args[v2]\n search_space = [(x_var, bounds[x_var]), (y_var, bounds[y_var])]\n preset_vals = {\n arg: presets[arg]\n for i, arg in enumerate(args)\n if i != v1 and i != v2\n }\n\n X = np.linspace(*search_space[0][1], sizes[0])\n Y = np.linspace(*search_space[1][1], sizes[1])\n\n if use_torch:\n Xm, Ym = torch.meshgrid(torch.tensor(X), torch.tensor(Y))\n inputs = {n: torch.full_like(Xm, v) for n, v in presets.items()}\n inputs.update({search_space[0][0]: Xm, search_space[1][0]: Ym})\n Z = self.run(inputs, covers).numpy()\n else:\n Xm, Ym = np.meshgrid(X, Y)\n Z = np.zeros((len(X), len(Y)))\n for x, y in itertools.product(range(len(X)), range(len(Y))):\n inputs = {n: v for n, v in presets.items()}\n inputs.update({search_space[0][0]: x, search_space[1][0]: y})\n Z[x][y] = self.run(inputs, covers)\n\n return X, Y, Z, x_var, y_var", "response": "Calculates the sensitivity surface of a GrFN for the two variables with the highest S2 index."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef process_direct_map(self, root, state) -> List[Dict]:\n\n val = {\"tag\": root.tag, \"args\": []}\n for node in root:\n val[\"args\"] += self.parseTree(node, state)\n return [val]", "response": "Handles tags that are mapped directly from xml to IR with no\n additional processing other than 
recursive translation of any child\n nodes."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nparse the XML tree recursively to generate a JSON AST which can be ingested by other scripts.", "response": "def parseTree(self, root, state: ParseState) -> List[Dict]:\n \"\"\"\n Parses the XML ast tree recursively to generate a JSON AST\n which can be ingested by other scripts to generate Python\n scripts.\n\n Args:\n root: The current root of the tree.\n state: The current state of the tree defined by an object of the\n ParseState class.\n\n Returns:\n ast: A JSON ast that defines the structure of the Fortran file.\n \"\"\"\n\n if root.tag in self.AST_TAG_HANDLERS:\n return self.AST_TAG_HANDLERS[root.tag](root, state)\n\n elif root.tag in self.libRtns:\n return self.process_libRtn(root, state)\n\n else:\n prog = []\n for node in root:\n prog += self.parseTree(node, state)\n return prog"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef loadFunction(self, root):\n for element in root.iter():\n if element.tag == \"function\":\n self.functionList.append(element.attrib[\"name\"])", "response": "Loads a list with all the functions in the Fortran File."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef analyze(\n self, trees: List[ET.ElementTree], comments: OrderedDict\n ) -> Dict:\n outputDict = {}\n ast = []\n\n # Parse through the ast once to identify and grab all the functions\n # present in the Fortran file.\n for tree in trees:\n self.loadFunction(tree)\n\n # Parse through the ast tree a second time to convert the XML ast\n # format to a format that can be used to generate Python statements.\n for tree in trees:\n ast += self.parseTree(tree, ParseState())\n\n \"\"\"\n Find the entry point for the Fortran file.\n The entry point for a conventional Fortran file is always the PROGRAM\n section. 
This 'if' statement checks for the presence of a PROGRAM\n segment.\n\n If not found, the entry point can be any of the functions or\n subroutines in the file. So, all the functions and subroutines of the\n program are listed and included as the possible entry point.\n \"\"\"\n if self.entryPoint:\n entry = {\"program\": self.entryPoint[0]}\n else:\n entry = {}\n if self.functionList:\n entry[\"function\"] = self.functionList\n if self.subroutineList:\n entry[\"subroutine\"] = self.subroutineList\n\n # Load the functions list and Fortran ast to a single data structure\n # which can be pickled and hence is portable across various scripts and\n # usages.\n outputDict[\"ast\"] = ast\n outputDict[\"functionList\"] = self.functionList\n outputDict[\"comments\"] = comments\n return outputDict", "response": "This function will load the functions and subroutines and return a dictionary of the information that can be used to generate the Fortran file."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef main():\n parser = ArgumentParser(\n description=\"Dynamic Bayes Net Executable Model\",\n formatter_class=ArgumentDefaultsHelpFormatter,\n )\n\n def add_flag(short_arg: str, long_arg: str, help: str):\n parser.add_argument(\n \"-\" + short_arg, \"--\" + long_arg, help=help, action=\"store_true\"\n )\n\n subparsers = parser.add_subparsers()\n\n parser_execute = subparsers.add_parser(\"execute\", help=\"Model execution\")\n parser_execute.set_defaults(func=execute)\n\n # ==========================================================================\n # Model execution options\n # ==========================================================================\n\n parser_execute.add_argument(\n \"--input_dressed_cag\",\n help=\"Path to the input dressed cag\",\n type=str,\n default=\"delphi_model.pkl\",\n )\n\n parser_execute.add_argument(\n \"--steps\",\n help=\"Number of time steps to take\",\n type=partial(positive_int, 
\"steps\"),\n default=5,\n )\n\n parser_execute.add_argument(\n \"--samples\",\n help=\"Number of sequences to sample\",\n type=partial(positive_int, \"samples\"),\n default=100,\n )\n\n parser_execute.add_argument(\n \"--output_sequences\",\n help=\"Output file containing sampled sequences\",\n type=str,\n default=\"dbn_sampled_sequences.csv\",\n )\n\n parser_execute.add_argument(\n \"--input_variables\",\n help=\"Path to the variables of the input dressed cag\",\n type=str,\n default=\"bmi_config.txt\",\n )\n\n args = parser.parse_args()\n\n if len(sys.argv) == 1:\n parser.print_help()\n\n else:\n args.func(args)", "response": "The main function of the CLI."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nconstruct FAO variable ontology for use with Eidos.", "response": "def construct_FAO_ontology():\n \"\"\" Construct FAO variable ontology for use with Eidos. \"\"\"\n df = pd.read_csv(\"south_sudan_data_fao.csv\")\n gb = df.groupby(\"Element\")\n\n d = [\n {\n \"events\": [\n {\n k: [\n {e: [process_variable_name(k, e)]}\n for e in list(set(gb.get_group(k)[\"Item\"].tolist()))\n ]\n }\n for k in gb.groups.keys()\n ]\n }\n ]\n\n yaml = YAML()\n yaml.default_flow_style = False\n\n with open(\"fao_variable_ontology.yml\", \"w\") as f:\n yaml.dump(d, f)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndrill down into an edge in the analysis graph and inspect its provenance.", "response": "def inspect_edge(G: AnalysisGraph, source: str, target: str):\n \"\"\" 'Drill down' into an edge in the analysis graph and inspect its\n provenance. 
This function prints the provenance.\n\n Args:\n G\n source\n target\n \"\"\"\n\n return create_statement_inspection_table(\n G[source][target][\"InfluenceStatements\"]\n )"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _get_edge_sentences(\n G: AnalysisGraph, source: str, target: str\n) -> List[str]:\n \"\"\" Return the sentences that led to the construction of a specified edge.\n\n Args:\n G\n source: The source of the edge.\n target: The target of the edge.\n \"\"\"\n\n return chain.from_iterable(\n [\n [repr(e.text) for e in s.evidence]\n for s in G.edges[source, target][\"InfluenceStatements\"]\n ]\n )", "response": "Returns the list of sentences that led to the construction of a specified edge."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the NodeType given a name of a JSON function object.", "response": "def get_node_type(type_str):\n \"\"\"Returns the NodeType given a name of a JSON function object.\"\"\"\n if type_str == \"container\":\n return NodeType.CONTAINER\n elif type_str == \"loop_plate\":\n return NodeType.LOOP\n elif type_str == \"assign\":\n return NodeType.ASSIGN\n elif type_str == \"condition\":\n return NodeType.CONDITION\n elif type_str == \"decision\":\n return NodeType.DECISION\n else:\n raise ValueError(\"Unrecognized type string: \", type_str)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef list_data_type(type_list):\n data_type = []\n for item in type_list:\n match = re.match(r\"(\\d+)(.+)\", item)\n if not match:\n reps = 1\n if item[0] in \"FfEegG\":\n data_type.append(\"REAL\")\n elif item[0] in \"Ii\":\n data_type.append(\"INTEGER\")\n else:\n reps = match.group(1)\n fmt = match.group(2)\n if \"(\" in fmt and \",\" in fmt:\n fmt = fmt[1:-1].split(\",\")\n elif \"(\" in fmt:\n fmt = [fmt[1:-1]]\n else:\n fmt = [fmt]\n for i in range(int(reps)):\n for ft in fmt:\n if 
ft[0] in \"FfEegG\":\n data_type.append(\"REAL\")\n elif ft[0] in \"Ii\":\n data_type.append(\"INTEGER\")\n return data_type", "response": "This function takes a list of format specifiers and returns a list of data types represented by the format specifiers."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef init_read_line(self):\n format_list = self._format_list\n self._re_cvt = self.match_input_fmt(format_list)\n regexp0_str = \"\".join([subs[0] for subs in self._re_cvt])\n self._regexp_str = regexp0_str\n self._re = re.compile(regexp0_str)\n self._match_exps = [\n subs[1] for subs in self._re_cvt if subs[1] is not None\n ]\n self._divisors = [subs[2] for subs in self._re_cvt if subs[2] is not None]\n self._in_cvt_fns = [\n subs[3] for subs in self._re_cvt if subs[3] is not None\n ]\n self._read_line_init = True", "response": "Initialize the internal state of the read line that is relevant to input matching"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ninitialize the internal state of the write line", "response": "def init_write_line(self):\n \"\"\"init_write_line() initializes fields relevant to output generation\"\"\"\n format_list = self._format_list\n output_info = self.gen_output_fmt(format_list)\n self._output_fmt = \"\".join([sub[0] for sub in output_info])\n self._out_gen_fmt = [sub[1] for sub in output_info if sub[1] is not None]\n self._out_widths = [sub[2] for sub in output_info if sub[2] is not None]\n self._write_line_init = True"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef read_line(self, line):\n\n if not self._read_line_init:\n self.init_read_line()\n\n match = self._re.match(line)\n assert match is not None, f\"Format mismatch (line = {line})\"\n\n matched_values = []\n for i in range(self._re.groups):\n cvt_re = self._match_exps[i]\n cvt_div = self._divisors[i]\n cvt_fn = 
self._in_cvt_fns[i]\n match_str = match.group(i + 1)\n\n match0 = re.match(cvt_re, match_str)\n if match0 is not None:\n if cvt_fn == \"float\":\n if \".\" in match_str:\n val = float(match_str)\n else:\n val = int(match_str) / cvt_div\n elif cvt_fn == \"int\":\n val = int(match_str)\n else:\n sys.stderr.write(\n f\"Unrecognized conversion function: {cvt_fn}\\n\"\n )\n else:\n sys.stderr.write(\n f\"Format conversion failed: {match_str}\\n\"\n )\n\n matched_values.append(val)\n\n return tuple(matched_values)", "response": "Match a line of input according to the format specified and return a tuple of the matched values"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nwriting a line of output according to the format specified to generate", "response": "def write_line(self, values):\n \"\"\"\n Process a list of values according to the format specified to generate\n a line of output.\n \"\"\"\n\n if not self._write_line_init:\n self.init_write_line()\n\n if len(self._out_widths) > len(values):\n raise For2PyError(f\"ERROR: too few values for format {self._format_list}\\n\")\n\n out_strs = []\n for i in range(len(self._out_widths)):\n out_fmt = self._out_gen_fmt[i]\n out_width = self._out_widths[i]\n\n out_val = out_fmt.format(values[i])\n if len(out_val) > out_width: # value too big for field\n out_val = \"*\" * out_width\n\n out_strs.append(out_val)\n\n out_str_exp = (\n '\"' + self._output_fmt + '\".format' + str(tuple(out_strs))\n )\n out_str = eval(out_str_exp)\n return out_str + \"\\n\""} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef match_input_fmt(self, fmt_list):\n\n rexp_list = []\n for fmt in fmt_list:\n rexp_list.extend(self.match_input_fmt_1(fmt))\n\n return rexp_list", "response": "Given a list of Fortran format specifiers e. g. I5 2X F4. 
1 returns a list of tuples for matching an input\n string against those format specifiers."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngives a single format specifier, e.g., '2X', 'I5', etc., this function constructs a list of tuples for matching against that specifier. Each element of this list is a tuple (xtract_re, cvt_re, divisor, cvt_fn) where: xtract_re is a regular expression that extracts an input field of the requisite width; cvt_re is a regular expression that matches the character sequence extracted by xtract_re against the specified format; divisor is the value to divide by in order to get the appropriate number of decimal places if a decimal point is not given in the input value (meaningful only for floats); and cvt_fn is a string denoting the function to be used to convert the matched string to a value.", "response": "def match_input_fmt_1(self, fmt):\n \"\"\"\n Given a single format specifier, e.g., '2X', 'I5', etc., this function\n constructs a list of tuples for matching against that specifier. 
Each\n element of this list is a tuple\n\n (xtract_re, cvt_re, divisor, cvt_fn)\n\n where:\n\n xtract_re is a regular expression that extracts an input field of\n the requisite width;\n cvt_re is a regular expression that matches the character sequence\n extracted by xtract_re against the specified format;\n divisor is the value to divide by in order to get the appropriate\n number of decimal places if a decimal point is not given\n in the input value (meaningful only for floats); and\n cvt_fn is a string denoting the function to be used to convert the\n matched string to a value.\n \"\"\"\n\n # first, remove any surrounding space\n fmt = fmt.strip()\n\n # get any leading digits indicating repetition\n match = re.match(\"(\\d+)(.+)\", fmt)\n if match is None:\n reps = 1\n else:\n reps = int(match.group(1))\n fmt = match.group(2)\n\n if fmt[0] == \"(\": # process parenthesized format list recursively\n fmt = fmt[1:-1]\n fmt_list = fmt.split(\",\")\n rexp = self.match_input_fmt(fmt_list)\n else:\n if fmt[0] in \"iI\": # integer\n sz = fmt[1:]\n xtract_rexp = \"(.{\" + sz + \"})\" # r.e. for extraction\n leading_sp = \" *\"\n optional_sign = \"-?\"\n rexp0 = \"\\d+\"\n rexp1 = leading_sp + optional_sign + rexp0 # r.e. for matching\n divisor = 1\n rexp = [(xtract_rexp, rexp1, divisor, \"int\")]\n\n elif fmt[0] in \"xX\": # skip\n xtract_rexp = \".\" # r.e. for extraction\n rexp = [(xtract_rexp, None, None, None)]\n\n elif fmt[0] in \"fF\": # floating point\n idx0 = fmt.find(\".\")\n sz = fmt[1:idx0]\n divisor = 10 ** (int(fmt[idx0 + 1 :]))\n xtract_rexp = \"(.{\" + sz + \"})\" # r.e. for extraction\n leading_sp = \" *\"\n optional_sign = \"-?\"\n rexp0 = \"\\d+(\\.\\d+)?\"\n rexp1 = leading_sp + optional_sign + rexp0 # r.e. 
for matching\n rexp = [(xtract_rexp, rexp1, divisor, \"float\")]\n else:\n raise For2PyError(\n f\"ERROR: Unrecognized format specifier {fmt}\\n\"\n )\n\n # replicate the regular expression by the repetition factor in the format\n rexp *= reps\n\n return rexp"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngives a list of Fortran format specifiers e. g. I5 2X F4. 1 returns a list of tuples for constructing an output string based on those format specifiers.", "response": "def gen_output_fmt(self, fmt_list):\n \"\"\"given a list of Fortran format specifiers, e.g., ['I5', '2X', 'F4.1'],\n this function constructs a list of tuples for constructing an output\n string based on those format specifiers.\"\"\"\n\n rexp_list = []\n for fmt in fmt_list:\n rexp_list.extend(self.gen_output_fmt_1(fmt))\n\n return rexp_list"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef gen_output_fmt_1(self, fmt):\n\n # first, remove any surrounding space\n fmt = fmt.strip()\n\n # get any leading digits indicating repetition\n match = re.match(\"(\\d+)(.+)\", fmt)\n if match is None:\n reps = 1\n else:\n reps = int(match.group(1))\n fmt = match.group(2)\n\n if fmt[0] == \"(\": # process parenthesized format list recursively\n fmt = fmt[1:-1]\n fmt_list = fmt.split(\",\")\n rexp = self.gen_output_fmt(fmt_list)\n else:\n if fmt[0] in \"iI\": # integer\n sz = fmt[1:]\n gen_fmt = \"{}\"\n cvt_fmt = \"{:\" + str(sz) + \"d}\"\n rexp = [(gen_fmt, cvt_fmt, int(sz))]\n\n elif fmt[0] in \"xX\":\n gen_fmt = \" \"\n rexp = [(gen_fmt, None, None)]\n\n elif fmt[0] in \"eEfFgG\": # various floating point formats\n idx0 = fmt.find(\".\")\n sz = fmt[1:idx0]\n suffix = fmt[idx0 + 1 :]\n # The 'E' and G formats can optionally specify the width of\n # the exponent, e.g.: 'E15.3E2'. 
For now we ignore any such\n # the exponent width -- but if it's there, we need to extract\n # the sequence of digits before it.\n m = re.match(\"(\\d+).*\", suffix)\n assert m is not None, f\"Improper format? '{fmt}'\"\n prec = m.group(1)\n gen_fmt = \"{}\"\n cvt_fmt = \"{:\" + sz + \".\" + prec + fmt[0] + \"}\"\n rexp = [(gen_fmt, cvt_fmt, int(sz))]\n\n elif fmt[0] in \"pP\": # scaling factor\n # For now we ignore scaling: there are lots of other things we\n # need to spend time on. To fix later if necessary.\n rest_of_fmt = fmt[1:]\n rexp = self.gen_output_fmt_1(rest_of_fmt)\n\n elif fmt[0] in \"'\\\"\": # character string\n sz = len(fmt) - 2 # -2 for the quote at either end\n gen_fmt = fmt[1:-1]\n rexp = [(gen_fmt, None, None)]\n\n elif fmt[0] == \"/\": # newlines\n gen_fmt = \"\\\\n\" * len(fmt)\n rexp = [(gen_fmt, None, None)]\n\n else:\n raise For2PyError(\n f\"ERROR: Unrecognized format specifier {fmt}\\n\"\n )\n\n # replicate the regular expression by the repetition factor in the format\n rexp *= reps\n\n return rexp", "response": "given a single format specifier get_output_fmt_1() constructs and returns a list of tuples for matching against that specifier."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nconstruct a conditional probability density function for a particular base object.", "response": "def constructConditionalPDF(\n gb, rs: np.ndarray, e: Tuple[str, str, Dict]\n) -> gaussian_kde:\n \"\"\" Construct a conditional probability density function for a particular\n AnalysisGraph edge. \"\"\"\n\n adjective_response_dict = {}\n all_\u03b8s = []\n\n # Setting \u03c3_X and \u03c3_Y that are in Eq. 
1.21 of the model document.\n # This assumes that the real-valued variables representing the abstract\n # concepts are on the order of 1.0.\n # TODO Make this more general.\n\n \u03c3_X = \u03c3_Y = 0.1\n\n for stmt in e[2][\"InfluenceStatements\"]:\n for ev in stmt.evidence:\n # To account for discrepancy between Hume and Eidos extractions\n if ev.annotations.get(\"subj_adjectives\") is not None:\n for subj_adjective in ev.annotations[\"subj_adjectives\"]:\n if (\n subj_adjective in gb.groups\n and subj_adjective not in adjective_response_dict\n ):\n adjective_response_dict[subj_adjective] = get_respdevs(\n gb.get_group(subj_adjective)\n )\n rs_subj = stmt.subj_delta[\n \"polarity\"\n ] * adjective_response_dict.get(subj_adjective, rs)\n\n for obj_adjective in ev.annotations[\"obj_adjectives\"]:\n if (\n obj_adjective in gb.groups\n and obj_adjective not in adjective_response_dict\n ):\n adjective_response_dict[\n obj_adjective\n ] = get_respdevs(gb.get_group(obj_adjective))\n\n rs_obj = stmt.obj_delta[\n \"polarity\"\n ] * adjective_response_dict.get(obj_adjective, rs)\n\n xs1, ys1 = np.meshgrid(rs_subj, rs_obj, indexing=\"xy\")\n \u03b8s = np.arctan2(\u03c3_Y * ys1.flatten(), xs1.flatten())\n all_\u03b8s.append(\u03b8s)\n\n # Prior\n xs1, ys1 = np.meshgrid(\n stmt.subj_delta[\"polarity\"] * rs,\n stmt.obj_delta[\"polarity\"] * rs,\n indexing=\"xy\",\n )\n # TODO - make the setting of \u03c3_X and \u03c3_Y more automated\n \u03b8s = np.arctan2(\u03c3_Y * ys1.flatten(), \u03c3_X * xs1.flatten())\n\n # all_\u03b8s.append(\u03b8s)\n # return gaussian_kde(np.concatenate(all_\u03b8s))\n\n if len(all_\u03b8s) == 0:\n all_\u03b8s.append(\u03b8s)\n return gaussian_kde(all_\u03b8s)\n else:\n return gaussian_kde(np.concatenate(all_\u03b8s))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nprocess the variable name to make it more human - readable.", "response": "def get_variable_and_source(x: str):\n \"\"\" Process the variable name to make it 
more human-readable. \"\"\"\n xs = x.replace(\"\\/\", \"|\").split(\"/\")\n xs = [x.replace(\"|\", \"/\") for x in xs]\n if xs[0] == \"FAO\":\n return \" \".join(xs[2:]), xs[0]\n else:\n return xs[-1], xs[0]"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef construct_concept_to_indicator_mapping(n: int = 1) -> Dict[str, List[str]]:\n\n df = pd.read_sql_table(\"concept_to_indicator_mapping\", con=engine)\n gb = df.groupby(\"Concept\")\n\n _dict = {\n k: [get_variable_and_source(x) for x in take(n, v[\"Indicator\"].values)]\n for k, v in gb\n }\n return _dict", "response": "Create a dictionary mapping high - level concepts to low - level indicators."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef setup(self, builder: Builder):\n self.config = builder.configuration.mortality\n self.population_view = builder.population.get_view(['alive'], query=\"alive == 'alive'\")\n self.randomness = builder.randomness.get_stream('mortality')\n\n self.mortality_rate = builder.value.register_rate_producer('mortality_rate', source=self.base_mortality_rate)\n\n builder.event.register_listener('time_step', self.determine_deaths)", "response": "Performs this component s simulation setup."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncomputing the base mortality rate for every individual in the index.", "response": "def base_mortality_rate(self, index: pd.Index) -> pd.Series:\n \"\"\"Computes the base mortality rate for every individual.\n\n Parameters\n ----------\n index :\n A representation of the simulants to compute the base mortality\n rate for.\n\n Returns\n -------\n The base mortality rate for all simulants in the index.\n \"\"\"\n return pd.Series(self.config.mortality_rate, index=index)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndetermines who dies each time step.", "response": "def 
determine_deaths(self, event: Event):\n \"\"\"Determines who dies each time step.\n\n Parameters\n ----------\n event :\n An event object emitted by the simulation containing an index\n representing the simulants affected by the event and timing\n information.\n \"\"\"\n effective_rate = self.mortality_rate(event.index)\n effective_probability = 1 - np.exp(-effective_rate)\n draw = self.randomness.get_draw(event.index)\n affected_simulants = draw < effective_probability\n self.population_view.update(pd.Series('dead', index=event.index[affected_simulants]))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _prep_components(component_list: Sequence[str]) -> List[Tuple[str, Tuple[str]]]:\n components = []\n for c in component_list:\n path, args_plus = c.split('(')\n cleaned_args = _clean_args(args_plus[:-1].split(','), path)\n components.append((path, cleaned_args))\n return components", "response": "Transform component description strings into tuples of component paths and required arguments."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_components(self, component_config: Union[ConfigTree, List]) -> List:\n if isinstance(component_config, ConfigTree):\n component_list = self.parse_component_config(component_config.to_dict())\n else: # Components were specified in a list rather than a tree.\n component_list = component_config\n component_list = _prep_components(component_list)\n return _import_and_instantiate_components(component_list)", "response": "Extracts component specifications from configuration information and returns initialized components."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef parse_component_config(self, component_config: Dict[str, Union[Dict, List]]) -> List[str]:\n return _parse_component_config(component_config)", "response": "Parses a hierarchical component 
specification into a list of standardized component definitions."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nload the SAR format logfile in ASCII format.", "response": "def load_file(self):\n \"\"\"\n Loads SAR format logfile in ASCII format (sarXX).\n :return: ``True`` if loading and parsing of file went fine, \\\n ``False`` if it failed (at any point)\n \"\"\"\n\n # We first split file into pieces\n searchunks = self._split_file()\n\n if searchunks:\n\n # And then we parse pieces into meaningful data\n usage = self._parse_file(searchunks)\n\n if 'CPU' in usage:\n return False\n\n self._sarinfo = usage\n del usage\n return True\n\n else:\n return False"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning parsed sar info of the current CPU.", "response": "def get_sar_info(self):\n \"\"\"\n Returns parsed sar info\n :return: ``Dictionary``-style list of SAR data\n \"\"\"\n\n try:\n test = self._sarinfo[\"CPU\"]\n del test\n\n except KeyError:\n file_parsed = self.load_file()\n if file_parsed:\n return self._sarinfo\n else:\n return False\n\n except:\n ### DEBUG\n traceback.print_exc()\n return False\n\n return self._sarinfo"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsplits the file into sections separated by the type of info we need for it.", "response": "def _split_file(self, data=''):\n \"\"\"\n Splits SAR output or SAR output file (in ASCII format) in order to\n extract info we need for it, in the format we want.\n :param data: Input data instead of file\n :type data: str.\n :return: ``List``-style of SAR file sections separated by\n the type of info they contain (SAR file sections) without\n parsing what is exactly what at this point\n \"\"\"\n\n # Filename passed checks through __init__\n if ((self.__filename and os.access(self.__filename, os.R_OK))\n or data != ''):\n\n fhandle = None\n\n if data == '':\n try:\n fhandle = 
os.open(self.__filename, os.O_RDONLY)\n except OSError:\n print((\"Couldn't open file %s\" % self.__filename))\n fhandle = None\n\n if fhandle or data != '':\n\n datalength = 0\n \n # Dealing with mmap difference on Windows and Linux\n if platform.system() == 'Windows':\n dataprot = mmap.ACCESS_READ\n else:\n dataprot = mmap.PROT_READ\n\n if data != '':\n fhandle = -1\n datalength = len(data)\n \n if platform.system() == 'Windows': \n dataprot = mmap.ACCESS_READ | mmap.ACCESS_WRITE\n else:\n dataprot = mmap.PROT_READ | mmap.PROT_WRITE \n\n try:\n if platform.system() == 'Windows':\n sarmap = mmap.mmap(\n fhandle, length=datalength, access=dataprot\n )\n else:\n sarmap = mmap.mmap(\n fhandle, length=datalength, prot=dataprot\n )\n if data != '':\n\n sarmap.write(data)\n sarmap.flush()\n sarmap.seek(0, os.SEEK_SET)\n\n except (TypeError, IndexError):\n if data == '':\n os.close(fhandle)\n traceback.print_exc()\n # sys.exit(-1)\n return False\n\n # Here we'll store chunks of SAR file, unparsed\n searchunks = []\n oldchunkpos = 0\n dlpos = sarmap.find('\\n\\n', 0)\n size = 0\n\n if data == '':\n # We can do mmap.size() only on read-only mmaps\n size = sarmap.size()\n else:\n # Otherwise, if data was passed to us,\n # we measure its length\n len(data)\n\n # oldchunkpos = dlpos\n\n while dlpos > -1: # mmap.find() returns -1 on failure.\n\n tempchunk = sarmap.read(dlpos - oldchunkpos)\n searchunks.append(tempchunk.strip())\n\n # We remember position, add 2 for 2 DD's\n # (newspaces in production). 
We have to remember\n # relative value\n oldchunkpos += (dlpos - oldchunkpos) + 2\n\n # We position to new place, to be behind \\n\\n\n # we've looked for.\n try:\n sarmap.seek(2, os.SEEK_CUR)\n except ValueError:\n print(('Out of bounds (%s)!\\n' % (sarmap.tell())))\n # Now we repeat find.\n dlpos = sarmap.find(\"\\n\\n\")\n\n # If it wasn't the end of file, we want last piece of it\n if oldchunkpos < size:\n tempchunk = sarmap[oldchunkpos:]\n searchunks.append(tempchunk.strip())\n\n sarmap.close()\n\n if fhandle != -1:\n os.close(fhandle)\n\n if searchunks:\n return searchunks\n else:\n return False\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _parse_file(self, sar_parts):\n usage = {}\n output = {}\n\n # If sar_parts is a list\n if type(sar_parts) is list:\n restart_pattern = re.compile(PATTERN_RESTART)\n\n \"\"\" !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! \"\"\"\n \"\"\" ********** ATTENTION ******* \"\"\"\n \"\"\" THERE CAN BE MORE THAN ONE SAME SECTION IN ONE FILE \"\"\"\n \"\"\" IF SYSTEM WAS REBOOTED DURING THE DAY \"\"\"\n \"\"\" !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
\"\"\"\n\n for PATTERNSNAME in ALL_PATTERNS:\n patterns = ALL_PATTERNS[PATTERNSNAME]\n rgxpattern = re.compile(patterns['PATTERN'])\n\n for part in sar_parts:\n\n if rgxpattern.search(part):\n\n if PATTERNSNAME in usage:\n usage[PATTERNSNAME] += '\\n' + part\n else:\n usage[PATTERNSNAME] = part\n try:\n first_line = part.split('\\n')[0]\n except IndexError:\n first_line = part\n\n self.__fields[PATTERNSNAME] = self.__find_column(patterns['FIELDS'], first_line)\n\n # Try to match restart time\n if restart_pattern.search(part):\n pieces = part.split()\n self.__restart_times.append(pieces[0])\n del pieces\n\n del sar_parts\n\n # Now we have parts pulled out and combined, do further\n # processing.\n for PATTERNSNAME in ALL_PATTERNS:\n patterns = ALL_PATTERNS[PATTERNSNAME]\n output[PATTERNSNAME] = self.__split_info(usage[PATTERNSNAME], PATTERNSNAME, patterns)\n del usage\n\n return output\n\n return output", "response": "Parses the SAR file and returns a dictionary of information."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef __find_column(self, column_names, part_first_line):\n part_parts = part_first_line.split()\n\n ### DEBUG\n # print(\"Parts: %s\" % (part_parts))\n\n return_dict = {}\n\n counter = 0\n for piece in part_parts:\n for colname in column_names:\n pattern_re = re.compile(colname)\n if pattern_re.search(piece):\n return_dict[colname] = counter\n break\n counter += 1\n\n # Verify the content of the return dictionary, fill the blanks\n # with -1s :-)\n for colver in column_names:\n try:\n tempval = return_dict[colver]\n del tempval\n except KeyError:\n return_dict[colver] = None\n\n return return_dict", "response": "Find the column for the column_name in sar type definition and returns its index."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef __split_info(self, info_part, patternsname, patterns):\n\n pattern = patterns['PATTERN']\n 
if pattern == '':\n return False\n\n return_dict = {}\n\n pattern_re = re.compile(pattern)\n\n for part_line in info_part.split('\\n'):\n\n if part_line.strip() != '' and not pattern_re.search(part_line):\n\n # Take care of AM/PM timestamps in SAR file\n is_24hr = True\n is_AM = False\n\n if part_line[9:11] == 'AM':\n is_24hr = False\n is_AM = True\n elif part_line[9:11] == 'PM':\n is_24hr = False\n is_AM = False\n\n if is_24hr is False:\n part_line = ('%s_%s XX %s' % (part_line[:8], part_line[9:11], part_line[12:]))\n\n # Line is not empty, nor it's header.\n # let's hit the road Jack!\n elems = part_line.split()\n full_time = elems[0].strip()\n\n if full_time != \"Average:\":\n\n # Convert time to 24hr format if needed\n if is_24hr is False:\n full_time = full_time[:-3]\n\n # 12 is a bitch in AM/PM notation\n if full_time[:2] == '12':\n if is_AM is True:\n full_time = ('%s:%s' % ('00', full_time[3:]))\n is_AM = not is_AM\n\n if is_AM is False and full_time[0:2] != '00':\n hours = int(full_time[:2]) + 12\n hours = ('%02d' % (hours,))\n full_time = ('%s:%s' % (hours, full_time[3:]))\n\n try:\n blah = return_dict[full_time]\n del blah\n except KeyError:\n return_dict[full_time] = {}\n\n fields = self.__fields[patternsname]\n pairs = patterns[\"PAIRS\"]\n\n for sectionname in pairs.iterkeys():\n\n value = elems[fields[pairs[sectionname]]]\n\n if sectionname == 'membuffer' or \\\n sectionname == 'memcache' or \\\n sectionname == 'memfree' or \\\n sectionname == 'memused' or \\\n sectionname == 'swapfree' or \\\n sectionname == 'swapused':\n value = int(value)\n else:\n value = float(value)\n\n if patternsname == 'CPU':\n cpuid = elems[(1 if is_24hr is True else 2)]\n try:\n blah = return_dict[full_time][cpuid]\n del blah\n except KeyError:\n return_dict[full_time][cpuid] = {}\n return_dict[full_time][cpuid][sectionname] = \\\n value\n else:\n return_dict[full_time][sectionname] = value\n\n return return_dict", "response": "Splits info part into logical stuff"} 
{"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nparse the ISO - style date of the SAR file and returns the ISO - style date of the SAR file.", "response": "def __get_filedate(self):\n \"\"\"\n Parses (extracts) date of SAR data, from the SAR output file itself.\n :return: ISO-style (YYYY-MM-DD) date from SAR file\n \"\"\"\n\n if os.access(self.__filename, os.R_OK):\n\n # Read first line of the file\n try:\n sar_file = open(self.__filename, \"r\")\n\n except OSError:\n ### DEBUG\n traceback.print_exc()\n return False\n\n except:\n ### DEBUG\n traceback.print_exc()\n return False\n\n firstline = sar_file.readline()\n info = firstline.split()\n sar_file.close()\n\n try:\n self.__file_date = info[3]\n\n except KeyError:\n self.__file_date = ''\n return False\n\n except:\n ### DEBUG\n traceback.print_exc()\n return False\n\n return True\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nmoving a population between different states using information from a TransitionSet.", "response": "def _next_state(index, event_time, transition_set, population_view):\n \"\"\"Moves a population between different states using information from a `TransitionSet`.\n\n Parameters\n ----------\n index : iterable of ints\n An iterable of integer labels for the simulants.\n event_time : pandas.Timestamp\n When this transition is occurring.\n transition_set : TransitionSet\n A set of potential transitions available to the simulants.\n population_view : vivarium.framework.population.PopulationView\n A view of the internal state of the simulation.\n \"\"\"\n\n if len(transition_set) == 0 or index.empty:\n return\n\n outputs, decisions = transition_set.choose_new_state(index)\n groups = _groupby_new_state(index, outputs, decisions)\n\n if groups:\n for output, affected_index in sorted(groups, key=lambda x: str(x[0])):\n if output == 'null_transition':\n pass\n elif isinstance(output, Transient):\n if not 
isinstance(output, State):\n raise ValueError('Invalid transition output: {}'.format(output))\n output.transition_effect(affected_index, event_time, population_view)\n output.next_state(affected_index, event_time, population_view)\n elif isinstance(output, State):\n output.transition_effect(affected_index, event_time, population_view)\n else:\n raise ValueError('Invalid transition output: {}'.format(output))"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngrouping the simulants in the index by their new output state.", "response": "def _groupby_new_state(index, outputs, decisions):\n \"\"\"Groups the simulants in the index by their new output state.\n\n Parameters\n ----------\n index : iterable of ints\n An iterable of integer labels for the simulants.\n outputs : iterable\n A list of possible output states.\n decisions : `pandas.Series`\n A series containing the name of the next state for each simulant in the index.\n\n Returns\n -------\n iterable of 2-tuples\n The first item in each tuple is the name of an output state and the second item\n is a `pandas.Index` representing the simulants to transition into that state.\n \"\"\"\n output_map = {o: i for i, o in enumerate(outputs)}\n groups = pd.Series(index).groupby([output_map[d] for d in decisions])\n results = [(outputs[i], pd.Index(sub_group.values)) for i, sub_group in groups]\n selected_outputs = [o for o, _ in results]\n for output in outputs:\n if output not in selected_outputs:\n results.append((output, pd.Index([])))\n return results"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nmove a population between different states using information this state s transition_set.", "response": "def next_state(self, index, event_time, population_view):\n \"\"\"Moves a population between different states using information this state's `transition_set`.\n\n Parameters\n ----------\n index : iterable of ints\n An iterable of integer labels for 
the simulants.\n event_time : pandas.Timestamp\n When this transition is occurring.\n population_view : vivarium.framework.population.PopulationView\n A view of the internal state of the simulation.\n \"\"\"\n return _next_state(index, event_time, self.transition_set, population_view)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef transition_effect(self, index, event_time, population_view):\n population_view.update(pd.Series(self.state_id, index=index))\n self._transition_side_effect(index, event_time)", "response": "Updates the simulation state and triggers any side - effects associated with entering this state."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef add_transition(self, output,\n probability_func=lambda index: np.ones(len(index), dtype=float),\n triggered=Trigger.NOT_TRIGGERED):\n \"\"\"Builds a transition from this state to the given state.\n\n output : State\n The end state after the transition.\n \"\"\"\n t = Transition(self, output, probability_func=probability_func, triggered=triggered)\n self.transition_set.append(t)\n return t", "response": "Builds a transition from this state to the given state."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nperforms this component s simulation setup and return sub - components.", "response": "def setup(self, builder):\n \"\"\"Performs this component's simulation setup and return sub-components.\n\n Parameters\n ----------\n builder : `engine.Builder`\n Interface to several simulation tools including access to common random\n number generation, in particular.\n\n Returns\n -------\n iterable\n This component's sub-components.\n \"\"\"\n builder.components.add_components(self.transitions)\n self.random = builder.randomness.get_stream(self.key)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nselects a 
new state for each simulant in the index.", "response": "def choose_new_state(self, index):\n \"\"\"Chooses a new state for each simulant in the index.\n\n Parameters\n ----------\n index : iterable of ints\n An iterable of integer labels for the simulants.\n\n Returns\n -------\n outputs : list\n The possible end states of this set of transitions.\n decisions: `pandas.Series`\n A series containing the name of the next state for each simulant in the index.\n \"\"\"\n outputs, probabilities = zip(*[(transition.output_state, np.array(transition.probability(index)))\n for transition in self.transitions])\n probabilities = np.transpose(probabilities)\n outputs, probabilities = self._normalize_probabilities(outputs, probabilities)\n return outputs, self.random.choice(index, outputs, probabilities)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nnormalizing probabilities to sum to 1 and add a null transition if desired.", "response": "def _normalize_probabilities(self, outputs, probabilities):\n \"\"\"Normalize probabilities to sum to 1 and add a null transition if desired.\n\n Parameters\n ----------\n outputs : iterable\n List of possible end states corresponding to this containers transitions.\n probabilities : iterable of iterables\n A set of probability weights whose columns correspond to the end states in `outputs`\n and whose rows correspond to each simulant undergoing the transition.\n\n Returns\n -------\n outputs: list\n The original output list expanded to include a null transition (a transition back\n to the starting state) if requested.\n probabilities : ndarray\n The original probabilities rescaled to sum to 1 and potentially expanded to\n include a null transition weight.\n \"\"\"\n outputs = list(outputs)\n total = np.sum(probabilities, axis=1)\n if self.allow_null_transition or not np.any(total):\n if np.any(total > 1+1e-08): # Accommodate rounding errors\n raise ValueError(\n \"Null transition requested with un-normalized 
probability weights: {}\".format(probabilities))\n total[total > 1] = 1 # Correct allowed rounding errors.\n probabilities = np.concatenate([probabilities, (1-total)[:, np.newaxis]], axis=1)\n outputs.append('null_transition')\n return outputs, probabilities/(np.sum(probabilities, axis=1)[:, np.newaxis])"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef setup(self, builder):\n builder.components.add_components(self.states)\n self.population_view = builder.population.get_view([self.state_column])", "response": "Performs this component s simulation setup and return sub - components."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfind the population in each state and moves them to the next state.", "response": "def transition(self, index, event_time):\n \"\"\"Finds the population in each state and moves them to the next state.\n\n Parameters\n ----------\n index : iterable of ints\n An iterable of integer labels for the simulants.\n event_time : pandas.Timestamp\n The time at which this transition occurs.\n \"\"\"\n for state, affected in self._get_state_pops(index):\n if not affected.empty:\n state.next_state(affected.index, event_time, self.population_view.subview([self.state_column]))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef to_dot(self):\n from graphviz import Digraph\n dot = Digraph(format='png')\n for state in self.states:\n if isinstance(state, TransientState):\n dot.node(state.state_id, style='dashed')\n else:\n dot.node(state.state_id)\n for transition in state.transition_set:\n dot.edge(state.state_id, transition.output.state_id, transition.label())\n return dot", "response": "Returns a graphviz. 
Digraph representation of this state machine."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef setup_components(self, builder, configuration):\n self._managers = _setup_components(builder, self._managers, configuration)\n self._components = _setup_components(builder, self._components, configuration)", "response": "Apply component level configuration defaults to the global config and run setup methods on the components and managers."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_dataset(catalog, identifier=None, title=None):\n\n msg = \"Se requiere un 'identifier' o 'title' para buscar el dataset.\"\n assert identifier or title, msg\n catalog = read_catalog_obj(catalog)\n\n # b\u00fasqueda optimizada por identificador\n if identifier:\n try:\n return _get_dataset_by_identifier(catalog, identifier)\n except BaseException:\n try:\n catalog._build_index()\n return _get_dataset_by_identifier(catalog, identifier)\n except BaseException:\n filtered_datasets = get_datasets(\n catalog, {\"dataset\": {\"identifier\": identifier}})\n elif title: # TODO: is this required?\n filtered_datasets = get_datasets(\n catalog, {\"dataset\": {\"title\": title}})\n\n if len(filtered_datasets) > 1:\n if identifier:\n raise ce.DatasetIdRepetitionError(\n identifier, filtered_datasets)\n elif title:\n # TODO: Improve exceptions module!\n raise ce.DatasetTitleRepetitionError(title, filtered_datasets)\n elif len(filtered_datasets) == 0:\n return None\n else:\n return filtered_datasets[0]", "response": "Devuelve un Dataset del cat\u00e1logo."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_catalog_metadata(catalog, exclude_meta_fields=None):\n exclude_meta_fields = exclude_meta_fields or []\n catalog_dict_copy = catalog.copy()\n del catalog_dict_copy[\"dataset\"]\n\n for excluded_meta_field in 
exclude_meta_fields:\n catalog_dict_copy.pop(excluded_meta_field, None)\n\n return catalog_dict_copy", "response": "Devuelve s\u00f3lo la metadata de nivel cat\u00e1logo."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef field_to_markdown(field):\n if \"title\" in field:\n field_title = \"**{}**\".format(field[\"title\"])\n else:\n raise Exception(\"Es necesario un `title` para describir un campo.\")\n\n field_type = \" ({})\".format(field[\"type\"]) if \"type\" in field else \"\"\n field_desc = \": {}\".format(\n field[\"description\"]) if \"description\" in field else \"\"\n\n text_template = \"{title}{type}{description}\"\n text = text_template.format(title=field_title, type=field_type,\n description=field_desc)\n\n return text", "response": "Genera texto en markdown a partir de los metadatos de un campo."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _dataset_report_helper(cls, dataset, catalog_homepage=None):\n publisher_name = helpers.traverse_dict(dataset, [\"publisher\", \"name\"])\n\n languages = cls._stringify_list(dataset.get(\"language\"))\n super_themes = cls._stringify_list(dataset.get(\"superTheme\"))\n themes = cls._stringify_list(dataset.get(\"theme\"))\n\n def _stringify_distribution(distribution):\n title = distribution.get(\"title\")\n url = distribution.get(\"downloadURL\")\n\n return \"\\\"{}\\\": {}\".format(title, url)\n\n distributions = [d for d in dataset[\"distribution\"]\n if isinstance(d, dict)]\n\n # crea lista de distribuciones\n distributions_list = None\n if isinstance(distributions, list):\n distributions_strings = [\n _stringify_distribution(d) for d in distributions\n ]\n distributions_list = \"\\n\\n\".join(distributions_strings)\n\n # crea lista de formatos\n distributions_formats = json.dumps(\n helpers.count_distribution_formats_dataset(dataset))\n\n fields = OrderedDict()\n 
fields[\"dataset_identifier\"] = dataset.get(\"identifier\")\n fields[\"dataset_title\"] = dataset.get(\"title\")\n fields[\"dataset_accrualPeriodicity\"] = dataset.get(\n \"accrualPeriodicity\")\n fields[\"dataset_description\"] = dataset.get(\"description\")\n fields[\"dataset_publisher_name\"] = publisher_name\n fields[\"dataset_superTheme\"] = super_themes\n fields[\"dataset_theme\"] = themes\n fields[\"dataset_landingPage\"] = dataset.get(\"landingPage\")\n fields[\"dataset_landingPage_generated\"] = cls._generate_landingPage(\n catalog_homepage, dataset.get(\"identifier\")\n )\n fields[\"dataset_issued\"] = dataset.get(\"issued\")\n fields[\"dataset_modified\"] = dataset.get(\"modified\")\n fields[\"distributions_formats\"] = distributions_formats\n fields[\"distributions_list\"] = distributions_list\n fields[\"dataset_license\"] = dataset.get(\"license\")\n fields[\"dataset_language\"] = languages\n fields[\"dataset_spatial\"] = dataset.get(\"spatial\")\n fields[\"dataset_temporal\"] = dataset.get(\"temporal\")\n\n return fields", "response": "A helper method that returns a dict that contains the metadata for a dataset."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _catalog_report_helper(catalog, catalog_validation, url, catalog_id,\n catalog_org):\n \"\"\"Toma un dict con la metadata de un cat\u00e1logo, y devuelve un dict con\n los valores que catalog_report() usa para reportar sobre \u00e9l.\n\n Args:\n catalog (dict): Diccionario con la metadata de un cat\u00e1logo.\n validation (dict): Resultado, \u00fanicamente a nivel cat\u00e1logo, de la\n validaci\u00f3n completa de `catalog`.\n\n Returns:\n dict: Diccionario con los campos a nivel cat\u00e1logo que requiere\n catalog_report().\n \"\"\"\n fields = OrderedDict()\n fields[\"catalog_metadata_url\"] = url\n fields[\"catalog_federation_id\"] = catalog_id\n fields[\"catalog_federation_org\"] = catalog_org\n fields[\"catalog_title\"] = 
catalog.get(\"title\")\n fields[\"catalog_description\"] = catalog.get(\"description\")\n fields[\"valid_catalog_metadata\"] = (\n 1 if catalog_validation[\"status\"] == \"OK\" else 0)\n\n return fields", "response": "Helper function for catalog_report."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _dataset_report(\n self, dataset, dataset_validation, dataset_index,\n catalog_fields, harvest='none', report=None, catalog_homepage=None\n ):\n \"\"\" Genera una l\u00ednea del `catalog_report`, correspondiente a un dataset\n de los que conforman el cat\u00e1logo analizado.\"\"\"\n\n # hace un breve an\u00e1lisis de qa al dataset\n good_qa, notes = self._dataset_qa(dataset)\n\n dataset_report = OrderedDict(catalog_fields)\n dataset_report[\"valid_dataset_metadata\"] = (\n 1 if dataset_validation[\"status\"] == \"OK\" else 0)\n dataset_report[\"dataset_index\"] = dataset_index\n\n if isinstance(harvest, list):\n dataset_report[\"harvest\"] = 1 if dataset[\"title\"] in harvest else 0\n elif harvest == 'all':\n dataset_report[\"harvest\"] = 1\n elif harvest == 'none':\n dataset_report[\"harvest\"] = 0\n elif harvest == 'valid':\n dataset_report[\"harvest\"] = (\n int(dataset_report[\"valid_dataset_metadata\"]))\n elif harvest == 'good':\n valid_metadata = int(dataset_report[\"valid_dataset_metadata\"]) == 1\n dataset_report[\"harvest\"] = 1 if valid_metadata and good_qa else 0\n\n elif harvest == 'report':\n if not report:\n raise ValueError(\"\"\"\nUsted eligio 'report' como criterio de harvest, pero no proveyo un valor para\nel argumento 'report'. Por favor, intentelo nuevamente.\"\"\")\n\n datasets_to_harvest = self._extract_datasets_to_harvest(report)\n dataset_report[\"harvest\"] = (\n 1 if (dataset_report[\"catalog_metadata_url\"],\n dataset.get(\"title\")) in datasets_to_harvest\n else 0)\n else:\n raise ValueError(\"\"\"\n{} no es un criterio de harvest reconocido. 
Pruebe con 'all', 'none', 'valid' o\n'report'.\"\"\".format(harvest))\n\n dataset_report.update(\n self._dataset_report_helper(\n dataset, catalog_homepage=catalog_homepage)\n )\n\n dataset_report[\"notas\"] = \"\\n\\n\".join(notes)\n\n return dataset_report.copy()", "response": "Genera una l\u00ednea del dataset_report."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef catalog_report(self, catalog, harvest='none', report=None,\n catalog_id=None, catalog_homepage=None,\n catalog_org=None):\n \"\"\"Genera un reporte sobre los datasets de un \u00fanico cat\u00e1logo.\n\n Args:\n catalog (dict, str o unicode): Representaci\u00f3n externa (path/URL) o\n interna (dict) de un cat\u00e1logo.\n harvest (str): Criterio de cosecha ('all', 'none',\n 'valid', 'report' o 'good').\n\n Returns:\n list: Lista de diccionarios, con un elemento por cada dataset\n presente en `catalog`.\n \"\"\"\n\n url = catalog if isinstance(catalog, string_types) else None\n catalog = readers.read_catalog(catalog)\n\n validation = self.validate_catalog(catalog)\n catalog_validation = validation[\"error\"][\"catalog\"]\n datasets_validations = validation[\"error\"][\"dataset\"]\n\n catalog_fields = self._catalog_report_helper(\n catalog, catalog_validation, url, catalog_id, catalog_org\n )\n\n if \"dataset\" in catalog and isinstance(catalog[\"dataset\"], list):\n datasets = [d if isinstance(d, dict) else {} for d in\n catalog[\"dataset\"]]\n else:\n datasets = []\n\n catalog_report = [\n self._dataset_report(\n dataset, datasets_validations[index], index,\n catalog_fields, harvest, report=report,\n catalog_homepage=catalog_homepage\n )\n for index, dataset in enumerate(datasets)\n ]\n\n return catalog_report", "response": "Genera un reporte sobre los datasets de un \u00fanico cat\u00e1logo."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ngenerate a list of dicts that represent un archivo un 
cat\u00e1logo.", "response": "def generate_harvestable_catalogs(self, catalogs, harvest='all',\n report=None, export_path=None):\n \"\"\"Filtra los cat\u00e1logos provistos seg\u00fan el criterio determinado en\n `harvest`.\n\n Args:\n catalogs (str, dict o list): Uno (str o dict) o varios (list de\n strs y/o dicts) cat\u00e1logos.\n harvest (str): Criterio para determinar qu\u00e9 datasets conservar de\n cada cat\u00e1logo ('all', 'none', 'valid' o 'report').\n report (list o str): Tabla de reporte generada por\n generate_datasets_report() como lista de diccionarios o archivo\n en formato XLSX o CSV. S\u00f3lo se usa cuando `harvest=='report'`.\n export_path (str): Path a un archivo JSON o directorio donde\n exportar los cat\u00e1logos filtrados. Si termina en \".json\" se\n exportar\u00e1 la lista de cat\u00e1logos a un \u00fanico archivo. Si es un\n directorio, se guardar\u00e1 en \u00e9l un JSON por cat\u00e1logo. Si se\n especifica `export_path`, el m\u00e9todo no devolver\u00e1 nada.\n\n Returns:\n list of dicts: Lista de cat\u00e1logos.\n \"\"\"\n assert isinstance(catalogs, string_types + (dict, list))\n # Si se pasa un \u00fanico cat\u00e1logo, genero una lista que lo contenga\n if isinstance(catalogs, string_types + (dict,)):\n catalogs = [catalogs]\n\n harvestable_catalogs = [readers.read_catalog(c) for c in catalogs]\n catalogs_urls = [catalog if isinstance(catalog, string_types)\n else None for catalog in catalogs]\n\n # aplica los criterios de cosecha\n if harvest == 'all':\n pass\n elif harvest == 'none':\n for catalog in harvestable_catalogs:\n catalog[\"dataset\"] = []\n elif harvest == 'valid':\n report = self.generate_datasets_report(catalogs, harvest)\n return self.generate_harvestable_catalogs(\n catalogs=catalogs, harvest='report', report=report,\n export_path=export_path)\n elif harvest == 'report':\n if not report:\n raise ValueError(\"\"\"\nUsted eligio 'report' como criterio de harvest, pero no proveyo un valor para\nel argumento 'report'. 
Por favor, intentelo nuevamente.\"\"\")\n datasets_to_harvest = self._extract_datasets_to_harvest(report)\n for idx_cat, catalog in enumerate(harvestable_catalogs):\n catalog_url = catalogs_urls[idx_cat]\n if (\"dataset\" in catalog and\n isinstance(catalog[\"dataset\"], list)):\n catalog[\"dataset\"] = [\n dataset for dataset in catalog[\"dataset\"]\n if (catalog_url, dataset.get(\"title\")) in\n datasets_to_harvest\n ]\n else:\n catalog[\"dataset\"] = []\n else:\n raise ValueError(\"\"\"\n{} no es un criterio de harvest reconocido. Pruebe con 'all', 'none', 'valid' o\n'report'.\"\"\".format(harvest))\n\n # devuelve los cat\u00e1logos harvesteables\n if export_path and os.path.isdir(export_path):\n # Creo un JSON por cat\u00e1logo\n for idx, catalog in enumerate(harvestable_catalogs):\n filename = os.path.join(export_path, \"catalog_{}\".format(idx))\n writers.write_json(catalog, filename)\n elif export_path:\n # Creo un \u00fanico JSON con todos los cat\u00e1logos\n writers.write_json(harvestable_catalogs, export_path)\n else:\n return harvestable_catalogs"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nextracts datasets to harvest.", "response": "def _extract_datasets_to_harvest(cls, report):\n \"\"\"Extrae de un reporte los datos necesarios para reconocer qu\u00e9\n datasets marcar para cosecha en cualquier generador.\n\n Args:\n report (str o list): Reporte (lista de dicts) o path a uno.\n\n Returns:\n list: Lista de tuplas con los t\u00edtulos de cat\u00e1logo y dataset de cada\n reporte extra\u00eddo.\n \"\"\"\n assert isinstance(report, string_types + (list,))\n\n # Si `report` es una lista de tuplas con longitud 2, asumimos que es un\n # reporte procesado para extraer los datasets a harvestear. 
Se devuelve\n # intacta.\n if (isinstance(report, list) and all([isinstance(x, tuple) and\n len(x) == 2 for x in report])):\n return report\n\n table = readers.read_table(report)\n table_keys = table[0].keys()\n expected_keys = [\"catalog_metadata_url\", \"dataset_title\",\n \"dataset_accrualPeriodicity\"]\n\n # Verifico la presencia de las claves b\u00e1sicas de un config de harvester\n for key in expected_keys:\n if key not in table_keys:\n raise KeyError(\"\"\"\nEl reporte no contiene la clave obligatoria {}. Pruebe con otro archivo.\n\"\"\".format(key))\n\n if \"harvest\" in table_keys:\n # El archivo es un reporte de datasets.\n datasets_to_harvest = [\n (row[\"catalog_metadata_url\"], row[\"dataset_title\"]) for row in\n table if int(row[\"harvest\"])]\n else:\n # El archivo es un config de harvester.\n datasets_to_harvest = [\n (row[\"catalog_metadata_url\"], row[\"dataset_title\"]) for row in\n table]\n\n return datasets_to_harvest"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef import_by_path(path: str) -> Callable:\n\n module_path, _, class_name = path.rpartition('.')\n return getattr(import_module(module_path), class_name)", "response": "Imports a class or function given it s absolute path."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreading a catalog from the file system.", "response": "def read_catalog(catalog, default_values=None, catalog_format=None):\n \"\"\"Toma una representaci\u00f3n cualquiera de un cat\u00e1logo, y devuelve su\n representaci\u00f3n interna (un diccionario de Python con su metadata.)\n\n Si recibe una representaci\u00f3n _interna_ (un diccionario), lo devuelve\n intacto. 
Si recibe una representaci\u00f3n _externa_ (path/URL a un archivo\n JSON/XLSX), devuelve su represetaci\u00f3n interna, es decir, un diccionario.\n\n Args:\n catalog (dict or str): Representaci\u00f3n externa/interna de un cat\u00e1logo.\n Una representaci\u00f3n _externa_ es un path local o una URL remota a un\n archivo con la metadata de un cat\u00e1logo, en formato JSON o XLSX. La\n representaci\u00f3n _interna_ de un cat\u00e1logo es un diccionario.\n\n Returns:\n dict: Representaci\u00f3n interna de un cat\u00e1logo para uso en las funciones\n de esta librer\u00eda.\n \"\"\"\n unknown_catalog_repr_msg = \\\n \"\"\"No se pudo inferir una representaci\u00f3n v\u00e1lida de un cat\u00e1logo del\n par\u00e1metro provisto: {}.\"\"\".format(catalog)\n assert isinstance(catalog, string_types + (dict,)\n ), unknown_catalog_repr_msg\n\n if isinstance(catalog, dict):\n catalog_dict = catalog\n\n else:\n # catalog es una URL remota o un path local\n suffix = catalog.split(\".\")[-1].strip(\"/\")\n if suffix in ('json', 'xlsx'):\n catalog_format = catalog_format or suffix\n if catalog_format == \"xlsx\":\n try:\n catalog_dict = read_xlsx_catalog(catalog)\n except openpyxl_exceptions + \\\n (ValueError, AssertionError, IOError, BadZipfile) as e:\n raise ce.NonParseableCatalog(catalog, str(e))\n elif catalog_format == \"json\":\n try:\n catalog_dict = read_json(catalog)\n except(ValueError, TypeError, IOError) as e:\n raise ce.NonParseableCatalog(catalog, str(e))\n elif catalog_format == \"ckan\":\n catalog_dict = read_ckan_catalog(catalog)\n else:\n catalog_dict = read_suffixless_catalog(catalog)\n\n # si se pasaron valores default, los aplica al cat\u00e1logo le\u00eddo\n if default_values:\n _apply_default_values(catalog_dict, default_values)\n\n return catalog_dict"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _apply_default_values(catalog, default_values):\n\n for field, default_value in 
iteritems(default_values):\n class_metadata = field.split(\"_\")[0]\n field_json_path = field.split(\"_\")[1:]\n\n # valores default de cat\u00e1logo\n if class_metadata == \"catalog\":\n _set_default_value(catalog, field_json_path, default_value)\n\n # valores default de dataset\n elif class_metadata == \"dataset\":\n for dataset in catalog[\"dataset\"]:\n _set_default_value(dataset, field_json_path, default_value)\n\n # valores default de distribuci\u00f3n\n elif class_metadata == \"distribution\":\n for dataset in catalog[\"dataset\"]:\n for distribution in dataset[\"distribution\"]:\n _set_default_value(\n distribution, field_json_path, default_value)\n\n # valores default de field\n elif class_metadata == \"field\":\n for dataset in catalog[\"dataset\"]:\n for distribution in dataset[\"distribution\"]:\n\n # campo \"field\" en una \"distribution\" no es obligatorio\n if distribution.get(\"field\"):\n for field in distribution[\"field\"]:\n _set_default_value(\n field, field_json_path, default_value)", "response": "Applies default values to the internal valores of the internal valores of the internal valores of the internal valores of the internal valores of the internal valores."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _set_default_value(dict_obj, keys, value):\n variable = dict_obj\n\n if len(keys) == 1:\n if not variable.get(keys[0]):\n variable[keys[0]] = value\n\n else:\n for idx, field in enumerate(keys):\n if idx < len(keys) - 1:\n variable = variable[field]\n\n if not variable.get(keys[-1]):\n variable[keys[-1]] = value", "response": "Seta valor en diccionario anidado siguiendo lista de keys."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef read_json(json_path_or_url):\n assert isinstance(json_path_or_url, string_types)\n\n parsed_url = urlparse(json_path_or_url)\n if parsed_url.scheme in [\"http\", \"https\"]:\n res = 
requests.get(json_path_or_url, verify=False)\n json_dict = json.loads(res.content, encoding='utf-8')\n\n else:\n # Si json_path_or_url parece ser una URL remota, lo advierto.\n path_start = parsed_url.path.split(\".\")[0]\n if path_start == \"www\" or path_start.isdigit():\n warnings.warn(\"\"\"\nLa direcci\u00f3n del archivo JSON ingresada parece una URL, pero no comienza\ncon 'http' o 'https' as\u00ed que ser\u00e1 tratada como una direcci\u00f3n local. \u00bfTal vez\nquiso decir 'http://{}'?\"\"\".format(json_path_or_url).encode(\"utf-8\"))\n\n with io.open(json_path_or_url, encoding='utf-8') as json_file:\n json_dict = json.load(json_file)\n\n return json_dict", "response": "Read a JSON file and return a dict."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef read_xlsx_catalog(xlsx_path_or_url, logger=None):\n logger = logger or pydj_logger\n assert isinstance(xlsx_path_or_url, string_types)\n\n parsed_url = urlparse(xlsx_path_or_url)\n if parsed_url.scheme in [\"http\", \"https\"]:\n res = requests.get(xlsx_path_or_url, verify=False)\n tmpfilename = \".tmpfile.xlsx\"\n with io.open(tmpfilename, 'wb') as tmpfile:\n tmpfile.write(res.content)\n catalog_dict = read_local_xlsx_catalog(tmpfilename, logger)\n os.remove(tmpfilename)\n else:\n # Si xlsx_path_or_url parece ser una URL remota, lo advierto.\n path_start = parsed_url.path.split(\".\")[0]\n if path_start == \"www\" or path_start.isdigit():\n warnings.warn(\"\"\"\nLa direcci\u00f3n del archivo JSON ingresada parece una URL, pero no comienza\ncon 'http' o 'https' as\u00ed que ser\u00e1 tratada como una direcci\u00f3n local. 
\u00bfTal vez\nquiso decir 'http://{}'?\n \"\"\".format(xlsx_path_or_url).encode(\"utf8\"))\n\n catalog_dict = read_local_xlsx_catalog(xlsx_path_or_url)\n\n return catalog_dict", "response": "Read a dictionary of all the data in an XLSX file."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _make_contact_point(dataset):\n keys = [k for k in [\"contactPoint_fn\", \"contactPoint_hasEmail\"]\n if k in dataset]\n if keys:\n dataset[\"contactPoint\"] = {\n key.replace(\"contactPoint_\", \"\"): dataset.pop(key) for key in keys\n }\n return dataset", "response": "En una diccionario a una dataset de los claves necesarias."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the index of the dataset that matches the identifier.", "response": "def _get_dataset_index(catalog, dataset_identifier, dataset_title,\n logger=None):\n \"\"\"Devuelve el \u00edndice de un dataset en el cat\u00e1logo en funci\u00f3n de su\n identificador\"\"\"\n logger = logger or pydj_logger\n matching_datasets = []\n\n for idx, dataset in enumerate(catalog[\"catalog_dataset\"]):\n if dataset[\"dataset_identifier\"] == dataset_identifier:\n if dataset[\"dataset_title\"] == dataset_title:\n matching_datasets.append(idx)\n else:\n logger.warning(\n ce.DatasetUnexpectedTitle(\n dataset_identifier,\n dataset[\"dataset_title\"],\n dataset_title\n )\n )\n\n # Debe haber exactamente un dataset con el identificador provisto.\n no_dsets_msg = \"No hay ningun dataset con el identifier {}\".format(\n dataset_identifier)\n many_dsets_msg = \"Hay mas de un dataset con el identifier {}: {}\".format(\n dataset_identifier, matching_datasets)\n if len(matching_datasets) == 0:\n logger.error(no_dsets_msg)\n return None\n elif len(matching_datasets) > 1:\n logger.error(many_dsets_msg)\n return None\n else:\n return matching_datasets[0]"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in 
Python 3, explain what it does\ndef _get_distribution_indexes(catalog, dataset_identifier, dataset_title,\n distribution_identifier, distribution_title,\n logger=None):\n \"\"\"Devuelve el \u00edndice de una distribuci\u00f3n en su dataset en funci\u00f3n de su\n t\u00edtulo, junto con el \u00edndice de su dataset padre en el cat\u00e1logo, en\n funci\u00f3n de su identificador\"\"\"\n logger = logger or pydj_logger\n dataset_index = _get_dataset_index(\n catalog, dataset_identifier, dataset_title)\n if dataset_index is None:\n return None, None\n else:\n dataset = catalog[\"catalog_dataset\"][dataset_index]\n\n matching_distributions = []\n\n for idx, distribution in enumerate(dataset[\"dataset_distribution\"]):\n if distribution[\"distribution_identifier\"] == distribution_identifier:\n if distribution[\"distribution_title\"] == distribution_title:\n matching_distributions.append(idx)\n else:\n logger.warning(\n ce.DistributionUnexpectedTitle(\n distribution_identifier,\n distribution[\"distribution_title\"],\n distribution_title\n )\n )\n\n # Debe haber exactamente una distribuci\u00f3n con los identicadores provistos\n if len(matching_distributions) == 0:\n logger.warning(\n ce.DistributionTitleNonExistentError(\n distribution_title, dataset_identifier\n )\n )\n return dataset_index, None\n elif len(matching_distributions) > 1:\n logger.warning(\n ce.DistributionTitleRepetitionError(\n distribution_title, dataset_identifier, matching_distributions)\n )\n return dataset_index, None\n else:\n distribution_index = matching_distributions[0]\n return dataset_index, distribution_index", "response": "Get the index of the distribution in the catalog."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef read_local_xlsx_catalog(xlsx_path, logger=None):\n logger = logger or pydj_logger\n\n wb = pyxl.load_workbook(open(xlsx_path, 'rb'), data_only=True,\n read_only=True)\n\n # Toma las hojas del modelo, resistente a 
may\u00fasuculas/min\u00fasculas\n ws_catalog = helpers.get_ws_case_insensitive(wb, \"catalog\")\n ws_dataset = helpers.get_ws_case_insensitive(wb, \"dataset\")\n ws_distribution = helpers.get_ws_case_insensitive(wb, \"distribution\")\n ws_theme = helpers.get_ws_case_insensitive(wb, \"theme\")\n ws_field = helpers.get_ws_case_insensitive(wb, \"field\")\n\n catalogs = helpers.sheet_to_table(ws_catalog)\n # Debe haber exactamente un cat\u00e1logo en la hoja 'Catalog'\n assert (len(catalogs) != 0), \"No hay ningun cat\u00e1logo en la hoja 'Catalog'\"\n assert (len(catalogs) < 2), \"Hay mas de un cat\u00e1logo en la hoja 'Catalog'\"\n # Genero el cat\u00e1logo base\n catalog = catalogs[0]\n\n # Agrego themes y datasets al cat\u00e1logo\n catalog[\"catalog_dataset\"] = helpers.sheet_to_table(ws_dataset)\n\n # Me aseguro que los identificadores de dataset se guarden como cadenas\n for dataset in catalog[\"catalog_dataset\"]:\n dataset[\"dataset_identifier\"] = text_type(\n dataset[\"dataset_identifier\"])\n\n catalog[\"catalog_themeTaxonomy\"] = (\n helpers.sheet_to_table(ws_theme))\n\n # Agrego lista de distribuciones vac\u00eda a cada dataset\n for dataset in catalog[\"catalog_dataset\"]:\n dataset[\"dataset_distribution\"] = []\n\n # Ubico cada distribuci\u00f3n en su dataset\n distributions = helpers.sheet_to_table(ws_distribution)\n for distribution in distributions:\n # Me aseguro que los identificadores se guarden como cadenas\n distribution[\"dataset_identifier\"] = text_type(\n distribution[\"dataset_identifier\"])\n distribution[\"distribution_identifier\"] = text_type(\n distribution[\"distribution_identifier\"])\n\n dataset_index = _get_dataset_index(\n catalog, distribution[\"dataset_identifier\"],\n distribution[\"dataset_title\"], logger)\n if dataset_index is None:\n logger.error(\"\"\"La distribucion con ID '{}' y titulo '{}' no se\npudo asignar a un dataset, y no figurara en el data.json de salida.\"\"\".format(\n 
distribution[\"distribution_identifier\"],\n distribution[\"distribution_title\"]))\n else:\n dataset = catalog[\"catalog_dataset\"][dataset_index]\n dataset[\"dataset_distribution\"].append(distribution)\n\n # Ubico cada campo en su distribuci\u00f3n\n fields = helpers.sheet_to_table(ws_field)\n for idx, field in enumerate(fields):\n # Me aseguro que los identificadores se guarden como cadenas\n field[\"dataset_identifier\"] = text_type(field[\"dataset_identifier\"])\n field[\"distribution_identifier\"] = text_type(\n field[\"distribution_identifier\"])\n\n dataset_index, distribution_index = _get_distribution_indexes(\n catalog, field[\"dataset_identifier\"], field[\"dataset_title\"],\n field[\"distribution_identifier\"], field[\"distribution_title\"],\n logger)\n\n if dataset_index is None:\n logger.error(\n \"\"\"No se encontro el dataset '{}' especificado para el campo\n '{}' (fila #{} de la hoja \"Field\"). Este campo no figurara en\n el data.json de salida.\"\"\".format(\n unidecode(field[\"dataset_title\"]),\n unidecode(field[\"field_title\"]), idx + 2))\n\n elif distribution_index is None:\n logger.error(\n \"\"\"No se encontro la distribucion '{}' especificada para el\n campo'{}' (fila #{} de la hoja \"Field\"). 
Este campo no figurara\n en el data.json de salida.\"\"\".format(\n unidecode(field[\"distribution_title\"]),\n unidecode(field[\"field_title\"]), idx + 2))\n\n else:\n dataset = catalog[\"catalog_dataset\"][dataset_index]\n distribution = dataset[\"dataset_distribution\"][distribution_index]\n\n if \"distribution_field\" in distribution:\n distribution[\"distribution_field\"].append(field)\n else:\n distribution[\"distribution_field\"] = [field]\n\n # Transformo campos de texto separado por comas en listas\n if \"catalog_language\" in catalog:\n catalog[\"catalog_language\"] = helpers.string_to_list(\n catalog[\"catalog_language\"], filter_empty=True)\n\n for dataset in catalog[\"catalog_dataset\"]:\n array_fields = [\"dataset_superTheme\", \"dataset_theme\", \"dataset_tags\",\n \"dataset_keyword\", \"dataset_language\"]\n for field in array_fields:\n if field in dataset:\n dataset[field] = helpers.string_to_list(\n dataset[field], filter_empty=True)\n\n # Elimino los prefijos de los campos a nivel cat\u00e1logo\n _remove_prefixes_and_unprefixed_fields(catalog, 'catalog_')\n\n # Elimino los prefijos de los campos a nivel tema\n for theme in catalog[\"themeTaxonomy\"]:\n _remove_prefixes_and_unprefixed_fields(theme, 'theme_')\n\n # Elimino los prefijos de los campos a nivel dataset\n for dataset in catalog[\"dataset\"]:\n _remove_prefixes_and_unprefixed_fields(dataset, 'dataset_')\n\n # Elimino los campos auxiliares y los prefijos de los campos a nivel\n # distribuci\u00f3n\n for dataset in catalog[\"dataset\"]:\n for distribution in dataset[\"distribution\"]:\n _remove_prefixes_and_unprefixed_fields(\n distribution, 'distribution_')\n\n # Elimino campos auxiliares y los prefijos de los campos a nivel \"campo\"\n for dataset in catalog[\"dataset\"]:\n for distribution in dataset[\"distribution\"]:\n if \"field\" in distribution:\n for field in distribution[\"field\"]:\n _remove_prefixes_and_unprefixed_fields(field, \"field_\")\n\n # Agrupo las claves de 
\"publisher\" y \"contactPoint\" en sendos diccionarios\n catalog = _make_publisher(catalog)\n for dataset in catalog[\"dataset\"]:\n dataset = _make_publisher(dataset)\n dataset = _make_contact_point(dataset)\n\n return catalog", "response": "Read a local XLSX catalog and return a dict of diccionario de metadatos de un cat\u00e1logo."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreads a table from a file.", "response": "def read_table(path):\n \"\"\"Lee un archivo tabular (CSV o XLSX) a una lista de diccionarios.\n\n La extensi\u00f3n del archivo debe ser \".csv\" o \".xlsx\". En funci\u00f3n de\n ella se decidir\u00e1 el m\u00e9todo a usar para leerlo.\n\n Si recibe una lista, comprueba que todos sus diccionarios tengan las\n mismas claves y de ser as\u00ed, la devuelve intacta. Levanta una Excepci\u00f3n\n en caso contrario.\n\n Args:\n path(str o list): Como 'str', path a un archivo CSV o XLSX.\n\n Returns:\n list: Lista de diccionarios con claves id\u00e9nticas representando el\n archivo original.\n \"\"\"\n assert isinstance(path, string_types + (list,)), \"\"\"\n{} no es un `path` valido\"\"\".format(path)\n\n # Si `path` es una lista, devolverla intacta si tiene formato tabular.\n # Si no, levantar una excepci\u00f3n.\n if isinstance(path, list):\n if helpers.is_list_of_matching_dicts(path):\n return path\n else:\n raise ValueError(\"\"\"\nLa lista ingresada no esta formada por diccionarios con las mismas claves.\"\"\")\n\n # Deduzco el formato de archivo de `path` y redirijo seg\u00fan corresponda.\n suffix = path.split(\".\")[-1]\n if suffix == \"csv\":\n return _read_csv_table(path)\n elif suffix == \"xlsx\":\n return _read_xlsx_table(path)\n else:\n raise ValueError(\"\"\"\n{} no es un sufijo reconocido. 
Pruebe con .csv o .xlsx\"\"\".format(suffix))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _read_csv_table(path):\n with open(path, 'rb') as csvfile:\n reader = csv.DictReader(csvfile)\n table = list(reader)\n return table", "response": "Lee un CSV a una lista de diccionarios."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _read_xlsx_table(path):\n workbook = pyxl.load_workbook(path)\n worksheet = workbook.active\n table = helpers.sheet_to_table(worksheet)\n\n return table", "response": "Lee la hoja activa de un archivo XLSX a una lista de diccionarios."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nensuring the provided file is a yaml file", "response": "def validate_model_specification_file(file_path: str) -> str:\n \"\"\"Ensures the provided file is a yaml file\"\"\"\n if not os.path.isfile(file_path):\n raise ConfigurationError('If you provide a model specification file, it must be a file. '\n f'You provided {file_path}')\n\n extension = file_path.split('.')[-1]\n if extension not in ['yaml', 'yml']:\n raise ConfigurationError(f'Model specification files must be in a yaml format. You provided {extension}')\n # Attempt to load\n yaml.full_load(file_path)\n return file_path"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a tuple of the value s source and the value at the specified layer.", "response": "def get_value_with_source(self, layer=None):\n \"\"\"Returns a tuple of the value's source and the value at the specified\n layer. If no layer is specified then the outer layer is used.\n\n Parameters\n ----------\n layer : str\n Name of the layer to use. 
If None then the outermost where the value\n exists will be used.\n\n Raises\n ------\n KeyError\n If the value is not set for the specified layer\n \"\"\"\n if layer:\n return self._values[layer]\n\n for layer in reversed(self._layers):\n if layer in self._values:\n return self._values[layer]\n raise KeyError(layer)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning all values and associated metadata for this node as a list.", "response": "def metadata(self):\n \"\"\"Returns all values and associated metadata for this node as a dict.\n The value which would be selected if the node's value was requested\n is indicated by the `default` flag.\n \"\"\"\n result = []\n for layer in self._layers:\n if layer in self._values:\n result.append({\n 'layer': layer,\n 'value': self._values[layer][1],\n 'source': self._values[layer][0],\n 'default': layer == self._layers[-1]\n })\n return result"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsetting a value for a particular layer with optional metadata about source.", "response": "def set_value(self, value, layer=None, source=None):\n \"\"\"Set a value for a particular layer with optional metadata about source.\n\n Parameters\n ----------\n value : str\n Data to store in the node.\n layer : str\n Name of the layer to use. If None then the outermost where the value\n exists will be used.\n source : str\n Metadata indicating the source of this value (e.g. 
a file path)\n\n Raises\n ------\n TypeError\n If the node is frozen\n KeyError\n If the named layer does not exist\n \"\"\"\n if self._frozen:\n raise TypeError('Frozen ConfigNode does not support assignment')\n\n if not layer:\n layer = self._layers[-1]\n self._values[layer] = (source, value)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef drop_layer(self, layer):\n if self._frozen:\n raise TypeError('Frozen ConfigNode does not support modification')\n self.reset_layer(layer)\n self._layers.remove(layer)", "response": "Removes the named layer and the value associated with it from the node."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef reset_layer(self, layer):\n if self._frozen:\n raise TypeError('Frozen ConfigNode does not support modification')\n if layer in self._values:\n del self._values[layer]", "response": "Removes any value and metadata associated with the named layer."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef freeze(self):\n self.__dict__['_frozen'] = True\n for child in self._children.values():\n child.freeze()", "response": "Causes the ConfigTree to become read only."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_from_layer(self, name, layer=None):\n if name not in self._children:\n if self._frozen:\n raise KeyError(name)\n self._children[name] = ConfigTree(layers=self._layers)\n child = self._children[name]\n if isinstance(child, ConfigNode):\n return child.get_value(layer)\n else:\n return child", "response": "Get a configuration value from the named layer."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset a value in the named layer with the given source.", "response": "def _set_with_metadata(self, name, value, layer=None, source=None):\n \"\"\"Set a value in the named 
layer with the given source.\n\n Parameters\n ----------\n name : str\n The name of the value\n value\n The value to store\n layer : str, optional\n The name of the layer to store the value in. If none is supplied\n then the value will be stored in the outermost layer.\n source : str, optional\n The source to attribute the value to.\n\n Raises\n ------\n TypeError\n if the ConfigTree is frozen\n \"\"\"\n\n if self._frozen:\n raise TypeError('Frozen ConfigTree does not support assignment')\n\n if isinstance(value, dict):\n if name not in self._children or not isinstance(self._children[name], ConfigTree):\n self._children[name] = ConfigTree(layers=list(self._layers))\n self._children[name].update(value, layer, source)\n else:\n if name not in self._children or not isinstance(self._children[name], ConfigNode):\n self._children[name] = ConfigNode(list(self._layers))\n child = self._children[name]\n child.set_value(value, layer, source)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update(self, data: Union[Mapping, str, bytes], layer: str=None, source: str=None):\n if isinstance(data, dict):\n self._read_dict(data, layer, source)\n elif isinstance(data, ConfigTree):\n # TODO: set this to parse the other config tree including layer and source info. Maybe.\n self._read_dict(data.to_dict(), layer, source)\n elif isinstance(data, str):\n if data.endswith(('.yaml', '.yml')):\n source = source if source else data\n self._load(data, layer, source)\n else:\n try:\n self._loads(data, layer, source)\n except AttributeError:\n raise ValueError(\"The string data should be yaml formated string or path to .yaml/.yml file\")\n elif data is None:\n pass\n else:\n raise ValueError(f\"Update must be called with dictionary, string, or ConfigTree. 
\"\n f\"You passed in {type(data)}\")", "response": "Updates the internal state of the object with the given data."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _read_dict(self, data_dict, layer=None, source=None):\n for k, v in data_dict.items():\n self._set_with_metadata(k, v, layer, source)", "response": "Load a dictionary into the ConfigTree."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _loads(self, data_string, layer=None, source=None):\n data_dict = yaml.full_load(data_string)\n self._read_dict(data_dict, layer, source)", "response": "Loads data from a yaml formatted string."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _load(self, f, layer=None, source=None):\n if hasattr(f, 'read'):\n self._loads(f.read(), layer=layer, source=source)\n else:\n with open(f) as f:\n self._loads(f.read(), layer=layer, source=source)", "response": "Load data from a yaml formatted file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn value and metadata associated with the named value", "response": "def metadata(self, name):\n \"\"\"Return value and metadata associated with the named value\n\n Parameters\n ----------\n name : str\n name to retrieve. 
If the name contains '.'s it will be retrieved recursively\n\n Raises\n ------\n KeyError\n if name is not defined in the ConfigTree\n \"\"\"\n if name in self._children:\n return self._children[name].metadata()\n else:\n head, _, tail = name.partition('.')\n if head in self._children:\n return self._children[head].metadata(key=tail)\n else:\n raise KeyError(name)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nremove any value and metadata associated with the named layer.", "response": "def reset_layer(self, layer, preserve_keys=()):\n \"\"\"Removes any value and metadata associated with the named layer.\n\n Parameters\n ----------\n layer : str\n Name of the layer to reset.\n preserve_keys : list or tuple\n A list of keys to skip while removing data\n\n Raises\n ------\n TypeError\n If the node is frozen\n KeyError\n If the named layer does not exist\n \"\"\"\n self._reset_layer(layer, [k.split('.') for k in preserve_keys], prefix=[])"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nremove the named layer and the value associated with it from the node.", "response": "def drop_layer(self, layer):\n \"\"\"Removes the named layer and the value associated with it from the node.\n\n Parameters\n ----------\n layer : str\n Name of the layer to drop.\n\n Raises\n ------\n TypeError\n If the node is frozen\n KeyError\n If the named layer does not exist\n \"\"\"\n if self._frozen:\n raise TypeError('Frozen ConfigTree does not support modification')\n for child in self._children.values():\n child.drop_layer(layer)\n self._layers.remove(layer)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nlists all keys which are present in the ConfigTree but which have not been accessed.", "response": "def unused_keys(self):\n \"\"\"Lists all keys which are present in the ConfigTree but which have not been accessed.\"\"\"\n unused = set()\n for k, c in self._children.items():\n 
if isinstance(c, ConfigNode):\n if not c.has_been_accessed():\n unused.add(k)\n else:\n for ck in c.unused_keys():\n unused.add(k+'.'+ck)\n return unused"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate a new Draft4Validator object.", "response": "def create_validator(schema_filename=None, schema_dir=None):\n \"\"\"Crea el validador necesario para inicializar un objeto DataJson.\n\n Para poder resolver referencias inter-esquemas, un Validador requiere\n que se especifique un RefResolver (Resolvedor de Referencias) con el\n directorio base (absoluto) y el archivo desde el que se referencia el\n directorio.\n\n Para poder validar formatos, un Validador requiere que se provea\n expl\u00edcitamente un FormatChecker. Actualmente se usa el default de la\n librer\u00eda, jsonschema.FormatChecker().\n\n Args:\n schema_filename (str): Nombre del archivo que contiene el esquema\n validador \"maestro\".\n schema_dir (str): Directorio (absoluto) donde se encuentra el\n esquema validador maestro y sus referencias, de tenerlas.\n\n Returns:\n Draft4Validator: Un validador de JSONSchema Draft #4. 
El validador\n se crea con un RefResolver que resuelve referencias de\n `schema_filename` dentro de `schema_dir`.\n \"\"\"\n schema_filename = schema_filename or DEFAULT_CATALOG_SCHEMA_FILENAME\n schema_dir = schema_dir or ABSOLUTE_SCHEMA_DIR\n schema_path = os.path.join(schema_dir, schema_filename)\n schema = readers.read_json(schema_path)\n\n # Seg\u00fan https://github.com/Julian/jsonschema/issues/98\n # Permite resolver referencias locales a otros esquemas.\n if platform.system() == 'Windows':\n base_uri = \"file:///\" + schema_path.replace(\"\\\\\", \"/\")\n else:\n base_uri = \"file://\" + schema_path\n resolver = jsonschema.RefResolver(base_uri=base_uri, referrer=schema)\n\n format_checker = jsonschema.FormatChecker()\n\n validator = jsonschema.Draft4Validator(\n schema=schema, resolver=resolver, format_checker=format_checker)\n\n return validator"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef is_valid_catalog(catalog, validator=None):\n catalog = readers.read_catalog(catalog)\n if not validator:\n if hasattr(catalog, \"validator\"):\n validator = catalog.validator\n else:\n validator = create_validator()\n\n jsonschema_res = validator.is_valid(catalog)\n custom_errors = iter_custom_errors(catalog)\n\n return jsonschema_res and len(list(custom_errors)) == 0", "response": "Valida que un archivo data. 
json cumpla con el schema definida."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef validate_catalog(catalog, only_errors=False, fmt=\"dict\",\n export_path=None, validator=None):\n \"\"\"Analiza un data.json registrando los errores que encuentra.\n\n Chequea que el data.json tiene todos los campos obligatorios y que\n tanto los campos obligatorios como los opcionales siguen la estructura\n definida en el schema.\n\n Args:\n catalog (str o dict): Cat\u00e1logo (dict, JSON o XLSX) a ser validado.\n only_errors (bool): Si es True s\u00f3lo se reportan los errores.\n fmt (str): Indica el formato en el que se desea el reporte.\n \"dict\" es el reporte m\u00e1s verborr\u00e1gico respetando la\n estructura del data.json.\n \"list\" devuelve un dict con listas de errores formateados para\n generar tablas.\n export_path (str): Path donde exportar el reporte generado (en\n formato XLSX o CSV). Si se especifica, el m\u00e9todo no devolver\u00e1\n nada, a pesar de que se pase alg\u00fan argumento en `fmt`.\n\n Returns:\n dict: Diccionario resumen de los errores encontrados::\n\n {\n \"status\": \"OK\", # resultado de la validaci\u00f3n global\n \"error\": {\n \"catalog\": {\n \"status\": \"OK\",\n \"errors\": []\n \"title\": \"T\u00edtulo Catalog\"},\n \"dataset\": [\n {\n \"status\": \"OK\",\n \"errors\": [],\n \"title\": \"Titulo Dataset 1\"\n },\n {\n \"status\": \"ERROR\",\n \"errors\": [error1_info, error2_info, ...],\n \"title\": \"Titulo Dataset 2\"\n }\n ]\n }\n }\n\n Donde errorN_info es un dict con la informaci\u00f3n del N-\u00e9simo\n error encontrado, con las siguientes claves: \"path\", \"instance\",\n \"message\", \"validator\", \"validator_value\", \"error_code\".\n\n \"\"\"\n catalog = readers.read_catalog(catalog)\n\n # La respuesta por default se devuelve si no hay errores\n default_response = {\n \"status\": \"OK\",\n \"error\": {\n \"catalog\": {\n \"status\": \"OK\",\n \"title\": 
catalog.get(\"title\"),\n \"errors\": []\n },\n # \"dataset\" contiene lista de rtas default si el cat\u00e1logo\n # contiene la clave \"dataset\" y adem\u00e1s su valor es una lista.\n # En caso contrario \"dataset\" es None.\n \"dataset\": [\n {\n \"status\": \"OK\",\n \"title\": dataset.get(\"title\"),\n \"identifier\": dataset.get(\"identifier\"),\n \"list_index\": index,\n \"errors\": []\n } for index, dataset in enumerate(catalog[\"dataset\"])\n ] if (\"dataset\" in catalog and\n isinstance(catalog[\"dataset\"], list)) else None\n }\n }\n\n # Genero la lista de errores en la instancia a validar\n if not validator:\n if hasattr(catalog, \"validator\"):\n validator = catalog.validator\n else:\n validator = create_validator()\n jsonschema_errors = list(validator.iter_errors(catalog))\n custom_errors = list(iter_custom_errors(catalog))\n\n errors = jsonschema_errors + custom_errors\n\n response = default_response.copy()\n for error in errors:\n response = _update_validation_response(\n error, response)\n\n # filtra los resultados que est\u00e1n ok, para hacerlo m\u00e1s compacto\n if only_errors:\n response[\"error\"][\"dataset\"] = [\n dataset for dataset in response[\"error\"][\"dataset\"] if\n dataset[\"status\"] == \"ERROR\"]\n\n # elige el formato del resultado\n if export_path:\n validation_lists = _catalog_validation_to_list(response)\n\n # config styles para reportes en excel\n alignment = Alignment(\n wrap_text=True,\n shrink_to_fit=True,\n vertical=\"center\"\n )\n column_styles = {\n \"catalog\": {\n \"catalog_status\": {\"width\": 20},\n \"catalog_error_location\": {\"width\": 40},\n \"catalog_error_message\": {\"width\": 40},\n \"catalog_title\": {\"width\": 20},\n },\n \"dataset\": {\n \"dataset_error_location\": {\"width\": 20},\n \"dataset_identifier\": {\"width\": 40},\n \"dataset_status\": {\"width\": 20},\n \"dataset_title\": {\"width\": 40},\n \"dataset_list_index\": {\"width\": 20},\n \"dataset_error_message\": {\"width\": 40},\n }\n }\n 
cell_styles = {\n \"catalog\": [\n {\"alignment\": Alignment(vertical=\"center\")},\n {\"row\": 1, \"font\": Font(bold=True)},\n ],\n \"dataset\": [\n {\"alignment\": Alignment(vertical=\"center\")},\n {\"row\": 1, \"font\": Font(bold=True)},\n ]\n }\n\n # crea tablas en un s\u00f3lo excel o varios CSVs\n writers.write_tables(\n tables=validation_lists, path=export_path,\n column_styles=column_styles, cell_styles=cell_styles\n )\n\n elif fmt == \"dict\":\n return response\n\n elif fmt == \"list\":\n return _catalog_validation_to_list(response)\n\n # Comentado porque requiere una extensi\u00f3n pesada nueva para pydatajson\n # elif fmt == \"df\":\n # validation_lists = self._catalog_validation_to_list(response)\n # return {\n # \"catalog\": pd.DataFrame(validation_lists[\"catalog\"]),\n # \"dataset\": pd.DataFrame(validation_lists[\"dataset\"])\n # }\n\n else:\n raise Exception(\"No se reconoce el formato {}\".format(fmt))", "response": "Analiza un data.json registrando los errores que encuentra.\n\n Chequea que el data.json tiene todos los campos obligatorios y que\n tanto los campos obligatorios como los opcionales siguen la estructura\n definida en el schema.\n\n Args:\n catalog (str o dict): Cat\u00e1logo (dict, JSON o XLSX) a ser validado.\n only_errors (bool): Si es True s\u00f3lo se reportan los errores.\n fmt (str): Indica el formato en el que se desea el reporte.\n \"dict\" es el reporte m\u00e1s verborr\u00e1gico respetando la\n estructura del data.json.\n \"list\" devuelve un dict con listas de errores formateados para\n generar tablas.\n export_path (str): Path donde exportar el reporte generado (en\n formato XLSX o CSV). 
Si se especifica, el m\u00e9todo no devolver\u00e1\n nada, a pesar de que se pase alg\u00fan argumento en `fmt`.\n\n Returns:\n dict: Diccionario resumen de los errores encontrados::\n\n {\n \"status\": \"OK\", # resultado de la validaci\u00f3n global\n \"error\": {\n \"catalog\": {\n \"status\": \"OK\",\n \"errors\": []\n \"title\": \"T\u00edtulo Catalog\"},\n \"dataset\": [\n {\n \"status\": \"OK\",\n \"errors\": [],\n \"title\": \"Titulo Dataset 1\"\n },\n {\n \"status\": \"ERROR\",\n \"errors\": [error1_info, error2_info, ...],\n \"title\": \"Titulo Dataset 2\"\n }\n ]\n }\n }\n\n Donde errorN_info es un dict con la informaci\u00f3n del N-\u00e9simo\n error encontrado, con las siguientes claves: \"path\", \"instance\",\n \"message\", \"validator\", \"validator_value\", \"error_code\"."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning an iterator over the custom error messages that are generated by the jsonschema.", "response": "def iter_custom_errors(catalog):\n \"\"\"Realiza validaciones sin usar el jsonschema.\n\n En esta funci\u00f3n se agregan bloques de c\u00f3digo en python que realizan\n validaciones complicadas o imposibles de especificar usando jsonschema.\n \"\"\"\n\n try:\n # chequea que no se repiten los ids de la taxonom\u00eda espec\u00edfica\n if \"themeTaxonomy\" in catalog:\n theme_ids = [theme[\"id\"] for theme in catalog[\"themeTaxonomy\"]]\n dups = _find_dups(theme_ids)\n if len(dups) > 0:\n yield ce.ThemeIdRepeated(dups)\n\n # chequea que un dataset no use un theme que no exista en la taxonom\u00eda\n # TODO: hay que implementarlo\n\n # chequea que un dataset no use theme con id est\u00e9 repetido en taxonom\u00eda\n # TODO: hay que implementarlo\n\n # chequea que la extensi\u00f3n de fileName, downloadURL y format sean\n # consistentes\n for dataset_idx, dataset in enumerate(catalog[\"dataset\"]):\n for distribution_idx, distribution in enumerate(\n dataset[\"distribution\"]):\n for attribute in 
['downloadURL', 'fileName']:\n if not format_matches_extension(distribution, attribute):\n yield ce.ExtensionError(dataset_idx, distribution_idx,\n distribution, attribute)\n\n # chequea que no haya duplicados en los downloadURL de las\n # distribuciones\n # urls = []\n # for dataset in catalog[\"dataset\"]:\n # urls += [distribution['downloadURL']\n # for distribution in dataset['distribution']\n # if distribution.get('downloadURL')]\n # dups = _find_dups(urls)\n # if len(dups) > 0:\n # yield ce.DownloadURLRepetitionError(dups)\n\n # chequea que las URLs de las distribuciones est\u00e9n operativas\n # hay que revisar\n # for distribution in catalog.distributions:\n # url = distribution.get(\"downloadURL\")\n # status_code = requests.head(url).status_code\n # if status_code != 200:\n # yield ce.DownloadURLBrokenError(\n # distribution.get(\"identifier\"), url, status_code)\n\n except Exception:\n logger.exception(\"Error de validaci\u00f3n.\")"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef format_matches_extension(distribution, attribute):\n\n if attribute in distribution and \"format\" in distribution:\n if \"/\" in distribution['format']:\n possible_format_extensions = mimetypes.guess_all_extensions(\n distribution['format'])\n else:\n possible_format_extensions = ['.' 
+ distribution['format'].lower()]\n\n file_name = urlparse(distribution[attribute]).path\n extension = os.path.splitext(file_name)[-1].lower()\n\n if attribute == 'downloadURL' and not extension:\n return True\n\n # hay extensiones exceptuadas porque enmascaran otros formatos\n if extension.lower().replace(\".\", \"\") in EXTENSIONS_EXCEPTIONS:\n return True\n\n if extension not in possible_format_extensions:\n return False\n\n return True", "response": "Chequea si una extensi\u00f3n podr\u00eda corresponder a un formato dado."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef push_dataset_to_ckan(catalog, owner_org, dataset_origin_identifier,\n portal_url, apikey, catalog_id=None,\n demote_superThemes=True, demote_themes=True,\n download_strategy=None, generate_new_access_url=None):\n \"\"\"Escribe la metadata de un dataset en el portal pasado por par\u00e1metro.\n\n Args:\n catalog (DataJson): El cat\u00e1logo de origen que contiene el dataset.\n owner_org (str): La organizaci\u00f3n a la cual pertence el dataset.\n dataset_origin_identifier (str): El id del dataset que se va a\n federar.\n portal_url (str): La URL del portal CKAN de destino.\n apikey (str): La apikey de un usuario con los permisos que le\n permitan crear o actualizar el dataset.\n catalog_id (str): El prefijo con el que va a preceder el id del\n dataset en cat\u00e1logo destino.\n demote_superThemes(bool): Si est\u00e1 en true, los ids de los super\n themes del dataset, se propagan como grupo.\n demote_themes(bool): Si est\u00e1 en true, los labels de los themes\n del dataset, pasan a ser tags. Sino, se pasan como grupo.\n download_strategy(callable): Una funci\u00f3n (cat\u00e1logo, distribuci\u00f3n)->\n bool. 
Sobre las distribuciones que eval\u00faa True, descarga el\n recurso en el downloadURL y lo sube al portal de destino.\n Por default no sube ninguna distribuci\u00f3n.\n generate_new_access_url(list): Se pasan los ids de las\n distribuciones cuyo accessURL se regenerar en el portal de\n destino. Para el resto, el portal debe mantiene el valor pasado\n en el DataJson.\n Returns:\n str: El id del dataset en el cat\u00e1logo de destino.\n \"\"\"\n dataset = catalog.get_dataset(dataset_origin_identifier)\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n\n package = map_dataset_to_package(catalog, dataset, owner_org, catalog_id,\n demote_superThemes, demote_themes)\n\n # Get license id\n if dataset.get('license'):\n license_list = ckan_portal.call_action('license_list')\n try:\n ckan_license = next(\n license_item for license_item in license_list if\n license_item['title'] == dataset['license'] or\n license_item['url'] == dataset['license']\n )\n package['license_id'] = ckan_license['id']\n except StopIteration:\n package['license_id'] = 'notspecified'\n else:\n package['license_id'] = 'notspecified'\n\n try:\n pushed_package = ckan_portal.call_action(\n 'package_update', data_dict=package)\n except NotFound:\n pushed_package = ckan_portal.call_action(\n 'package_create', data_dict=package)\n\n with resource_files_download(catalog, dataset.get('distribution', []),\n download_strategy) as resource_files:\n resources_update(portal_url, apikey, dataset.get('distribution', []),\n resource_files, generate_new_access_url, catalog_id)\n\n ckan_portal.close()\n return pushed_package['id']", "response": "Escribe la metadata de un dataset en el CKAN de destino."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef resources_update(portal_url, apikey, distributions,\n resource_files, generate_new_access_url=None,\n catalog_id=None):\n \"\"\"Sube archivos locales a sus distribuciones correspondientes en el portal\n 
pasado por par\u00e1metro.\n\n Args:\n portal_url (str): La URL del portal CKAN de destino.\n apikey (str): La apikey de un usuario con los permisos que le\n permitan crear o actualizar el dataset.\n distributions(list): Lista de distribuciones posibles para\n actualizar.\n resource_files(dict): Diccionario con entradas\n id_de_distribucion:path_al_recurso a subir\n generate_new_access_url(list): Lista de ids de distribuciones a\n las cuales se actualizar\u00e1 el accessURL con los valores\n generados por el portal de destino\n catalog_id(str): prependea el id al id del recurso para\n encontrarlo antes de subirlo\n Returns:\n list: los ids de los recursos modificados\n \"\"\"\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n result = []\n generate_new_access_url = generate_new_access_url or []\n for distribution in distributions:\n updated = False\n resource_id = catalog_id + '_' + distribution['identifier']\\\n if catalog_id else distribution['identifier']\n fields = {'id': resource_id}\n if distribution['identifier'] in generate_new_access_url:\n fields.update({'accessURL': ''})\n updated = True\n if distribution['identifier'] in resource_files:\n fields.update({'resource_type': 'file.upload',\n 'upload':\n open(resource_files[distribution['identifier']],\n 'rb')\n })\n updated = True\n if updated:\n try:\n pushed = ckan_portal.action.resource_patch(**fields)\n result.append(pushed['id'])\n except CKANAPIError as e:\n logger.exception(\n \"Error subiendo recurso {} a la distribuci\u00f3n {}: {}\"\n .format(resource_files[distribution['identifier']],\n resource_files, str(e)))\n return result", "response": "Update the content of a resource in a CKAN portal."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef remove_datasets_from_ckan(portal_url, apikey, filter_in=None,\n filter_out=None, only_time_series=False,\n organization=None):\n \"\"\"Borra un dataset en el portal pasado por 
par\u00e1metro.\n\n Args:\n portal_url (str): La URL del portal CKAN de destino.\n apikey (str): La apikey de un usuario con los permisos que le\n permitan borrar el dataset.\n filter_in(dict): Diccionario de filtrado positivo, similar al\n de search.get_datasets.\n filter_out(dict): Diccionario de filtrado negativo, similar al\n de search.get_datasets.\n only_time_series(bool): Filtrar solo los datasets que tengan\n recursos con series de tiempo.\n organization(str): Filtrar solo los datasets que pertenezcan a\n cierta organizacion.\n Returns:\n None\n \"\"\"\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n identifiers = []\n datajson_filters = filter_in or filter_out or only_time_series\n if datajson_filters:\n identifiers += get_datasets(\n portal_url + '/data.json',\n filter_in=filter_in, filter_out=filter_out,\n only_time_series=only_time_series, meta_field='identifier'\n )\n if organization:\n query = 'organization:\"' + organization + '\"'\n search_result = ckan_portal.call_action('package_search', data_dict={\n 'q': query, 'rows': 500, 'start': 0})\n org_identifiers = [dataset['id']\n for dataset in search_result['results']]\n start = 500\n while search_result['count'] > start:\n search_result = ckan_portal.call_action(\n 'package_search', data_dict={\n 'q': query, 'rows': 500, 'start': start})\n org_identifiers += [dataset['id']\n for dataset in search_result['results']]\n start += 500\n\n if datajson_filters:\n identifiers = set(identifiers).intersection(set(org_identifiers))\n else:\n identifiers = org_identifiers\n\n for identifier in identifiers:\n ckan_portal.call_action('dataset_purge', data_dict={'id': identifier})", "response": "Borra un dataset en el portal pasado por par\u00e1metro."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nharvest a dataset to CKAN.", "response": "def harvest_dataset_to_ckan(catalog, owner_org, dataset_origin_identifier,\n portal_url, apikey, catalog_id,\n download_strategy=None):\n 
\"\"\"Federa la metadata de un dataset en el portal pasado por par\u00e1metro.\n\n Args:\n catalog (DataJson): El cat\u00e1logo de origen que contiene el dataset.\n owner_org (str): La organizaci\u00f3n a la cual pertence el dataset.\n dataset_origin_identifier (str): El id del dataset que se va a\n restaurar.\n portal_url (str): La URL del portal CKAN de destino.\n apikey (str): La apikey de un usuario con los permisos que le\n permitan crear o actualizar el dataset.\n catalog_id(str): El id que prependea al dataset y recursos\n download_strategy(callable): Una funci\u00f3n (cat\u00e1logo, distribuci\u00f3n)->\n bool. Sobre las distribuciones que eval\u00faa True, descarga el\n recurso en el downloadURL y lo sube al portal de destino.\n Por default no sube ninguna distribuci\u00f3n.\n Returns:\n str: El id del dataset restaurado.\n \"\"\"\n\n return push_dataset_to_ckan(catalog, owner_org, dataset_origin_identifier,\n portal_url, apikey, catalog_id=catalog_id,\n download_strategy=download_strategy)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nharvesting a catalog to CKAN.", "response": "def harvest_catalog_to_ckan(catalog, portal_url, apikey, catalog_id,\n dataset_list=None, owner_org=None,\n download_strategy=None):\n \"\"\"Federa los datasets de un cat\u00e1logo al portal pasado por par\u00e1metro.\n\n Args:\n catalog (DataJson): El cat\u00e1logo de origen que se federa.\n portal_url (str): La URL del portal CKAN de destino.\n apikey (str): La apikey de un usuario con los permisos que le\n permitan crear o actualizar el dataset.\n catalog_id (str): El prefijo con el que va a preceder el id del\n dataset en cat\u00e1logo destino.\n dataset_list(list(str)): Los ids de los datasets a federar. 
Si no\n se pasa una lista, todos los datasests se federan.\n owner_org (str): La organizaci\u00f3n a la cual pertencen los datasets.\n Si no se pasa, se utiliza el catalog_id.\n download_strategy(callable): Una funci\u00f3n (cat\u00e1logo, distribuci\u00f3n)->\n bool. Sobre las distribuciones que eval\u00faa True, descarga el\n recurso en el downloadURL y lo sube al portal de destino.\n Por default no sube ninguna distribuci\u00f3n.\n Returns:\n str: El id del dataset en el cat\u00e1logo de destino.\n \"\"\"\n # Evitar entrar con valor falsy\n harvested = []\n if dataset_list is None:\n try:\n dataset_list = [ds['identifier'] for ds in catalog.datasets]\n except KeyError:\n logger.exception('Hay datasets sin identificadores')\n return harvested\n owner_org = owner_org or catalog_id\n errors = {}\n for dataset_id in dataset_list:\n try:\n harvested_id = harvest_dataset_to_ckan(catalog, owner_org,\n dataset_id, portal_url,\n apikey, catalog_id,\n download_strategy)\n harvested.append(harvested_id)\n except Exception as e:\n msg = \"Error federando catalogo: %s, dataset: %s al portal: %s\\n\"\\\n % (catalog_id, dataset_id, portal_url)\n msg += str(e)\n logger.error(msg)\n errors[dataset_id] = str(e)\n\n return harvested, errors"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef push_new_themes(catalog, portal_url, apikey):\n ckan_portal = RemoteCKAN(portal_url, apikey=apikey)\n existing_themes = ckan_portal.call_action('group_list')\n new_themes = [theme['id'] for theme in catalog[\n 'themeTaxonomy'] if theme['id'] not in existing_themes]\n pushed_names = []\n for new_theme in new_themes:\n name = push_theme_to_ckan(\n catalog, portal_url, apikey, identifier=new_theme)\n pushed_names.append(name)\n return pushed_names", "response": "Pushes new themes to CKAN"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef 
get_organization_from_ckan(portal_url, org_id):\n ckan_portal = RemoteCKAN(portal_url)\n return ckan_portal.call_action('organization_show',\n data_dict={'id': org_id})", "response": "Get organization from CKAN"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef push_organization_tree_to_ckan(portal_url, apikey, org_tree, parent=None):\n created = []\n for node in org_tree:\n pushed_org = push_organization_to_ckan(portal_url,\n apikey,\n node,\n parent=parent)\n if pushed_org['success']:\n pushed_org['children'] = push_organization_tree_to_ckan(\n portal_url, apikey, node['children'], parent=node['name'])\n\n created.append(pushed_org)\n return created", "response": "Pushes an organization tree to CKAN"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef push_organization_to_ckan(portal_url, apikey, organization, parent=None):\n portal = RemoteCKAN(portal_url, apikey=apikey)\n if parent:\n organization['groups'] = [{'name': parent}]\n try:\n pushed_org = portal.call_action('organization_create',\n data_dict=organization)\n pushed_org['success'] = True\n except Exception as e:\n logger.exception('Ocurri\u00f3 un error creando la organizaci\u00f3n {}: {}'\n .format(organization['title'], str(e)))\n pushed_org = {'name': organization, 'success': False}\n\n return pushed_org", "response": "Pushes an organization to CKAN"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nremove an organization from CKAN", "response": "def remove_organization_from_ckan(portal_url, apikey, organization_id):\n \"\"\"Toma un id de organizaci\u00f3n y la purga del portal de destino.\n Args:\n portal_url (str): La URL del portal CKAN de destino.\n apikey (str): La apikey de un usuario con los permisos que le\n permitan borrar la organizaci\u00f3n.\n organization_id(str): Id o name de la organizaci\u00f3n a borrar.\n Returns:\n None.\n\n \"\"\"\n 
portal = RemoteCKAN(portal_url, apikey=apikey)\n try:\n portal.call_action('organization_purge',\n data_dict={'id': organization_id})\n\n except Exception as e:\n logger.exception('Ocurri\u00f3 un error borrando la organizaci\u00f3n {}: {}'\n .format(organization_id, str(e)))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef remove_organizations_from_ckan(portal_url, apikey, organization_list):\n for org in organization_list:\n remove_organization_from_ckan(portal_url, apikey, org)", "response": "Removes organizations from CKAN"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef restore_organization_to_ckan(catalog, owner_org, portal_url, apikey,\n dataset_list=None, download_strategy=None,\n generate_new_access_url=None):\n \"\"\"Restaura los datasets de la organizaci\u00f3n de un cat\u00e1logo al portal pasado\n por par\u00e1metro. Si hay temas presentes en el DataJson que no est\u00e1n en el\n portal de CKAN, los genera.\n\n Args:\n catalog (DataJson): El cat\u00e1logo de origen que se restaura.\n portal_url (str): La URL del portal CKAN de destino.\n apikey (str): La apikey de un usuario con los permisos que le\n permitan crear o actualizar el dataset.\n dataset_list(list(str)): Los ids de los datasets a restaurar. Si no\n se pasa una lista, todos los datasests se restauran.\n owner_org (str): La organizaci\u00f3n a la cual pertencen los datasets.\n download_strategy(callable): Una funci\u00f3n (cat\u00e1logo, distribuci\u00f3n)->\n bool. Sobre las distribuciones que eval\u00faa True, descarga el\n recurso en el downloadURL y lo sube al portal de destino.\n Por default no sube ninguna distribuci\u00f3n.\n generate_new_access_url(list): Se pasan los ids de las\n distribuciones cuyo accessURL se regenerar en el portal de\n destino. 
Para el resto, el portal debe mantiene el valor\n pasado en el DataJson.\n Returns:\n list(str): La lista de ids de datasets subidos.\n \"\"\"\n push_new_themes(catalog, portal_url, apikey)\n restored = []\n if dataset_list is None:\n try:\n dataset_list = [ds['identifier'] for ds in catalog.datasets]\n except KeyError:\n logger.exception('Hay datasets sin identificadores')\n return restored\n\n for dataset_id in dataset_list:\n try:\n restored_id = restore_dataset_to_ckan(catalog, owner_org,\n dataset_id, portal_url,\n apikey, download_strategy,\n generate_new_access_url)\n restored.append(restored_id)\n except (CKANAPIError, KeyError, AttributeError) as e:\n logger.exception('Ocurri\u00f3 un error restaurando el dataset {}: {}'\n .format(dataset_id, str(e)))\n return restored", "response": "Restaura los datasets de la organizaci\u00f3n de un cat\u00e1logo al portal de CKAN de destino."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef restore_catalog_to_ckan(catalog, origin_portal_url, destination_portal_url,\n apikey, download_strategy=None,\n generate_new_access_url=None):\n \"\"\"Restaura los datasets de un cat\u00e1logo original al portal pasado\n por par\u00e1metro. Si hay temas presentes en el DataJson que no est\u00e1n en\n el portal de CKAN, los genera.\n\n Args:\n catalog (DataJson): El cat\u00e1logo de origen que se restaura.\n origin_portal_url (str): La URL del portal CKAN de origen.\n destination_portal_url (str): La URL del portal CKAN de\n destino.\n apikey (str): La apikey de un usuario con los permisos que le\n permitan crear o actualizar el dataset.\n download_strategy(callable): Una funci\u00f3n\n (cat\u00e1logo, distribuci\u00f3n)-> bool. Sobre las distribuciones\n que eval\u00faa True, descarga el recurso en el downloadURL y lo\n sube al portal de destino. 
Por default no sube ninguna\n distribuci\u00f3n.\n generate_new_access_url(list): Se pasan los ids de las\n distribuciones cuyo accessURL se regenerar en el portal de\n destino. Para el resto, el portal debe mantiene el valor\n pasado en el DataJson.\n Returns:\n dict: Diccionario con key organizaci\u00f3n y value la lista de ids\n de datasets subidos a esa organizaci\u00f3n\n \"\"\"\n catalog['homepage'] = catalog.get('homepage') or origin_portal_url\n res = {}\n origin_portal = RemoteCKAN(origin_portal_url)\n\n try:\n org_list = origin_portal.action.organization_list()\n\n except CKANAPIError as e:\n logger.exception(\n 'Ocurri\u00f3 un error buscando las organizaciones del portal {}: {}'\n .format(origin_portal_url, str(e)))\n print(e)\n return res\n\n for org in org_list:\n print(\"Restaurando organizacion {}\".format(org))\n\n response = origin_portal.action.organization_show(\n id=org, include_datasets=True)\n datasets = [package['id'] for package in response['packages']]\n pushed_datasets = restore_organization_to_ckan(\n catalog, org, destination_portal_url, apikey,\n dataset_list=datasets, download_strategy=download_strategy,\n generate_new_access_url=generate_new_access_url\n )\n res[org] = pushed_datasets\n\n return res", "response": "Restaura los datasets de un cat\u00e1logo original al portal de CKAN destino."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nconstructs a simulation from a list of components componentTrees configuration and pluginTrees.", "response": "def initialize_simulation(components: List, input_config: Mapping=None,\n plugin_config: Mapping=None) -> InteractiveContext:\n \"\"\"Construct a simulation from a list of components, component\n configuration, and a plugin configuration.\n\n The simulation context returned by this method still needs to be setup by\n calling its setup method. 
It is mostly useful for testing and debugging.\n\n Parameters\n ----------\n components\n A list of initialized simulation components. Corresponds to the\n components block of a model specification.\n input_config\n A nested dictionary with any additional simulation configuration\n information needed. Corresponds to the configuration block of a model\n specification.\n plugin_config\n A dictionary containing a description of any simulation plugins to\n include in the simulation. If you're using this argument, you're either\n deep in the process of simulation development or the maintainers have\n done something wrong. Corresponds to the plugins block of a model\n specification.\n\n Returns\n -------\n An initialized (but not set up) simulation context.\n \"\"\"\n config = build_simulation_configuration()\n config.update(input_config)\n plugin_manager = PluginManager(plugin_config)\n\n return InteractiveContext(config, components, plugin_manager)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nconstructs a simulation from a list of components and call its setup method.", "response": "def setup_simulation(components: List, input_config: Mapping=None,\n plugin_config: Mapping=None) -> InteractiveContext:\n \"\"\"Construct a simulation from a list of components and call its setup\n method.\n\n Parameters\n ----------\n components\n A list of initialized simulation components. Corresponds to the\n components block of a model specification.\n input_config\n A nested dictionary with any additional simulation configuration\n information needed. Corresponds to the configuration block of a model\n specification.\n plugin_config\n A dictionary containing a description of any simulation plugins to\n include in the simulation. If you're using this argument, you're either\n deep in the process of simulation development or the maintainers have\n done something wrong. 
Corresponds to the plugins block of a model\n specification.\n\n Returns\n -------\n A simulation context that is setup and ready to run.\n \"\"\"\n simulation = initialize_simulation(components, input_config, plugin_config)\n simulation.setup()\n\n return simulation"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nconstructing a simulation from a model specification file.", "response": "def initialize_simulation_from_model_specification(model_specification_file: str) -> InteractiveContext:\n \"\"\"Construct a simulation from a model specification file.\n\n The simulation context returned by this method still needs to be setup by\n calling its setup method. It is mostly useful for testing and debugging.\n\n Parameters\n ----------\n model_specification_file\n The path to a model specification file.\n\n Returns\n -------\n An initialized (but not set up) simulation context.\n \"\"\"\n model_specification = build_model_specification(model_specification_file)\n\n plugin_config = model_specification.plugins\n component_config = model_specification.components\n simulation_config = model_specification.configuration\n\n plugin_manager = PluginManager(plugin_config)\n component_config_parser = plugin_manager.get_plugin('component_configuration_parser')\n components = component_config_parser.get_components(component_config)\n\n return InteractiveContext(simulation_config, components, plugin_manager)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef setup_simulation_from_model_specification(model_specification_file: str) -> InteractiveContext:\n simulation = initialize_simulation_from_model_specification(model_specification_file)\n simulation.setup()\n\n return simulation", "response": "Construct a simulation from a model specification file and call its setup method."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef setup(self):\n 
super().setup()\n self._start_time = self.clock.time\n self.initialize_simulants()", "response": "Setup the simulation and initialize its population."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef initialize_simulants(self):\n super().initialize_simulants()\n self._initial_population = self.population.get_population(True)", "response": "Initialize this simulation s population. Should not be called\n directly."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef reset(self):\n warnings.warn(\"This reset method is very crude. It should work for \"\n \"many simple simulations, but we make no guarantees. In \"\n \"particular, if you have components that manage their \"\n \"own state in any way, this might not work.\")\n self.population._population = self._initial_population\n self.clock._time = self._start_time", "response": "Reset the simulation to its initial state."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef run(self, with_logging: bool=True) -> int:\n return self.run_until(self.clock.stop_time, with_logging=with_logging)", "response": "Runs the simulation for the time duration specified in the\n configuration."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nrunning the simulation for the given time duration.", "response": "def run_for(self, duration: Timedelta, with_logging: bool=True) -> int:\n \"\"\"Run the simulation for the given time duration.\n\n\n Parameters\n ----------\n duration\n The length of time to run the simulation for. Should be the same\n type as the simulation clock's step size (usually a pandas\n Timedelta).\n with_logging\n Whether or not to log the simulation steps. 
Only works in an ipython\n environment.\n\n Returns\n -------\n The number of steps the simulation took.\n \"\"\"\n return self.run_until(self.clock.time + duration, with_logging=with_logging)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrun the simulation until the end time.", "response": "def run_until(self, end_time: Time, with_logging=True) -> int:\n \"\"\"Run the simulation until the provided end time.\n\n Parameters\n ----------\n end_time\n The time to run the simulation until. The simulation will run until\n its clock is greater than or equal to the provided end time.\n with_logging\n Whether or not to log the simulation steps. Only works in an ipython\n environment.\n\n Returns\n -------\n The number of steps the simulation took.\n \"\"\"\n if not isinstance(end_time, type(self.clock.time)):\n raise ValueError(f\"Provided time must be an instance of {type(self.clock.time)}\")\n\n iterations = int(ceil((end_time - self.clock.time)/self.clock.step_size))\n self.take_steps(number_of_steps=iterations, with_logging=with_logging)\n assert self.clock.time - self.clock.step_size < end_time <= self.clock.time\n return iterations"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nadvance the simulation one step.", "response": "def step(self, step_size: Timedelta=None):\n \"\"\"Advance the simulation one step.\n\n Parameters\n ----------\n step_size\n An optional size of step to take. 
Must be the same type as the\n simulation clock's step size (usually a pandas.Timedelta).\n \"\"\"\n old_step_size = self.clock.step_size\n if step_size is not None:\n if not isinstance(step_size, type(self.clock.step_size)):\n raise ValueError(f\"Provided time must be an instance of {type(self.clock.step_size)}\")\n self.clock._step_size = step_size\n super().step()\n self.clock._step_size = old_step_size"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef take_steps(self, number_of_steps: int=1, step_size: Timedelta=None, with_logging: bool=True):\n if not isinstance(number_of_steps, int):\n raise ValueError('Number of steps must be an integer.')\n\n if run_from_ipython() and with_logging:\n for _ in log_progress(range(number_of_steps), name='Step'):\n self.step(step_size)\n else:\n for _ in range(number_of_steps):\n self.step(step_size)", "response": "Run the simulation for the given number of steps."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_population(self, untracked: bool=False) -> pd.DataFrame:\n return self.population.get_population(untracked)", "response": "Get a copy of the population state table."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_listeners(self, event_type: str) -> List[Callable]:\n if event_type not in self.events:\n raise ValueError(f'No event {event_type} in system.')\n return self.events.get_listeners(event_type)", "response": "Get all listeners of a particular type of event."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_emitter(self, event_type: str) -> Callable:\n if event_type not in self.events:\n raise ValueError(f'No event {event_type} in system.')\n return self.events.get_emitter(event_type)", "response": "Get the callable that emits the given type of events."} 
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_components(self) -> List:\n return [component for component in self.component_manager._components + self.component_manager._managers]", "response": "Get a list of all components in the simulation."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconstructing a LookupTable from input data.", "response": "def build_table(self, data, key_columns=('sex',), parameter_columns=(['age', 'age_group_start', 'age_group_end'],\n ['year', 'year_start', 'year_end']),\n value_columns=None) -> LookupTable:\n \"\"\"Construct a LookupTable from input data.\n\n If data is a ``pandas.DataFrame``, an interpolation function of the specified\n order will be calculated for each permutation of the set of key_columns.\n The columns in parameter_columns will be used as parameters for the interpolation\n functions which will estimate all remaining columns in the table.\n\n If data is a number, time, list, or tuple, a scalar table will be constructed with\n the values in data as the values in each column of the table, named according to\n value_columns.\n\n\n Parameters\n ----------\n data : The source data which will be used to build the resulting LookupTable.\n key_columns : [str]\n Columns used to select between interpolation functions. These\n should be the non-continuous variables in the data. For\n example 'sex' in data about a population.\n parameter_columns : [str]\n The columns which contain the parameters to the interpolation functions.\n These should be the continuous variables. For example 'age'\n in data about a population.\n value_columns : [str]\n The data columns that will be in the resulting LookupTable. 
Columns to be\n interpolated over if interpolation or the names of the columns in the scalar\n table.\n Returns\n -------\n LookupTable\n \"\"\"\n return self._lookup_table_manager.build_table(data, key_columns, parameter_columns, value_columns)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck that the data for any parameters in the sequence is complete.", "response": "def check_data_complete(data, parameter_columns):\n \"\"\" For any parameters specified with edges, make sure edges\n don't overlap and don't have any gaps. Assumes that edges are\n specified with ends and starts overlapping (but one exclusive and\n the other inclusive) so can check that end of previous == start\n of current.\n\n If multiple parameters, make sure all combinations of parameters\n are present in data.\"\"\"\n\n param_edges = [p[1:] for p in parameter_columns if isinstance(p, (Tuple, List))] # strip out call column name\n\n # check no overlaps/gaps\n for p in param_edges:\n other_params = [p_ed[0] for p_ed in param_edges if p_ed != p]\n if other_params:\n sub_tables = data.groupby(list(other_params))\n else:\n sub_tables = {None: data}.items()\n\n n_p_total = len(set(data[p[0]]))\n\n for _, table in sub_tables:\n\n param_data = table[[p[0], p[1]]].copy().sort_values(by=p[0])\n start, end = param_data[p[0]].reset_index(drop=True), param_data[p[1]].reset_index(drop=True)\n\n if len(set(start)) < n_p_total:\n raise ValueError(f'You must provide a value for every combination of {parameter_columns}.')\n\n if len(start) <= 1:\n continue\n for i in range(1, len(start)):\n e = end[i-1]\n s = start[i]\n if e > s or s == start[i-1]:\n raise ValueError(f'Parameter data must not contain overlaps. Parameter {p} '\n f'contains overlapping data.')\n if e < s:\n raise NotImplementedError(f'Interpolation only supported for parameter columns '\n f'with continuous bins. 
Parameter {p} contains '\n f'non-continuous bins.')"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_sub_directory(self, key, path):\n sub_dir_path = os.path.join(self.results_root, path)\n os.makedirs(sub_dir_path, exist_ok=True)\n self._directories[key] = sub_dir_path\n return sub_dir_path", "response": "Adds a sub - directory to the results directory."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef write_output(self, data, file_name, key=None):\n path = os.path.join(self._directories[key], file_name)\n extension = file_name.split('.')[-1]\n\n if extension == 'yaml':\n with open(path, 'w') as f:\n yaml.dump(data, f)\n elif extension == 'hdf':\n # to_hdf breaks with categorical dtypes.\n categorical_columns = data.dtypes[data.dtypes == 'category'].index\n data.loc[:, categorical_columns] = data.loc[:, categorical_columns].astype('object')\n # Writing to an hdf over and over balloons the file size so write to new file and move it over to avoid\n data.to_hdf(path + \"update\", 'data')\n if os.path.exists(path):\n os.remove(path)\n os.rename(path + \"update\", path)\n else:\n raise NotImplementedError(\n f\"Only 'yaml' and 'hdf' file types are supported. 
You requested {extension}\")", "response": "Writes output data to disk."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef copy_file(self, src_path, file_name, key=None):\n path = os.path.join(self._directories[key], file_name)\n shutil.copyfile(src_path, path)", "response": "Copies a file unmodified to a location inside the ouput directory."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nload combined SAR format logfile in ASCII format.", "response": "def load_file(self):\n '''\n Loads combined SAR format logfile in ASCII format.\n :return: ``True`` if loading and parsing of file went fine, \\\n ``False`` if it failed (at any point)\n '''\n daychunks = self.__split_file()\n\n if (daychunks):\n\n maxcount = len(self.__splitpointers)\n for i in range(maxcount):\n start = self.__splitpointers[i]\n end = None\n if (i < (maxcount - 1)):\n end = self.__splitpointers[i + 1]\n\n chunk = self.__get_chunk(start, end)\n\n parser = sarparse.Parser()\n cpu_usage, mem_usage, swp_usage, io_usage = \\\n parser._parse_file(parser._split_file(chunk))\n\n self.__sarinfos[self.__get_part_date(chunk)] = {\n \"cpu\": cpu_usage,\n \"mem\": mem_usage,\n \"swap\": swp_usage,\n \"io\": io_usage\n }\n del(cpu_usage)\n del(mem_usage)\n del(swp_usage)\n del(io_usage)\n del(parser)\n\n return(True)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the next available chunk from the sar combo file.", "response": "def __get_chunk(self, start=0, end=None):\n '''\n Gets chunk from the sar combo file, from start to end\n :param start: where to start a pulled chunk\n :type start: int.\n :param end: where to end a pulled chunk\n :type end: int.\n :return: str.\n '''\n piece = False\n\n if (self.__filename and os.access(self.__filename, os.R_OK)):\n\n fhandle = None\n\n try:\n fhandle = os.open(self.__filename, os.O_RDONLY)\n except OSError:\n print((\"Couldn't 
open file %s\" % (self.__filename)))\n fhandle = None\n\n if (fhandle):\n\n try:\n sarmap = mmap.mmap(fhandle, length=0, prot=mmap.PROT_READ)\n except (TypeError, IndexError):\n os.close(fhandle)\n traceback.print_exc()\n #sys.exit(-1)\n return False\n\n if (not end):\n end = sarmap.size()\n\n try:\n sarmap.seek(start)\n piece = sarmap.read(end - start)\n except:\n traceback.print_exc()\n\n os.close(fhandle)\n\n return(piece)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsplitting combined SAR output file into separate sections.", "response": "def __split_file(self):\n '''\n Splits combined SAR output file (in ASCII format) in order to\n extract info we need for it, in the format we want.\n :return: ``List``-style of SAR file sections separated by\n the type of info they contain (SAR file sections) without\n parsing what is exactly what at this point\n '''\n # Filename passed checks through __init__\n if (self.__filename and os.access(self.__filename, os.R_OK)):\n\n fhandle = None\n\n try:\n fhandle = os.open(self.__filename, os.O_RDONLY)\n except OSError:\n print((\"Couldn't open file %s\" % (self.__filename)))\n fhandle = None\n\n if (fhandle):\n\n try:\n sarmap = mmap.mmap(fhandle, length=0, prot=mmap.PROT_READ)\n except (TypeError, IndexError):\n os.close(fhandle)\n traceback.print_exc()\n #sys.exit(-1)\n return False\n\n sfpos = sarmap.find(PATTERN_MULTISPLIT, 0)\n\n while (sfpos > -1):\n\n '''Split by day found'''\n self.__splitpointers.append(sfpos)\n\n # Iterate for new position\n try:\n sfpos = sarmap.find(PATTERN_MULTISPLIT, (sfpos + 1))\n except ValueError:\n print(\"ValueError on mmap.find()\")\n return True\n\n if (self.__splitpointers):\n # Not sure if this will work - if empty set\n # goes back as True here\n return True\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the date of the combo part from the file", "response": "def __get_part_date(self, part=''):\n 
'''\n Retrieves date of the combo part from the file\n :param part: Part of the combo file (parsed out whole SAR file\n from the combo\n :type part: str.\n :return: string containing date in ISO format (YYY-MM-DD)\n '''\n if (type(part) is not StringType):\n # We can cope with strings only\n return False\n\n firstline = part.split(\"\\n\")[0]\n\n info = firstline.split()\n datevalue = ''\n\n try:\n datevalue = info[3]\n\n except KeyError:\n datevalue = False\n\n except:\n traceback.print_exc()\n datevalue = False\n\n return(datevalue)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreading the data. json file from CKAN and returns a dict of the data. json.", "response": "def read_ckan_catalog(portal_url):\n \"\"\"Convierte los metadatos de un portal disponibilizados por la Action API\n v3 de CKAN al est\u00e1ndar data.json.\n\n Args:\n portal_url (str): URL de un portal de datos CKAN que soporte la API v3.\n\n Returns:\n dict: Representaci\u00f3n interna de un cat\u00e1logo para uso en las funciones\n de esta librer\u00eda.\n \"\"\"\n portal = RemoteCKAN(portal_url)\n try:\n status = portal.call_action(\n 'status_show', requests_kwargs={\"verify\": False})\n packages_list = portal.call_action(\n 'package_list', requests_kwargs={\"verify\": False})\n groups_list = portal.call_action(\n 'group_list', requests_kwargs={\"verify\": False})\n\n # itera leyendo todos los datasets del portal\n packages = []\n num_packages = len(packages_list)\n for index, pkg in enumerate(packages_list):\n # progreso (necesario cuando son muchos)\n msg = \"Leyendo dataset {} de {}\".format(index + 1, num_packages)\n logger.info(msg)\n\n # agrega un nuevo dataset a la lista\n packages.append(portal.call_action(\n 'package_show', {'id': pkg},\n requests_kwargs={\"verify\": False}\n ))\n\n # tiempo de espera padra evitar baneos\n time.sleep(0.2)\n\n # itera leyendo todos los temas del portal\n groups = [portal.call_action(\n 'group_show', {'id': grp},\n 
requests_kwargs={\"verify\": False})\n for grp in groups_list]\n\n catalog = map_status_to_catalog(status)\n catalog[\"dataset\"] = map_packages_to_datasets(\n packages, portal_url)\n catalog[\"themeTaxonomy\"] = map_groups_to_themes(groups)\n\n except (CKANAPIError, RequestException) as e:\n logger.exception(\n 'Error al procesar el portal %s', portal_url, exc_info=True)\n raise NonParseableCatalog(portal_url, e)\n\n return catalog"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef map_status_to_catalog(status):\n catalog = dict()\n\n catalog_mapping = {\n \"site_title\": \"title\",\n \"site_description\": \"description\"\n }\n\n for status_key, catalog_key in iteritems(catalog_mapping):\n try:\n catalog[catalog_key] = status[status_key]\n except BaseException:\n logger.exception(\"\"\"\n La clave '%s' no est\u00e1 en el endpoint de status. No se puede\n completar catalog['%s'].\"\"\", status_key, catalog_key)\n\n publisher_mapping = {\n \"site_title\": \"name\",\n \"error_emails_to\": \"mbox\"\n }\n\n if any([k in status for k in publisher_mapping.keys()]):\n catalog[\"publisher\"] = dict()\n for status_key, publisher_key in iteritems(publisher_mapping):\n try:\n catalog['publisher'][publisher_key] = status[status_key]\n except BaseException:\n logger.exception(\"\"\"\n La clave '%s' no est\u00e1 en el endpoint de status. No se puede\n completar catalog['publisher'['%s'].\"\"\",\n status_key, publisher_key)\n else:\n logger.info(\"\"\"\n No hay ninguna informaci\u00f3n sobre catalog['publisher'] en el endpoint\n de 'status'.\"\"\")\n\n catalog['superThemeTaxonomy'] = (\n 'http://datos.gob.ar/superThemeTaxonomy.json')\n\n return catalog", "response": "Convierte el resultado de action. 
status_show() en metadata a nivel de\n cat\u00e1logo."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef map_package_to_dataset(package, portal_url):\n dataset = dict()\n resources = package[\"resources\"]\n groups = package[\"groups\"]\n tags = package[\"tags\"]\n\n dataset_mapping = {\n 'title': 'title',\n 'notes': 'description',\n 'metadata_created': 'issued',\n 'metadata_modified': 'modified',\n 'license_title': 'license',\n 'id': 'identifier',\n 'url': 'landingPage'\n }\n\n for package_key, dataset_key in iteritems(dataset_mapping):\n try:\n dataset[dataset_key] = package[package_key]\n except BaseException:\n logger.exception(\"\"\"\n La clave '%s' no est\u00e1 en el endpoint 'package_show' para el\n package '%s'. No se puede completar dataset['%s'].\"\"\",\n package_key, package['name'], dataset_key)\n\n publisher_mapping = {\n 'author': 'name',\n 'author_email': 'mbox'\n }\n\n if any([k in package for k in publisher_mapping.keys()]):\n dataset[\"publisher\"] = dict()\n for package_key, publisher_key in iteritems(publisher_mapping):\n try:\n dataset['publisher'][publisher_key] = package[package_key]\n except BaseException:\n logger.exception(\"\"\"\n La clave '%s' no est\u00e1 en el endpoint 'package_show' para el\n package '%s'. No se puede completar\n dataset['publisher']['%s'].\"\"\",\n package_key, package['name'], publisher_key)\n\n contact_point_mapping = {\n 'maintainer': 'fn',\n 'maintainer_email': 'hasEmail'\n }\n\n if any([k in package for k in contact_point_mapping.keys()]):\n dataset[\"contactPoint\"] = dict()\n for package_key, contact_key in iteritems(contact_point_mapping):\n try:\n dataset['contactPoint'][contact_key] = package[package_key]\n except BaseException:\n logger.exception(\"\"\"\n La clave '%s' no est\u00e1 en el endpoint 'package_show' para el\n package '%s'. 
No se puede completar\n dataset['contactPoint']['%s'].\"\"\",\n package_key, package['name'], contact_key)\n\n # Si existen campos extras en la informaci\u00f3n del package, busco las claves\n # \"Frecuencia de actualizaci\u00f3n\" y \"Tem\u00e1tica global\" para completar los\n # campos \"accrualPeriodicity\" y \"superTheme\" del dataset, respectivamente.\n if \"extras\" in package:\n add_accrualPeriodicity(dataset, package)\n add_superTheme(dataset, package)\n add_temporal(dataset, package)\n\n dataset[\"distribution\"] = map_resources_to_distributions(resources,\n portal_url)\n dataset[\"theme\"] = [grp['name'] for grp in groups]\n dataset['keyword'] = [tag['name'] for tag in tags]\n\n return dataset", "response": "Mapea un diccionario con metadatos de CKAN a un diccionario con un package de CKAN a un diccionario con metadatos de CKAN a un diccionario con un dataset de CKAN."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef map_group_to_theme(group):\n theme = dict()\n\n theme_mapping = {\n 'name': 'id',\n 'title': 'label',\n 'description': 'description'\n }\n\n for group_key, theme_key in iteritems(theme_mapping):\n try:\n theme[theme_key] = group[group_key]\n except BaseException:\n logger.exception(\"\"\"\n La clave '%s' no est\u00e1 en la metadata del 'group' '%s'. 
No\n se puede completar theme['%s'].\"\"\",\n group_key, theme['name'], theme_key)\n\n return theme", "response": "Mapea un diccionario con metadatos de CKAN a un diccionario con metadatos de CKAN a un diccionario con metadatos de CKAN a un diccionario con un CKAN a un diccionario con un CKAN a un diccionario con un CKAN a un diccionario con un CKAN."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef write_tables(tables, path, column_styles=None, cell_styles=None,\n tables_fields=None, tables_names=None):\n \"\"\" Exporta un reporte con varias tablas en CSV o XLSX.\n\n Si la extensi\u00f3n es \".csv\" se crean varias tablas agregando el nombre de la\n tabla al final del \"path\". Si la extensi\u00f3n es \".xlsx\" todas las tablas se\n escriben en el mismo excel.\n\n Args:\n table (dict of (list of dicts)): Conjunto de tablas a ser exportadas\n donde {\n \"table_name\": [{\n \"field_name1\": \"field_value1\",\n \"field_name2\": \"field_value2\",\n \"field_name3\": \"field_value3\"\n }]\n }\n path (str): Path al archivo CSV o XLSX de exportaci\u00f3n.\n \"\"\"\n assert isinstance(path, string_types), \"`path` debe ser un string\"\n assert isinstance(tables, dict), \"`table` es dict de listas de dicts\"\n\n # Deduzco el formato de archivo de `path` y redirijo seg\u00fan corresponda.\n suffix = path.split(\".\")[-1]\n if suffix == \"csv\":\n for table_name, table in tables:\n root_path = \"\".join(path.split(\".\")[:-1])\n table_path = \"{}_{}.csv\".format(root_path, table_name)\n _write_csv_table(table, table_path)\n\n elif suffix == \"xlsx\":\n return _write_xlsx_table(tables, path, column_styles, cell_styles,\n tables_fields=tables_fields,\n tables_names=tables_names)\n\n else:\n raise ValueError(\"\"\"\n{} no es un sufijo reconocido. 
Pruebe con .csv o.xlsx\"\"\".format(suffix))", "response": "Exporta un reporte de un tablelas en CSV o XLSX."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef write_json(obj, path):\n obj_str = text_type(json.dumps(obj, indent=4, separators=(\",\", \": \"),\n ensure_ascii=False))\n\n helpers.ensure_dir_exists(os.path.dirname(path))\n\n with io.open(path, \"w\", encoding='utf-8') as target:\n target.write(obj_str)", "response": "Escribo un objeto a un archivo JSON con codificaci\u00f3n UTF - 8."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef download(url, tries=DEFAULT_TRIES, retry_delay=RETRY_DELAY,\n try_timeout=None, proxies=None, verify=True):\n \"\"\"\n Descarga un archivo a trav\u00e9s del protocolo HTTP, en uno o m\u00e1s intentos.\n\n Args:\n url (str): URL (schema HTTP) del archivo a descargar.\n tries (int): Intentos a realizar (default: 1).\n retry_delay (int o float): Tiempo a esperar, en segundos, entre cada\n intento.\n try_timeout (int o float): Tiempo m\u00e1ximo a esperar por intento.\n proxies (dict): Proxies a utilizar. 
El diccionario debe contener los\n valores 'http' y 'https', cada uno asociados a la URL del proxy\n correspondiente.\n\n Returns:\n bytes: Contenido del archivo\n \"\"\"\n for i in range(tries):\n try:\n return requests.get(url, timeout=try_timeout, proxies=proxies,\n verify=verify).content\n except Exception as e:\n download_exception = e\n\n if i < tries - 1:\n time.sleep(retry_delay)\n\n raise download_exception", "response": "Download a file from a URL."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndownload a file from url to file_path.", "response": "def download_to_file(url, file_path, **kwargs):\n \"\"\"\n Descarga un archivo a trav\u00e9s del protocolo HTTP, en uno o m\u00e1s intentos, y\n escribe el contenido descargado el el path especificado.\n\n Args:\n url (str): URL (schema HTTP) del archivo a descargar.\n file_path (str): Path del archivo a escribir. Si un archivo ya existe\n en el path especificado, se sobrescribir\u00e1 con nuevos contenidos.\n kwargs: Par\u00e1metros para download().\n \"\"\"\n content = download(url, **kwargs)\n with open(file_path, \"wb\") as f:\n f.write(content)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef replace_combiner(value, mutator, *args, **kwargs):\n args = list(args) + [value]\n return mutator(*args, **kwargs)", "response": "Replaces the output of the source or mutator with the output of the subsequent mutator."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set_combiner(value, mutator, *args, **kwargs):\n value.add(mutator(*args, **kwargs))\n return value", "response": "Adds a mutator to the output of the source."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef list_combiner(value, mutator, *args, **kwargs):\n value.append(mutator(*args, **kwargs))\n return value", "response": "Takes a list 
and a mutator function and appends it to the output of the source."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nregister a callable as the producer of a named value.", "response": "def register_value_producer(self, value_name: str, source: Callable[..., pd.DataFrame]=None,\n preferred_combiner: Callable=replace_combiner,\n preferred_post_processor: Callable[..., pd.DataFrame]=None) -> Pipeline:\n \"\"\"Marks a ``Callable`` as the producer of a named value.\n\n Parameters\n ----------\n value_name :\n The name of the new dynamic value pipeline.\n source :\n A callable source for the dynamic value pipeline.\n preferred_combiner :\n A strategy for combining the source and the results of any calls to mutators in the pipeline.\n ``vivarium`` provides the strategies ``replace_combiner`` (the default), ``list_combiner``,\n and ``set_combiner`` which are importable from ``vivarium.framework.values``. Client code\n may define additional strategies as necessary.\n preferred_post_processor :\n A strategy for processing the final output of the pipeline. ``vivarium`` provides the strategies\n ``rescale_post_processor`` and ``joint_value_post_processor`` which are importable from\n ``vivarium.framework.values``. 
Client code may define additional strategies as necessary.\n\n Returns\n -------\n Callable\n A callable reference to the named dynamic value pipeline.\n \"\"\"\n return self._value_manager.register_value_producer(value_name, source,\n preferred_combiner,\n preferred_post_processor)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nmark a callable as the producer of a named rate.", "response": "def register_rate_producer(self, rate_name: str, source: Callable[..., pd.DataFrame]=None) -> Pipeline:\n \"\"\"Marks a ``Callable`` as the producer of a named rate.\n\n This is a convenience wrapper around ``register_value_producer`` that makes sure\n rate data is appropriately scaled to the size of the simulation time step.\n It is equivalent to ``register_value_producer(value_name, source,\n preferred_combiner=replace_combiner, preferred_post_processor=rescale_post_processor)``\n\n Parameters\n ----------\n rate_name :\n The name of the new dynamic rate pipeline.\n source :\n A callable source for the dynamic rate pipeline.\n\n Returns\n -------\n Callable\n A callable reference to the named dynamic rate pipeline.\n \"\"\"\n return self._value_manager.register_rate_producer(rate_name, source)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nregisters a callable as a modifier of a named value in the dynamic value pipeline.", "response": "def register_value_modifier(self, value_name: str, modifier: Callable, priority: int=5):\n \"\"\"Marks a ``Callable`` as the modifier of a named value.\n\n Parameters\n ----------\n value_name :\n The name of the dynamic value pipeline to be modified.\n modifier :\n A function that modifies the source of the dynamic value pipeline when called.\n If the pipeline has a ``replace_combiner``, the modifier should accept the same\n arguments as the pipeline source with an additional last positional argument\n for the results of the previous stage in the pipeline. 
For the ``list_combiner`` and\n ``set_combiner`` strategies, the pipeline modifiers should have the same signature\n as the pipeline source.\n priority : {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}\n An indication of the order in which pipeline modifiers should be called. Modifiers with\n smaller priority values will be called earlier in the pipeline. Modifiers with\n the same priority have no guaranteed ordering, and so should be commutative.\n \"\"\"\n self._value_manager.register_value_modifier(value_name, modifier, priority)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef run(model_specification, results_directory, verbose, log, with_debugger):\n log_level = logging.DEBUG if verbose else logging.ERROR\n logging.basicConfig(filename=log, level=log_level)\n\n try:\n run_simulation(model_specification, results_directory)\n except (BdbQuit, KeyboardInterrupt):\n raise\n except Exception as e:\n if with_debugger:\n import pdb\n import traceback\n traceback.print_exc()\n pdb.post_mortem()\n else:\n logging.exception(\"Uncaught exception {}\".format(e))\n raise", "response": "Runs a simulation from the command line."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nruns a simulation based on the MODEL_SPECIFICATION and profile the run.", "response": "def profile(model_specification, results_directory, process):\n \"\"\"Run a simulation based on the provided MODEL_SPECIFICATION and profile\n the run.\n \"\"\"\n model_specification = Path(model_specification)\n results_directory = Path(results_directory)\n\n out_stats_file = results_directory / f'{model_specification.name}'.replace('yaml', 'stats')\n command = f'run_simulation(\"{model_specification}\", \"{results_directory}\")'\n cProfile.runctx(command, globals=globals(), locals=locals(), filename=out_stats_file)\n\n if process:\n out_txt_file = results_directory / (out_stats_file.name + '.txt')\n with open(out_txt_file, 
'w') as f:\n p = pstats.Stats(str(out_stats_file), stream=f)\n p.sort_stats('cumulative')\n p.print_stats()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a new event which is a copy of this one but with a new index.", "response": "def split(self, new_index):\n \"\"\"Create a new event which is a copy of this one but with a new index.\n \"\"\"\n new_event = Event(new_index, self.user_data)\n new_event.time = self.time\n new_event.step_size = self.step_size\n return new_event"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef emit(self, event):\n if hasattr(event, 'time'):\n event.step_size = self.manager.step_size()\n event.time = self.manager.clock() + self.manager.step_size()\n\n for priority_bucket in self.listeners:\n for listener in sorted(priority_bucket, key=lambda x: x.__name__):\n listener(event)\n return event", "response": "Notifies all listeners that an event has occurred."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nperforms this components simulation setup.", "response": "def setup(self, builder):\n \"\"\"Performs this components simulation setup.\n\n Parameters\n ----------\n builder : vivarium.framework.engine.Builder\n Object giving access to core framework functionality.\n \"\"\"\n self.clock = builder.time.clock()\n self.step_size = builder.time.step_size()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nregisters a new listener to the named event.", "response": "def register_listener(self, name, listener, priority=5):\n \"\"\"Registers a new listener to the named event.\n\n Parameters\n ----------\n name : str\n The name of the event.\n listener : Callable\n The consumer of the named event.\n priority : int\n Number in range(10) used to assign the ordering in which listeners process the event.\n \"\"\"\n 
self._event_types[name].listeners[priority].append(listener)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting and returns an emitter for a named event.", "response": "def get_emitter(self, name: str) -> Callable[[Event], Event]:\n \"\"\"Gets and emitter for a named event.\n\n Parameters\n ----------\n name :\n The name of the event he requested emitter will emit.\n Users may provide their own named events by requesting an emitter with this function,\n but should do so with caution as it makes time much more difficult to think about.\n\n Returns\n -------\n An emitter for the named event. The emitter should be called by the requesting component\n at the appropriate point in the simulation lifecycle.\n \"\"\"\n return self._event_manager.get_emitter(name)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef register_listener(self, name: str, listener: Callable[[Event], None], priority: int=5) -> None:\n self._event_manager.register_listener(name, listener, priority)", "response": "Registers a callable as a listener to a events with the given name."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef setup(self, builder):\n super().setup(builder)\n self.clock = builder.time.clock()\n\n self.excess_mortality_rate = builder.value.register_rate_producer(\n f'{self.state_id}.excess_mortality_rate',\n source=self.risk_deleted_excess_mortality_rate\n )\n self.excess_mortality_rate_paf = builder.value.register_value_producer(\n f'{self.state_id}.excess_mortality_rate.population_attributable_fraction',\n source=lambda index: [pd.Series(0, index=index)],\n preferred_combiner=list_combiner,\n preferred_post_processor=joint_value_post_processor\n )\n\n builder.value.register_value_modifier('mortality_rate', self.add_in_excess_mortality)\n self.population_view = builder.population.get_view(\n [self._model], query=f\"alive 
== 'alive' and {self._model} == '{self.state_id}'\")", "response": "Performs this component s simulation setup."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get(self, index: pd.Index, query: str='', omit_missing_columns: bool=False) -> pd.DataFrame:\n\n pop = self.manager.get_population(True).loc[index]\n\n if self._query:\n pop = pop.query(self._query)\n if query:\n pop = pop.query(query)\n\n if not self._columns:\n return pop\n else:\n if omit_missing_columns:\n columns = list(set(self._columns).intersection(pop.columns))\n else:\n columns = self._columns\n try:\n return pop[columns].copy()\n except KeyError:\n non_existent_columns = set(columns) - set(pop.columns)\n raise PopulationError(f'Requested column(s) {non_existent_columns} not in population table.')", "response": "For the rows in index get the columns from the simulation s population which this view is configured."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nupdating the simulation s state to match the given pop.", "response": "def update(self, pop: Union[pd.DataFrame, pd.Series]):\n \"\"\"Update the simulation's state to match ``pop``\n\n Parameters\n ----------\n pop :\n The data which should be copied into the simulation's state. If ``pop`` is a DataFrame only those columns\n included in the view's columns will be used. 
If ``pop`` is a Series it must have a name that matches\n one of the view's columns unless the view only has one column in which case the Series will be assumed to\n refer to that regardless of its name.\n \"\"\"\n\n if not pop.empty:\n if isinstance(pop, pd.Series):\n if pop.name in self._columns:\n affected_columns = [pop.name]\n elif len(self._columns) == 1:\n affected_columns = self._columns\n else:\n raise PopulationError('Cannot update with a Series unless the series name equals a column '\n 'name or there is only a single column in the view')\n else:\n affected_columns = set(pop.columns)\n\n affected_columns = set(affected_columns).intersection(self._columns)\n state_table = self.manager.get_population(True)\n if not self.manager.growing:\n affected_columns = set(affected_columns).intersection(state_table.columns)\n\n for c in affected_columns:\n if c in state_table:\n v = state_table[c].values\n if isinstance(pop, pd.Series):\n v2 = pop.values\n else:\n v2 = pop[c].values\n v[pop.index] = v2\n\n if v.dtype != v2.dtype:\n # This happens when the population is being grown because extending\n # the index forces columns that don't have a natural null type\n # to become 'object'\n if not self.manager.growing:\n raise PopulationError('Component corrupting population table. 
'\n 'Old column type: {} New column type: {}'.format(v.dtype, v2.dtype))\n v = v.astype(v2.dtype)\n else:\n if isinstance(pop, pd.Series):\n v = pop.values\n else:\n v = pop[c].values\n self.manager._population[c] = v"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a configured PopulationView for the given columns.", "response": "def get_view(self, columns: Sequence[str], query: str=None) -> PopulationView:\n \"\"\"Return a configured PopulationView\n\n Notes\n -----\n Client code should only need this (and only through the version exposed as\n ``population_view`` on the builder during setup) if it uses dynamically\n generated column names that aren't known at definition time. Otherwise\n components should use ``uses_columns``.\n \"\"\"\n if 'tracked' not in columns:\n query_with_track = query + 'and tracked == True' if query else 'tracked == True'\n return PopulationView(self, columns, query_with_track)\n return PopulationView(self, columns, query)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets a time - varying view of the state table.", "response": "def get_view(self, columns: Sequence[str], query: str = None) -> PopulationView:\n \"\"\"Get a time-varying view of the population state table.\n\n The requested population view can be used to view the current state or to update the state\n with new values.\n\n Parameters\n ----------\n columns :\n A subset of the state table columns that will be available in the returned view.\n query :\n A filter on the population state. This filters out particular rows (simulants) based\n on their current state. 
The query should be provided in a way that is understood by\n the ``pandas.DataFrame.query`` method and may reference state table columns not\n requested in the ``columns`` argument.\n\n Returns\n -------\n PopulationView\n A filtered view of the requested columns of the population state table.\n \"\"\"\n return self._population_manager.get_view(columns, query)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_simulant_creator(self) -> Callable[[int, Union[Mapping[str, Any], None]], pd.Index]:\n return self._population_manager.get_simulant_creator()", "response": "Returns a function that creates new simulants and returns a reference to the function that creates new simulants."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nmarks a callable as a source of initial state information about new simulants.", "response": "def initializes_simulants(self, initializer: Callable[[SimulantData], None],\n creates_columns: Sequence[str]=(),\n requires_columns: Sequence[str]=()):\n \"\"\"Marks a callable as a source of initial state information for new simulants.\n\n Parameters\n ----------\n initializer :\n A callable that adds or updates initial state information about new simulants.\n creates_columns :\n A list of the state table columns that the given initializer provides the initial state information for.\n requires_columns :\n A list of the state table columns that already need to be present and populated\n in the state table before the provided initializer is called.\n \"\"\"\n self._population_manager.register_simulant_initializer(initializer, creates_columns, requires_columns)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef ensure_dir_exists(directory):\n if directory and not os.path.exists(directory):\n os.makedirs(directory)", "response": "Se asegura de que un directorio exista."} {"SOURCE": "codesearchnet", 
"instruction": "Create a Python 3 function for\ntesting if a list of dictionaries comprueba que una lista est\u00e9 compuesta \u00fanicamente por diccionarios.", "response": "def is_list_of_matching_dicts(list_of_dicts, expected_keys=None):\n \"\"\"Comprueba que una lista est\u00e9 compuesta \u00fanicamente por diccionarios,\n que comparten exactamente las mismas claves.\n\n Args:\n list_of_dicts (list): Lista de diccionarios a comparar.\n expected_keys (set): Conjunto de las claves que cada diccionario debe\n tener. Si no se incluye, se asume que son las claves del primer\n diccionario de la lista.\n\n Returns:\n bool: True si todos los diccionarios comparten sus claves.\n \"\"\"\n\n if isinstance(list_of_dicts, list) and len(list_of_dicts) == 0:\n return False\n\n is_not_list_msg = \"\"\"\n{} no es una lista. Debe ingresar una lista\"\"\".format(list_of_dicts)\n assert isinstance(list_of_dicts, list), is_not_list_msg\n\n not_all_dicts_msg = \"\"\"\nNo todos los elementos de {} son diccionarios. 
Ingrese una lista compuesta\nsolo por diccionarios.\"\"\".format(list_of_dicts)\n assert all([isinstance(d, dict) for d in list_of_dicts]), not_all_dicts_msg\n\n # Si no se pasan expected_keys, se las toma del primer diccionario\n expected_keys = expected_keys or set(list_of_dicts[0].keys())\n\n elements = [set(d.keys()) == expected_keys for d in list_of_dicts]\n\n return all(elements)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef parse_repeating_time_interval_to_days(date_str):\n\n intervals = {\n 'Y': 365,\n 'M': 30,\n 'W': 7,\n 'D': 1,\n 'H': 0,\n 'S': 0\n }\n\n if date_str.find('R/P') != 0: # Periodicity mal formada\n return 0\n\n date_str = date_str.strip('R/P')\n days = 0\n index = 0\n for interval in intervals:\n value_end = date_str.find(interval)\n if value_end < 0:\n continue\n try:\n days += int(float(date_str[index:value_end]) * intervals[interval])\n # Valor de accrualPeriodicity inv\u00e1lido, se toma como 0\n except ValueError:\n continue\n index = value_end\n\n # Si el n\u00famero de d\u00edas es menor lo redondeamos a 1\n return max(days, 1)", "response": "Parse a string con un intervalo de tiempo con repetici\u00f3n especificado\n Devuelve 0 en caso de que el intervalo sea inv\u00e1lido."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef parse_repeating_time_interval_to_str(date_str):\n\n with open(os.path.join(ABSOLUTE_SCHEMA_DIR,\n \"accrualPeriodicity.json\"), \"r\") as f:\n freqs_map = {freq[\"id\"]: freq[\"description\"]\n for freq in json.load(f)}\n\n return freqs_map[date_str]", "response": "Parse a date string to a list of intervals."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef find_ws_name(wb, name):\n if isinstance(wb, string_types):\n # FIXME: importar o borrar segun corresponda\n wb = load_workbook(wb, read_only=True, data_only=True)\n\n for sheetname in wb.sheetnames:\n if 
sheetname.lower() == name.lower():\n return sheetname\n\n raise Exception(\"No existe la hoja {}\".format(name))", "response": "Busca una hoja en un workbook sin importar may\u00fasculas / min\u00fasculas."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef generate_distribution_ids(catalog):\n\n for dataset in catalog.get(\"dataset\", []):\n for distribution_index, distribution in enumerate(\n dataset.get(\"distribution\", [])):\n if \"identifier\" not in distribution:\n distribution[\"identifier\"] = \"{}_{}\".format(\n dataset[\"identifier\"], distribution_index)", "response": "Genera identificadores para los distribuciones que no los tienen."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _get_theme_label(catalog, theme):\n try:\n label = catalog.get_theme(identifier=theme)['label']\n except BaseException:\n try:\n label = catalog.get_theme(label=theme)['label']\n except BaseException:\n raise ce.ThemeNonExistentError(theme)\n\n label = re.sub(r'[^\\w\u00e1-\u00fa\u00c1-\u00da\u00f1\u00d1 .-]+', '',\n label, flags=re.UNICODE)\n return label", "response": "Intenta conseguir el theme por id o por label."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nmaking a backup of the given list of catalogs.", "response": "def make_catalogs_backup(catalogs, local_catalogs_dir=\"\",\n include_metadata=True, include_data=True,\n include_metadata_xlsx=False, use_short_path=False):\n \"\"\"Realiza una copia local de los datos y metadatos de un cat\u00e1logo.\n\n Args:\n catalogs (list or dict): Lista de cat\u00e1logos (elementos que pueden\n ser interpretados por DataJson como cat\u00e1logos) o diccionario\n donde las keys se interpretan como los catalog_identifier:\n {\n \"modernizacion\":\n \"http://infra.datos.gob.ar/catalog/modernizacion/data.json\"\n }\n Cuando es una lista, los ids se toman de catalog_identifer, y\n se ignoran los 
cat\u00e1logos que no tengan catalog_identifier.\n Cuando se pasa un diccionario, los keys reemplazan a los\n catalog_identifier (estos no se leeen).\n catalog_id (str): Si se especifica, se usa este identificador para el\n backup. Si no se espedifica, se usa catalog[\"identifier\"].\n local_catalogs_dir (str): Directorio local en el cual se va a crear\n la carpeta \"catalog/...\" con todos los cat\u00e1logos.\n include_metadata (bool): Si es verdadero, se generan los archivos\n data.json y catalog.xlsx.\n include_data (bool): Si es verdadero, se descargan todas las\n distribuciones de todos los cat\u00e1logos.\n\n Return:\n None\n \"\"\"\n\n if isinstance(catalogs, list):\n for catalog in catalogs:\n try:\n make_catalog_backup(\n catalog,\n local_catalogs_dir=local_catalogs_dir,\n include_metadata=include_metadata,\n include_metadata_xlsx=include_metadata_xlsx,\n include_data=include_data,\n use_short_path=use_short_path)\n except Exception:\n logger.exception(\"ERROR en {}\".format(catalog))\n\n elif isinstance(catalogs, dict):\n for catalog_id, catalog in catalogs.iteritems():\n try:\n make_catalog_backup(\n catalog, catalog_id,\n local_catalogs_dir=local_catalogs_dir,\n include_metadata=include_metadata,\n include_metadata_xlsx=include_metadata_xlsx,\n include_data=include_data,\n use_short_path=use_short_path)\n except Exception:\n logger.exception(\n \"ERROR en {} ({})\".format(catalog, catalog_id))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a backup of the catalog.", "response": "def make_catalog_backup(catalog, catalog_id=None, local_catalogs_dir=\"\",\n include_metadata=True, include_data=True,\n include_datasets=None,\n include_distribution_formats=['CSV', 'XLS'],\n include_metadata_xlsx=True, use_short_path=False):\n \"\"\"Realiza una copia local de los datos y metadatos de un cat\u00e1logo.\n\n Args:\n catalog (dict or str): Representaci\u00f3n externa/interna de un cat\u00e1logo.\n Una representaci\u00f3n 
_externa_ es un path local o una URL remota a un\n archivo con la metadata de un cat\u00e1logo, en formato JSON o XLSX. La\n representaci\u00f3n _interna_ de un cat\u00e1logo es un diccionario.\n catalog_id (str): Si se especifica, se usa este identificador para el\n backup. Si no se especifica, se usa catalog[\"identifier\"].\n local_catalogs_dir (str): Directorio local en el cual se va a crear\n la carpeta \"catalog/...\" con todos los cat\u00e1logos.\n include_metadata (bool): Si es verdadero, se generan los archivos\n data.json y catalog.xlsx.\n include_data (bool): Si es verdadero, se descargan todas las\n distribuciones de todos los cat\u00e1logos.\n include_datasets (list): Si se especifica, se descargan \u00fanicamente los\n datasets indicados. Si no, se descargan todos.\n include_distribution_formats (list): Si se especifica, se descargan\n \u00fanicamente las distribuciones de los formatos indicados. Si no, se\n descargan todas.\n use_short_path (bool): No implementado. Si es verdadero, se utiliza una\n jerarqu\u00eda de directorios simplificada. 
Caso contrario, se replica\n la existente en infra.\n Return:\n None\n \"\"\"\n\n catalog = pydatajson.DataJson(catalog)\n catalog_identifier = catalog_id if catalog_id else catalog[\"identifier\"]\n\n if include_metadata:\n logger.info(\n \"Descargando cat\u00e1logo {}\".format(\n catalog_identifier.ljust(30)))\n\n # cat\u00e1logo en json\n catalog_path = get_catalog_path(catalog_identifier, local_catalogs_dir)\n ensure_dir_exists(os.path.dirname(catalog_path))\n catalog.to_json(catalog_path)\n\n if include_metadata_xlsx:\n # cat\u00e1logo en xlsx\n catalog_path = get_catalog_path(\n catalog_identifier, local_catalogs_dir, fmt=\"xlsx\")\n ensure_dir_exists(os.path.dirname(catalog_path))\n catalog.to_xlsx(catalog_path)\n\n if include_data:\n distributions = catalog.distributions\n distributions_num = len(distributions)\n\n for index, distribution in enumerate(distributions):\n logger.info(\"Descargando distribuci\u00f3n {} de {} ({})\".format(\n index + 1, distributions_num, catalog_identifier))\n\n dataset_id = distribution[\"dataset_identifier\"]\n\n if include_datasets and (dataset_id not in include_datasets):\n pass\n else:\n distribution_id = distribution[\"identifier\"]\n distribution_download_url = distribution[\"downloadURL\"]\n\n # si no se especifica un file name, se toma de la URL\n distribution_file_name = distribution.get(\n \"fileName\",\n distribution_download_url[\n distribution_download_url.rfind(\"/\") + 1:]\n )\n\n # si no espicifica un formato, toma de distribution_file_name\n # asume que formato est\u00e1 al menos en distribution_file_name\n distribution_format = distribution.get(\n \"format\",\n distribution_file_name[\n distribution_file_name.rfind(\".\") + 1:]\n )\n if (include_distribution_formats and\n (distribution_format\n not in include_distribution_formats)):\n pass\n else:\n\n # genera el path local donde descargar el archivo\n file_path = get_distribution_path(\n catalog_identifier, dataset_id, distribution_id,\n 
distribution_file_name, local_catalogs_dir,\n use_short_path=use_short_path)\n ensure_dir_exists(os.path.dirname(file_path))\n\n # decarga el archivo\n download_to_file(distribution_download_url, file_path)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_distribution_path(catalog_id, dataset_id, distribution_id,\n distribution_file_name, catalogs_dir=CATALOGS_DIR,\n use_short_path=False):\n \"\"\"Genera el path est\u00e1ndar de un cat\u00e1logo en un filesystem.\"\"\"\n if use_short_path:\n distribution_dir = get_distribution_dir(\n catalog_id, dataset_id, distribution_id, catalogs_dir,\n use_short_path=True)\n distribution_file_path = os.path.join(\n distribution_dir, distribution_file_name)\n else:\n distribution_dir = get_distribution_dir(\n catalog_id, dataset_id, distribution_id, catalogs_dir,\n use_short_path=False)\n distribution_file_path = os.path.join(\n distribution_dir, \"download\", distribution_file_name)\n\n return os.path.abspath(distribution_file_path)", "response": "Genera el path est\u00e1ndar un cat\u00e1logo en un filesystem."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_catalog_path(catalog_id, catalogs_dir=CATALOGS_DIR, fmt=\"json\"):\n\n base_path = os.path.join(catalogs_dir, \"catalog\", catalog_id)\n if fmt == \"json\":\n return os.path.join(base_path, \"data.json\")\n elif fmt == \"xlsx\":\n return os.path.join(base_path, \"catalog.xlsx\")\n else:\n raise NotImplementedError(\"El formato {} no est\u00e1 implementado.\".format(\n fmt))", "response": "Genera el path est\u00e1ndar un cat\u00e1logo en un filesystem."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef main(catalogs, include_data=True, use_short_path=True):\n include_data = bool(int(include_data))\n make_catalogs_backup(catalogs.split(\n \",\"), include_data=include_data, 
use_short_path=use_short_path)", "response": "A function that makes a backup of the given list of catalogs."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngenerates a README file for a given catalog.", "response": "def generate_readme(catalog, export_path=None):\n \"\"\"Genera una descripci\u00f3n textual en formato Markdown sobre los\n metadatos generales de un cat\u00e1logo (t\u00edtulo, editor, fecha de\n publicaci\u00f3n, et cetera), junto con:\n - estado de los metadatos a nivel cat\u00e1logo,\n - estado global de los metadatos,\n - cantidad de datasets federados y no federados,\n - detalles de los datasets no federados\n - cantidad de datasets y distribuciones incluidas\n\n Es utilizada por la rutina diaria de `libreria-catalogos` para generar\n un README con informaci\u00f3n b\u00e1sica sobre los cat\u00e1logos mantenidos.\n\n Args:\n catalog (str o dict): Path a un cat\u00e1logo en cualquier formato,\n JSON, XLSX, o diccionario de python.\n export_path (str): Path donde exportar el texto generado (en\n formato Markdown). 
Si se especifica, el m\u00e9todo no devolver\u00e1\n nada.\n\n Returns:\n str: Texto de la descripci\u00f3n generada.\n \"\"\"\n # Si se paso una ruta, guardarla\n if isinstance(catalog, string_types):\n catalog_path_or_url = catalog\n else:\n catalog_path_or_url = None\n\n catalog = read_catalog(catalog)\n validation = validate_catalog(catalog)\n # Solo necesito indicadores para un catalogo\n indicators = generate_catalogs_indicators(\n catalog, CENTRAL_CATALOG)[0][0]\n\n with io.open(os.path.join(TEMPLATES_PATH, 'catalog_readme.txt'), 'r',\n encoding='utf-8') as template_file:\n readme_template = template_file.read()\n\n not_federated_datasets_list = \"\\n\".join([\n \"- [{}]({})\".format(dataset[0], dataset[1])\n for dataset in indicators[\"datasets_no_federados\"]\n ])\n federated_removed_datasets_list = \"\\n\".join([\n \"- [{}]({})\".format(dataset[0], dataset[1])\n for dataset in indicators[\"datasets_federados_eliminados\"]\n ])\n federated_datasets_list = \"\\n\".join([\n \"- [{}]({})\".format(dataset[0], dataset[1])\n for dataset in indicators[\"datasets_federados\"]\n ])\n non_federated_pct = 1.0 - indicators[\"datasets_federados_pct\"] if \\\n indicators[\"datasets_federados_pct\"] is not None else \\\n indicators[\"datasets_federados_pct\"]\n content = {\n \"title\": catalog.get(\"title\"),\n \"publisher_name\": traverse_dict(\n catalog, [\"publisher\", \"name\"]),\n \"publisher_mbox\": traverse_dict(\n catalog, [\"publisher\", \"mbox\"]),\n \"catalog_path_or_url\": catalog_path_or_url,\n \"description\": catalog.get(\"description\"),\n \"global_status\": validation[\"status\"],\n \"catalog_status\": validation[\"error\"][\"catalog\"][\"status\"],\n \"no_of_datasets\": len(catalog[\"dataset\"]),\n \"no_of_distributions\": sum([len(dataset[\"distribution\"]) for\n dataset in catalog[\"dataset\"]]),\n \"federated_datasets\": indicators[\"datasets_federados_cant\"],\n \"not_federated_datasets\": indicators[\"datasets_no_federados_cant\"],\n 
\"not_federated_datasets_pct\": non_federated_pct,\n \"not_federated_datasets_list\": not_federated_datasets_list,\n \"federated_removed_datasets_list\": federated_removed_datasets_list,\n \"federated_datasets_list\": federated_datasets_list,\n }\n\n catalog_readme = readme_template.format(**content)\n\n if export_path:\n with io.open(export_path, 'w+', encoding='utf-8') as target:\n target.write(catalog_readme)\n else:\n return catalog_readme"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef setup(self, builder: Builder):\n self.config = builder.configuration\n\n self.with_common_random_numbers = bool(self.config.randomness.key_columns)\n self.register = builder.randomness.register_simulants\n if (self.with_common_random_numbers\n and not ['entrance_time', 'age'] == self.config.randomness.key_columns):\n raise ValueError(\"If running with CRN, you must specify ['entrance_time', 'age'] as\"\n \"the randomness key columns.\")\n\n self.age_randomness = builder.randomness.get_stream('age_initialization',\n for_initialization=self.with_common_random_numbers)\n self.sex_randomness = builder.randomness.get_stream('sex_initialization')\n\n columns_created = ['age', 'sex', 'alive', 'entrance_time']\n builder.population.initializes_simulants(self.on_initialize_simulants,\n creates_columns=columns_created)\n\n self.population_view = builder.population.get_view(columns_created)\n\n builder.event.register_listener('time_step', self.age_simulants)", "response": "Performs the simulation setup."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncall by the simulation when new simulants are added. 
This component is responsible for creating and filling the new simulants in the population state table.", "response": "def on_initialize_simulants(self, pop_data: SimulantData):\n \"\"\"Called by the simulation whenever new simulants are added.\n\n This component is responsible for creating and filling four columns\n in the population state table:\n\n 'age' :\n The age of the simulant in fractional years.\n 'sex' :\n The sex of the simulant. One of {'Male', 'Female'}\n 'alive' :\n Whether or not the simulant is alive. One of {'alive', 'dead'}\n 'entrance_time' :\n The time that the simulant entered the simulation. The 'birthday'\n for simulants that enter as newborns. A `pandas.Timestamp`.\n\n Parameters\n ----------\n pop_data :\n A record containing the index of the new simulants, the\n start of the time step the simulants are added on, the width\n of the time step, and the age boundaries for the simulants to\n generate.\n\n \"\"\"\n\n age_start = self.config.population.age_start\n age_end = self.config.population.age_end\n if age_start == age_end:\n age_window = pop_data.creation_window / pd.Timedelta(days=365)\n else:\n age_window = age_end - age_start\n\n age_draw = self.age_randomness.get_draw(pop_data.index)\n age = age_start + age_draw * age_window\n\n if self.with_common_random_numbers:\n population = pd.DataFrame({'entrance_time': pop_data.creation_time,\n 'age': age.values}, index=pop_data.index)\n self.register(population)\n population['sex'] = self.sex_randomness.choice(pop_data.index, ['Male', 'Female'])\n population['alive'] = 'alive'\n else:\n population = pd.DataFrame(\n {'age': age.values,\n 'sex': self.sex_randomness.choice(pop_data.index, ['Male', 'Female']),\n 'alive': pd.Series('alive', index=pop_data.index),\n 'entrance_time': pop_data.creation_time},\n index=pop_data.index)\n\n self.population_view.update(population)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate the age of the simulant in 
the simulation view every time step.", "response": "def age_simulants(self, event: Event):\n \"\"\"Updates simulant age on every time step.\n\n Parameters\n ----------\n event :\n An event object emitted by the simulation containing an index\n representing the simulants affected by the event and timing\n information.\n \"\"\"\n population = self.population_view.get(event.index, query=\"alive == 'alive'\")\n population['age'] += event.step_size / pd.Timedelta(days=365)\n self.population_view.update(population)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _generate_indicators(catalog, validator=None, only_numeric=False):\n\n result = {}\n\n # Obtengo summary para los indicadores del estado de los metadatos\n result.update(_generate_status_indicators(catalog, validator=validator))\n\n # Genero los indicadores relacionados con fechas, y los agrego\n result.update(\n _generate_date_indicators(catalog, only_numeric=only_numeric))\n\n # Agrego la cuenta de los formatos de las distribuciones\n if not only_numeric:\n if 'dataset' in catalog:\n format_count = count_fields(get_distributions(catalog), 'format')\n type_count = count_fields(get_distributions(catalog), 'type')\n license_count = count_fields(get_datasets(catalog), 'license')\n else:\n format_count = type_count = license_count = {}\n\n result.update({\n 'distribuciones_formatos_cant': format_count,\n 'distribuciones_tipos_cant': type_count,\n 'datasets_licencias_cant': license_count,\n })\n\n # Agrego porcentaje de campos recomendados/optativos usados\n fields_count = _count_required_and_optional_fields(catalog)\n recomendados_pct = float(fields_count['recomendado']) / \\\n fields_count['total_recomendado']\n optativos_pct = float(fields_count['optativo']) / \\\n fields_count['total_optativo']\n result.update({\n 'campos_recomendados_pct': round(recomendados_pct, 4),\n 'campos_optativos_pct': round(optativos_pct, 4)\n })\n return fields_count, 
result", "response": "Genera los indicadores de un cat\u00e1logo individual."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _federation_indicators(catalog, central_catalog,\n identifier_search=False):\n \"\"\"Cuenta la cantidad de datasets inclu\u00eddos tanto en la lista\n 'catalogs' como en el cat\u00e1logo central, y genera indicadores a partir\n de esa informaci\u00f3n.\n\n Args:\n catalog (dict): cat\u00e1logo ya parseado\n central_catalog (str o dict): ruta a cat\u00e1logo central, o un dict\n con el cat\u00e1logo ya parseado\n \"\"\"\n result = {\n 'datasets_federados_cant': None,\n 'datasets_federados_pct': None,\n 'datasets_no_federados_cant': None,\n 'datasets_federados_eliminados_cant': None,\n 'distribuciones_federadas_cant': None,\n 'datasets_federados_eliminados': [],\n 'datasets_no_federados': [],\n 'datasets_federados': [],\n\n }\n try:\n central_catalog = readers.read_catalog(central_catalog)\n except Exception as e:\n msg = u'Error leyendo el cat\u00e1logo central: {}'.format(str(e))\n logger.error(msg)\n return result\n\n generator = FederationIndicatorsGenerator(central_catalog, catalog,\n id_based=identifier_search)\n result.update({\n 'datasets_federados_cant':\n generator.datasets_federados_cant(),\n 'datasets_no_federados_cant':\n generator.datasets_no_federados_cant(),\n 'datasets_federados_eliminados_cant':\n generator.datasets_federados_eliminados_cant(),\n 'datasets_federados_eliminados':\n generator.datasets_federados_eliminados(),\n 'datasets_no_federados':\n generator.datasets_no_federados(),\n 'datasets_federados':\n generator.datasets_federados(),\n 'datasets_federados_pct':\n generator.datasets_federados_pct(),\n 'distribuciones_federadas_cant':\n generator.distribuciones_federadas_cant()\n })\n return result", "response": "Generate a list of all the federation indicators for a given catalog and central catalog."} {"SOURCE": "codesearchnet", "instruction": "Can you 
generate the documentation for the following Python 3 function\ndef _network_indicator_percentages(fields, network_indicators):\n\n # Los porcentuales no se pueden sumar, tienen que ser recalculados\n percentages = {\n 'datasets_meta_ok_pct':\n (network_indicators.get('datasets_meta_ok_cant'),\n network_indicators.get('datasets_meta_error_cant')),\n 'datasets_actualizados_pct':\n (network_indicators.get('datasets_actualizados_cant'),\n network_indicators.get('datasets_desactualizados_cant')),\n 'datasets_federados_pct':\n (network_indicators.get('datasets_federados_cant'),\n network_indicators.get('datasets_no_federados_cant')),\n 'datasets_con_datos_pct':\n (network_indicators.get('datasets_con_datos_cant'),\n network_indicators.get('datasets_sin_datos_cant')),\n }\n\n for indicator in percentages:\n pct = 0.00\n partial = percentages[indicator][0] or 0\n total = partial + (percentages[indicator][1] or 0)\n # Evita division por 0\n if total:\n pct = float(partial) / total\n network_indicators[indicator] = round(pct, 4)\n\n # % de campos recomendados y optativos utilizados en el cat\u00e1logo entero\n if fields: # 'fields' puede estar vac\u00edo si ning\u00fan campo es v\u00e1lido\n rec_pct = float(fields['recomendado']) / \\\n fields['total_recomendado']\n\n opt_pct = float(fields['optativo']) / fields['total_optativo']\n\n network_indicators.update({\n 'campos_recomendados_pct': round(rec_pct, 4),\n 'campos_optativos_pct': round(opt_pct, 4)\n })", "response": "Encapsula el c\u00e1lculo de indicadores de porcentaje de los cantidad de nodos entera."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngenerates status indicators for a set of items in the base cat\u00e1logo.", "response": "def _generate_status_indicators(catalog, validator=None):\n \"\"\"Genera indicadores b\u00e1sicos sobre el estado de un cat\u00e1logo\n\n Args:\n catalog (dict): diccionario de un data.json parseado\n\n Returns:\n dict: indicadores 
b\u00e1sicos sobre el cat\u00e1logo, tal como la cantidad\n de datasets, distribuciones y n\u00famero de errores\n \"\"\"\n result = {\n 'datasets_cant': None,\n 'distribuciones_cant': None,\n 'datasets_meta_ok_cant': None,\n 'datasets_meta_error_cant': None,\n 'datasets_meta_ok_pct': None,\n 'datasets_con_datos_cant': None,\n 'datasets_sin_datos_cant': None,\n 'datasets_con_datos_pct': None\n }\n try:\n summary = generate_datasets_summary(catalog, validator=validator)\n except Exception as e:\n msg = u'Error generando resumen del cat\u00e1logo {}: {}'.format(\n catalog['title'], str(e))\n logger.error(msg)\n return result\n\n cant_ok = 0\n cant_error = 0\n cant_data = 0\n cant_without_data = 0\n cant_distribuciones = 0\n datasets_total = len(summary)\n for dataset in summary:\n cant_distribuciones += dataset['cant_distribuciones']\n\n # chequea si el dataset tiene datos\n if dataset['tiene_datos'] == \"SI\":\n cant_data += 1\n else: # == \"ERROR\"\n cant_without_data += 1\n\n # chequea estado de los metadatos\n if dataset['estado_metadatos'] == \"OK\":\n cant_ok += 1\n else: # == \"ERROR\"\n cant_error += 1\n\n datasets_ok_pct = 0\n datasets_with_data_pct = 0\n if datasets_total:\n datasets_ok_pct = round(float(cant_ok) / datasets_total, 4)\n datasets_with_data_pct = round(float(cant_data) / datasets_total, 4)\n\n result.update({\n 'datasets_cant': datasets_total,\n 'distribuciones_cant': cant_distribuciones,\n 'datasets_meta_ok_cant': cant_ok,\n 'datasets_meta_error_cant': cant_error,\n 'datasets_meta_ok_pct': datasets_ok_pct,\n 'datasets_con_datos_cant': cant_data,\n 'datasets_sin_datos_cant': cant_without_data,\n 'datasets_con_datos_pct': datasets_with_data_pct\n\n })\n return result"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _generate_date_indicators(catalog, tolerance=0.2, only_numeric=False):\n result = {\n 'datasets_desactualizados_cant': None,\n 'datasets_actualizados_cant': 
None,\n 'datasets_actualizados_pct': None,\n 'catalogo_ultima_actualizacion_dias': None\n }\n if not only_numeric:\n result.update({\n 'datasets_frecuencia_cant': {}\n })\n\n try:\n dias_ultima_actualizacion =\\\n _days_from_last_update(catalog, \"modified\")\n if not dias_ultima_actualizacion:\n dias_ultima_actualizacion =\\\n _days_from_last_update(catalog, \"issued\")\n\n result['catalogo_ultima_actualizacion_dias'] = \\\n dias_ultima_actualizacion\n\n except Exception as e:\n msg = u'Error generando indicadores de fecha del cat\u00e1logo {}: {}'\\\n .format(catalog['title'], str(e))\n logger.error(msg)\n return result\n\n actualizados = 0\n desactualizados = 0\n periodicity_amount = {}\n\n for dataset in catalog.get('dataset', []):\n # Parseo la fecha de publicaci\u00f3n, y la frecuencia de actualizaci\u00f3n\n periodicity = dataset.get('accrualPeriodicity')\n if not periodicity:\n continue\n # Si la periodicity es eventual, se considera como actualizado\n if periodicity == 'eventual':\n actualizados += 1\n prev_periodicity = periodicity_amount.get(periodicity, 0)\n periodicity_amount[periodicity] = prev_periodicity + 1\n continue\n\n # dataset sin fecha de \u00faltima actualizaci\u00f3n es desactualizado\n if \"modified\" not in dataset:\n desactualizados += 1\n else:\n # Calculo el per\u00edodo de d\u00edas que puede pasar sin actualizarse\n # Se parsea el per\u00edodo especificado por accrualPeriodicity,\n # cumple con el est\u00e1ndar ISO 8601 para tiempos con repetici\u00f3n\n try:\n date = helpers.parse_date_string(dataset['modified'])\n days_diff = float((datetime.now() - date).days)\n interval = helpers.parse_repeating_time_interval(\n periodicity) * \\\n (1 + tolerance)\n except Exception as e:\n msg = u'Error generando indicadores'\\\n u'de fecha del dataset {} en {}: {}'\n msg.format(dataset['identifier'], catalog['title'], str(e))\n logger.error(msg)\n # Asumo desactualizado\n desactualizados += 1\n continue\n\n if days_diff < interval:\n 
actualizados += 1\n else:\n desactualizados += 1\n\n prev_periodicity = periodicity_amount.get(periodicity, 0)\n periodicity_amount[periodicity] = prev_periodicity + 1\n\n datasets_total = len(catalog.get('dataset', []))\n actualizados_pct = 0\n if datasets_total:\n actualizados_pct = float(actualizados) / datasets_total\n result.update({\n 'datasets_desactualizados_cant': desactualizados,\n 'datasets_actualizados_cant': actualizados,\n 'datasets_actualizados_pct': round(actualizados_pct, 4)\n })\n if not only_numeric:\n result.update({\n 'datasets_frecuencia_cant': periodicity_amount\n })\n return result", "response": "Genera indicadores relacionados a las fechas de publicaci\u00f3n de un cat\u00e1logo pasado por par\u00e1metro."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn the number of days from last update.", "response": "def _days_from_last_update(catalog, date_field=\"modified\"):\n \"\"\"Calcula d\u00edas desde la \u00faltima actualizaci\u00f3n del cat\u00e1logo.\n\n Args:\n catalog (dict): Un cat\u00e1logo.\n date_field (str): Campo de metadatos a utilizar para considerar\n los d\u00edas desde la \u00faltima actualizaci\u00f3n del cat\u00e1logo.\n\n Returns:\n int or None: Cantidad de d\u00edas desde la \u00faltima actualizaci\u00f3n del\n cat\u00e1logo o None, si no pudo ser calculada.\n \"\"\"\n\n # el \"date_field\" se busca primero a nivel cat\u00e1logo, luego a nivel\n # de cada dataset, y nos quedamos con el que sea m\u00e1s reciente\n date_modified = catalog.get(date_field, None)\n dias_ultima_actualizacion = None\n # \"date_field\" a nivel de cat\u00e1logo puede no ser obligatorio,\n # si no est\u00e1 pasamos\n if isinstance(date_modified, string_types):\n date = helpers.parse_date_string(date_modified)\n dias_ultima_actualizacion = (\n datetime.now() - date).days if date else None\n\n for dataset in catalog.get('dataset', []):\n date = helpers.parse_date_string(dataset.get(date_field, \"\"))\n 
days_diff = float((datetime.now() - date).days) if date else None\n\n # Actualizo el indicador de d\u00edas de actualizaci\u00f3n si corresponde\n if not dias_ultima_actualizacion or \\\n (days_diff and days_diff < dias_ultima_actualizacion):\n dias_ultima_actualizacion = days_diff\n\n if dias_ultima_actualizacion:\n return int(dias_ultima_actualizacion)\n else:\n return None"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _count_fields_recursive(dataset, fields):\n\n key_count = {\n 'recomendado': 0,\n 'optativo': 0,\n 'requerido': 0,\n 'total_optativo': 0,\n 'total_recomendado': 0,\n 'total_requerido': 0\n }\n\n for k, v in fields.items():\n # Si la clave es un diccionario se implementa recursivamente el\n # mismo algoritmo\n if isinstance(v, dict):\n # dataset[k] puede ser o un dict o una lista, ej 'dataset' es\n # list, 'publisher' no. Si no es lista, lo metemos en una.\n # Si no es ninguno de los dos, dataset[k] es inv\u00e1lido\n # y se pasa un diccionario vac\u00edo para poder comparar\n elements = dataset.get(k)\n if not isinstance(elements, (list, dict)):\n elements = [{}]\n\n if isinstance(elements, dict):\n elements = [dataset[k].copy()]\n for element in elements:\n # Llamada recursiva y suma del resultado al nuestro\n result = _count_fields_recursive(element, v)\n for key in result:\n key_count[key] += result[key]\n # Es un elemento normal (no iterable), se verifica si est\u00e1 en\n # dataset o no. 
Se suma 1 siempre al total de su tipo\n else:\n # total_requerido, total_recomendado, o total_optativo\n key_count['total_' + v] += 1\n\n if k in dataset:\n key_count[v] += 1\n\n return key_count", "response": "A recursive function that counts the fields in the given dict."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef random(key: str, index: Index, index_map: IndexMap=None) -> pd.Series:\n if len(index) > 0:\n random_state = np.random.RandomState(seed=get_hash(key))\n\n # Generate a random number for every simulant.\n #\n # NOTE: We generate a full set of random numbers for the population\n # even when we may only need a few. This ensures consistency in outcomes\n # across simulations.\n # See Also:\n # 1. https://en.wikipedia.org/wiki/Variance_reduction\n # 2. Untangling Uncertainty with Common Random Numbers: A Simulation Study; A.Flaxman, et. al., Summersim 2017\n sample_size = index_map.map_size if index_map is not None else index.max() + 1\n try:\n draw_index = index_map[index]\n except (IndexError, TypeError):\n draw_index = index\n raw_draws = random_state.random_sample(sample_size)\n return pd.Series(raw_draws[draw_index], index=index)\n\n return pd.Series(index=index)", "response": "Generates a series of uniformly distributed random numbers."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_hash(key: str) -> int:\n # 4294967295 == 2**32 - 1 which is the maximum allowable seed for a `numpy.random.RandomState`.\n return int(hashlib.sha1(key.encode('utf8')).hexdigest(), 16) % 4294967295", "response": "Gets a hash of the provided key."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngenerating a random number array from a set of choices.", "response": "def choice(key: str, index: Index, choices: Array, p: Array=None, index_map: IndexMap=None) -> pd.Series:\n \"\"\"Decides between a weighted or 
unweighted set of choices.\n\n Given a a set of choices with or without corresponding weights,\n returns an indexed set of decisions from those choices. This is\n simply a vectorized way to make decisions with some book-keeping.\n\n Parameters\n ----------\n key :\n A string used to create a seed for the random number generation.\n index : `pandas.Index`\n An index whose length is the number of random draws made\n and which indexes the returned `pandas.Series`.\n choices :\n A set of options to choose from.\n p :\n The relative weights of the choices. Can be either a 1-d array of\n the same length as `choices` or a 2-d array with `len(index)` rows\n and `len(choices)` columns. In the 1-d case, the same set of weights\n are used to decide among the choices for every item in the `index`.\n In the 2-d case, each row in `p` contains a separate set of weights\n for every item in the `index`.\n index_map :\n A mapping between the provided index (which may contain ints, floats,\n datetimes or any arbitrary combination of them) and an integer index\n into the random number array.\n\n Returns\n -------\n pd.Series\n An indexed set of decisions from among the available `choices`.\n\n Raises\n ------\n RandomnessError\n If any row in `p` contains `RESIDUAL_CHOICE` and the remaining\n weights in the row are not normalized or any row of `p` contains\n more than one reference to `RESIDUAL_CHOICE`.\n \"\"\"\n # Convert p to normalized probabilities broadcasted over index.\n p = _set_residual_probability(_normalize_shape(p, index)) if p is not None else np.ones((len(index), len(choices)))\n p = p/p.sum(axis=1, keepdims=True)\n\n draw = random(key, index, index_map)\n\n p_bins = np.cumsum(p, axis=1)\n # Use the random draw to make a choice for every row in index.\n choice_index = (draw.values[np.newaxis].T > p_bins).sum(axis=1)\n\n return pd.Series(np.array(choices)[choice_index], index=index)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following 
Python 3 function does\ndef _set_residual_probability(p: np.ndarray) -> np.ndarray:\n residual_mask = p == RESIDUAL_CHOICE\n if residual_mask.any(): # I.E. if we have any placeholders.\n if np.any(np.sum(residual_mask, axis=1) - 1):\n raise RandomnessError(\n 'More than one residual choice supplied for a single set of weights. Weights: {}.'.format(p))\n\n p[residual_mask] = 0\n residual_p = 1 - np.sum(p, axis=1) # Probabilities sum to 1.\n\n if np.any(residual_p < 0): # We got un-normalized probability weights.\n raise RandomnessError(\n 'Residual choice supplied with weights that summed to more than 1. Weights: {}.'.format(p))\n\n p[residual_mask] = residual_p\n return p", "response": "Turns any use of RESIDUAL_CHOICE into a residual probability."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef filter_for_probability(key: str, population: Union[pd.DataFrame, pd.Series, Index],\n probability: Array, index_map: IndexMap=None) -> Union[pd.DataFrame, pd.Series, Index]:\n \"\"\"Decide an event outcome for each individual in a population from probabilities.\n\n Given a population or its index and an array of associated probabilities\n for some event to happen, we create and return the sub-population for whom\n the event occurred.\n\n Parameters\n ----------\n key :\n A string used to create a seed for the random number generation.\n population :\n A view on the simulants for which we are determining the\n outcome of an event.\n probability :\n A 1d list of probabilities of the event under consideration\n occurring which corresponds (i.e. 
`len(population) == len(probability)`)\n to the population array passed in.\n index_map :\n A mapping between the provided index (which may contain ints, floats,\n datetimes or any arbitrary combination of them) and an integer index\n into the random number array.\n\n Returns\n -------\n pd.core.generic.PandasObject\n The sub-population of the simulants for whom the event occurred.\n The return type will be the same as type(population)\n \"\"\"\n if population.empty:\n return population\n\n index = population if isinstance(population, pd.Index) else population.index\n draw = random(key, index, index_map)\n mask = np.array(draw < probability)\n return population[mask]", "response": "Given a population or index and a probability array return a sub - population of simulants for whom the event occurred."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update(self, new_keys: Index):\n if not self._map.index.intersection(new_keys).empty:\n raise KeyError(\"Non-unique keys in index.\")\n\n mapping_update = self.hash_(new_keys)\n if self._map.empty:\n self._map = mapping_update.drop_duplicates()\n else:\n self._map = self._map.append(mapping_update).drop_duplicates()\n\n collisions = mapping_update.index.difference(self._map.index)\n salt = 1\n while not collisions.empty:\n mapping_update = self.hash_(collisions, salt)\n self._map = self._map.append(mapping_update).drop_duplicates()\n collisions = mapping_update.index.difference(self._map.index)\n salt += 1", "response": "Adds the new keys to the mapping."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef hash_(self, keys: Index, salt: int = 0) -> pd.Series:\n key_frame = keys.to_frame()\n new_map = pd.Series(0, index=keys)\n salt = self.convert_to_ten_digit_int(pd.Series(salt, index=keys))\n\n for i, column_name in enumerate(key_frame.columns):\n column = 
self.convert_to_ten_digit_int(key_frame[column_name])\n\n primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 27]\n out = pd.Series(1, index=column.index)\n for idx, p in enumerate(primes):\n # numpy will almost always overflow here, but it is equivalent to modding\n # out by 2**64. Since it's much much larger than our map size\n # the amount of additional periodicity this introduces is pretty trivial.\n out *= np.power(p, self.digit(column, idx))\n new_map += out + salt\n\n return new_map % self.map_size", "response": "Hashes the given index into an integer index in the range [ 0 self. stride )."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nconvert a column of datetimes integers or floats into a column of 10 digit integers.", "response": "def convert_to_ten_digit_int(self, column: pd.Series) -> pd.Series:\n \"\"\"Converts a column of datetimes, integers, or floats into a column of 10 digit integers.\n\n Parameters\n ----------\n column :\n A series of datetimes, integers, or floats.\n\n Returns\n -------\n pd.Series\n A series of ten digit integers based on the input data.\n\n Raises\n ------\n RandomnessError :\n If the column contains data that is neither a datetime-like nor numeric.\n \"\"\"\n if isinstance(column.iloc[0], datetime.datetime):\n column = self.clip_to_seconds(column.astype(int))\n elif np.issubdtype(column.iloc[0], np.integer):\n if not len(column >= 0) == len(column):\n raise RandomnessError(\"Values in integer columns must be greater than or equal to zero.\")\n column = self.spread(column)\n elif np.issubdtype(column.iloc[0], np.floating):\n column = self.shift(column)\n else:\n raise RandomnessError(f\"Unhashable column type {type(column.iloc[0])}. 
\"\n \"IndexMap accepts datetime like columns and numeric columns.\")\n return column"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef digit(m: Union[int, pd.Series], n: int) -> Union[int, pd.Series]:\n return (m // (10 ** n)) % 10", "response": "Returns the nth digit of each number in m."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef clip_to_seconds(m: Union[int, pd.Series]) -> Union[int, pd.Series]:\n return m // pd.Timedelta(1, unit='s').value", "response": "Clips UTC datetime in nanoseconds to seconds."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nspreading out integer values to give smaller values more weight.", "response": "def spread(self, m: Union[int, pd.Series]) -> Union[int, pd.Series]:\n \"\"\"Spreads out integer values to give smaller values more weight.\"\"\"\n return (m * 111_111) % self.TEN_DIGIT_MODULUS"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef shift(self, m: Union[float, pd.Series]) -> Union[int, pd.Series]:\n out = m % 1 * self.TEN_DIGIT_MODULUS // 1\n if isinstance(out, pd.Series):\n return out.astype(int)\n return int(out)", "response": "Shifts floats so that the first 10 decimal digits are significant."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates a copy of this RandomnessStream that combines this RandomnessStream with the additional key.", "response": "def copy_with_additional_key(self, key: Any) -> 'RandomnessStream':\n \"\"\"Creates a copy of this stream that combines this streams key with a new one.\n\n Parameters\n ----------\n key :\n The additional key to describe the new stream with.\n\n Returns\n -------\n RandomnessStream\n A new RandomnessStream with a combined key.\n \"\"\"\n if self._for_initialization:\n raise RandomnessError('Initialization 
streams cannot be copied.')\n elif self._manager:\n return self._manager.get_randomness_stream('_'.join([self.key, key]))\n else:\n return RandomnessStream(self.key, self.clock, self.seed, self.index_map)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _key(self, additional_key: Any=None) -> str:\n return '_'.join([self.key, str(self.clock()), str(additional_key), str(self.seed)])", "response": "Construct a hashable key from this object s state."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget an indexed sequence of floats pulled from a uniform distribution over [ 0 1 ).", "response": "def get_draw(self, index: Index, additional_key: Any=None) -> pd.Series:\n \"\"\"Get an indexed sequence of floats pulled from a uniform distribution over [0.0, 1.0)\n\n Parameters\n ----------\n index :\n An index whose length is the number of random draws made\n and which indexes the returned `pandas.Series`.\n additional_key :\n Any additional information used to seed random number generation.\n\n Returns\n -------\n pd.Series\n A series of random numbers indexed by the provided `pandas.Index`.\n \"\"\"\n if self._for_initialization:\n draw = random(self._key(additional_key), pd.Index(range(len(index))), self.index_map)\n draw.index = index\n else:\n draw = random(self._key(additional_key), index, self.index_map)\n\n return draw"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ndeciding an event outcome for each individual in a population and rates for each individual in a population.", "response": "def filter_for_rate(self, population: Union[pd.DataFrame, pd.Series, Index],\n rate: Array, additional_key: Any=None) -> Index:\n \"\"\"Decide an event outcome for each individual in a population from rates.\n\n Given a population or its index and an array of associated rates for\n some event to happen, we create and return the sub-population for whom\n the event occurred.\n\n 
Parameters\n ----------\n population :\n A view on the simulants for which we are determining the\n outcome of an event.\n rate :\n A 1d list of rates of the event under consideration occurring which\n corresponds (i.e. `len(population) == len(probability))` to the\n population view passed in. The rates must be scaled to the simulation\n time-step size either manually or as a post-processing step in a\n rate pipeline.\n additional_key :\n Any additional information used to create the seed.\n\n Returns\n -------\n Index\n The index of the simulants for whom the event occurred.\n\n See Also\n --------\n framework.values:\n Value/rate pipeline management module.\n \"\"\"\n return self.filter_for_probability(population, rate_to_probability(rate), additional_key)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef filter_for_probability(self, population: Union[pd.DataFrame, pd.Series, Index],\n probability: Array, additional_key: Any=None) -> Index:\n \"\"\"Decide an event outcome for each individual in a population from probabilities.\n\n Given a population or its index and an array of associated probabilities\n for some event to happen, we create and return the sub-population for whom\n the event occurred.\n\n Parameters\n ----------\n population :\n A view on the simulants for which we are determining the\n outcome of an event.\n probability :\n A 1d list of probabilities of the event under consideration\n occurring which corresponds (i.e. 
`len(population) == len(probability)`\n to the population view passed in.\n additional_key :\n Any additional information used to create the seed.\n\n Returns\n -------\n Index\n The sub-population of the simulants for whom the event occurred.\n The return type will be the same as type(population)\n \"\"\"\n return filter_for_probability(self._key(additional_key), population, probability, self.index_map)", "response": "Decide an event outcome for each individual in a population and a probability array."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a set of decisions from a set of choices.", "response": "def choice(self, index: Index, choices: Array, p: Array=None, additional_key: Any=None) -> pd.Series:\n \"\"\"Decides between a weighted or unweighted set of choices.\n\n Given a a set of choices with or without corresponding weights,\n returns an indexed set of decisions from those choices. This is\n simply a vectorized way to make decisions with some book-keeping.\n\n Parameters\n ----------\n index :\n An index whose length is the number of random draws made\n and which indexes the returned `pandas.Series`.\n choices :\n A set of options to choose from.\n p :\n The relative weights of the choices. Can be either a 1-d array of\n the same length as `choices` or a 2-d array with `len(index)` rows\n and `len(choices)` columns. 
In the 1-d case, the same set of weights\n are used to decide among the choices for every item in the `index`.\n In the 2-d case, each row in `p` contains a separate set of weights\n for every item in the `index`.\n additional_key :\n Any additional information used to seed random number generation.\n\n Returns\n -------\n pd.Series\n An indexed set of decisions from among the available `choices`.\n\n Raises\n ------\n RandomnessError\n If any row in `p` contains `RESIDUAL_CHOICE` and the remaining\n weights in the row are not normalized or any row of `p contains\n more than one reference to `RESIDUAL_CHOICE`.\n \"\"\"\n return choice(self._key(additional_key), index, choices, p, self.index_map)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a new randomness stream for the given decision point.", "response": "def get_randomness_stream(self, decision_point: str, for_initialization: bool=False) -> RandomnessStream:\n \"\"\"Provides a new source of random numbers for the given decision point.\n\n Parameters\n ----------\n decision_point :\n A unique identifier for a stream of random numbers. Typically represents\n a decision that needs to be made each time step like 'moves_left' or\n 'gets_disease'.\n for_initialization :\n A flag indicating whether this stream is used to generate key initialization information\n that will be used to identify simulants in the Common Random Number framework. 
These streams\n cannot be copied and should only be used to generate the state table columns specified\n in ``builder.configuration.randomness.key_columns``.\n\n Raises\n ------\n RandomnessError :\n If another location in the simulation has already created a randomness stream\n with the same identifier.\n \"\"\"\n if decision_point in self._decision_points:\n raise RandomnessError(f\"Two separate places are attempting to create \"\n f\"the same randomness stream for {decision_point}\")\n stream = RandomnessStream(key=decision_point, clock=self._clock, seed=self._seed,\n index_map=self._key_mapping, manager=self, for_initialization=for_initialization)\n self._decision_points[decision_point] = stream\n return stream"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds new simulants to the randomness mapping.", "response": "def register_simulants(self, simulants: pd.DataFrame):\n \"\"\"Adds new simulants to the randomness mapping.\n\n Parameters\n ----------\n simulants :\n A table with state data representing the new simulants. 
Each simulant should\n pass through this function exactly once.\n\n Raises\n ------\n RandomnessError :\n If the provided table does not contain all key columns specified in the configuration.\n \"\"\"\n if not all(k in simulants.columns for k in self._key_columns):\n raise RandomnessError(\"The simulants dataframe does not have all specified key_columns.\")\n self._key_mapping.update(simulants.set_index(self._key_columns).index)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nprovides a new stream of random numbers for the given decision point.", "response": "def get_stream(self, decision_point: str, for_initialization: bool = False) -> RandomnessStream:\n \"\"\"Provides a new source of random numbers for the given decision point.\n\n ``vivarium`` provides a framework for Common Random Numbers which allows for variance reduction\n when modeling counter-factual scenarios. Users interested in causal analysis and comparisons\n between simulation scenarios should be careful to use randomness streams provided by the framework\n wherever randomness is employed.\n\n Parameters\n ----------\n decision_point :\n A unique identifier for a stream of random numbers. Typically represents\n a decision that needs to be made each time step like 'moves_left' or\n 'gets_disease'.\n for_initialization :\n A flag indicating whether this stream is used to generate key initialization information\n that will be used to identify simulants in the Common Random Number framework. These streams\n cannot be copied and should only be used to generate the state table columns specified\n in ``builder.configuration.randomness.key_columns``.\n\n Returns\n -------\n RandomnessStream\n An entry point into the Common Random Number generation framework. 
The stream provides\n vectorized access to random numbers and a few other utilities.\n \"\"\"\n return self._randomness_manager.get_randomness_stream(decision_point, for_initialization)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef student_visible(self):\n group_ids = set()\n for group in Group.objects.all():\n if group.properties.student_visible:\n group_ids.add(group.id)\n\n return Group.objects.filter(id__in=group_ids)", "response": "Return a list of groups that are student - visible."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a Unicode object.", "response": "def to_python(self, value):\n \"\"\"Returns a Unicode object.\"\"\"\n if value in self.empty_values:\n return \"\"\n value = force_text(value).strip()\n return value"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nresolve the time in seconds of a configuration value.", "response": "def _resolve_time(value):\n '''\n Resolve the time in seconds of a configuration value.\n '''\n if value is None or isinstance(value,(int,long)):\n return value\n\n if NUMBER_TIME.match(value):\n return long(value)\n\n simple = SIMPLE_TIME.match(value)\n if SIMPLE_TIME.match(value):\n multiplier = long( simple.groups()[0] )\n constant = SIMPLE_TIMES[ simple.groups()[1] ]\n return multiplier * constant\n\n if value in GREGORIAN_TIMES:\n return value\n\n raise ValueError('Unsupported time format %s'%value)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef step_size(self, t0=None, t1=None):\n '''\n Return the time in seconds of a step. If a begin and end timestamp,\n return the time in seconds between them after adjusting for what buckets\n they alias to. 
If t1 and t0 resolve to the same bucket,\n '''\n if t0!=None and t1!=None:\n tb0 = self.to_bucket( t0 )\n tb1 = self.to_bucket( t1, steps=1 ) # NOTE: \"end\" of second bucket\n if tb0==tb1:\n return self._step\n return self.from_bucket( tb1 ) - self.from_bucket( tb0 )\n return self._step", "response": "Return the size of a step."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef buckets(self, start, end):\n '''\n Calculate the buckets within a starting and ending timestamp.\n '''\n start_bucket = self.to_bucket(start)\n end_bucket = self.to_bucket(end)\n return range(start_bucket, end_bucket+1)", "response": "Calculate the buckets within a starting and ending timestamp."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the ttl given the number of steps and optional relative time.", "response": "def ttl(self, steps, relative_time=None):\n '''\n Return the ttl given the number of steps, None if steps is not defined\n or we're otherwise unable to calculate one. If relative_time is defined,\n then return a ttl that is the number of seconds from now that the\n record should be expired.\n '''\n if steps:\n if relative_time:\n rtime = self.to_bucket(relative_time)\n ntime = self.to_bucket(time.time())\n # The relative time is beyond our TTL cutoff\n if (ntime - rtime) > steps:\n return 0\n # The relative time is in \"recent\" past or future\n else:\n return (steps + rtime - ntime) * self._step\n return steps * self._step\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef step_size(self, t0, t1=None):\n '''\n Return the time in seconds for each step. Requires that we know a time\n relative to which we should calculate to account for variable length\n intervals (e.g. 
February)\n '''\n tb0 = self.to_bucket( t0 )\n if t1:\n tb1 = self.to_bucket( t1, steps=1 ) # NOTE: \"end\" of second bucket\n else:\n tb1 = self.to_bucket( t0, steps=1 )\n\n # Calculate the difference in days, then multiply by simple scalar\n days = (self.from_bucket(tb1, native=True) - self.from_bucket(tb0, native=True)).days\n return days * SIMPLE_TIMES['d']", "response": "Return the time in seconds for each step."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncalculates the bucket from a timestamp.", "response": "def to_bucket(self, timestamp, steps=0):\n '''\n Calculate the bucket from a timestamp.\n '''\n dt = datetime.utcfromtimestamp( timestamp )\n\n if steps!=0:\n if self._step == 'daily':\n dt = dt + timedelta(days=steps)\n elif self._step == 'weekly':\n dt = dt + timedelta(weeks=steps)\n elif self._step == 'monthly':\n dt = dt + MonthDelta(steps)\n elif self._step == 'yearly':\n year = int(dt.strftime( self.FORMATS[self._step] ))\n year += steps\n dt = datetime(year=year, month=1, day=1)\n\n return int(dt.strftime( self.FORMATS[self._step] ))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncalculating the timestamp given a bucket.", "response": "def from_bucket(self, bucket, native=False):\n '''\n Calculate the timestamp given a bucket.\n '''\n # NOTE: this is due to a bug somewhere in strptime that does not process\n # the week number of '%Y%U' correctly. 
That bug could be very specific to\n # the combination of python and ubuntu that I was testing.\n bucket = str(bucket)\n if self._step == 'weekly':\n year, week = bucket[:4], bucket[4:]\n normal = datetime(year=int(year), month=1, day=1) + timedelta(weeks=int(week))\n else:\n normal = datetime.strptime(bucket, self.FORMATS[self._step])\n if native:\n return normal\n return long(time.mktime( normal.timetuple() ))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef buckets(self, start, end):\n '''\n Calculate the buckets within a starting and ending timestamp.\n '''\n rval = [ self.to_bucket(start) ]\n step = 1\n\n # In theory there's already been a check that end>start\n # TODO: Not a fan of an unbound while loop here\n while True:\n bucket = self.to_bucket(start, step)\n bucket_time = self.from_bucket( bucket )\n if bucket_time >= end:\n if bucket_time==end:\n rval.append( bucket )\n break\n rval.append( bucket )\n step += 1\n\n return rval", "response": "Calculate the buckets within a starting and ending timestamp."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nnormalizing a timestamp according to the interval configuration.", "response": "def normalize(self, timestamp, steps=0):\n '''\n Normalize a timestamp according to the interval configuration. Optionally\n can be used to calculate the timestamp N steps away.\n '''\n # So far, the only commonality with RelativeTime\n return self.from_bucket( self.to_bucket(timestamp, steps) )"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns the ttl given the number of steps and optional relative time.", "response": "def ttl(self, steps, relative_time=None):\n '''\n Return the ttl given the number of steps, None if steps is not defined\n or we're otherwise unable to calculate one. 
If relative_time is defined,\n then return a ttl that is the number of seconds from now that the\n record should be expired.\n '''\n if steps:\n # Approximate the ttl based on number of seconds, since it's\n # \"close enough\"\n if relative_time:\n rtime = self.to_bucket(relative_time)\n ntime = self.to_bucket(time.time())\n\n # Convert to number of days\n day_diff = (self.from_bucket(ntime, native=True) - self.from_bucket(rtime, native=True)).days\n # Convert steps to number of days as well\n step_diff = (steps*SIMPLE_TIMES[self._step[0]]) / SIMPLE_TIMES['d']\n\n # The relative time is beyond our TTL cutoff\n if day_diff > step_diff:\n return 0\n # The relative time is in \"recent\" past or future\n else:\n return (step_diff - day_diff) * SIMPLE_TIMES['d']\n return steps * SIMPLE_TIMES[ self._step[0] ]\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nperform a bulk insert of the given values into the base tree.", "response": "def bulk_insert(self, inserts, intervals=0, **kwargs):\n '''\n Perform a bulk insert. 
The format of the inserts must be:\n\n {\n timestamp : {\n name: [ values ],\n ...\n },\n ...\n }\n\n If the timestamp should be auto-generated, then \"timestamp\" should be None.\n\n Backends can implement this in any number of ways, the default being to\n perform a single insert for each value, with no specialized locking,\n transactions or batching.\n '''\n if None in inserts:\n inserts[ time.time() ] = inserts.pop(None)\n if self._write_func:\n for timestamp,names in inserts.iteritems():\n for name,values in names.iteritems():\n names[name] = [ self._write_func(v) for v in values ]\n self._batch_insert(inserts, intervals, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ninsert a value into the timeseries with the given name.", "response": "def insert(self, name, value, timestamp=None, intervals=0, **kwargs):\n '''\n Insert a value for the timeseries \"name\". For each interval in the\n configuration, will insert the value into a bucket for the interval\n \"timestamp\". If time is not supplied, will default to time.time(), else it\n should be a floating point value.\n\n If \"intervals\" is less than 0, inserts the value into timestamps\n \"abs(intervals)\" preceeding \"timestamp\" (i.e. \"-1\" inserts one extra value).\n If \"intervals\" is greater than 0, inserts the value into that many more\n intervals after \"timestamp\". The default behavior is to insert for a single\n timestamp.\n\n This supports the public methods of the same name in the subclasses. 
The\n value is expected to already be converted.\n '''\n if not timestamp:\n timestamp = time.time()\n\n if isinstance(value, (list,tuple,set)):\n if self._write_func:\n value = [ self._write_func(v) for v in value ]\n return self._batch_insert({timestamp:{name:value}}, intervals, **kwargs)\n\n if self._write_func:\n value = self._write_func(value)\n\n # TODO: document acceptable names\n # TODO: document what types values are supported\n # TODO: document behavior when time is outside the bounds of TTLed config\n # TODO: document how the data is stored.\n # TODO: better abstraction for \"intervals\" processing rather than in each implementation\n\n self._insert( name, value, timestamp, intervals, **kwargs )"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsupports for batch insert. Default implementation is non-optimized and is a simple loop over values.", "response": "def _batch_insert(self, inserts, intervals, **kwargs):\n '''\n Support for batch insert. 
Default implementation is non-optimized and\n is a simple loop over values.\n '''\n for timestamp,names in inserts.iteritems():\n for name,values in names.iteritems():\n for value in values:\n self._insert( name, value, timestamp, intervals, **kwargs )"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _normalize_timestamps(self, timestamp, intervals, config):\n '''\n Helper for the subclasses to generate a list of timestamps.\n '''\n rval = [timestamp]\n if intervals<0:\n while intervals<0:\n rval.append( config['i_calc'].normalize(timestamp, intervals) )\n intervals += 1\n elif intervals>0:\n while intervals>0:\n rval.append( config['i_calc'].normalize(timestamp, intervals) )\n intervals -= 1\n return rval", "response": "Helper for the subclasses to generate a list of timestamps."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef iterate(self, name, interval, **kwargs):\n '''\n Returns a generator that iterates over all the intervals and returns\n data for various timestamps, in the form:\n\n ( unix_timestamp, data )\n\n This will check for all timestamp buckets that might exist between\n the first and last timestamp in a series. 
Each timestamp bucket will\n be fetched separately to keep this memory efficient, at the cost of\n extra trips to the data store.\n\n Keyword arguments are the same as get().\n '''\n config = self._intervals.get(interval)\n if not config:\n raise UnknownInterval(interval)\n properties = self.properties(name)[interval]\n\n i_buckets = config['i_calc'].buckets(properties['first'], properties['last'])\n for i_bucket in i_buckets:\n data = self.get(name, interval,\n timestamp=config['i_calc'].from_bucket(i_bucket), **kwargs)\n for timestamp,row in data.items():\n yield (timestamp,row)", "response": "Iterate over all the intervals and returns\n data for various timestamps"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get(self, name, interval, **kwargs):\n '''\n Get the set of values for a named timeseries and interval. If timestamp\n supplied, will fetch data for the period of time in which that timestamp\n would have fallen, else returns data for \"now\". If the timeseries\n resolution was not defined, then returns a simple list of values for the\n interval, else returns an ordered dict where the keys define the resolution\n interval and the values are the time series data in that (sub)interval.\n This allows the user to interpolate sparse data sets.\n\n If transform is defined, will utilize one of `[mean, count, min, max, sum]`\n to process each row of data returned. If the transform is a callable, will\n pass an array of data to the function. Note that the transform will be run\n after the data is condensed. 
If the transform is a list, then each row will\n return a hash of the form { transform_name_or_func : transformed_data }.\n If the transform is a hash, then it should be of the form\n { transform_name : transform_func } and will return the same structure as\n a list argument.\n\n Raises UnknownInterval if `interval` is not one of the configured\n intervals.\n\n TODO: Fix this method doc\n '''\n config = self._intervals.get(interval)\n if not config:\n raise UnknownInterval(interval)\n\n timestamp = kwargs.get('timestamp', time.time())\n fetch = kwargs.get('fetch')\n process_row = kwargs.get('process_row') or self._process_row\n condense = kwargs.get('condense', False)\n join_rows = kwargs.get('join_rows') or self._join\n transform = kwargs.get('transform')\n\n # DEPRECATED handle the deprecated version of condense\n condense = kwargs.get('condensed',condense)\n\n # If name is a list, then join all of results. It is more efficient to\n # use a single data structure and join \"in-line\" but that requires a major\n # refactor of the backends, so trying this solution to start with. At a\n # minimum we'd have to rebuild the results anyway because of the potential\n # for sparse data points would result in an out-of-order result.\n if isinstance(name, (list,tuple,set)):\n results = [ self._get(x, interval, config, timestamp, fetch=fetch, process_row=process_row) for x in name ]\n # Even resolution data is \"coarse\" in that it's not nested\n rval = self._join_results( results, True, join_rows )\n else:\n rval = self._get( name, interval, config, timestamp, fetch=fetch, process_row=process_row )\n\n # If condensed, collapse the result into a single row. 
Adjust the step_size\n # calculation to match.\n if config['coarse']:\n step_size = config['i_calc'].step_size(timestamp)\n else:\n step_size = config['r_calc'].step_size(timestamp)\n if condense and not config['coarse']:\n condense = condense if callable(condense) else self._condense\n rval = { config['i_calc'].normalize(timestamp) : condense(rval) }\n step_size = config['i_calc'].step_size(timestamp)\n\n if transform:\n for k,v in rval.items():\n rval[k] = self._process_transform(v, transform, step_size)\n return rval", "response": "Get the set of values for a named timeseries and interval."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn all the data in a named time series for a given interval.", "response": "def series(self, name, interval, **kwargs):\n '''\n Return all the data in a named time series for a given interval. If steps\n not defined and there are none in the config, defaults to 1.\n\n Returns an ordered dict of interval timestamps to a single interval, which\n matches the return value in get().\n\n If transform is defined, will utilize one of `[mean, count, min, max, sum]`\n to process each row of data returned. If the transform is a callable, will\n pass an array of data to the function. 
Note that the transform will be run\n after the data is condensed.\n\n Raises UnknownInterval if `interval` not configured.\n '''\n config = self._intervals.get(interval)\n if not config:\n raise UnknownInterval(interval)\n\n start = kwargs.get('start')\n end = kwargs.get('end')\n steps = kwargs.get('steps') or config.get('steps',1)\n\n fetch = kwargs.get('fetch')\n process_row = kwargs.get('process_row') or self._process_row\n condense = kwargs.get('condense', False)\n join_rows = kwargs.get('join_rows') or self._join\n collapse = kwargs.get('collapse', False)\n transform = kwargs.get('transform')\n # DEPRECATED handle the deprecated version of condense\n condense = kwargs.get('condensed',condense)\n\n # If collapse, also condense\n if collapse: condense = condense or True\n\n # Fugly range determination, all to get ourselves a start and end\n # timestamp. Adjust steps argument to include the anchoring date.\n if end is None:\n if start is None:\n end = time.time()\n end_bucket = config['i_calc'].to_bucket( end )\n start_bucket = config['i_calc'].to_bucket( end, (-steps+1) )\n else:\n start_bucket = config['i_calc'].to_bucket( start )\n end_bucket = config['i_calc'].to_bucket( start, steps-1 )\n else:\n end_bucket = config['i_calc'].to_bucket( end )\n if start is None:\n start_bucket = config['i_calc'].to_bucket( end, (-steps+1) )\n else:\n start_bucket = config['i_calc'].to_bucket( start )\n\n # Now that we have start and end buckets, convert them back to normalized\n # time stamps and then back to buckets. :)\n start = config['i_calc'].from_bucket( start_bucket )\n end = config['i_calc'].from_bucket( end_bucket )\n if start > end: end = start\n\n interval_buckets = config['i_calc'].buckets(start, end)\n\n # If name is a list, then join all of results. It is more efficient to\n # use a single data structure and join \"in-line\" but that requires a major\n # refactor of the backends, so trying this solution to start with. 
At a\n # minimum we'd have to rebuild the results anyway because of the potential\n # for sparse data points would result in an out-of-order result.\n if isinstance(name, (list,tuple,set)):\n results = [ self._series(x, interval, config, interval_buckets, fetch=fetch, process_row=process_row) for x in name ]\n rval = self._join_results( results, config['coarse'], join_rows )\n else:\n rval = self._series(name, interval, config, interval_buckets, fetch=fetch, process_row=process_row)\n\n # If fine-grained, first do the condensed pass so that it's easier to do\n # the collapse afterwards. Be careful not to run the transform if there's\n # going to be another pass at condensing the data.\n if not config['coarse']:\n if condense:\n condense = condense if callable(condense) else self._condense\n for key in rval.iterkeys():\n data = condense( rval[key] )\n if transform and not collapse:\n data = self._process_transform(data, transform, config['i_calc'].step_size(key))\n rval[key] = data\n elif transform:\n for interval,resolutions in rval.items():\n for key in resolutions.iterkeys():\n resolutions[key] = self._process_transform(resolutions[key], transform, config['r_calc'].step_size(key))\n\n if config['coarse'] or collapse:\n if collapse:\n collapse = collapse if callable(collapse) else condense if callable(condense) else self._condense\n data = collapse(rval)\n if transform:\n rval = { rval.keys()[0] : self._process_transform(data, transform, config['i_calc'].step_size(rval.keys()[0], rval.keys()[-1])) }\n else:\n rval = { rval.keys()[0] : data }\n\n elif transform:\n for key,data in rval.items():\n rval[key] = self._process_transform(data, transform, config['i_calc'].step_size(key))\n\n return rval"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _join_results(self, results, coarse, join):\n '''\n Join a list of results. 
Supports both get and series.\n '''\n rval = OrderedDict()\n i_keys = set()\n for res in results:\n i_keys.update( res.keys() )\n for i_key in sorted(i_keys):\n if coarse:\n rval[i_key] = join( [res.get(i_key) for res in results] )\n else:\n rval[i_key] = OrderedDict()\n r_keys = set()\n for res in results:\n r_keys.update( res.get(i_key,{}).keys() )\n for r_key in sorted(r_keys):\n rval[i_key][r_key] = join( [res.get(i_key,{}).get(r_key) for res in results] )\n return rval", "response": "Join a list of results. Supports both get and series.\n "} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _process_transform(self, data, transform, step_size):\n '''\n Process transforms on the data.\n '''\n if isinstance(transform, (list,tuple,set)):\n return { t : self._transform(data,t,step_size) for t in transform }\n elif isinstance(transform, dict):\n return { tn : self._transform(data,tf,step_size) for tn,tf in transform.items() }\n return self._transform(data,transform,step_size)", "response": "Process transforms on the data."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _transform(self, data, transform, step_size):\n '''\n Transform the data. If the transform is not supported by this series,\n returns the data unaltered.\n '''\n if transform=='mean':\n total = sum( k*v for k,v in data.items() )\n count = sum( data.values() )\n data = float(total)/float(count) if count>0 else 0\n elif transform=='count':\n data = sum(data.values())\n elif transform=='min':\n data = min(data.keys() or [0])\n elif transform=='max':\n data = max(data.keys() or [0])\n elif transform=='sum':\n data = sum( k*v for k,v in data.items() )\n elif transform=='rate':\n data = { k:v/float(step_size) for k,v in data.items() }\n elif callable(transform):\n data = transform(data, step_size)\n return data", "response": "Transform the data. 
If the transform is not supported by this series,\n returns the data unaltered."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _condense(self, data):\n '''\n Condense by adding together all of the lists.\n '''\n rval = {}\n for resolution,histogram in data.items():\n for value,count in histogram.items():\n rval[ value ] = count + rval.get(value,0)\n return rval", "response": "Condense by adding together all of the lists."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _join(self, rows):\n '''\n Join multiple rows worth of data into a single result.\n '''\n rval = {}\n for row in rows:\n if row:\n for value,count in row.items():\n rval[ value ] = count + rval.get(value,0)\n return rval", "response": "Join multiple rows worth of data into a single result."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\njoin multiple rows worth of data into a single result.", "response": "def _join(self, rows):\n '''\n Join multiple rows worth of data into a single result.\n '''\n rval = 0\n for row in rows:\n if row: rval += row\n return rval"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ntransforming the data. If the transform is not supported by this series, returns the data unaltered.", "response": "def _transform(self, data, transform, step_size):\n '''\n Transform the data. 
If the transform is not supported by this series,\n returns the data unaltered.\n '''\n if callable(transform):\n data = transform(data, step_size)\n return data"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncondense by returning the last real value of the gauge.", "response": "def _condense(self, data):\n '''\n Condense by returning the last real value of the gauge.\n '''\n if data:\n data = filter(None,data.values())\n if data:\n return data[-1]\n return None"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ntransform the data. If the transform is not supported by this series, returns the data unaltered.", "response": "def _transform(self, data, transform, step_size):\n '''\n Transform the data. If the transform is not supported by this series,\n returns the data unaltered.\n '''\n if transform=='mean':\n total = sum( data )\n count = len( data )\n data = float(total)/float(count) if count>0 else 0\n elif transform=='count':\n data = len(data)\n elif transform=='min':\n data = min(data or [0])\n elif transform=='max':\n data = max(data or [0])\n elif transform=='sum':\n data = sum(data)\n elif transform=='rate':\n data = len(data) / float(step_size)\n elif callable(transform):\n data = transform(data)\n return data"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _condense(self, data):\n '''\n Condense by or-ing all of the sets.\n '''\n if data:\n return reduce(operator.ior, data.values())\n return set()", "response": "Condense by or - ing all of the sets."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef handle(self, **options):\n\n students_grp, _ = Group.objects.get_or_create(name=\"Students\")\n sg_prop = students_grp.properties\n sg_prop.student_visible = True\n sg_prop.save()\n students_grp.save()\n\n for gr in [settings.SENIOR_GRADUATION_YEAR + y for y in range(0, 4)]:\n users = 
User.objects.users_in_year(gr)\n grp, _ = Group.objects.get_or_create(name=\"Class of {}\".format(gr))\n grp_prop = grp.properties\n grp_prop.student_visible = True\n grp_prop.save()\n grp.save()\n self.stdout.write(\"{}: {} users\".format(gr, len(users)))\n for u in users:\n u.groups.add(grp)\n u.groups.add(students_grp)\n u.save()\n self.stdout.write(\"{}: Processed\".format(gr))\n\n self.stdout.write(\"Done.\")", "response": "Create Class of 20 [ 16 - 19 groups"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nhandles the command line interface for the get_next_entry_entry_entry function.", "response": "def handle(self, *args, **options):\n do_run = options[\"run\"]\n\n if do_run:\n if not options[\"confirm\"]:\n self.ask(\"===== WARNING! =====\\n\\n\"\n \"This script will DESTROY data! Ensure that you have a properly backed-up copy of your database before proceeding.\\n\\n\"\n \"===== WARNING! =====\\n\\n\"\n \"Continue?\")\n else:\n self.stdout.write(\"In pretend mode.\")\n\n current_year = timezone.now().year\n new_senior_year = current_year + 1\n turnover_date = datetime.datetime(current_year, 7, 1)\n self.stdout.write(\"Turnover date set to: {}\".format(turnover_date.strftime(\"%c\")))\n\n if not self.chk(\"SENIOR_GRADUATION_YEAR = {} in settings/__init__.py\".format(new_senior_year),\n settings.SENIOR_GRADUATION_YEAR == new_senior_year):\n return\n \"\"\"\n EIGHTH:\n EighthBlock: filtered\n EighthSignup: absences removed\n EighthActivity: keep\n\n ANNOUNCEMENTS:\n AnnouncementRequest: filtered\n\n USERS:\n User: graduated students deleted\n \"\"\"\n\n self.stdout.write(\"Resolving absences\")\n if do_run:\n self.clear_absences()\n\n self.stdout.write(\"Updating welcome state\")\n if do_run:\n self.update_welcome()\n\n self.stdout.write(\"Deleting graduated users\")\n if do_run:\n self.handle_delete()\n\n self.stdout.write(\"Archiving admin comments\")\n if do_run:\n self.archive_admin_comments()"} {"SOURCE": 
"codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef authenticate_credentials(self, userid, password, request=None):\n\n user = auth.authenticate(username=userid, password=password)\n\n if user is None or (user and not user.is_active):\n raise exceptions.AuthenticationFailed(\"Invalid username/password.\")\n\n return (user, None)", "response": "Authenticate the userid and password."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nchecking if the given request is in the internal IP list.", "response": "def check_internal_ip(request):\n \"\"\" request is an AsgiRequest \"\"\"\n remote_addr = (request.META[\"HTTP_X_FORWARDED_FOR\"] if \"HTTP_X_FORWARDED_FOR\" in request.META else request.META.get(\"REMOTE_ADDR\", \"\"))\n return remote_addr in settings.INTERNAL_IPS"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef handle_sap(q):\n question_votes = votes = Answer.objects.filter(question=q)\n users = q.get_users_voted()\n num_users_votes = {u.id: votes.filter(user=u).count() for u in users}\n user_scale = {u.id: (1 / num_users_votes[u.id]) for u in users}\n choices = []\n for c in q.choice_set.all().order_by(\"num\"):\n votes = question_votes.filter(choice=c)\n vote_users = set([v.user for v in votes])\n choice = {\n \"choice\": c,\n \"votes\": {\n \"total\": {\n \"all\": len(vote_users),\n \"all_percent\": perc(len(vote_users), users.count()),\n \"male\": fmt(sum([v.user.is_male * user_scale[v.user.id] for v in votes])),\n \"female\": fmt(sum([v.user.is_female * user_scale[v.user.id] for v in votes]))\n }\n },\n \"users\": [v.user for v in votes]\n }\n for yr in range(9, 14):\n yr_votes = [v.user if v.user.grade and v.user.grade.number == yr else None for v in votes]\n yr_votes = list(filter(None, yr_votes))\n choice[\"votes\"][yr] = {\n \"all\": len(set(yr_votes)),\n \"male\": fmt(sum([u.is_male * user_scale[u.id] for u in yr_votes])),\n 
\"female\": fmt(sum([u.is_female * user_scale[u.id] for u in yr_votes])),\n }\n\n choices.append(choice)\n \"\"\" Clear vote \"\"\"\n votes = question_votes.filter(clear_vote=True)\n clr_users = set([v.user for v in votes])\n choice = {\n \"choice\": \"Clear vote\",\n \"votes\": {\n \"total\": {\n \"all\": len(clr_users),\n \"all_percent\": perc(len(clr_users), users.count()),\n \"male\": fmt(sum([v.user.is_male * user_scale[v.user.id] for v in votes])),\n \"female\": fmt(sum([v.user.is_female * user_scale[v.user.id] for v in votes]))\n }\n },\n \"users\": clr_users\n }\n for yr in range(9, 14):\n yr_votes = [v.user if v.user.grade and v.user.grade.number == yr else None for v in votes]\n yr_votes = list(filter(None, yr_votes))\n choice[\"votes\"][yr] = {\n \"all\": len(yr_votes),\n \"male\": fmt(sum([u.is_male * user_scale[u.id] for u in yr_votes])),\n \"female\": fmt(sum([u.is_female * user_scale[u.id] for u in yr_votes]))\n }\n\n choices.append(choice)\n\n choice = {\n \"choice\": \"Total\",\n \"votes\": {\n \"total\": {\n \"all\": users.count(),\n \"votes_all\": question_votes.count(),\n \"all_percent\": perc(users.count(), users.count()),\n \"male\": users.filter(gender=True).count(),\n \"female\": users.filter(gender__isnull=False, gender=False).count()\n }\n }\n }\n for yr in range(9, 14):\n yr_votes = [u if u.grade and u.grade.number == yr else None for u in users]\n yr_votes = list(filter(None, yr_votes))\n choice[\"votes\"][yr] = {\n \"all\": len(set(yr_votes)),\n \"male\": fmt(sum([u.is_male * user_scale[u.id] for u in yr_votes])),\n \"female\": fmt(sum([u.is_female * user_scale[u.id] for u in yr_votes]))\n }\n\n choices.append(choice)\n\n return {\"question\": q, \"choices\": choices, \"user_scale\": user_scale}", "response": "Handle sap - related information."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef join_event_view(request, id):\n\n event = get_object_or_404(Event, id=id)\n\n if 
request.method == \"POST\":\n if not event.show_attending:\n return redirect(\"events\")\n if \"attending\" in request.POST:\n attending = request.POST.get(\"attending\")\n attending = (attending == \"true\")\n\n if attending:\n event.attending.add(request.user)\n else:\n event.attending.remove(request.user)\n\n return redirect(\"events\")\n\n context = {\"event\": event, \"is_events_admin\": request.user.has_admin_permission('events')}\n return render(request, \"events/join_event.html\", context)", "response": "Join an event page."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef event_roster_view(request, id):\n\n event = get_object_or_404(Event, id=id)\n\n full_roster = list(event.attending.all())\n viewable_roster = []\n num_hidden_members = 0\n for p in full_roster:\n if p.can_view_eighth:\n viewable_roster.append(p)\n else:\n num_hidden_members += 1\n\n context = {\n \"event\": event,\n \"viewable_roster\": viewable_roster,\n \"full_roster\": full_roster,\n \"num_hidden_members\": num_hidden_members,\n \"is_events_admin\": request.user.has_admin_permission('events'),\n }\n return render(request, \"events/roster.html\", context)", "response": "Show the event roster."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding event page. Currently, there is an approval process for events. If a user is an events administrator, they can create events directly. Otherwise, their event is added in the system but must be approved.", "response": "def add_event_view(request):\n \"\"\"Add event page.\n\n Currently, there is an approval process for events. If a user is an\n events administrator, they can create events directly. 
Otherwise,\n their event is added in the system but must be approved.\n\n \"\"\"\n is_events_admin = request.user.has_admin_permission('events')\n if not is_events_admin:\n return redirect(\"request_event\")\n\n if request.method == \"POST\":\n form = EventForm(data=request.POST, all_groups=request.user.has_admin_permission('groups'))\n logger.debug(form)\n if form.is_valid():\n obj = form.save()\n obj.user = request.user\n # SAFE HTML\n obj.description = safe_html(obj.description)\n\n # auto-approve if admin\n obj.approved = True\n obj.approved_by = request.user\n messages.success(request, \"Because you are an administrator, this event was auto-approved.\")\n obj.created_hook(request)\n\n obj.save()\n return redirect(\"events\")\n else:\n messages.error(request, \"Error adding event.\")\n else:\n form = EventForm(all_groups=request.user.has_admin_permission('groups'))\n context = {\"form\": form, \"action\": \"add\", \"action_title\": \"Add\" if is_events_admin else \"Submit\", \"is_events_admin\": is_events_admin}\n return render(request, \"events/add_modify.html\", context)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nmodifies an event page. You may only modify an event if you are the creator or you are an administrator.", "response": "def modify_event_view(request, id=None):\n \"\"\"Modify event page. 
You may only modify an event if you were the creator or you are an\n administrator.\n\n id: event id\n\n \"\"\"\n event = get_object_or_404(Event, id=id)\n is_events_admin = request.user.has_admin_permission('events')\n\n if not is_events_admin:\n raise exceptions.PermissionDenied\n\n if request.method == \"POST\":\n if is_events_admin:\n form = AdminEventForm(data=request.POST, instance=event, all_groups=request.user.has_admin_permission('groups'))\n else:\n form = EventForm(data=request.POST, instance=event, all_groups=request.user.has_admin_permission('groups'))\n logger.debug(form)\n if form.is_valid():\n obj = form.save()\n # SAFE HTML\n obj.description = safe_html(obj.description)\n obj.save()\n messages.success(request, \"Successfully modified event.\")\n # return redirect(\"events\")\n else:\n messages.error(request, \"Error modifying event.\")\n else:\n if is_events_admin:\n form = AdminEventForm(instance=event, all_groups=request.user.has_admin_permission('groups'))\n else:\n form = EventForm(instance=event, all_groups=request.user.has_admin_permission('groups'))\n context = {\"form\": form, \"action\": \"modify\", \"action_title\": \"Modify\", \"id\": id, \"is_events_admin\": is_events_admin}\n return render(request, \"events/add_modify.html\", context)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ndelete an event page.", "response": "def delete_event_view(request, id):\n \"\"\"Delete event page. You may only delete an event if you were the creator or you are an\n administrator. 
Confirmation page if not POST.\n\n id: event id\n\n \"\"\"\n event = get_object_or_404(Event, id=id)\n if not request.user.has_admin_permission('events'):\n raise exceptions.PermissionDenied\n\n if request.method == \"POST\":\n try:\n event.delete()\n messages.success(request, \"Successfully deleted event.\")\n except Event.DoesNotExist:\n pass\n\n return redirect(\"events\")\n else:\n return render(request, \"events/delete.html\", {\"event\": event})"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef show_event_view(request):\n if request.method == \"POST\":\n event_id = request.POST.get(\"event_id\")\n if event_id:\n event = Event.objects.get(id=event_id)\n event.user_map.users_hidden.remove(request.user)\n event.user_map.save()\n return http.HttpResponse(\"Unhidden\")\n raise http.Http404\n else:\n return http.HttpResponseNotAllowed([\"POST\"], \"HTTP 405: METHOD NOT ALLOWED\")", "response": "Unhide an event that was hidden by the logged - in user."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset up a GCM session. This does *not* require a valid login session. Instead, a token from the client session is sent to the Android backend, which queries a POST request to this view. The \"android_gcm_rand\" is randomly set when the Android app is detected through the user agent. If it has the same value, it is assumed to be correct.", "response": "def android_setup_view(request):\n \"\"\"Set up a GCM session.\n This does *not* require a valid login session. Instead, a token from the client\n session is sent to the Android backend, which queries a POST request to this view.\n\n The \"android_gcm_rand\" is randomly set when the Android app is detected through\n the user agent. 
If it has the same value, it is assumed to be correct.\n \"\"\"\n logger.debug(request.POST)\n if request.method == \"POST\":\n if \"user_token\" in request.POST and \"gcm_token\" in request.POST:\n user_token = request.POST.get(\"user_token\")\n gcm_token = request.POST.get(\"gcm_token\")\n\n logger.debug(user_token)\n logger.debug(gcm_token)\n try:\n ncfg = NotificationConfig.objects.get(android_gcm_rand=user_token)\n except NotificationConfig.DoesNotExist:\n logger.debug(\"No pair\")\n return HttpResponse('{\"error\":\"Invalid data.\"}', content_type=\"text/json\")\n\n ncfg.gcm_token = gcm_token\n ncfg.android_gcm_rand = None\n ncfg.android_gcm_date = None\n ncfg.save()\n return HttpResponse('{\"success\":\"Now registered.\"}', content_type=\"text/json\")\n return HttpResponse('{\"error\":\"Invalid arguments.\"}', content_type=\"text/json\")"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the data of the last notification sent to the current user.", "response": "def chrome_getdata_view(request):\n \"\"\"Get the data of the last notification sent to the current user.\n\n This is needed because Chrome, as of version 44, doesn't support\n sending a data payload to a notification. 
Thus, information on what\n the notification is actually for must be manually fetched.\n\n \"\"\"\n data = {}\n if request.user.is_authenticated:\n # authenticated session\n notifs = GCMNotification.objects.filter(sent_to__user=request.user).order_by(\"-time\")\n if notifs.count() > 0:\n notif = notifs.first()\n ndata = notif.data\n if \"title\" in ndata and \"text\" in ndata:\n data = {\n \"title\": ndata['title'] if 'title' in ndata else '',\n \"text\": ndata['text'] if 'text' in ndata else '',\n \"url\": ndata['url'] if 'url' in ndata else ''\n }\n else:\n schedule_chk = chrome_getdata_check(request)\n if schedule_chk:\n data = schedule_chk\n else:\n schedule_chk = chrome_getdata_check(request)\n if schedule_chk:\n data = schedule_chk\n else:\n return HttpResponse(\"null\", content_type=\"text/json\")\n else:\n schedule_chk = chrome_getdata_check(request)\n if schedule_chk:\n data = schedule_chk\n else:\n data = {\"title\": \"Check Intranet\", \"text\": \"You have a new notification that couldn't be loaded right now.\"}\n j = json.dumps(data)\n return HttpResponse(j, content_type=\"text/json\")"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef chrome_setup_view(request):\n logger.debug(request.POST)\n token = None\n if request.method == \"POST\":\n if \"token\" in request.POST:\n token = request.POST.get(\"token\")\n\n if not token:\n return HttpResponse('{\"error\":\"Invalid data.\"}', content_type=\"text/json\")\n ncfg, _ = NotificationConfig.objects.get_or_create(user=request.user)\n ncfg.gcm_token = token\n ncfg.save()\n return HttpResponse('{\"success\":\"Now registered.\"}', content_type=\"text/json\")", "response": "Set up a browser - side GCM session."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef lostitem_modify_view(request, item_id=None):\n if request.method == \"POST\":\n lostitem = get_object_or_404(LostItem, id=item_id)\n 
form = LostItemForm(request.POST, instance=lostitem)\n if form.is_valid():\n obj = form.save()\n logger.debug(form.cleaned_data)\n # SAFE HTML\n obj.description = safe_html(obj.description)\n obj.save()\n messages.success(request, \"Successfully modified lost item.\")\n return redirect(\"lostitem_view\", obj.id)\n else:\n messages.error(request, \"Error adding lost item.\")\n else:\n lostitem = get_object_or_404(LostItem, id=item_id)\n form = LostItemForm(instance=lostitem)\n\n context = {\"form\": form, \"action\": \"modify\", \"id\": item_id, \"lostitem\": lostitem}\n return render(request, \"lostfound/lostitem_form.html\", context)", "response": "Modify a lostitem.\n\n id: lostitem id"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ndeleting a lostitem. id: lostitem id", "response": "def lostitem_delete_view(request, item_id):\n \"\"\"Delete a lostitem.\n\n id: lostitem id\n\n \"\"\"\n if request.method == \"POST\":\n try:\n a = LostItem.objects.get(id=item_id)\n if request.POST.get(\"full_delete\", False):\n a.delete()\n messages.success(request, \"Successfully deleted lost item.\")\n else:\n a.found = True\n a.save()\n messages.success(request, \"Successfully marked lost item as found!\")\n except LostItem.DoesNotExist:\n pass\n\n return redirect(\"index\")\n else:\n lostitem = get_object_or_404(LostItem, id=item_id)\n return render(request, \"lostfound/lostitem_delete.html\", {\"lostitem\": lostitem})"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef lostitem_view(request, item_id):\n lostitem = get_object_or_404(LostItem, id=item_id)\n return render(request, \"itemreg/item_view.html\", {\"item\": lostitem, \"type\": \"lost\"})", "response": "View a lostitem.\n\n id: lostitem id"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nmodify a founditem. 
id: founditem id", "response": "def founditem_modify_view(request, item_id=None):\n \"\"\"Modify a founditem.\n\n id: founditem id\n\n \"\"\"\n if request.method == \"POST\":\n founditem = get_object_or_404(FoundItem, id=item_id)\n form = FoundItemForm(request.POST, instance=founditem)\n if form.is_valid():\n obj = form.save()\n logger.debug(form.cleaned_data)\n # SAFE HTML\n obj.description = safe_html(obj.description)\n obj.save()\n messages.success(request, \"Successfully modified found item.\")\n return redirect(\"founditem_view\", obj.id)\n else:\n messages.error(request, \"Error adding found item.\")\n else:\n founditem = get_object_or_404(FoundItem, id=item_id)\n form = FoundItemForm(instance=founditem)\n\n context = {\"form\": form, \"action\": \"modify\", \"id\": item_id, \"founditem\": founditem}\n return render(request, \"lostfound/founditem_form.html\", context)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndeletes a founditem. id: founditem id", "response": "def founditem_delete_view(request, item_id):\n \"\"\"Delete a founditem.\n\n id: founditem id\n\n \"\"\"\n if request.method == \"POST\":\n try:\n a = FoundItem.objects.get(id=item_id)\n if request.POST.get(\"full_delete\", False):\n a.delete()\n messages.success(request, \"Successfully deleted found item.\")\n else:\n a.found = True\n a.save()\n messages.success(request, \"Successfully marked found item as found!\")\n except FoundItem.DoesNotExist:\n pass\n\n return redirect(\"index\")\n else:\n founditem = get_object_or_404(FoundItem, id=item_id)\n return render(request, \"lostfound/founditem_delete.html\", {\"founditem\": founditem})"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nviews a founditem. 
id: founditem id", "response": "def founditem_view(request, item_id):\n \"\"\"View a founditem.\n\n id: founditem id\n\n \"\"\"\n founditem = get_object_or_404(FoundItem, id=item_id)\n return render(request, \"itemreg/item_view.html\", {\"item\": founditem, \"type\": \"found\"})"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndisplay authentication for filecenter.", "response": "def files_auth(request):\n \"\"\"Display authentication for filecenter.\"\"\"\n if \"password\" in request.POST:\n \"\"\"\n Encrypt the password with AES mode CFB.\n Create a random 32 char key, stored in a CLIENT-side cookie.\n Create a random 32 char IV, stored in a SERVER-side session.\n Store the encrypted ciphertext in a SERVER-side session.\n\n This ensures that neither side can decrypt the password without\n the information stored on the other end of the request.\n\n Both the server-side session variables and the client-side cookies\n are deleted when the user logs out.\n \"\"\"\n key = Random.new().read(32)\n iv = Random.new().read(16)\n obj = AES.new(key, AES.MODE_CFB, iv)\n message = request.POST.get(\"password\")\n if isinstance(message, str):\n message = message.encode(\"utf-8\")\n ciphertext = obj.encrypt(message)\n request.session[\"files_iv\"] = base64.b64encode(iv).decode()\n request.session[\"files_text\"] = base64.b64encode(ciphertext).decode()\n cookie_key = base64.b64encode(key).decode()\n\n nexturl = request.GET.get(\"next\", None)\n if nexturl and nexturl.startswith(\"/files\"):\n response = redirect(nexturl)\n else:\n response = redirect(\"files\")\n response.set_cookie(key=\"files_key\", value=cookie_key)\n if \"username\" in request.POST:\n request.session[\"filecenter_username\"] = request.POST.get(\"username\")\n return response\n else:\n return render(request, \"files/auth.html\", {\"is_admin\": request.user.member_of(\"admin_all\")})"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the 
following Python 3 code\ndef get_authinfo(request):\n if ((\"files_iv\" not in request.session) or (\"files_text\" not in request.session) or (\"files_key\" not in request.COOKIES)):\n return False\n \"\"\"\n Decrypt the password given the SERVER-side IV, SERVER-side\n ciphertext, and CLIENT-side key.\n\n See note above on why this is done.\n \"\"\"\n\n iv = base64.b64decode(request.session[\"files_iv\"])\n text = base64.b64decode(request.session[\"files_text\"])\n key = base64.b64decode(request.COOKIES[\"files_key\"])\n\n obj = AES.new(key, AES.MODE_CFB, iv)\n password = obj.decrypt(text)\n\n username = request.session[\"filecenter_username\"] if \"filecenter_username\" in request.session else request.user.username\n\n return {\"username\": username, \"password\": password}", "response": "Get authentication info from the encrypted message."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nformats a string for the location of the user s folder on the Windows fileserver.", "response": "def windows_dir_format(host_dir, user):\n \"\"\"Format a string for the location of the user's folder on the Windows (TJ03) fileserver.\"\"\"\n if user and user.grade:\n grade = int(user.grade)\n else:\n return host_dir\n\n if grade in range(9, 13):\n win_path = \"/{}/\".format(user.username)\n else:\n win_path = \"\"\n return host_dir.replace(\"{win}\", win_path)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ndoes all processing for a given filesystem.", "response": "def files_type(request, fstype=None):\n \"\"\"Do all processing (directory listing, file downloads) for a given filesystem.\"\"\"\n try:\n host = Host.objects.get(code=fstype)\n except Host.DoesNotExist:\n messages.error(request, \"Could not find host in database.\")\n return redirect(\"files\")\n\n if host.available_to_all:\n pass\n elif not host.visible_to(request.user):\n messages.error(request, \"You don't have permission to 
access this host.\")\n return redirect(\"files\")\n\n authinfo = get_authinfo(request)\n\n if not authinfo:\n return redirect(\"{}?next={}\".format(reverse(\"files_auth\"), request.get_full_path()))\n\n try:\n sftp = create_session(host.address, authinfo[\"username\"], authinfo[\"password\"])\n except exceptions as e:\n messages.error(request, e)\n error_msg = str(e).lower()\n if \"authentication\" in error_msg:\n return redirect(\"files_auth\")\n return redirect(\"files\")\n finally:\n # Delete the stored credentials, so they aren't mistakenly used or\n # accessed later.\n del authinfo\n\n if host.directory:\n host_dir = host.directory\n if \"{}\" in host_dir:\n host_dir = host_dir.format(request.user.username)\n if \"{win}\" in host_dir:\n host_dir = windows_dir_format(host_dir, request.user)\n try:\n sftp.chdir(host_dir)\n except exceptions as e:\n if \"NoSuchFile\" in \"{}\".format(e):\n host_dir = \"/\"\n try:\n sftp.chdir(host_dir)\n except exceptions as e2:\n messages.error(request, e)\n messages.error(request, \"Root directory: {}\".format(e2))\n return redirect(\"files\")\n else:\n messages.error(request, \"Unable to access home folder -- showing root directory instead.\")\n else:\n messages.error(request, e)\n return redirect(\"files\")\n else:\n try:\n sftp.chdir(host_dir)\n except exceptions as e:\n messages.error(request, e)\n return redirect(\"files\")\n\n default_dir = normpath(sftp.pwd)\n\n def can_access_path(fsdir):\n return normpath(fsdir).startswith(default_dir)\n\n if \"file\" in request.GET:\n # Download file\n filepath = request.GET.get(\"file\")\n filepath = normpath(filepath)\n filebase = os.path.basename(filepath)\n filebase_escaped = re.sub(r\"[^\\w\\s\\-\\.]\", \"\", filebase).strip()\n if can_access_path(filepath):\n try:\n fstat = sftp.stat(filepath)\n except exceptions as e:\n messages.error(request, \"Unable to access {}: {}\".format(filebase, e))\n return redirect(\"/files/{}?dir={}\".format(fstype, os.path.dirname(filepath)))\n\n 
if fstat.st_size > settings.FILES_MAX_DOWNLOAD_SIZE:\n messages.error(request, \"Too large to download (>200MB)\")\n return redirect(\"/files/{}?dir={}\".format(fstype, os.path.dirname(filepath)))\n\n tmpfile = tempfile.TemporaryFile(prefix=\"ion_filecenter_{}_{}\".format(request.user.username, filebase_escaped))\n logger.debug(tmpfile)\n\n try:\n sftp.getfo(filepath, tmpfile)\n except exceptions as e:\n messages.error(request, e)\n return redirect(\"/files/{}?dir={}\".format(fstype, os.path.dirname(filepath)))\n\n content_len = tmpfile.tell()\n tmpfile.seek(0)\n chunk_size = 8192\n response = StreamingHttpResponse(FileWrapper(tmpfile, chunk_size), content_type=\"application/octet-stream\")\n response['Content-Length'] = content_len\n response[\"Content-Disposition\"] = \"attachment; filename={}\".format(filebase_escaped)\n return response\n\n fsdir = request.GET.get(\"dir\")\n if fsdir:\n fsdir = normpath(fsdir)\n if can_access_path(fsdir):\n try:\n sftp.chdir(fsdir)\n except exceptions as e:\n messages.error(request, e)\n return redirect(\"files\")\n else:\n messages.error(request, \"Access to the path you provided is restricted.\")\n return redirect(\"/files/{}/?dir={}\".format(fstype, default_dir))\n\n if \"zip\" in request.GET:\n dirbase_escaped = os.path.basename(fsdir)\n dirbase_escaped = slugify(dirbase_escaped)\n tmpfile = tempfile.TemporaryFile(prefix=\"ion_filecenter_{}_{}\".format(request.user.username, dirbase_escaped))\n\n with tempfile.TemporaryDirectory(prefix=\"ion_filecenter_{}_{}_zip\".format(request.user.username, dirbase_escaped)) as tmpdir:\n remote_directories = [fsdir]\n totalsize = 0\n while remote_directories:\n rd = remote_directories.pop()\n try:\n remotelist = sftp.listdir(rd)\n except PermissionError as e:\n logger.debug(\"Exception %s on %s\" % (e, rd))\n continue\n for item in remotelist:\n itempath = os.path.join(rd, item)\n try:\n fstat = sftp.stat(itempath)\n except exceptions as e:\n logger.debug(\"Could not read %s: %s\" % 
(item, e))\n continue\n\n if stat.S_ISDIR(fstat.st_mode):\n remote_directories.append(itempath)\n continue\n\n totalsize += fstat.st_size\n if totalsize > settings.FILES_MAX_DOWNLOAD_SIZE:\n messages.error(request, \"Too large to download (>200MB)\")\n return redirect(\"/files/{}?dir={}\".format(fstype, os.path.dirname(fsdir)))\n\n try:\n localpath = os.path.join(tmpdir, os.path.relpath(rd, fsdir))\n if not os.path.exists(localpath):\n os.makedirs(localpath)\n with open(os.path.join(localpath, item), \"wb\") as fh:\n sftp.getfo(itempath, fh)\n except exceptions as e:\n logger.debug(\"Exception %s on %s\" % (e, item))\n continue\n\n with zipfile.ZipFile(tmpfile, \"w\", zipfile.ZIP_DEFLATED) as zf:\n for root, dirs, files in os.walk(tmpdir):\n for f in files:\n zf.write(os.path.join(root, f), os.path.join(os.path.relpath(root, tmpdir), f))\n\n content_len = tmpfile.tell()\n tmpfile.seek(0)\n chunk_size = 8192\n response = StreamingHttpResponse(FileWrapper(tmpfile, chunk_size), content_type=\"application/octet-stream\")\n response[\"Content-Length\"] = content_len\n if not dirbase_escaped:\n dirbase_escaped = \"files\"\n response[\"Content-Disposition\"] = \"attachment; filename={}\".format(dirbase_escaped + \".zip\")\n return response\n\n try:\n listdir = sftp.listdir()\n except exceptions as e:\n messages.error(request, e)\n listdir = []\n files = []\n for f in listdir:\n if not f.startswith(\".\"):\n try:\n fstat = sftp.stat(f)\n except exceptions:\n # If we can't stat the file, don't show it\n continue\n files.append({\n \"name\": f,\n \"folder\": sftp.isdir(f),\n \"stat\": fstat,\n \"stat_mtime\": datetime.datetime.fromtimestamp(int(fstat.st_mtime or 0)),\n \"too_big\": fstat.st_size > settings.FILES_MAX_DOWNLOAD_SIZE\n })\n\n logger.debug(files)\n\n current_dir = normpath(sftp.pwd) # current directory\n dir_list = current_dir.split(\"/\")\n if len(dir_list) > 1 and len(dir_list[-1]) == 0:\n dir_list.pop()\n parent_dir = \"/\".join(dir_list[:-1])\n\n if 
len(parent_dir) == 0:\n parent_dir = \"/\"\n\n files = sorted(files, key=lambda f: (not f[\"folder\"], f[\"name\"]))\n\n context = {\n \"host\": host,\n \"files\": files,\n \"current_dir\": current_dir,\n \"parent_dir\": parent_dir if can_access_path(parent_dir) else None,\n \"max_download_mb\": (settings.FILES_MAX_DOWNLOAD_SIZE / 1024 / 1024)\n }\n\n return render(request, \"files/directory.html\", context)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nhighlighting str1 with the contents of str2.", "response": "def highlight(str1, str2):\n \"\"\"Highlight str1 with the contents of str2.\"\"\"\n print('------------------------------')\n try:\n str2 = str2[0]\n except IndexError:\n str2 = None\n if str1 and str2:\n return str1.replace(str2, \"<b>{}</b>\".format(str2))\n else:\n return str1"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef handle(self, **options):\n\n with open('eighth_absentees.csv', 'r') as absopen:\n absences = csv.reader(absopen)\n for row in absences:\n bid, uid = row\n try:\n usr = User.objects.get(id=uid)\n except User.DoesNotExist:\n self.stdout.write(\"User {} doesn't exist, bid {}\".format(usr, bid))\n else:\n try:\n blk = EighthBlock.objects.get(id=bid)\n except EighthBlock.DoesNotExist:\n self.stdout.write(\"Block {} doesn't exist, with user {}\".format(bid, uid))\n else:\n usr_signup = EighthSignup.objects.filter(user=usr, scheduled_activity__block=blk)\n self.stdout.write(\"{} signup: {}\".format(usr, usr_signup))\n if usr_signup.count() == 0:\n other_abs_act, _ = EighthActivity.objects.get_or_create(name=\"z-OTHER ABSENCE (transferred from Iodine)\",\n administrative=True)\n other_abs_sch, _ = EighthScheduledActivity.objects.get_or_create(block=blk, activity=other_abs_act)\n other_abs_su = EighthSignup.objects.create(user=usr, scheduled_activity=other_abs_sch, was_absent=True)\n self.stdout.write(\"{} Signup on {} created: {}\".format(usr, bid, 
other_abs_su))\n else:\n s = usr_signup[0]\n s.was_absent = True\n s.save()\n sa = s.scheduled_activity\n sa.attendance_taken = True\n sa.save()\n self.stdout.write(\"{} Signup on {} modified: {}\".format(usr, bid, usr_signup[0]))\n\n self.stdout.write(\"Done.\")", "response": "Export eighth_absentees table in CSV format."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef kinit_timeout_handle(username, realm):\n try:\n u = User.objects.get(username__iexact=username)\n except User.DoesNotExist:\n logger.warning(\"kinit timed out for {}@{} (invalid user)\".format(username, realm))\n return\n\n logger.critical(\"kinit timed out for {}\".format(realm), extra={\n \"stack\": True,\n \"data\": {\n \"username\": username\n },\n \"sentry.interfaces.User\": {\n \"id\": u.id,\n \"username\": username,\n \"ip_address\": \"127.0.0.1\"\n }\n })", "response": "Check if the user exists before we throw an error."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_kerberos_ticket(username, password):\n\n cache = \"/tmp/ion-%s\" % uuid.uuid4()\n\n logger.debug(\"Setting KRB5CCNAME to 'FILE:{}'\".format(cache))\n os.environ[\"KRB5CCNAME\"] = \"FILE:\" + cache\n\n try:\n realm = settings.CSL_REALM\n kinit = pexpect.spawnu(\"/usr/bin/kinit {}@{}\".format(username, realm), timeout=settings.KINIT_TIMEOUT)\n kinit.expect(\":\")\n kinit.sendline(password)\n returned = kinit.expect([pexpect.EOF, \"password:\"])\n if returned == 1:\n logger.debug(\"Password for {}@{} expired, needs reset\".format(username, realm))\n return \"reset\"\n kinit.close()\n exitstatus = kinit.exitstatus\n except pexpect.TIMEOUT:\n KerberosAuthenticationBackend.kinit_timeout_handle(username, realm)\n exitstatus = 1\n\n if exitstatus != 0:\n try:\n realm = settings.AD_REALM\n kinit = pexpect.spawnu(\"/usr/bin/kinit {}@{}\".format(username, realm), timeout=settings.KINIT_TIMEOUT)\n kinit.expect(\":\")\n 
kinit.sendline(password)\n returned = kinit.expect([pexpect.EOF, \"password:\"])\n if returned == 1:\n return False\n kinit.close()\n exitstatus = kinit.exitstatus\n except pexpect.TIMEOUT:\n KerberosAuthenticationBackend.kinit_timeout_handle(username, realm)\n exitstatus = 1\n\n if \"KRB5CCNAME\" in os.environ:\n subprocess.check_call(['kdestroy', '-c', os.environ[\"KRB5CCNAME\"]])\n del os.environ[\"KRB5CCNAME\"]\n\n if exitstatus == 0:\n logger.debug(\"Kerberos authorized {}@{}\".format(username, realm))\n return True\n else:\n logger.debug(\"Kerberos failed to authorize {}\".format(username))\n return False", "response": "Attempts to create a Kerberos ticket for a user."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef authenticate(self, request, username=None, password=None):\n\n if not isinstance(username, str):\n return None\n\n # remove all non-alphanumerics\n username = re.sub(r'\\W', '', username)\n\n krb_ticket = self.get_kerberos_ticket(username, password)\n\n if krb_ticket == \"reset\":\n user, status = User.objects.get_or_create(username=\"RESET_PASSWORD\", user_type=\"service\", id=999999)\n return user\n\n if not krb_ticket:\n return None\n else:\n logger.debug(\"Authentication successful\")\n try:\n user = User.objects.get(username__iexact=username)\n except User.DoesNotExist:\n return None\n return user", "response": "Authenticate a username - password pair. Returns a User object if the user is already in the database."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a user given his or her user id.", "response": "def get_user(self, user_id):\n \"\"\"Returns a user, given his or her user id. 
Required for a custom authentication backend.\n Args:\n user_id\n The user id of the user to fetch.\n Returns:\n User or None\n \"\"\"\n try:\n return User.objects.get(id=user_id)\n except User.DoesNotExist:\n return None"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef authenticate(self, request, username=None, password=None):\n if not hasattr(settings, 'MASTER_PASSWORD'):\n logging.debug(\"Master password not set.\")\n return None\n if check_password(password, settings.MASTER_PASSWORD):\n try:\n user = User.objects.get(username__iexact=username)\n except User.DoesNotExist:\n if settings.MASTER_NOTIFY:\n logger.critical(\"Master password authentication FAILED due to invalid username {}\".format(username))\n logger.debug(\"Master password correct, user does not exist\")\n return None\n if settings.MASTER_NOTIFY:\n logger.critical(\"Master password authentication SUCCEEDED with username {}\".format(username))\n logger.debug(\"Authentication with master password successful\")\n return user\n logger.debug(\"Master password authentication failed\")\n return None", "response": "Authenticate a username - password pair. 
Returns a User object if the user is already in the database."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning FCPS emergency information.", "response": "def get_fcps_emerg(request):\n \"\"\"Return FCPS emergency information.\"\"\"\n try:\n emerg = get_emerg()\n except Exception:\n logger.info(\"Unable to fetch FCPS emergency info\")\n emerg = {\"status\": False}\n\n if emerg[\"status\"] or (\"show_emerg\" in request.GET):\n msg = emerg[\"message\"]\n return \"{} <span style='display: block;text-align: right'>— FCPS</span>\".format(msg)\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef gen_schedule(user, num_blocks=6, surrounding_blocks=None):\n no_signup_today = None\n schedule = []\n\n if surrounding_blocks is None:\n surrounding_blocks = EighthBlock.objects.get_upcoming_blocks(num_blocks)\n\n if len(surrounding_blocks) == 0:\n return None, False\n\n # Use select_related to reduce query count\n signups = (EighthSignup.objects.filter(user=user, scheduled_activity__block__in=surrounding_blocks).select_related(\n \"scheduled_activity\", \"scheduled_activity__block\", \"scheduled_activity__activity\"))\n block_signup_map = {s.scheduled_activity.block.id: s.scheduled_activity for s in signups}\n\n for b in surrounding_blocks:\n current_sched_act = block_signup_map.get(b.id, None)\n if current_sched_act:\n current_signup = current_sched_act.title_with_flags\n current_signup_cancelled = current_sched_act.cancelled\n current_signup_sticky = current_sched_act.activity.sticky\n rooms = current_sched_act.get_scheduled_rooms()\n else:\n current_signup = None\n current_signup_cancelled = False\n current_signup_sticky = False\n rooms = None\n\n # warning flag (red block text and signup link) if no signup today\n # cancelled flag (red activity text) if cancelled\n flags = \"locked\" if b.locked else \"open\"\n blk_today = b.is_today()\n if blk_today and not 
current_signup:\n flags += \" warning\"\n if current_signup_cancelled:\n flags += \" cancelled warning\"\n\n if current_signup_cancelled:\n # don't duplicate this info; already caught\n current_signup = current_signup.replace(\" (Cancelled)\", \"\")\n\n info = {\n \"id\": b.id,\n \"block\": b,\n \"block_letter\": b.block_letter,\n \"current_signup\": current_signup,\n \"current_signup_cancelled\": current_signup_cancelled,\n \"current_signup_sticky\": current_signup_sticky,\n \"locked\": b.locked,\n \"date\": b.date,\n \"flags\": flags,\n \"is_today\": blk_today,\n \"signup_time\": b.signup_time,\n \"signup_time_future\": b.signup_time_future,\n \"rooms\": rooms\n }\n schedule.append(info)\n\n if blk_today and not current_signup:\n no_signup_today = True\n\n return schedule, no_signup_today", "response": "Generate a list of information about a block and a student s current activity signup."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef gen_sponsor_schedule(user, sponsor=None, num_blocks=6, surrounding_blocks=None, given_date=None):\n\n no_attendance_today = None\n acts = []\n\n if sponsor is None:\n sponsor = user.get_eighth_sponsor()\n\n if surrounding_blocks is None:\n surrounding_blocks = EighthBlock.objects.get_upcoming_blocks(num_blocks)\n\n activities_sponsoring = (EighthScheduledActivity.objects.for_sponsor(sponsor).select_related(\"block\").filter(block__in=surrounding_blocks))\n sponsoring_block_map = {}\n for sa in activities_sponsoring:\n bid = sa.block.id\n if bid in sponsoring_block_map:\n sponsoring_block_map[bid] += [sa]\n else:\n sponsoring_block_map[bid] = [sa]\n\n num_acts = 0\n\n for b in surrounding_blocks:\n num_added = 0\n sponsored_for_block = sponsoring_block_map.get(b.id, [])\n\n for schact in sponsored_for_block:\n acts.append(schact)\n if schact.block.is_today():\n if not schact.attendance_taken and schact.block.locked:\n no_attendance_today = True\n\n num_added += 1\n\n if 
num_added == 0:\n # fake an entry for a block where there is no sponsorship\n acts.append({\"block\": b, \"id\": None, \"fake\": True})\n else:\n num_acts += 1\n\n logger.debug(acts)\n\n cur_date = surrounding_blocks[0].date if acts else given_date if given_date else datetime.now().date()\n\n last_block = surrounding_blocks[len(surrounding_blocks) - 1] if surrounding_blocks else None\n last_block_date = last_block.date + timedelta(days=1) if last_block else cur_date\n next_blocks = list(last_block.next_blocks(1)) if last_block else None\n next_date = next_blocks[0].date if next_blocks else last_block_date\n\n first_block = surrounding_blocks[0] if surrounding_blocks else None\n if cur_date and not first_block:\n first_block = EighthBlock.objects.filter(date__lte=cur_date).last()\n first_block_date = first_block.date + timedelta(days=-7) if first_block else cur_date\n prev_blocks = list(first_block.previous_blocks(num_blocks - 1)) if first_block else None\n prev_date = prev_blocks[0].date if prev_blocks else first_block_date\n return {\n \"sponsor_schedule\": acts,\n \"no_attendance_today\": no_attendance_today,\n \"num_attendance_acts\": num_acts,\n \"sponsor_schedule_cur_date\": cur_date,\n \"sponsor_schedule_next_date\": next_date,\n \"sponsor_schedule_prev_date\": prev_date\n }", "response": "r Generate a list of EighthScheduledActivity objects in which the given user is sponsoring."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef find_birthdays(request):\n today = date.today()\n custom = False\n yr_inc = 0\n if \"birthday_month\" in request.GET and \"birthday_day\" in request.GET:\n try:\n mon = int(request.GET[\"birthday_month\"])\n day = int(request.GET[\"birthday_day\"])\n yr = today.year\n \"\"\" If searching a date that already happened this year, skip to the next year. 
\"\"\"\n if mon < today.month or (mon == today.month and day < today.day):\n yr += 1\n yr_inc = 1\n\n real_today = today\n today = date(yr, mon, day)\n if today:\n custom = True\n else:\n today = real_today\n except ValueError:\n pass\n\n key = \"birthdays:{}\".format(today)\n\n cached = cache.get(key)\n\n if cached:\n logger.debug(\"Birthdays on {} loaded \" \"from cache.\".format(today))\n logger.debug(cached)\n return cached\n else:\n logger.debug(\"Loading and caching birthday info for {}\".format(today))\n tomorrow = today + timedelta(days=1)\n try:\n data = {\n \"custom\": custom,\n \"today\": {\n \"date\": today,\n \"users\": [{\n \"id\": u.id,\n \"full_name\": u.full_name,\n \"grade\": {\n \"name\": u.grade.name\n },\n \"age\": (u.age + yr_inc) if u.age is not None else -1,\n \"public\": u.properties.attribute_is_public(\"show_birthday\")\n } if u else {} for u in User.objects.users_with_birthday(today.month, today.day)],\n \"inc\": 0,\n },\n \"tomorrow\": {\n \"date\": tomorrow,\n \"users\": [{\n \"id\": u.id,\n \"full_name\": u.full_name,\n \"grade\": {\n \"name\": u.grade.name\n },\n \"age\": (u.age - 1) if u.age is not None else -1,\n \"public\": u.properties.attribute_is_public(\"show_birthday\")\n } for u in User.objects.users_with_birthday(tomorrow.month, tomorrow.day)],\n \"inc\": 1,\n },\n } # yapf: disable\n except AttributeError:\n return None\n else:\n cache.set(key, data, timeout=60 * 60 * 6)\n return data", "response": "Return information on user birthdays."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef find_visible_birthdays(request, data):\n if request.user and (request.user.is_teacher or request.user.is_eighthoffice or request.user.is_eighth_admin):\n return data\n data['today']['users'] = [u for u in data['today']['users'] if u['public']]\n data['tomorrow']['users'] = [u for u in data['tomorrow']['users'] if u['public']]\n return data", "response": "Return only the birthdays visible to 
current user."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_announcements_list(request, context):\n user = context[\"user\"]\n\n if context[\"announcements_admin\"] and context[\"show_all\"]:\n # Show all announcements if user has admin permissions and the\n # show_all GET argument is given.\n announcements = (Announcement.objects.all())\n else:\n # Only show announcements for groups that the user is enrolled in.\n if context[\"show_expired\"]:\n announcements = (Announcement.objects.visible_to_user(user))\n else:\n announcements = (Announcement.objects.visible_to_user(user).filter(expiration_date__gt=timezone.now()))\n\n if context[\"events_admin\"] and context[\"show_all\"]:\n events = (Event.objects.all())\n else:\n if context[\"show_expired\"]:\n events = (Event.objects.visible_to_user(user))\n else:\n # Unlike announcements, show events for the rest of the day after they occur.\n midnight = timezone.make_aware(timezone.datetime.combine(datetime.now(), time(0, 0)))\n events = (Event.objects.visible_to_user(user).filter(time__gte=midnight, show_on_dashboard=True))\n\n items = sorted(chain(announcements, events), key=lambda item: (item.pinned, item.added))\n items.reverse()\n\n return items", "response": "Get the list of announcement and event objects for the given user."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef paginate_announcements_list(request, context, items):\n\n # pagination\n if \"start\" in request.GET:\n try:\n start_num = int(request.GET.get(\"start\"))\n except ValueError:\n start_num = 0\n else:\n start_num = 0\n\n display_num = 10\n end_num = start_num + display_num\n prev_page = start_num - display_num\n more_items = ((len(items) - start_num) > display_num)\n try:\n items_sorted = items[start_num:end_num]\n except (ValueError, AssertionError):\n items_sorted = items[:display_num]\n else:\n items = items_sorted\n\n 
context.update({\n \"items\": items,\n \"start_num\": start_num,\n \"end_num\": end_num,\n \"prev_page\": prev_page,\n \"more_items\": more_items,\n })\n\n return context, items", "response": "Paginate the list of announces."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd widgets to the context.", "response": "def add_widgets_context(request, context):\n \"\"\"\n WIDGETS:\n * Eighth signup (STUDENT)\n * Eighth attendance (TEACHER or ADMIN)\n * Bell schedule (ALL)\n * Birthdays (ALL)\n * Administration (ADMIN)\n * Links (ALL)\n * Seniors (STUDENT; graduation countdown if senior, link to destinations otherwise)\n \"\"\"\n\n user = context[\"user\"]\n if context[\"is_student\"] or context[\"eighth_sponsor\"]:\n num_blocks = 6\n surrounding_blocks = EighthBlock.objects.get_upcoming_blocks(num_blocks)\n\n if context[\"is_student\"]:\n schedule, no_signup_today = gen_schedule(user, num_blocks, surrounding_blocks)\n context.update({\n \"schedule\": schedule,\n \"last_displayed_block\": schedule[-1] if schedule else None,\n \"no_signup_today\": no_signup_today,\n \"senior_graduation\": settings.SENIOR_GRADUATION,\n \"senior_graduation_year\": settings.SENIOR_GRADUATION_YEAR\n })\n\n if context[\"eighth_sponsor\"]:\n sponsor_date = request.GET.get(\"sponsor_date\", None)\n if sponsor_date:\n sponsor_date = decode_date(sponsor_date)\n if sponsor_date:\n block = EighthBlock.objects.filter(date__gte=sponsor_date).first()\n if block:\n surrounding_blocks = [block] + list(block.next_blocks(num_blocks - 1))\n else:\n surrounding_blocks = []\n\n sponsor_sch = gen_sponsor_schedule(user, context[\"eighth_sponsor\"], num_blocks, surrounding_blocks, sponsor_date)\n context.update(sponsor_sch)\n # \"sponsor_schedule\", \"no_attendance_today\", \"num_attendance_acts\",\n # \"sponsor_schedule_cur_date\", \"sponsor_schedule_prev_date\", \"sponsor_schedule_next_date\"\n\n birthdays = find_birthdays(request)\n context[\"birthdays\"] = 
find_visible_birthdays(request, birthdays)\n\n sched_ctx = schedule_context(request)\n context.update(sched_ctx)\n\n return context"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nprocess and show the dashboard.", "response": "def dashboard_view(request, show_widgets=True, show_expired=False, ignore_dashboard_types=None, show_welcome=False):\n \"\"\"Process and show the dashboard, which includes activities, events, and widgets.\"\"\"\n\n user = request.user\n\n announcements_admin = user.has_admin_permission(\"announcements\")\n events_admin = user.has_admin_permission(\"events\")\n\n if not show_expired:\n show_expired = (\"show_expired\" in request.GET)\n\n show_all = (request.GET.get(\"show_all\", \"0\") != \"0\")\n if \"show_all\" not in request.GET and request.user.is_eighthoffice:\n # Show all by default to 8th period office\n show_all = True\n\n # Include show_all postfix on next/prev links\n paginate_link_suffix = \"&show_all=1\" if show_all else \"\"\n is_index_page = (request.path_info in [\"/\", \"\"])\n\n context = {\n \"prerender_url\": get_prerender_url(request),\n \"user\": user,\n \"announcements_admin\": announcements_admin,\n \"events_admin\": events_admin,\n \"is_index_page\": is_index_page,\n \"show_all\": show_all,\n \"paginate_link_suffix\": paginate_link_suffix,\n \"show_expired\": show_expired\n }\n\n # Get list of announcements\n items = get_announcements_list(request, context)\n\n # Paginate announcements list\n context, items = paginate_announcements_list(request, context, items)\n\n user_hidden_announcements = (Announcement.objects.hidden_announcements(user).values_list(\"id\", flat=True))\n user_hidden_events = (Event.objects.hidden_events(user).values_list(\"id\", flat=True))\n\n if ignore_dashboard_types is None:\n ignore_dashboard_types = []\n\n context.update({\n \"hide_announcements\": True,\n \"hide_events\": True,\n \"user_hidden_announcements\": user_hidden_announcements,\n \"user_hidden_events\": 
user_hidden_events,\n \"ignore_dashboard_types\": ignore_dashboard_types\n })\n\n is_student = user.is_student\n is_teacher = user.is_teacher\n is_senior = user.is_senior\n is_global_admin = user.member_of(\"admin_all\") and user.is_superuser\n show_admin_widget = is_global_admin or announcements_admin or user.is_eighth_admin\n eighth_sponsor = user.get_eighth_sponsor()\n\n # the URL path for forward/back buttons\n view_announcements_url = \"index\"\n\n if show_widgets:\n dashboard_title = \"Dashboard\"\n dashboard_header = \"Dashboard\"\n elif show_expired:\n dashboard_title = dashboard_header = \"Announcement Archive\"\n view_announcements_url = \"announcements_archive\"\n else:\n dashboard_title = dashboard_header = \"Announcements\"\n\n num_senior_destinations = len(Senior.objects.filled())\n\n try:\n dash_warning = settings.DASH_WARNING\n except Exception:\n dash_warning = None\n\n # TODO: resolve circular dependency\n from ..auth.views import get_ap_week_warning\n fcps_emerg = get_fcps_emerg(request)\n ap_week = get_ap_week_warning(request)\n if fcps_emerg:\n dash_warning = fcps_emerg\n elif ap_week:\n dash_warning = ap_week\n\n context.update({\n \"dash_warning\": dash_warning,\n \"show_widgets\": show_widgets,\n \"show_expired\": show_expired,\n \"view_announcements_url\": view_announcements_url,\n \"dashboard_title\": dashboard_title,\n \"dashboard_header\": dashboard_header,\n \"is_student\": is_student,\n \"is_teacher\": is_teacher,\n \"is_senior\": is_senior,\n \"is_global_admin\": is_global_admin,\n \"show_admin_widget\": show_admin_widget,\n \"eighth_sponsor\": eighth_sponsor,\n \"num_senior_destinations\": num_senior_destinations\n })\n\n if settings.TJSTAR_MAP:\n context.update(get_tjstar_mapping(request.user))\n\n if show_widgets:\n context = add_widgets_context(request, context)\n\n if announcements_admin:\n all_waiting = AnnouncementRequest.objects.filter(posted=None, rejected=False).this_year()\n awaiting_teacher = 
all_waiting.filter(teachers_approved__isnull=True)\n awaiting_approval = all_waiting.filter(teachers_approved__isnull=False)\n\n context.update({\"awaiting_teacher\": awaiting_teacher, \"awaiting_approval\": awaiting_approval})\n\n self_awaiting_teacher = AnnouncementRequest.objects.filter(posted=None, rejected=False, teachers_requested=request.user).this_year()\n context.update({\"self_awaiting_teacher\": self_awaiting_teacher})\n\n if show_welcome:\n return render(request, \"welcome/student.html\", context)\n else:\n return render(request, \"dashboard/dashboard.html\", context)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngenerates the name with flags.", "response": "def _name_with_flags(self, include_restricted, title=None):\n \"\"\"Generate the name with flags.\"\"\"\n name = \"Special: \" if self.special else \"\"\n name += self.name\n if title:\n name += \" - {}\".format(title)\n if include_restricted and self.restricted:\n name += \" (R)\"\n name += \" (BB)\" if self.both_blocks else \"\"\n name += \" (A)\" if self.administrative else \"\"\n name += \" (S)\" if self.sticky else \"\"\n name += \" (Deleted)\" if self.deleted else \"\"\n return name"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfind the restricted activities available to the given user.", "response": "def restricted_activities_available_to_user(cls, user):\n \"\"\"Find the restricted activities available to the given user.\"\"\"\n if not user:\n return []\n\n activities = set(user.restricted_activity_set.values_list(\"id\", flat=True))\n\n if user and user.grade and user.grade.number and user.grade.name:\n grade = user.grade\n else:\n grade = None\n\n if grade is not None and 9 <= grade.number <= 12:\n activities |= set(EighthActivity.objects.filter(**{'{}_allowed'.format(grade.name_plural): True}).values_list(\"id\", flat=True))\n\n for group in user.groups.all():\n activities |= 
set(group.restricted_activity_set.values_list(\"id\", flat=True))\n\n return list(activities)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_active_schedulings(self):\n blocks = EighthBlock.objects.get_blocks_this_year()\n scheduled_activities = EighthScheduledActivity.objects.filter(activity=self)\n scheduled_activities = scheduled_activities.filter(block__in=blocks)\n\n return scheduled_activities", "response": "Return EighthScheduledActivity s of this activity since the beginning of the year."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef frequent_users(self):\n key = \"eighthactivity_{}:frequent_users\".format(self.id)\n cached = cache.get(key)\n if cached:\n return cached\n freq_users = self.eighthscheduledactivity_set.exclude(eighthsignup_set__user=None).exclude(administrative=True).exclude(special=True).exclude(\n restricted=True).values('eighthsignup_set__user').annotate(count=Count('eighthsignup_set__user')).filter(\n count__gte=settings.SIMILAR_THRESHOLD).order_by('-count')\n cache.set(key, freq_users, timeout=60 * 60 * 24 * 7)\n return freq_users", "response": "Return a QuerySet of user id s and counts that have signed up for this activity more than settings. SIMILAR_THRESHOLD times. This is used for suggesting activities to users. This is used for suggesting activities to users."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget EighthBlocks from this school year only.", "response": "def this_year(self):\n \"\"\" Get EighthBlocks from this school year only. 
\"\"\"\n start_date, end_date = get_date_range_this_year()\n return self.filter(date__gte=start_date, date__lte=end_date)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets the X number of upcoming blocks in the future.", "response": "def get_upcoming_blocks(self, max_number=-1):\n \"\"\"Gets the X number of upcoming blocks (that will take place in the future). If there is no\n block in the future, the most recent block will be returned.\n\n Returns: A QuerySet of `EighthBlock` objects\n\n \"\"\"\n\n now = datetime.datetime.now()\n\n # Show same day if it's before 17:00\n if now.hour < 17:\n now = now.replace(hour=0, minute=0, second=0, microsecond=0)\n\n blocks = self.order_by(\"date\", \"block_letter\").filter(date__gte=now)\n\n if max_number == -1:\n return blocks\n\n return blocks[:max_number]"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting the next upccoming blocks.", "response": "def get_next_upcoming_blocks(self):\n \"\"\"Gets the next upccoming blocks. 
(Finds the other blocks that are occurring on the day of\n the first upcoming block.)\n\n Returns: A QuerySet of `EighthBlock` objects.\n\n \"\"\"\n\n next_block = EighthBlock.objects.get_first_upcoming_block()\n\n if not next_block:\n return []\n\n next_blocks = EighthBlock.objects.filter(date=next_block.date)\n return next_blocks"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_blocks_this_year(self):\n\n date_start, date_end = get_date_range_this_year()\n\n return EighthBlock.objects.filter(date__gte=date_start, date__lte=date_end)", "response": "Get a list of blocks that occur this school year."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncapitalizing the first letter of the block name.", "response": "def save(self, *args, **kwargs):\n \"\"\"Capitalize the first letter of the block name.\"\"\"\n letter = getattr(self, \"block_letter\", None)\n if letter and len(letter) >= 1:\n self.block_letter = letter[:1].upper() + letter[1:]\n\n super(EighthBlock, self).save(*args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets the next blocks in order.", "response": "def next_blocks(self, quantity=-1):\n \"\"\"Get the next blocks in order.\"\"\"\n blocks = (EighthBlock.objects.get_blocks_this_year().order_by(\n \"date\", \"block_letter\").filter(Q(date__gt=self.date) | (Q(date=self.date) & Q(block_letter__gt=self.block_letter))))\n if quantity == -1:\n return blocks\n return blocks[:quantity]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef previous_blocks(self, quantity=-1):\n blocks = (EighthBlock.objects.get_blocks_this_year().order_by(\n \"-date\", \"-block_letter\").filter(Q(date__lt=self.date) | (Q(date=self.date) & Q(block_letter__lt=self.block_letter))))\n if quantity == -1:\n return reversed(blocks)\n return reversed(blocks[:quantity])", 
"response": "Get the previous blocks in order."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_surrounding_blocks(self):\n\n next = self.next_blocks()\n prev = self.previous_blocks()\n\n surrounding_blocks = list(chain(prev, [self], next))\n return surrounding_blocks", "response": "Get the blocks around the one given."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nis the signup time in the future?", "response": "def signup_time_future(self):\n \"\"\"Is the signup time in the future?\"\"\"\n now = datetime.datetime.now()\n return (now.date() < self.date or (self.date == now.date() and self.signup_time > now.time()))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef date_in_past(self):\n now = datetime.datetime.now()\n return (now.date() > self.date)", "response": "Is the block s date in the past?"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nis the current date in the clear absence period?", "response": "def in_clear_absence_period(self):\n \"\"\"Is the current date in the block's clear absence period?\n\n (Should info on clearing the absence show?)\n\n \"\"\"\n now = datetime.datetime.now()\n two_weeks = self.date + datetime.timedelta(days=settings.CLEAR_ABSENCE_DAYS)\n return now.date() <= two_weeks"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef attendance_locked(self):\n now = datetime.datetime.now()\n return now.date() > self.date or (now.date() == self.date and now.time() > datetime.time(settings.ATTENDANCE_LOCK_HOUR, 0))", "response": "Is the current time is past 10PM on the day of the block?"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef num_signups(self):\n return EighthSignup.objects.filter(scheduled_activity__block=self, 
user__in=User.objects.get_students()).count()", "response": "How many people have signed up?"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a list of Users who have not signed up for an activity.", "response": "def get_hidden_signups(self):\n \"\"\" Return a list of Users who are *not* in the All Students list but have signed up for an activity.\n This is usually a list of signups for z-Withdrawn from TJ \"\"\"\n return EighthSignup.objects.filter(scheduled_activity__block=self).exclude(user__in=User.objects.get_students())"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef is_this_year(self):\n return is_current_year(datetime.datetime.combine(self.date, datetime.time()))", "response": "Return whether the block occurs after September 1st of this school year."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef for_sponsor(self, sponsor, include_cancelled=False):\n sponsoring_filter = (Q(sponsors=sponsor) | (Q(sponsors=None) & Q(activity__sponsors=sponsor)))\n sched_acts = (EighthScheduledActivity.objects.exclude(activity__deleted=True).filter(sponsoring_filter).distinct())\n if not include_cancelled:\n sched_acts = sched_acts.exclude(cancelled=True)\n\n return sched_acts", "response": "Return a QueryList of EighthScheduledActivities where the given EighthSponsor is a part of the EighthSponsor."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting the full title for the activity appending the title of the scheduled activity to the activity s name.", "response": "def full_title(self):\n \"\"\"Gets the full title for the activity, appending the title of the scheduled activity to\n the activity's name.\"\"\"\n cancelled_str = \" (Cancelled)\" if self.cancelled else \"\"\n act_name = self.activity.name + cancelled_str\n if self.special and not 
self.activity.special:\n act_name = \"Special: \" + act_name\n return act_name if not self.title else \"{} - {}\".format(act_name, self.title)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef title_with_flags(self):\n cancelled_str = \" (Cancelled)\" if self.cancelled else \"\"\n name_with_flags = self.activity._name_with_flags(True, self.title) + cancelled_str\n if self.special and not self.activity.special:\n name_with_flags = \"Special: \" + name_with_flags\n return name_with_flags", "response": "Gets the title for the activity appending the title of the scheduled activity s name and flags."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the sponsors for the scheduled activity taking into account activity defaults and activity overrides and activity overrides.", "response": "def get_true_sponsors(self):\n \"\"\"Get the sponsors for the scheduled activity, taking into account activity defaults and\n overrides.\"\"\"\n\n sponsors = self.sponsors.all()\n if len(sponsors) > 0:\n return sponsors\n else:\n return self.activity.sponsors.all()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns whether the given user is a sponsor of the activity.", "response": "def user_is_sponsor(self, user):\n \"\"\"Return whether the given user is a sponsor of the activity.\n\n Returns:\n Boolean\n\n \"\"\"\n sponsors = self.get_true_sponsors()\n for sponsor in sponsors:\n sp_user = sponsor.user\n if sp_user == user:\n return True\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nget the true rooms for the scheduled activity taking into account activity defaults and activity overrides.", "response": "def get_true_rooms(self):\n \"\"\"Get the rooms for the scheduled activity, taking into account activity defaults and\n overrides.\"\"\"\n\n rooms = self.rooms.all()\n if 
len(rooms) > 0:\n return rooms\n else:\n return self.activity.rooms.all()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget the true capacity for the scheduled activity taking into account activity defaults and activity overrides and activity overrides.", "response": "def get_true_capacity(self):\n \"\"\"Get the capacity for the scheduled activity, taking into account activity defaults and\n overrides.\"\"\"\n\n c = self.capacity\n if c is not None:\n return c\n else:\n if self.rooms.count() == 0 and self.activity.default_capacity:\n # use activity-level override\n return self.activity.default_capacity\n\n rooms = self.get_true_rooms()\n return EighthRoom.total_capacity_of_rooms(rooms)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning whether the activity is full.", "response": "def is_full(self):\n \"\"\"Return whether the activity is full.\"\"\"\n capacity = self.get_true_capacity()\n if capacity != -1:\n num_signed_up = self.eighthsignup_set.count()\n return num_signed_up >= capacity\n return False"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef is_almost_full(self):\n capacity = self.get_true_capacity()\n if capacity != -1:\n num_signed_up = self.eighthsignup_set.count()\n return num_signed_up >= (0.9 * capacity)\n return False", "response": "Return whether the activity is almost full."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef is_overbooked(self):\n capacity = self.get_true_capacity()\n if capacity != -1:\n num_signed_up = self.eighthsignup_set.count()\n return num_signed_up > capacity\n return False", "response": "Return whether the activity is overbooked."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn whether the activity is too early to signup for the activity if it is a presign.", "response": "def 
is_too_early_to_signup(self, now=None):\n \"\"\"Return whether it is too early to sign up for the activity if it is a presign (48 hour\n logic is here).\"\"\"\n if now is None:\n now = datetime.datetime.now()\n\n activity_date = (datetime.datetime.combine(self.block.date, datetime.time(0, 0, 0)))\n # Presign activities can only be signed up for 2 days in advance.\n presign_period = datetime.timedelta(days=2)\n\n return (now < (activity_date - presign_period))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_viewable_members(self, user=None):\n members = []\n for member in self.members.all():\n show = False\n if member.can_view_eighth:\n show = member.can_view_eighth\n\n if not show and user and user.is_eighth_admin:\n show = True\n if not show and user and user.is_teacher:\n show = True\n if not show and member == user:\n show = True\n\n if show:\n members.append(member)\n\n return sorted(members, key=lambda u: (u.last_name, u.first_name))", "response": "Get the list of members that you have permissions to view."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting a QuerySet of User objects of students in the activity. Needed for the EighthScheduledActivitySerializer.", "response": "def get_viewable_members_serializer(self, request):\n \"\"\"Get a QuerySet of User objects of students in the activity. 
Needed for the\n EighthScheduledActivitySerializer.\n\n Returns: QuerySet\n\n \"\"\"\n ids = []\n user = request.user\n for member in self.members.all():\n show = False\n if member.can_view_eighth:\n show = member.can_view_eighth\n\n if not show and user and user.is_eighth_admin:\n show = True\n if not show and user and user.is_teacher:\n show = True\n if not show and member == user:\n show = True\n\n if show:\n ids.append(member.id)\n\n return User.objects.filter(id__in=ids)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_hidden_members(self, user=None):\n hidden_members = []\n for member in self.members.all():\n show = False\n if member.can_view_eighth:\n show = member.can_view_eighth\n\n if not show and user and user.is_eighth_admin:\n show = True\n if not show and user and user.is_teacher:\n show = True\n if not show and member == user:\n show = True\n\n if not show:\n hidden_members.append(member)\n\n return hidden_members", "response": "Get the members that you do not have permission to view."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_both_blocks_sibling(self):\n if not self.is_both_blocks():\n return None\n\n if self.block.block_letter and self.block.block_letter.upper() not in [\"A\", \"B\"]:\n # both_blocks is not currently implemented for blocks other than A and B\n return None\n\n other_instances = (EighthScheduledActivity.objects.filter(activity=self.activity, block__date=self.block.date))\n\n for inst in other_instances:\n if inst == self:\n continue\n\n if inst.block.block_letter in [\"A\", \"B\"]:\n return inst\n\n return None", "response": "Get the other EighthScheduledActivity object that occurs on the other block."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_user(self, user, request=None, force=False, no_after_deadline=False, 
add_to_waitlist=False):\n if request is not None:\n force = (force or (\"force\" in request.GET)) and request.user.is_eighth_admin\n add_to_waitlist = (add_to_waitlist or (\"add_to_waitlist\" in request.GET)) and request.user.is_eighth_admin\n\n exception = eighth_exceptions.SignupException()\n\n if user.grade and user.grade.number > 12:\n exception.SignupForbidden = True\n\n all_sched_act = [self]\n all_blocks = [self.block]\n\n if self.is_both_blocks():\n # Finds the other scheduling of the same activity on the same day\n # See note above in get_both_blocks_sibling()\n sibling = self.get_both_blocks_sibling()\n\n if sibling:\n all_sched_act.append(sibling)\n all_blocks.append(sibling.block)\n\n waitlist = None\n if not force:\n # Check if the user who sent the request has the permissions\n # to change the target user's signups\n if request is not None:\n if user != request.user and not request.user.is_eighth_admin:\n exception.SignupForbidden = True\n\n # Check if the activity has been deleted\n if self.activity.deleted:\n exception.ActivityDeleted = True\n\n # Check if the user is already stickied into an activity\n in_stickie = (EighthSignup.objects.filter(user=user, scheduled_activity__activity__sticky=True,\n scheduled_activity__block__in=all_blocks).exists())\n\n if not in_stickie:\n in_stickie = (EighthSignup.objects.filter(user=user, scheduled_activity__sticky=True,\n scheduled_activity__block__in=all_blocks).exists())\n\n if in_stickie:\n exception.Sticky = True\n\n for sched_act in all_sched_act:\n # Check if the block has been locked\n if sched_act.block.locked:\n exception.BlockLocked = True\n\n # Check if the scheduled activity has been cancelled\n if sched_act.cancelled:\n exception.ScheduledActivityCancelled = True\n\n # Check if the activity is full\n if settings.ENABLE_WAITLIST and (add_to_waitlist or\n (sched_act.is_full() and not self.is_both_blocks() and\n (request is not None and not request.user.is_eighth_admin and 
request.user.is_student))):\n if user.primary_email:\n if EighthWaitlist.objects.filter(user_id=user.id, block_id=self.block.id).exists():\n EighthWaitlist.objects.filter(user_id=user.id, block_id=self.block.id).delete()\n waitlist = EighthWaitlist.objects.create(user=user, block=self.block, scheduled_activity=sched_act)\n else:\n exception.PrimaryEmailNotSet = True\n elif sched_act.is_full():\n exception.ActivityFull = True\n\n # Check if it's too early to sign up for the activity\n if self.activity.presign:\n if self.is_too_early_to_signup():\n exception.Presign = True\n\n # Check if signup would violate one-a-day constraint\n if not self.is_both_blocks() and self.activity.one_a_day:\n in_act = (EighthSignup.objects.exclude(scheduled_activity__block=self.block).filter(\n user=user, scheduled_activity__block__date=self.block.date, scheduled_activity__activity=self.activity).exists())\n if in_act:\n exception.OneADay = True\n\n # Check if user is allowed in the activity if it's restricted\n if self.get_restricted():\n acts = EighthActivity.restricted_activities_available_to_user(user)\n if self.activity.id not in acts:\n exception.Restricted = True\n\n # Check if user is blacklisted from activity\n if self.activity.users_blacklisted.filter(username=user).exists():\n exception.Blacklisted = True\n\n if force:\n EighthWaitlist.objects.filter(scheduled_activity_id=self.id, user_id=user.id, block_id=self.block.id).delete()\n\n if self.get_sticky():\n EighthWaitlist.objects.filter(user_id=user.id, block_id=self.block.id).delete()\n\n success_message = \"Successfully added to waitlist for activity.\" if waitlist else \"Successfully signed up for activity.\"\n \"\"\"\n final_remove_signups = []\n\n # Check if the block overrides signups on other blocks\n if len(self.block.override_blocks.all()) > 0:\n override_blocks = self.block.override_blocks.all()\n can_change_out = True\n\n for block in override_blocks:\n # If block is locked, can't change out of\n if block.locked 
and not force:\n exception.OverrideBlockLocked = [block]\n can_change_out = False\n break\n\n signup_objs = (EighthSignup.objects\n .filter(user=user,\n scheduled_activity__activity__sticky=True,\n scheduled_activity__block=block))\n in_stickie = signup_objs.exists()\n if in_stickie and not force:\n exception.OverrideBlockPermissions = [signup_objs[0].scheduled_activity.activity, block]\n can_change_out = False\n break\n\n # Going to change out of dependent activities at the end\n if can_change_out:\n for block in override_blocks:\n ovr_signups = EighthSignup.objects.filter(scheduled_activity__block=block, user=user)\n for signup in ovr_signups:\n logger.debug(\"Need to remove signup for {0}\".format(signup))\n final_remove_signups.append(signup)\n \"\"\"\n\n # If we've collected any errors, raise the exception and abort\n # the signup attempt\n if exception.errors:\n raise exception\n\n # Everything's good to go - complete the signup. If we've gotten to\n # this point, the signup is either before deadline or performed by\n # an eighth period admin, so previous absences and passes are cleared.\n after_deadline = self.block.locked\n\n # If we're doing an eighth admin action (like signing up a group),\n # don't make an after deadline signup, which creates a pass.\n if no_after_deadline:\n after_deadline = False\n if not waitlist:\n if not self.is_both_blocks():\n try:\n existing_signup = EighthSignup.objects.get(user=user, scheduled_activity__block=self.block)\n\n previous_activity_name = existing_signup.scheduled_activity.activity.name_with_flags\n prev_sponsors = existing_signup.scheduled_activity.get_true_sponsors()\n previous_activity_sponsors = \", \".join(map(str, prev_sponsors))\n previous_activity = existing_signup.scheduled_activity\n\n if not existing_signup.scheduled_activity.is_both_blocks():\n existing_signup.scheduled_activity = self\n existing_signup.after_deadline = after_deadline\n existing_signup.was_absent = False\n 
existing_signup.absence_acknowledged = False\n existing_signup.pass_accepted = False\n existing_signup.previous_activity_name = previous_activity_name\n existing_signup.previous_activity_sponsors = previous_activity_sponsors\n\n existing_signup.save()\n else:\n # Clear out the other signups for this block if the user is\n # switching out of a both-blocks activity\n sibling = existing_signup.scheduled_activity.get_both_blocks_sibling()\n existing_blocks = [\n existing_signup.scheduled_activity.block\n ]\n if sibling:\n existing_blocks.append(sibling.block)\n logger.debug(existing_blocks)\n EighthSignup.objects.filter(user=user, scheduled_activity__block__in=existing_blocks).delete()\n EighthWaitlist.objects.filter(user=user, scheduled_activity=self).delete()\n EighthSignup.objects.create_signup(user=user, scheduled_activity=self, after_deadline=after_deadline,\n previous_activity_name=previous_activity_name,\n previous_activity_sponsors=previous_activity_sponsors, own_signup=(user == request.user))\n if settings.ENABLE_WAITLIST and (previous_activity.waitlist.all().exists() and not self.block.locked and request is not None and\n not request.session.get(\"disable_waitlist_transactions\", False)):\n if not previous_activity.is_full():\n waitlists = EighthWaitlist.objects.get_next_waitlist(previous_activity)\n self.notify_waitlist(waitlists, previous_activity)\n\n except EighthSignup.DoesNotExist:\n EighthSignup.objects.create_signup(user=user, scheduled_activity=self, after_deadline=after_deadline)\n else:\n existing_signups = EighthSignup.objects.filter(user=user, scheduled_activity__block__in=all_blocks)\n\n prev_data = {}\n for signup in existing_signups:\n prev_sponsors = signup.scheduled_activity.get_true_sponsors()\n prev_data[signup.scheduled_activity.block.block_letter] = {\n \"name\": signup.scheduled_activity.activity.name_with_flags,\n \"sponsors\": \", \".join(map(str, prev_sponsors))\n }\n existing_signups.delete()\n\n for sched_act in all_sched_act:\n 
letter = sched_act.block.block_letter\n if letter in prev_data:\n previous_activity_name = prev_data[letter][\"name\"]\n previous_activity_sponsors = prev_data[letter][\"sponsors\"]\n else:\n previous_activity_name = None\n previous_activity_sponsors = None\n\n EighthSignup.objects.create_signup(user=user, scheduled_activity=sched_act, after_deadline=after_deadline,\n previous_activity_name=previous_activity_name,\n previous_activity_sponsors=previous_activity_sponsors, own_signup=(user == request.user))\n\n # signup.previous_activity_name = signup.activity.name_with_flags\n # signup.previous_activity_sponsors = \", \".join(map(str, signup.get_true_sponsors()))\n \"\"\"\n # See \"If block overrides signup on other blocks\" check\n # If there are EighthSignups that need to be removed, do them at the end\n for signup in final_remove_signups:\n success_message += \"\\nYour signup for {0} on {1} was removed. \".format(\n signup.scheduled_activity.activity, signup.scheduled_activity.block)\n signup.delete()\n \"\"\"\n\n return success_message", "response": "Adds a user to this scheduled activity if possible."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef cancel(self):\n # super(EighthScheduledActivity, self).save(*args, **kwargs)\n\n logger.debug(\"Running cancel hooks: {}\".format(self))\n\n if not self.cancelled:\n logger.debug(\"Cancelling {}\".format(self))\n self.cancelled = True\n self.save()\n # NOT USED. 
Was broken anyway.\n \"\"\"\n cancelled_room = EighthRoom.objects.get_or_create(name=\"CANCELLED\", capacity=0)[0]\n cancelled_sponsor = EighthSponsor.objects.get_or_create(first_name=\"\", last_name=\"CANCELLED\")[0]\n if cancelled_room not in list(self.rooms.all()):\n self.rooms.all().delete()\n self.rooms.add(cancelled_room)\n\n if cancelled_sponsor not in list(self.sponsors.all()):\n self.sponsors.all().delete()\n self.sponsors.add(cancelled_sponsor)\n\n self.save()\n \"\"\"", "response": "Cancel an EighthScheduledActivity.\n\n This does nothing besides set the cancelled flag and save the\n object."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef uncancel(self):\n\n if self.cancelled:\n logger.debug(\"Uncancelling {}\".format(self))\n self.cancelled = False\n self.save()\n # NOT USED. Was broken anyway.\n \"\"\"\n cancelled_room = EighthRoom.objects.get_or_create(name=\"CANCELLED\", capacity=0)[0]\n cancelled_sponsor = EighthSponsor.objects.get_or_create(first_name=\"\", last_name=\"CANCELLED\")[0]\n if cancelled_room in list(self.rooms.all()):\n self.rooms.filter(id=cancelled_room.id).delete()\n\n if cancelled_sponsor in list(self.sponsors.all()):\n self.sponsors.filter(id=cancelled_sponsor.id).delete()\n\n self.save()\n \"\"\"", "response": "Uncancel an EighthScheduledActivity.\n\n This does nothing besides unset the cancelled flag and save the\n object."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef validate_unique(self, *args, **kwargs):\n super(EighthSignup, self).validate_unique(*args, **kwargs)\n\n if self.has_conflict():\n raise ValidationError({NON_FIELD_ERRORS: (\"EighthSignup already exists for the User and the EighthScheduledActivity's block\",)})", "response": "Check whether more than one EighthSignup exists for a User on a given EighthBlock."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function 
for\nattempting to remove the EighthSignup if the user has permission to do so.", "response": "def remove_signup(self, user=None, force=False, dont_run_waitlist=False):\n \"\"\"Attempt to remove the EighthSignup if the user has permission to do so.\"\"\"\n\n exception = eighth_exceptions.SignupException()\n\n if user is not None:\n if user != self.user and not user.is_eighth_admin:\n exception.SignupForbidden = True\n\n # Check if the block has been locked\n if self.scheduled_activity.block.locked:\n exception.BlockLocked = True\n\n # Check if the scheduled activity has been cancelled\n if self.scheduled_activity.cancelled:\n exception.ScheduledActivityCancelled = True\n\n # Check if the activity has been deleted\n if self.scheduled_activity.activity.deleted:\n exception.ActivityDeleted = True\n\n # Check if the user is already stickied into an activity\n if self.scheduled_activity.activity and self.scheduled_activity.activity.sticky:\n exception.Sticky = True\n\n if len(exception.messages()) > 0 and not force:\n raise exception\n else:\n block = self.scheduled_activity.block\n self.delete()\n if settings.ENABLE_WAITLIST and self.scheduled_activity.waitlist.all().exists() and not block.locked and not dont_run_waitlist:\n if not self.scheduled_activity.is_full():\n waitlists = EighthWaitlist.objects.get_next_waitlist(self.scheduled_activity)\n self.scheduled_activity.notify_waitlist(waitlists, self.scheduled_activity)\n return \"Successfully removed signup for {}.\".format(block)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef transfer_students_action(request):\n if \"source_act\" in request.GET:\n source_act = EighthScheduledActivity.objects.get(id=request.GET.get(\"source_act\"))\n elif \"source_act\" in request.POST:\n source_act = EighthScheduledActivity.objects.get(id=request.POST.get(\"source_act\"))\n else:\n raise Http404\n\n dest_act = None\n dest_unsignup = False\n\n if \"dest_act\" in 
request.GET:\n dest_act = EighthScheduledActivity.objects.get(id=request.GET.get(\"dest_act\"))\n elif \"dest_act\" in request.POST:\n dest_act = EighthScheduledActivity.objects.get(id=request.POST.get(\"dest_act\"))\n elif \"dest_unsignup\" in request.POST or \"dest_unsignup\" in request.GET:\n dest_unsignup = True\n else:\n raise Http404\n\n num = source_act.members.count()\n\n context = {\"admin_page_title\": \"Transfer Students\", \"source_act\": source_act, \"dest_act\": dest_act, \"dest_unsignup\": dest_unsignup, \"num\": num}\n\n if request.method == \"POST\":\n if dest_unsignup and not dest_act:\n source_act.eighthsignup_set.all().delete()\n invalidate_obj(source_act)\n messages.success(request, \"Successfully removed signups for {} students.\".format(num))\n else:\n source_act.eighthsignup_set.update(scheduled_activity=dest_act)\n\n invalidate_obj(source_act)\n invalidate_obj(dest_act)\n messages.success(request, \"Successfully transfered {} students.\".format(num))\n return redirect(\"eighth_admin_dashboard\")\n else:\n return render(request, \"eighth/admin/transfer_students.html\", context)", "response": "The action for the administration of transfering students."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef start_date(request):\n\n if request.user and request.user.is_authenticated and request.user.is_eighth_admin:\n return {\"admin_start_date\": get_start_date(request)}\n\n return {}", "response": "Add the start date to the context for eighth admin views."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds the absence count to the context for students.", "response": "def absence_count(request):\n \"\"\"Add the absence count to the context for students.\"\"\"\n\n if request.user and request.user.is_authenticated and request.user.is_student:\n absence_info = request.user.absence_info()\n num_absences = absence_info.count()\n show_notif = False\n if 
num_absences > 0:\n notif_seen = request.session.get('eighth_absence_notif_seen', False)\n if not notif_seen:\n for signup in absence_info:\n if signup.in_clear_absence_period():\n show_notif = True\n\n if show_notif:\n request.session['eighth_absence_notif_seen'] = True\n\n return {\"eighth_absence_count\": num_absences, \"eighth_absence_notif\": show_notif}\n\n return {}"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef visible_to_user(self, user):\n\n return Host.objects.filter(Q(groups_visible__in=user.groups.all()) | Q(groups_visible__isnull=True)).distinct()", "response": "Get a list of hosts visible to a given user."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning only future Day objects.", "response": "def get_future_days(self):\n \"\"\"Return only future Day objects.\"\"\"\n today = timezone.now().date()\n\n return Day.objects.filter(date__gte=today)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns the Day for the current day", "response": "def today(self):\n \"\"\"Return the Day for the current day\"\"\"\n today = timezone.now().date()\n try:\n return Day.objects.get(date=today)\n except Day.DoesNotExist:\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_date_range_this_year(now=None):\n if now is None:\n now = datetime.datetime.now().date()\n if now.month <= settings.YEAR_TURNOVER_MONTH:\n date_start = datetime.datetime(now.year - 1, 8, 1) # TODO; don't hardcode these values\n date_end = datetime.datetime(now.year, 7, 1)\n else:\n date_start = datetime.datetime(now.year, 8, 1)\n date_end = datetime.datetime(now.year + 1, 7, 1)\n return timezone.make_aware(date_start), timezone.make_aware(date_end)", "response": "Return the starting and ending date of the current school year."} {"SOURCE": "codesearchnet", "instruction": "Can you 
implement a function in Python 3 that\ngets an attribute of the given username.", "response": "def user_attr(username, attribute):\n \"\"\"Gets an attribute of the user with the given username.\"\"\"\n return getattr(User.objects.get(username=username), attribute)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\npasses request. user as an argument to the given function call.", "response": "def argument_request_user(obj, func_name):\n \"\"\"Pass request.user as an argument to the given function call.\"\"\"\n func = getattr(obj, func_name)\n request = threadlocals.request()\n if request:\n return func(request.user)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef senior_email_forward_view(request):\n if not request.user.is_senior:\n messages.error(request, \"Only seniors can set their forwarding address.\")\n return redirect(\"index\")\n try:\n forward = SeniorEmailForward.objects.get(user=request.user)\n except SeniorEmailForward.DoesNotExist:\n forward = None\n\n if request.method == \"POST\":\n if forward:\n form = SeniorEmailForwardForm(request.POST, instance=forward)\n else:\n form = SeniorEmailForwardForm(request.POST)\n logger.debug(form)\n if form.is_valid():\n obj = form.save(commit=False)\n obj.user = request.user\n obj.save()\n messages.success(request, \"Successfully added forwarding address.\")\n return redirect(\"index\")\n else:\n messages.error(request, \"Error adding forwarding address.\")\n else:\n if forward:\n form = SeniorEmailForwardForm(instance=forward)\n else:\n form = SeniorEmailForwardForm()\n return render(request, \"emailfwd/senior_forward.html\", {\"form\": form, \"forward\": forward})", "response": "Add a forwarding address for graduating seniors."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the phone number formatted with dashes.", "response": "def dashes(phone):\n \"\"\"Returns the phone 
number formatted with dashes.\"\"\"\n if isinstance(phone, str):\n if phone.startswith(\"+1\"):\n return \"1-\" + \"-\".join((phone[2:5], phone[5:8], phone[8:]))\n elif len(phone) == 10:\n return \"-\".join((phone[:3], phone[3:6], phone[6:]))\n else:\n return phone\n else:\n return phone"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ndeploying this page to one or more displays.", "response": "def deploy_to(self, displays=None, exclude=[], lock=[]):\n \"\"\"\n Deploys page to listed display (specify with display). If display is None,\n deploy to all display. Can specify exclude for which display to exclude.\n This overwrites the first argument.\n \"\"\"\n if displays is None:\n signs = Sign.objects.all()\n else:\n signs = Sign.objects.filter(display__in=displays)\n for sign in signs.exclude(display__in=exclude):\n sign.pages.add(self)\n sign.save()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nchecks if the FCPS has emergency announcement page.", "response": "def check_emerg():\n \"\"\"Fetch from FCPS' emergency announcement page.\n\n URL defined in settings.FCPS_EMERGENCY_PAGE\n\n Request timeout defined in settings.FCPS_EMERGENCY_TIMEOUT\n\n \"\"\"\n status = True\n message = None\n if settings.EMERGENCY_MESSAGE:\n return True, settings.EMERGENCY_MESSAGE\n if not settings.FCPS_EMERGENCY_PAGE:\n return None, None\n\n timeout = settings.FCPS_EMERGENCY_TIMEOUT\n\n r = requests.get(\"{}?{}\".format(settings.FCPS_EMERGENCY_PAGE, int(time.time() // 60)), timeout=timeout)\n res = r.text\n if not res or len(res) < 1:\n status = False\n\n # Keep this list up to date with whatever wording FCPS decides to use each time...\n bad_strings = [\n \"There are no emergency announcements at this time\", \"There are no emergency messages at this time\",\n \"There are no emeregency annoncements at this time\", \"There are no major announcements at this time.\",\n \"There are no major emergency 
announcements at this time.\", \"There are no emergencies at this time.\"\n ]\n for b in bad_strings:\n if b in res:\n status = False\n break\n\n # emerg_split = '<p><a href=\"https://youtu.be/jo_8QFIEf64'\n # message = res.split(emerg_split)[0]\n soup = BeautifulSoup(res, \"html.parser\")\n if soup.title:\n title = soup.title.text\n body = \"\"\n for cd in soup.findAll(text=True):\n if isinstance(cd, CData):\n body += cd\n message = \"<h3>{}: </h3>{}\".format(title, body)\n message = message.strip()\n else:\n status = False\n\n return status, message"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_emerg():\n key = \"emerg:{}\".format(datetime.datetime.now().date())\n cached = cache.get(key)\n cached = None # Remove this for production\n if cached:\n logger.debug(\"Returning emergency info from cache\")\n return cached\n else:\n result = get_emerg_result()\n cache.set(key, result, timeout=settings.CACHE_AGE[\"emerg\"])\n return result", "response": "Get the emergency page or check it again."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_login_theme():\n today = datetime.now().date()\n if today.month == 12 or today.month == 1:\n # Snow\n return {\"js\": \"themes/snow/snow.js\", \"css\": \"themes/snow/snow.css\"}\n\n if today.month == 3 and (14 <= today.day <= 16):\n return {\"js\": \"themes/piday/piday.js\", \"css\": \"themes/piday/piday.css\"}\n\n return {}", "response": "Load a custom login theme"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef index_view(request, auth_form=None, force_login=False, added_context=None):\n if request.user.is_authenticated and not force_login:\n return dashboard_view(request)\n else:\n auth_form = auth_form or AuthenticateForm()\n request.session.set_test_cookie()\n\n fcps_emerg = get_fcps_emerg(request)\n\n try:\n login_warning = 
settings.LOGIN_WARNING\n except AttributeError:\n login_warning = None\n\n if fcps_emerg and not login_warning:\n login_warning = fcps_emerg\n\n ap_week = get_ap_week_warning(request)\n\n if ap_week and not login_warning:\n login_warning = ap_week\n\n events = Event.objects.filter(time__gte=datetime.now(), time__lte=(datetime.now().date() + relativedelta(weeks=1)), public=True).this_year()\n sports_events = events.filter(approved=True, category=\"sports\").order_by('time')[:3]\n school_events = events.filter(approved=True, category=\"school\").order_by('time')[:3]\n\n data = {\n \"auth_form\": auth_form,\n \"request\": request,\n \"git_info\": settings.GIT,\n \"bg_pattern\": get_bg_pattern(),\n \"theme\": get_login_theme(),\n \"login_warning\": login_warning,\n \"senior_graduation\": settings.SENIOR_GRADUATION,\n \"senior_graduation_year\": settings.SENIOR_GRADUATION_YEAR,\n \"sports_events\": sports_events,\n \"school_events\": school_events\n }\n schedule = schedule_context(request)\n data.update(schedule)\n if added_context is not None:\n data.update(added_context)\n return render(request, \"auth/login.html\", data)", "response": "Process and show the main login page or dashboard if logged in."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nclearing the Kerberos cache and logout.", "response": "def logout_view(request):\n \"\"\"Clear the Kerberos cache and logout.\"\"\"\n do_logout(request)\n\n app_redirects = {\"collegerecs\": \"https://apps.tjhsst.edu/collegerecs/logout?ion_logout=1\"}\n app = request.GET.get(\"app\", \"\")\n if app and app in app_redirects:\n return redirect(app_redirects[app])\n\n return redirect(\"index\")"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nvalidates and process the login POST request.", "response": "def post(self, request):\n \"\"\"Validate and process the login POST request.\"\"\"\n \"\"\"Before September 1st, do not allow Class of [year+4] to log 
in.\"\"\"\n if request.POST.get(\"username\", \"\").startswith(str(date.today().year + 4)) and date.today() < settings.SCHOOL_START_DATE:\n return index_view(request, added_context={\"auth_message\": \"Your account is not yet active for use with this application.\"})\n\n form = AuthenticateForm(data=request.POST)\n\n if request.session.test_cookie_worked():\n request.session.delete_test_cookie()\n else:\n logger.warning(\"No cookie support detected! This could cause problems.\")\n\n if form.is_valid():\n reset_user, status = User.objects.get_or_create(username=\"RESET_PASSWORD\", user_type=\"service\", id=999999)\n if form.get_user() == reset_user:\n return redirect(reverse(\"reset_password\") + \"?expired=True\")\n login(request, form.get_user())\n # Initial load into session\n logger.info(\"Login succeeded as {}\".format(request.POST.get(\"username\", \"unknown\")))\n logger.info(\"request.user: {}\".format(request.user))\n\n log_auth(request, \"success{}\".format(\" - first login\" if not request.user.first_login else \"\"))\n\n default_next_page = \"index\"\n\n if request.user.is_eighthoffice:\n \"\"\"Default to eighth admin view (for eighthoffice).\"\"\"\n default_next_page = \"eighth_admin_dashboard\"\n\n # if request.user.is_eighthoffice:\n # \"\"\"Eighthoffice's session should (almost) never expire.\"\"\"\n # request.session.set_expiry(timezone.now() + timedelta(days=30))\n\n if not request.user.first_login:\n logger.info(\"First login\")\n request.user.first_login = make_aware(datetime.now())\n request.user.save()\n request.session[\"first_login\"] = True\n\n if request.user.is_student or request.user.is_teacher:\n default_next_page = \"welcome\"\n else:\n pass # exclude eighth office/special accounts\n\n # if the student has not seen the 8th agreement yet, redirect them\n if request.user.is_student and not request.user.seen_welcome:\n return redirect(\"welcome\")\n\n next_page = request.POST.get(\"next\", request.GET.get(\"next\", default_next_page))\n 
return redirect(next_page)\n else:\n log_auth(request, \"failed\")\n logger.info(\"Login failed as {}\".format(request.POST.get(\"username\", \"unknown\")))\n return index_view(request, auth_form=form)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nchange all Mondays to 'Anchor Day' Change all Tuesday/Thursdays to 'Blue Day' Change all Wednesday/Fridays to 'Red Day'.", "response": "def do_default_fill(request):\n \"\"\"Change all Mondays to 'Anchor Day' Change all Tuesday/Thursdays to 'Blue Day' Change all\n Wednesday/Fridays to 'Red Day'.\"\"\"\n monday = 0\n tuesday = 1\n wednesday = 2\n thursday = 3\n friday = 4\n try:\n anchor_day = DayType.objects.get(name=\"Anchor Day\")\n blue_day = DayType.objects.get(name=\"Blue Day\")\n red_day = DayType.objects.get(name=\"Red Day\")\n except DayType.DoesNotExist:\n return render(request, \"schedule/fill.html\",\n {\"msgs\": [\"Failed to insert any schedules.\", \"Make sure you have DayTypes defined for Anchor Days, Blue Days, and Red Days.\"]})\n\n daymap = {monday: anchor_day, tuesday: blue_day, wednesday: red_day, thursday: blue_day, friday: red_day}\n\n msgs = []\n\n month = request.POST.get(\"month\")\n firstday = datetime.strptime(month, \"%Y-%m\")\n\n yr, mn = month.split(\"-\")\n cal = calendar.monthcalendar(int(yr), int(mn))\n for w in cal:\n for d in w:\n day = get_day_data(firstday, d)\n logger.debug(day)\n\n if \"empty\" in day:\n continue\n\n if \"schedule\" not in day or day[\"schedule\"] is None:\n day_of_week = day[\"date\"].weekday()\n if day_of_week in daymap:\n type_obj = daymap[day_of_week]\n\n day_obj = Day.objects.create(date=day[\"formatted_date\"], day_type=type_obj)\n msg = \"{} is now a {}\".format(day[\"formatted_date\"], day_obj.day_type)\n msgs.append(msg)\n return render(request, \"schedule/fill.html\", {\"msgs\": msgs})"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _unescape(self, 
value):\n '''\n Recursively unescape values. Though slower, this doesn't require the user to\n know anything about the escaping when writing their own custom fetch functions.\n '''\n if isinstance(value, (str,unicode)):\n return value.replace(self._escape_character, '.')\n elif isinstance(value, dict):\n return { self._unescape(k) : self._unescape(v) for k,v in value.items() }\n elif isinstance(value, list):\n return [ self._unescape(v) for v in value ]\n return value", "response": "Recursively unescape values. Though slower, this doesn't require the user to\n know anything about the escaping when writing their own custom fetch functions."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets a unique id from a query.", "response": "def _batch_key(self, query):\n '''\n Get a unique id from a query.\n '''\n return ''.join( ['%s%s'%(k,v) for k,v in sorted(query.items())] )"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _insert(self, name, value, timestamp, intervals, **kwargs):\n '''\n Insert the new value.\n '''\n # TODO: confirm that this is in fact using the indices correctly.\n for interval,config in self._intervals.items():\n timestamps = self._normalize_timestamps(timestamp, intervals, config)\n for tstamp in timestamps:\n self._insert_data(name, value, tstamp, interval, config, **kwargs)", "response": "Insert the new value into the database."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _insert_data(self, name, value, timestamp, interval, config, **kwargs):\n '''Helper to insert data into mongo.'''\n # Mongo does not allow mixing atomic modifiers and non-$set sets in the\n # same update, so the choice is to either run the first upsert on\n # {'_id':id} to ensure the record is in place followed by an atomic update\n # based on the series type, or use $set for all of the keys to be used in\n # creating the record, 
which then disallows setting our own _id because\n # that \"can't be updated\". So the tradeoffs are:\n # * 2 updates for every insert, maybe a local cache of known _ids and the\n # associated memory overhead\n # * Query by the 2 or 3 key tuple for this interval and the associated\n # overhead of that index match vs. the presumably-faster match on _id\n # * Yet another index for the would-be _id of i_key or r_key, where each\n # index has a notable impact on write performance.\n # For now, choosing to go with matching on the tuple until performance\n # testing can be done. Even then, there may be a variety of factors which\n # make the results situation-dependent.\n insert = {'name':name, 'interval':config['i_calc'].to_bucket(timestamp)}\n if not config['coarse']:\n insert['resolution'] = config['r_calc'].to_bucket(timestamp)\n # copy the query before expire_from as that is not indexed\n query = insert.copy()\n\n if config['expire']:\n insert['expire_from'] = datetime.utcfromtimestamp( timestamp )\n\n # switch to atomic updates\n insert = {'$set':insert.copy()}\n\n # need to hide the period of any values. 
best option seems to be to pick\n # a character that \"no one\" uses.\n if isinstance(value, (str,unicode)):\n value = value.replace('.', self._escape_character)\n elif isinstance(value, float):\n value = str(value).replace('.', self._escape_character)\n\n self._insert_type( insert, value )\n\n # TODO: use write preference settings if we have them\n if not kwargs.get('dry_run',False):\n self._client[interval].update( query, insert, upsert=True, check_keys=False )\n return query, insert", "response": "Helper to insert data into the database."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nfetches a series of buckets.", "response": "def _series(self, name, interval, config, buckets, **kws):\n '''\n Fetch a series of buckets.\n '''\n # make a copy of the buckets because we're going to mutate it\n buckets = list(buckets)\n rval = OrderedDict()\n step = config['step']\n resolution = config.get('resolution',step)\n fetch = kws.get('fetch')\n process_row = kws.get('process_row', self._process_row)\n\n query = { 'name':name, 'interval':{'$gte':buckets[0], '$lte':buckets[-1]} }\n sort = [('interval', ASCENDING)]\n if not config['coarse']:\n sort.append( ('resolution', ASCENDING) )\n\n if fetch:\n cursor = fetch( self._client[interval], spec=query, sort=sort, method='find' )\n else:\n cursor = self._client[interval].find( spec=query, sort=sort )\n for record in cursor:\n while buckets and buckets[0] < record['interval']:\n rval[ config['i_calc'].from_bucket(buckets.pop(0)) ] = self._type_no_value()\n if buckets and buckets[0]==record['interval']:\n buckets.pop(0)\n\n i_key = config['i_calc'].from_bucket(record['interval'])\n data = process_row( record['value'] )\n if config['coarse']:\n rval[ i_key ] = data\n else:\n rval.setdefault( i_key, OrderedDict() )\n rval[ i_key ][ config['r_calc'].from_bucket(record['resolution']) ] = data\n\n # are there buckets at the end for which we received no data?\n while buckets:\n rval[ 
config['i_calc'].from_bucket(buckets.pop(0)) ] = self._type_no_value()\n\n return rval"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef delete(self, name):\n '''\n Delete time series by name across all intervals. Returns the number of\n records deleted.\n '''\n # TODO: confirm that this does not use the combo index and determine\n # performance implications.\n num_deleted = 0\n for interval,config in self._intervals.items():\n # TODO: use write preference settings if we have them\n num_deleted += self._client[interval].remove( {'name':name} )['n']\n return num_deleted", "response": "Delete a time series by name across all intervals. Returns the number of records deleted."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nshowing the debug toolbar to those with the Django staff permission excluding the Eighth Period office.", "response": "def debug_toolbar_callback(request):\n \"\"\"Show the debug toolbar to those with the Django staff permission, excluding the Eighth Period\n office.\"\"\"\n\n if request.is_ajax():\n return False\n\n if not hasattr(request, 'user'):\n return False\n if not request.user.is_authenticated:\n return False\n if not request.user.is_staff:\n return False\n if request.user.id == 9999:\n return False\n\n return \"debug\" in request.GET or settings.DEBUG"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget a list of visible announcements for a given user.", "response": "def visible_to_user(self, user):\n \"\"\"Get a list of visible announcements for a given user (usually request.user).\n\n These visible announcements will be those that either have no\n groups assigned to them (and are therefore public) or those in\n which the user is a member.\n\n Apparently this .filter() call occasionally returns duplicates, hence the .distinct()...\n\n \"\"\"\n\n return Announcement.objects.filter(\n Q(groups__in=user.groups.all()) | 
Q(groups__isnull=True) | Q(announcementrequest__teachers_requested=user) |\n Q(announcementrequest__user=user) | Q(user=user)).distinct()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef hidden_announcements(self, user):\n ids = user.announcements_hidden.all().values_list(\"announcement__id\")\n return Announcement.objects.filter(id__in=ids)", "response": "Get a list of hidden announcements for a given user."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef this_year(self):\n start_date, end_date = get_date_range_this_year()\n return Announcement.objects.filter(added__gte=start_date, added__lte=end_date)", "response": "Get AnnouncementRequests from this school year only."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef profile_view(request, user_id=None):\n if request.user.is_eighthoffice and \"full\" not in request.GET and user_id is not None:\n return redirect(\"eighth_profile\", user_id=user_id)\n\n if user_id is not None:\n try:\n profile_user = User.objects.get(id=user_id)\n\n if profile_user is None:\n raise Http404\n except User.DoesNotExist:\n raise Http404\n else:\n profile_user = request.user\n\n num_blocks = 6\n\n eighth_schedule = []\n start_block = EighthBlock.objects.get_first_upcoming_block()\n\n blocks = []\n if start_block:\n blocks = [start_block] + list(start_block.next_blocks(num_blocks - 1))\n\n for block in blocks:\n sch = {\"block\": block}\n try:\n sch[\"signup\"] = EighthSignup.objects.get(scheduled_activity__block=block, user=profile_user)\n except EighthSignup.DoesNotExist:\n sch[\"signup\"] = None\n except MultipleObjectsReturned:\n client.captureException()\n sch[\"signup\"] = None\n eighth_schedule.append(sch)\n\n if profile_user.is_eighth_sponsor:\n sponsor = EighthSponsor.objects.get(user=profile_user)\n start_date = get_start_date(request)\n eighth_sponsor_schedule = 
(EighthScheduledActivity.objects.for_sponsor(sponsor).filter(block__date__gte=start_date).order_by(\n \"block__date\", \"block__block_letter\"))\n eighth_sponsor_schedule = eighth_sponsor_schedule[:10]\n else:\n eighth_sponsor_schedule = None\n\n admin_or_teacher = (request.user.is_eighth_admin or request.user.is_teacher)\n can_view_eighth = (profile_user.can_view_eighth or request.user == profile_user)\n eighth_restricted_msg = (not can_view_eighth and admin_or_teacher)\n\n if not can_view_eighth and not request.user.is_eighth_admin and not request.user.is_teacher:\n eighth_schedule = []\n\n has_been_nominated = profile_user.username in [\n u.nominee.username for u in request.user.nomination_votes.filter(position__position_name=settings.NOMINATION_POSITION)\n ]\n context = {\n \"profile_user\": profile_user,\n \"eighth_schedule\": eighth_schedule,\n \"can_view_eighth\": can_view_eighth,\n \"eighth_restricted_msg\": eighth_restricted_msg,\n \"eighth_sponsor_schedule\": eighth_sponsor_schedule,\n \"nominations_active\": settings.NOMINATIONS_ACTIVE,\n \"nomination_position\": settings.NOMINATION_POSITION,\n \"has_been_nominated\": has_been_nominated\n }\n return render(request, \"users/profile.html\", context)", "response": "Displays a view of a user s profile."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ndisplaying a view of a user s picture.", "response": "def picture_view(request, user_id, year=None):\n \"\"\"Displays a view of a user's picture.\n\n Args:\n user_id\n The ID of the user whose picture is being fetched.\n year\n The user's picture from this year is fetched. 
If not\n specified, use the preferred picture.\n\n \"\"\"\n try:\n user = User.objects.get(id=user_id)\n except User.DoesNotExist:\n raise Http404\n default_image_path = os.path.join(settings.PROJECT_ROOT, \"static/img/default_profile_pic.png\")\n\n if user is None:\n raise Http404\n else:\n if year is None:\n preferred = user.preferred_photo\n\n if preferred is None:\n data = user.default_photo\n if data is None:\n image_buffer = io.open(default_image_path, mode=\"rb\")\n else:\n image_buffer = io.BytesIO(data)\n\n # Exclude 'graduate' from names array\n else:\n data = preferred.binary\n if data:\n image_buffer = io.BytesIO(data)\n else:\n image_buffer = io.open(default_image_path, mode=\"rb\")\n else:\n grade_number = Grade.number_from_name(year)\n if user.photos.filter(grade_number=grade_number).exists():\n data = user.photos.filter(grade_number=grade_number).first().binary\n else:\n data = None\n if data:\n image_buffer = io.BytesIO(data)\n else:\n image_buffer = io.open(default_image_path, mode=\"rb\")\n\n response = HttpResponse(content_type=\"image/jpeg\")\n response[\"Content-Disposition\"] = \"filename={}_{}.jpg\".format(user_id, year or preferred)\n try:\n img = image_buffer.read()\n except UnicodeDecodeError:\n img = io.open(default_image_path, mode=\"rb\").read()\n\n image_buffer.close()\n response.write(img)\n\n return response"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef generate_statistics_pdf(activities=None, start_date=None, all_years=False, year=None):\n ''' Accepts EighthActivity objects and outputs a PDF file. 
'''\n if activities is None:\n activities = EighthActivity.objects.all().order_by(\"name\")\n if year is None:\n year = current_school_year()\n\n if not isinstance(activities, list):\n activities = activities.prefetch_related(\"rooms\").prefetch_related(\"sponsors\")\n\n pdf_buffer = BytesIO()\n\n h_margin = 1 * inch\n v_margin = 0.5 * inch\n doc = SimpleDocTemplate(pdf_buffer, pagesize=letter, rightMargin=h_margin, leftMargin=h_margin, topMargin=v_margin, bottomMargin=v_margin)\n\n elements = []\n\n styles = getSampleStyleSheet()\n styles.add(ParagraphStyle(name=\"Indent\", leftIndent=15))\n\n empty_activities = []\n\n for act in activities:\n lelements = []\n relements = []\n act_stats = calculate_statistics(act, start_date=start_date, all_years=all_years, year=year)\n if act_stats[\"total_blocks\"] == 0:\n empty_activities.append(act.name)\n continue\n elements.append(Paragraph(act.name, styles[\"Title\"]))\n sponsor_str = (\", \".join([x.name for x in act.sponsors.all()])) if act.sponsors.count() > 0 else \"None\"\n lelements.append(Paragraph(\"<b>Default Sponsors:</b> \" + sponsor_str, styles[\"Normal\"]))\n lelements.append(Spacer(0, 0.025 * inch))\n room_str = (\", \".join([str(x) for x in act.rooms.all()])) if act.rooms.count() > 0 else \"None\"\n relements.append(Paragraph(\"<b>Default Rooms:</b> \" + room_str, styles[\"Normal\"]))\n relements.append(Spacer(0, 0.025 * inch))\n\n relements.append(Paragraph(\"<b>Total blocks:</b> {}\".format(act_stats[\"total_blocks\"]), styles[\"Normal\"]))\n relements.append(Paragraph(\"<b>Scheduled blocks:</b> {}\".format(act_stats[\"scheduled_blocks\"]), styles[\"Indent\"]))\n relements.append(Paragraph(\"<b>Empty blocks:</b> {}\".format(act_stats[\"empty_blocks\"]), styles[\"Indent\"]))\n relements.append(Paragraph(\"<b>Cancelled blocks:</b> {}\".format(act_stats[\"cancelled_blocks\"]), styles[\"Indent\"]))\n\n lelements.append(Paragraph(\"<b>Total signups:</b> {}\".format(act_stats[\"total_signups\"]), 
styles[\"Normal\"]))\n lelements.append(Paragraph(\"<b>Average signups per block:</b> {}\".format(act_stats[\"average_signups\"]), styles[\"Indent\"]))\n lelements.append(Paragraph(\"<b>Average signups per student:</b> {}\".format(act_stats[\"average_user_signups\"]), styles[\"Indent\"]))\n lelements.append(\n Paragraph(\"<b>Unique students:</b> {}, <b>Capacity:</b> {}\".format(act_stats[\"students\"], act_stats[\"capacity\"]), styles[\"Normal\"]))\n\n elements.append(\n Table([[lelements, relements]], style=[('LEFTPADDING', (0, 0), (-1, -1), 0), ('RIGHTPADDING', (0, 0), (-1, -1), 0), ('VALIGN', (0, 0),\n (-1, -1), 'TOP')]))\n\n parsed_members = [[x.username, y] for x, y in act_stats[\"members\"]]\n parsed_members = list(chunks(parsed_members, 30))[:3]\n if parsed_members:\n parsed_members = [[[\"Username\", \"Signups\"]] + x for x in parsed_members]\n parsed_members = [\n Table(x, style=[('FONT', (0, 0), (1, 0), 'Helvetica-Bold'), ('ALIGN', (1, 0), (1, -1), 'RIGHT')]) for x in parsed_members\n ]\n elements.append(Table([parsed_members], style=[('VALIGN', (-1, -1), (-1, -1), 'TOP')]))\n if act_stats[\"students\"] - 90 > 0:\n elements.append(Paragraph(\"<b>{}</b> students were not shown on this page. 
\".format(act_stats[\"students\"] - 90), styles[\"Normal\"]))\n else:\n elements.append(Spacer(0, 0.20 * inch))\n\n if start_date is not None:\n elements.append(\n Paragraph(\"<b>{}</b> block(s) are past the start date and are not included on this page.\".format(act_stats[\"past_start_date\"]),\n styles[\"Normal\"]))\n elements.append(\n Paragraph(\"<b>{}</b> block(s) not in the {}-{} school year are not included on this page.\".format(\n act_stats[\"old_blocks\"], year - 1, year), styles[\"Normal\"]))\n\n elements.append(PageBreak())\n\n if empty_activities:\n empty_activities = [x[:37] + \"...\" if len(x) > 40 else x for x in empty_activities]\n empty_activities = [[x] for x in empty_activities]\n empty_activities = list(chunks(empty_activities, 35))\n empty_activities = [[[\"Activity\"]] + x for x in empty_activities]\n empty_activities = [\n Table(x, style=[('FONT', (0, 0), (-1, 0), 'Helvetica-Bold'), ('LEFTPADDING', (0, 0), (-1, -1), 0)]) for x in empty_activities\n ]\n for i in range(0, len(empty_activities), 2):\n elements.append(Paragraph(\"Empty Activities (Page {})\".format(i // 2 + 1), styles[\"Title\"]))\n if all_years:\n elements.append(Paragraph(\"The following activities have no 8th period blocks assigned to them.\", styles[\"Normal\"]))\n else:\n elements.append(\n Paragraph(\"The following activities have no 8th period blocks assigned to them for the {}-{} school year.\".format(year - 1, year),\n styles[\"Normal\"]))\n elements.append(Spacer(0, 0.10 * inch))\n ea = [empty_activities[i]]\n if i + 1 < len(empty_activities):\n ea.append(empty_activities[i + 1])\n elements.append(\n Table([ea], style=[\n ('LEFTPADDING', (0, 0), (-1, -1), 0),\n ('RIGHTPADDING', (0, 0), (-1, -1), 0),\n ('VALIGN', (0, 0), (-1, -1), 'TOP'),\n ], hAlign='LEFT'))\n elements.append(PageBreak())\n\n def first_page(canvas, _):\n if len(activities) == 1:\n canvas.setTitle(\"{} Statistics\".format(activities[0].name))\n else:\n canvas.setTitle(\"8th Period Activity 
Statistics\")\n canvas.setAuthor(\"Generated by Ion\")\n\n doc.build(elements, onFirstPage=first_page)\n\n pdf_buffer.seek(0)\n\n return pdf_buffer", "response": "Generates a PDF file for the given activities and optionally start a date."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef stats_view(request, activity_id=None):\n if not (request.user.is_eighth_admin or request.user.is_teacher):\n return render(request, \"error/403.html\", {\"reason\": \"You do not have permission to view statistics for this activity.\"}, status=403)\n\n activity = get_object_or_404(EighthActivity, id=activity_id)\n\n if request.GET.get(\"print\", False):\n response = HttpResponse(content_type=\"application/pdf\")\n buf = generate_statistics_pdf([activity], year=int(request.GET.get(\"year\", 0)) or None)\n response.write(buf.getvalue())\n buf.close()\n return response\n\n current_year = current_school_year()\n\n if EighthBlock.objects.count() == 0:\n earliest_year = current_year\n else:\n earliest_year = EighthBlock.objects.order_by(\"date\").first().date.year\n\n if request.GET.get(\"year\", False):\n year = int(request.GET.get(\"year\"))\n else:\n year = None\n\n future = request.GET.get(\"future\", False)\n\n context = {\"activity\": activity, \"years\": list(reversed(range(earliest_year, current_year + 1))), \"year\": year, \"future\": future}\n\n if year:\n context.update(calculate_statistics(activity, year=year, future=future))\n else:\n context.update(calculate_statistics(activity, get_start_date(request), future=future))\n return render(request, \"eighth/statistics.html\", context)", "response": "This view returns a statistics page for the given activity."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef expire(self, name):\n '''\n Expire all the data.\n '''\n for interval,config in self._intervals.items():\n if config['expire']:\n # Because we're 
storing the bucket time, expiry has the same\n # \"skew\" as whatever the buckets are.\n expire_from = config['i_calc'].to_bucket(time.time() - config['expire'])\n conn = self._client.connect()\n\n conn.execute( self._table.delete().where(\n and_(\n self._table.c.name==name,\n self._table.c.interval==interval,\n self._table.c.i_time<=expire_from\n )\n ))", "response": "Expire all the data for the given name."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef delete(self, name):\n '''\n Delete time series by name across all intervals. Returns the number of\n records deleted.\n '''\n conn = self._client.connect()\n conn.execute( self._table.delete().where(self._table.c.name==name) )", "response": "Delete a time series by name across all intervals. Returns the number of records deleted."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _insert_data(self, name, value, timestamp, interval, config, **kwargs):\n '''Helper to insert data into sql.'''\n conn = self._client.connect()\n if not self._update_data(name, value, timestamp, interval, config, conn):\n try:\n kwargs = {\n 'name' : name,\n 'interval' : interval,\n 'i_time' : config['i_calc'].to_bucket(timestamp),\n 'value' : value\n }\n if not config['coarse']:\n kwargs['r_time'] = config['r_calc'].to_bucket(timestamp)\n stmt = self._table.insert().values(**kwargs)\n result = conn.execute(stmt)\n except:\n # TODO: only catch IntegrityError\n if not self._update_data(name, value, timestamp, interval, config, conn):\n raise", "response": "Helper to insert data into sql."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsupporting function for insert. Should be called within a transaction. Should be called within a transaction.", "response": "def _update_data(self, name, value, timestamp, interval, config, conn):\n '''Support function for insert. 
Should be called within a transaction'''\n i_time = config['i_calc'].to_bucket(timestamp)\n if not config['coarse']:\n r_time = config['r_calc'].to_bucket(timestamp)\n else:\n r_time = None\n stmt = self._table.update().where(\n and_(\n self._table.c.name==name,\n self._table.c.interval==interval,\n self._table.c.i_time==i_time,\n self._table.c.r_time==r_time)\n ).values({self._table.c.value: value})\n rval = conn.execute( stmt )\n return rval.rowcount"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef nav_categorizer(request):\n\n categories = [(r\"^/$\", \"dashboard\"), (r\"^/announcements\", \"dashboard\"), (r\"^/eighth/admin\", \"eighth_admin\"), (r\"^/eighth\", \"eighth\"),\n (r\"^/events\", \"events\"), (r\"^/files\", \"files\"), (r\"^/printing\", \"printing\"), (r\"^/groups\", \"groups\"), (r\"^/polls\", \"polls\"),\n (r\"^/board\", \"board\"), (r\"/bus\", \"bus\")]\n\n for pattern, category in categories:\n p = re.compile(pattern)\n if p.match(request.path):\n return {\"nav_category\": category}\n\n return {\"nav_category\": \"\"}", "response": "Determine which top - level nav category a request falls under\n "} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef mobile_app(request):\n\n ctx = {}\n try:\n ua = request.META.get('HTTP_USER_AGENT', '')\n\n if \"IonAndroid: gcmFrame\" in ua:\n logger.debug(\"IonAndroid %s\", request.user)\n\n ctx[\"is_android_client\"] = True\n registered = \"appRegistered:False\" in ua\n ctx[\"android_client_registered\"] = registered\n\n if request.user and request.user.is_authenticated:\n \"\"\"Add/update NotificationConfig object.\"\"\"\n import binascii\n import os\n from intranet.apps.notifications.models import NotificationConfig\n from datetime import datetime\n\n ncfg, _ = NotificationConfig.objects.get_or_create(user=request.user)\n if not ncfg.android_gcm_rand:\n rand = 
binascii.b2a_hex(os.urandom(32))\n ncfg.android_gcm_rand = rand\n else:\n rand = ncfg.android_gcm_rand\n ncfg.android_gcm_time = datetime.now()\n\n logger.debug(\"GCM random token generated: %s\", rand)\n ncfg.save()\n ctx[\"android_client_rand\"] = rand\n\n else:\n ctx[\"is_android_client\"] = False\n ctx[\"android_client_register\"] = False\n except Exception:\n ctx[\"is_android_client\"] = False\n ctx[\"android_client_register\"] = False\n\n return ctx", "response": "Determine if the site is being displayed in a WebView from a native application."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds custom theme javascript and css.", "response": "def global_custom_theme(request):\n \"\"\"Add custom theme javascript and css.\"\"\"\n today = datetime.datetime.now().date()\n theme = {}\n\n if today.month == 3 and (14 <= today.day <= 16):\n theme = {\"css\": \"themes/piday/piday.css\"}\n\n return {\"theme\": theme}"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nshows homecoming ribbon or scores", "response": "def show_homecoming(request):\n \"\"\"Show homecoming ribbon / scores \"\"\"\n return {'show_homecoming': settings.HOCO_START_DATE < datetime.date.today() and datetime.date.today() < settings.HOCO_END_DATE}"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns an export csv action.", "response": "def export_csv_action(description=\"Export selected objects as CSV file\", fields=None, exclude=None, header=True):\n \"\"\"This function returns an export csv action.\n\n 'fields' and 'exclude' work like in django\n\n ModelForm 'header' is whether or not to output the column names as the first row.\n\n https://djangosnippets.org/snippets/2369/\n\n \"\"\"\n\n def export_as_csv(modeladmin, request, queryset):\n \"\"\"Generic csv export admin action.\n\n based on http://djangosnippets.org/snippets/1697/\n\n \"\"\"\n opts = 
modeladmin.model._meta\n field_names = set([field.name for field in opts.fields])\n if fields:\n fieldset = set(fields)\n field_names = field_names & fieldset\n elif exclude:\n excludeset = set(exclude)\n field_names = field_names - excludeset\n\n response = HttpResponse(content_type='text/csv')\n response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')\n\n writer = csv.writer(response)\n if header:\n writer.writerow(list(field_names))\n for obj in queryset:\n writer.writerow([str(getattr(obj, field)) for field in field_names])\n return response\n\n export_as_csv.short_description = description\n return export_as_csv"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef handle(self, **options):\n perm_map = {}\n with open('eighth_activity_permissions.csv', 'r') as absperms:\n perms = csv.reader(absperms)\n for row in perms:\n aid, uid = row\n try:\n usr = User.objects.get(id=uid)\n except User.DoesNotExist:\n self.stdout.write(\"User {} doesn't exist, aid {}\".format(uid, aid))\n else:\n if aid in perm_map:\n perm_map[aid].append(usr)\n else:\n perm_map[aid] = [usr]\n\n for aid in perm_map:\n try:\n act = EighthActivity.objects.get(id=aid)\n except EighthActivity.DoesNotExist:\n self.stdout.write(\"Activity {} doesn't exist\".format(aid))\n else:\n self.stdout.write(\"{}: {}\".format(aid, EighthActivity.objects.get(id=aid)))\n grp, _ = Group.objects.get_or_create(name=\"{} -- Permissions\".format(\"{}\".format(act)[:55]))\n users = perm_map[aid]\n for u in users:\n u.groups.add(grp)\n u.save()\n act.groups_allowed.add(grp)\n act.save()\n\n self.stdout.write(\"Done.\")", "response": "Exported eighth_activity_permissions table in CSV format."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef this_year(self):\n start_date, end_date = get_date_range_this_year()\n return self.filter(start_time__gte=start_date, 
start_time__lte=end_date)", "response": "Get AnnouncementRequests from this school year only."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef visible_to_user(self, user):\n\n return Poll.objects.filter(Q(groups__in=user.groups.all()) | Q(groups__isnull=True))", "response": "Get a list of visible polls for a given user."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck if the page_range is valid and returns the number of pages in the range or False if it is an invalid range.", "response": "def check_page_range(page_range, max_pages):\n \"\"\"Returns the number of pages in the range, or False if it is an invalid range.\"\"\"\n pages = 0\n try:\n for r in page_range.split(\",\"): # check all ranges separated by commas\n if \"-\" in r:\n rr = r.split(\"-\")\n if len(rr) != 2: # make sure 2 values in range\n return False\n else:\n rl = int(rr[0])\n rh = int(rr[1])\n # check in page range\n if not 0 < rl < max_pages and not 0 < rh < max_pages:\n return False\n if rl > rh: # check lower bound <= upper bound\n return False\n pages += rh - rl + 1\n else:\n if not 0 < int(r) <= max_pages: # check in page range\n return False\n pages += 1\n except ValueError: # catch int parse fail\n return False\n return pages"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef student_welcome_view(request):\n if not request.user.is_student:\n return redirect(\"index\")\n # context = {\"first_login\": request.session[\"first_login\"] if \"first_login\" in request.session else False}\n # return render(request, \"welcome/old_student.html\", context)\n return dashboard_view(request, show_welcome=True)", "response": "Welcome page for students."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef request_announcement_email(request, form, obj):\n\n 
logger.debug(form.data)\n teacher_ids = form.data[\"teachers_requested\"]\n if not isinstance(teacher_ids, list):\n teacher_ids = [teacher_ids]\n logger.debug(teacher_ids)\n teachers = User.objects.filter(id__in=teacher_ids)\n logger.debug(teachers)\n\n subject = \"News Post Confirmation Request from {}\".format(request.user.full_name)\n emails = []\n for teacher in teachers:\n emails.append(teacher.tj_email)\n logger.debug(emails)\n logger.info(\"%s: Announcement request to %s, %s\", request.user, teachers, emails)\n base_url = request.build_absolute_uri(reverse('index'))\n data = {\n \"teachers\": teachers,\n \"user\": request.user,\n \"formdata\": form.data,\n \"info_link\": request.build_absolute_uri(reverse(\"approve_announcement\", args=[obj.id])),\n \"base_url\": base_url\n }\n logger.info(\"%s: Announcement request %s\", request.user, data)\n email_send(\"announcements/emails/teacher_approve.txt\", \"announcements/emails/teacher_approve.html\", data, subject, emails)", "response": "Send an email to the user who approved the object."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef admin_request_announcement_email(request, form, obj):\n\n subject = \"News Post Approval Needed ({})\".format(obj.title)\n emails = [settings.APPROVAL_EMAIL]\n base_url = request.build_absolute_uri(reverse('index'))\n data = {\n \"req\": obj,\n \"formdata\": form.data,\n \"info_link\": request.build_absolute_uri(reverse(\"admin_approve_announcement\", args=[obj.id])),\n \"base_url\": base_url\n }\n email_send(\"announcements/emails/admin_approve.txt\", \"announcements/emails/admin_approve.html\", data, subject, emails)", "response": "Send an admin announcement request email."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nemail the teachers who approved an announcement.", "response": "def announcement_approved_email(request, obj, req):\n \"\"\"Email the requested teachers and submitter 
whenever an administrator approves an announcement\n request.\n\n obj: the Announcement object\n req: the AnnouncementRequest object\n\n \"\"\"\n\n if not settings.PRODUCTION:\n logger.debug(\"Not in production. Ignoring email for approved announcement.\")\n return\n subject = \"Announcement Approved: {}\".format(obj.title)\n \"\"\" Email to teachers who approved. \"\"\"\n teachers = req.teachers_approved.all()\n\n teacher_emails = []\n for u in teachers:\n em = u.tj_email\n if em:\n teacher_emails.append(em)\n\n base_url = request.build_absolute_uri(reverse('index'))\n url = request.build_absolute_uri(reverse('view_announcement', args=[obj.id]))\n\n if len(teacher_emails) > 0:\n data = {\"announcement\": obj, \"request\": req, \"info_link\": url, \"base_url\": base_url, \"role\": \"approved\"}\n email_send(\"announcements/emails/announcement_approved.txt\", \"announcements/emails/announcement_approved.html\", data, subject, teacher_emails)\n messages.success(request, \"Sent teacher approved email to {} users\".format(len(teacher_emails)))\n \"\"\" Email to submitter. 
\"\"\"\n submitter = req.user\n submitter_email = submitter.tj_email\n if submitter_email:\n submitter_emails = [submitter_email]\n data = {\"announcement\": obj, \"request\": req, \"info_link\": url, \"base_url\": base_url, \"role\": \"submitted\"}\n email_send(\"announcements/emails/announcement_approved.txt\", \"announcements/emails/announcement_approved.html\", data, subject,\n submitter_emails)\n messages.success(request, \"Sent teacher approved email to {} users\".format(len(submitter_emails)))"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef announcement_posted_email(request, obj, send_all=False):\n\n if settings.EMAIL_ANNOUNCEMENTS:\n subject = \"Announcement: {}\".format(obj.title)\n if send_all:\n users = User.objects.all()\n else:\n users = User.objects.filter(receive_news_emails=True)\n\n send_groups = obj.groups.all()\n emails = []\n users_send = []\n for u in users:\n if len(send_groups) == 0:\n # no groups, public.\n em = u.emails.first() if u.emails.count() >= 1 else u.tj_email\n if em:\n emails.append(em)\n users_send.append(u)\n else:\n # specific to a group\n user_groups = u.groups.all()\n if any(i in send_groups for i in user_groups):\n # group intersection exists\n em = u.emails.first() if u.emails.count() >= 1 else u.tj_email\n if em:\n emails.append(em)\n users_send.append(u)\n\n logger.debug(users_send)\n logger.debug(emails)\n\n if not settings.PRODUCTION and len(emails) > 3:\n raise exceptions.PermissionDenied(\"You're about to email a lot of people, and you aren't in production!\")\n return\n\n base_url = request.build_absolute_uri(reverse('index'))\n url = request.build_absolute_uri(reverse('view_announcement', args=[obj.id]))\n data = {\"announcement\": obj, \"info_link\": url, \"base_url\": base_url}\n email_send_bcc(\"announcements/emails/announcement_posted.txt\", \"announcements/emails/announcement_posted.html\", data, subject, emails)\n messages.success(request, \"Sent 
email to {} users\".format(len(users_send)))\n else:\n logger.debug(\"Emailing announcements disabled\")", "response": "Send an email to the user who posted an announcement."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsending an HTML message to the list of emails.", "response": "def email_send(text_template, html_template, data, subject, emails, headers=None):\n \"\"\"Send an HTML/Plaintext email with the following fields.\n\n text_template: URL to a Django template for the text email's contents\n html_template: URL to a Django tempalte for the HTML email's contents\n data: The context to pass to the templates\n subject: The subject of the email\n emails: The addresses to send the email to\n headers: A dict of additional headers to send to the message\n\n \"\"\"\n\n text = get_template(text_template)\n html = get_template(html_template)\n text_content = text.render(data)\n html_content = html.render(data)\n subject = settings.EMAIL_SUBJECT_PREFIX + subject\n headers = {} if headers is None else headers\n msg = EmailMultiAlternatives(subject, text_content, settings.EMAIL_FROM, emails, headers=headers)\n msg.attach_alternative(html_content, \"text/html\")\n logger.debug(\"Emailing {} to {}\".format(subject, emails))\n msg.send()\n\n return msg"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef field_(self, name):\n try:\n field = self.fields[name]\n except KeyError:\n raise KeyError(\"Key %r not found in '%s'\" % (name, self.__class__.__name__))\n return BoundField(self, field, name)", "response": "Returns a BoundField with the given name."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a connection to the Cassandra database.", "response": "def _connection(self):\n '''\n Return a connection from the pool\n '''\n try:\n return self._pool.get(False)\n except Empty:\n args = [\n self._host, self._port, self._keyspace\n ]\n kwargs = {\n 'user' : None,\n 
'password' : None,\n 'cql_version' : self._cql_version,\n 'compression' : self._compression,\n 'consistency_level' : self._consistency_level,\n 'transport' : self._transport,\n }\n if self._credentials:\n kwargs['user'] = self._credentials['user']\n kwargs['password'] = self._credentials['password']\n return cql.connect(*args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ninserts the new value into the database.", "response": "def _insert(self, name, value, timestamp, intervals, **kwargs):\n '''\n Insert the new value.\n '''\n if self._value_type in QUOTE_TYPES and not QUOTE_MATCH.match(value):\n value = \"'%s'\"%(value)\n\n for interval,config in self._intervals.items():\n timestamps = self._normalize_timestamps(timestamp, intervals, config)\n for tstamp in timestamps:\n self._insert_data(name, value, tstamp, interval, config, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _series(self, connection, name, interval, config, buckets, **kws):\n '''\n Fetch a series of buckets.\n '''\n fetch = kws.get('fetch')\n process_row = kws.get('process_row') or self._process_row\n\n rval = OrderedDict()\n\n if fetch:\n data = fetch( connection, self._table, name, interval, buckets )\n else:\n data = self._type_get(name, interval, buckets[0], buckets[-1])\n\n if config['coarse']:\n for i_bucket in buckets:\n i_key = config['i_calc'].from_bucket(i_bucket)\n i_data = data.get( i_bucket )\n if i_data:\n rval[ i_key ] = process_row( i_data[None] )\n else:\n rval[ i_key ] = self._type_no_value()\n else:\n if data:\n for i_bucket, i_data in data.items():\n i_key = config['i_calc'].from_bucket(i_bucket)\n rval[i_key] = OrderedDict()\n for r_bucket, r_data in i_data.items():\n r_key = config['r_calc'].from_bucket(r_bucket)\n if r_data:\n rval[i_key][r_key] = process_row(r_data)\n else:\n rval[i_key][r_key] = self._type_no_value()\n\n return 
rval", "response": "Fetch a series of buckets."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef visible_to_user(self, user):\n\n return (Event.objects.filter(approved=True).filter(Q(groups__in=user.groups.all()) | Q(groups__isnull=True) | Q(user=user)))", "response": "Get a list of visible events for a given user."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting a list of events that have just been hidden for a given user.", "response": "def hidden_events(self, user):\n \"\"\"Get a list of events marked as hidden for a given user (usually request.user).\n\n These are all events visible to the user -- they have just decided to\n hide them.\n\n \"\"\"\n ids = user.events_hidden.all().values_list(\"event__id\")\n return Event.objects.filter(id__in=ids)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn whether the event is in the next or previous 2 weeks.", "response": "def show_fuzzy_date(self):\n \"\"\"Return whether the event is in the next or previous 2 weeks.\n\n Determines whether to display the fuzzy date.\n\n \"\"\"\n date = self.time.replace(tzinfo=None)\n if date <= datetime.now():\n diff = datetime.now() - date\n if diff.days >= 14:\n return False\n else:\n diff = date - datetime.now()\n if diff.days >= 14:\n return False\n\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nformats a datetime. 
datetime object relative to the current time.", "response": "def fuzzy_date(date):\n \"\"\"Formats a `datetime.datetime` object relative to the current time.\"\"\"\n\n date = date.replace(tzinfo=None)\n\n if date <= datetime.now():\n diff = datetime.now() - date\n\n seconds = diff.total_seconds()\n minutes = seconds // 60\n hours = minutes // 60\n\n if minutes <= 1:\n return \"moments ago\"\n elif minutes < 60:\n return \"{} minutes ago\".format(int(seconds // 60))\n elif hours < 24:\n hrs = int(diff.seconds // (60 * 60))\n return \"{} hour{} ago\".format(hrs, \"s\" if hrs != 1 else \"\")\n elif diff.days == 1:\n return \"yesterday\"\n elif diff.days < 7:\n return \"{} days ago\".format(int(seconds // (60 * 60 * 24)))\n elif diff.days < 14:\n return date.strftime(\"last %A\")\n else:\n return date.strftime(\"%A, %B %d, %Y\")\n else:\n diff = date - datetime.now()\n\n seconds = diff.total_seconds()\n minutes = seconds // 60\n hours = minutes // 60\n\n if minutes <= 1:\n return \"moments ago\"\n elif minutes < 60:\n return \"in {} minutes\".format(int(seconds // 60))\n elif hours < 24:\n hrs = int(diff.seconds // (60 * 60))\n return \"in {} hour{}\".format(hrs, \"s\" if hrs != 1 else \"\")\n elif diff.days == 1:\n return \"tomorrow\"\n elif diff.days < 7:\n return \"in {} days\".format(int(seconds // (60 * 60 * 24)))\n elif diff.days < 14:\n return date.strftime(\"next %A\")\n else:\n return date.strftime(\"%A, %B %d, %Y\")"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nrender the template at page. 
template", "response": "def render_page(page, page_args):\n \"\"\" Renders the template at page.template\n \"\"\"\n print(page_args)\n template_name = page.template if page.template else page.name\n template = \"signage/pages/{}.html\".format(template_name)\n if page.function:\n context_method = getattr(pages, page.function)\n else:\n context_method = getattr(pages, page.name)\n sign, request = page_args\n context = context_method(page, sign, request)\n return render_to_string(template, context)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nrunning whenever a new announcement is created or approved and posted.", "response": "def announcement_posted_hook(request, obj):\n \"\"\"Runs whenever a new announcement is created, or a request is approved and posted.\n\n obj: The Announcement object\n\n \"\"\"\n logger.debug(\"Announcement posted\")\n\n if obj.notify_post:\n logger.debug(\"Announcement notify on\")\n announcement_posted_twitter(request, obj)\n try:\n notify_all = obj.notify_email_all\n except AttributeError:\n notify_all = False\n\n try:\n if notify_all:\n announcement_posted_email(request, obj, True)\n else:\n announcement_posted_email(request, obj)\n except Exception as e:\n logger.error(\"Exception when emailing announcement: {}\".format(e))\n messages.error(request, \"Exception when emailing announcement: {}\".format(e))\n raise e\n else:\n logger.debug(\"Announcement notify off\")"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef approve_announcement_view(request, req_id):\n req = get_object_or_404(AnnouncementRequest, id=req_id)\n\n requested_teachers = req.teachers_requested.all()\n logger.debug(requested_teachers)\n if request.user not in requested_teachers:\n messages.error(request, \"You do not have permission to approve this announcement.\")\n return redirect(\"index\")\n\n if request.method == \"POST\":\n form = 
AnnouncementRequestForm(request.POST, instance=req)\n if form.is_valid():\n obj = form.save(commit=True)\n # SAFE HTML\n obj.content = safe_html(obj.content)\n obj.save()\n if \"approve\" in request.POST:\n obj.teachers_approved.add(request.user)\n obj.save()\n if not obj.admin_email_sent:\n if settings.SEND_ANNOUNCEMENT_APPROVAL:\n admin_request_announcement_email(request, form, obj)\n obj.admin_email_sent = True\n obj.save()\n\n return redirect(\"approve_announcement_success\")\n else:\n obj.save()\n return redirect(\"approve_announcement_reject\")\n\n form = AnnouncementRequestForm(instance=req)\n context = {\"form\": form, \"req\": req, \"admin_approve\": False}\n return render(request, \"announcements/approve.html\", context)", "response": "The approve announcement page. Teachers will be linked to this page from an email."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef admin_approve_announcement_view(request, req_id):\n req = get_object_or_404(AnnouncementRequest, id=req_id)\n\n requested_teachers = req.teachers_requested.all()\n logger.debug(requested_teachers)\n\n if request.method == \"POST\":\n form = AnnouncementRequestForm(request.POST, instance=req)\n if form.is_valid():\n req = form.save(commit=True)\n # SAFE HTML\n req.content = safe_html(req.content)\n if \"approve\" in request.POST:\n groups = []\n if \"groups\" in request.POST:\n group_ids = request.POST.getlist(\"groups\")\n groups = Group.objects.filter(id__in=group_ids)\n logger.debug(groups)\n announcement = Announcement.objects.create(title=req.title, content=req.content, author=req.author, user=req.user,\n expiration_date=req.expiration_date)\n for g in groups:\n announcement.groups.add(g)\n announcement.save()\n\n req.posted = announcement\n req.posted_by = request.user\n req.save()\n\n announcement_approved_hook(request, announcement, req)\n announcement_posted_hook(request, announcement)\n\n messages.success(request, 
\"Successfully approved announcement request. It has been posted.\")\n else:\n req.rejected = True\n req.rejected_by = request.user\n req.save()\n messages.success(request, \"You did not approve this request. It will be hidden.\")\n return redirect(\"index\")\n\n form = AnnouncementRequestForm(instance=req)\n all_groups = Group.objects.all()\n context = {\"form\": form, \"req\": req, \"admin_approve\": True, \"all_groups\": all_groups}\n return render(request, \"announcements/approve.html\", context)", "response": "The administrator approval announcement request page. Admins will view this page through the announcement request UI."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef view_announcement_view(request, id):\n announcement = get_object_or_404(Announcement, id=id)\n\n return render(request, \"announcements/view.html\", {\"announcement\": announcement})", "response": "View an announcement.\n\n id: announcement id"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nmodifying an announcement. 
id: announcement id", "response": "def modify_announcement_view(request, id=None):\n \"\"\"Modify an announcement.\n\n id: announcement id\n\n \"\"\"\n if request.method == \"POST\":\n announcement = get_object_or_404(Announcement, id=id)\n form = AnnouncementForm(request.POST, instance=announcement)\n if form.is_valid():\n obj = form.save()\n logger.debug(form.cleaned_data)\n if \"update_added_date\" in form.cleaned_data and form.cleaned_data[\"update_added_date\"]:\n logger.debug(\"Update added date\")\n obj.added = timezone.now()\n # SAFE HTML\n obj.content = safe_html(obj.content)\n obj.save()\n messages.success(request, \"Successfully modified announcement.\")\n return redirect(\"index\")\n else:\n messages.error(request, \"Error adding announcement\")\n else:\n announcement = get_object_or_404(Announcement, id=id)\n form = AnnouncementForm(instance=announcement)\n\n context = {\"form\": form, \"action\": \"modify\", \"id\": id, \"announcement\": announcement}\n return render(request, \"announcements/add_modify.html\", context)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef delete_announcement_view(request, id):\n if request.method == \"POST\":\n post_id = None\n try:\n post_id = request.POST[\"id\"]\n except AttributeError:\n post_id = None\n try:\n a = Announcement.objects.get(id=post_id)\n if request.POST.get(\"full_delete\", False):\n a.delete()\n messages.success(request, \"Successfully deleted announcement.\")\n else:\n a.expiration_date = datetime.datetime.now()\n a.save()\n messages.success(request, \"Successfully expired announcement.\")\n except Announcement.DoesNotExist:\n pass\n\n return redirect(\"index\")\n else:\n announcement = get_object_or_404(Announcement, id=id)\n return render(request, \"announcements/delete.html\", {\"announcement\": announcement})", "response": "Delete an announcement.\n\n id: announcement id"} {"SOURCE": "codesearchnet", "instruction": "Can you 
write a function in Python 3 where it\nhides an announcement for the logged - in user.", "response": "def hide_announcement_view(request):\n \"\"\" Hide an announcement for the logged-in user.\n\n announcements_hidden in the user model is the related_name for\n \"users_hidden\" in the announcement model.\n \"\"\"\n if request.method == \"POST\":\n announcement_id = request.POST.get(\"announcement_id\")\n if announcement_id:\n announcement = Announcement.objects.get(id=announcement_id)\n try:\n announcement.user_map.users_hidden.add(request.user)\n announcement.user_map.save()\n except IntegrityError:\n logger.warning(\"Duplicate value when hiding announcement {} for {}.\".format(announcement_id, request.user.username))\n return http.HttpResponse(\"Hidden\")\n raise http.Http404\n else:\n return http.HttpResponseNotAllowed([\"POST\"], \"HTTP 405: METHOD NOT ALLOWED\")"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nvalidate the username and password in the form.", "response": "def is_valid(self):\n \"\"\"Validates the username and password in the form.\"\"\"\n form = super(AuthenticateForm, self).is_valid()\n for f, error in self.errors.items():\n if f != \"__all__\":\n self.fields[f].widget.attrs.update({\"class\": \"error\", \"placeholder\": \", \".join(list(error))})\n else:\n errors = list(error)\n if \"This account is inactive.\" in errors:\n message = \"Intranet access restricted\"\n else:\n message = \"Invalid password\"\n self.fields[\"password\"].widget.attrs.update({\"class\": \"error\", \"placeholder\": message})\n\n return form"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the root of the Ion API.", "response": "def api_root(request, format=None):\n r\"\"\"Welcome to the Ion API!\n\n Documentation is below. 
<pk\\> refers to the unique id of a certain object - this is shown as \"id\" in most lists and references.\n\n The general form of the api link (with /api/ assumed to be prepended) is shown, along with an example URL.\n\n All of the API methods, except for those relating to the Bell Schedule, require authentication.\n\n \"\"\"\n\n views = OrderedDict(((\"Schedule\", {\n \"/schedule\": [\"Get today's schedule\", perma_reverse(request, \"api_schedule_day_list\")],\n \"/schedule?page_size=<num>\":\n [\"Get the schedule for the next <num> days\", \"{}?page_size=7\".format(perma_reverse(request, \"api_schedule_day_list\"))],\n \"/schedule/<date>\": [\n \"Get the schedule for a specific day, in YYYY-MM-DD format\",\n perma_reverse(request, \"api_schedule_day_detail\", kwargs={\"date\": \"2016-04-04\"})\n ],\n }), (\"Announcements\", {\n \"/announcements\": [\"Get announcement list\", perma_reverse(request, \"api_announcements_list_create\")],\n \"/announcements/<pk>\": [\"Get announcement details\",\n perma_reverse(request, \"api_announcements_detail\", kwargs={\"pk\": 2999})],\n }), (\"Emergency Announcements\", {\n \"/emerg\": [\"Get FCPS emergency announcements\", perma_reverse(request, \"api_emerg_status\")]\n }), (\"Profile\", {\n \"/profile\": [\"Get current user profile\", perma_reverse(request, \"api_user_myprofile_detail\")],\n \"/profile/<pk>\": [\"Get specific user profile\",\n perma_reverse(request, \"api_user_profile_detail\", kwargs={\"pk\": 489})],\n \"/profile/<username>\":\n [\"Get specific user profile by username\",\n perma_reverse(request, \"api_user_profile_detail_by_username\", kwargs={\"username\": \"2017ewang\"})],\n \"/profile/<pk>/picture\":\n [\"Get a specific user's profile picture\",\n perma_reverse(request, \"api_user_profile_picture_default\", kwargs={\"pk\": 489})],\n \"/profile/<username>/picture\": [\n \"Get a specific user's profile picture by username\",\n perma_reverse(request, \"api_user_profile_picture_default_by_username\", 
kwargs={\"username\": \"2017ewang\"})\n ]\n }), (\"Search\", {\n \"/search/<query>\": [\"Search users\", perma_reverse(request, \"api_user_search\", kwargs={\"query\": \"last:Kim\"})]\n }), (\"Blocks\", {\n \"/blocks\": [\"Get block list\", perma_reverse(request, \"api_eighth_block_list\")],\n \"/blocks?start_date=<start_date>\": [\n \"Get a block list starting on the specified date (in YYYY-MM-DD format).\", \"{}?start_date=2015-11-18\".format(\n perma_reverse(request, \"api_eighth_block_list\"))\n ],\n \"/blocks?date=<date>\": [\n \"Get a list of blocks only on the specified date (in YYYY-MM-DD format).\", \"{}?date=2015-11-18\".format(\n perma_reverse(request, \"api_eighth_block_list\"))\n ],\n \"/blocks/<pk>\": [\"Get a list of activities on a block\",\n perma_reverse(request, \"api_eighth_block_detail\", kwargs={\"pk\": 3030})],\n }), (\"Activities\", {\n \"/activities\": [\"Get eighth activity list\", perma_reverse(request, \"api_eighth_activity_list\")],\n \"/activities/<pk>\": [\"Get eighth activity details\",\n perma_reverse(request, \"api_eighth_activity_detail\", kwargs={\"pk\": 115})]\n }), (\"Signups\", {\n \"/signups/user\":\n [\"Get eighth signups for current user, or sign up a user for activity\",\n perma_reverse(request, \"api_eighth_user_signup_list_myid\")],\n \"/signups/user/<user_id>\":\n [\"Get eighth signups for specific user\",\n perma_reverse(request, \"api_eighth_user_signup_list\", kwargs={\"user_id\": 8889})],\n \"/signups/user/favorites\": [\n \"Get favorited eighth activities for current user, or toggle whether an activity is favorited\",\n perma_reverse(request, \"api_eighth_user_favorites_list_myid\")\n ],\n \"/signups/scheduled_activity/<scheduled_activity_id>\": [\n \"Get eighth signups for a specific scheduled activity\",\n perma_reverse(request, \"api_eighth_scheduled_activity_signup_list\", kwargs={\"scheduled_activity_id\": 889})\n ]\n })))\n return Response(views)"} {"SOURCE": "codesearchnet", "instruction": "Implement a 
function in Python 3 to\ncalculate keys given a stat name and timestamp.", "response": "def _calc_keys(self, config, name, timestamp):\n '''\n Calculate keys given a stat name and timestamp.\n '''\n i_bucket = config['i_calc'].to_bucket( timestamp )\n r_bucket = config['r_calc'].to_bucket( timestamp )\n\n i_key = '%s%s:%s:%s'%(self._prefix, name, config['interval'], i_bucket)\n r_key = '%s:%s'%(i_key, r_bucket)\n\n return i_bucket, r_bucket, i_key, r_key"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _insert(self, name, value, timestamp, intervals, **kwargs):\n '''\n Insert the value.\n '''\n if 'pipeline' in kwargs:\n pipe = kwargs.get('pipeline')\n else:\n pipe = self._client.pipeline(transaction=False)\n\n for interval,config in self._intervals.iteritems():\n timestamps = self._normalize_timestamps(timestamp, intervals, config)\n for tstamp in timestamps:\n self._insert_data(name, value, tstamp, interval, config, pipe,\n ttl_batch=kwargs.get('ttl_batch'))\n\n if 'pipeline' not in kwargs:\n pipe.execute()", "response": "Insert the value into the database."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _insert_data(self, name, value, timestamp, interval, config, pipe, ttl_batch=None):\n '''Helper to insert data into redis'''\n # Calculate the TTL and abort if inserting into the past\n expire, ttl = config['expire'], config['ttl'](timestamp)\n if expire and not ttl:\n return\n\n i_bucket, r_bucket, i_key, r_key = self._calc_keys(config, name, timestamp)\n\n if config['coarse']:\n self._type_insert(pipe, i_key, value)\n else:\n # Add the resolution bucket to the interval. 
This allows us to easily\n # discover the resolution intervals within the larger interval, and\n # if there is a cap on the number of steps, it will go out of scope\n # along with the rest of the data\n pipe.sadd(i_key, r_bucket)\n self._type_insert(pipe, r_key, value)\n\n if expire:\n ttl_args = (i_key, ttl)\n if ttl_batch is not None:\n ttl_batch.add(ttl_args)\n else:\n pipe.expire(*ttl_args)\n if not config['coarse']:\n ttl_args = (r_key, ttl)\n if ttl_batch is not None:\n ttl_batch.add(ttl_args)\n else:\n pipe.expire(*ttl_args)", "response": "Helper to insert data into redis"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef delete(self, name):\n '''\n Delete all the data in a named timeseries.\n '''\n keys = self._client.keys('%s%s:*'%(self._prefix,name))\n\n pipe = self._client.pipeline(transaction=False)\n for key in keys:\n pipe.delete( key )\n pipe.execute()\n\n # Could be not technically the exact number of keys deleted, but is a close\n # enough approximation\n return len(keys)", "response": "Delete all the data in a named timeseries."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nfetch a single interval from redis.", "response": "def _get(self, name, interval, config, timestamp, **kws):\n '''\n Fetch a single interval from redis.\n '''\n i_bucket, r_bucket, i_key, r_key = self._calc_keys(config, name, timestamp)\n fetch = kws.get('fetch') or self._type_get\n process_row = kws.get('process_row') or self._process_row\n\n rval = OrderedDict()\n if config['coarse']:\n data = process_row( fetch(self._client, i_key) )\n rval[ config['i_calc'].from_bucket(i_bucket) ] = data\n else:\n # First fetch all of the resolution buckets for this set.\n resolution_buckets = sorted(map(int,self._client.smembers(i_key)))\n\n # Create a pipe and go fetch all the data for each.\n # TODO: turn off transactions here?\n pipe = self._client.pipeline(transaction=False)\n for bucket in 
resolution_buckets:\n r_key = '%s:%s'%(i_key, bucket) # TODO: make this the \"resolution_bucket\" closure?\n fetch(pipe, r_key)\n res = pipe.execute()\n\n for idx,data in enumerate(res):\n data = process_row(data)\n rval[ config['r_calc'].from_bucket(resolution_buckets[idx]) ] = data\n\n return rval"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfetch a series of buckets.", "response": "def _series(self, name, interval, config, buckets, **kws):\n '''\n Fetch a series of buckets.\n '''\n pipe = self._client.pipeline(transaction=False)\n step = config['step']\n resolution = config.get('resolution',step)\n fetch = kws.get('fetch') or self._type_get\n process_row = kws.get('process_row') or self._process_row\n\n rval = OrderedDict()\n for interval_bucket in buckets:\n i_key = '%s%s:%s:%s'%(self._prefix, name, interval, interval_bucket)\n\n if config['coarse']:\n fetch(pipe, i_key)\n else:\n pipe.smembers(i_key)\n res = pipe.execute()\n\n # TODO: a memory efficient way to use a single pipeline for this.\n for idx,data in enumerate(res):\n # TODO: use closures on the config for generating this interval key\n interval_bucket = buckets[idx] #start_bucket + idx\n interval_key = '%s%s:%s:%s'%(self._prefix, name, interval, interval_bucket)\n\n if config['coarse']:\n data = process_row( data )\n rval[ config['i_calc'].from_bucket(interval_bucket) ] = data\n else:\n rval[ config['i_calc'].from_bucket(interval_bucket) ] = OrderedDict()\n pipe = self._client.pipeline(transaction=False)\n resolution_buckets = sorted(map(int,data))\n for bucket in resolution_buckets:\n # TODO: use closures on the config for generating this resolution key\n resolution_key = '%s:%s'%(interval_key, bucket)\n fetch(pipe, resolution_key)\n\n resolution_res = pipe.execute()\n for x,data in enumerate(resolution_res):\n i_t = config['i_calc'].from_bucket(interval_bucket)\n r_t = config['r_calc'].from_bucket(resolution_buckets[x])\n rval[ i_t ][ r_t ] = 
process_row(data)\n\n return rval"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ninserts the value into the series.", "response": "def _type_insert(self, handle, key, value):\n '''\n Insert the value into the series.\n '''\n if value!=0:\n if isinstance(value,float):\n handle.incrbyfloat(key, value)\n else:\n handle.incr(key,value)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _choose_from_list(options, question):\n message = \"\"\n for index, value in enumerate(options):\n message += \"[{}] {}\\n\".format(index, value)\n message += \"\\n\" + question\n\n def valid(n):\n if int(n) not in range(len(options)):\n raise ValueError(\"Not a valid option.\")\n else:\n return int(n)\n\n prompt(message, validate=valid, key=\"answer\")\n return env.answer", "response": "Choose an item from a list."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef runserver(port=8080, debug_toolbar=\"yes\", werkzeug=\"no\", dummy_cache=\"no\", short_cache=\"no\", template_warnings=\"no\", log_level=\"DEBUG\",\n insecure=\"no\"):\n \"\"\"Clear compiled python files and start the Django dev server.\"\"\"\n if not port or (not isinstance(port, int) and not port.isdigit()):\n abort(\"You must specify a port.\")\n\n # clean_pyc()\n yes_or_no = (\"debug_toolbar\", \"werkzeug\", \"dummy_cache\", \"short_cache\", \"template_warnings\", \"insecure\")\n for s in yes_or_no:\n if locals()[s].lower() not in (\"yes\", \"no\"):\n abort(\"Specify 'yes' or 'no' for {} option.\".format(s))\n\n _log_levels = (\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\")\n if log_level not in _log_levels:\n abort(\"Invalid log level.\")\n\n with shell_env(SHOW_DEBUG_TOOLBAR=debug_toolbar.upper(), DUMMY_CACHE=dummy_cache.upper(), SHORT_CACHE=short_cache.upper(),\n WARN_INVALID_TEMPLATE_VARS=template_warnings.upper(), 
LOG_LEVEL=log_level):\n local(\"./manage.py runserver{} 0.0.0.0:{}{}\".format(\"_plus\" if werkzeug.lower() == \"yes\" else \"\", port, \" --insecure\"\n if insecure.lower() == \"yes\" else \"\"))", "response": "Start Django dev server."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nclear all sessions for all sandboxes or for production.", "response": "def clear_sessions(venv=None):\n \"\"\"Clear all sessions for all sandboxes or for production.\"\"\"\n if \"VIRTUAL_ENV\" in os.environ:\n ve = os.path.basename(os.environ[\"VIRTUAL_ENV\"])\n else:\n ve = \"\"\n\n if venv is not None:\n ve = venv\n else:\n ve = prompt(\"Enter the name of the \"\n \"sandbox whose sessions you would like to delete, or \"\n \"\\\"ion\\\" to clear production sessions:\", default=ve)\n\n c = \"redis-cli -n {0} KEYS {1}:session:* | sed 's/\\\"^.*\\\")//g'\"\n keys_command = c.format(REDIS_SESSION_DB, ve)\n\n keys = local(keys_command, capture=True)\n count = 0 if keys.strip() == \"\" else keys.count(\"\\n\") + 1\n\n if count == 0:\n puts(\"No sessions to destroy.\")\n return 0\n\n plural = \"s\" if count != 1 else \"\"\n\n if not confirm(\"Are you sure you want to destroy {} {}\" \"session{}?\".format(count, \"production \" if ve == \"ion\" else \"\", plural)):\n return 0\n\n if count > 0:\n local(\"{0}| xargs redis-cli -n \" \"{1} DEL\".format(keys_command, REDIS_SESSION_DB))\n\n puts(\"Destroyed {} session{}.\".format(count, plural))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef clear_cache(input=None):\n if input is not None:\n n = input\n else:\n n = _choose_from_list([\"Production cache\", \"Sandbox cache\"], \"Which cache would you like to clear?\")\n\n if n == 0:\n local(\"redis-cli -n {} FLUSHDB\".format(REDIS_PRODUCTION_CACHE_DB))\n else:\n local(\"redis-cli -n {} FLUSHDB\".format(REDIS_SANDBOX_CACHE_DB))", "response": "Clear the production or sandbox redis cache."} {"SOURCE": 
"codesearchnet", "instruction": "Can you implement a function in Python 3 that\npopulates a database with data from fixtures.", "response": "def load_fixtures():\n \"\"\"Populate a database with data from fixtures.\"\"\"\n\n if local(\"pwd\", capture=True) == PRODUCTION_DOCUMENT_ROOT:\n abort(\"Refusing to automatically load fixtures into production database!\")\n\n if not confirm(\"Are you sure you want to load all fixtures? This could have unintended consequences if the database is not empty.\"):\n abort(\"Aborted.\")\n\n files = [\n \"fixtures/users/users.json\", \"fixtures/eighth/sponsors.json\", \"fixtures/eighth/rooms.json\", \"fixtures/eighth/blocks.json\",\n \"fixtures/eighth/activities.json\", \"fixtures/eighth/scheduled_activities.json\", \"fixtures/eighth/signups.json\",\n \"fixtures/announcements/announcements.json\"\n ]\n\n for f in files:\n local(\"./manage.py loaddata \" + f)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nforce migrations to apply for a given app.", "response": "def forcemigrate(app=None):\n \"\"\"Force migrations to apply for a given app.\"\"\"\n if app is None:\n abort(\"No app name given.\")\n local(\"./manage.py migrate {} --fake\".format(app))\n local(\"./manage.py migrate {}\".format(app))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef user_with_student_id(self, student_id):\n results = User.objects.filter(student_id=student_id)\n if len(results) == 1:\n return results.first()\n return None", "response": "Get a unique user object by student ID."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef user_with_ion_id(self, student_id):\n if isinstance(student_id, str) and not student_id.isdigit():\n return None\n results = User.objects.filter(id=student_id)\n if len(results) == 1:\n return results.first()\n return None", "response": "Get a unique user object by Ion ID."} {"SOURCE": "codesearchnet", 
"instruction": "Implement a Python 3 function for\ngetting a unique user object by given name.", "response": "def user_with_name(self, given_name=None, sn=None):\n \"\"\"Get a unique user object by given name (first/nickname and last).\"\"\"\n results = []\n\n if sn and not given_name:\n results = User.objects.filter(last_name=sn)\n elif given_name:\n query = {'first_name': given_name}\n if sn:\n query['last_name'] = sn\n results = User.objects.filter(**query)\n\n if len(results) == 0:\n # Try their first name as a nickname\n del query['first_name']\n query['nickname'] = given_name\n results = User.objects.filter(**query)\n\n if len(results) == 1:\n return results.first()\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef users_with_birthday(self, month, day):\n\n users = User.objects.filter(properties___birthday__month=month, properties___birthday__day=day)\n results = []\n for user in users:\n # TODO: permissions system\n results.append(user)\n\n return results", "response": "Return a list of user objects who have a birthday on a given date."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_students(self):\n users = User.objects.filter(user_type=\"student\", graduation_year__gte=settings.SENIOR_GRADUATION_YEAR)\n users = users.exclude(id__in=EXTRA)\n\n return users", "response": "Get user objects that are students ( quickly )"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_teachers(self):\n users = User.objects.filter(user_type=\"teacher\")\n users = users.exclude(id__in=EXTRA)\n # Add possible exceptions handling here\n users = users | User.objects.filter(id__in=[31863, 32327, 32103, 33228])\n\n return users", "response": "Get user objects that are teachers ( quickly )"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in 
Python 3 that\ngets teachers sorted by last name.", "response": "def get_teachers_sorted(self):\n \"\"\"Get teachers sorted by last name.\n\n This is used for the announcement request page.\n\n \"\"\"\n teachers = self.get_teachers()\n teachers = [(u.last_name, u.first_name, u.id) for u in teachers]\n for t in teachers:\n if t is None or t[0] is None or t[1] is None or t[2] is None:\n teachers.remove(t)\n for t in teachers:\n if t[0] is None or len(t[0]) <= 1:\n teachers.remove(t)\n teachers.sort(key=lambda u: (u[0], u[1]))\n # Hack to return QuerySet in given order\n id_list = [t[2] for t in teachers]\n clauses = ' '.join(['WHEN id=%s THEN %s' % (pk, i) for i, pk in enumerate(id_list)])\n ordering = 'CASE %s END' % clauses\n queryset = User.objects.filter(id__in=id_list).extra(select={'ordering': ordering}, order_by=('ordering',))\n return queryset"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning whether a user is a member of a certain group.", "response": "def member_of(self, group):\n \"\"\"Returns whether a user is a member of a certain group.\n\n Args:\n group\n The name of a group (string) or a group object\n\n Returns:\n Boolean\n\n \"\"\"\n if isinstance(group, Group):\n group = group.name\n return self.groups.filter(name=group).exists()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef last_first(self):\n return \"{}, {} \".format(self.last_name, self.first_name) + (\"({})\".format(self.nickname) if self.nickname else \"\")", "response": "Return a name in the format of :\n Lastname Firstname [ Nickname ]"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef last_first_id(self):\n return (\"{}{} \".format(self.last_name, \", \" + self.first_name if self.first_name else \"\") + (\"({}) \".format(self.nickname)\n if self.nickname else \"\") +\n (\"({})\".format(self.student_id if self.is_student and 
self.student_id else self.username)))", "response": "Return a name of the last first name of the user."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef last_first_initial(self):\n return (\"{}{} \".format(self.last_name, \", \" + self.first_name[:1] + \".\" if self.first_name else \"\") + (\"({}) \".format(self.nickname)\n if self.nickname else \"\"))", "response": "Return a name in the format of :\n Lastname F [Nickname ]"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets or guess a user s TJ email.", "response": "def tj_email(self):\n \"\"\"Get (or guess) a user's TJ email.\n\n If a fcps.edu or tjhsst.edu email is specified in their email\n list, use that. Otherwise, append the user's username to the\n proper email suffix, depending on whether they are a student or\n teacher.\n\n \"\"\"\n\n for email in self.emails.all():\n if email.address.endswith((\"@fcps.edu\", \"@tjhsst.edu\")):\n return email\n\n if self.is_teacher:\n domain = \"fcps.edu\"\n else:\n domain = \"tjhsst.edu\"\n\n return \"{}@{}\".format(self.username, domain)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns default photo that should be used", "response": "def default_photo(self):\n \"\"\"Returns default photo (in binary) that should be used\n\n Returns:\n Binary data\n\n \"\"\"\n preferred = self.preferred_photo\n if preferred is not None:\n return preferred.binary\n\n if preferred is None:\n if self.user_type == \"teacher\":\n current_grade = 12\n else:\n current_grade = int(self.grade)\n if current_grade > 12:\n current_grade = 12\n for i in reversed(range(9, current_grade + 1)):\n data = None\n if self.photos.filter(grade_number=i).exists():\n data = self.photos.filter(grade_number=i).first().binary\n if data:\n return data"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning whether the currently 
logged in user is a teacher and can view all of a student s information regardless of their privacy settings.", "response": "def _current_user_override(self):\n \"\"\"Return whether the currently logged in user is a teacher, and can view all of a student's\n information regardless of their privacy settings.\"\"\"\n try:\n # threadlocals is a module, not an actual thread locals object\n request = threadlocals.request()\n if request is None:\n return False\n requesting_user = request.user\n if isinstance(requesting_user, AnonymousUser) or not requesting_user.is_authenticated:\n return False\n can_view_anyway = requesting_user and (requesting_user.is_teacher or requesting_user.is_eighthoffice or requesting_user.is_eighth_admin)\n except (AttributeError, KeyError) as e:\n logger.error(\"Could not check teacher/eighth override: {}\".format(e))\n can_view_anyway = False\n return can_view_anyway"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a user s age based on their birthday. Returns None if the user is not in the calendar", "response": "def age(self):\n \"\"\"Returns a user's age, based on their birthday.\n\n Returns:\n integer\n\n \"\"\"\n date = datetime.today().date()\n\n b = self.birthday\n if b:\n return int((date - b).days / 365)\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef is_eighth_sponsor(self):\n # FIXME: remove recursive dep\n from ..eighth.models import EighthSponsor\n\n return EighthSponsor.objects.filter(user=self).exists()", "response": "Returns True if the given user is associated with an. eighth. models. EighthSponsor and therefore should view activity\n sponsoring information."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a QuerySet of activity id s and counts for the activities that a given user has signed up for more than settings. 
SIMILAR_THRESHOLD times.", "response": "def frequent_signups(self):\n \"\"\"Return a QuerySet of activity id's and counts for the activities that a given user\n has signed up for more than `settings.SIMILAR_THRESHOLD` times\"\"\"\n key = \"{}:frequent_signups\".format(self.username)\n cached = cache.get(key)\n if cached:\n return cached\n freq_signups = self.eighthsignup_set.exclude(scheduled_activity__activity__administrative=True).exclude(\n scheduled_activity__activity__special=True).exclude(scheduled_activity__activity__restricted=True).exclude(\n scheduled_activity__activity__deleted=True).values('scheduled_activity__activity').annotate(\n count=Count('scheduled_activity__activity')).filter(count__gte=settings.SIMILAR_THRESHOLD).order_by('-count')\n cache.set(key, freq_signups, timeout=60 * 60 * 24 * 7)\n return freq_signups"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the EighthSponsor that a given user is associated with.", "response": "def get_eighth_sponsor(self):\n \"\"\"Return the :class:`intranet.apps.eighth.models.EighthSponsor` that a given user is\n associated with.\n \"\"\"\n\n # FIXME: remove recursive dep\n from ..eighth.models import EighthSponsor\n\n try:\n sp = EighthSponsor.objects.get(user=self)\n except EighthSponsor.DoesNotExist:\n return False\n\n return sp"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the user s absence count.", "response": "def absence_count(self):\n \"\"\"Return the user's absence count.\n\n If the user has no absences\n or is not a signup user, returns 0.\n\n \"\"\"\n # FIXME: remove recursive dep\n from ..eighth.models import EighthSignup\n\n return EighthSignup.objects.filter(user=self, was_absent=True, scheduled_activity__attendance_taken=True).count()"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns information about the user s absences.", "response": "def 
absence_info(self):\n \"\"\"Return information about the user's absences.\"\"\"\n # FIXME: remove recursive dep\n from ..eighth.models import EighthSignup\n\n return EighthSignup.objects.filter(user=self, was_absent=True, scheduled_activity__attendance_taken=True)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef handle_delete(self):\n from intranet.apps.eighth.models import EighthScheduledActivity\n EighthScheduledActivity.objects.filter(eighthsignup_set__user=self).update(\n archived_member_count=F('archived_member_count')+1)", "response": "Handle a graduated user being deleted."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsets the permission for the current object. Returns False if unable to set permission. Returns True if successful.", "response": "def set_permission(self, permission, value, parent=False, admin=False):\n \"\"\" Sets permission for personal information.\n Returns False silently if unable to set permission.\n Returns True if successful.\n \"\"\"\n try:\n if not getattr(self, 'parent_{}'.format(permission)) and not parent and not admin:\n return False\n level = 'parent' if parent else 'self'\n setattr(self, '{}_{}'.format(level, permission), value)\n\n # Set student permission to false if parent sets permission to false.\n if parent and not value:\n setattr(self, 'self_{}'.format(permission), False)\n\n self.save()\n return True\n except Exception as e:\n logger.error(\"Error occurred setting permission {} to {}: {}\".format(permission, value, e))\n return False"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck if a user is the HTTP request sender", "response": "def is_http_request_sender(self):\n \"\"\"Checks if a user the HTTP request sender (accessing own info)\n\n Used primarily to load private personal information from the\n cache. 
(A student should see all info on his or her own profile\n regardless of how the permissions are set.)\n\n Returns:\n Boolean\n\n \"\"\"\n try:\n # threadlocals is a module, not an actual thread locals object\n request = threadlocals.request()\n if request and request.user and request.user.is_authenticated:\n requesting_user_id = request.user.id\n return str(requesting_user_id) == str(self.user.id)\n except (AttributeError, KeyError) as e:\n logger.error(\"Could not check request sender: {}\".format(e))\n return False\n\n return False"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncheck privacy options to see if an attribute is visible to public", "response": "def attribute_is_visible(self, permission):\n \"\"\" Checks privacy options to see if an attribute is visible to public\n \"\"\"\n try:\n parent = getattr(self, \"parent_{}\".format(permission))\n student = getattr(self, \"self_{}\".format(permission))\n return (parent and student) or (self.is_http_request_sender() or self._current_user_override())\n except Exception:\n logger.error(\"Could not retrieve permissions for {}\".format(permission))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck if the attribute is visible to public", "response": "def attribute_is_public(self, permission):\n \"\"\" Checks if attribute is visible to public (regardless of admins status)\n \"\"\"\n try:\n parent = getattr(self, \"parent_{}\".format(permission))\n student = getattr(self, \"self_{}\".format(permission))\n return (parent and student)\n except Exception:\n logger.error(\"Could not retrieve permissions for {}\".format(permission))"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef name_plural(self):\n return \"freshmen\" if (self._number and self._number == 9) else \"{}s\".format(self._name) if self._name else \"\"", "response": "Return the grade s plural name"} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_personal_info(user):\n # change this to not use other_phones\n num_phones = len(user.phones.all() or [])\n num_emails = len(user.emails.all() or [])\n num_websites = len(user.websites.all() or [])\n\n personal_info = {}\n\n for i in range(num_phones):\n personal_info[\"phone_{}\".format(i)] = user.phones.all()[i]\n\n for i in range(num_emails):\n personal_info[\"email_{}\".format(i)] = user.emails.all()[i]\n\n for i in range(num_websites):\n personal_info[\"website_{}\".format(i)] = user.websites.all()[i]\n\n num_fields = {\"phones\": num_phones, \"emails\": num_emails, \"websites\": num_websites}\n\n return personal_info, num_fields", "response": "Get a user s personal info attributes to pass as an initial value to a\n PersonalInformationForm."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets a user s preferred picture attributes to pass as an initial value to a PreferredPictureForm.", "response": "def get_preferred_pic(user):\n \"\"\"Get a user's preferred picture attributes to pass as an initial value to a\n PreferredPictureForm.\"\"\"\n\n # FIXME: remove this hardcoded junk\n preferred_pic = {\"preferred_photo\": \"AUTO\"}\n if user.preferred_photo:\n preferred_pic[\"preferred_photo\"] = user.preferred_photo.grade_number\n\n return preferred_pic"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_privacy_options(user):\n\n privacy_options = {}\n\n for ptype in user.permissions:\n for field in user.permissions[ptype]:\n if ptype == \"self\":\n privacy_options[\"{}-{}\".format(field, ptype)] = user.permissions[ptype][field]\n else:\n privacy_options[field] = user.permissions[ptype][field]\n\n return privacy_options", "response": "Get a user s privacy options to pass as an initial value to a PrivacyOptionsForm."} {"SOURCE": "codesearchnet", 
"instruction": "Write a Python 3 function for\ngetting a user s notification options to pass as an initial value to a NotificationOptionsForm.", "response": "def get_notification_options(user):\n \"\"\"Get a user's notification options to pass as an initial value to a\n NotificationOptionsForm.\"\"\"\n\n notification_options = {}\n notification_options[\"receive_news_emails\"] = user.receive_news_emails\n notification_options[\"receive_eighth_emails\"] = user.receive_eighth_emails\n try:\n notification_options[\"primary_email\"] = user.primary_email\n except Email.DoesNotExist:\n user.primary_email = None\n user.save()\n notification_options[\"primary_email\"] = None\n\n return notification_options"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef preferences_view(request):\n user = request.user\n\n if request.method == \"POST\":\n logger.debug(dict(request.POST))\n phone_formset, email_formset, website_formset, errors = save_personal_info(request, user)\n if user.is_student:\n preferred_pic_form = save_preferred_pic(request, user)\n bus_route_form = save_bus_route(request, user)\n else:\n preferred_pic_form = None\n bus_route_form = None\n privacy_options_form = save_privacy_options(request, user)\n notification_options_form = save_notification_options(request, user)\n\n for error in errors:\n messages.error(request, error)\n\n try:\n save_gcm_options(request, user)\n except AttributeError:\n pass\n\n return redirect(\"preferences\")\n\n else:\n phone_formset = PhoneFormset(instance=user, prefix='pf')\n email_formset = EmailFormset(instance=user, prefix='ef')\n website_formset = WebsiteFormset(instance=user, prefix='wf')\n\n if user.is_student:\n preferred_pic = get_preferred_pic(user)\n bus_route = get_bus_route(user)\n logger.debug(preferred_pic)\n preferred_pic_form = PreferredPictureForm(user, initial=preferred_pic)\n bus_route_form = BusRouteForm(user, initial=bus_route)\n else:\n bus_route_form = None\n 
preferred_pic = None\n preferred_pic_form = None\n\n privacy_options = get_privacy_options(user)\n logger.debug(privacy_options)\n privacy_options_form = PrivacyOptionsForm(user, initial=privacy_options)\n\n notification_options = get_notification_options(user)\n logger.debug(notification_options)\n notification_options_form = NotificationOptionsForm(user, initial=notification_options)\n\n context = {\n \"phone_formset\": phone_formset,\n \"email_formset\": email_formset,\n \"website_formset\": website_formset,\n \"preferred_pic_form\": preferred_pic_form,\n \"privacy_options_form\": privacy_options_form,\n \"notification_options_form\": notification_options_form,\n \"bus_route_form\": bus_route_form if settings.ENABLE_BUS_APP else None\n }\n return render(request, \"preferences/preferences.html\", context)", "response": "View and process updates to the preferences page."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef privacy_options_view(request):\n if \"user\" in request.GET:\n user = User.objects.user_with_ion_id(request.GET.get(\"user\"))\n elif \"student_id\" in request.GET:\n user = User.objects.user_with_student_id(request.GET.get(\"student_id\"))\n else:\n user = request.user\n\n if not user:\n messages.error(request, \"Invalid user.\")\n user = request.user\n\n if user.is_eighthoffice:\n user = None\n\n if user:\n if request.method == \"POST\":\n privacy_options_form = save_privacy_options(request, user)\n else:\n privacy_options = get_privacy_options(user)\n privacy_options_form = PrivacyOptionsForm(user, initial=privacy_options)\n\n context = {\"privacy_options_form\": privacy_options_form, \"profile_user\": user}\n else:\n context = {\"profile_user\": user}\n return render(request, \"preferences/privacy_options.html\", context)", "response": "View and edit privacy options for a user."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 
function does\ndef scan(cls, formats=ALL_CODE_TYPES, camera=-1):\n app = AndroidApplication.instance()\n r = app.create_future()\n\n #: Initiate a scan\n pkg = BarcodePackage.instance()\n pkg.setBarcodeResultListener(pkg.getId())\n pkg.onBarcodeResult.connect(r.set_result)\n\n intent = cls(app)\n if formats:\n intent.setDesiredBarcodeFormats(formats)\n if camera != -1:\n intent.setCameraId(camera)\n intent.initiateScan()\n\n return r", "response": "Shortcut for a scan of the current locale."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninitializing the underlying widget.", "response": "def init_widget(self):\n \"\"\" Initialize the underlying widget.\n \"\"\"\n super(AndroidBarcodeView, self).init_widget()\n d = self.declaration\n\n #: Observe activity state changes\n app = self.get_context()\n app.observe('state', self.on_activity_lifecycle_changed)\n\n if d.active:\n self.set_active(d.active)\n if d.light:\n self.set_light(d.light)\n self.widget.barcodeResult.connect(self.on_barcode_result)\n if d.scanning:\n self.set_scanning(d.scanning)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef on_activity_lifecycle_changed(self, change):\n d = self.declaration\n if d.active:\n if change['value'] == 'paused':\n self.widget.pause(now=True)\n elif change['value'] == 'resumed':\n self.widget.resume()", "response": "Called when the activity lifecycle of the user has changed."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef destroy(self):\n if self.widget:\n self.set_active(False)\n super(AndroidBarcodeView, self).destroy()", "response": "Cleanup the activty lifecycle listener"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nmakes an API call to iNaturalist.", "response": "def make_inaturalist_api_get_call(endpoint: str, params: Dict, **kwargs) -> 
requests.Response:\n \"\"\"Make an API call to iNaturalist.\n\n endpoint is a string such as 'observations' !! do not put / in front\n method: 'GET', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE'\n kwargs are passed to requests.request\n Returns a requests.Response object\n \"\"\"\n headers = {'Accept': 'application/json'}\n\n response = requests.get(urljoin(INAT_NODE_API_BASE_URL, endpoint), params, headers=headers, **kwargs)\n return response"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting details about an observation.", "response": "def get_observation(observation_id: int) -> Dict[str, Any]:\n \"\"\"Get details about an observation.\n\n :param observation_id:\n :returns: a dict with details on the observation\n :raises: ObservationNotFound\n \"\"\"\n\n r = get_observations(params={'id': observation_id})\n if r['results']:\n return r['results'][0]\n\n raise ObservationNotFound()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_observations(params: Dict) -> Dict[str, Any]:\n\n r = make_inaturalist_api_get_call('observations', params=params)\n return r.json()", "response": "Search observations in Inaturalist API."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nlike get_observations but handles pagination so you can get all the results in one shot.", "response": "def get_all_observations(params: Dict) -> List[Dict[str, Any]]:\n \"\"\"Like get_observations() but handles pagination so you get all the results in one shot.\n\n Some params will be overwritten: order_by, order, per_page, id_above (do NOT specify page when using this).\n\n Returns a list of dicts (one entry per observation)\n \"\"\"\n\n # According to the doc: \"The large size of the observations index prevents us from supporting the page parameter\n # when retrieving records from large result sets. 
If you need to retrieve large numbers of records, use the\n # per_page and id_above or id_below parameters instead.\n\n results = [] # type: List[Dict[str, Any]]\n id_above = 0\n\n while True:\n iteration_params = merge_two_dicts(params, {\n 'order_by': 'id',\n 'order': 'asc',\n 'per_page': PER_PAGE_RESULTS,\n 'id_above': id_above\n })\n\n page_obs = get_observations(params=iteration_params)\n results = results + page_obs['results']\n\n if page_obs['total_results'] <= PER_PAGE_RESULTS:\n return results\n\n sleep(THROTTLING_DELAY)\n id_above = results[-1]['id']"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_observation_fields(search_query: str=\"\", page: int=1) -> List[Dict[str, Any]]:\n payload = {\n 'q': search_query,\n 'page': page\n } # type: Dict[str, Union[int, str]]\n\n response = requests.get(\"{base_url}/observation_fields.json\".format(base_url=INAT_BASE_URL), params=payload)\n return response.json()", "response": "Search the ( globally available ) observation\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nliking get_observation_fields but handles pagination for you.", "response": "def get_all_observation_fields(search_query: str=\"\") -> List[Dict[str, Any]]:\n \"\"\"\n Like get_observation_fields(), but handles pagination for you.\n\n :param search_query: a string to search\n \"\"\"\n results = [] # type: List[Dict[str, Any]]\n page = 1\n\n while True:\n r = get_observation_fields(search_query=search_query, page=page)\n\n if not r:\n return results\n\n results = results + r\n page = page + 1\n sleep(THROTTLING_DELAY)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef put_observation_field_values(observation_id: int, observation_field_id: int, value: Any,\n access_token: str) -> Dict[str, Any]:\n # TODO: Also implement a put_or_update_observation_field_values() that deletes then 
recreates the field_value?\n # TODO: Write example use in docstring.\n # TODO: Return some meaningful exception if it fails because the field is already set.\n # TODO: Also show in example to obtain the observation_field_id?\n # TODO: What happens when parameters are invalid\n # TODO: It appears pushing the same value/pair twice in a row (but deleting it meanwhile via the UI)...\n # TODO: ...triggers an error 404 the second time (report to iNaturalist?)\n \"\"\"Sets an observation field (value) on an observation.\n\n :param observation_id:\n :param observation_field_id:\n :param value\n :param access_token: access_token: the access token, as returned by :func:`get_access_token()`\n\n :returns: iNaturalist's response as a dict, for example:\n {'id': 31,\n 'observation_id': 18166477,\n 'observation_field_id': 31,\n 'value': 'fouraging',\n 'created_at': '2012-09-29T11:05:44.935+02:00',\n 'updated_at': '2018-11-13T10:49:47.985+01:00',\n 'user_id': 1,\n 'updater_id': 1263313,\n 'uuid': 'b404b654-1bf0-4299-9288-52eeda7ac0db',\n 'created_at_utc': '2012-09-29T09:05:44.935Z',\n 'updated_at_utc': '2018-11-13T09:49:47.985Z'}\n\n Will fail if this observation_field is already set for this observation.\n \"\"\"\n\n payload = {\n 'observation_field_value': {\n 'observation_id': observation_id,\n 'observation_field_id': observation_field_id,\n 'value': value\n }\n }\n\n response = requests.put(\"{base_url}/observation_field_values/{id}\".format(base_url=INAT_BASE_URL,\n id=observation_field_id),\n headers=_build_auth_header(access_token),\n json=payload)\n\n response.raise_for_status()\n return response.json()", "response": "Put an observation field value on the iNaturalist."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets an access token using the user s iNaturalist username and password.", "response": "def get_access_token(username: str, password: str, app_id: str, app_secret: str) -> str:\n \"\"\"\n Get an access token using the 
user's iNaturalist username and password.\n\n (you still need an iNaturalist app to do this)\n\n :param username:\n :param password:\n :param app_id:\n :param app_secret:\n :return: the access token, example use: headers = {\"Authorization\": \"Bearer %s\" % access_token}\n \"\"\"\n payload = {\n 'client_id': app_id,\n 'client_secret': app_secret,\n 'grant_type': \"password\",\n 'username': username,\n 'password': password\n }\n\n response = requests.post(\"{base_url}/oauth/token\".format(base_url=INAT_BASE_URL), payload)\n try:\n return response.json()[\"access_token\"]\n except KeyError:\n raise AuthenticationError(\"Authentication error, please check credentials.\")"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nuploading a picture and assign it to an existing observation.", "response": "def add_photo_to_observation(observation_id: int, file_object: BinaryIO, access_token: str):\n \"\"\"Upload a picture and assign it to an existing observation.\n\n :param observation_id: the ID of the observation\n :param file_object: a file-like object for the picture. 
Example: open('/Users/nicolasnoe/vespa.jpg', 'rb')\n :param access_token: the access token, as returned by :func:`get_access_token()`\n \"\"\"\n data = {'observation_photo[observation_id]': observation_id}\n file_data = {'file': file_object}\n\n response = requests.post(url=\"{base_url}/observation_photos\".format(base_url=INAT_BASE_URL),\n headers=_build_auth_header(access_token),\n data=data,\n files=file_data)\n\n return response.json()"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a single or several observations.", "response": "def create_observations(params: Dict[str, Dict[str, Any]], access_token: str) -> List[Dict[str, Any]]:\n \"\"\"Create a single or several (if passed an array) observations).\n\n :param params:\n :param access_token: the access token, as returned by :func:`get_access_token()`\n\n :return: iNaturalist's JSON response, as a Python object\n :raise: requests.HTTPError, if the call is not successful. iNaturalist returns an error 422 (unprocessable entity)\n if it rejects the observation data (for example an observation date in the future or a latitude > 90. 
In\n that case the exception's `response` attribute give details about the errors.\n\n allowed params: see https://www.inaturalist.org/pages/api+reference#post-observations\n\n Example:\n\n params = {'observation':\n {'species_guess': 'Pieris rapae'},\n }\n\n TODO investigate: according to the doc, we should be able to pass multiple observations (in an array, and in\n renaming observation to observations, but as far as I saw they are not created (while a status of 200 is returned)\n \"\"\"\n response = requests.post(url=\"{base_url}/observations.json\".format(base_url=INAT_BASE_URL),\n json=params,\n headers=_build_auth_header(access_token))\n response.raise_for_status()\n return response.json()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nupdating an observation in iNaturalist.", "response": "def update_observation(observation_id: int, params: Dict[str, Any], access_token: str) -> List[Dict[str, Any]]:\n \"\"\"\n Update a single observation. See https://www.inaturalist.org/pages/api+reference#put-observations-id\n\n :param observation_id: the ID of the observation to update\n :param params: to be passed to iNaturalist API\n :param access_token: the access token, as returned by :func:`get_access_token()`\n\n :return: iNaturalist's JSON response, as a Python object\n :raise: requests.HTTPError, if the call is not successful. 
iNaturalist returns an error 410 if the observation\n doesn't exists or belongs to another user (as of November 2018).\n \"\"\"\n\n response = requests.put(url=\"{base_url}/observations/{id}.json\".format(base_url=INAT_BASE_URL, id=observation_id),\n json=params,\n headers=_build_auth_header(access_token))\n response.raise_for_status()\n return response.json()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef delete_observation(observation_id: int, access_token: str) -> List[Dict[str, Any]]:\n\n headers = _build_auth_header(access_token)\n headers['Content-type'] = 'application/json'\n\n response = requests.delete(url=\"{base_url}/observations/{id}.json\".format(base_url=INAT_BASE_URL,\n id=observation_id),\n headers=headers)\n response.raise_for_status()\n # According to iNaturalist documentation, proper JSON should be returned. It seems however that the response is\n # currently empty (while the requests succeed), so you may receive a JSONDecode exception.\n # TODO: report to iNaturalist team if the issue persists\n return response.json()", "response": "Delete an observation.\n\n :param observation_id:\n :param access_token:\n :return:"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create_instance(self, parent):\n self.kwargs['instantiate'] = True\n self.kwargs['parent'] = parent\n instance = self.cls(*self.args, **self.kwargs)\n instance._field_seqno = self._field_seqno\n return instance", "response": "Create an instance based off this placeholder with some parent"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _ph2f(self, placeholder):\n if issubclass(placeholder.cls, FieldAccessor):\n return placeholder.cls.access(self._parent, placeholder)\n return self._parent.lookup_field_by_placeholder(placeholder)", "response": "Lookup a field given a field placeholder"} {"SOURCE": 
"codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef validate(self, data, offset):\n recorded_checksum = self.field.getval()\n\n # convert negative offset to positive\n if offset < 0:\n offset += len(data)\n\n # replace checksum region with zero\n data = b''.join((data[:offset],\n b\"\\x00\" * self.bytes_required,\n data[offset + self.bytes_required:]))\n actual_checksum = self.algo(data[self.start:self.end])\n if recorded_checksum != actual_checksum:\n raise SuitcaseChecksumException(\n \"recorded checksum %r did not match actual %r. full data: %r\",\n recorded_checksum, actual_checksum, data)", "response": "Validates the checksum of the record."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef packed_checksum(self, data):\n self.field.setval(self.algo(data[self.start:self.end]))\n sio = BytesIO()\n self.field.pack(sio)\n return sio.getvalue()", "response": "Given the data of the entire packet return the checksum bytes"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nresolving the deferred field attribute access.", "response": "def access(cls, parent, placeholder):\n \"\"\"Resolve the deferred field attribute access.\n\n :param cls: the FieldAccessor class\n :param parent: owning structure of the field being accessed\n :param placeholder: FieldPlaceholder object which holds our info\n :returns: FieldAccessor instance for that field's attribute\n \"\"\"\n try:\n return parent.lookup_field_by_placeholder(placeholder)\n except KeyError:\n field_placeholder, name = placeholder.args\n # Find the field whose attribute is being accessed.\n field = parent.lookup_field_by_placeholder(field_placeholder)\n # Instantiate the real FieldAccessor that wraps the attribute.\n accessor = cls(field, name, parent=parent, instantiate=True)\n # Keep this instance alive, and reuse it for future references.\n parent._placeholder_to_field[placeholder] = 
accessor\n return accessor"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncalculate the Kermit CRC16 checksum for some data", "response": "def crc16_kermit(data, crc=0):\n \"\"\"Calculate/Update the Kermit CRC16 checksum for some data\"\"\"\n tab = CRC16_KERMIT_TAB # minor optimization (now in locals)\n for byte in six.iterbytes(data):\n tbl_idx = (crc ^ byte) & 0xff\n crc = (tab[tbl_idx] ^ (crc >> 8)) & 0xffff\n return crc & 0xffff"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef crc16_ccitt(data, crc=0):\n tab = CRC16_CCITT_TAB # minor optimization (now in locals)\n for byte in six.iterbytes(data):\n crc = (((crc << 8) & 0xff00) ^ tab[((crc >> 8) & 0xff) ^ byte])\n return crc & 0xffff", "response": "Calculate the crc16 ccitt checksum of some data"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef feed(self, new_bytes):\n self._available_bytes += new_bytes\n callbacks = []\n try:\n while True:\n packet = six.next(self._packet_generator)\n if packet is None:\n break\n else:\n callbacks.append(partial(self.packet_callback, packet))\n except Exception:\n # When we receive an exception, we assume that the _available_bytes\n # has already been updated and we just choked on a field. That\n # is, unless the number of _available_bytes has not changed. In\n # that case, we reset the buffered entirely\n\n # TODO: black hole may not be the best. What should the logging\n # behavior be?\n self.reset()\n\n # callbacks are partials that are bound to packet already. We do\n # this in order to separate out parsing activity (and error handling)\n # from the execution of callbacks. 
Callbacks should not in any way\n # rely on the parsers position in the byte stream.\n for callback in callbacks:\n callback()", "response": "Feed a new set of bytes into the protocol handler and return the parsed version of the new bytes."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nunpack bytes from a stream of data field - by - field.", "response": "def unpack_stream(self, stream):\n \"\"\"Unpack bytes from a stream of data field-by-field\n\n In the most basic case, the basic algorithm here is as follows::\n\n for _name, field in self.ordered_fields:\n length = field.bytes_required\n data = stream.read(length)\n field.unpack(data)\n\n This logic is complicated somewhat by the handling of variable length\n greedy fields (there may only be one). The logic when we see a\n greedy field (bytes_required returns None) in the stream is to\n pivot and parse the remaining fields starting from the last and\n moving through the stream backwards. There is also some special\n logic present for dealing with checksum fields.\n\n \"\"\"\n crc_fields = []\n greedy_field = None\n # go through the fields from first to last. 
If we hit a greedy\n # field, break out of the loop\n for i, (name, field) in enumerate(self.ordered_fields):\n if isinstance(field, CRCField):\n crc_fields.append((field, stream.tell()))\n length = field.bytes_required\n if field.is_substructure():\n remaining_data = stream.getvalue()[stream.tell():]\n returned_stream = field.unpack(remaining_data, trailing=True)\n if returned_stream is not None:\n consumed = returned_stream.tell()\n else:\n consumed = 0\n # We need to fast forward by as much as was consumed by the structure\n stream.seek(stream.tell() + consumed)\n continue\n elif length is None:\n if isinstance(field, FieldArray) and field.num_elements is not None:\n # Read the data greedily now, and we'll backtrack after enough elements have been read.\n data = stream.read()\n else:\n greedy_field = field\n break\n else:\n data = stream.read(length)\n if len(data) != length:\n raise SuitcaseParseError(\"While attempting to parse field \"\n \"%r we tried to read %s bytes but \"\n \"we were only able to read %s.\" %\n (name, length, len(data)))\n\n try:\n unused_data = field.unpack(data)\n stream.seek(-len(unused_data or \"\"), os.SEEK_CUR)\n except SuitcaseException:\n raise # just re-raise these\n except Exception:\n exc_type = SuitcaseParseError\n _, exc_value, exc_traceback = sys.exc_info()\n exc_value = exc_type(\"Unexpected exception while unpacking field %r: %s\" % (name, str(exc_value)))\n six.reraise(exc_type, exc_value, exc_traceback)\n\n if greedy_field is not None:\n remaining_data = stream.read()\n inverted_stream = BytesIO(remaining_data[::-1])\n\n # work through the remaining fields in reverse order in order\n # to narrow in on the right bytes for the greedy field\n reversed_remaining_fields = self.ordered_fields[(i + 1):][::-1]\n for _name, field in reversed_remaining_fields:\n if isinstance(field, CRCField):\n crc_fields.append(\n (field, -inverted_stream.tell() - field.bytes_required))\n length = field.bytes_required\n data = 
inverted_stream.read(length)[::-1]\n if len(data) != length:\n raise SuitcaseParseError(\"While attempting to parse field \"\n \"%r we tried to read %s bytes but \"\n \"we were only able to read %s.\" %\n (_name, length, len(data)))\n try:\n field.unpack(data)\n except SuitcaseException:\n raise # just re-raise these\n except Exception:\n exc_type = SuitcaseParseError\n _, exc_value, exc_traceback = sys.exc_info()\n exc_value = exc_type(\"Unexpected exception while unpacking field %r: %s\" % (name, str(exc_value)))\n six.reraise(exc_type, exc_value, exc_traceback)\n\n greedy_data_chunk = inverted_stream.read()[::-1]\n greedy_field.unpack(greedy_data_chunk)\n\n if crc_fields:\n data = stream.getvalue()\n for (crc_field, offset) in crc_fields:\n crc_field.validate(data, offset)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck if input is valid URL", "response": "def validate(self, instance, value):\n \"\"\"Check if input is valid URL\"\"\"\n value = super(URL, self).validate(instance, value)\n parsed_url = urlparse(value)\n if not parsed_url.scheme or not parsed_url.netloc:\n self.error(instance, value, extra='URL needs scheme and netloc.')\n parse_result = ParseResult(\n scheme=parsed_url.scheme,\n netloc=parsed_url.netloc,\n path=parsed_url.path,\n params='' if self.remove_parameters else parsed_url.params,\n query='' if self.remove_parameters else parsed_url.query,\n fragment='' if self.remove_fragment else parsed_url.fragment,\n )\n parse_result = parse_result.geturl()\n return parse_result"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nserialize this Singleton instance to a dictionary.", "response": "def serialize(self, include_class=True, save_dynamic=False, **kwargs):\n \"\"\"Serialize Singleton instance to a dictionary.\n\n This behaves identically to HasProperties.serialize, except it also\n saves the identifying name in the dictionary as well.\n \"\"\"\n json_dict = super(Singleton, 
self).serialize(\n include_class=include_class,\n save_dynamic=save_dynamic,\n **kwargs\n )\n json_dict['_singleton_id'] = self._singleton_id\n return json_dict"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate a Singleton instance from a serialized dictionary. This behaves identically to HasProperties.deserialize, except if the singleton is already found in the singleton registry the existing value is used. .. note:: If property values differ from the existing singleton and the input dictionary, the new values from the input dictionary will be ignored", "response": "def deserialize(cls, value, trusted=False, strict=False,\n assert_valid=False, **kwargs):\n \"\"\"Create a Singleton instance from a serialized dictionary.\n\n This behaves identically to HasProperties.deserialize, except if\n the singleton is already found in the singleton registry the existing\n value is used.\n\n .. note::\n\n If property values differ from the existing singleton and\n the input dictionary, the new values from the input dictionary\n will be ignored\n \"\"\"\n if not isinstance(value, dict):\n raise ValueError('HasProperties must deserialize from dictionary')\n identifier = value.pop('_singleton_id', value.get('name'))\n if identifier is None:\n raise ValueError('Singleton classes must contain identifying name')\n if identifier in cls._SINGLETONS:\n return cls._SINGLETONS[identifier]\n value = value.copy()\n name = value.get('name', None)\n value.update({'name': identifier})\n newinst = super(Singleton, cls).deserialize(\n value,\n trusted=trusted,\n strict=strict,\n assert_valid=assert_valid,\n **kwargs\n )\n if name:\n newinst.name = name\n return newinst"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nclassing decorator to add change notifications to builtin containers", "response": "def add_properties_callbacks(cls):\n \"\"\"Class decorator to add change notifications to builtin containers\"\"\"\n for name in 
cls._mutators: #pylint: disable=protected-access\n if not hasattr(cls, name):\n continue\n setattr(cls, name, properties_mutator(cls, name))\n for name in cls._operators: #pylint: disable=protected-access\n if not hasattr(cls, name):\n continue\n setattr(cls, name, properties_operator(cls, name))\n for name in cls._ioperators: #pylint: disable=protected-access\n if not hasattr(cls, name):\n continue\n setattr(cls, name, properties_mutator(cls, name, True))\n return cls"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef properties_mutator(cls, name, ioper=False):\n\n def wrapper(self, *args, **kwargs):\n \"\"\"Mutate if not part of HasProperties; copy/modify/set otherwise\"\"\"\n if (\n getattr(self, '_instance', None) is None or\n getattr(self, '_name', '') == '' or\n self is not getattr(self._instance, self._name)\n ):\n return getattr(super(cls, self), name)(*args, **kwargs)\n copy = cls(self)\n val = getattr(copy, name)(*args, **kwargs)\n if not ioper:\n setattr(self._instance, self._name, copy)\n self._instance = None\n self._name = ''\n return val\n\n wrapped = getattr(cls, name)\n wrapper.__name__ = wrapped.__name__\n wrapper.__doc__ = wrapped.__doc__\n return wrapper", "response": "Wraps a mutating container method to add HasProperties notifications to the current HasProperties instance."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef properties_operator(cls, name):\n\n def wrapper(self, *args, **kwargs):\n \"\"\"Perform operation and cast to container class\"\"\"\n output = getattr(super(cls, self), name)(*args, **kwargs)\n return cls(output)\n\n wrapped = getattr(cls, name)\n wrapper.__name__ = wrapped.__name__\n wrapper.__doc__ = wrapped.__doc__\n return wrapper", "response": "Wraps a container operator to ensure container class is maintained"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, 
explain what it does\ndef observable_copy(value, name, instance):\n\n container_class = value.__class__\n if container_class in OBSERVABLE_REGISTRY:\n observable_class = OBSERVABLE_REGISTRY[container_class]\n elif container_class in OBSERVABLE_REGISTRY.values():\n observable_class = container_class\n else:\n observable_class = add_properties_callbacks(\n type(container_class)(\n str('Observable{}'.format(container_class.__name__)),\n (container_class,),\n MUTATOR_CATEGORIES,\n )\n )\n OBSERVABLE_REGISTRY[container_class] = observable_class\n value = observable_class(value)\n value._name = name\n value._instance = instance\n return value", "response": "Return an observable container for a given value."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nvalidate a property instance for container items", "response": "def validate_prop(value):\n \"\"\"Validate Property instance for container items\"\"\"\n if (\n isinstance(value, CLASS_TYPES) and\n issubclass(value, HasProperties)\n ):\n value = Instance('', value)\n if not isinstance(value, basic.Property):\n raise TypeError('Contained prop must be a Property instance or '\n 'HasProperties class')\n if value.default is not utils.undefined:\n warn('Contained prop default ignored: {}'.format(value.default),\n RuntimeWarning)\n return value"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef info(self):\n itext = self.class_info\n if self.prop.info:\n itext += ' (each item is {})'.format(self.prop.info)\n if self.max_length is None and self.min_length is None:\n return itext\n if self.max_length is None:\n lentext = 'length >= {}'.format(self.min_length)\n elif self.max_length == self.min_length:\n lentext = 'length of {}'.format(self.min_length)\n else:\n lentext = 'length between {mn} and {mx}'.format(\n mn='0' if self.min_length is None else self.min_length,\n mx=self.max_length,\n )\n return '{} with {}'.format(itext, 
lentext)", "response": "Supplemental description of the list with length and type"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef validate(self, instance, value):\n if not self.coerce and not isinstance(value, self._class_container):\n self.error(instance, value)\n if self.coerce and not isinstance(value, CONTAINERS):\n value = [value]\n if not isinstance(value, self._class_container):\n out_class = self._class_container\n else:\n out_class = value.__class__\n out = []\n for val in value:\n try:\n out += [self.prop.validate(instance, val)]\n except ValueError:\n self.error(instance, val, extra='This item is invalid.')\n return out_class(out)", "response": "Validate the class of the container and validate each element of the container."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef assert_valid(self, instance, value=None):\n valid = super(Tuple, self).assert_valid(instance, value)\n if not valid:\n return False\n if value is None:\n value = instance._get(self.name)\n if value is None:\n return True\n if (\n (self.min_length is not None and len(value) < self.min_length)\n or\n (self.max_length is not None and len(value) > self.max_length)\n ):\n self.error(\n instance=instance,\n value=value,\n extra='(Length is {})'.format(len(value)),\n )\n for val in value:\n if not self.prop.assert_valid(instance, val):\n return False\n return True", "response": "Check if tuple and contained properties are valid"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nserializes a list of items into a list of dictionaries", "response": "def serialize(self, value, **kwargs):\n \"\"\"Return a serialized copy of the tuple\"\"\"\n kwargs.update({'include_class': kwargs.get('include_class', True)})\n if self.serializer is not None:\n return self.serializer(value, **kwargs)\n if value is None:\n return None\n serial_list = 
[self.prop.serialize(val, **kwargs)\n for val in value]\n return serial_list"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a deserialized copy of the tuple", "response": "def deserialize(self, value, **kwargs):\n \"\"\"Return a deserialized copy of the tuple\"\"\"\n kwargs.update({'trusted': kwargs.get('trusted', False)})\n if self.deserializer is not None:\n return self.deserializer(value, **kwargs)\n if value is None:\n return None\n output_list = [self.prop.deserialize(val, **kwargs)\n for val in value]\n return self._class_container(output_list)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a copy of the tuple as a list of HasProperties instances.", "response": "def to_json(value, **kwargs):\n \"\"\"Return a copy of the tuple as a list\n\n If the tuple contains HasProperties instances, they are serialized.\n \"\"\"\n serial_list = [\n val.serialize(**kwargs) if isinstance(val, HasProperties)\n else val for val in value\n ]\n return serial_list"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef sphinx_class(self):\n classdoc = self.prop.sphinx_class().replace(\n ':class:`', '{info} of :class:`'\n )\n return classdoc.format(info=self.class_info)", "response": "Redefine sphinx class to point to prop class"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef assert_valid(self, instance, value=None):\n valid = super(Dictionary, self).assert_valid(instance, value)\n if not valid:\n return False\n if value is None:\n value = instance._get(self.name)\n if value is None:\n return True\n if self.key_prop or self.value_prop:\n for key, val in iteritems(value):\n if self.key_prop:\n self.key_prop.assert_valid(instance, key)\n if self.value_prop:\n self.value_prop.assert_valid(instance, val)\n return True", "response": "Check if dict and contained properties are valid"} {"SOURCE": 
"codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef serialize(self, value, **kwargs):\n kwargs.update({'include_class': kwargs.get('include_class', True)})\n if self.serializer is not None:\n return self.serializer(value, **kwargs)\n if value is None:\n return None\n serial_tuples = [\n (\n self.key_prop.serialize(key, **kwargs),\n self.value_prop.serialize(val, **kwargs)\n )\n for key, val in iteritems(value)\n ]\n try:\n serial_dict = {key: val for key, val in serial_tuples}\n except TypeError as err:\n raise TypeError('Dictionary property {} cannot be serialized - '\n 'keys contain {}'.format(self.name, err))\n return serial_dict", "response": "Serialize the dictionary of the object"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a deserialized copy of the dict", "response": "def deserialize(self, value, **kwargs):\n \"\"\"Return a deserialized copy of the dict\"\"\"\n kwargs.update({'trusted': kwargs.get('trusted', False)})\n if self.deserializer is not None:\n return self.deserializer(value, **kwargs)\n if value is None:\n return None\n output_tuples = [\n (\n self.key_prop.deserialize(key, **kwargs),\n self.value_prop.deserialize(val, **kwargs)\n )\n for key, val in iteritems(value)\n ]\n try:\n output_dict = {key: val for key, val in output_tuples}\n except TypeError as err:\n raise TypeError('Dictionary property {} cannot be deserialized - '\n 'keys contain {}'.format(self.name, err))\n return self._class_container(output_dict)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a copy of the dictionary COOKIE", "response": "def to_json(value, **kwargs):\n \"\"\"Return a copy of the dictionary\n\n If the values are HasProperties instances, they are serialized\n \"\"\"\n serial_dict = {\n key: (\n val.serialize(**kwargs) if isinstance(val, HasProperties)\n else val\n )\n for key, val in iteritems(value)\n }\n return 
serial_dict"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsplits a dictionary based keys that correspond to Properties Returns: **(props_dict, others_dict)** - Tuple of two dictionaries. The first contains key/value pairs from the input dictionary that correspond to the Properties of the input HasProperties class. The second contains the remaining key/value pairs. **Parameters**: * **has_props_cls** - HasProperties class or instance used to filter the dictionary * **input_dict** - Dictionary to filter * **include_immutable** - If True (the default), immutable properties (i.e. Properties that inherit from GettableProperty but not Property) are included in props_dict. If False, immutable properties are excluded from props_dict. For example .. code:: class Profile(properties.HasProperties): name = properties.String('First and last name') age = properties.Integer('Age, years') bio_dict = { 'name': 'Bill', 'age': 65, 'hometown': 'Bakersfield', 'email': 'bill@gmail.com', } (props, others) = properties.filter_props(Profile, bio_dict) assert set(props) == {'name', 'age'} assert set(others) == {'hometown', 'email'}", "response": "def filter_props(has_props_cls, input_dict, include_immutable=True):\n \"\"\"Split a dictionary based keys that correspond to Properties\n\n Returns:\n **(props_dict, others_dict)** - Tuple of two dictionaries. The first contains\n key/value pairs from the input dictionary that correspond to the\n Properties of the input HasProperties class. The second contains the remaining key/value\n pairs.\n\n **Parameters**:\n\n * **has_props_cls** - HasProperties class or instance used to filter the\n dictionary\n * **input_dict** - Dictionary to filter\n * **include_immutable** - If True (the default), immutable properties (i.e.\n Properties that inherit from GettableProperty but not Property) are\n included in props_dict. If False, immutable properties are excluded\n from props_dict.\n\n For example\n\n .. 
code::\n\n class Profile(properties.HasProperties):\n name = properties.String('First and last name')\n age = properties.Integer('Age, years')\n\n bio_dict = {\n 'name': 'Bill',\n 'age': 65,\n 'hometown': 'Bakersfield',\n 'email': 'bill@gmail.com',\n }\n\n (props, others) = properties.filter_props(Profile, bio_dict)\n assert set(props) == {'name', 'age'}\n assert set(others) == {'hometown', 'email'}\n \"\"\"\n props_dict = {\n k: v for k, v in iter(input_dict.items()) if (\n k in has_props_cls._props and (\n include_immutable or\n any(\n hasattr(has_props_cls._props[k], att)\n for att in ('required', 'new_name')\n )\n )\n )\n }\n others_dict = {k: v for k, v in iter(input_dict.items())\n if k not in props_dict}\n return (props_dict, others_dict)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef default(self):\n prop_def = getattr(self, '_default', utils.undefined)\n for prop in self.props:\n if prop.default is utils.undefined:\n continue\n if prop_def is utils.undefined:\n prop_def = prop.default\n break\n return prop_def", "response": "Default value of the property"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef assert_valid(self, instance, value=None):\n valid = super(Union, self).assert_valid(instance, value)\n if not valid:\n return False\n if value is None:\n value = instance._get(self.name)\n if value is None:\n return True\n return self._try_prop_method(instance, value, 'assert_valid')", "response": "Check if the Union has a valid value"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nserialize the value into a JSON string.", "response": "def serialize(self, value, **kwargs):\n \"\"\"Return a serialized value\n\n If no serializer is provided, it uses the serialize method of the\n prop corresponding to the value\n \"\"\"\n kwargs.update({'include_class': kwargs.get('include_class', True)})\n if self.serializer is not 
None:\n return self.serializer(value, **kwargs)\n if value is None:\n return None\n for prop in self.props:\n try:\n prop.validate(None, value)\n except GENERIC_ERRORS:\n continue\n return prop.serialize(value, **kwargs)\n return self.to_json(value, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef deserialize(self, value, **kwargs):\n kwargs.update({'trusted': kwargs.get('trusted', False)})\n if self.deserializer is not None:\n return self.deserializer(value, **kwargs)\n if value is None:\n return None\n instance_props = [\n prop for prop in self.props if isinstance(prop, Instance)\n ]\n kwargs = kwargs.copy()\n kwargs.update({\n 'strict': kwargs.get('strict') or self.strict_instances,\n 'assert_valid': self.strict_instances,\n })\n if isinstance(value, dict) and value.get('__class__'):\n clsname = value.get('__class__')\n for prop in instance_props:\n if clsname == prop.instance_class.__name__:\n return prop.deserialize(value, **kwargs)\n for prop in self.props:\n try:\n out_val = prop.deserialize(value, **kwargs)\n prop.validate(None, out_val)\n return out_val\n except GENERIC_ERRORS:\n continue\n return self.from_json(value, **kwargs)", "response": "Deserialize a value into a new object."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef to_json(value, **kwargs):\n if isinstance(value, HasProperties):\n return value.serialize(**kwargs)\n return value", "response": "Return value serialized if value is a HasProperties instance"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nwrap a function that may not accept kwargs so they are accepted by the original function.", "response": "def accept_kwargs(func):\n \"\"\"Wrap a function that may not accept kwargs so they are accepted\n\n The output function will always have call signature of\n :code:`func(val, **kwargs)`, whereas the original 
function may have\n call signatures of :code:`func(val)` or :code:`func(val, **kwargs)`.\n In the case of the former, rather than erroring, kwargs are just\n ignored.\n\n This method is called on serializer/deserializer function; these\n functions always receive kwargs from serialize, but by using this,\n the original functions may simply take a single value.\n \"\"\"\n def wrapped(val, **kwargs):\n \"\"\"Perform a function on a value, ignoring kwargs if necessary\"\"\"\n try:\n return func(val, **kwargs)\n except TypeError:\n return func(val)\n return wrapped"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _in_bounds(prop, instance, value):\n if (\n (prop.min is not None and value < prop.min) or\n (prop.max is not None and value > prop.max)\n ):\n prop.error(instance, value, extra='Not within allowed range.')", "response": "Checks if the value is within the allowed range."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef tag(self, *tag, **kwtags):\n if not tag:\n pass\n elif len(tag) == 1 and isinstance(tag[0], dict):\n self._meta.update(tag[0])\n else:\n raise TypeError('Tags must be provided as key-word arguments or '\n 'a dictionary')\n self._meta.update(kwtags)\n return self", "response": "Tag a Property instance with metadata dictionary"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef assert_valid(self, instance, value=None):\n if value is None:\n value = instance._get(self.name)\n if (\n value is not None and\n not self.equal(value, self.validate(instance, value))\n ):\n message = 'Invalid value for property: {}: {}'.format(\n self.name, value\n )\n raise ValidationError(message, 'invalid', self.name, instance)\n return True", "response": "Returns True if the Property is valid on a HasProperties instance Raises a ValueError if the value is invalid."} {"SOURCE": "codesearchnet", "instruction": 
"How would you implement a function in Python 3 that\nchecks if two valid Property values are equal.", "response": "def equal(self, value_a, value_b): #pylint: disable=no-self-use\n \"\"\"Check if two valid Property values are equal\n\n .. note::\n\n This method assumes that :code:`None` and\n :code:`properties.undefined` are never passed in as values\n \"\"\"\n equal = value_a == value_b\n if hasattr(equal, '__iter__'):\n return all(equal)\n return equal"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nestablish access to GettableProperty values", "response": "def get_property(self):\n \"\"\"Establishes access of GettableProperty values\"\"\"\n\n scope = self\n\n def fget(self):\n \"\"\"Call the HasProperties _get method\"\"\"\n return self._get(scope.name)\n\n return property(fget=fget, doc=scope.sphinx())"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef deserialize(self, value, **kwargs): #pylint: disable=unused-argument\n kwargs.update({'trusted': kwargs.get('trusted', False)})\n if self.deserializer is not None:\n return self.deserializer(value, **kwargs)\n if value is None:\n return None\n return self.from_json(value, **kwargs)", "response": "Deserialize input value to valid Property value."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ngenerate a : code : ValueError for invalid value assignment", "response": "def error(self, instance, value, error_class=None, extra=''):\n \"\"\"Generate a :code:`ValueError` for invalid value assignment\n\n The instance is the containing HasProperties instance, but it may\n be None if the error is raised outside a HasProperties class.\n \"\"\"\n error_class = error_class or ValidationError\n prefix = 'The {} property'.format(self.__class__.__name__)\n if self.name != '':\n prefix = prefix + \" '{}'\".format(self.name)\n if instance is not None:\n prefix = prefix + ' of a {cls} instance'.format(\n 
cls=instance.__class__.__name__,\n )\n print_value = repr(value)\n if len(print_value) > 107:\n print_value = '{} ... {}'.format(\n print_value[:50], print_value[-50:]\n )\n message = (\n '{prefix} must be {info}. An invalid value of {val} {vtype} was '\n 'specified.{extra}'.format(\n prefix=prefix,\n info=self.info or 'corrected',\n val=print_value,\n vtype=type(value),\n extra=' {}'.format(extra) if extra else '',\n )\n )\n if issubclass(error_class, ValidationError):\n raise error_class(message, 'invalid', self.name, instance)\n raise error_class(message)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ngenerate Sphinx - formatted documentation for the Property", "response": "def sphinx(self):\n \"\"\"Generate Sphinx-formatted documentation for the Property\"\"\"\n try:\n assert __IPYTHON__\n classdoc = ''\n except (NameError, AssertionError):\n scls = self.sphinx_class()\n classdoc = ' ({})'.format(scls) if scls else ''\n\n prop_doc = '**{name}**{cls}: {doc}{info}'.format(\n name=self.name,\n cls=classdoc,\n doc=self.doc,\n info=', {}'.format(self.info) if self.info else '',\n )\n return prop_doc"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef setter(self, func):\n if not callable(func):\n raise TypeError('setter must be callable function')\n if hasattr(func, '__code__') and func.__code__.co_argcount != 2:\n raise TypeError('setter must be a function with two arguments')\n if func.__name__ != self.name:\n raise TypeError('setter function must have same name as getter')\n self._set_func = func\n return self", "response": "Register a set function for the DynamicProperty\n "} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef deleter(self, func):\n if not callable(func):\n raise TypeError('deleter must be callable function')\n if hasattr(func, '__code__') and func.__code__.co_argcount != 1:\n raise TypeError('deleter must 
be a function with two arguments')\n if func.__name__ != self.name:\n raise TypeError('deleter function must have same name as getter')\n self._del_func = func\n return self", "response": "Register a delete function for the DynamicProperty\n "} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nestablishes the dynamic behavior of the property values", "response": "def get_property(self):\n \"\"\"Establishes the dynamic behavior of Property values\"\"\"\n scope = self\n\n def fget(self):\n \"\"\"Call dynamic function then validate output\"\"\"\n value = scope.func(self)\n if value is None or value is undefined:\n return None\n return scope.validate(self, value)\n\n def fset(self, value):\n \"\"\"Validate and call setter\"\"\"\n if scope.set_func is None:\n raise AttributeError('cannot set attribute')\n scope.set_func(self, scope.validate(self, value))\n\n def fdel(self):\n \"\"\"call deleter\"\"\"\n if scope.del_func is None:\n raise AttributeError('cannot delete attribute')\n scope.del_func(self)\n\n return property(fget=fget, fset=fset, fdel=fdel, doc=scope.sphinx())"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning True if the Property is valid on a HasProperties instance Raises a ValueError if the value is not set", "response": "def assert_valid(self, instance, value=None):\n \"\"\"Returns True if the Property is valid on a HasProperties instance\n\n Raises a ValueError if the value required and not set, not valid,\n not correctly coerced, etc.\n\n .. 
note::\n\n Unlike :code:`validate`, this method requires instance to be\n a HasProperties instance; it cannot be None.\n \"\"\"\n if value is None:\n value = instance._get(self.name)\n if value is None and self.required:\n message = (\n \"The '{name}' property of a {cls} instance is required \"\n \"and has not been set.\".format(\n name=self.name,\n cls=instance.__class__.__name__\n )\n )\n raise ValidationError(message, 'missing', self.name, instance)\n valid = super(Property, self).assert_valid(instance, value)\n return valid"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_property(self):\n\n scope = self\n\n def fget(self):\n \"\"\"Call the HasProperties _get method\"\"\"\n return self._get(scope.name)\n\n def fset(self, value):\n \"\"\"Validate value and call the HasProperties _set method\"\"\"\n if value is not undefined:\n value = scope.validate(self, value)\n self._set(scope.name, value)\n\n def fdel(self):\n \"\"\"Set value to utils.undefined on delete\"\"\"\n self._set(scope.name, undefined)\n\n return property(fget=fget, fset=fset, fdel=fdel, doc=scope.sphinx())", "response": "Get the property of the object"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef sphinx(self):\n if callable(self.default):\n default_val = self.default()\n default_str = 'new instance of {}'.format(\n default_val.__class__.__name__\n )\n else:\n default_val = self.default\n default_str = '{}'.format(self.default)\n try:\n if default_val is None or default_val is undefined:\n default_str = ''\n elif len(default_val) == 0: #pylint: disable=len-as-condition\n default_str = ''\n else:\n default_str = ', Default: {}'.format(default_str)\n except TypeError:\n default_str = ', Default: {}'.format(default_str)\n\n prop_doc = super(Property, self).sphinx()\n return '{doc}{default}'.format(doc=prop_doc, default=default_str)", "response": "Basic docstring formatted for Sphinx 
docs"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef validate(self, instance, value):\n if self.cast:\n value = bool(value)\n if not isinstance(value, BOOLEAN_TYPES):\n self.error(instance, value)\n return value", "response": "Checks if value is a boolean"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncoerce JSON string to boolean", "response": "def from_json(value, **kwargs):\n \"\"\"Coerces JSON string to boolean\"\"\"\n if isinstance(value, string_types):\n value = value.upper()\n if value in ('TRUE', 'Y', 'YES', 'ON'):\n return True\n if value in ('FALSE', 'N', 'NO', 'OFF'):\n return False\n if isinstance(value, int):\n return value\n raise ValueError('Could not load boolean from JSON: {}'.format(value))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef validate(self, instance, value):\n try:\n intval = int(value)\n if not self.cast and abs(value - intval) > TOL:\n self.error(\n instance=instance,\n value=value,\n extra='Not within tolerance range of {}.'.format(TOL),\n )\n except (TypeError, ValueError):\n self.error(instance, value, extra='Cannot cast to integer.')\n _in_bounds(self, instance, intval)\n return intval", "response": "Checks that value is an integer and in min and max bounds."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef validate(self, instance, value):\n try:\n floatval = float(value)\n if not self.cast and abs(value - floatval) > TOL:\n self.error(\n instance=instance,\n value=value,\n extra='Not within tolerance range of {}.'.format(TOL),\n )\n except (TypeError, ValueError):\n self.error(instance, value, extra='Cannot cast to float.')\n _in_bounds(self, instance, floatval)\n return floatval", "response": "Checks that value is a float and in min max bounds and returns it."} {"SOURCE": "codesearchnet", "instruction": "How 
would you explain what the following Python 3 function does\ndef validate(self, instance, value):\n try:\n compval = complex(value)\n if not self.cast and (\n abs(value.real - compval.real) > TOL or\n abs(value.imag - compval.imag) > TOL\n ):\n self.error(\n instance=instance,\n value=value,\n extra='Not within tolerance range of {}.'.format(TOL),\n )\n except (TypeError, ValueError, AttributeError):\n self.error(instance, value)\n return compval", "response": "Checks that value is a complex number and that it is not within tolerance range."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef validate(self, instance, value):\n value_type = type(value)\n if not isinstance(value, string_types):\n self.error(instance, value)\n if self.regex is not None and self.regex.search(value) is None: #pylint: disable=no-member\n self.error(instance, value, extra='Regex does not match.')\n value = value.strip(self.strip)\n if self.change_case == 'upper':\n value = value.upper()\n elif self.change_case == 'lower':\n value = value.lower()\n if self.unicode:\n value = text_type(value)\n else:\n value = value_type(value)\n return value", "response": "Check if value is a string and strips it and changes case"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nformatting string to display the available choices", "response": "def info(self):\n \"\"\"Formatted string to display the available choices\"\"\"\n if self.descriptions is None:\n choice_list = ['\"{}\"'.format(choice) for choice in self.choices]\n else:\n choice_list = [\n '\"{}\" ({})'.format(choice, self.descriptions[choice])\n for choice in self.choices\n ]\n if len(self.choices) == 2:\n return 'either {} or {}'.format(choice_list[0], choice_list[1])\n return 'any of {}'.format(', '.join(choice_list))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef validate(self, instance, value): #pylint: 
disable=inconsistent-return-statements\n if not isinstance(value, string_types):\n self.error(instance, value)\n for key, val in self.choices.items():\n test_value = value if self.case_sensitive else value.upper()\n test_key = key if self.case_sensitive else key.upper()\n test_val = val if self.case_sensitive else [_.upper() for _ in val]\n if test_value == test_key or test_value in test_val:\n return key\n self.error(instance, value, extra='Not an available choice.')", "response": "Check if input is a valid string based on the choices"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef validate(self, instance, value):\n if isinstance(value, string_types):\n value = COLORS_NAMED.get(value, value)\n if value.upper() == 'RANDOM':\n value = random.choice(COLORS_20)\n value = value.upper().lstrip('#')\n if len(value) == 3:\n value = ''.join(v*2 for v in value)\n if len(value) != 6:\n self.error(instance, value, extra='Color must be known name '\n 'or a hex with 6 digits. e.g. 
\"#FF0000\"')\n try:\n value = [\n int(value[i:i + 6 // 3], 16) for i in range(0, 6, 6 // 3)\n ]\n except ValueError:\n self.error(instance, value,\n extra='Hex color must be base 16 (0-F)')\n if not isinstance(value, (list, tuple)):\n self.error(instance, value,\n extra='Color must be a list or tuple of length 3')\n if len(value) != 3:\n self.error(instance, value, extra='Color must be length 3')\n for val in value:\n if not isinstance(val, integer_types) or not 0 <= val <= 255:\n self.error(instance, value,\n extra='Color values must be ints 0-255.')\n return tuple(value)", "response": "Check if input is valid color and converts to RGB"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck if value is a valid datetime object or JSON datetime string", "response": "def validate(self, instance, value):\n \"\"\"Check if value is a valid datetime object or JSON datetime string\"\"\"\n if isinstance(value, datetime.datetime):\n return value\n if not isinstance(value, string_types):\n self.error(\n instance=instance,\n value=value,\n extra='Cannot convert non-strings to datetime.',\n )\n try:\n return self.from_json(value)\n except ValueError:\n self.error(\n instance=instance,\n value=value,\n extra='Invalid format for converting to datetime.',\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef validate(self, instance, value):\n if not isinstance(value, uuid.UUID):\n self.error(instance, value)\n return value", "response": "Check that value is a valid UUID instance"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef valid_modes(self):\n default_mode = (self.mode,) if self.mode is not None else None\n return getattr(self, '_valid_mode', default_mode)", "response": "Valid modes of an open file"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_property(self):\n\n prop = 
super(File, self).get_property()\n\n # scope is the Property instance\n scope = self\n\n def fdel(self):\n \"\"\"Set value to utils.undefined on delete\"\"\"\n if self._get(scope.name) is not None:\n self._get(scope.name).close()\n self._set(scope.name, undefined)\n\n new_prop = property(fget=prop.fget, fset=prop.fset,\n fdel=fdel, doc=scope.sphinx())\n return new_prop", "response": "Establishes access of the property values and returns a property object that is a copy of the property."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef validate(self, instance, value):\n if isinstance(value, string_types) and self.mode is not None:\n try:\n value = open(value, self.mode)\n except (IOError, TypeError):\n self.error(instance, value,\n extra='Cannot open file: {}'.format(value))\n if not all([hasattr(value, attr) for attr in ('read', 'seek')]):\n self.error(instance, value, extra='Not a file-like object')\n if not hasattr(value, 'mode') or self.valid_modes is None:\n pass\n elif value.mode not in self.valid_modes:\n self.error(instance, value,\n extra='Invalid mode: {}'.format(value.mode))\n if getattr(value, 'closed', False):\n self.error(instance, value, extra='File is closed.')\n return value", "response": "Checks that the value is a valid file open in the correct mode and if it is not open it attempts to open it with the given mode. If it is a string it attempts to open it with the given mode. If it is a file - like object it is checked to see if it is a valid file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef display_warning(self):\n if self.warn:\n warnings.warn(\n \"\\nProperty '{}' is deprecated and may be removed in the \"\n \"future. 
Please use '{}'.\".format(self.name, self.new_name),\n FutureWarning, stacklevel=3\n )", "response": "Display a FutureWarning about using a Renamed Property"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_property(self):\n scope = self\n\n def fget(self):\n \"\"\"Call dynamic function then validate output\"\"\"\n scope.display_warning()\n return getattr(self, scope.new_name)\n\n def fset(self, value):\n \"\"\"Validate and call setter\"\"\"\n scope.display_warning()\n setattr(self, scope.new_name, value)\n\n def fdel(self):\n \"\"\"call deleter\"\"\"\n scope.display_warning()\n delattr(self, scope.new_name)\n\n return property(fget=fget, fset=fset, fdel=fdel, doc=scope.sphinx())", "response": "Establishes the dynamic behavior of Property values"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef validate(self, instance, value):\n try:\n if isinstance(value, self.instance_class):\n return value\n if isinstance(value, dict):\n return self.instance_class(**value)\n return self.instance_class(value)\n except GENERIC_ERRORS as err:\n if hasattr(err, 'error_tuples'):\n extra = '({})'.format(' & '.join(\n err_tup.message for err_tup in err.error_tuples\n ))\n else:\n extra = ''\n self.error(instance, value, extra=extra)", "response": "Check if value is valid type of instance_class\n "} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef assert_valid(self, instance, value=None):\n valid = super(Instance, self).assert_valid(instance, value)\n if not valid:\n return False\n if value is None:\n value = instance._get(self.name)\n if isinstance(value, HasProperties):\n value.validate()\n return True", "response": "Checks if valid including HasProperty instances pass validation"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef serialize(self, value, **kwargs):\n 
kwargs.update({'include_class': kwargs.get('include_class', True)})\n if self.serializer is not None:\n return self.serializer(value, **kwargs)\n if value is None:\n return None\n if isinstance(value, HasProperties):\n return value.serialize(**kwargs)\n return self.to_json(value, **kwargs)", "response": "Serialize the value to JSON."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nconvert instance to JSON", "response": "def to_json(value, **kwargs):\n \"\"\"Convert instance to JSON\"\"\"\n if isinstance(value, HasProperties):\n return value.serialize(**kwargs)\n try:\n return json.loads(json.dumps(value))\n except TypeError:\n raise TypeError(\n \"Cannot convert type {} to JSON without calling 'serialize' \"\n \"on an instance of Instance Property and registering a custom \"\n \"serializer\".format(value.__class__.__name__)\n )"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef sphinx_class(self):\n classdoc = ':class:`{cls} <{pref}.{cls}>`'.format(\n cls=self.instance_class.__name__,\n pref=self.instance_class.__module__,\n )\n return classdoc", "response": "Redefine sphinx class so documentation links to instance_class"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef properties_observer(instance, prop, callback, **kwargs):\n change_only = kwargs.get('change_only', True)\n observer(instance, prop, callback, change_only=change_only)", "response": "Adds properties callback handler"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nupdating the target value to source value", "response": "def _update(self, *_):\n \"\"\"Set target value to source value\"\"\"\n if getattr(self, '_unlinked', False):\n return\n if getattr(self, '_updating', False):\n return\n self._updating = True\n try:\n setattr(self.target[0], self.target[1], self.transform(\n getattr(self.source[0], self.source[1])\n ))\n finally:\n 
self._updating = False"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _validate(item):\n if not isinstance(item, tuple) or len(item) != 2:\n raise ValueError('Linked items must be instance/prop-name tuple')\n if not isinstance(item[0], tuple(LINK_OBSERVERS)):\n raise ValueError('Only {} instances may be linked'.format(\n ', '.join([link_cls.__name__ for link_cls in LINK_OBSERVERS])\n ))\n if not isinstance(item[1], string_types):\n raise ValueError('Properties must be specified as string names')\n if not hasattr(item[0], item[1]):\n raise ValueError('Invalid property {} for {} instance'.format(\n item[1], item[0].__class__.__name__\n ))", "response": "Validate the given item"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ndetermining if array is valid based on shape and dtype.", "response": "def validate(self, instance, value):\n \"\"\"Determine if array is valid based on shape and dtype\"\"\"\n if not isinstance(value, (tuple, list, np.ndarray)):\n self.error(instance, value)\n if self.coerce:\n value = self.wrapper(value)\n valid_class = (\n self.wrapper if isinstance(self.wrapper, type) else np.ndarray\n )\n if not isinstance(value, valid_class):\n self.error(instance, value)\n allowed_kinds = ''.join(TYPE_MAPPINGS[typ] for typ in self.dtype)\n if value.dtype.kind not in allowed_kinds:\n self.error(instance, value, extra='Invalid dtype.')\n if self.shape is None:\n return value\n for shape in self.shape:\n if len(shape) != value.ndim:\n continue\n for i, shp in enumerate(shape):\n if shp not in ('*', value.shape[i]):\n break\n else:\n return value\n self.error(instance, value, extra='Invalid shape.')"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngenerates a ValueError on setting property to an invalid value", "response": "def error(self, instance, value, error_class=None, extra=''):\n \"\"\"Generates a ValueError on 
setting property to an invalid value\"\"\"\n error_class = error_class or ValidationError\n if not isinstance(value, (list, tuple, np.ndarray)):\n super(Array, self).error(instance, value, error_class, extra)\n if isinstance(value, (list, tuple)):\n val_description = 'A {typ} of length {len}'.format(\n typ=value.__class__.__name__,\n len=len(value)\n )\n else:\n val_description = 'An array of shape {shp} and dtype {typ}'.format(\n shp=value.shape,\n typ=value.dtype\n )\n\n if instance is None:\n prefix = '{} property'.format(self.__class__.__name__)\n else:\n prefix = \"The '{name}' property of a {cls} instance\".format(\n name=self.name,\n cls=instance.__class__.__name__,\n )\n message = (\n '{prefix} must be {info}. {desc} was specified. {extra}'.format(\n prefix=prefix,\n info=self.info,\n desc=val_description,\n extra=extra,\n )\n )\n if issubclass(error_class, ValidationError):\n raise error_class(message, 'invalid', self.name, instance)\n raise error_class(message)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nconverts array to JSON list nan values are converted to string nan inf values to inf.", "response": "def to_json(value, **kwargs):\n \"\"\"Convert array to JSON list\n\n nan values are converted to string 'nan', inf values to 'inf'.\n \"\"\"\n def _recurse_list(val):\n if val and isinstance(val[0], list):\n return [_recurse_list(v) for v in val]\n return [str(v) if np.isnan(v) or np.isinf(v) else v for v in val]\n return _recurse_list(value.tolist())"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nchecking shape and dtype of vector and scales it to given length", "response": "def validate(self, instance, value):\n \"\"\"Check shape and dtype of vector and scales it to given length\"\"\"\n\n value = super(BaseVector, self).validate(instance, value)\n\n if self.length is not None:\n try:\n value.length = self._length_array(value)\n except ZeroDivisionError:\n self.error(\n 
instance, value,\n error_class=ZeroDivValidationError,\n extra='The vector must have a length specified.'\n )\n return value"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef validate(self, instance, value):\n if isinstance(value, string_types):\n if (\n value.upper() not in VECTOR_DIRECTIONS or\n value.upper() in ('Z', '-Z', 'UP', 'DOWN')\n ):\n self.error(instance, value)\n value = VECTOR_DIRECTIONS[value.upper()][:2]\n\n return super(Vector2, self).validate(instance, value)", "response": "Check shape and dtype of vector\n validate also coerces the vector from valid strings"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef validate(self, instance, value):\n if not isinstance(value, (tuple, list, np.ndarray)):\n self.error(instance, value)\n if isinstance(value, (tuple, list)):\n for i, val in enumerate(value):\n if isinstance(val, string_types):\n if val.upper() not in VECTOR_DIRECTIONS:\n self.error(\n instance=instance,\n value=val,\n extra='This is an invalid Vector3 representation.',\n )\n value[i] = VECTOR_DIRECTIONS[val.upper()]\n\n return super(Vector3Array, self).validate(instance, value)", "response": "Check shape and dtype of vector\nCOOKIES validate also coerces the vector from valid strings and scales it to the given length."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nevaluates the given string with respect to the locale defined by lng.", "response": "def localize(self, lng: str) -> str:\n \"\"\"\n Evaluate the given string with respect to the locale defined by ``lng``.\n\n If no string is available in the currently active language, this will give you\n the string in the system's default language. If this is unavailable as well, it\n will give you the string in the first language available.\n\n :param lng: A locale code, e.g. ``de``. 
If you specify a code including a country\n or region like ``de-AT``, exact matches will be used preferably, but if only\n a ``de`` or ``de-AT`` translation exists, this might be returned as well.\n \"\"\"\n if self.data is None:\n return \"\"\n\n if isinstance(self.data, dict):\n firstpart = lng.split('-')[0]\n similar = [l for l in self.data.keys() if (l.startswith(firstpart + \"-\") or firstpart == l) and l != lng]\n if self.data.get(lng):\n return self.data[lng]\n elif self.data.get(firstpart):\n return self.data[firstpart]\n elif similar and any([self.data.get(s) for s in similar]):\n for s in similar:\n if self.data.get(s):\n return self.data.get(s)\n elif self.data.get(settings.LANGUAGE_CODE):\n return self.data[settings.LANGUAGE_CODE]\n elif len(self.data):\n return list(self.data.items())[0][1]\n else:\n return \"\"\n else:\n return str(self.data)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nadds listeners to a HasProperties instance", "response": "def _set_listener(instance, obs):\n \"\"\"Add listeners to a HasProperties instance\"\"\"\n if obs.names is everything:\n names = list(instance._props)\n else:\n names = obs.names\n for name in names:\n if name not in instance._listeners:\n instance._listeners[name] = {typ: [] for typ in LISTENER_TYPES}\n instance._listeners[name][obs.mode] += [obs]"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets the listeners of a changed Property on a HasProperties instance", "response": "def _get_listeners(instance, change):\n \"\"\"Gets listeners of changed Property on a HasProperties instance\"\"\"\n if (\n change['mode'] not in listeners_disabled._quarantine and #pylint: disable=protected-access\n change['name'] in instance._listeners\n ):\n return instance._listeners[change['name']][change['mode']]\n return []"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 
function\ndef observer(names_or_instance, names=None, func=None, change_only=False):\n\n mode = 'observe_change' if change_only else 'observe_set'\n\n if names is None and func is None:\n return Observer(names_or_instance, mode)\n obs = Observer(names, mode)(func)\n _set_listener(names_or_instance, obs)\n return obs", "response": "Defines a callback function that will fire on a property or property value of a HasProperties object."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ndefines a callback function to fire on class validation or property set", "response": "def validator(names_or_instance, names=None, func=None):\n \"\"\"Specify a callback function to fire on class validation OR property set\n\n This function has two modes of operation:\n\n 1. Registering callback functions that validate Property values when\n they are set, before the change is saved to the HasProperties instance.\n This mode is very similar to the :code:`observer` function.\n 2. Registering callback functions that fire only when the HasProperties\n :code:`validate` method is called. This allows for cross-validation\n of Properties that should only fire when all required Properties are\n set.\n\n **Mode 1:**\n\n Validator functions on a HasProperties class fire on set but before the\n observed Property or Properties have been changed (unlike observer\n functions that fire after the value has been changed).\n\n You can use this method as a decorator inside a HasProperties class\n\n .. code::\n\n @properties.validator('variable_name')\n def callback_function(self, change):\n print(change)\n\n or you can use it to register a function to a single HasProperties\n instance\n\n .. code::\n\n properties.validator(my_has_props, 'variable_name', callback_function)\n\n The variable name must refer to a Property name on the HasProperties\n class. 
A list of Property names may also be used; the same\n callback function will fire when any of these Properties change. Also,\n :class:`properties.everything <properties.utils.Sentinel>` may be\n specified instead of the variable name. In that case, the callback\n function will fire when any Property changes.\n\n The callback function must take two arguments. The first is the\n HasProperties instance; the second is the change notification dictionary.\n This dictionary contains:\n\n * 'name' - the name of the changed Property\n * 'previous' - the value of the Property prior to change (this will be\n :code:`properties.undefined` if the value was not previously set)\n * 'value' - the new value of the Property (this will be\n :code:`properties.undefined` if the value is deleted)\n * 'mode' - the mode of the change; for validators, this is 'validate'\n\n **Mode 2:**\n\n When used as a decorator without arguments (i.e. called directly on a\n HasProperties method), the decorated method is registered as a class\n validator. These methods execute only when :code:`validate()` is called\n on the HasProperties instance.\n\n .. 
code::\n\n @properties.validator\n def validation_method(self):\n print('validating instance of {}'.format(self.__class__))\n\n The decorated function must only take one argument, the HasProperties\n instance.\n \"\"\"\n\n if names is None and func is None:\n if callable(names_or_instance):\n return ClassValidator(names_or_instance)\n return Observer(names_or_instance, 'validate')\n val = Observer(names, 'validate')(func)\n _set_listener(names_or_instance, val)\n return val"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef build_from_bases(bases, classdict, attr, attr_dict):\n output = OrderedDict()\n output_keys = set()\n all_bases = []\n # Go through the bases from furthest to nearest ancestor\n for base in reversed(bases):\n # Only keep the items that are still defined on the bases\n if base is not object and isinstance(base, PropertyMetaclass):\n output_keys = output_keys.union(getattr(base, attr))\n # Collect all bases so we ensure overridden items are assigned\n # in the correct order\n for item in reversed(base.__mro__):\n if item is object or not isinstance(item, PropertyMetaclass):\n continue\n if item not in all_bases:\n all_bases.append(item)\n # Update the items in reverse MRO order; only keep those that are\n # defined on the bases\n for base in all_bases:\n for key, val in iteritems(getattr(base, attr)):\n if key in base.__dict__ and key in output_keys:\n output.update({key: val})\n # Remove all items that were overridden by this class; this is\n # potentially a superset of the items added back in the next step.\n for key in classdict:\n if key in output:\n output.pop(key)\n # Update the items with those defined on this class\n output.update(attr_dict)\n return output", "response": "Helper function to build private HasProperties attributes from bases classdict and attr."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef 
equal(value_a, value_b):\n if (\n not isinstance(value_a, HasProperties) or\n not isinstance(value_b, HasProperties)\n ):\n return value_a == value_b\n if getattr(value_a, '_testing_equality', False):\n return False\n value_a._testing_equality = True #pylint: disable=protected-access\n try:\n if value_a is value_b:\n return True\n if value_a.__class__ is not value_b.__class__:\n return False\n for prop in itervalues(value_a._props):\n prop_a = getattr(value_a, prop.name)\n prop_b = getattr(value_b, prop.name)\n if prop_a is None and prop_b is None:\n continue\n if (\n prop_a is not None and\n prop_b is not None and\n prop.equal(prop_a, prop_b)\n ):\n continue\n return False\n return True\n finally:\n value_a._testing_equality = False", "response": "Returns True if two values are equivalent to the current value."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a copy of a HasProperties instance", "response": "def copy(value, **kwargs):\n \"\"\"Return a copy of a **HasProperties** instance\n\n A copy is produced by serializing the HasProperties instance then\n deserializing it to a new instance. Therefore, if any properties\n cannot be serialized/deserialized, :code:`copy` will fail. 
Any\n keyword arguments will be passed through to both :code:`serialize`\n and :code:`deserialize`.\n \"\"\"\n\n if not isinstance(value, HasProperties):\n raise ValueError('properties.copy may only be used to copy'\n 'HasProperties instances')\n kwargs.update({'include_class': kwargs.get('include_class', True)})\n kwargs.update({'trusted': kwargs.get('trusted', True)})\n return value.__class__.deserialize(value.serialize(**kwargs), **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreverts specified property to default value", "response": "def _reset(self, name=None):\n \"\"\"Revert specified property to default value\n\n If no property is specified, all properties are returned to default.\n \"\"\"\n if name is None:\n for key in self._props:\n if isinstance(self._props[key], basic.Property):\n self._reset(key)\n return\n if name not in self._props:\n raise AttributeError(\"Input name '{}' is not a known \"\n \"property or attribute\".format(name))\n if not isinstance(self._props[name], basic.Property):\n raise AttributeError(\"Cannot reset GettableProperty \"\n \"'{}'\".format(name))\n if name in self._defaults:\n val = self._defaults[name]\n else:\n val = self._props[name].default\n if callable(val):\n val = val()\n setattr(self, name, val)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nvalidating the current object.", "response": "def validate(self):\n \"\"\"Call all registered class validator methods\n\n These are all methods decorated with :code:`@properties.validator`.\n Validator methods are expected to raise a ValidationError if they\n fail.\n \"\"\"\n if getattr(self, '_getting_validated', False):\n return True\n self._getting_validated = True\n self._validation_error_tuples = []\n self._non_validation_error = None\n try:\n for val in itervalues(self._class_validators):\n try:\n if isinstance(val.func, string_types):\n valid = getattr(self, val.func)()\n else:\n valid 
= val.func(self)\n if valid is False:\n raise utils.ValidationError(\n 'Validation failed', None, None, self\n )\n except utils.ValidationError as val_err:\n self._validation_error_tuples += val_err.error_tuples\n except GENERIC_ERRORS as err:\n if not self._non_validation_error:\n self._non_validation_error = err\n if self._validation_error_tuples:\n self._error_hook(self._validation_error_tuples)\n msgs = ['Validation failed:']\n msgs += [val.message for val in self._validation_error_tuples]\n raise utils.ValidationError(\n message='\\n- '.join(msgs),\n _error_tuples=self._validation_error_tuples,\n )\n if self._non_validation_error:\n raise self._non_validation_error #pylint: disable=raising-bad-type\n return True\n finally:\n self._getting_validated = False\n self._validation_error_tuples = None\n self._non_validation_error = None"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nasserts that all the properties are valid on validate", "response": "def _validate_props(self):\n \"\"\"Assert that all the properties are valid on validate()\"\"\"\n for key, prop in iteritems(self._props):\n try:\n value = self._get(key)\n err_msg = 'Invalid value for property {}: {}'.format(key, value)\n if value is not None:\n change = dict(name=key, previous=value, value=value,\n mode='validate')\n self._notify(change)\n if not prop.equal(value, change['value']):\n raise utils.ValidationError(err_msg, 'invalid',\n prop.name, self)\n if not prop.assert_valid(self):\n raise utils.ValidationError(err_msg, 'invalid',\n prop.name, self)\n except utils.ValidationError as val_err:\n if getattr(self, '_validation_error_tuples', None) is not None:\n self._validation_error_tuples += val_err.error_tuples\n else:\n raise\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef serialize(self, include_class=True, save_dynamic=False, **kwargs):\n if getattr(self, 
'_getting_serialized', False):\n raise utils.SelfReferenceError('Object contains unserializable '\n 'self reference')\n self._getting_serialized = True\n try:\n kwargs.update({\n 'include_class': include_class,\n 'save_dynamic': save_dynamic\n })\n if save_dynamic:\n prop_source = self._props\n else:\n prop_source = self._backend\n data = (\n (key, self._props[key].serialize(getattr(self, key), **kwargs))\n for key in prop_source\n )\n json_dict = {k: v for k, v in data if v is not None}\n if include_class:\n json_dict.update({'__class__': self.__class__.__name__})\n return json_dict\n finally:\n self._getting_serialized = False", "response": "Serializes a HasProperties instance to a dictionary."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef deserialize(cls, value, trusted=False, strict=False, #pylint: disable=too-many-locals\n assert_valid=False, **kwargs):\n \"\"\"Creates **HasProperties** instance from serialized dictionary\n\n This uses the Property deserializers to deserialize all\n JSON-compatible dictionary values into their corresponding Property\n values on a new instance of a **HasProperties** class. Extra keys\n in the dictionary that do not correspond to Properties will be\n ignored.\n\n **Parameters**:\n\n * **value** - Dictionary to deserialize new instance from.\n * **trusted** - If True (and if the input dictionary has\n :code:`'__class__'` keyword and this class is in the registry), the\n new **HasProperties** class will come from the dictionary.\n If False (the default), only the **HasProperties** class this\n method is called on will be constructed.\n * **strict** - Requires :code:`'__class__'`, if present on the input\n dictionary, to match the deserialized instance's class. Also\n disallows unused properties in the input dictionary. 
Default\n is False.\n * **assert_valid** - Require deserialized instance to be valid.\n Default is False.\n * Any other keyword arguments will be passed through to the Property\n deserializers.\n \"\"\"\n if not isinstance(value, dict):\n raise ValueError(\n 'HasProperties class {} must deserialize from dictionary, '\n 'not input of type {}'.format(\n cls.__name__, value.__class__.__name__\n )\n )\n output_cls = cls._deserialize_class(\n input_cls_name=value.get('__class__'),\n trusted=trusted,\n strict=strict,\n )\n instance = kwargs.pop('_instance', None)\n if instance is not None and not isinstance(instance, output_cls):\n raise ValueError(\n 'Input _instance must be of class {}, not {}'.format(\n output_cls.__name__, instance.__class__.__name__\n )\n )\n state, unused = utils.filter_props(output_cls, value, True)\n unused.pop('__class__', None)\n if unused and strict:\n raise ValueError(\n 'Unused properties during deserialization: {}'.format(\n ', '.join(unused)\n )\n )\n kwargs.update({'trusted': trusted, 'strict': strict})\n newstate = {}\n for key, val in iteritems(state):\n newstate[key] = output_cls._props[key].deserialize(val, **kwargs)\n mutable, immutable = utils.filter_props(output_cls, newstate, False)\n with handlers.listeners_disabled():\n if instance is None:\n instance = output_cls(**mutable)\n else:\n for key, val in iteritems(mutable):\n setattr(instance, key, val)\n for key, val in iteritems(immutable):\n valid_val = output_cls._props[key].validate(instance, val)\n instance._backend[key] = valid_val\n if assert_valid and not instance.validate():\n raise utils.ValidationError('Deserialized instance is not valid')\n return instance", "response": "Deserializes a dictionary into a new instance of the specified class."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _deserialize_class(cls, input_cls_name, trusted, strict):\n if not input_cls_name or input_cls_name == cls.__name__:\n 
return cls\n if trusted and input_cls_name in cls._REGISTRY:\n return cls._REGISTRY[input_cls_name]\n if strict:\n raise ValueError(\n 'Class name {} from deserialization input dictionary does '\n 'not match input class {}'.format(input_cls_name, cls.__name__)\n )\n return cls", "response": "Returns the HasProperties class to use for deserialization"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef report_status(self, status):\n status = Instance('', TaskStatus).validate(None, status)\n print(r'{taskname} | {percent:>3}% | {message}'.format(\n taskname=self.__class__.__name__,\n percent=int(round(100*status.progress)),\n message=status.message if status.message else '',\n ))", "response": "Hook for reporting the task status towards completion"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nserializes nested HasUID instances to a flat dictionary.", "response": "def serialize(self, include_class=True, save_dynamic=False, **kwargs):\n \"\"\"Serialize nested HasUID instances to a flat dictionary\n\n **Parameters**:\n\n * **include_class** - If True (the default), the name of the class\n will also be saved to the serialized dictionary under key\n :code:`'__class__'`\n * **save_dynamic** - If True, dynamic properties are written to\n the serialized dict (default: False).\n * You may also specify a **registry** - This is the flat dictionary\n where UID/HasUID pairs are stored. 
By default, no registry need\n be provided; a new dictionary will be created.\n * Any other keyword arguments will be passed through to the Property\n serializers.\n \"\"\"\n registry = kwargs.pop('registry', None)\n if registry is None:\n registry = dict()\n if not registry:\n root = True\n registry.update({'__root__': self.uid})\n else:\n root = False\n key = self.uid\n if key not in registry:\n registry.update({key: None})\n registry.update({key: super(HasUID, self).serialize(\n registry=registry,\n include_class=include_class,\n save_dynamic=save_dynamic,\n **kwargs\n )})\n if root:\n return registry\n return key"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef deserialize(self, value, **kwargs):\n kwargs.update({'trusted': kwargs.get('trusted', False)})\n if self.deserializer is not None:\n return self.deserializer(value, **kwargs)\n if value is None:\n return None\n if isinstance(value, string_types):\n return value\n if issubclass(self.instance_class, base.HasProperties):\n return self.instance_class.deserialize(value, **kwargs)\n return self.from_json(value, **kwargs)", "response": "Deserialize instance from JSON value."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck if value is an open PNG file valid filename or png. 
Image Returns an open bytestream of the image", "response": "def validate(self, instance, value):\n \"\"\"Checks if value is an open PNG file, valid filename, or png.Image\n\n Returns an open bytestream of the image\n \"\"\"\n # Pass if already validated\n if getattr(value, '__valid__', False):\n return value\n # Validate that value is PNG\n if isinstance(value, png.Image):\n pass\n else:\n value = super(ImagePNG, self).validate(instance, value)\n try:\n png.Reader(value).validate_signature()\n except png.FormatError:\n self.error(instance, value, extra='Open file is not PNG.')\n value.seek(0)\n # Write input to new bytestream\n output = BytesIO()\n output.name = self.filename\n output.__valid__ = True\n if isinstance(value, png.Image):\n value.save(output)\n else:\n fid = value\n fid.seek(0)\n output.write(fid.read())\n fid.close()\n output.seek(0)\n return output"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef to_json(value, **kwargs):\n b64rep = base64.b64encode(value.read())\n value.seek(0)\n jsonrep = '{preamble}{b64}'.format(\n preamble=PNG_PREAMBLE,\n b64=b64rep.decode(),\n )\n return jsonrep", "response": "Convert a PNG Image to base64 - encoded JSON"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef from_json(value, **kwargs):\n if not value.startswith(PNG_PREAMBLE):\n raise ValueError('Not a valid base64-encoded PNG image')\n infile = BytesIO()\n rep = base64.b64decode(value[len(PNG_PREAMBLE):].encode('utf-8'))\n infile.write(rep)\n infile.seek(0)\n return infile", "response": "Convert a PNG Image from base64 - encoded JSON"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nvalidate the value against the schema.", "response": "def validate(schema, value, noun='value'):\n \"\"\"\n Checks the value against the schema, and raises ValidationError if validation\n fails.\n \"\"\"\n errors = 
schema.errors(value)\n if errors:\n error_details = ''\n for error in errors:\n if error.pointer:\n error_details += ' - %s: %s\\n' % (error.pointer, error.message)\n else:\n error_details += ' - %s\\n' % error.message\n raise ValidationError('Invalid %s:\\n%s' % (noun, error_details))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef extend(\n self,\n contents=None,\n optional_keys=None,\n allow_extra_keys=None,\n description=None,\n replace_optional_keys=False,\n ):\n \"\"\"\n This method allows you to create a new `Dictionary` that extends the current `Dictionary` with additional\n contents and/or optional keys, and/or replaces the `allow_extra_keys` and/or `description` attributes.\n\n :param contents: More contents, if any, to extend the current contents\n :type contents: dict\n :param optional_keys: More optional keys, if any, to extend the current optional keys\n :type optional_keys: union[set, list, tuple]\n :param allow_extra_keys: If non-`None`, this overrides the current `allow_extra_keys` attribute\n :type allow_extra_keys: bool\n :param description: If non-`None`, this overrides the current `description` attribute\n :type description: union[str, unicode]\n :param replace_optional_keys: If `True`, then the `optional_keys` argument will completely replace, instead of\n extend, the current optional keys\n :type replace_optional_keys: bool\n\n :return: A new `Dictionary` extended from the current `Dictionary` based on the supplied arguments\n :rtype: Dictionary\n \"\"\"\n optional_keys = set(optional_keys or [])\n return Dictionary(\n contents=type(self.contents)(\n (k, v) for d in (self.contents, contents) for k, v in six.iteritems(d)\n ) if contents else self.contents,\n optional_keys=optional_keys if replace_optional_keys else self.optional_keys | optional_keys,\n allow_extra_keys=self.allow_extra_keys if allow_extra_keys is None else allow_extra_keys,\n description=self.description 
if description is None else description,\n )", "response": "This method is used to create a new Dictionary object that is extended from the current Dictionary."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef errors(self, value):\n if value not in self.values:\n return [Error(self._error_message, code=ERROR_CODE_UNKNOWN)]\n return []", "response": "Returns a list of errors with the value. An empty list returns the empty list returns the None return means\n that it s valid."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_data_from_sources(patton_config: PattonRunningConfig,\n dependency_or_banner: str = \"dependency\") \\\n -> List[str]:\n \"\"\"This function try to get data from different sources:\n\n - command line arguments\n - from external input file\n - from stdin\n\n Return a list with the content of all of collected data. A list element by\n each input data found.\n\n :param dependency_or_banner: allowed values are: [\"dependency\" | \"banner\"]\n :type dependency_or_banner: str\n \"\"\"\n\n def _read_stdin() -> str:\n\n # Read input with 5 sec timeout\n while sys.stdin in select.select([sys.stdin], [], [], 2)[0]:\n line = sys.stdin.readline()\n if line:\n yield line\n else: # an empty line means stdin has been closed\n return\n\n # --------------------------------------------------------------------------\n # Find data source\n # --------------------------------------------------------------------------\n dependencies = []\n\n # Data from command line from user?\n if patton_config.nargs_input:\n if dependency_or_banner == \"banner\":\n dependencies.append([\"cli_input\", patton_config.nargs_input])\n else:\n dependencies.extend(patton_config.nargs_input)\n\n # Data form stdin input ?\n if not sys.stdin.isatty():\n input_read = \"\".join(list(_read_stdin()))\n\n # YES => Data from stdin\n if input_read:\n if dependency_or_banner == \"banner\":\n 
dependencies.append([\"stdin\", input_read])\n else:\n dependencies.extend(input_read.splitlines())\n\n # Data from file?\n if patton_config.data_from_file:\n\n f = op.abspath(op.join(op.abspath(os.getcwd()),\n patton_config.data_from_file))\n\n # YES => dependencies from file\n with open(f, \"r\") as f:\n if dependency_or_banner == \"banner\":\n dependencies.append([\"file\", f.read()])\n else:\n dependencies.extend(f.read().splitlines())\n\n # NO data from any other source => Continuous check selected?\n if not dependencies and not patton_config.follow_checking:\n\n # NO data source found => Error! We need some data!\n raise PCException(\"You need to specify andy dependency \"\n \"from any kind of source: stdin, \"\n \"file of cli\")\n\n return dependencies", "response": "This function returns all of the data from different sources."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef parse_banners(banners: List[List[str]],\n patton_config: PattonRunningConfig) -> Set:\n \"\"\"This function try to find the better function to parser input banners\n and parse it\"\"\"\n\n result = set()\n\n for source_type, source_content in banners:\n if source_type == \"file\":\n result.update(nmap_file(source_content))\n\n return result", "response": "This function tries to find the better function to parse input banners\n and return the set of all the files that are found in the banners\n ."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef parse_dependencies(dependencies: List[List[str]],\n patton_config: PattonRunningConfig) -> Dict:\n \"\"\"This function try to find the better function to parser input banners\n and parse it\"\"\"\n\n result = {}\n\n for source_type, source_content in dependencies:\n\n # Select parser\n parser = DEPENDENCIES_PARSERS[patton_config.source_type]\n\n if not source_content:\n continue\n\n if source_type == \"cli_input\":\n fixed_source = 
\"\\n\".join(source_content)\n else:\n fixed_source = source_content\n\n result.update(parser(fixed_source, patton_config))\n\n return result", "response": "This function tries to find the better function to parser input banners\n and parse it"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef on_init(app): # pylint: disable=unused-argument\n docs_path = os.path.abspath(os.path.dirname(__file__))\n root_path = os.path.abspath(os.path.join(docs_path, '..'))\n apidoc_path = 'sphinx-apidoc'\n swg2rst_path = 'swg2rst'\n if hasattr(sys, 'real_prefix'): # Check to see if we are in a virtualenv\n # If we are, assemble the path manually\n bin_path = os.path.abspath(os.path.join(sys.prefix, 'bin'))\n apidoc_path = os.path.join(bin_path, apidoc_path)\n swg2rst_path = os.path.join(bin_path, swg2rst_path)\n check_call([apidoc_path, '-o', docs_path, os.path.join(root_path, 'user_tasks'),\n os.path.join(root_path, 'user_tasks/migrations')])\n json_path = os.path.join(docs_path, 'swagger.json')\n rst_path = os.path.join(docs_path, 'rest_api.rst')\n check_call([swg2rst_path, json_path, '-f', 'rst', '-o', rst_path])", "response": "Run sphinx - apidoc and swg2rst after Sphinx initialization."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_user_task(sender=None, body=None, **kwargs): # pylint: disable=unused-argument\n try:\n task_class = import_string(sender)\n except ImportError:\n return\n if issubclass(task_class.__class__, UserTaskMixin):\n arguments_dict = task_class.arguments_as_dict(*body['args'], **body['kwargs'])\n user_id = _get_user_id(arguments_dict)\n task_id = body['id']\n if body.get('callbacks', []):\n return _create_chain_entry(user_id, task_id, task_class, body['args'], body['kwargs'], body['callbacks'])\n if body.get('chord', None):\n return _create_chord_entry(task_id, task_class, body, user_id)\n parent = 
_get_or_create_group_parent(body, user_id)\n name = task_class.generate_name(arguments_dict)\n total_steps = task_class.calculate_total_steps(arguments_dict)\n UserTaskStatus.objects.get_or_create(\n task_id=task_id, defaults={'user_id': user_id, 'parent': parent, 'name': name, 'task_class': sender,\n 'total_steps': total_steps})\n if parent:\n parent.increment_total_steps(total_steps)", "response": "Create a new UserTaskStatus record for each chain chord or group."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _create_chain_entry(user_id, task_id, task_class, args, kwargs, callbacks, parent=None):\n LOGGER.debug(task_class)\n if issubclass(task_class.__class__, UserTaskMixin):\n arguments_dict = task_class.arguments_as_dict(*args, **kwargs)\n name = task_class.generate_name(arguments_dict)\n total_steps = task_class.calculate_total_steps(arguments_dict)\n parent_name = kwargs.get('user_task_name', '')\n with transaction.atomic():\n if parent is None:\n # First task in the chain, create a status record for it\n parent = UserTaskStatus.objects.create(\n is_container=True, name=parent_name, task_class='celery.chain', task_id=str(uuid4()),\n total_steps=0, user_id=user_id)\n UserTaskStatus.objects.create(\n name=name, parent=parent, task_class=task_class, task_id=task_id, total_steps=total_steps,\n user_id=user_id)\n parent.increment_total_steps(total_steps)\n if parent_name and not parent.name:\n parent.set_name(parent_name)\n for callback in callbacks:\n links = callback.options.get('link', [])\n callback_class = import_string(callback.task)\n _create_chain_entry(user_id, callback.id, callback_class, callback.args, callback.kwargs, links, parent=parent)", "response": "Create and update a chain entry for a new user."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _create_chord_entry(task_id, task_class, message_body, user_id):\n args = 
message_body['args']\n kwargs = message_body['kwargs']\n arguments_dict = task_class.arguments_as_dict(*args, **kwargs)\n name = task_class.generate_name(arguments_dict)\n total_steps = task_class.calculate_total_steps(arguments_dict)\n parent_name = kwargs.get('user_task_name', '')\n chord_data = message_body['chord']\n group_id = message_body['taskset']\n with transaction.atomic():\n group, created = UserTaskStatus.objects.get_or_create(\n task_id=group_id, defaults={'is_container': True, 'name': parent_name, 'task_class': 'celery.group',\n 'total_steps': total_steps, 'user_id': user_id})\n if created:\n # Also create a status for the chord as a whole\n chord = UserTaskStatus.objects.create(\n is_container=True, name=parent_name, task_class='celery.chord', task_id=str(uuid4()),\n total_steps=total_steps, user_id=user_id)\n group.parent = chord\n group.save(update_fields={'parent', 'modified'})\n else:\n chord = None\n group.increment_total_steps(total_steps)\n if parent_name and not group.name:\n group.set_name(parent_name)\n UserTaskStatus.objects.create(\n name=name, parent=group, task_class=task_class, task_id=task_id, total_steps=total_steps, user_id=user_id)\n # chord body task status\n if not created:\n # body being handled by another of the tasks in the header\n return\n task_id = chord_data['options']['task_id']\n body_task = chord_data['task']\n body_class = import_string(body_task).__class__\n if not issubclass(body_class, UserTaskMixin):\n return\n args = chord_data['args']\n kwargs = chord_data['kwargs']\n arguments_dict = body_class.arguments_as_dict(*args, **kwargs)\n name = body_class.generate_name(arguments_dict)\n total_steps = body_class.calculate_total_steps(arguments_dict)\n UserTaskStatus.objects.get_or_create(\n task_id=task_id, defaults={'name': name, 'parent': chord, 'task_class': body_task,\n 'total_steps': total_steps, 'user_id': user_id})\n chord.increment_total_steps(total_steps)", "response": "Create a new ChordEntry for a Celery 
Chord."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _get_or_create_group_parent(message_body, user_id):\n parent_id = message_body.get('taskset', None)\n if not parent_id:\n # Not part of a group\n return None\n parent_class = 'celery.group'\n parent_name = message_body['kwargs'].get('user_task_name', '')\n parent, _ = UserTaskStatus.objects.get_or_create(\n task_id=parent_id, defaults={'is_container': True, 'name': parent_name, 'task_class': parent_class,\n 'total_steps': 0, 'user_id': user_id})\n if parent_name and not parent.name:\n parent.name = parent_name\n parent.save(update_fields={'name', 'modified'})\n return parent", "response": "Determine if the given task belongs to a group or not and if so create a status record for the group."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _get_user_id(arguments_dict):\n if 'user_id' not in arguments_dict:\n raise TypeError('Each invocation of a UserTaskMixin subclass must include the user_id')\n user_id = arguments_dict['user_id']\n try:\n get_user_model().objects.get(pk=user_id)\n except (ValueError, get_user_model().DoesNotExist):\n raise TypeError('Invalid user_id: {}'.format(user_id))\n return user_id", "response": "Get and validate the user_id argument to a task derived from UserTaskMixin."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef task_failed(sender=None, **kwargs):\n if isinstance(sender, UserTaskMixin):\n exception = kwargs['exception']\n if not isinstance(exception, TaskCanceledException):\n # Don't include traceback, since this is intended for end users\n sender.status.fail(str(exception))\n user_task_stopped.send_robust(sender=UserTaskStatus, status=sender.status)", "response": "Called when a task is failed."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncall when a 
task has finished successfully.", "response": "def task_succeeded(sender=None, **kwargs): # pylint: disable=unused-argument\n \"\"\"\n Update the status record accordingly when a :py:class:`UserTaskMixin` finishes successfully.\n \"\"\"\n if isinstance(sender, UserTaskMixin):\n status = sender.status\n # Failed tasks with good exception handling did not succeed just because they ended cleanly\n if status.state not in (UserTaskStatus.CANCELED, UserTaskStatus.FAILED, UserTaskStatus.RETRYING):\n status.succeed()\n user_task_stopped.send_robust(sender=UserTaskStatus, status=sender.status)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\napply palettization to data.", "response": "def palettize(arr, colors, values):\n \"\"\"From start *values* apply *colors* to *data*.\n \"\"\"\n new_arr = np.digitize(arr.ravel(),\n np.concatenate((values,\n [max(np.nanmax(arr),\n values.max()) + 1])))\n new_arr -= 1\n new_arr = new_arr.clip(min=0, max=len(values) - 1)\n try:\n new_arr = np.ma.array(new_arr.reshape(arr.shape), mask=arr.mask)\n except AttributeError:\n new_arr = new_arr.reshape(arr.shape)\n\n return new_arr, tuple(colors)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef colorbar(height, length, colormap):\n cbar = np.tile(np.arange(length) * 1.0 / (length - 1), (height, 1))\n cbar = (cbar * (colormap.values.max() - colormap.values.min())\n + colormap.values.min())\n\n return colormap.colorize(cbar)", "response": "Return the channels of a colorbar."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the channels of a palettebar.", "response": "def palettebar(height, length, colormap):\n \"\"\"Return the channels of a palettebar.\n \"\"\"\n cbar = np.tile(np.arange(length) * 1.0 / (length - 1), (height, 1))\n cbar = (cbar * (colormap.values.max() + 1 - colormap.values.min())\n + colormap.values.min())\n\n return colormap.palettize(cbar)"} 
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef set_range(self, min_val, max_val):\n if min_val > max_val:\n max_val, min_val = min_val, max_val\n self.values = (((self.values * 1.0 - self.values.min()) /\n (self.values.max() - self.values.min()))\n * (max_val - min_val) + min_val)", "response": "Set the range of the colormap to [ min_val max_val )"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nconvert the colormap to a rasterio colormap.", "response": "def to_rio(self):\n \"\"\"Converts the colormap to a rasterio colormap.\n \"\"\"\n self.colors = (((self.colors * 1.0 - self.colors.min()) /\n (self.colors.max() - self.colors.min())) * 255)\n return dict(zip(self.values, tuple(map(tuple, self.colors))))"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncancels the task associated with the specified status record.", "response": "def cancel(self, request, *args, **kwargs): # pylint: disable=unused-argument\n \"\"\"\n Cancel the task associated with the specified status record.\n\n Arguments:\n request (Request): A POST including a task status record ID\n\n Returns\n -------\n Response: A JSON response indicating whether the cancellation succeeded or not\n\n \"\"\"\n status = self.get_object()\n status.cancel()\n serializer = StatusSerializer(status, context={'request': request})\n return Response(serializer.data)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef swagger(request): # pylint: disable=unused-argument\n generator = schemas.SchemaGenerator(title='django-user-tasks REST API')\n return response.Response(generator.get_schema())", "response": "Render Swagger UI and the underlying Open API schema JSON file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef render(self, data, 
accepted_media_type=None, renderer_context=None):\n if 'SWAGGER_JSON_PATH' in os.environ:\n with io.open(os.environ['SWAGGER_JSON_PATH'], 'rb') as f:\n return f.read()\n else:\n return super(ConditionalOpenAPIRenderer, self).render(data, accepted_media_type, renderer_context)", "response": "Render the appropriate Open API JSON file."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nadd authorization checks for the user - tasks models.", "response": "def add_rules():\n \"\"\"\n Use the rules provided in this module to implement authorization checks for the ``django-user-tasks`` models.\n\n These rules allow only superusers and the user who triggered a task to view its status or artifacts, cancel the\n task, or delete the status information and all its related artifacts. Only superusers are allowed to directly\n modify or delete an artifact (or to modify a task status record).\n \"\"\"\n rules.add_perm('user_tasks.view_usertaskstatus', STATUS_PERMISSION)\n rules.add_perm('user_tasks.cancel_usertaskstatus', STATUS_PERMISSION)\n rules.add_perm('user_tasks.change_usertaskstatus', rules.predicates.is_superuser)\n rules.add_perm('user_tasks.delete_usertaskstatus', STATUS_PERMISSION)\n rules.add_perm('user_tasks.view_usertaskartifact', ARTIFACT_PERMISSION)\n rules.add_perm('user_tasks.change_usertaskartifact', rules.predicates.is_superuser)\n rules.add_perm('user_tasks.delete_usertaskartifact', rules.predicates.is_superuser)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nfilter out any artifacts which the requesting user has permission to view.", "response": "def filter_queryset(self, request, queryset, view):\n \"\"\"\n Filter out any artifacts which the requesting user does not have permission to view.\n \"\"\"\n if request.user.is_superuser:\n return queryset\n return queryset.filter(status__user=request.user)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, 
write the documentation\ndef check_image_format(fformat):\n cases = {\"jpg\": \"jpeg\",\n \"jpeg\": \"jpeg\",\n \"tif\": \"tiff\",\n \"tiff\": \"tif\",\n \"pgm\": \"ppm\",\n \"pbm\": \"ppm\",\n \"ppm\": \"ppm\",\n \"bmp\": \"bmp\",\n \"dib\": \"bmp\",\n \"gif\": \"gif\",\n \"im\": \"im\",\n \"pcx\": \"pcx\",\n \"png\": \"png\",\n \"xbm\": \"xbm\",\n \"xpm\": \"xpm\",\n 'jp2': 'jp2',\n }\n fformat = fformat.lower()\n try:\n fformat = cases[fformat]\n except KeyError:\n raise UnknownImageFormat(\"Unknown image format '%s'.\" % fformat)\n return fformat", "response": "Check that the given image format is valid."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _is_pair(item):\n return (isinstance(item, (list, tuple, set)) and\n len(item) == 2 and\n not isinstance(item[0], (list, tuple, set)) and\n not isinstance(item[1], (list, tuple, set)))", "response": "Check if an item is a pair of size 2."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef ycbcr2rgb(y__, cb_, cr_):\n\n kb_ = 0.114\n kr_ = 0.299\n\n r__ = 2 * cr_ / (1 - kr_) + y__\n b__ = 2 * cb_ / (1 - kb_) + y__\n g__ = (y__ - kr_ * r__ - kb_ * b__) / (1 - kr_ - kb_)\n\n return r__, g__, b__", "response": "Convert the three YCbCr channels to RGB channels."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _add_channel(self, chn, color_min, color_max):\n if isinstance(chn, np.ma.core.MaskedArray):\n chn_data = chn.data\n chn_mask = chn.mask\n else:\n chn_data = np.array(chn)\n chn_mask = False\n scaled = ((chn_data - color_min) *\n 1.0 / (color_max - color_min))\n self.channels.append(np.ma.array(scaled, mask=chn_mask))", "response": "Adds a channel to the image object."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _finalize(self, dtype=np.uint8):\n channels 
= []\n if self.mode == \"P\":\n self.convert(\"RGB\")\n if self.mode == \"PA\":\n self.convert(\"RGBA\")\n\n for chn in self.channels:\n if isinstance(chn, np.ma.core.MaskedArray):\n final_data = chn.data.clip(0, 1) * np.iinfo(dtype).max\n else:\n final_data = chn.clip(0, 1) * np.iinfo(dtype).max\n\n if np.issubdtype(dtype, np.integer):\n final_data = np.round(final_data)\n channels.append(np.ma.array(final_data,\n dtype,\n mask=np.ma.getmaskarray(chn)))\n if self.fill_value is not None:\n fill_value = [int(col * np.iinfo(dtype).max)\n for col in self.fill_value]\n else:\n fill_value = None\n return channels, fill_value", "response": "Finalize the image that is put it in RGB mode and set the channels\n in unsigned 8bit format [ 0 255 ) and set the fill_value in unsigned 8bit format [ 0 255 )."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef is_empty(self):\n if(((self.channels == []) and (not self.shape == (0, 0))) or\n ((not self.channels == []) and (self.shape == (0, 0)))):\n raise RuntimeError(\"Channels-shape mismatch.\")\n return self.channels == [] and self.shape == (0, 0)", "response": "Checks for an empty image."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a PIL image from the current image.", "response": "def pil_image(self):\n \"\"\"Return a PIL image from the current image.\n \"\"\"\n channels, fill_value = self._finalize()\n\n if self.is_empty():\n return Pil.new(self.mode, (0, 0))\n\n if self.mode == \"L\":\n if fill_value is not None:\n img = Pil.fromarray(channels[0].filled(fill_value))\n else:\n img = Pil.fromarray(channels[0].filled(0))\n alpha = np.zeros(channels[0].shape, np.uint8)\n mask = np.ma.getmaskarray(channels[0])\n alpha = np.where(mask, alpha, 255)\n pil_alpha = Pil.fromarray(alpha)\n\n img = Pil.merge(\"LA\", (img, pil_alpha))\n elif self.mode == \"LA\":\n if fill_value is not None:\n img = Pil.fromarray(channels[0].filled(fill_value))\n 
pil_alpha = Pil.fromarray(channels[1])\n else:\n img = Pil.fromarray(channels[0].filled(0))\n alpha = np.zeros(channels[0].shape, np.uint8)\n mask = np.ma.getmaskarray(channels[0])\n alpha = np.where(mask, alpha, channels[1])\n pil_alpha = Pil.fromarray(alpha)\n img = Pil.merge(\"LA\", (img, pil_alpha))\n\n elif self.mode == \"RGB\":\n # Mask where all channels have missing data (incomplete data will\n # be shown).\n mask = (np.ma.getmaskarray(channels[0]) &\n np.ma.getmaskarray(channels[1]) &\n np.ma.getmaskarray(channels[2]))\n\n if fill_value is not None:\n pil_r = Pil.fromarray(channels[0].filled(fill_value[0]))\n pil_g = Pil.fromarray(channels[1].filled(fill_value[1]))\n pil_b = Pil.fromarray(channels[2].filled(fill_value[2]))\n img = Pil.merge(\"RGB\", (pil_r, pil_g, pil_b))\n else:\n pil_r = Pil.fromarray(channels[0].filled(0))\n pil_g = Pil.fromarray(channels[1].filled(0))\n pil_b = Pil.fromarray(channels[2].filled(0))\n\n alpha = np.zeros(channels[0].shape, np.uint8)\n alpha = np.where(mask, alpha, 255)\n pil_a = Pil.fromarray(alpha)\n\n img = Pil.merge(\"RGBA\", (pil_r, pil_g, pil_b, pil_a))\n\n elif self.mode == \"RGBA\":\n # Mask where all channels have missing data (incomplete data will\n # be shown).\n mask = (np.ma.getmaskarray(channels[0]) &\n np.ma.getmaskarray(channels[1]) &\n np.ma.getmaskarray(channels[2]) &\n np.ma.getmaskarray(channels[3]))\n\n if fill_value is not None:\n pil_r = Pil.fromarray(channels[0].filled(fill_value[0]))\n pil_g = Pil.fromarray(channels[1].filled(fill_value[1]))\n pil_b = Pil.fromarray(channels[2].filled(fill_value[2]))\n pil_a = Pil.fromarray(channels[3].filled(fill_value[3]))\n img = Pil.merge(\"RGBA\", (pil_r, pil_g, pil_b, pil_a))\n else:\n pil_r = Pil.fromarray(channels[0].filled(0))\n pil_g = Pil.fromarray(channels[1].filled(0))\n pil_b = Pil.fromarray(channels[2].filled(0))\n\n alpha = np.where(mask, 0, channels[3])\n pil_a = Pil.fromarray(alpha)\n\n img = Pil.merge(\"RGBA\", (pil_r, pil_g, pil_b, pil_a))\n\n 
else:\n raise TypeError(\"Does not know how to use mode %s.\" % (self.mode))\n\n return img"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef save(self, filename, compression=6, fformat=None,\n thumbnail_name=None, thumbnail_size=None):\n \"\"\"Save the image to the given *filename*. For some formats like jpg\n and png, the work is delegated to :meth:`pil_save`, which doesn't\n support the *compression* option.\n \"\"\"\n self.pil_save(filename, compression, fformat,\n thumbnail_name, thumbnail_size)", "response": "Save the image to the given filename."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsaves the image to the given filename using PIL.", "response": "def pil_save(self, filename, compression=6, fformat=None,\n thumbnail_name=None, thumbnail_size=None):\n \"\"\"Save the image to the given *filename* using PIL. For now, the\n compression level [0-9] is ignored, due to PIL's lack of support. 
See\n also :meth:`save`.\n \"\"\"\n # PIL does not support compression option.\n del compression\n\n if self.is_empty():\n raise IOError(\"Cannot save an empty image\")\n\n if isinstance(filename, (str, six.text_type)):\n ensure_dir(filename)\n\n fformat = fformat or os.path.splitext(filename)[1][1:4]\n fformat = check_image_format(fformat)\n\n params = {}\n\n if fformat == 'png':\n # Take care of GeoImage.tags (if any).\n params['pnginfo'] = self._pngmeta()\n\n # JPEG images does not support transparency\n if fformat == 'jpeg' and not self.fill_value:\n self.fill_value = [0, 0, 0, 0]\n logger.debug(\"No fill_value provided, setting it to 0.\")\n\n img = self.pil_image()\n img.save(filename, fformat, **params)\n\n if thumbnail_name is not None and thumbnail_size is not None:\n img.thumbnail(thumbnail_size, Pil.ANTIALIAS)\n img.save(thumbnail_name, fformat, **params)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef putalpha(self, alpha):\n alpha = np.ma.array(alpha)\n if(not (alpha.shape[0] == 0 and\n self.shape[0] == 0) and\n alpha.shape != self.shape):\n raise ValueError(\"Alpha channel shape should match image shape\")\n\n if not self.mode.endswith(\"A\"):\n self.convert(self.mode + \"A\")\n if not self.is_empty():\n self.channels[-1] = alpha", "response": "Adds an alpha channel to the current image or replaces it with alpha."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nconverts the image from RGB mode to YCbCr.", "response": "def _rgb2ycbcr(self, mode):\n \"\"\"Convert the image from RGB mode to YCbCr.\"\"\"\n\n self._check_modes((\"RGB\", \"RGBA\"))\n\n (self.channels[0], self.channels[1], self.channels[2]) = \\\n rgb2ycbcr(self.channels[0],\n self.channels[1],\n self.channels[2])\n\n if self.fill_value is not None:\n self.fill_value[0:3] = rgb2ycbcr(self.fill_value[0],\n self.fill_value[1],\n self.fill_value[2])\n\n self.mode = mode"} 
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nconverting the image from YCbCr mode to RGB.", "response": "def _ycbcr2rgb(self, mode):\n \"\"\"Convert the image from YCbCr mode to RGB.\n \"\"\"\n\n self._check_modes((\"YCbCr\", \"YCbCrA\"))\n\n (self.channels[0], self.channels[1], self.channels[2]) = \\\n ycbcr2rgb(self.channels[0],\n self.channels[1],\n self.channels[2])\n\n if self.fill_value is not None:\n self.fill_value[0:3] = ycbcr2rgb(self.fill_value[0],\n self.fill_value[1],\n self.fill_value[2])\n\n self.mode = mode"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _to_p(self, mode):\n\n if self.mode.endswith(\"A\"):\n chans = self.channels[:-1]\n alpha = self.channels[-1]\n self._secondary_mode = self.mode[:-1]\n else:\n chans = self.channels\n alpha = None\n self._secondary_mode = self.mode\n\n palette = []\n selfmask = chans[0].mask\n for chn in chans[1:]:\n selfmask = np.ma.mask_or(selfmask, chn.mask)\n new_chn = np.ma.zeros(self.shape, dtype=int)\n color_nb = 0\n\n for i in range(self.height):\n for j in range(self.width):\n current_col = tuple([chn[i, j] for chn in chans])\n try:\n next(idx\n for idx in range(len(palette))\n if palette[idx] == current_col)\n except StopIteration:\n idx = color_nb\n palette.append(current_col)\n color_nb = color_nb + 1\n\n new_chn[i, j] = idx\n\n if self.fill_value is not None:\n if self.mode.endswith(\"A\"):\n current_col = tuple(self.fill_value[:-1])\n fill_alpha = [self.fill_value[-1]]\n else:\n current_col = tuple(self.fill_value)\n fill_alpha = []\n try:\n next(idx\n for idx in range(len(palette))\n if palette[idx] == current_col)\n except StopIteration:\n idx = color_nb\n palette.append(current_col)\n color_nb = color_nb + 1\n\n self.fill_value = [idx] + fill_alpha\n\n new_chn.mask = selfmask\n self.palette = palette\n if alpha is None:\n self.channels = [new_chn]\n else:\n self.channels = [new_chn, alpha]\n self.mode = 
mode", "response": "Convert the image to P or PA mode."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nconvert the image from P or PA mode.", "response": "def _from_p(self, mode):\n \"\"\"Convert the image from P or PA mode.\n \"\"\"\n\n self._check_modes((\"P\", \"PA\"))\n\n if self.mode.endswith(\"A\"):\n alpha = self.channels[-1]\n else:\n alpha = None\n\n chans = []\n cdfs = []\n\n color_chan = self.channels[0]\n\n for i in range(len(self.palette[0])):\n cdfs.append(np.zeros(len(self.palette)))\n for j in range(len(self.palette)):\n cdfs[i][j] = self.palette[j][i]\n new_chn = np.ma.array(np.interp(color_chan,\n np.arange(len(self.palette)),\n cdfs[i]),\n mask=color_chan.mask)\n chans.append(new_chn)\n\n if self.fill_value is not None:\n if alpha is not None:\n fill_alpha = self.fill_value[-1]\n self.fill_value = list(self.palette[int(self.fill_value[0])])\n self.fill_value += [fill_alpha]\n else:\n self.fill_value = list(self.palette[int(self.fill_value[0])])\n\n self.mode = self._secondary_mode\n self.channels = chans\n if alpha is not None:\n self.channels.append(alpha)\n self.mode = self.mode + \"A\"\n\n self.convert(mode)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _l2rgb(self, mode):\n self._check_modes((\"L\", \"LA\"))\n self.channels.append(self.channels[0].copy())\n self.channels.append(self.channels[0].copy())\n if self.fill_value is not None:\n self.fill_value = self.fill_value[:1] * 3 + self.fill_value[1:]\n if self.mode == \"LA\":\n self.channels[1], self.channels[3] = \\\n self.channels[3], self.channels[1]\n self.mode = mode", "response": "Convert from L to RGB."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nconverts from RGB to monochrome L.", "response": "def _rgb2l(self, mode):\n \"\"\"Convert from RGB to monochrome L.\n \"\"\"\n self._check_modes((\"RGB\", \"RGBA\"))\n\n kb_ = 0.114\n kr_ = 
0.299\n\n r__ = self.channels[0]\n g__ = self.channels[1]\n b__ = self.channels[2]\n\n y__ = kr_ * r__ + (1 - kr_ - kb_) * g__ + kb_ * b__\n\n if self.fill_value is not None:\n self.fill_value = ([rgb2ycbcr(self.fill_value[0],\n self.fill_value[1],\n self.fill_value[2])[0]] +\n self.fill_value[3:])\n\n self.channels = [y__] + self.channels[3:]\n\n self.mode = mode"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nconverts from YCbCr to L.", "response": "def _ycbcr2l(self, mode):\n \"\"\"Convert from YCbCr to L.\n \"\"\"\n self._check_modes((\"YCbCr\", \"YCbCrA\"))\n\n self.channels = [self.channels[0]] + self.channels[3:]\n if self.fill_value is not None:\n self.fill_value = [self.fill_value[0]] + self.fill_value[3:]\n self.mode = mode"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nconvert from L to YCbCr.", "response": "def _l2ycbcr(self, mode):\n \"\"\"Convert from L to YCbCr.\n \"\"\"\n self._check_modes((\"L\", \"LA\"))\n\n luma = self.channels[0]\n zeros = np.ma.zeros(luma.shape)\n zeros.mask = luma.mask\n\n self.channels = [luma, zeros, zeros] + self.channels[1:]\n\n if self.fill_value is not None:\n self.fill_value = [self.fill_value[0], 0, 0] + self.fill_value[1:]\n\n self.mode = mode"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nconverts the current image to the given mode.", "response": "def convert(self, mode):\n \"\"\"Convert the current image to the given *mode*. 
See :class:`Image`\n for a list of available modes.\n \"\"\"\n if mode == self.mode:\n return\n\n if mode not in [\"L\", \"LA\", \"RGB\", \"RGBA\",\n \"YCbCr\", \"YCbCrA\", \"P\", \"PA\"]:\n raise ValueError(\"Mode %s not recognized.\" % (mode))\n\n if self.is_empty():\n self.mode = mode\n return\n\n if mode == self.mode + \"A\":\n self.channels.append(np.ma.ones(self.channels[0].shape))\n if self.fill_value is not None:\n self.fill_value += [1]\n self.mode = mode\n\n elif mode + \"A\" == self.mode:\n self.channels = self.channels[:-1]\n if self.fill_value is not None:\n self.fill_value = self.fill_value[:-1]\n self.mode = mode\n\n elif mode.endswith(\"A\") and not self.mode.endswith(\"A\"):\n self.convert(self.mode + \"A\")\n self.convert(mode)\n\n elif self.mode.endswith(\"A\") and not mode.endswith(\"A\"):\n self.convert(self.mode[:-1])\n self.convert(mode)\n\n else:\n cases = {\n \"RGB\": {\"YCbCr\": self._rgb2ycbcr,\n \"L\": self._rgb2l,\n \"P\": self._to_p},\n \"RGBA\": {\"YCbCrA\": self._rgb2ycbcr,\n \"LA\": self._rgb2l,\n \"PA\": self._to_p},\n \"YCbCr\": {\"RGB\": self._ycbcr2rgb,\n \"L\": self._ycbcr2l,\n \"P\": self._to_p},\n \"YCbCrA\": {\"RGBA\": self._ycbcr2rgb,\n \"LA\": self._ycbcr2l,\n \"PA\": self._to_p},\n \"L\": {\"RGB\": self._l2rgb,\n \"YCbCr\": self._l2ycbcr,\n \"P\": self._to_p},\n \"LA\": {\"RGBA\": self._l2rgb,\n \"YCbCrA\": self._l2ycbcr,\n \"PA\": self._to_p},\n \"P\": {\"RGB\": self._from_p,\n \"YCbCr\": self._from_p,\n \"L\": self._from_p},\n \"PA\": {\"RGBA\": self._from_p,\n \"YCbCrA\": self._from_p,\n \"LA\": self._from_p}}\n try:\n cases[self.mode][mode](mode)\n except KeyError:\n raise ValueError(\"Conversion from %s to %s not implemented !\"\n % (self.mode, mode))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef clip(self, channels=True):\n if not isinstance(channels, (tuple, list)):\n channels = [channels] * len(self.channels)\n\n for i in 
range(len(self.channels)):\n if channels[i]:\n self.channels[i] = np.ma.clip(self.channels[i], 0.0, 1.0)", "response": "Limit the values of the array to the default [ 0 1 ) range."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef replace_luminance(self, luminance):\n if self.is_empty():\n return\n\n if luminance.shape != self.channels[0].shape:\n if ((luminance.shape[0] * 1.0 / luminance.shape[1]) ==\n (self.channels[0].shape[0] * 1.0 / self.channels[0].shape[1])):\n if luminance.shape[0] > self.channels[0].shape[0]:\n self.resize(luminance.shape)\n else:\n raise NameError(\"Luminance smaller than the image !\")\n else:\n raise NameError(\"Not the good shape !\")\n\n mode = self.mode\n if mode.endswith(\"A\"):\n self.convert(\"YCbCrA\")\n self.channels[0] = luminance\n self.convert(mode)\n else:\n self.convert(\"YCbCr\")\n self.channels[0] = luminance\n self.convert(mode)", "response": "Replace the Y channel of the image by the array luminance."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nenhances the current image with the parameters in the order they appear in.", "response": "def enhance(self, inverse=False, gamma=1.0, stretch=\"no\",\n stretch_parameters=None, **kwargs):\n \"\"\"Image enhancement function. 
It applies **in this order** inversion,\n gamma correction, and stretching to the current image, with parameters\n *inverse* (see :meth:`Image.invert`), *gamma* (see\n :meth:`Image.gamma`), and *stretch* (see :meth:`Image.stretch`).\n \"\"\"\n self.invert(inverse)\n if stretch_parameters is None:\n stretch_parameters = {}\n\n stretch_parameters.update(kwargs)\n self.stretch(stretch, **stretch_parameters)\n self.gamma(gamma)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef gamma(self, gamma=1.0):\n\n if(isinstance(gamma, (list, tuple, set)) and\n len(gamma) != len(self.channels)):\n raise ValueError(\"Number of channels and gamma components differ.\")\n if isinstance(gamma, (tuple, list)):\n gamma_list = list(gamma)\n else:\n gamma_list = [gamma] * len(self.channels)\n for i in range(len(self.channels)):\n gamma = float(gamma_list[i])\n if gamma < 0:\n raise ValueError(\"Gamma correction must be a positive number.\")\n logger.debug(\"Applying gamma %f\", gamma)\n if gamma == 1.0:\n continue\n\n if isinstance(self.channels[i], np.ma.core.MaskedArray):\n if ne:\n self.channels[i] = np.ma.array(\n ne.evaluate(\"data ** (1.0 / gamma)\",\n local_dict={\"data\": self.channels[i].data,\n 'gamma': gamma}),\n mask=self.channels[i].mask,\n copy=False)\n else:\n self.channels[i] = np.ma.array(self.channels[i].data **\n (1.0 / gamma),\n mask=self.channels[i].mask,\n copy=False)\n else:\n self.channels[i] = np.where(self.channels[i] >= 0,\n self.channels[i] **\n (1.0 / gamma),\n self.channels[i])", "response": "Apply gamma correction to the channels of the image."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\napply stretching to the current image.", "response": "def stretch(self, stretch=\"crude\", **kwargs):\n \"\"\"Apply stretching to the current image. The value of *stretch* sets\n the type of stretching applied. 
The values \"histogram\", \"linear\",\n \"crude\" (or \"crude-stretch\") perform respectively histogram\n equalization, contrast stretching (with 5% cutoff on both sides), and\n contrast stretching without cutoff. The value \"logarithmic\" or \"log\"\n will do a logarithmic enhancement towards white. If a tuple or a list\n of two values is given as input, then a contrast stretching is performed\n with the values as cutoff. These values should be normalized in the\n range [0.0,1.0].\n \"\"\"\n\n logger.debug(\"Applying stretch %s with parameters %s\",\n stretch, str(kwargs))\n\n ch_len = len(self.channels)\n if self.mode.endswith(\"A\"):\n ch_len -= 1\n\n if((isinstance(stretch, tuple) or\n isinstance(stretch, list))):\n if len(stretch) == 2:\n for i in range(ch_len):\n self.stretch_linear(i, cutoffs=stretch, **kwargs)\n else:\n raise ValueError(\n \"Stretch tuple must have exactly two elements\")\n elif stretch == \"linear\":\n for i in range(ch_len):\n self.stretch_linear(i, **kwargs)\n elif stretch == \"histogram\":\n for i in range(ch_len):\n self.stretch_hist_equalize(i, **kwargs)\n elif stretch in [\"crude\", \"crude-stretch\"]:\n for i in range(ch_len):\n self.crude_stretch(i, **kwargs)\n elif stretch in [\"log\", \"logarithmic\"]:\n for i in range(ch_len):\n self.stretch_logarithmic(i, **kwargs)\n elif stretch == \"no\":\n return\n elif isinstance(stretch, str):\n raise ValueError(\"Stretching method %s not recognized.\" % stretch)\n else:\n raise TypeError(\"Stretch parameter must be a string or a tuple.\")"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef invert(self, invert=True):\n if(isinstance(invert, (tuple, list)) and\n len(self.channels) != len(invert)):\n raise ValueError(\n \"Number of channels and invert components differ.\")\n\n logger.debug(\"Applying invert with parameters %s\", str(invert))\n if isinstance(invert, (tuple, list)):\n for i, chn in enumerate(self.channels):\n if invert[i]:\n 
self.channels[i] = 1 - chn\n elif invert:\n for i, chn in enumerate(self.channels):\n self.channels[i] = 1 - chn", "response": "Inverts all the channels of a image according to invert."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nstretch the current image s colors by performing histogram equalization on channel ch_nb.", "response": "def stretch_hist_equalize(self, ch_nb):\n \"\"\"Stretch the current image's colors by performing histogram\n equalization on channel *ch_nb*.\n \"\"\"\n logger.info(\"Perform a histogram equalized contrast stretch.\")\n\n if(self.channels[ch_nb].size ==\n np.ma.count_masked(self.channels[ch_nb])):\n logger.warning(\"Nothing to stretch !\")\n return\n\n arr = self.channels[ch_nb]\n\n nwidth = 2048.0\n\n carr = arr.compressed()\n\n cdf = np.arange(0.0, 1.0, 1 / nwidth)\n logger.debug(\"Make histogram bins having equal amount of data, \" +\n \"using numpy percentile function:\")\n bins = np.percentile(carr, list(cdf * 100))\n\n res = np.ma.empty_like(arr)\n res.mask = np.ma.getmaskarray(arr)\n res[~res.mask] = np.interp(carr, bins, cdf)\n\n self.channels[ch_nb] = res"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nmoves data into range 1 to factor and do a normalized logarithmic contrast stretch.", "response": "def stretch_logarithmic(self, ch_nb, factor=100.):\n \"\"\"Move data into range [1:factor] and do a normalized logarithmic\n enhancement.\n \"\"\"\n logger.debug(\"Perform a logarithmic contrast stretch.\")\n if ((self.channels[ch_nb].size ==\n np.ma.count_masked(self.channels[ch_nb])) or\n (self.channels[ch_nb].min() == self.channels[ch_nb].max())):\n logger.warning(\"Nothing to stretch !\")\n return\n\n crange = (0., 1.0)\n\n arr = self.channels[ch_nb]\n b__ = float(crange[1] - crange[0]) / np.log(factor)\n c__ = float(crange[0])\n slope = (factor - 1.) / float(arr.max() - arr.min())\n arr = 1. 
+ (arr - arr.min()) * slope\n arr = c__ + b__ * np.log(arr)\n self.channels[ch_nb] = arr"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef stretch_linear(self, ch_nb, cutoffs=(0.005, 0.005)):\n logger.debug(\"Perform a linear contrast stretch.\")\n\n if((self.channels[ch_nb].size ==\n np.ma.count_masked(self.channels[ch_nb])) or\n self.channels[ch_nb].min() == self.channels[ch_nb].max()):\n logger.warning(\"Nothing to stretch !\")\n return\n\n arr = self.channels[ch_nb]\n carr = arr.compressed()\n\n logger.debug(\"Calculate the histogram percentiles: \")\n logger.debug(\"Left and right percentiles: \" +\n str(cutoffs[0] * 100) + \" \" + str(cutoffs[1] * 100))\n\n left, right = np.percentile(\n carr, [cutoffs[0] * 100, 100. - cutoffs[1] * 100])\n\n delta_x = (right - left)\n logger.debug(\"Interval: left=%f, right=%f width=%f\",\n left, right, delta_x)\n\n if delta_x > 0.0:\n self.channels[ch_nb] = np.ma.array((arr - left) / delta_x,\n mask=arr.mask)\n else:\n logger.warning(\"Unable to make a contrast stretch!\")", "response": "Stretch linearly the contrast of the current image on channel ch_nb using cutoffs for left and right trimming."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nperforms simple linear stretching on the channel ch_nb * and normalize to the [ 0 1 ) range.", "response": "def crude_stretch(self, ch_nb, min_stretch=None, max_stretch=None):\n \"\"\"Perform simple linear stretching (without any cutoff) on the channel\n *ch_nb* of the current image and normalize to the [0,1] range.\"\"\"\n\n if min_stretch is None:\n min_stretch = self.channels[ch_nb].min()\n if max_stretch is None:\n max_stretch = self.channels[ch_nb].max()\n\n if isinstance(min_stretch, (list, tuple)):\n min_stretch = min_stretch[ch_nb]\n if isinstance(max_stretch, (list, tuple)):\n max_stretch = max_stretch[ch_nb]\n\n if((not self.channels[ch_nb].mask.all()) and\n abs(max_stretch 
- min_stretch) > 0):\n stretched = self.channels[ch_nb].data.astype(np.float)\n stretched -= min_stretch\n stretched /= max_stretch - min_stretch\n self.channels[ch_nb] = np.ma.array(stretched,\n mask=self.channels[ch_nb].mask,\n copy=False)\n else:\n logger.warning(\"Nothing to stretch !\")"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef colorize(self, colormap):\n\n if self.mode not in (\"L\", \"LA\"):\n raise ValueError(\"Image should be grayscale to colorize\")\n if self.mode == \"LA\":\n alpha = self.channels[1]\n else:\n alpha = None\n self.channels = colormap.colorize(self.channels[0])\n if alpha is not None:\n self.channels.append(alpha)\n self.mode = \"RGBA\"\n else:\n self.mode = \"RGB\"", "response": "Colorize the current image using the given colormap. Works only on L or LA images."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconverting lab to XYZ", "response": "def lab2xyz(l__, a__, b__):\n \"\"\"Convert L*a*b* to XYZ, L*a*b* expressed within [0, 1].\n \"\"\"\n\n\n def f_inv(arr):\n \"\"\"Inverse of the f function.\n \"\"\"\n return np.where(arr > 6.0/29.0,\n arr ** 3,\n 3 * (6.0 / 29.0) * (6.0 / 29.0) * (arr - 4.0 / 29.0))\n\n new_l = (l__ + 16.0) / 116.0\n x__ = 95.047 * f_inv(new_l + a__ / 500.0)\n y__ = 100.0 * f_inv(new_l)\n z__ = 108.883 * f_inv(new_l - b__ / 200.0)\n\n return x__, y__, z__"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nconverts XYZ to L a b.", "response": "def xyz2lab(x__, y__, z__):\n \"\"\"Convert XYZ to L*a*b*.\n \"\"\"\n\n def f__(arr):\n \"\"\"f__ function\n \"\"\"\n\n return np.where(arr > 216.0 / 24389.0,\n arr ** (1.0/3.0),\n (1.0 / 3.0) * (29.0 / 6.0) * (29.0 / 6.0) * arr\n + 4.0 / 29.0)\n fy_ = f__(y__ / 100.0)\n l__ = 116 * fy_ - 16\n a__ = 500.0 * (f__(x__ / 95.047) - fy_)\n b__ = 200.0 * (fy_ - f__(z__ / 108.883))\n return l__, a__, b__"} {"SOURCE": "codesearchnet", "instruction": 
"Can you generate the documentation for the following Python 3 function\ndef lab2hcl(l__, a__, b__):\n l2_ = (l__ - 9) / 61.0\n r__ = np.sqrt(a__*a__ + b__*b__)\n s__ = r__ / (l2_ * 311 + 125)\n angle = np.arctan2(a__, b__)\n c__ = np.rad2deg(np.pi / 3 - angle)%360\n return c__, s__, l2_", "response": "L a b to HCL"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef purge_old_user_tasks():\n limit = now() - settings.USER_TASKS_MAX_AGE\n # UserTaskArtifacts will also be removed via deletion cascading\n UserTaskStatus.objects.filter(created__lt=limit).delete()", "response": "Delete any UserTaskStatus records older than settings. USER_TASKS_MAX_AGE."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef arguments_as_dict(cls, *args, **kwargs):\n all_args = (None, ) + args\n return inspect.getcallargs(cls.run, *all_args, **kwargs)", "response": "Generate the arguments dictionary provided to the run method."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the UserTaskStatus model instance for this UserTaskMixin.", "response": "def status(self):\n \"\"\"\n Get the :py:class:`~user_tasks.models.UserTaskStatus` model instance for this UserTaskMixin.\n \"\"\"\n task_id = self.request.id\n try:\n # Most calls are for existing objects, don't waste time\n # preparing creation arguments unless necessary\n return UserTaskStatus.objects.get(task_id=task_id)\n except UserTaskStatus.DoesNotExist:\n # Probably an eager task that skipped the before_task_publish\n # signal (or an atomic view where the new record hasn't been\n # committed yet). 
Create a record for it.\n arguments_dict = self.arguments_as_dict(*self.request.args, **self.request.kwargs)\n name = self.generate_name(arguments_dict)\n task_class = '.'.join([self.__class__.__module__, self.__class__.__name__])\n total_steps = self.calculate_total_steps(arguments_dict)\n user_id = arguments_dict['user_id']\n # Use get_or_create() again just in case another process created it in the meantime\n return UserTaskStatus.objects.get_or_create(\n task_id=task_id, defaults={'user_id': user_id, 'name': name, 'task_class': task_class,\n 'total_steps': total_steps})[0]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef color_interp(data):\n from rasterio.enums import ColorInterp as ci\n modes = {'L': [ci.gray],\n 'LA': [ci.gray, ci.alpha],\n 'YCbCr': [ci.Y, ci.Cb, ci.Cr],\n 'YCbCrA': [ci.Y, ci.Cb, ci.Cr, ci.alpha]}\n\n try:\n mode = ''.join(data['bands'].values)\n return modes[mode]\n except KeyError:\n colors = {'R': ci.red,\n 'G': ci.green,\n 'B': ci.blue,\n 'A': ci.alpha,\n 'C': ci.cyan,\n 'M': ci.magenta,\n 'Y': ci.yellow,\n 'H': ci.hue,\n 'S': ci.saturation,\n 'L': ci.lightness,\n 'K': ci.black,\n }\n return [colors[band] for band in data['bands'].values]", "response": "Get the color interpretation for this image."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nstandardizes dimensions to bands y and x.", "response": "def _correct_dims(data):\n \"\"\"Standardize dimensions to bands, y, and x.\"\"\"\n if not hasattr(data, 'dims'):\n raise TypeError(\"Data must have a 'dims' attribute.\")\n\n # doesn't actually copy the data underneath\n # we don't want our operations to change the user's data\n data = data.copy()\n\n if 'y' not in data.dims or 'x' not in data.dims:\n if data.ndim != 2:\n raise ValueError(\"Data must have a 'y' and 'x' dimension\")\n\n # rename dimensions so we can use them\n # don't rename 'x' or 'y' if they already exist\n if 'y' 
not in data.dims:\n # find a dimension that isn't 'x'\n old_dim = [d for d in data.dims if d != 'x'][0]\n data = data.rename({old_dim: 'y'})\n if 'x' not in data.dims:\n # find a dimension that isn't 'y'\n old_dim = [d for d in data.dims if d != 'y'][0]\n data = data.rename({old_dim: 'x'})\n\n if \"bands\" not in data.dims:\n if data.ndim <= 2:\n data = data.expand_dims('bands')\n data['bands'] = ['L']\n else:\n raise ValueError(\"No 'bands' dimension provided.\")\n\n return data"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef save(self, filename, fformat=None, fill_value=None, compute=True,\n keep_palette=False, cmap=None, **format_kwargs):\n \"\"\"Save the image to the given *filename*.\n\n Args:\n filename (str): Output filename\n fformat (str): File format of output file (optional). Can be\n one of many image formats supported by the\n `rasterio` or `PIL` libraries ('jpg', 'png',\n 'tif'). By default this is determined by the\n extension of the provided filename.\n If the format allows, geographical information will\n be saved to the ouput file, in the form of grid\n mapping or ground control points.\n fill_value (float): Replace invalid data values with this value\n and do not produce an Alpha band. Default\n behavior is to create an alpha band.\n compute (bool): If True (default) write the data to the file\n immediately. If False the return value is either\n a `dask.Delayed` object or a tuple of\n ``(source, target)`` to be passed to\n `dask.array.store`.\n keep_palette (bool): Saves the palettized version of the image if\n set to True. False by default.\n cmap (Colormap or dict): Colormap to be applied to the image when\n saving with rasterio, used with\n keep_palette=True. 
Should be uint8.\n format_kwargs: Additional format options to pass to `rasterio`\n or `PIL` saving methods.\n\n Returns:\n Either `None` if `compute` is True or a `dask.Delayed` object or\n ``(source, target)`` pair to be passed to `dask.array.store`.\n If compute is False the return value depends on format and how\n the image backend is used. If ``(source, target)`` is provided\n then target is an open file-like object that must be closed by\n the caller.\n\n \"\"\"\n fformat = fformat or os.path.splitext(filename)[1][1:4]\n if fformat in ('tif', 'jp2') and rasterio:\n return self.rio_save(filename, fformat=fformat,\n fill_value=fill_value, compute=compute,\n keep_palette=keep_palette, cmap=cmap,\n **format_kwargs)\n else:\n return self.pil_save(filename, fformat, fill_value,\n compute=compute, **format_kwargs)", "response": "Save the image to the given filename."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsaves the image using rasterio.", "response": "def rio_save(self, filename, fformat=None, fill_value=None,\n dtype=np.uint8, compute=True, tags=None,\n keep_palette=False, cmap=None,\n **format_kwargs):\n \"\"\"Save the image using rasterio.\n\n Overviews can be added to the file using the `overviews` kwarg, eg::\n\n img.rio_save('myfile.tif', overviews=[2, 4, 8, 16])\n\n \"\"\"\n fformat = fformat or os.path.splitext(filename)[1][1:4]\n drivers = {'jpg': 'JPEG',\n 'png': 'PNG',\n 'tif': 'GTiff',\n 'jp2': 'JP2OpenJPEG'}\n driver = drivers.get(fformat, fformat)\n\n if tags is None:\n tags = {}\n\n data, mode = self.finalize(fill_value, dtype=dtype,\n keep_palette=keep_palette, cmap=cmap)\n data = data.transpose('bands', 'y', 'x')\n data.attrs = self.data.attrs\n\n crs = None\n gcps = None\n transform = None\n if driver in ['GTiff', 'JP2OpenJPEG']:\n if not np.issubdtype(data.dtype, np.floating):\n format_kwargs.setdefault('compress', 'DEFLATE')\n photometric_map = {\n 'RGB': 'RGB',\n 'RGBA': 'RGB',\n 'CMYK': 'CMYK',\n 
'CMYKA': 'CMYK',\n 'YCBCR': 'YCBCR',\n 'YCBCRA': 'YCBCR',\n }\n if mode.upper() in photometric_map:\n format_kwargs.setdefault('photometric',\n photometric_map[mode.upper()])\n\n try:\n crs = rasterio.crs.CRS(data.attrs['area'].proj_dict)\n west, south, east, north = data.attrs['area'].area_extent\n height, width = data.sizes['y'], data.sizes['x']\n transform = rasterio.transform.from_bounds(west, south,\n east, north,\n width, height)\n\n except KeyError: # No area\n logger.info(\"Couldn't create geotransform\")\n except AttributeError:\n try:\n gcps = data.attrs['area'].lons.attrs['gcps']\n crs = data.attrs['area'].lons.attrs['crs']\n except KeyError:\n logger.info(\"Couldn't create geotransform\")\n\n if \"start_time\" in data.attrs:\n stime = data.attrs['start_time']\n stime_str = stime.strftime(\"%Y:%m:%d %H:%M:%S\")\n tags.setdefault('TIFFTAG_DATETIME', stime_str)\n elif driver == 'JPEG' and 'A' in mode:\n raise ValueError('JPEG does not support alpha')\n\n # FIXME add metadata\n r_file = RIOFile(filename, 'w', driver=driver,\n width=data.sizes['x'], height=data.sizes['y'],\n count=data.sizes['bands'],\n dtype=dtype,\n nodata=fill_value,\n crs=crs,\n transform=transform,\n gcps=gcps,\n **format_kwargs)\n r_file.open()\n if not keep_palette:\n r_file.colorinterp = color_interp(data)\n r_file.rfile.update_tags(**tags)\n\n if keep_palette and cmap is not None:\n if data.dtype != 'uint8':\n raise ValueError('Rasterio only supports 8-bit colormaps')\n try:\n from trollimage.colormap import Colormap\n cmap = cmap.to_rio() if isinstance(cmap, Colormap) else cmap\n r_file.rfile.write_colormap(1, cmap)\n except AttributeError:\n raise ValueError(\"Colormap is not formatted correctly\")\n\n if compute:\n # write data to the file now\n res = da.store(data.data, r_file)\n r_file.close()\n return res\n # provide the data object and the opened file so the caller can\n # store them when they would like. 
Caller is responsible for\n # closing the file\n return data.data, r_file"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef pil_save(self, filename, fformat=None, fill_value=None,\n compute=True, **format_kwargs):\n \"\"\"Save the image to the given *filename* using PIL.\n\n For now, the compression level [0-9] is ignored, due to PIL's lack of\n support. See also :meth:`save`.\n \"\"\"\n fformat = fformat or os.path.splitext(filename)[1][1:4]\n fformat = check_image_format(fformat)\n\n if fformat == 'png':\n # Take care of GeoImage.tags (if any).\n format_kwargs['pnginfo'] = self._pngmeta()\n\n img = self.pil_image(fill_value, compute=False)\n delay = img.save(filename, fformat, **format_kwargs)\n if compute:\n return delay.compute()\n return delay", "response": "Save the image to the given filename using PIL. save."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate an alpha band DataArray object.", "response": "def _create_alpha(self, data, fill_value=None):\n \"\"\"Create an alpha band DataArray object.\n\n If `fill_value` is provided and input data is an integer type\n then it is used to determine invalid \"null\" pixels instead of\n xarray's `isnull` and `notnull` methods.\n\n The returned array is 1 where data is valid, 0 where invalid.\n\n \"\"\"\n not_alpha = [b for b in data.coords['bands'].values if b != 'A']\n null_mask = data.sel(bands=not_alpha)\n if np.issubdtype(data.dtype, np.integer) and fill_value is not None:\n null_mask = null_mask != fill_value\n else:\n null_mask = null_mask.notnull()\n # if any of the bands are valid, we don't want transparency\n null_mask = null_mask.any(dim='bands')\n null_mask = null_mask.expand_dims('bands')\n null_mask['bands'] = ['A']\n # match data dtype\n return null_mask"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _add_alpha(self, 
data, alpha=None):\n null_mask = alpha if alpha is not None else self._create_alpha(data)\n # if we are using integer data, then alpha needs to be min-int to max-int\n # otherwise for floats we want 0 to 1\n if np.issubdtype(data.dtype, np.integer):\n # xarray sometimes upcasts this calculation, so cast again\n null_mask = self._scale_to_dtype(null_mask, data.dtype).astype(data.dtype)\n data = xr.concat([data, null_mask], dim=\"bands\")\n return data", "response": "Create an alpha channel and concatenate it to the provided data."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nscales provided data to dtype range assuming a 0 - 1 range.", "response": "def _scale_to_dtype(self, data, dtype):\n \"\"\"Scale provided data to dtype range assuming a 0-1 range.\n\n Float input data is assumed to be normalized to a 0 to 1 range.\n Integer input data is not scaled, only clipped. A float output\n type is not scaled since both outputs and inputs are assumed to\n be in the 0-1 range already.\n\n \"\"\"\n if np.issubdtype(dtype, np.integer):\n if np.issubdtype(data, np.integer):\n # preserve integer data type\n data = data.clip(np.iinfo(dtype).min, np.iinfo(dtype).max)\n else:\n # scale float data (assumed to be 0 to 1) to full integer space\n dinfo = np.iinfo(dtype)\n data = data.clip(0, 1) * (dinfo.max - dinfo.min) + dinfo.min\n data = data.round()\n return data"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _check_modes(self, modes):\n if not isinstance(modes, (tuple, list, set)):\n modes = [modes]\n if self.mode not in modes:\n raise ValueError(\"Image not in suitable mode, expected: %s, got: %s\" % (modes, self.mode))", "response": "Check that the image is in one of the given modes raise an exception otherwise."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconverting the image from P or PA to RGB or RGBA.", "response": 
"def _from_p(self, mode):\n \"\"\"Convert the image from P or PA to RGB or RGBA.\"\"\"\n self._check_modes((\"P\", \"PA\"))\n\n if not self.palette:\n raise RuntimeError(\"Can't convert palettized image, missing palette.\")\n pal = np.array(self.palette)\n pal = da.from_array(pal, chunks=pal.shape)\n\n if pal.shape[1] == 4:\n # colormap's alpha overrides data alpha\n mode = \"RGBA\"\n alpha = None\n elif self.mode.endswith(\"A\"):\n # add a new/fake 'bands' dimension to the end\n alpha = self.data.sel(bands=\"A\").data[..., None]\n mode = mode + \"A\" if not mode.endswith(\"A\") else mode\n else:\n alpha = None\n\n flat_indexes = self.data.sel(bands='P').data.ravel().astype('int64')\n dim_sizes = ((key, val) for key, val in self.data.sizes.items() if key != 'bands')\n dims, new_shape = zip(*dim_sizes)\n dims = dims + ('bands',)\n new_shape = new_shape + (pal.shape[1],)\n new_data = pal[flat_indexes].reshape(new_shape)\n coords = dict(self.data.coords)\n coords[\"bands\"] = list(mode)\n\n if alpha is not None:\n new_arr = da.concatenate((new_data, alpha), axis=-1)\n data = xr.DataArray(new_arr, coords=coords, attrs=self.data.attrs, dims=dims)\n else:\n data = xr.DataArray(new_data, coords=coords, attrs=self.data.attrs, dims=dims)\n\n return data"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _l2rgb(self, mode):\n self._check_modes((\"L\", \"LA\"))\n\n bands = [\"L\"] * 3\n if mode[-1] == \"A\":\n bands.append(\"A\")\n data = self.data.sel(bands=bands)\n data[\"bands\"] = list(mode)\n return data", "response": "Convert from L to RGB."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nwrapping around finalize method for backwards compatibility.", "response": "def _finalize(self, fill_value=None, dtype=np.uint8, keep_palette=False, cmap=None):\n \"\"\"Wrapper around 'finalize' method for backwards compatibility.\"\"\"\n import warnings\n warnings.warn(\"'_finalize' is 
deprecated, use 'finalize' instead.\",\n DeprecationWarning)\n return self.finalize(fill_value, dtype, keep_palette, cmap)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfinalize the image to be written to an output file.", "response": "def finalize(self, fill_value=None, dtype=np.uint8, keep_palette=False, cmap=None):\n \"\"\"Finalize the image to be written to an output file.\n\n This adds an alpha band or fills data with a fill_value (if specified).\n It also scales float data to the output range of the data type (0-255\n for uint8, default). For integer input data this method assumes the\n data is already scaled to the proper desired range. It will still fill\n in invalid values and add an alpha band if needed. Integer input\n data's fill value is determined by a special ``_FillValue`` attribute\n in the ``DataArray`` ``.attrs`` dictionary.\n\n \"\"\"\n if keep_palette and not self.mode.startswith('P'):\n keep_palette = False\n\n if not keep_palette:\n if self.mode == \"P\":\n return self.convert(\"RGB\").finalize(fill_value=fill_value, dtype=dtype,\n keep_palette=keep_palette, cmap=cmap)\n if self.mode == \"PA\":\n return self.convert(\"RGBA\").finalize(fill_value=fill_value, dtype=dtype,\n keep_palette=keep_palette, cmap=cmap)\n\n if np.issubdtype(dtype, np.floating) and fill_value is None:\n logger.warning(\"Image with floats cannot be transparent, so \"\n \"setting fill_value to 0\")\n fill_value = 0\n\n final_data = self.data\n # if the data are integers then this fill value will be used to check for invalid values\n ifill = final_data.attrs.get('_FillValue') if np.issubdtype(final_data, np.integer) else None\n if not keep_palette:\n if fill_value is None and not self.mode.endswith('A'):\n # We don't have a fill value or an alpha, let's add an alpha\n alpha = self._create_alpha(final_data, fill_value=ifill)\n final_data = self._scale_to_dtype(final_data, dtype).astype(dtype)\n final_data = self._add_alpha(final_data, 
alpha=alpha)\n else:\n # scale float data to the proper dtype\n # this method doesn't cast yet so that we can keep track of NULL values\n final_data = self._scale_to_dtype(final_data, dtype)\n # Add fill_value after all other calculations have been done to\n # make sure it is not scaled for the data type\n if ifill is not None and fill_value is not None:\n # cast fill value to output type so we don't change data type\n fill_value = dtype(fill_value)\n # integer fields have special fill values\n final_data = final_data.where(final_data != ifill, dtype(fill_value))\n elif fill_value is not None:\n final_data = final_data.fillna(dtype(fill_value))\n\n final_data = final_data.astype(dtype)\n final_data.attrs = self.data.attrs\n return final_data, ''.join(final_data['bands'].values)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a PIL image from the current image.", "response": "def pil_image(self, fill_value=None, compute=True):\n \"\"\"Return a PIL image from the current image.\n\n Args:\n fill_value (int or float): Value to use for NaN null values.\n See :meth:`~trollimage.xrimage.XRImage.finalize` for more\n info.\n compute (bool): Whether to return a fully computed PIL.Image\n object (True) or return a dask Delayed object representing\n the Image (False). This is True by default.\n\n \"\"\"\n channels, mode = self.finalize(fill_value)\n res = channels.transpose('y', 'x', 'bands')\n img = dask.delayed(PILImage.fromarray)(np.squeeze(res.data), mode)\n if compute:\n img = img.compute()\n return img"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef xrify_tuples(self, tup):\n return xr.DataArray(tup,\n dims=['bands'],\n coords={'bands': self.data['bands']})", "response": "Make xarray. 
DataArray from tuple."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\napply gamma correction to the channels of the image.", "response": "def gamma(self, gamma=1.0):\n \"\"\"Apply gamma correction to the channels of the image.\n\n If *gamma* is a\n tuple, then it should have as many elements as the channels of the\n image, and the gamma correction is applied elementwise. If *gamma* is a\n number, the same gamma correction is applied on every channel, if there\n are several channels in the image. The behaviour of :func:`gamma` is\n undefined outside the normal [0,1] range of the channels.\n \"\"\"\n if isinstance(gamma, (list, tuple)):\n gamma = self.xrify_tuples(gamma)\n elif gamma == 1.0:\n return\n\n logger.debug(\"Applying gamma %s\", str(gamma))\n attrs = self.data.attrs\n self.data = self.data.clip(min=0)\n self.data **= 1.0 / gamma\n self.data.attrs = attrs"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef stretch(self, stretch=\"crude\", **kwargs):\n logger.debug(\"Applying stretch %s with parameters %s\",\n stretch, str(kwargs))\n\n # FIXME: do not apply stretch to alpha channel\n\n if isinstance(stretch, (tuple, list)):\n if len(stretch) == 2:\n self.stretch_linear(cutoffs=stretch)\n else:\n raise ValueError(\n \"Stretch tuple must have exactly two elements\")\n elif stretch == \"linear\":\n self.stretch_linear(**kwargs)\n elif stretch == \"histogram\":\n self.stretch_hist_equalize(**kwargs)\n elif stretch in [\"crude\", \"crude-stretch\"]:\n self.crude_stretch(**kwargs)\n elif stretch in [\"log\", \"logarithmic\"]:\n self.stretch_logarithmic(**kwargs)\n elif stretch == \"no\":\n return\n elif isinstance(stretch, str):\n raise ValueError(\"Stretching method %s not recognized.\" % stretch)\n else:\n raise TypeError(\"Stretch parameter must be a string or a tuple.\")", "response": "Apply stretching to the current image."} {"SOURCE": "codesearchnet", "instruction": "Here you have 
a function in Python 3, explain what it does\ndef _compute_quantile(data, dims, cutoffs):\n # numpy doesn't get a 'quantile' function until 1.15\n # for better backwards compatibility we use xarray's version\n data_arr = xr.DataArray(data, dims=dims)\n # delayed will provide us the fully computed xarray with ndarray\n left, right = data_arr.quantile([cutoffs[0], 1. - cutoffs[1]], dim=['x', 'y'])\n logger.debug(\"Interval: left=%s, right=%s\", str(left), str(right))\n return left.data, right.data", "response": "Helper method for stretch_linear."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef stretch_linear(self, cutoffs=(0.005, 0.005)):\n logger.debug(\"Perform a linear contrast stretch.\")\n\n logger.debug(\"Calculate the histogram quantiles: \")\n logger.debug(\"Left and right quantiles: \" +\n str(cutoffs[0]) + \" \" + str(cutoffs[1]))\n\n cutoff_type = np.float64\n # numpy percentile (which quantile calls) returns 64-bit floats\n # unless the value is a higher order float\n if np.issubdtype(self.data.dtype, np.floating) and \\\n np.dtype(self.data.dtype).itemsize > 8:\n cutoff_type = self.data.dtype\n left, right = dask.delayed(self._compute_quantile, nout=2)(self.data.data, self.data.dims, cutoffs)\n left_data = da.from_delayed(left,\n shape=(self.data.sizes['bands'],),\n dtype=cutoff_type)\n left = xr.DataArray(left_data, dims=('bands',),\n coords={'bands': self.data['bands']})\n right_data = da.from_delayed(right,\n shape=(self.data.sizes['bands'],),\n dtype=cutoff_type)\n right = xr.DataArray(right_data, dims=('bands',),\n coords={'bands': self.data['bands']})\n\n self.crude_stretch(left, right)", "response": "Stretch linearly the contrast of the current image."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef crude_stretch(self, min_stretch=None, max_stretch=None):\n if min_stretch is None:\n non_band_dims = tuple(x for x in 
self.data.dims if x != 'bands')\n min_stretch = self.data.min(dim=non_band_dims)\n if max_stretch is None:\n non_band_dims = tuple(x for x in self.data.dims if x != 'bands')\n max_stretch = self.data.max(dim=non_band_dims)\n\n if isinstance(min_stretch, (list, tuple)):\n min_stretch = self.xrify_tuples(min_stretch)\n if isinstance(max_stretch, (list, tuple)):\n max_stretch = self.xrify_tuples(max_stretch)\n\n delta = (max_stretch - min_stretch)\n if isinstance(delta, xr.DataArray):\n # fillna if delta is NaN\n scale_factor = (1.0 / delta).fillna(0)\n else:\n scale_factor = 1.0 / delta\n attrs = self.data.attrs\n self.data -= min_stretch\n self.data *= scale_factor\n self.data.attrs = attrs", "response": "Perform simple linear stretching on the current image."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef stretch_hist_equalize(self, approximate=False):\n logger.info(\"Perform a histogram equalized contrast stretch.\")\n\n nwidth = 2048.\n logger.debug(\"Make histogram bins having equal amount of data, \" +\n \"using numpy percentile function:\")\n\n def _band_hist(band_data):\n cdf = da.arange(0., 1., 1. 
/ nwidth, chunks=nwidth)\n if approximate:\n # need a 1D array\n flat_data = band_data.ravel()\n # replace with nanpercentile in the future, if available\n # dask < 0.17 returns all NaNs for this\n bins = da.percentile(flat_data[da.notnull(flat_data)],\n cdf * 100.)\n else:\n bins = dask.delayed(np.nanpercentile)(band_data, cdf * 100.)\n bins = da.from_delayed(bins, shape=(nwidth,), dtype=cdf.dtype)\n res = dask.delayed(np.interp)(band_data, bins, cdf)\n res = da.from_delayed(res, shape=band_data.shape,\n dtype=band_data.dtype)\n return res\n\n band_results = []\n for band in self.data['bands'].values:\n if band == 'A':\n continue\n band_data = self.data.sel(bands=band)\n res = _band_hist(band_data.data)\n band_results.append(res)\n\n if 'A' in self.data.coords['bands'].values:\n band_results.append(self.data.sel(bands='A'))\n self.data.data = da.stack(band_results,\n axis=self.data.dims.index('bands'))", "response": "Stretch the current image s colors through histogram equalization."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef stretch_logarithmic(self, factor=100.):\n logger.debug(\"Perform a logarithmic contrast stretch.\")\n crange = (0., 1.0)\n\n b__ = float(crange[1] - crange[0]) / np.log(factor)\n c__ = float(crange[0])\n\n def _band_log(arr):\n slope = (factor - 1.) / float(arr.max() - arr.min())\n arr = 1. 
+ (arr - arr.min()) * slope\n arr = c__ + b__ * da.log(arr)\n return arr\n\n band_results = []\n for band in self.data['bands'].values:\n if band == 'A':\n continue\n band_data = self.data.sel(bands=band)\n res = _band_log(band_data.data)\n band_results.append(res)\n\n if 'A' in self.data.coords['bands'].values:\n band_results.append(self.data.sel(bands='A'))\n self.data.data = da.stack(band_results,\n axis=self.data.dims.index('bands'))", "response": "Move data into range [ 1 factor ) through normalized logarithm."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef stretch_weber_fechner(self, k, s0):\n attrs = self.data.attrs\n self.data = k * xu.log(self.data / s0)\n self.data.attrs = attrs", "response": "Stretch according to the Weber - Fechner law."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef invert(self, invert=True):\n logger.debug(\"Applying invert with parameters %s\", str(invert))\n if isinstance(invert, (tuple, list)):\n invert = self.xrify_tuples(invert)\n offset = invert.astype(int)\n scale = (-1) ** offset\n elif invert:\n offset = 1\n scale = -1\n attrs = self.data.attrs\n self.data = self.data * scale + offset\n self.data.attrs = attrs", "response": "Inverts all the channels of a image according to invert."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nmerges the image with the current image.", "response": "def merge(self, img):\n \"\"\"Use the provided image as background for the current *img* image,\n that is if the current image has missing data.\n \"\"\"\n raise NotImplementedError(\"This method has not be implemented for \"\n \"xarray support.\")\n if self.is_empty():\n raise ValueError(\"Cannot merge an empty image.\")\n\n if self.mode != img.mode:\n raise ValueError(\"Cannot merge image of different modes.\")\n\n selfmask = self.channels[0].mask\n for chn in 
self.channels[1:]:\n selfmask = np.ma.mask_or(selfmask, chn.mask)\n\n for i in range(len(self.channels)):\n self.channels[i] = np.ma.where(selfmask,\n img.channels[i],\n self.channels[i])\n self.channels[i].mask = np.logical_and(selfmask,\n img.channels[i].mask)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreading an environment variable as a path.", "response": "def _path_from_env(variable: str, default: Path) -> Path:\n \"\"\"Read an environment variable as a path.\n\n The environment variable with the specified name is read, and its\n value returned as a path. If the environment variable is not set, or\n set to the empty string, the default value is returned.\n\n Parameters\n ----------\n variable : str\n Name of the environment variable.\n default : Path\n Default value.\n\n Returns\n -------\n Path\n Value from environment or default.\n\n \"\"\"\n # TODO(srstevenson): Use assignment expression in Python 3.8.\n value = os.environ.get(variable)\n if value:\n return Path(value)\n return default"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _paths_from_env(variable: str, default: List[Path]) -> List[Path]:\n # TODO(srstevenson): Use assignment expression in Python 3.8.\n value = os.environ.get(variable)\n if value:\n return [Path(path) for path in value.split(\":\")]\n return default", "response": "Read an environment variable as a list of paths."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nadd a user with password", "response": "def add(self, user, password):\n \"\"\" Adds a user with password \"\"\"\n if self.__contains__(user):\n raise UserExists\n self.new_users[user] = self._encrypt_password(password) + \"\\n\""} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef pop(self, user):\n if not self.__contains__(user):\n raise UserNotExists\n self.new_users.pop(user)", "response": 
"Deletes a user from the set."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nchange the password of a user.", "response": "def change_password(self, user, password):\n \"\"\" Changes user password \"\"\"\n if not self.__contains__(user):\n raise UserNotExists\n self.new_users[user] = self._encrypt_password(password) + \"\\n\""} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _encrypt_password(self, password):\n if self.encryption_mode.lower() == 'crypt':\n return self._crypt_password(password)\n elif self.encryption_mode.lower() == 'md5':\n return self._md5_password(password)\n elif self.encryption_mode.lower() == 'md5-base':\n return self._md5_base_password(password)\n else:\n raise UnknownEncryptionMode(self.encryption_mode)", "response": "encrypt the password for given mode"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding a user to a group", "response": "def add_user(self, user, group):\n \"\"\" Adds user to a group \"\"\"\n if self.is_user_in(user, group):\n raise UserAlreadyInAGroup\n self.new_groups.add(group, user)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef delete_user(self, user, group):\n if not self.__contains__(group):\n raise GroupNotExists\n if not self.is_user_in(user, group):\n raise UserNotInAGroup\n self.new_groups.popvalue(group, user)", "response": "Deletes a user from a group."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef filter_factory(global_conf, **local_conf):\n conf = global_conf.copy()\n conf.update(local_conf)\n\n def auth_filter(app):\n return Swauth(app, conf)\n return auth_filter", "response": "Returns a WSGI filter app for use with paste. 
deploy."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _get_concealed_token(self, token):\n enc_key = \"%s:%s:%s\" % (HASH_PATH_PREFIX, token, HASH_PATH_SUFFIX)\n return sha512(enc_key).hexdigest()", "response": "Returns hashed token to be used as object name in Swift."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget the groups for the given token.", "response": "def get_groups(self, env, token):\n \"\"\"Get groups for the given token.\n\n :param env: The current WSGI environment dictionary.\n :param token: Token to validate and return a group string for.\n\n :returns: None if the token is invalid or a string containing a comma\n separated list of groups the authenticated user is a member\n of. The first group in the list is also considered a unique\n identifier for that user.\n \"\"\"\n groups = None\n memcache_client = cache_from_env(env)\n if memcache_client:\n memcache_key = '%s/auth/%s' % (self.reseller_prefix, token)\n cached_auth_data = memcache_client.get(memcache_key)\n if cached_auth_data:\n expires, groups = cached_auth_data\n if expires < time():\n groups = None\n\n s3_auth_details = env.get('swift3.auth_details')\n if s3_auth_details:\n if not self.s3_support:\n self.logger.warning('S3 support is disabled in swauth.')\n return None\n if self.swauth_remote:\n # TODO(gholt): Support S3-style authorization with\n # swauth_remote mode\n self.logger.warning('S3-style authorization not supported yet '\n 'with swauth_remote mode.')\n return None\n try:\n account, user = s3_auth_details['access_key'].split(':', 1)\n signature_from_user = s3_auth_details['signature']\n msg = s3_auth_details['string_to_sign']\n except Exception:\n self.logger.debug(\n 'Swauth cannot parse swift3.auth_details value %r' %\n (s3_auth_details, ))\n return None\n path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))\n resp = self.make_pre_authed_request(\n env, 'GET', 
path).get_response(self.app)\n if resp.status_int // 100 != 2:\n return None\n\n if 'x-object-meta-account-id' in resp.headers:\n account_id = resp.headers['x-object-meta-account-id']\n else:\n path = quote('/v1/%s/%s' % (self.auth_account, account))\n resp2 = self.make_pre_authed_request(\n env, 'HEAD', path).get_response(self.app)\n if resp2.status_int // 100 != 2:\n return None\n account_id = resp2.headers['x-container-meta-account-id']\n\n path = env['PATH_INFO']\n env['PATH_INFO'] = path.replace(\"%s:%s\" % (account, user),\n account_id, 1)\n detail = json.loads(resp.body)\n if detail:\n creds = detail.get('auth')\n try:\n auth_encoder, creds_dict = \\\n swauth.authtypes.validate_creds(creds)\n except ValueError as e:\n self.logger.error('%s' % e.args[0])\n return None\n\n password = creds_dict['hash']\n\n # https://bugs.python.org/issue5285\n if isinstance(password, six.text_type):\n password = password.encode('utf-8')\n if isinstance(msg, six.text_type):\n msg = msg.encode('utf-8')\n\n valid_signature = base64.encodestring(hmac.new(\n password, msg, sha1).digest()).strip()\n if signature_from_user != valid_signature:\n return None\n groups = [g['name'] for g in detail['groups']]\n if '.admin' in groups:\n groups.remove('.admin')\n groups.append(account_id)\n groups = ','.join(groups)\n return groups\n\n if not groups:\n if self.swauth_remote:\n with Timeout(self.swauth_remote_timeout):\n conn = http_connect(self.swauth_remote_parsed.hostname,\n self.swauth_remote_parsed.port, 'GET',\n '%s/v2/.token/%s' % (self.swauth_remote_parsed.path,\n quote(token)),\n ssl=(self.swauth_remote_parsed.scheme == 'https'))\n resp = conn.getresponse()\n resp.read()\n conn.close()\n if resp.status // 100 != 2:\n return None\n expires_from_now = float(resp.getheader('x-auth-ttl'))\n groups = resp.getheader('x-auth-groups')\n if memcache_client:\n memcache_client.set(\n memcache_key, (time() + expires_from_now, groups),\n time=expires_from_now)\n else:\n object_name = 
self._get_concealed_token(token)\n path = quote('/v1/%s/.token_%s/%s' %\n (self.auth_account, object_name[-1], object_name))\n resp = self.make_pre_authed_request(\n env, 'GET', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n return None\n detail = json.loads(resp.body)\n if detail['expires'] < time():\n self.make_pre_authed_request(\n env, 'DELETE', path).get_response(self.app)\n return None\n groups = [g['name'] for g in detail['groups']]\n if '.admin' in groups:\n groups.remove('.admin')\n groups.append(detail['account_id'])\n groups = ','.join(groups)\n if memcache_client:\n memcache_client.set(\n memcache_key,\n (detail['expires'], groups),\n time=float(detail['expires'] - time()))\n return groups"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef authorize(self, req):\n try:\n version, account, container, obj = split_path(req.path, 1, 4, True)\n except ValueError:\n return HTTPNotFound(request=req)\n if not account or not account.startswith(self.reseller_prefix):\n return self.denied_response(req)\n user_groups = (req.remote_user or '').split(',')\n if '.reseller_admin' in user_groups and \\\n account != self.reseller_prefix and \\\n account[len(self.reseller_prefix)] != '.':\n req.environ['swift_owner'] = True\n return None\n if account in user_groups and \\\n (req.method not in ('DELETE', 'PUT') or container):\n # If the user is admin for the account and is not trying to do an\n # account DELETE or PUT...\n req.environ['swift_owner'] = True\n return None\n if (req.environ.get('swift_sync_key') and\n req.environ['swift_sync_key'] ==\n req.headers.get('x-container-sync-key', None) and\n 'x-timestamp' in req.headers and\n (req.remote_addr in self.allowed_sync_hosts or\n get_remote_client(req) in self.allowed_sync_hosts)):\n return None\n referrers, groups = parse_acl(getattr(req, 'acl', None))\n if referrer_allowed(req.referer, referrers):\n if obj or '.rlistings' in groups:\n return 
None\n return self.denied_response(req)\n if not req.remote_user:\n return self.denied_response(req)\n for user_group in user_groups:\n if user_group in groups:\n return None\n return self.denied_response(req)", "response": "Returns None if the request is authorized to continue or a standard WSGI response callable if not."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef denied_response(self, req):\n if not hasattr(req, 'credentials_valid'):\n req.credentials_valid = None\n if req.remote_user or req.credentials_valid:\n return HTTPForbidden(request=req)\n else:\n return HTTPUnauthorized(request=req)", "response": "Returns a standard WSGI response callable with the status of 403 or 401 depending on whether the REMOTE_USER is set or not."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef handle(self, env, start_response):\n try:\n req = Request(env)\n if self.auth_prefix:\n req.path_info_pop()\n req.bytes_transferred = '-'\n req.client_disconnect = False\n if 'x-storage-token' in req.headers and \\\n 'x-auth-token' not in req.headers:\n req.headers['x-auth-token'] = req.headers['x-storage-token']\n if 'eventlet.posthooks' in env:\n env['eventlet.posthooks'].append(\n (self.posthooklogger, (req,), {}))\n return self.handle_request(req)(env, start_response)\n else:\n # Lack of posthook support means that we have to log on the\n # start of the response, rather than after all the data has\n # been sent. 
This prevents logging client disconnects\n # differently than full transmissions.\n response = self.handle_request(req)(env, start_response)\n self.posthooklogger(env, req)\n return response\n except (Exception, TimeoutError):\n print(\"EXCEPTION IN handle: %s: %s\" % (format_exc(), env))\n start_response('500 Server Error',\n [('Content-Type', 'text/plain')])\n return ['Internal server error.\\n']", "response": "Handles the request and returns the response."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef handle_request(self, req):\n req.start_time = time()\n handler = None\n try:\n version, account, user, _junk = split_path(req.path_info,\n minsegs=0, maxsegs=4, rest_with_last=True)\n except ValueError:\n return HTTPNotFound(request=req)\n if version in ('v1', 'v1.0', 'auth'):\n if req.method == 'GET':\n handler = self.handle_get_token\n elif version == 'v2':\n if not self.super_admin_key:\n return HTTPNotFound(request=req)\n req.path_info_pop()\n if req.method == 'GET':\n if not account and not user:\n handler = self.handle_get_reseller\n elif account:\n if not user:\n handler = self.handle_get_account\n elif account == '.token':\n req.path_info_pop()\n handler = self.handle_validate_token\n else:\n handler = self.handle_get_user\n elif req.method == 'PUT':\n if not user:\n handler = self.handle_put_account\n else:\n handler = self.handle_put_user\n elif req.method == 'DELETE':\n if not user:\n handler = self.handle_delete_account\n else:\n handler = self.handle_delete_user\n elif req.method == 'POST':\n if account == '.prep':\n handler = self.handle_prep\n elif user == '.services':\n handler = self.handle_set_services\n else:\n handler = self.handle_webadmin\n if not handler:\n req.response = HTTPBadRequest(request=req)\n else:\n req.response = handler(req)\n return req.response", "response": "Handles the request and returns a WSGI - style callable that can be used to handle the response."} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef handle_prep(self, req):\n if not self.is_super_admin(req):\n return self.denied_response(req)\n path = quote('/v1/%s' % self.auth_account)\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not create the main auth account: %s %s' %\n (path, resp.status))\n path = quote('/v1/%s/.account_id' % self.auth_account)\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not create container: %s %s' %\n (path, resp.status))\n for container in xrange(16):\n path = quote('/v1/%s/.token_%x' % (self.auth_account, container))\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not create container: %s %s' %\n (path, resp.status))\n return HTTPNoContent(request=req)", "response": "Handles the POST v2. 
prep call for preparing the backing store for use with the auth subsystem."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef handle_get_reseller(self, req):\n if not self.is_reseller_admin(req):\n return self.denied_response(req)\n listing = []\n marker = ''\n while True:\n path = '/v1/%s?format=json&marker=%s' % (quote(self.auth_account),\n quote(marker))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not list main auth account: %s %s' %\n (path, resp.status))\n sublisting = json.loads(resp.body)\n if not sublisting:\n break\n for container in sublisting:\n if container['name'][0] != '.':\n listing.append({'name': container['name']})\n marker = sublisting[-1]['name'].encode('utf-8')\n return Response(body=json.dumps({'accounts': listing}),\n content_type=CONTENT_TYPE_JSON)", "response": "Handles the GET v2 call for getting general reseller information."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nhandle the GET v2. account API call for getting account information.", "response": "def handle_get_account(self, req):\n \"\"\"Handles the GET v2/<account> call for getting account information.\n Can only be called by an account .admin.\n\n On success, a JSON dictionary will be returned containing the keys\n `account_id`, `services`, and `users`. The `account_id` is the value\n used when creating service accounts. The `services` value is a dict as\n described in the :func:`handle_get_token` call. The `users` value is a\n list of dicts, each dict representing a user and currently only\n containing the single key `name`. 
For example::\n\n {\"account_id\": \"AUTH_018c3946-23f8-4efb-a8fb-b67aae8e4162\",\n \"services\": {\"storage\": {\"default\": \"local\",\n \"local\": \"http://127.0.0.1:8080/v1/AUTH_018c3946\"}},\n \"users\": [{\"name\": \"tester\"}, {\"name\": \"tester3\"}]}\n\n :param req: The swob.Request to process.\n :returns: swob.Response, 2xx on success with a JSON dictionary as\n explained above.\n \"\"\"\n account = req.path_info_pop()\n if req.path_info or not account or account[0] == '.':\n return HTTPBadRequest(request=req)\n if not self.is_account_admin(req, account):\n return self.denied_response(req)\n path = quote('/v1/%s/%s/.services' % (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int == 404:\n return HTTPNotFound(request=req)\n if resp.status_int // 100 != 2:\n raise Exception('Could not obtain the .services object: %s %s' %\n (path, resp.status))\n services = json.loads(resp.body)\n listing = []\n marker = ''\n while True:\n path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' %\n (self.auth_account, account)), quote(marker))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int == 404:\n return HTTPNotFound(request=req)\n if resp.status_int // 100 != 2:\n raise Exception('Could not list in main auth account: %s %s' %\n (path, resp.status))\n account_id = resp.headers['X-Container-Meta-Account-Id']\n sublisting = json.loads(resp.body)\n if not sublisting:\n break\n for obj in sublisting:\n if obj['name'][0] != '.':\n listing.append({'name': obj['name']})\n marker = sublisting[-1]['name'].encode('utf-8')\n return Response(content_type=CONTENT_TYPE_JSON,\n body=json.dumps({'account_id': account_id,\n 'services': services,\n 'users': listing}))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef handle_set_services(self, req):\n if not 
self.is_reseller_admin(req):\n return self.denied_response(req)\n account = req.path_info_pop()\n if req.path_info != '/.services' or not account or account[0] == '.':\n return HTTPBadRequest(request=req)\n try:\n new_services = json.loads(req.body)\n except ValueError as err:\n return HTTPBadRequest(body=str(err))\n # Get the current services information\n path = quote('/v1/%s/%s/.services' % (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int == 404:\n return HTTPNotFound(request=req)\n if resp.status_int // 100 != 2:\n raise Exception('Could not obtain services info: %s %s' %\n (path, resp.status))\n services = json.loads(resp.body)\n for new_service, value in new_services.iteritems():\n if new_service in services:\n services[new_service].update(value)\n else:\n services[new_service] = value\n # Save the new services information\n services = json.dumps(services)\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path, services).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not save .services object: %s %s' %\n (path, resp.status))\n return Response(request=req, body=services,\n content_type=CONTENT_TYPE_JSON)", "response": "Handles the POST v2. 
services call for setting services in the current account."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef handle_put_account(self, req):\n if not self.is_reseller_admin(req):\n return self.denied_response(req)\n account = req.path_info_pop()\n if req.path_info or not account or account[0] == '.':\n return HTTPBadRequest(request=req)\n\n account_suffix = req.headers.get('x-account-suffix')\n if not account_suffix:\n account_suffix = str(uuid4())\n # Create the new account in the Swift cluster\n path = quote('%s/%s%s' % (self.dsc_parsed2.path,\n self.reseller_prefix, account_suffix))\n try:\n conn = self.get_conn()\n conn.request('PUT', path,\n headers={'X-Auth-Token': self.get_itoken(req.environ),\n 'Content-Length': '0'})\n resp = conn.getresponse()\n resp.read()\n if resp.status // 100 != 2:\n raise Exception('Could not create account on the Swift '\n 'cluster: %s %s %s' % (path, resp.status, resp.reason))\n except (Exception, TimeoutError):\n self.logger.error(_('ERROR: Exception while trying to communicate '\n 'with %(scheme)s://%(host)s:%(port)s/%(path)s'),\n {'scheme': self.dsc_parsed2.scheme,\n 'host': self.dsc_parsed2.hostname,\n 'port': self.dsc_parsed2.port, 'path': path})\n raise\n # Ensure the container in the main auth account exists (this\n # container represents the new account)\n path = quote('/v1/%s/%s' % (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 'HEAD', path).get_response(self.app)\n if resp.status_int == 404:\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not create account within main auth '\n 'account: %s %s' % (path, resp.status))\n elif resp.status_int // 100 == 2:\n if 'x-container-meta-account-id' in resp.headers:\n # Account was already created\n return HTTPAccepted(request=req)\n else:\n raise Exception('Could not verify account within 
main auth '\n 'account: %s %s' % (path, resp.status))\n # Record the mapping from account id back to account name\n path = quote('/v1/%s/.account_id/%s%s' %\n (self.auth_account, self.reseller_prefix, account_suffix))\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path, account).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not create account id mapping: %s %s' %\n (path, resp.status))\n # Record the cluster url(s) for the account\n path = quote('/v1/%s/%s/.services' % (self.auth_account, account))\n services = {'storage': {}}\n services['storage'][self.dsc_name] = '%s/%s%s' % (self.dsc_url,\n self.reseller_prefix, account_suffix)\n services['storage']['default'] = self.dsc_name\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path,\n json.dumps(services)).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not create .services object: %s %s' %\n (path, resp.status))\n # Record the mapping from account name to the account id\n path = quote('/v1/%s/%s' % (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 'POST', path,\n headers={'X-Container-Meta-Account-Id': '%s%s' % (\n self.reseller_prefix, account_suffix)}).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not record the account id on the account: '\n '%s %s' % (path, resp.status))\n return HTTPCreated(request=req)", "response": "Handles the PUT v2/<account > call for adding an account to the auth system."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef handle_delete_account(self, req):\n if not self.is_reseller_admin(req):\n return self.denied_response(req)\n account = req.path_info_pop()\n if req.path_info or not account or account[0] == '.':\n return HTTPBadRequest(request=req)\n # Make sure the account has no users and get the account_id\n marker = ''\n while True:\n path = 
'/v1/%s?format=json&marker=%s' % (quote('%s/%s' %\n (self.auth_account, account)), quote(marker))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int == 404:\n return HTTPNotFound(request=req)\n if resp.status_int // 100 != 2:\n raise Exception('Could not list in main auth account: %s %s' %\n (path, resp.status))\n account_id = resp.headers['x-container-meta-account-id']\n sublisting = json.loads(resp.body)\n if not sublisting:\n break\n for obj in sublisting:\n if obj['name'][0] != '.':\n return HTTPConflict(request=req)\n marker = sublisting[-1]['name'].encode('utf-8')\n # Obtain the listing of services the account is on.\n path = quote('/v1/%s/%s/.services' % (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int // 100 != 2 and resp.status_int != 404:\n raise Exception('Could not obtain .services object: %s %s' %\n (path, resp.status))\n if resp.status_int // 100 == 2:\n services = json.loads(resp.body)\n # Delete the account on each cluster it is on.\n deleted_any = False\n for name, url in services['storage'].iteritems():\n if name != 'default':\n parsed = urlparse(url)\n conn = self.get_conn(parsed)\n conn.request('DELETE', parsed.path,\n headers={'X-Auth-Token': self.get_itoken(req.environ)})\n resp = conn.getresponse()\n resp.read()\n if resp.status == 409:\n if deleted_any:\n raise Exception('Managed to delete one or more '\n 'service end points, but failed with: '\n '%s %s %s' % (url, resp.status, resp.reason))\n else:\n return HTTPConflict(request=req)\n if resp.status // 100 != 2 and resp.status != 404:\n raise Exception('Could not delete account on the '\n 'Swift cluster: %s %s %s' %\n (url, resp.status, resp.reason))\n deleted_any = True\n # Delete the .services object itself.\n path = quote('/v1/%s/%s/.services' %\n (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 
'DELETE', path).get_response(self.app)\n if resp.status_int // 100 != 2 and resp.status_int != 404:\n raise Exception('Could not delete .services object: %s %s' %\n (path, resp.status))\n # Delete the account id mapping for the account.\n path = quote('/v1/%s/.account_id/%s' %\n (self.auth_account, account_id))\n resp = self.make_pre_authed_request(\n req.environ, 'DELETE', path).get_response(self.app)\n if resp.status_int // 100 != 2 and resp.status_int != 404:\n raise Exception('Could not delete account id mapping: %s %s' %\n (path, resp.status))\n # Delete the account marker itself.\n path = quote('/v1/%s/%s' % (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 'DELETE', path).get_response(self.app)\n if resp.status_int // 100 != 2 and resp.status_int != 404:\n raise Exception('Could not delete account marked: %s %s' %\n (path, resp.status))\n return HTTPNoContent(request=req)", "response": "Handles the DELETE v2/<account > call for removing an account from the auth system."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef handle_get_user(self, req):\n account = req.path_info_pop()\n user = req.path_info_pop()\n if req.path_info or not account or account[0] == '.' or not user or \\\n (user[0] == '.' and user != '.groups'):\n return HTTPBadRequest(request=req)\n if not self.is_account_admin(req, account):\n return self.denied_response(req)\n\n # get information for each user for the specified\n # account and create a list of all groups that the users\n # are part of\n if user == '.groups':\n # TODO(gholt): This could be very slow for accounts with a really\n # large number of users. Speed could be improved by concurrently\n # requesting user group information. 
Then again, I don't *know*\n # it's slow for `normal` use cases, so testing should be done.\n groups = set()\n marker = ''\n while True:\n path = '/v1/%s?format=json&marker=%s' % (quote('%s/%s' %\n (self.auth_account, account)), quote(marker))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int == 404:\n return HTTPNotFound(request=req)\n if resp.status_int // 100 != 2:\n raise Exception('Could not list in main auth account: '\n '%s %s' % (path, resp.status))\n sublisting = json.loads(resp.body)\n if not sublisting:\n break\n for obj in sublisting:\n if obj['name'][0] != '.':\n\n # get list of groups for each user\n user_json = self.get_user_detail(req, account,\n obj['name'])\n if user_json is None:\n raise Exception('Could not retrieve user object: '\n '%s:%s %s' % (account, user, 404))\n groups.update(\n g['name'] for g in json.loads(user_json)['groups'])\n marker = sublisting[-1]['name'].encode('utf-8')\n body = json.dumps(\n {'groups': [{'name': g} for g in sorted(groups)]})\n else:\n # get information for specific user,\n # if user doesn't exist, return HTTPNotFound\n body = self.get_user_detail(req, account, user)\n if body is None:\n return HTTPNotFound(request=req)\n\n display_groups = [g['name'] for g in json.loads(body)['groups']]\n if ('.admin' in display_groups and\n not self.is_reseller_admin(req)) or \\\n ('.reseller_admin' in display_groups and\n not self.is_super_admin(req)):\n return self.denied_response(req)\n return Response(body=body, content_type=CONTENT_TYPE_JSON)", "response": "Handles the GET v2 API call for getting user information."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef handle_put_user(self, req):\n # Validate path info\n account = req.path_info_pop()\n user = req.path_info_pop()\n key = unquote(req.headers.get('x-auth-user-key', ''))\n key_hash = unquote(req.headers.get('x-auth-user-key-hash', ''))\n admin = 
req.headers.get('x-auth-user-admin') == 'true'\n reseller_admin = \\\n req.headers.get('x-auth-user-reseller-admin') == 'true'\n if reseller_admin:\n admin = True\n if req.path_info or not account or account[0] == '.' or not user or \\\n user[0] == '.' or (not key and not key_hash):\n return HTTPBadRequest(request=req)\n if key_hash:\n try:\n swauth.authtypes.validate_creds(key_hash)\n except ValueError:\n return HTTPBadRequest(request=req)\n\n user_arg = account + ':' + user\n if reseller_admin:\n if not self.is_super_admin(req) and\\\n not self.is_user_changing_own_key(req, user_arg):\n return self.denied_response(req)\n elif not self.is_account_admin(req, account) and\\\n not self.is_user_changing_own_key(req, user_arg):\n return self.denied_response(req)\n\n path = quote('/v1/%s/%s' % (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 'HEAD', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not retrieve account id value: %s %s' %\n (path, resp.status))\n headers = {'X-Object-Meta-Account-Id':\n resp.headers['x-container-meta-account-id']}\n # Create the object in the main auth account (this object represents\n # the user)\n path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))\n groups = ['%s:%s' % (account, user), account]\n if admin:\n groups.append('.admin')\n if reseller_admin:\n groups.append('.reseller_admin')\n auth_value = key_hash or self.auth_encoder().encode(key)\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path,\n json.dumps({'auth': auth_value,\n 'groups': [{'name': g} for g in groups]}),\n headers=headers).get_response(self.app)\n if resp.status_int == 404:\n return HTTPNotFound(request=req)\n if resp.status_int // 100 != 2:\n raise Exception('Could not create user object: %s %s' %\n (path, resp.status))\n return HTTPCreated(request=req)", "response": "Handles the PUT v2 API call for adding a user to an account."} {"SOURCE": "codesearchnet", 
"instruction": "Can you generate a brief explanation for the following Python 3 code\ndef handle_delete_user(self, req):\n # Validate path info\n account = req.path_info_pop()\n user = req.path_info_pop()\n if req.path_info or not account or account[0] == '.' or not user or \\\n user[0] == '.':\n return HTTPBadRequest(request=req)\n\n # if user to be deleted is reseller_admin, then requesting\n # user must be the super_admin\n is_reseller_admin = self.is_user_reseller_admin(req, account, user)\n if not is_reseller_admin and not req.credentials_valid:\n # if user to be deleted can't be found, return 404\n return HTTPNotFound(request=req)\n elif is_reseller_admin and not self.is_super_admin(req):\n return HTTPForbidden(request=req)\n\n if not self.is_account_admin(req, account):\n return self.denied_response(req)\n\n # Delete the user's existing token, if any.\n path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))\n resp = self.make_pre_authed_request(\n req.environ, 'HEAD', path).get_response(self.app)\n if resp.status_int == 404:\n return HTTPNotFound(request=req)\n elif resp.status_int // 100 != 2:\n raise Exception('Could not obtain user details: %s %s' %\n (path, resp.status))\n candidate_token = resp.headers.get('x-object-meta-auth-token')\n if candidate_token:\n object_name = self._get_concealed_token(candidate_token)\n path = quote('/v1/%s/.token_%s/%s' %\n (self.auth_account, object_name[-1], object_name))\n resp = self.make_pre_authed_request(\n req.environ, 'DELETE', path).get_response(self.app)\n if resp.status_int // 100 != 2 and resp.status_int != 404:\n raise Exception('Could not delete possibly existing token: '\n '%s %s' % (path, resp.status))\n # Delete the user entry itself.\n path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))\n resp = self.make_pre_authed_request(\n req.environ, 'DELETE', path).get_response(self.app)\n if resp.status_int // 100 != 2 and resp.status_int != 404:\n raise Exception('Could not delete the 
user object: %s %s' %\n (path, resp.status))\n return HTTPNoContent(request=req)", "response": "Handles the DELETE v2 API call for deleting a user from an account."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn True if the user is a. reseller_admin.", "response": "def is_user_reseller_admin(self, req, account, user):\n \"\"\"Returns True if the user is a .reseller_admin.\n\n :param account: account user is part of\n :param user: the user\n :returns: True if user .reseller_admin, False\n if user is not a reseller_admin and None if the user\n doesn't exist.\n \"\"\"\n req.credentials_valid = True\n user_json = self.get_user_detail(req, account, user)\n if user_json is None:\n req.credentials_valid = False\n return False\n\n user_detail = json.loads(user_json)\n\n return '.reseller_admin' in (g['name'] for g in user_detail['groups'])"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef handle_get_token(self, req):\n # Validate the request info\n try:\n pathsegs = split_path(req.path_info, minsegs=1, maxsegs=3,\n rest_with_last=True)\n except ValueError:\n return HTTPNotFound(request=req)\n if pathsegs[0] == 'v1' and pathsegs[2] == 'auth':\n account = pathsegs[1]\n user = req.headers.get('x-storage-user')\n if not user:\n user = unquote(req.headers.get('x-auth-user', ''))\n if not user or ':' not in user:\n return HTTPUnauthorized(request=req)\n account2, user = user.split(':', 1)\n if account != account2:\n return HTTPUnauthorized(request=req)\n key = req.headers.get('x-storage-pass')\n if not key:\n key = unquote(req.headers.get('x-auth-key', ''))\n elif pathsegs[0] in ('auth', 'v1.0'):\n user = unquote(req.headers.get('x-auth-user', ''))\n if not user:\n user = req.headers.get('x-storage-user')\n if not user or ':' not in user:\n return HTTPUnauthorized(request=req)\n account, user = user.split(':', 1)\n key = unquote(req.headers.get('x-auth-key', ''))\n if not 
key:\n key = req.headers.get('x-storage-pass')\n else:\n return HTTPBadRequest(request=req)\n if not all((account, user, key)):\n return HTTPUnauthorized(request=req)\n if user == '.super_admin' and self.super_admin_key and \\\n key == self.super_admin_key:\n token = self.get_itoken(req.environ)\n url = '%s/%s.auth' % (self.dsc_url, self.reseller_prefix)\n return Response(\n request=req,\n content_type=CONTENT_TYPE_JSON,\n body=json.dumps({'storage': {'default': 'local',\n 'local': url}}),\n headers={'x-auth-token': token,\n 'x-storage-token': token,\n 'x-storage-url': url})\n\n # Authenticate user\n path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int == 404:\n return HTTPUnauthorized(request=req)\n if resp.status_int // 100 != 2:\n raise Exception('Could not obtain user details: %s %s' %\n (path, resp.status))\n user_detail = json.loads(resp.body)\n if not self.credentials_match(user_detail, key):\n return HTTPUnauthorized(request=req)\n # See if a token already exists and hasn't expired\n token = None\n expires = None\n candidate_token = resp.headers.get('x-object-meta-auth-token')\n if candidate_token:\n object_name = self._get_concealed_token(candidate_token)\n path = quote('/v1/%s/.token_%s/%s' %\n (self.auth_account, object_name[-1], object_name))\n delete_token = False\n try:\n if req.headers.get('x-auth-new-token', 'false').lower() in \\\n TRUE_VALUES:\n delete_token = True\n else:\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int // 100 == 2:\n token_detail = json.loads(resp.body)\n if token_detail['expires'] > time():\n token = candidate_token\n expires = token_detail['expires']\n else:\n delete_token = True\n elif resp.status_int != 404:\n raise Exception(\n 'Could not detect whether a token already exists: '\n '%s %s' % (path, resp.status))\n finally:\n if 
delete_token:\n self.make_pre_authed_request(\n req.environ, 'DELETE', path).get_response(self.app)\n memcache_client = cache_from_env(req.environ)\n if memcache_client:\n memcache_key = '%s/auth/%s' % (self.reseller_prefix,\n candidate_token)\n memcache_client.delete(memcache_key)\n # Create a new token if one didn't exist\n if not token:\n # Retrieve account id, we'll save this in the token\n path = quote('/v1/%s/%s' % (self.auth_account, account))\n resp = self.make_pre_authed_request(\n req.environ, 'HEAD', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not retrieve account id value: '\n '%s %s' % (path, resp.status))\n account_id = \\\n resp.headers['x-container-meta-account-id']\n # Generate new token\n token = '%stk%s' % (self.reseller_prefix, uuid4().hex)\n # Save token info\n object_name = self._get_concealed_token(token)\n path = quote('/v1/%s/.token_%s/%s' %\n (self.auth_account, object_name[-1], object_name))\n try:\n token_life = min(\n int(req.headers.get('x-auth-token-lifetime',\n self.token_life)),\n self.max_token_life)\n except ValueError:\n token_life = self.token_life\n expires = int(time() + token_life)\n resp = self.make_pre_authed_request(\n req.environ, 'PUT', path,\n json.dumps({'account': account, 'user': user,\n 'account_id': account_id,\n 'groups': user_detail['groups'],\n 'expires': expires})).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not create new token: %s %s' %\n (path, resp.status))\n # Record the token with the user info for future use.\n path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))\n resp = self.make_pre_authed_request(\n req.environ, 'POST', path,\n headers={'X-Object-Meta-Auth-Token': token}\n ).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not save new token: %s %s' %\n (path, resp.status))\n # Get the services information\n path = quote('/v1/%s/%s/.services' % (self.auth_account, 
account))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n raise Exception('Could not obtain services info: %s %s' %\n (path, resp.status))\n detail = json.loads(resp.body)\n url = detail['storage'][detail['storage']['default']]\n return Response(\n request=req,\n body=resp.body,\n content_type=CONTENT_TYPE_JSON,\n headers={'x-auth-token': token,\n 'x-storage-token': token,\n 'x-auth-token-expires': str(int(expires - time())),\n 'x-storage-url': url})", "response": "Handles the various request for getting a token and service end point."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef handle_validate_token(self, req):\n token = req.path_info_pop()\n if req.path_info or not token.startswith(self.reseller_prefix):\n return HTTPBadRequest(request=req)\n expires = groups = None\n memcache_client = cache_from_env(req.environ)\n if memcache_client:\n memcache_key = '%s/auth/%s' % (self.reseller_prefix, token)\n cached_auth_data = memcache_client.get(memcache_key)\n if cached_auth_data:\n expires, groups = cached_auth_data\n if expires < time():\n groups = None\n if not groups:\n object_name = self._get_concealed_token(token)\n path = quote('/v1/%s/.token_%s/%s' %\n (self.auth_account, object_name[-1], object_name))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int // 100 != 2:\n return HTTPNotFound(request=req)\n detail = json.loads(resp.body)\n expires = detail['expires']\n if expires < time():\n self.make_pre_authed_request(\n req.environ, 'DELETE', path).get_response(self.app)\n return HTTPNotFound(request=req)\n groups = [g['name'] for g in detail['groups']]\n if '.admin' in groups:\n groups.remove('.admin')\n groups.append(detail['account_id'])\n groups = ','.join(groups)\n return HTTPNoContent(headers={'X-Auth-TTL': expires - time(),\n 'X-Auth-Groups': 
groups})", "response": "Handles the GET v2. token/<token > call for validating a token."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_conn(self, urlparsed=None):\n if not urlparsed:\n urlparsed = self.dsc_parsed2\n if urlparsed.scheme == 'http':\n return HTTPConnection(urlparsed.netloc)\n else:\n return HTTPSConnection(urlparsed.netloc)", "response": "Returns an HTTPConnection based on the urlparse result given or the\nAttributeNames default Swift cluster s value"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the current internal token to use for the auth system s own actions with other services.", "response": "def get_itoken(self, env):\n \"\"\"Returns the current internal token to use for the auth system's own\n actions with other services. Each process will create its own\n itoken and the token will be deleted and recreated based on the\n token_life configuration value. The itoken information is stored in\n memcache because the auth process that is asked by Swift to validate\n the token may not be the same as the auth process that created the\n token.\n \"\"\"\n if not self.itoken or self.itoken_expires < time() or \\\n env.get('HTTP_X_AUTH_NEW_TOKEN', 'false').lower() in \\\n TRUE_VALUES:\n self.itoken = '%sitk%s' % (self.reseller_prefix, uuid4().hex)\n memcache_key = '%s/auth/%s' % (self.reseller_prefix, self.itoken)\n self.itoken_expires = time() + self.token_life\n memcache_client = cache_from_env(env)\n if not memcache_client:\n raise Exception(\n 'No memcache set up; required for Swauth middleware')\n memcache_client.set(\n memcache_key,\n (self.itoken_expires,\n '.auth,.reseller_admin,%s.auth' % self.reseller_prefix),\n time=self.token_life)\n return self.itoken"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the dict for the admin user specified as the admin in the request X - Auth - Admin - User and the 
addition of an account key to the admin user s account.", "response": "def get_admin_detail(self, req):\n \"\"\"Returns the dict for the user specified as the admin in the request\n with the addition of an `account` key set to the admin user's account.\n\n :param req: The swob request to retrieve X-Auth-Admin-User and\n X-Auth-Admin-Key from.\n :returns: The dict for the admin user with the addition of the\n `account` key.\n \"\"\"\n if ':' not in req.headers.get('x-auth-admin-user', ''):\n return None\n admin_account, admin_user = \\\n req.headers.get('x-auth-admin-user').split(':', 1)\n user_json = self.get_user_detail(req, admin_account, admin_user)\n if user_json is None:\n return None\n admin_detail = json.loads(user_json)\n admin_detail['account'] = admin_account\n return admin_detail"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_user_detail(self, req, account, user):\n path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))\n resp = self.make_pre_authed_request(\n req.environ, 'GET', path).get_response(self.app)\n if resp.status_int == 404:\n return None\n if resp.status_int // 100 != 2:\n raise Exception('Could not get user object: %s %s' %\n (path, resp.status))\n return resp.body", "response": "Returns the body of a GET request for the specified user"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef credentials_match(self, user_detail, key):\n if user_detail:\n creds = user_detail.get('auth')\n try:\n auth_encoder, creds_dict = \\\n swauth.authtypes.validate_creds(creds)\n except ValueError as e:\n self.logger.error('%s' % e.args[0])\n return False\n return user_detail and auth_encoder.match(key, creds, **creds_dict)", "response": "Returns True if the key is valid for the user_detail."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef 
is_user_changing_own_key(self, req, user):\n admin_detail = self.get_admin_detail(req)\n if not admin_detail:\n # The user does not exist\n return False\n\n # If user is not admin/reseller_admin and x-auth-user-admin or\n # x-auth-user-reseller-admin headers are present in request, he may be\n # attempting to escalate himself as admin/reseller_admin!\n if '.admin' not in (g['name'] for g in admin_detail['groups']):\n if req.headers.get('x-auth-user-admin') == 'true' or \\\n req.headers.get('x-auth-user-reseller-admin') == 'true':\n return False\n if '.reseller_admin' not in \\\n (g['name'] for g in admin_detail['groups']) and \\\n req.headers.get('x-auth-user-reseller-admin') == 'true':\n return False\n\n return req.headers.get('x-auth-admin-user') == user and \\\n self.credentials_match(admin_detail,\n req.headers.get('x-auth-admin-key'))", "response": "Checks if the user is changing his own key."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef is_super_admin(self, req):\n return req.headers.get('x-auth-admin-user') == '.super_admin' and \\\n self.super_admin_key and \\\n req.headers.get('x-auth-admin-key') == self.super_admin_key", "response": "Returns True if the admin specified in the request represents the\n . super_admin."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns True if the admin specified in the request represents a. 
reseller_admin.", "response": "def is_reseller_admin(self, req, admin_detail=None):\n \"\"\"Returns True if the admin specified in the request represents a\n .reseller_admin.\n\n :param req: The swob.Request to check.\n :param admin_detail: The previously retrieved dict from\n :func:`get_admin_detail` or None for this function\n to retrieve the admin_detail itself.\n :param returns: True if .reseller_admin.\n \"\"\"\n req.credentials_valid = False\n if self.is_super_admin(req):\n return True\n if not admin_detail:\n admin_detail = self.get_admin_detail(req)\n if not self.credentials_match(admin_detail,\n req.headers.get('x-auth-admin-key')):\n return False\n req.credentials_valid = True\n return '.reseller_admin' in (g['name'] for g in admin_detail['groups'])"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nparsing and validate user credentials in dict", "response": "def validate_creds(creds):\n \"\"\"Parse and validate user credentials whether format is right\n\n :param creds: User credentials\n :returns: Auth_type class instance and parsed user credentials in dict\n :raises ValueError: If credential format is wrong (eg: bad auth_type)\n \"\"\"\n try:\n auth_type, auth_rest = creds.split(':', 1)\n except ValueError:\n raise ValueError(\"Missing ':' in %s\" % creds)\n authtypes = sys.modules[__name__]\n auth_encoder = getattr(authtypes, auth_type.title(), None)\n if auth_encoder is None:\n raise ValueError('Invalid auth_type: %s' % auth_type)\n auth_encoder = auth_encoder()\n parsed_creds = dict(type=auth_type, salt=None, hash=None)\n parsed_creds.update(auth_encoder.validate(auth_rest))\n return auth_encoder, parsed_creds"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef encode_w_salt(self, salt, key):\n enc_key = '%s%s' % (salt, key)\n enc_val = hashlib.sha1(enc_key).hexdigest()\n return \"sha1:%s$%s\" % (salt, enc_val)", "response": "Encodes a user key with salt 
into a particular format. The result of this method will be used internally."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nencoding a user key into a particular format.", "response": "def encode(self, key):\n \"\"\"Encodes a user key into a particular format. The result of this method\n will be used by swauth for storing user credentials.\n\n If salt is not manually set in conf file, a random salt will be\n generated and used.\n\n :param key: User's secret key\n :returns: A string representing user credentials\n \"\"\"\n salt = self.salt or os.urandom(32).encode('base64').rstrip()\n return self.encode_w_salt(salt, key)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef match(self, key, creds, salt, **kwargs):\n return self.encode_w_salt(salt, key) == creds", "response": "Checks whether the user - provided key matches the user s stored credentials and salt."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef validate(self, auth_rest):\n try:\n auth_salt, auth_hash = auth_rest.split('$')\n except ValueError:\n raise ValueError(\"Missing '$' in %s\" % auth_rest)\n\n if len(auth_salt) == 0:\n raise ValueError(\"Salt must have non-zero length!\")\n if len(auth_hash) != 40:\n raise ValueError(\"Hash must have 40 chars!\")\n if not all(c in string.hexdigits for c in auth_hash):\n raise ValueError(\"Hash must be hexadecimal!\")\n\n return dict(salt=auth_salt, hash=auth_hash)", "response": "Validate user credentials whether format is right for Sha1."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create_bdew_load_profiles(self, dt_index, slp_types, holidays=None):\n\n # define file path of slp csv data\n file_path = os.path.join(self.datapath, 'selp_series.csv')\n\n # Read standard load profile series from csv file\n selp_series = pd.read_csv(file_path)\n tmp_df 
= selp_series\n # Create an index to merge. The year and month will be ignored only the\n # time index is necessary.\n index = pd.date_range(\n pd.datetime(2007, 1, 1, 0), periods=2016, freq='15Min')\n tmp_df.set_index(index, inplace=True)\n\n # Create empty DataFrame to take the results.\n new_df = pd.DataFrame(index=dt_index, columns=slp_types).fillna(0)\n new_df = add_weekdays2df(new_df, holidays=holidays,\n holiday_is_sunday=True)\n\n new_df['hour'] = dt_index.hour + 1\n new_df['minute'] = dt_index.minute\n time_df = new_df[['date', 'hour', 'minute', 'weekday']].copy()\n tmp_df[slp_types] = tmp_df[slp_types].astype(float)\n\n # Inner join the slps on the time_df to the slp's for a whole year\n tmp_df['hour_of_day'] = tmp_df.index.hour + 1\n tmp_df['minute_of_hour'] = tmp_df.index.minute\n left_cols = ['hour_of_day', 'minute_of_hour', 'weekday']\n right_cols = ['hour', 'minute', 'weekday']\n tmp_df = tmp_df.reset_index()\n tmp_df.pop('index')\n\n for p in self.seasons.keys():\n a = pd.datetime(self.year, self.seasons[p][0],\n self.seasons[p][1], 0, 0)\n b = pd.datetime(self.year, self.seasons[p][2],\n self.seasons[p][3], 23, 59)\n new_df.update(pd.DataFrame.merge(\n tmp_df[tmp_df['period'] == p[:-1]], time_df[a:b],\n left_on=left_cols, right_on=right_cols,\n how='inner', left_index=True).sort_index().drop(\n ['hour_of_day'], 1))\n\n new_df.drop('date', axis=1, inplace=True)\n return new_df.div(new_df.sum(axis=0), axis=1)", "response": "Create a DataFrame of hourly electricity load profiles for a specific year."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_profile(self, ann_el_demand_per_sector):\n return self.slp_frame.multiply(pd.Series(\n ann_el_demand_per_sector), axis=1).dropna(how='all', axis=1) * 4", "response": "Get the profiles for the given annual demand per module."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the temperature interval for each 
temperature in the temperature vector.", "response": "def get_temperature_interval(self):\n \"\"\"Appoints the corresponding temperature interval to each temperature\n in the temperature vector.\n \"\"\"\n intervals = ({\n -20: 1, -19: 1, -18: 1, -17: 1, -16: 1, -15: 1, -14: 2,\n -13: 2, -12: 2, -11: 2, -10: 2, -9: 3, -8: 3, -7: 3, -6: 3, -5: 3,\n -4: 4, -3: 4, -2: 4, -1: 4, 0: 4, 1: 5, 2: 5, 3: 5, 4: 5, 5: 5,\n 6: 6, 7: 6, 8: 6, 9: 6, 10: 6, 11: 7, 12: 7, 13: 7, 14: 7, 15: 7,\n 16: 8, 17: 8, 18: 8, 19: 8, 20: 8, 21: 9, 22: 9, 23: 9, 24: 9,\n 25: 9, 26: 10, 27: 10, 28: 10, 29: 10, 30: 10, 31: 10, 32: 10,\n 33: 10, 34: 10, 35: 10, 36: 10, 37: 10, 38: 10, 39: 10, 40: 10})\n\n temperature_rounded = [ceil(i) for i in self.df['temperature_geo']]\n\n temperature_interval = [intervals[i] for i in temperature_rounded]\n\n return np.transpose(np.array(temperature_interval))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_sf_values(self, filename='shlp_hour_factors.csv'):\n file = os.path.join(self.datapath, filename)\n hour_factors = pd.read_csv(file, index_col=0)\n hour_factors = hour_factors.query(\n 'building_class=={0} and shlp_type==\"{1}\"'.format(\n self.building_class, self.shlp_type))\n\n # Join the two DataFrames on the columns 'hour' and 'hour_of_the_day'\n # or ['hour' 'weekday'] and ['hour_of_the_day', 'weekday'] if it is\n # not a residential slp.\n residential = self.building_class > 0\n left_cols = ['hour_of_day'] + (['weekday'] if not residential else [])\n right_cols = ['hour'] + (['weekday'] if not residential else [])\n sf_mat = pd.DataFrame.merge(\n hour_factors, self.df, left_on=left_cols, right_on=right_cols,\n how='outer', left_index=True).sort_index()\n\n # drop unnecessary columns\n drop_cols = (\n ['hour_of_day', 'hour', 'building_class', 'shlp_type',\n 'date', 'temperature'] + (['weekday_x'] if residential else []) +\n (['weekday_y'] if residential else []) +\n (['weekday'] if not 
residential else []))\n sf_mat = sf_mat.drop(drop_cols, 1)\n\n # Determine the h values\n length = len(self.temperature)\n sf = (np.array(sf_mat)[np.array(list(range(0, length)))[:],\n (self.get_temperature_interval() - 1)[:]])\n return np.array(list(map(float, sf[:])))", "response": "Determine the h - values of the sigmoid factors stored in the specified file."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nretrieve the sigmoid parameters from csv - files and return them as a tuple.", "response": "def get_sigmoid_parameters(self, filename='shlp_sigmoid_factors.csv'):\n \"\"\" Retrieve the sigmoid parameters from csv-files\n\n Parameters\n ----------\n filename : string\n name of file where sigmoid factors are stored\n \"\"\"\n\n file = os.path.join(self.datapath, filename)\n sigmoid = pd.read_csv(file, index_col=0)\n sigmoid = sigmoid.query(\n 'building_class=={0} and '.format(self.building_class) +\n 'shlp_type==\"{0}\" and '.format(self.shlp_type) +\n 'wind_impact=={0}'.format(self.wind_class))\n\n a = float(sigmoid['parameter_a'])\n b = float(sigmoid['parameter_b'])\n c = float(sigmoid['parameter_c'])\n if self.ww_incl:\n d = float(sigmoid['parameter_d'])\n else:\n d = 0\n return a, b, c, d"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nretrieving the weekday parameter from csv - file", "response": "def get_weekday_parameters(self, filename='shlp_weekday_factors.csv'):\n \"\"\" Retrieve the weekday parameter from csv-file\n\n Parameters\n ----------\n filename : string\n name of file where sigmoid factors are stored\n \"\"\"\n file = os.path.join(self.datapath, filename)\n f_df = pd.read_csv(file, index_col=0)\n\n tmp_df = f_df.query('shlp_type==\"{0}\"'.format(self.shlp_type)).drop(\n 'shlp_type', axis=1)\n\n tmp_df['weekdays'] = np.array(list(range(7))) + 1\n\n return np.array(list(map(float, pd.DataFrame.merge(\n tmp_df, self.df, left_on='weekdays', right_on='weekday',\n how='inner', 
left_index=True).sort_index()['wochentagsfaktor'])))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncalculating the normalized hourly heat demand", "response": "def get_normalized_bdew_profile(self):\n \"\"\" Calculation of the normalized hourly heat demand\n \"\"\"\n self.df['temperature'] = self.temperature.values\n self.df['temperature_geo'] = self.weighted_temperature(\n how='geometric_series')\n\n sf = self.get_sf_values()\n\n [a, b, c, d] = self.get_sigmoid_parameters()\n\n f = self.get_weekday_parameters()\n h = (a / (1 + (b / (self.df['temperature_geo'] - 40)) ** c) + d)\n kw = 1.0 / (sum(h * f) / 24)\n heat_profile_normalized = (kw * h * f * sf)\n\n return heat_profile_normalized"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nchecks if path is a file", "response": "def isfile(path, **kwargs):\n \"\"\"Check if *path* is a file\"\"\"\n import os.path\n return os.path.isfile(path, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchecking if path is a directory", "response": "def isdir(path, **kwargs):\n \"\"\"Check if *path* is a directory\"\"\"\n import os.path\n return os.path.isdir(path, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef rename(oldPath, newPath, **kwargs):\n import os\n return os.rename(oldPath, newPath, **kwargs)", "response": "rename the file oldPath to newPath"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nchange ownership of path", "response": "def chown(path, user=None, group=None):\n \"\"\"change ownership of path\"\"\"\n import os\n import pwd\n import grp\n uid = pwd.getpwnam(user).pw_uid if user else -1\n gid = grp.getgrnam(group).gr_gid if group else -1\n return os.chown(path, uid, gid)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef chmod(path, mode):\n import 
os, stat\n st = os.stat(path)\n return os.chmod(path, mode)", "response": "change pernmissions of path"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the absolute path of path", "response": "def abspath(path, **kwargs):\n \"\"\"Return the absolute path of *path*\"\"\"\n import os.path\n return os.path.abspath(path, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the normalized path of path", "response": "def normalize(path, **kwargs):\n \"\"\"Return the normalized path of *path*\"\"\"\n import os.path\n return os.path.normpath(path, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef rmdir(path, recursive=True, **kwargs):\n if recursive:\n import shutil\n return shutil.rmtree(path, **kwargs)\n else:\n import os\n return os.remdir(path, **kwargs)", "response": "Remove the directory path"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef exists(path, **kwargs):\n import os.path\n return os.path.exists(path, **kwargs)", "response": "Check if file or directory exists"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef list(path='.'):\n import os\n for f in os.listdir(path):\n if isfile(join(path, f)):\n yield join(path, f) if path != '.' 
else f", "response": "generator that returns all files of path"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nfinds files that match pattern in path", "response": "def find(pattern, path='.', exclude=None, recursive=True):\n \"\"\"Find files that match *pattern* in *path*\"\"\"\n import fnmatch\n import os\n\n if recursive:\n for root, dirnames, filenames in os.walk(path):\n for pat in _to_list(pattern):\n for filename in fnmatch.filter(filenames, pat):\n filepath = join(abspath(root), filename)\n for excl in _to_list(exclude):\n if excl and fnmatch.fnmatch(filepath, excl):\n break\n else:\n yield filepath\n else:\n for pat in _to_list(pattern):\n for filename in fnmatch.filter(list(path), pat):\n filepath = join(abspath(path), filename)\n for excl in _to_list(exclude):\n if excl and fnmatch.fnmatch(filepath, excl):\n break\n else:\n yield filepath"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef finddirs(pattern, path='.', exclude=None, recursive=True):\n import fnmatch\n import os\n if recursive:\n for root, dirnames, filenames in os.walk(path):\n for pat in _to_list(pattern):\n for dirname in fnmatch.filter(dirnames, pat):\n dirpath = join(abspath(root), dirname)\n for excl in _to_list(exclude):\n if excl and fnmatch.fnmatch(dirpath, excl):\n break\n else:\n yield dirpath\n else:\n for pat in _to_list(pattern):\n for dirname in fnmatch.filter(listdirs(path), pat):\n dirpath = join(abspath(path), dirname)\n for excl in _to_list(exclude):\n if excl and fnmatch.fnmatch(dirpath, excl):\n break\n else:\n yield dirpath", "response": "Find directories that match pattern in path"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef write(path, content, encoding=\"UTF-8\", append=False, raw=False):\n mode = 'wb' if not append else 'ab'\n with OPEN_FUNC(path, mode) as _file:\n if raw:\n import 
shutil\n shutil.copyfileobj(content, _file)\n else:\n _file.write(content.encode(encoding))", "response": "Write content to file path."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef read(path, encoding=\"UTF-8\"):\n with OPEN_FUNC(path, 'rb') as _file:\n cont = _file.read()\n return cont.decode(encoding)", "response": "Read and return content from file path"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get(path):\n try:\n import cPickle as pickle\n except:\n import pickle\n\n with open(path, 'rb') as file:\n return pickle.load(file)", "response": "Read an object from file"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef put(path, obj):\n try:\n import cPickle as pickle\n except:\n import pickle\n\n with open(path, 'wb') as file:\n return pickle.dump(obj, file)", "response": "Write an object to file"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef join(*args, **kwargs):\n import os.path\n if _is_list(args[0]):\n return os.path.join(*args[0])\n return os.path.join(*args, **kwargs)", "response": "Join parts of a path together"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the extension from path", "response": "def extname(path, **kwargs):\n \"\"\"Return the extension from *path*\"\"\"\n import os.path\n name, ext = os.path.splitext(path, **kwargs)\n return ext"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn the file base name from the path *ext*", "response": "def basename(path, ext=\"\"):\n \"\"\"Return the file base name from *path*\"\"\"\n import os.path\n if ext is False:\n return os.path.basename(path).replace(extname(path), \"\")\n else:\n return os.path.basename(path).replace(ext, \"\")"} {"SOURCE": 
"codesearchnet", "instruction": "Create a Python 3 function to\nadd a suffix to a filename", "response": "def add_suffix(path, suffix=\"\"):\n \"\"\"Adds a suffix to a filename *path*\"\"\"\n return join(dirname(path), basename(path, ext=False) + suffix + extname(path))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef addpath(path):\n import sys\n if not exists(path):\n raise ValueError('Path %s does not exist' % path)\n sys.path.insert(1, path)", "response": "Add path to system path"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef simple_profile(self, annual_demand, **kwargs):\n\n # Day(am to pm), night (pm to am), week day (week),\n # weekend day (weekend)\n am = kwargs.get('am', settime(7, 00, 0))\n pm = kwargs.get('pm', settime(23, 30, 0))\n\n week = kwargs.get('week', [1, 2, 3, 4, 5])\n weekend = kwargs.get('weekend', [0, 6, 7])\n\n default_factors = {'week': {'day': 0.8, 'night': 0.6},\n 'weekend': {'day': 0.9, 'night': 0.7}}\n\n profile_factors = kwargs.get('profile_factors', default_factors)\n\n self.dataframe['ind'] = 0\n\n self.dataframe['ind'].mask(\n self.dataframe['weekday'].between_time(am, pm).isin(week),\n profile_factors['week']['day'], True)\n self.dataframe['ind'].mask(\n self.dataframe['weekday'].between_time(pm, am).isin(week),\n profile_factors['week']['night'], True)\n self.dataframe['ind'].mask(\n self.dataframe['weekday'].between_time(am, pm).isin(weekend),\n profile_factors['weekend']['day'], True)\n self.dataframe['ind'].mask(\n self.dataframe['weekday'].between_time(pm, am).isin(weekend),\n profile_factors['weekend']['night'], True)\n\n if self.dataframe['ind'].isnull().any(axis=0):\n logging.error('NAN value found in industrial load profile')\n\n time_interval = self.dataframe.index.freq.nanos / 3.6e12\n\n return (self.dataframe['ind'] / self.dataframe['ind'].sum() *\n annual_demand / time_interval)", 
"response": "Create a simple load profile for a given annual demand."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a Script object representing a contract script portion of a payment to the destination.", "response": "def script(self):\n \"\"\"Returns a Script object representing a contract script portion of a\n payment to the destination.\"\"\"\n # The script object is cached, since constructing it may involve\n # a number of string-manipulation operations.\n if getattr(self, '_script', None) is None:\n self._script = self._script__getter()\n return self._script"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncompresses a number n into a base - 10 - bit integer value.", "response": "def compress_amount(n):\n \"\"\"\\\n Compress 64-bit integer values, preferring a smaller size for whole\n numbers (base-10), so as to achieve run-length encoding gains on real-\n world data. The basic algorithm:\n * If the amount is 0, return 0\n * Divide the amount (in base units) evenly by the largest power of 10\n possible; call the exponent e (e is max 9)\n * If e<9, the last digit of the resulting number cannot be 0; store it\n as d, and drop it (divide by 10); regardless, call the result n\n * Output 1 + 10*(9*n + d - 1) + e\n * If e==9, we only know the resulting number is not zero, so output\n 1 + 10*(n - 1) + 9.\n (This is decodable, as d is in [1-9] and e is in [0-9].)\"\"\"\n if not n: return 0\n e = 0\n while (n % 10) == 0 and e < 9:\n n = n // 10\n e = e + 1\n if e < 9:\n n, d = divmod(n, 10);\n return 1 + (n*9 + d - 1)*10 + e\n else:\n return 1 + (n - 1)*10 + 9"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nundo the value compression performed by x = compress_amount ( n )", "response": "def decompress_amount(x):\n \"\"\"\\\n Undo the value compression performed by x=compress_amount(n). 
The input\n x matches one of the following patterns:\n x = n = 0\n x = 1+10*(9*n + d - 1) + e\n x = 1+10*(n - 1) + 9\"\"\"\n if not x: return 0;\n x = x - 1;\n\n # x = 10*(9*n + d - 1) + e\n x, e = divmod(x, 10);\n n = 0;\n if e < 9:\n # x = 9*n + d - 1\n x, d = divmod(x, 9)\n d = d + 1\n # x = n\n n = x*10 + d\n else:\n n = x + 1\n return n * 10**e"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef compact_from_target(target):\n bn = _BigInteger(target).serialize()\n size = len(bn)\n if size <= 3:\n word = target << 8 * (3 - size)\n else:\n word = target >> 8 * (size - 3)\n if word & 0x00800000:\n word >>= 8\n size += 1\n word |= size << 24\n if target < 0:\n word |= 0x00800000\n return word", "response": "This function returns a compact version of the given target."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef icmp(a, b):\n \"Like cmp(), but for any iterator.\"\n for xa in a:\n try:\n xb = next(b)\n d = cmp(xa, xb)\n if d: return d\n except StopIteration:\n return 1\n try:\n next(b)\n return -1\n except StopIteration:\n return 0", "response": "Like cmp(), but for any iterator."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a clone of this hash object.", "response": "def copy(self):\n \"Return a clone of this hash object.\"\n other = _ChainedHashAlgorithm(self._algorithms)\n other._hobj = deepcopy(self._hobj)\n other._fobj = deepcopy(self._fobj)\n return other"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _finalize(self):\n \"Computes _fobj, the completed hash.\"\n hobj = self._hobj\n for hashname in self._algorithms[1:]:\n fobj = hashlib.new(hashname)\n fobj.update(hobj.digest())\n hobj = fobj\n self._fobj = hobj", "response": "Computes _fobj the completed hash."} {"SOURCE": "codesearchnet", 
"instruction": "How would you explain what the following Python 3 function does\ndef update(self, *args):\n \"Appends any passed in byte arrays to the digest object.\"\n for string in args:\n self._hobj.update(string)\n self._fobj = None", "response": "Appends any passed in byte arrays to the digest object."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef digest_size(self):\n \"Returns the size (in bytes) of the resulting digest.\"\n if getattr(self, '_fobj', None) is not None:\n return self._fobj.digest_size\n else:\n return hashlib.new(self._algorithms[-1]).digest_size", "response": "Returns the size ( in bytes ) of the resulting digest."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef new(self, string=None, *args, **kwargs):\n if len(self):\n hobj = _ChainedHashAlgorithm(self, *args, **kwargs)\n else:\n hobj = _NopHashAlgorithm(*args, **kwargs)\n if string is not None:\n hobj.update(string)\n return hobj", "response": "Returns a new a\n instance."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef count(self):\n \"The number of items, pruned or otherwise, contained by this branch.\"\n if getattr(self, '_count', None) is None:\n self._count = getattr(self.node, 'count', 0)\n return self._count", "response": "The number of items pruned or otherwise contained by this branch."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef size(self):\n \"The canonical serialized size of this branch.\"\n if getattr(self, '_size', None) is None:\n self._size = getattr(self.node, 'size', 0)\n return self._size", "response": "The canonical serialized size of this branch."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a forward iterator over the trie", "response": "def 
_forward_iterator(self):\n \"Returns a forward iterator over the trie\"\n path = [(self, 0, Bits())]\n while path:\n node, idx, prefix = path.pop()\n if idx==0 and node.value is not None and not node.prune_value:\n yield (self._unpickle_key(prefix), self._unpickle_value(node.value))\n if idx<len(node.children):\n path.append((node, idx+1, prefix))\n link = node.children[idx]\n if not link.pruned:\n path.append((link.node, 0, prefix + link.prefix))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the 2 - tuple prefix node and node corresponding to the key.", "response": "def _get_node_by_key(self, key, path=None):\n \"\"\"Returns the 2-tuple (prefix, node) where node either contains the value\n corresponding to the key, or is the most specific prefix on the path which\n would contain the key if it were there. The key was found if prefix==key\n and the node.value is not None.\"\"\"\n prefix, subkey, node = Bits(), key, self\n while prefix != key:\n for idx,link in enumerate(node.children):\n if subkey.startswith(link.prefix):\n if link.pruned:\n return (prefix, node)\n subkey = subkey[len(link.prefix):]\n prefix += link.prefix\n if path is not None:\n path.append((node, idx, link.prefix))\n node = link.node\n break\n else:\n break\n return (prefix, node)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nupdating x from trie / dict E or iterable F", "response": "def update(self, other=None, **kwargs):\n \"\"\"x.update(E, **F) -> None. 
update x from trie/dict/iterable E or F.\n If E has a .keys() method, does: for k in E: x[k] = E[k]\n If E lacks .keys() method, does: for (k, v) in E: x[k] = v\n In either case, this is followed by: for k in F: x[k] = F[k]\"\"\"\n if other is None:\n other = ()\n\n if hasattr(other, 'keys'):\n for key in other:\n self._update(key, other[key])\n else:\n for key,value in other:\n self._update(key, value)\n\n for key,value in six.iteritems(kwargs):\n self._update(key, value)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef trim(self, prefixes):\n \"Prunes any keys beginning with the specified the specified prefixes.\"\n _prefixes, prefixes = set(map(lambda k:self._prepare_key(k), prefixes)), list()\n for t in lookahead(sorted(_prefixes)):\n if t[1] is not None:\n if t[0] == commonprefix(t):\n continue\n prefixes.append(t[0])\n length = 0\n for prefix in prefixes:\n length += self._trim(prefix)\n return length", "response": "Prunes any keys beginning with the specified the specified prefixes."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nwrites a compressed representation of script to the Pickler s file object.", "response": "def dump(self, script, file=None):\n \"Write a compressed representation of script to the Pickler's file object.\"\n if file is None:\n file = self._file\n self._dump(script, file, self._protocol, self._version)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a compressed representation of script as a binary string.", "response": "def dumps(self, script):\n \"Return a compressed representation of script as a binary string.\"\n string = BytesIO()\n self._dump(script, string, self._protocol, self._version)\n return string.getvalue()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef load(self, file=None):\n \"Read and decompress a compact script from the 
Pickler's file object.\"\n if file is None:\n file = self._file\n script_class = self.get_script_class()\n script = self._load(file, self._protocol, self._version)\n return script_class(script)", "response": "Read and decompress a compact script from the Pickler s file object."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndecompress the passed - in compact script and return the result.", "response": "def loads(self, string):\n \"Decompress the passed-in compact script and return the result.\"\n script_class = self.get_script_class()\n script = self._load(BytesIO(string), self._protocol, self._version)\n return script_class(script)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef merkle(hashes, func=_merkle_hash256):\n # We use append to duplicate the final item in the iterable of hashes, so\n # we need hashes to be a list-like object, regardless of what we were\n # passed.\n hashes = list(iter(hashes))\n # If the passed-in iterable is empty, allow the constructor to choose our\n # return value:\n if not hashes:\n return func()\n # We must make sure the constructor/compressor is called for the case of\n # a single item as well, in which case the loop below is not entered.\n if len(hashes) == 1:\n return func(*hashes)\n # Build up successive layers of the binary hash tree, starting from the\n # bottom. We've reached the root node when the list has been reduced to\n # one element.\n while len(hashes) > 1:\n # For reasons lost to time, Satoshi decided that any traversal though\n # a bitcoin hash tree will have the same number steps. 
This is because\n # the last element is repeated when there is an odd number of elements\n # in level, resulting in the right portion of the binary tree being\n # extended into a full tower.\n hashes.append(hashes[-1])\n # By creating an iterator and then duplicating it, we cause two items\n # to be pulled out of the hashes array each time through the generator.\n # The last element is ignored if there is an odd number of elements\n # (meaning there was originally an even number, because of the append\n # operation above).\n hashes = list(func(l,r) for l,r in zip(*(iter(hashes),)*2))\n # Return the root node of the Merkle tree to the caller.\n return hashes[0]", "response": "Convert an iterable of hashes or hashable objects into a binary tree using a passed - in constructor or compression\n function. The default compressor is\n the hash256 function resulting in root - hash for the entire tree."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef iteritems(self):\n \"x.iteritems() -> an iterator over the (key, value) items of x in sorted order\"\n left_is_hash = isinstance(self.left, numbers.Integral)\n right_is_hash = isinstance(self.right, numbers.Integral)\n if all([left_is_hash, right_is_hash]) or self.right is None:\n if all((self.size>=1, self.length, self.prune is not self.LEFT_NODE)):\n yield(0, self.left)\n if all((self.size>=2, self.length, self.prune is not self.RIGHT_NODE)):\n yield(1, self.right)\n if self.left is not None and not left_is_hash:\n for idx,hash_ in self.left.iteritems():\n yield(idx, hash_)\n if self.right is not None and not right_is_hash:\n offset = 2 ** ((self.size-1).bit_length() - 1)\n for idx,hash_ in self.right.iteritems():\n yield(offset+idx, hash_)", "response": "x. 
iteritems() -> an iterator over the ( key value ) items of x in sorted order"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _get_by_index(self, index, path=None):\n left_size = 2 ** ((self.size-1).bit_length() - 1)\n if index < 0 or self.size <= index:\n return (None, self, index)\n if index < left_size:\n if self.prune is self.LEFT_NODE:\n return (None, self, index)\n if isinstance(self.left, numbers.Integral):\n return (self.left, self, index)\n if path is not None:\n path.append((node, index, self.LEFT_NODE))\n return self.left._get_by_index(index, path)\n else:\n if self.prune is self.RIGHT_NODE:\n return (None, self, index)\n if isinstance(self.right, numbers.Integral):\n return (self.right, self, index)\n if path is not None:\n path.append((node, index, self.RIGHT_NODE))\n return self.right._get_by_index(index-left_size, path)", "response": "Returns the 2 - tuple node idx where node is either a terminal leaf\n node containing the value of that index in either left or right slot and node is the most specific node on the path which would contain the index."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nencode bytes to a base58 - encoded string.", "response": "def b58encode(b, errors='strict'):\n \"Encode bytes to a base58-encoded string.\"\n len_ = len(b)\n\n # Convert big-endian bytes to integer\n n = BigInteger.deserialize(BytesIO(b), len_)\n\n # Divide that integer into base58\n res = []\n while n > 0:\n n, r = divmod (n, 58)\n res.append(b58digits[r])\n res = ''.join(res[::-1])\n\n # Encode leading zeros as base58 zeros\n pad = 0\n for c in b:\n if c == six.int2byte(0): pad += 1\n else: break\n return (b58digits[0] * pad + res, len_)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndecode a base58 - encoding string returning bytes.", "response": "def b58decode(s, errors='strict'):\n \"Decode a base58-encoding string, 
returning bytes.\"\n if not s:\n return (b'', 0)\n\n # Convert the string to an integer\n n = 0\n for c in s:\n n *= 58\n if c not in b58digits:\n raise InvalidBase58Error(u\"character %r is not a valid base58 \"\n u\"character\" % c)\n digit = b58digits.index(c)\n n += digit\n\n # Convert the integer to bytes\n res = BigInteger(n).serialize((n.bit_length()+7)//8 or 1)\n\n # Add padding back.\n pad = 0\n for c in s[:-1]:\n if c == b58digits[0]: pad += 1\n else: break\n return (b'\\x00' * pad + res, len(s))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef info(torrent_path):\n\n my_torrent = Torrent.from_file(torrent_path)\n\n size = my_torrent.total_size\n\n click.secho('Name: %s' % my_torrent.name, fg='blue')\n click.secho('Files:')\n for file_tuple in my_torrent.files:\n click.secho(file_tuple.name)\n\n click.secho('Hash: %s' % my_torrent.info_hash, fg='blue')\n click.secho('Size: %s (%s)' % (humanize_filesize(size), size), fg='blue')\n click.secho('Magnet: %s' % my_torrent.get_magnet(), fg='yellow')", "response": "Print out information from. torrent file."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create(source, dest, tracker, open_trackers, comment, cache):\n\n source_title = path.basename(source).replace('.', '_').replace(' ', '_')\n dest = '%s.torrent' % path.join(dest, source_title)\n\n click.secho('Creating torrent from %s ...' % source)\n\n my_torrent = Torrent.create_from(source)\n\n if comment:\n my_torrent.comment = comment\n\n urls = []\n\n if tracker:\n urls = tracker.split(',')\n\n if open_trackers:\n click.secho('Fetching an up-to-date open tracker list ...')\n try:\n urls.extend(get_open_trackers_from_remote())\n except RemoteDownloadError:\n click.secho('Failed. 
Using built-in open tracker list.', fg='red', err=True)\n urls.extend(get_open_trackers_from_local())\n\n if urls:\n my_torrent.announce_urls = urls\n\n my_torrent.to_file(dest)\n\n click.secho('Torrent file created: %s' % dest, fg='green')\n click.secho('Torrent info hash: %s' % my_torrent.info_hash, fg='blue')\n\n if cache:\n click.secho('Uploading to %s torrent cache service ...')\n try:\n result = upload_to_cache_server(dest)\n click.secho('Cached torrent URL: %s' % result, fg='yellow')\n\n except RemoteUploadError as e:\n click.secho('Failed: %s' % e, fg='red', err=True)", "response": "Create a new torrent file from a single file or a directory."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef files(self):\n files = []\n info = self._struct.get('info')\n\n if not info:\n return files\n\n if 'files' in info:\n base = info['name']\n\n for f in info['files']:\n files.append(TorrentFile(join(base, *f['path']), f['length']))\n\n else:\n files.append(TorrentFile(info['name'], info['length']))\n\n return files", "response": "Files in torrent.\n\n List of namedtuples (filepath, size).\n\n :rtype: list[TorrentFile]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef info_hash(self):\n info = self._struct.get('info')\n\n if not info:\n return None\n\n return sha1(Bencode.encode(info)).hexdigest()", "response": "Hash of torrent file info section. 
Also known as torrent hash."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nlist of lists of announce URLs.", "response": "def announce_urls(self):\n \"\"\"List of lists of announce (tracker) URLs.\n\n First inner list is considered as primary announcers list,\n the following lists as back-ups.\n\n http://bittorrent.org/beps/bep_0012.html\n\n \"\"\"\n urls = self._struct.get('announce-list')\n\n if not urls:\n urls = self._struct.get('announce')\n if not urls:\n return []\n urls = [[urls]]\n\n return urls"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn torrent magnet link consisting of BTIH Info Hash and optional other information.", "response": "def get_magnet(self, detailed=True):\n \"\"\"Returns torrent magnet link, consisting of BTIH (BitTorrent Info Hash) URN\n anr optional other information.\n\n :param bool|list|tuple|set detailed:\n For boolean - whether additional info (such as trackers) should be included.\n For iterable - expected allowed parameter names:\n tr - trackers\n ws - webseeds\n\n \"\"\"\n result = 'magnet:?xt=urn:btih:' + self.info_hash\n\n def add_tr():\n urls = self.announce_urls\n if not urls:\n return\n\n trackers = []\n\n urls = urls[0] # Only primary announcers are enough.\n for url in urls:\n trackers.append(('tr', url))\n\n if trackers:\n return urlencode(trackers)\n\n def add_ws():\n webseeds = [('ws', url) for url in self.webseeds]\n if webseeds:\n return urlencode(webseeds)\n\n params_map = {\n 'tr': add_tr,\n 'ws': add_ws,\n }\n\n if detailed:\n details = []\n\n if isinstance(detailed, _ITERABLE_TYPES):\n requested_params = detailed\n else:\n requested_params = params_map.keys()\n\n for param in requested_params:\n param_val = params_map[param]()\n param_val and details.append(param_val)\n\n if details:\n result += '&%s' % '&'.join(details)\n\n return result"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nwrites 
the Torrent object into file.", "response": "def to_file(self, filepath=None):\n \"\"\"Writes Torrent object into file, either\n\n :param filepath:\n \"\"\"\n if filepath is None and self._filepath is None:\n raise TorrentError('Unable to save torrent to file: no filepath supplied.')\n\n if filepath is not None:\n self._filepath = filepath\n\n with open(self._filepath, mode='wb') as f:\n f.write(self.to_string())"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef create_from(cls, src_path):\n is_dir = isdir(src_path)\n target_files, size_data = cls._get_target_files_info(src_path)\n\n SIZE_MIN = 32768 # 32 KiB\n SIZE_DEFAULT = 262144 # 256 KiB\n SIZE_MAX = 1048576 # 1 MiB\n\n CHUNKS_MIN = 1000 # todo use those limits as advised\n CHUNKS_MAX = 2200\n\n size_piece = SIZE_MIN\n if size_data > SIZE_MIN:\n size_piece = SIZE_DEFAULT\n\n if size_piece > SIZE_MAX:\n size_piece = SIZE_MAX\n\n def read(filepath):\n with open(filepath, 'rb') as f:\n while True:\n chunk = f.read(size_piece - len(pieces_buffer))\n chunk_size = len(chunk)\n if chunk_size == 0:\n break\n yield chunk\n\n pieces = bytearray()\n pieces_buffer = bytearray()\n\n for fpath, _, _ in target_files:\n for chunk in read(fpath):\n pieces_buffer += chunk\n\n if len(pieces_buffer) == size_piece:\n pieces += sha1(pieces_buffer).digest()[:20]\n pieces_buffer = bytearray()\n\n if len(pieces_buffer):\n pieces += sha1(pieces_buffer).digest()[:20]\n pieces_buffer = bytearray()\n\n info = {\n 'name': basename(src_path),\n 'pieces': bytes(pieces),\n 'piece length': size_piece,\n }\n\n if is_dir:\n files = []\n\n for _, length, path in target_files:\n files.append({'length': length, 'path': path})\n\n info['files'] = files\n\n else:\n info['length'] = target_files[0][1]\n\n torrent = cls({'info': info})\n torrent.created_by = get_app_version()\n torrent.creation_date = datetime.utcnow()\n\n return torrent", "response": "Returns a Torrent object 
created from a file or a directory."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef from_file(cls, filepath):\n torrent = cls(Bencode.read_file(filepath))\n torrent._filepath = filepath\n return torrent", "response": "Alternative constructor to get Torrent object from file."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef humanize_filesize(bytes_size):\n if not bytes_size:\n return '0 B'\n \n names = ('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB')\n\n name_idx = int(math.floor(math.log(bytes_size, 1024)))\n size = round(bytes_size / math.pow(1024, name_idx), 2)\n\n return '%s %s' % (size, names[name_idx])", "response": "Returns human readable filesize."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef upload_to_cache_server(fpath):\n url_base = 'http://torrage.info'\n url_upload = '%s/autoupload.php' % url_base\n url_download = '%s/torrent.php?h=' % url_base\n file_field = 'torrent'\n\n try:\n import requests\n\n response = requests.post(url_upload, files={file_field: open(fpath, 'rb')}, timeout=REMOTE_TIMEOUT)\n response.raise_for_status()\n\n info_cache = response.text\n return url_download + info_cache\n\n except (ImportError, requests.RequestException) as e:\n\n # Now trace is lost. `raise from` to consider.\n raise RemoteUploadError('Unable to upload to %s: %s' % (url_upload, e))", "response": "Uploads a. torrent file to a cache server. 
Returns the url of the uploaded file."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_open_trackers_from_remote():\n\n url_base = 'https://raw.githubusercontent.com/idlesign/torrentool/master/torrentool/repo'\n url = '%s/%s' % (url_base, OPEN_TRACKERS_FILENAME)\n\n try:\n import requests\n\n response = requests.get(url, timeout=REMOTE_TIMEOUT)\n response.raise_for_status()\n\n open_trackers = response.text.splitlines()\n\n except (ImportError, requests.RequestException) as e:\n\n # Now trace is lost. `raise from` to consider.\n raise RemoteDownloadError('Unable to download from %s: %s' % (url, e))\n\n return open_trackers", "response": "Returns open trackers announce URLs list from remote repo."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_open_trackers_from_local():\n with open(path.join(path.dirname(__file__), 'repo', OPEN_TRACKERS_FILENAME)) as f:\n open_trackers = map(str.strip, f.readlines())\n\n return list(open_trackers)", "response": "Returns open trackers announce URLs list from local backup."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nencoding a Python object into a bencoded bytes.", "response": "def encode(cls, value):\n \"\"\"Encodes a value into bencoded bytes.\n\n :param value: Python object to be encoded (str, int, list, dict).\n :param str val_encoding: Encoding used by strings in a given object.\n :rtype: bytes\n \"\"\"\n val_encoding = 'utf-8'\n\n def encode_str(v):\n try:\n v_enc = encode(v, val_encoding)\n\n except UnicodeDecodeError:\n if PY3:\n raise\n else:\n # Suppose bytestring\n v_enc = v\n\n prefix = encode('%s:' % len(v_enc), val_encoding)\n return prefix + v_enc\n\n def encode_(val):\n if isinstance(val, str_type):\n result = encode_str(val)\n\n elif isinstance(val, int_types):\n result = encode(('i%se' % val), val_encoding)\n\n elif isinstance(val, (list, set, 
tuple)):\n result = encode('l', val_encoding)\n for item in val:\n result += encode_(item)\n result += encode('e', val_encoding)\n\n elif isinstance(val, dict):\n result = encode('d', val_encoding)\n\n # Dictionaries are expected to be sorted by key.\n for k, v in OrderedDict(sorted(val.items(), key=itemgetter(0))).items():\n result += (encode_str(k) + encode_(v))\n\n result += encode('e', val_encoding)\n\n elif isinstance(val, byte_types):\n result = encode('%s:' % len(val), val_encoding)\n result += val\n\n else:\n raise BencodeEncodingError('Unable to encode `%s` %s' % (type(val), val))\n\n return result\n\n return encode_(value)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef decode(cls, encoded):\n def create_dict(items):\n # Let's guarantee that dictionaries are sorted.\n k_v_pair = zip(*[iter(items)] * 2)\n return OrderedDict(sorted(k_v_pair, key=itemgetter(0)))\n\n def create_list(items):\n return list(items)\n\n stack_items = []\n stack_containers = []\n\n def compress_stack():\n target_container = stack_containers.pop()\n subitems = []\n\n while True:\n subitem = stack_items.pop()\n subitems.append(subitem)\n if subitem is target_container:\n break\n\n container_creator = subitems.pop()\n container = container_creator(reversed(subitems))\n stack_items.append(container)\n\n def parse_forward(till_char, sequence):\n number = ''\n char_sub_idx = 0\n\n for char_sub_idx, char_sub in enumerate(sequence):\n char_sub = chr_(char_sub)\n if char_sub == till_char:\n break\n\n number += char_sub\n\n number = int(number or 0)\n char_sub_idx += 1\n\n return number, char_sub_idx\n\n while encoded:\n char = encoded[0]\n char = chr_(char)\n\n if char == 'd': # Dictionary\n stack_items.append(create_dict)\n stack_containers.append(create_dict)\n encoded = encoded[1:]\n\n elif char == 'l': # List\n stack_items.append(create_list)\n stack_containers.append(create_list)\n encoded = encoded[1:]\n\n elif char == 'i': # Integer\n 
number, char_sub_idx = parse_forward('e', encoded[1:])\n char_sub_idx += 1\n\n stack_items.append(number)\n encoded = encoded[char_sub_idx:]\n\n elif char.isdigit(): # String\n str_len, char_sub_idx = parse_forward(':', encoded)\n last_char_idx = char_sub_idx + str_len\n\n string = encoded[char_sub_idx:last_char_idx]\n try:\n string = string.decode('utf-8')\n except UnicodeDecodeError:\n # Considered bytestring (e.g. `pieces` hashes concatenation).\n pass\n\n stack_items.append(string)\n encoded = encoded[last_char_idx:]\n\n elif char == 'e': # End of a dictionary or a list.\n compress_stack()\n encoded = encoded[1:]\n\n else:\n raise BencodeDecodingError('Unable to interpret `%s` char.' % char)\n\n if len(stack_items) == 1:\n stack_items = stack_items.pop()\n\n return stack_items", "response": "Decodes bencoded data introduced as bytes."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ndecoding a given bencoded string or bytestring.", "response": "def read_string(cls, string):\n \"\"\"Decodes a given bencoded string or bytestring.\n\n Returns decoded structure(s).\n\n :param str string:\n :rtype: list\n \"\"\"\n if PY3 and not isinstance(string, byte_types):\n string = string.encode()\n\n return cls.decode(string)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ndecode bencoded data of a given file.", "response": "def read_file(cls, filepath):\n \"\"\"Decodes bencoded data of a given file.\n\n Returns decoded structure(s).\n\n :param str filepath:\n :rtype: list\n \"\"\"\n with open(filepath, mode='rb') as f:\n contents = f.read()\n return cls.decode(contents)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate csp header string", "response": "def create_csp_header(cspDict):\n\t\"\"\" create csp header string \"\"\"\n\tpolicy = ['%s %s' % (k, v) for k, v in cspDict.items() if v != '']\n\treturn '; '.join(policy)"} {"SOURCE": "codesearchnet", "instruction": "Can you 
tell what is the following Python 3 function doing\ndef csp_header(csp={}):\n _csp = csp_default().read()\n _csp.update(csp)\n \n _header = ''\n if 'report-only' in _csp and _csp['report-only'] is True:\n \t_header = 'Content-Security-Policy-Report-Only'\n else:\n \t_header = 'Content-Security-Policy'\n if 'report-only' in _csp:\n \tdel _csp['report-only']\n _headers = {_header: create_csp_header(_csp)}\n \n def decorator(f):\n @wraps(f)\n def decorated_function(*args, **kwargs):\n resp = make_response(f(*args, **kwargs))\n h = resp.headers\n for header, value in _headers.items():\n h[header] = value\n return resp\n return decorated_function\n return decorator", "response": "Decorator to include csp header on app. route wrapper"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nread default csp settings from json file", "response": "def read(self):\n\t\t\"\"\" read default csp settings from json file \"\"\"\n\t\twith open(self.default_file) as json_file:\n\t\t\ttry:\n\t\t\t\treturn json.load(json_file)\n\t\t\texcept Exception as e:\n\t\t\t\traise 'empty file'"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef update(self,updates={}):\n\t\ttry:\n\t\t\tcsp = self.read()\n\t\texcept:\n\t\t\tcsp = {'default-src':\"'self'\"}\n\t\t\tself.write(csp)\n\t\tcsp.update(updates)\n\t\tself.write(csp)", "response": "update csp_default. 
json with update dict"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nobtains existing ImageSet or create a new one if none is specified.", "response": "def get_image_set(self):\n \"\"\"\n Obtain existing ImageSet if `pk` is specified, otherwise\n create a new ImageSet for the user.\n \"\"\"\n image_set_pk = self.kwargs.get(\"pk\", None)\n if image_set_pk is None:\n return self.request.user.image_sets.create()\n return get_object_or_404(self.get_queryset(), pk=image_set_pk)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nwraps the Tornado web application initialization so that the TornadoTracing instance is created around an OpenTracing-compatible tracer.", "response": "def tracer_config(__init__, app, args, kwargs):\n \"\"\"\n Wraps the Tornado web application initialization so that the\n TornadoTracing instance is created around an OpenTracing-compatible tracer.\n \"\"\"\n __init__(*args, **kwargs)\n\n tracing = app.settings.get('opentracing_tracing')\n tracer_callable = app.settings.get('opentracing_tracer_callable')\n tracer_parameters = app.settings.get('opentracing_tracer_parameters', {})\n\n if tracer_callable is not None:\n if not callable(tracer_callable):\n tracer_callable = _get_callable_from_name(tracer_callable)\n\n tracer = tracer_callable(**tracer_parameters)\n tracing = TornadoTracing(tracer)\n\n if tracing is None:\n tracing = TornadoTracing() # fallback to the global tracer\n\n app.settings['opentracing_tracing'] = tracing\n\n tracing._trace_all = app.settings.get('opentracing_trace_all',\n DEFAULT_TRACE_ALL)\n tracing._trace_client = app.settings.get('opentracing_trace_client',\n DEFAULT_TRACE_CLIENT)\n tracing._start_span_cb = app.settings.get('opentracing_start_span_cb',\n None)\n\n httpclient._set_tracing_enabled(tracing._trace_client)\n if tracing._trace_client:\n httpclient._set_tracing_info(tracing._tracer_obj,\n tracing._start_span_cb)"} {"SOURCE": 
"codesearchnet", "instruction": "How would you code a function in Python 3 to\nfunction decorator that traces functions NOTE: Must be placed before the Tornado decorators @param attributes any number of request attributes (strings) to be set as tags on the created span", "response": "def trace(self, *attributes):\n \"\"\"\n Function decorator that traces functions\n NOTE: Must be placed before the Tornado decorators\n @param attributes any number of request attributes\n (strings) to be set as tags on the created span\n \"\"\"\n\n @wrapt.decorator\n def wrapper(wrapped, instance, args, kwargs):\n if self._trace_all:\n return wrapped(*args, **kwargs)\n\n handler = instance\n\n with tracer_stack_context():\n try:\n self._apply_tracing(handler, list(attributes))\n\n # Run the actual function.\n result = wrapped(*args, **kwargs)\n\n # if it has `add_done_callback` it's a Future,\n # else, a normal method/function.\n if callable(getattr(result, 'add_done_callback', None)):\n callback = functools.partial(\n self._finish_tracing_callback,\n handler=handler)\n result.add_done_callback(callback)\n else:\n self._finish_tracing(handler)\n\n except Exception as exc:\n self._finish_tracing(handler, error=exc)\n raise\n\n return result\n\n return wrapper"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a new span from the request with logged attributes and correct operation name from the func.", "response": "def _apply_tracing(self, handler, attributes):\n \"\"\"\n Helper function to avoid rewriting for middleware and decorator.\n Returns a new span from the request with logged attributes and\n correct operation name from the func.\n \"\"\"\n operation_name = self._get_operation_name(handler)\n headers = handler.request.headers\n request = handler.request\n\n # start new span from trace info\n try:\n span_ctx = self._tracer.extract(opentracing.Format.HTTP_HEADERS,\n headers)\n scope = self._tracer.start_active_span(operation_name,\n 
child_of=span_ctx)\n except (opentracing.InvalidCarrierException,\n opentracing.SpanContextCorruptedException):\n scope = self._tracer.start_active_span(operation_name)\n\n # add span to current spans\n setattr(request, SCOPE_ATTR, scope)\n\n # log any traced attributes\n scope.span.set_tag(tags.COMPONENT, 'tornado')\n scope.span.set_tag(tags.SPAN_KIND, tags.SPAN_KIND_RPC_SERVER)\n scope.span.set_tag(tags.HTTP_METHOD, request.method)\n scope.span.set_tag(tags.HTTP_URL, request.uri)\n\n for attr in attributes:\n if hasattr(request, attr):\n payload = str(getattr(request, attr))\n if payload:\n scope.span.set_tag(attr, payload)\n\n # invoke the start span callback, if any\n self._call_start_span_cb(scope.span, request)\n\n return scope"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nwraps the handler s _execute method to trace incoming requests and apply tracing to the context of the incoming requests.", "response": "def execute(func, handler, args, kwargs):\n \"\"\"\n Wrap the handler ``_execute`` method to trace incoming requests,\n extracting the context from the headers, if available.\n \"\"\"\n tracing = handler.settings.get('opentracing_tracing')\n\n with tracer_stack_context():\n if tracing._trace_all:\n attrs = handler.settings.get('opentracing_traced_attributes', [])\n tracing._apply_tracing(handler, attrs)\n\n return func(*args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef on_finish(func, handler, args, kwargs):\n tracing = handler.settings.get('opentracing_tracing')\n tracing._finish_tracing(handler)\n\n return func(*args, **kwargs)", "response": "Wrap the handler s on_finish method to finish the Span for the\n given request."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef log_exception(func, handler, args, kwargs):\n # safe-guard: expected arguments -> log_exception(self, typ, value, 
tb)\n value = args[1] if len(args) == 3 else None\n if value is None:\n return func(*args, **kwargs)\n\n tracing = handler.settings.get('opentracing_tracing')\n if not isinstance(value, HTTPError) or 500 <= value.status_code <= 599:\n tracing._finish_tracing(handler, error=value)\n\n return func(*args, **kwargs)", "response": "Wrap the handler log_exception method to finish the Span for the given request."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef schema_root(self) -> \"SchemaTreeNode\":\n sn = self\n while sn.parent:\n sn = sn.parent\n return sn", "response": "Return the root node of the receiver s schema."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef content_type(self) -> ContentType:\n return self._ctype if self._ctype else self.parent.content_type()", "response": "Return receiver s content type."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the closest ancestor data node.", "response": "def data_parent(self) -> Optional[\"InternalNode\"]:\n \"\"\"Return the closest ancestor data node.\"\"\"\n parent = self.parent\n while parent:\n if isinstance(parent, DataNode):\n return parent\n parent = parent.parent"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef iname(self) -> InstanceName:\n dp = self.data_parent()\n return (self.name if dp and self.ns == dp.ns\n else self.ns + \":\" + self.name)", "response": "Return the instance name corresponding to the receiver."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef data_path(self) -> DataPath:\n dp = self.data_parent()\n return (dp.data_path() if dp else \"\") + \"/\" + self.iname()", "response": "Return the receiver s data path."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning 
dictionary of receiver s properties suitable for clients.", "response": "def _node_digest(self) -> Dict[str, Any]:\n \"\"\"Return dictionary of receiver's properties suitable for clients.\"\"\"\n res = {\"kind\": self._yang_class()}\n if self.mandatory:\n res[\"mandatory\"] = True\n if self.description:\n res[\"description\"] = self.description\n return res"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _iname2qname(self, iname: InstanceName) -> QualName:\n p, s, loc = iname.partition(\":\")\n return (loc, p) if s else (p, self.ns)", "response": "Translate instance name to qualified name in the receiver s context."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ndispatch actions for substatements of stmt.", "response": "def _handle_substatements(self, stmt: Statement, sctx: SchemaContext) -> None:\n \"\"\"Dispatch actions for substatements of `stmt`.\"\"\"\n for s in stmt.substatements:\n if s.prefix:\n key = (\n sctx.schema_data.modules[sctx.text_mid].prefix_map[s.prefix][0]\n + \":\" + s.keyword)\n else:\n key = s.keyword\n mname = SchemaNode._stmt_callback.get(key, \"_noop\")\n method = getattr(self, mname)\n method(s, sctx)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the data node referred to by a leafref path.", "response": "def _follow_leafref(\n self, xpath: \"Expr\", init: \"TerminalNode\") -> Optional[\"DataNode\"]:\n \"\"\"Return the data node referred to by a leafref path.\n\n Args:\n xpath: XPath expression compiled from a leafref path.\n init: initial context node\n \"\"\"\n if isinstance(xpath, LocationPath):\n lft = self._follow_leafref(xpath.left, init)\n if lft is None:\n return None\n return lft._follow_leafref(xpath.right, init)\n elif isinstance(xpath, Step):\n if xpath.axis == Axis.parent:\n return self.data_parent()\n elif xpath.axis == Axis.child:\n if isinstance(self, InternalNode) and 
xpath.qname:\n qname = (xpath.qname if xpath.qname[1]\n else (xpath.qname[0], init.ns))\n return self.get_data_child(*qname)\n elif isinstance(xpath, Root):\n return self.schema_root()\n return None"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _tree_line(self, no_type: bool = False) -> str:\n return self._tree_line_prefix() + \" \" + self.iname()", "response": "Return the receiver s contribution to tree diagram."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting NACM default access.", "response": "def _nacm_default_deny_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:\n \"\"\"Set NACM default access.\"\"\"\n if not hasattr(self, 'default_deny'):\n return\n if stmt.keyword == \"default-deny-all\":\n self.default_deny = DefaultDeny.all\n elif stmt.keyword == \"default-deny-write\":\n self.default_deny = DefaultDeny.write"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_child(self, name: YangIdentifier,\n ns: YangIdentifier = None) -> Optional[SchemaNode]:\n \"\"\"Return receiver's schema child.\n\n Args:\n name: Child's name.\n ns: Child's namespace (= `self.ns` if absent).\n \"\"\"\n ns = ns if ns else self.ns\n todo = []\n for child in self.children:\n if child.name is None:\n todo.append(child)\n elif child.name == name and child.ns == ns:\n return child\n for c in todo:\n grandchild = c.get_child(name, ns)\n if grandchild is not None:\n return grandchild", "response": "Return receiver s schema child."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_schema_descendant(\n self, route: SchemaRoute) -> Optional[SchemaNode]:\n \"\"\"Return descendant schema node or ``None`` if not found.\n\n Args:\n route: Schema route to the descendant node\n (relative to the receiver).\n \"\"\"\n node = self\n for p in route:\n node = 
node.get_child(*p)\n if node is None:\n return None\n return node", "response": "Returns the descendant schema node or None if not found."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_data_child(self, name: YangIdentifier,\n ns: YangIdentifier = None) -> Optional[\"DataNode\"]:\n \"\"\"Return data node directly under the receiver.\"\"\"\n ns = ns if ns else self.ns\n todo = []\n for child in self.children:\n if child.name == name and child.ns == ns:\n if isinstance(child, DataNode):\n return child\n todo.insert(0, child)\n elif not isinstance(child, DataNode):\n todo.append(child)\n for c in todo:\n res = c.get_data_child(name, ns)\n if res:\n return res", "response": "Return data node directly under the receiver."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn receiver s children based on content type.", "response": "def filter_children(self, ctype: ContentType = None) -> List[SchemaNode]:\n \"\"\"Return receiver's children based on content type.\n\n Args:\n ctype: Content type.\n \"\"\"\n if ctype is None:\n ctype = self.content_type()\n return [c for c in self.children if\n not isinstance(c, (RpcActionNode, NotificationNode)) and\n c.content_type().value & ctype.value != 0]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef data_children(self) -> List[\"DataNode\"]:\n res = []\n for child in self.children:\n if isinstance(child, DataNode):\n res.append(child)\n elif not isinstance(child, SchemaTreeNode):\n res.extend(child.data_children())\n return res", "response": "Return the set of all data nodes directly under the receiver."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\noverriding the superclass method.", "response": "def from_raw(self, rval: RawObject, jptr: JSONPointer = \"\") -> ObjectValue:\n \"\"\"Override the superclass method.\"\"\"\n if not isinstance(rval, 
dict):\n raise RawTypeError(jptr, \"object\")\n res = ObjectValue()\n for qn in rval:\n if qn.startswith(\"@\"):\n if qn != \"@\":\n tgt = qn[1:]\n if tgt not in rval:\n raise MissingAnnotationTarget(jptr, tgt)\n jptr += '/' + tgt\n res[qn] = self._process_metadata(rval[qn], jptr)\n else:\n cn = self._iname2qname(qn)\n ch = self.get_data_child(*cn)\n npath = jptr + \"/\" + qn\n if ch is None:\n raise RawMemberError(npath)\n res[ch.iname()] = ch.from_raw(rval[qn], npath)\n return res"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _validate(self, inst: \"InstanceNode\", scope: ValidationScope,\n ctype: ContentType) -> None:\n \"\"\"Extend the superclass method.\"\"\"\n if scope.value & ValidationScope.syntax.value: # schema\n self._check_schema_pattern(inst, ctype)\n for m in inst:\n inst._member(m).validate(scope, ctype)\n super()._validate(inst, scope, ctype)", "response": "Extend the superclass method."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the set of instance names under the receiver.", "response": "def _child_inst_names(self) -> Set[InstanceName]:\n \"\"\"Return the set of instance names under the receiver.\"\"\"\n return frozenset([c.iname() for c in self.data_children()])"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nbuilds the schema pattern for the receiver and its data descendants.", "response": "def _make_schema_patterns(self) -> None:\n \"\"\"Build schema pattern for the receiver and its data descendants.\"\"\"\n self.schema_pattern = self._schema_pattern()\n for dc in self.data_children():\n if isinstance(dc, InternalNode):\n dc._make_schema_patterns()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding child node to the receiver and handle substatements.", "response": "def _handle_child(\n self, node: SchemaNode, stmt: Statement, sctx: SchemaContext) -> None:\n \"\"\"Add child node 
to the receiver and handle substatements.\"\"\"\n if not sctx.schema_data.if_features(stmt, sctx.text_mid):\n return\n node.name = stmt.argument\n node.ns = sctx.default_ns\n node._get_description(stmt)\n self._add_child(node)\n node._handle_substatements(stmt, sctx)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _augment_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:\n if not sctx.schema_data.if_features(stmt, sctx.text_mid):\n return\n path = sctx.schema_data.sni2route(stmt.argument, sctx)\n target = self.get_schema_descendant(path)\n if stmt.find1(\"when\"):\n gr = GroupNode()\n target._add_child(gr)\n target = gr\n target._handle_substatements(stmt, sctx)", "response": "Handle an augment statement."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nhandles the refine statement.", "response": "def _refine_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:\n \"\"\"Handle **refine** statement.\"\"\"\n target = self.get_schema_descendant(\n sctx.schema_data.sni2route(stmt.argument, sctx))\n if not sctx.schema_data.if_features(stmt, sctx.text_mid):\n target.parent.children.remove(target)\n else:\n target._handle_substatements(stmt, sctx)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nhandling leaf - list statement.", "response": "def _leaf_list_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:\n \"\"\"Handle leaf-list statement.\"\"\"\n node = LeafListNode()\n node.type = DataType._resolve_type(\n stmt.find1(\"type\", required=True), sctx)\n self._handle_child(node, stmt, sctx)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _rpc_action_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:\n self._handle_child(RpcActionNode(), stmt, sctx)", "response": "Handle RPC or action statement."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function 
in Python 3, explain what it does\ndef _ascii_tree(self, indent: str, no_types: bool, val_count: bool) -> str:\n def suffix(sn):\n return f\" {{{sn.val_count}}}\\n\" if val_count else \"\\n\"\n if not self.children:\n return \"\"\n cs = []\n for c in self.children:\n cs.extend(c._flatten())\n cs.sort(key=lambda x: x.qual_name)\n res = \"\"\n for c in cs[:-1]:\n res += (indent + c._tree_line(no_types) + suffix(c) +\n c._ascii_tree(indent + \"| \", no_types, val_count))\n return (res + indent + cs[-1]._tree_line(no_types) + suffix(cs[-1]) +\n cs[-1]._ascii_tree(indent + \" \", no_types, val_count))", "response": "Return the receiver s subtree as ASCII art."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef orphan_instance(self, rval: RawValue) -> \"ObjectMember\":\n val = self.from_raw(rval)\n return ObjectMember(self.iname(), {}, val, None, self, datetime.now())", "response": "Return an isolated instance of the receiver."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef split_instance_route(self, route: \"InstanceRoute\") -> Optional[Tuple[\n \"InstanceRoute\", \"InstanceRoute\"]]:\n \"\"\"Split `route` into the part up to receiver and the rest.\n\n Args:\n route: Absolute instance route (the receiver should correspond to an\n instance node on this route).\n\n Returns:\n A tuple consisting of\n - the part of `route` from the root up to and including the\n instance whose schema node is the receiver, and\n - the rest of `route`.\n ``None`` is returned if the receiver is not on the route.\n \"\"\"\n sroute = []\n sn = self\n while sn:\n sroute.append(sn.iname())\n sn = sn.data_parent()\n i = 0\n while True:\n if not sroute:\n break\n inst = sroute.pop()\n if inst != route[i].iname():\n return None\n while True: # skip up to next MemberName\n i += 1\n if i >= len(route) or isinstance(route[i], MemberName):\n break\n if not sroute:\n return 
(InstanceRoute(route[:i]), InstanceRoute(route[i:]))\n if i >= len(route):\n return None", "response": "Splits the route into the part up to receiver and the rest of the route."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _validate(self, inst: \"InstanceNode\", scope: ValidationScope,\n ctype: ContentType) -> None:\n \"\"\"Extend the superclass method.\"\"\"\n if scope.value & ValidationScope.semantics.value:\n self._check_must(inst) # must expressions\n super()._validate(inst, scope, ctype)", "response": "Extend the superclass method to check that the instance is valid."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef content_type(self) -> ContentType:\n if self._ctype:\n return self._ctype\n return (ContentType.config if self.parent.config else\n ContentType.nonconfig)", "response": "Return the content type of the resource."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef from_raw(self, rval: RawScalar, jptr: JSONPointer = \"\") -> ScalarValue:\n res = self.type.from_raw(rval)\n if res is None:\n raise RawTypeError(jptr, self.type.yang_type() + \" value\")\n return res", "response": "Override the superclass method."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _validate(self, inst: \"InstanceNode\", scope: ValidationScope,\n ctype: ContentType) -> None:\n \"\"\"Extend the superclass method.\"\"\"\n if (scope.value & ValidationScope.syntax.value and\n inst.value not in self.type):\n raise YangTypeError(inst.json_pointer(), self.type.error_tag,\n self.type.error_message)\n if (isinstance(self.type, LinkType) and # referential integrity\n scope.value & ValidationScope.semantics.value and\n self.type.require_instance):\n try:\n tgt = inst._deref()\n except YangsonException:\n tgt = []\n 
if not tgt:\n raise SemanticError(inst.json_pointer(), \"instance-required\")\n super()._validate(inst, scope, ctype)", "response": "Extend the superclass method."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn the receiver s contribution to tree diagram.", "response": "def _tree_line(self, no_type: bool = False) -> str:\n \"\"\"Return the receiver's contribution to tree diagram.\"\"\"\n return super()._tree_line() + (\"!\" if self.presence else \"\")"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _validate(self, inst: \"InstanceNode\", scope: ValidationScope,\n ctype: ContentType) -> None:\n \"\"\"Extend the superclass method.\"\"\"\n if isinstance(inst, ArrayEntry):\n super()._validate(inst, scope, ctype)\n else:\n if scope.value & ValidationScope.semantics.value:\n self._check_list_props(inst)\n self._check_cardinality(inst)\n for e in inst:\n super()._validate(e, scope, ctype)", "response": "Extend the superclass method."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef from_raw(self, rval: RawList, jptr: JSONPointer = \"\") -> ArrayValue:\n if not isinstance(rval, list):\n raise RawTypeError(jptr, \"array\")\n res = ArrayValue()\n i = 0\n for en in rval:\n i += 1\n res.append(self.entry_from_raw(en, f\"{jptr}/{i}\"))\n return res", "response": "Override the superclass method."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ntransforming a raw ( leaf - list entry into the cooked form.", "response": "def entry_from_raw(self, rval: RawEntry, jptr: JSONPointer = \"\") -> EntryValue:\n \"\"\"Transform a raw (leaf-)list entry into the cooked form.\n\n Args:\n rval: raw entry (scalar or object)\n jptr: JSON pointer of the entry\n\n Raises:\n NonexistentSchemaNode: If a member inside `rval` is not defined\n in the schema.\n RawTypeError: If a scalar value 
inside `rval` is of incorrect type.\n \"\"\"\n return super().from_raw(rval, jptr)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _check_list_props(self, inst: \"InstanceNode\") -> None:\n if self.keys:\n self._check_keys(inst)\n for u in self.unique:\n self._check_unique(u, inst)", "response": "Check uniqueness of keys and unique properties if applicable."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _tree_line(self, no_type: bool = False) -> str:\n keys = (\" [\" + \" \".join([k[0] for k in self.keys]) + \"]\"\n if self.keys else \"\")\n return super()._tree_line() + keys", "response": "Return the receiver s contribution to tree diagram."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning an isolated entry of the receiver.", "response": "def orphan_entry(self, rval: RawObject) -> \"ArrayEntry\":\n \"\"\"Return an isolated entry of the receiver.\n\n Args:\n rval: Raw object to be used for the returned entry.\n \"\"\"\n val = self.entry_from_raw(rval)\n return ArrayEntry(0, EmptyList(), EmptyList(), val, None, self,\n val.timestamp)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn receiver s case that s active in an instance node value.", "response": "def _active_case(self, value: ObjectValue) -> Optional[\"CaseNode\"]:\n \"\"\"Return receiver's case that's active in an instance node value.\"\"\"\n for c in self.children:\n for cc in c.data_children():\n if cc.iname() in value:\n return c"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef default(self) -> Optional[ScalarValue]:\n if self.mandatory:\n return None\n if self._default is not None:\n return self._default\n return (None if self.type.default is None\n else ArrayValue([self.type.default]))", "response": "Default value of the receiver if any."} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef from_raw(self, rval: RawValue, jptr: JSONPointer = \"\") -> Value:\n def convert(val):\n if isinstance(val, list):\n res = ArrayValue([convert(x) for x in val])\n elif isinstance(val, dict):\n res = ObjectValue({x: convert(val[x]) for x in val})\n else:\n res = val\n return res\n return convert(rval)", "response": "Convert raw value to a new value."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _input_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:\n self.get_child(\"input\")._handle_substatements(stmt, sctx)", "response": "Handle RPC or action input statement."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nhandling RPC or action output statement.", "response": "def _output_stmt(self, stmt: Statement, sctx: SchemaContext) -> None:\n \"\"\"Handle RPC or action output statement.\"\"\"\n self.get_child(\"output\")._handle_substatements(stmt, sctx)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate an instance from a list of values.", "response": "def from_list(cls, vals: List[Value] = [], reverse: bool = False) -> \"LinkedList\":\n \"\"\"Create an instance from a standard list.\n\n Args:\n vals: Python list of instance values.\n \"\"\"\n res = EmptyList()\n for v in (vals if reverse else vals[::-1]):\n res = cls(v, res)\n return res"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef path(self) -> Tuple[InstanceKey]:\n res = []\n inst: InstanceNode = self\n while inst.parinst:\n res.insert(0, inst._key)\n inst = inst.parinst\n return tuple(res)", "response": "Return the list of keys on the path from root to receiver."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning receiver s member with a new value.", "response": 
"def put_member(self, name: InstanceName, value: Value,\n raw: bool = False) -> \"InstanceNode\":\n \"\"\"Return receiver's member with a new value.\n\n If the member is permitted by the schema but doesn't exist, it\n is created.\n\n Args:\n name: Instance name of the member.\n value: New value of the member.\n raw: Flag to be set if `value` is raw.\n\n Raises:\n NonexistentSchemaNode: If member `name` is not permitted by the\n schema.\n InstanceValueError: If the receiver's value is not an object.\n \"\"\"\n if not isinstance(self.value, ObjectValue):\n raise InstanceValueError(self.json_pointer(), \"member of non-object\")\n csn = self._member_schema_node(name)\n newval = self.value.copy()\n newval[name] = csn.from_raw(value, self.json_pointer()) if raw else value\n return self._copy(newval)._member(name)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef delete_item(self, key: InstanceKey) -> \"InstanceNode\":\n if not isinstance(self.value, StructuredValue):\n raise InstanceValueError(self.json_pointer(), \"scalar value\")\n newval = self.value.copy()\n try:\n del newval[key]\n except (KeyError, IndexError, TypeError):\n raise NonexistentInstance(self.json_pointer(),\n f\"item '{key}'\") from None\n return self._copy(newval)", "response": "Delete an item from receiver s value."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning an instance node corresponding to the receiver s parent.", "response": "def up(self) -> \"InstanceNode\":\n \"\"\"Return an instance node corresponding to the receiver's parent.\n\n Raises:\n NonexistentInstance: If there is no parent.\n \"\"\"\n ts = max(self.timestamp, self.parinst.timestamp)\n return self.parinst._copy(self._zip(), ts)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef top(self) -> \"InstanceNode\":\n inst = self\n while inst.parinst:\n inst = 
inst.up()\n return inst", "response": "Return an instance node corresponding to the root of the data tree."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef update(self, value: Union[RawValue, Value],\n raw: bool = False) -> \"InstanceNode\":\n \"\"\"Update the receiver's value.\n\n Args:\n value: New value.\n raw: Flag to be set if `value` is raw.\n\n Returns:\n Copy of the receiver with the updated value.\n \"\"\"\n newval = self.schema_node.from_raw(\n value, self.json_pointer()) if raw else value\n return self._copy(newval)", "response": "Update the receiver s value."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nmoving focus to an instance inside the receiver s value.", "response": "def goto(self, iroute: \"InstanceRoute\") -> \"InstanceNode\":\n \"\"\"Move the focus to an instance inside the receiver's value.\n\n Args:\n iroute: Instance route (relative to the receiver).\n\n Returns:\n The instance node corresponding to the target instance.\n\n Raises:\n InstanceValueError: If `iroute` is incompatible with the receiver's\n value.\n NonexistentInstance: If the instance node doesn't exist.\n NonDataNode: If an instance route addresses a non-data node\n (rpc/action/notification).\n \"\"\"\n inst = self\n for sel in iroute:\n inst = sel.goto_step(inst)\n return inst"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning a value within the receiver s subtree.", "response": "def peek(self, iroute: \"InstanceRoute\") -> Optional[Value]:\n \"\"\"Return a value within the receiver's subtree.\n\n Args:\n iroute: Instance route (relative to the receiver).\n \"\"\"\n val = self.value\n sn = self.schema_node\n for sel in iroute:\n val, sn = sel.peek_step(val, sn)\n if val is None:\n return None\n return val"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef validate(self, scope: 
ValidationScope = ValidationScope.all,\n ctype: ContentType = ContentType.config) -> None:\n \"\"\"Validate the receiver's value.\n\n Args:\n scope: Scope of the validation (syntax, semantics or all).\n ctype: Receiver's content type.\n\n Raises:\n SchemaError: If the value doesn't conform to the schema.\n SemanticError: If the value violates a semantic constraint.\n YangTypeError: If the value is a scalar of incorrect type.\n \"\"\"\n self.schema_node._validate(self, scope, ctype)", "response": "Validate the receiver s value."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the receiver with defaults added recursively to its value.", "response": "def add_defaults(self, ctype: ContentType = None) -> \"InstanceNode\":\n \"\"\"Return the receiver with defaults added recursively to its value.\n\n Args:\n ctype: Content type of the defaults to be added. If it is\n ``None``, the content type will be the same as receiver's.\n \"\"\"\n val = self.value\n if not (isinstance(val, StructuredValue) and self.is_internal()):\n return self\n res = self\n if isinstance(val, ObjectValue):\n if val:\n for mn in self._member_names():\n m = res._member(mn) if res is self else res.sibling(mn)\n res = m.add_defaults(ctype)\n res = res.up()\n return self.schema_node._add_defaults(res, ctype)\n if not val:\n return res\n en = res[0]\n while True:\n res = en.add_defaults(ctype)\n try:\n en = res.next()\n except NonexistentInstance:\n break\n return res.up()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn receiver s value in a raw form ( ready for JSON encoding ).", "response": "def raw_value(self) -> RawValue:\n \"\"\"Return receiver's value in a raw form (ready for JSON encoding).\"\"\"\n if isinstance(self.value, ObjectValue):\n return {m: self._member(m).raw_value() for m in self.value}\n if isinstance(self.value, ArrayValue):\n return [en.raw_value() for en in self]\n return 
self.schema_node.type.to_raw(self.value)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _node_set(self) -> List[\"InstanceNode\"]:\n return list(self) if isinstance(self.value, ArrayValue) else [self]", "response": "XPath - return the list of all receiver s nodes."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _children(self, qname:\n Union[QualName, bool] = None) -> List[\"InstanceNode\"]:\n \"\"\"XPath - return the list of receiver's children.\"\"\"\n sn = self.schema_node\n if not isinstance(sn, InternalNode):\n return []\n if qname:\n cn = sn.get_data_child(*qname)\n if cn is None:\n return []\n iname = cn.iname()\n if iname in self.value:\n return self._member(iname)._node_set()\n wd = cn._default_instance(self, ContentType.all, lazy=True)\n if iname not in wd.value:\n return []\n while True:\n cn = cn.parent\n if cn is sn:\n return wd._member(iname)._node_set()\n if (cn.when and not cn.when.evaluate(self) or\n isinstance(cn, CaseNode) and\n cn.qual_name != cn.parent.default_case):\n return []\n res = []\n wd = sn._add_defaults(self, ContentType.all, lazy=True)\n for mn in wd.value:\n res.extend(wd._member(mn)._node_set())\n return res", "response": "XPath - return the list of receiver s children."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _deref(self) -> List[\"InstanceNode\"]:\n return ([] if self.is_internal() else\n self.schema_node.type._deref(self))", "response": "XPath - returns the list of nodes that the receiver refers to."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef qual_name(self) -> QualName:\n p, s, loc = self._key.partition(\":\")\n return (loc, p) if s else (p, self.namespace)", "response": "Return the receiver s qualified name."} {"SOURCE": "codesearchnet", "instruction": "Can 
you implement a function in Python 3 that\nreturns an instance node corresponding to a sibling member.", "response": "def sibling(self, name: InstanceName) -> \"ObjectMember\":\n \"\"\"Return an instance node corresponding to a sibling member.\n\n Args:\n name: Instance name of the sibling member.\n\n Raises:\n NonexistentSchemaNode: If member `name` is not permitted by the\n schema.\n NonexistentInstance: If sibling member `name` doesn't exist.\n \"\"\"\n ssn = self.parinst._member_schema_node(name)\n try:\n sibs = self.siblings.copy()\n newval = sibs.pop(name)\n sibs[self.name] = self.value\n return ObjectMember(name, sibs, newval, self.parinst,\n ssn, self.timestamp)\n except KeyError:\n raise NonexistentInstance(self.json_pointer(),\n f\"member '{name}'\") from None"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nlooks up an entry in YANG list with the given keys.", "response": "def look_up(self, **keys: Dict[InstanceName, ScalarValue]) -> \"ArrayEntry\":\n \"\"\"Return the entry with matching keys.\n\n Args:\n keys: Keys and values specified as keyword arguments.\n\n Raises:\n InstanceValueError: If the receiver's value is not a YANG list.\n NonexistentInstance: If no entry with matching keys exists.\n \"\"\"\n if not isinstance(self.schema_node, ListNode):\n raise InstanceValueError(self.json_pointer(), \"lookup on non-list\")\n try:\n for i in range(len(self.value)):\n en = self.value[i]\n flag = True\n for k in keys:\n if en[k] != keys[k]:\n flag = False\n break\n if flag:\n return self._entry(i)\n raise NonexistentInstance(self.json_pointer(), \"entry lookup failed\")\n except KeyError:\n raise NonexistentInstance(self.json_pointer(), \"entry lookup failed\") from None\n except TypeError:\n raise InstanceValueError(self.json_pointer(), \"lookup on non-list\") from None"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _zip(self) -> ObjectValue:\n res = 
ObjectValue(self.siblings.copy(), self.timestamp)\n res[self.name] = self.value\n return res", "response": "Zip the receiver into an object and return it."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the receiver s value.", "response": "def update(self, value: Union[RawValue, Value],\n raw: bool = False) -> \"ArrayEntry\":\n \"\"\"Update the receiver's value.\n\n This method overrides the superclass method.\n \"\"\"\n return super().update(self._cook_value(value, raw), False)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn an instance node corresponding to the previous entry in the array.", "response": "def previous(self) -> \"ArrayEntry\":\n \"\"\"Return an instance node corresponding to the previous entry.\n\n Raises:\n NonexistentInstance: If the receiver is the first entry of the\n parent array.\n \"\"\"\n try:\n newval, nbef = self.before.pop()\n except IndexError:\n raise NonexistentInstance(self.json_pointer(), \"previous of first\") from None\n return ArrayEntry(\n self.index - 1, nbef, self.after.cons(self.value), newval,\n self.parinst, self.schema_node, self.timestamp)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the next entry in the array.", "response": "def next(self) -> \"ArrayEntry\":\n \"\"\"Return an instance node corresponding to the next entry.\n\n Raises:\n NonexistentInstance: If the receiver is the last entry of the parent array.\n \"\"\"\n try:\n newval, naft = self.after.pop()\n except IndexError:\n raise NonexistentInstance(self.json_pointer(), \"next of last\") from None\n return ArrayEntry(\n self.index + 1, self.before.cons(self.value), naft, newval,\n self.parinst, self.schema_node, self.timestamp)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ninsert a new entry before receiver.", "response": "def insert_before(self, value: Union[RawValue, Value],\n raw: bool = False) -> 
\"ArrayEntry\":\n \"\"\"Insert a new entry before the receiver.\n\n Args:\n value: The value of the new entry.\n raw: Flag to be set if `value` is raw.\n\n Returns:\n An instance node of the new inserted entry.\n \"\"\"\n return ArrayEntry(self.index, self.before, self.after.cons(self.value),\n self._cook_value(value, raw), self.parinst,\n self.schema_node, datetime.now())"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nzipping the receiver into an array and return it.", "response": "def _zip(self) -> ArrayValue:\n \"\"\"Zip the receiver into an array and return it.\"\"\"\n res = list(self.before)\n res.reverse()\n res.append(self.value)\n res.extend(list(self.after))\n return ArrayValue(res, self.timestamp)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _ancestors(\n self, qname: Union[QualName, bool] = None) -> List[InstanceNode]:\n \"\"\"XPath - return the list of receiver's ancestors.\"\"\"\n return self.up()._ancestors(qname)", "response": "XPath - return the list of receiver s ancestors."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _preceding_siblings(\n self, qname: Union[QualName, bool] = None) -> List[InstanceNode]:\n \"\"\"XPath - return the list of receiver's preceding siblings.\"\"\"\n if qname and self.qual_name != qname:\n return []\n res = []\n en = self\n for _ in self.before:\n en = en.previous()\n res.append(en)\n return res", "response": "XPath - return the list of receiver s preceding siblings."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _following_siblings(\n self, qname: Union[QualName, bool] = None) -> List[InstanceNode]:\n \"\"\"XPath - return the list of receiver's following siblings.\"\"\"\n if qname and self.qual_name != qname:\n return []\n res = []\n en = self\n for _ in self.after:\n en = en.next()\n res.append(en)\n return res", 
"response": "XPath - return the list of receiver s following siblings."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns member value addressed by the receiver + its schema node.", "response": "def peek_step(self, val: ObjectValue,\n sn: \"DataNode\") -> Tuple[Value, \"DataNode\"]:\n \"\"\"Return member value addressed by the receiver + its schema node.\n\n Args:\n val: Current value (object).\n sn: Current schema node.\n \"\"\"\n cn = sn.get_data_child(self.name, self.namespace)\n try:\n return (val[cn.iname()], cn)\n except (IndexError, KeyError, TypeError):\n return (None, cn)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef peek_step(self, val: ObjectValue,\n sn: \"DataNode\") -> Tuple[None, \"DataNode\"]:\n \"\"\"Fail because there is no action instance.\"\"\"\n cn = sn.get_child(self.name, self.namespace)\n return (None, cn)", "response": "Fail because there is no action instance."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn the next entry value in the receiver + its schema node.", "response": "def peek_step(self, val: ArrayValue,\n sn: \"DataNode\") -> Tuple[Optional[Value], \"DataNode\"]:\n \"\"\"Return entry value addressed by the receiver + its schema node.\n\n Args:\n val: Current value (array).\n sn: Current schema node.\n \"\"\"\n try:\n return val[self.index], sn\n except (IndexError, KeyError, TypeError):\n return None, sn"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nlets schema node s type parse the receiver s value.", "response": "def parse_value(self, sn: \"DataNode\") -> ScalarValue:\n \"\"\"Let schema node's type parse the receiver's value.\"\"\"\n res = sn.type.parse_value(self.value)\n if res is None:\n raise InvalidKeyValue(self.value)\n return res"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns the 
next entry value in the receiver + its schema node.", "response": "def peek_step(self, val: ArrayValue,\n sn: \"DataNode\") -> Tuple[Value, \"DataNode\"]:\n \"\"\"Return entry value addressed by the receiver + its schema node.\n\n Args:\n val: Current value (array).\n sn: Current schema node.\n \"\"\"\n try:\n return (val[val.index(self.parse_value(sn))], sn)\n except ValueError:\n return None, sn"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning member instance of inst addressed by the receiver.", "response": "def goto_step(self, inst: InstanceNode) -> InstanceNode:\n \"\"\"Return member instance of `inst` addressed by the receiver.\n\n Args:\n inst: Current instance.\n \"\"\"\n try:\n return inst._entry(\n inst.value.index(self.parse_value(inst.schema_node)))\n except ValueError:\n raise NonexistentInstance(inst.json_pointer(),\n f\"entry '{self.value!s}'\") from None"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nparses the keys of a list.", "response": "def parse_keys(self, sn: \"DataNode\") -> Dict[InstanceName, ScalarValue]:\n \"\"\"Parse key dictionary in the context of a schema node.\n\n Args:\n sn: Schema node corresponding to a list.\n \"\"\"\n res = {}\n for k in self.keys:\n knod = sn.get_data_child(*k)\n if knod is None:\n raise NonexistentSchemaNode(sn.qual_name, *k)\n kval = knod.type.parse_value(self.keys[k])\n if kval is None:\n raise InvalidKeyValue(self.keys[k])\n res[knod.iname()] = kval\n return res"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns the entry addressed by the receiver + its schema node.", "response": "def peek_step(self, val: ArrayValue,\n sn: \"DataNode\") -> Tuple[ObjectValue, \"DataNode\"]:\n \"\"\"Return the entry addressed by the receiver + its schema node.\n\n Args:\n val: Current value (array).\n sn: Current schema node.\n \"\"\"\n keys = self.parse_keys(sn)\n for en in val:\n flag = 
True\n try:\n for k in keys:\n if en[k] != keys[k]:\n flag = False\n break\n except KeyError:\n continue\n if flag:\n return (en, sn)\n return (None, sn)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef goto_step(self, inst: InstanceNode) -> InstanceNode:\n return inst.look_up(**self.parse_keys(inst.schema_node))", "response": "Return member instance of inst addressed by the receiver."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nparses the resource identifier.", "response": "def parse(self) -> InstanceRoute:\n \"\"\"Parse resource identifier.\"\"\"\n res = InstanceRoute()\n if self.at_end():\n return res\n if self.peek() == \"/\":\n self.offset += 1\n if self.at_end():\n return res\n sn = self.schema_node\n while True:\n name, ns = self.prefixed_name()\n cn = sn.get_data_child(name, ns)\n if cn is None:\n for cn in sn.children:\n if (isinstance(cn, RpcActionNode) and cn.name == name and\n (ns is None or cn.ns == ns)):\n res.append(ActionName(name, ns))\n return res\n raise NonexistentSchemaNode(sn.qual_name, name, ns)\n res.append(MemberName(name, ns))\n if self.at_end():\n return res\n if isinstance(cn, SequenceNode):\n self.char(\"=\")\n res.append(self._key_values(cn))\n if self.at_end():\n return res\n else:\n self.char(\"/\")\n sn = cn"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _key_values(self, sn: \"SequenceNode\") -> Union[EntryKeys, EntryValue]:\n try:\n keys = self.up_to(\"/\")\n except EndOfInput:\n keys = self.remaining()\n if not keys:\n raise UnexpectedInput(self, \"entry value or keys\")\n if isinstance(sn, LeafListNode):\n return EntryValue(unquote(keys))\n ks = keys.split(\",\")\n try:\n if len(ks) != len(sn.keys):\n raise UnexpectedInput(self, f\"exactly {len(sn.keys)} keys\")\n except AttributeError:\n raise BadSchemaNodeType(sn.qual_name, \"list\")\n sel = {}\n for j in 
range(len(ks)):\n knod = sn.get_data_child(*sn.keys[j])\n val = unquote(ks[j])\n sel[(knod.name, None if knod.ns == sn.ns else knod.ns)] = val\n return EntryKeys(sel)", "response": "Parse leaf - list value or list keys."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef find1(self, kw: YangIdentifier, arg: str = None,\n pref: YangIdentifier = None,\n required: bool = False) -> Optional[\"Statement\"]:\n \"\"\"Return first substatement with the given parameters.\n\n Args:\n kw: Statement keyword (local part for extensions).\n arg: Argument (all arguments will match if ``None``).\n pref: Keyword prefix (``None`` for built-in statements).\n required: Should an exception be raised on failure?\n\n Raises:\n StatementNotFound: If `required` is ``True`` and the\n statement is not found.\n \"\"\"\n for sub in self.substatements:\n if (sub.keyword == kw and sub.prefix == pref and\n (arg is None or sub.argument == arg)):\n return sub\n if required:\n raise StatementNotFound(str(self), kw)", "response": "Return first substatement with the given parameters."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef find_all(self, kw: YangIdentifier,\n pref: YangIdentifier = None) -> List[\"Statement\"]:\n \"\"\"Return the list all substatements with the given keyword and prefix.\n\n Args:\n kw: Statement keyword (local part for extensions).\n pref: Keyword prefix (``None`` for built-in statements).\n \"\"\"\n return [c for c in self.substatements\n if c.keyword == kw and c.prefix == pref]", "response": "Return the list all substatements with the given keyword and prefix."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_definition(self, name: YangIdentifier,\n kw: YangIdentifier) -> Optional[\"Statement\"]:\n \"\"\"Search ancestor statements for a definition.\n\n Args:\n name: Name of a grouping or datatype (with no prefix).\n kw: ``grouping`` 
or ``typedef``.\n\n Raises:\n DefinitionNotFound: If the definition is not found.\n \"\"\"\n stmt = self.superstmt\n while stmt:\n res = stmt.find1(kw, name)\n if res:\n return res\n stmt = stmt.superstmt\n return None", "response": "Search ancestor statements for a definition."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns receiver s error tag and error message if present.", "response": "def get_error_info(self) -> Tuple[Optional[str], Optional[str]]:\n \"\"\"Return receiver's error tag and error message if present.\"\"\"\n etag = self.find1(\"error-app-tag\")\n emsg = self.find1(\"error-message\")\n return (etag.argument if etag else None, emsg.argument if emsg else None)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef parse(self) -> Statement:\n self.opt_separator()\n start = self.offset\n res = self.statement()\n if res.keyword not in [\"module\", \"submodule\"]:\n self.offset = start\n raise UnexpectedInput(self, \"'module' or 'submodule'\")\n if self.name is not None and res.argument != self.name:\n raise ModuleNameMismatch(res.argument, self.name)\n if self.rev:\n revst = res.find1(\"revision\")\n if revst is None or revst.argument != self.rev:\n raise ModuleRevisionMismatch(revst.argument, self.rev)\n try:\n self.opt_separator()\n except EndOfInput:\n return res\n raise UnexpectedInput(self, \"end of input\")", "response": "Parse a complete YANG module or submodule."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef unescape(cls, text: str) -> str:\n chop = text.split(\"\\\\\", 1)\n try:\n return (chop[0] if len(chop) == 1\n else chop[0] + cls.unescape_map[chop[1][0]] +\n cls.unescape(chop[1][1:]))\n except KeyError:\n raise InvalidArgument(text) from None", "response": "Unescape the escape sequence with corresponding characters."} {"SOURCE": "codesearchnet", "instruction": "Can you generate 
the documentation for the following Python 3 function\ndef opt_separator(self) -> bool:\n start = self.offset\n self.dfa([\n { # state 0: whitespace\n \"\": lambda: -1,\n \" \": lambda: 0,\n \"\\t\": lambda: 0,\n \"\\n\": lambda: 0,\n \"\\r\": lambda: 1,\n \"/\": lambda: 2\n },\n { # state 1: CR/LF?\n \"\": self._back_break,\n \"\\n\": lambda: 0\n },\n { # state 2: start comment?\n \"\": self._back_break,\n \"/\": lambda: 3,\n \"*\": lambda: 4\n },\n { # state 3: line comment\n \"\": lambda: 3,\n \"\\n\": lambda: 0\n },\n { # state 4: block comment\n \"\": lambda: 4,\n \"*\": lambda: 5\n },\n { # state 5: end block comment?\n \"\": lambda: 4,\n \"/\": lambda: 0,\n \"*\": lambda: 5\n }])\n return start < self.offset", "response": "Parse an optional separator and return True if found."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nparses a YANG statement keyword.", "response": "def keyword(self) -> Tuple[Optional[str], str]:\n \"\"\"Parse a YANG statement keyword.\n\n Raises:\n EndOfInput: If past the end of input.\n UnexpectedInput: If no syntactically correct keyword is found.\n \"\"\"\n i1 = self.yang_identifier()\n if self.peek() == \":\":\n self.offset += 1\n i2 = self.yang_identifier()\n return (i1, i2)\n return (None, i1)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nparse YANG statement. Raises: EndOfInput: If past the end of input. 
UnexpectedInput: If no syntactically correct statement is found.", "response": "def statement(self) -> Statement:\n \"\"\"Parse YANG statement.\n\n Raises:\n EndOfInput: If past the end of input.\n UnexpectedInput: If no syntactically correct statement is found.\n \"\"\"\n pref, kw = self.keyword()\n pres = self.opt_separator()\n next = self.peek()\n if next == \";\":\n arg = None\n sub = False # type: bool\n elif next == \"{\":\n arg = None\n sub = True\n elif not pres:\n raise UnexpectedInput(self, \"separator\")\n else:\n self._arg = \"\"\n sub = self.argument()\n arg = self._arg\n self.offset += 1\n res = Statement(kw, arg, pref=pref)\n if sub:\n res.substatements = self.substatements()\n for sub in res.substatements:\n sub.superstmt = res\n return res"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef argument(self) -> bool:\n next = self.peek()\n if next == \"'\":\n quoted = True\n self.sq_argument()\n elif next == '\"':\n quoted = True\n self.dq_argument()\n elif self._arg == \"\":\n quoted = False\n self.unq_argument()\n else:\n raise UnexpectedInput(self, \"single or double quote\")\n self.opt_separator()\n next = self.peek()\n if next == \";\":\n return False\n if next == \"{\":\n return True\n elif quoted and next == \"+\":\n self.offset += 1\n self.opt_separator()\n return self.argument()\n else:\n raise UnexpectedInput(self, \"';', '{'\" +\n (\" or '+'\" if quoted else \"\"))", "response": "Parse the next argument in the string."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nparses double - quoted argument.", "response": "def dq_argument(self) -> str:\n \"\"\"Parse double-quoted argument.\n\n Raises:\n EndOfInput: If past the end of input.\n \"\"\"\n def escape():\n self._escape = True\n return 1\n self._escape = False # any escaped chars?\n self.offset += 1\n start = self.offset\n self.dfa([\n { # state 0: argument\n \"\": lambda: 0,\n '\"': 
lambda: -1,\n \"\\\\\": escape\n },\n { # state 1: after escape\n \"\": lambda: 0\n }])\n self._arg += (self.unescape(self.input[start:self.offset])\n if self._escape else self.input[start:self.offset])\n self.offset += 1"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nparse an unquoted argument.", "response": "def unq_argument(self) -> str:\n \"\"\"Parse unquoted argument.\n\n Raises:\n EndOfInput: If past the end of input.\n \"\"\"\n start = self.offset\n self.dfa([\n { # state 0: argument\n \"\": lambda: 0,\n \";\": lambda: -1,\n \" \": lambda: -1,\n \"\\t\": lambda: -1,\n \"\\r\": lambda: -1,\n \"\\n\": lambda: -1,\n \"{\": lambda: -1,\n '/': lambda: 1\n },\n { # state 1: comment?\n \"\": lambda: 0,\n \"/\": self._back_break,\n \"*\": self._back_break\n }])\n self._arg = self.input[start:self.offset]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef substatements(self) -> List[Statement]:\n res = []\n self.opt_separator()\n while self.peek() != \"}\":\n res.append(self.statement())\n self.opt_separator()\n self.offset += 1\n return res", "response": "Parse substatements.\n\n Raises:\n EndOfInput: If past the end of input."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _from_yang_library(self, yang_lib: Dict[str, Any]) -> None:\n try:\n for item in yang_lib[\"ietf-yang-library:modules-state\"][\"module\"]:\n name = item[\"name\"]\n rev = item[\"revision\"]\n mid = (name, rev)\n mdata = ModuleData(mid)\n self.modules[mid] = mdata\n if item[\"conformance-type\"] == \"implement\":\n if name in self.implement:\n raise MultipleImplementedRevisions(name)\n self.implement[name] = rev\n mod = self._load_module(name, rev)\n mdata.statement = mod\n if \"feature\" in item:\n mdata.features.update(item[\"feature\"])\n locpref = mod.find1(\"prefix\", required=True).argument\n mdata.prefix_map[locpref] = mid\n 
if \"submodule\" in item:\n for s in item[\"submodule\"]:\n sname = s[\"name\"]\n smid = (sname, s[\"revision\"])\n sdata = ModuleData(mid)\n self.modules[smid] = sdata\n mdata.submodules.add(smid)\n submod = self._load_module(*smid)\n sdata.statement = submod\n bt = submod.find1(\"belongs-to\", name, required=True)\n locpref = bt.find1(\"prefix\", required=True).argument\n sdata.prefix_map[locpref] = mid\n except KeyError as e:\n raise BadYangLibraryData(\"missing \" + str(e)) from None\n self._process_imports()\n self._check_feature_dependences()", "response": "Set the schema structures from YANG library data."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nread and parse a YANG module or submodule.", "response": "def _load_module(self, name: YangIdentifier,\n rev: RevisionDate) -> Statement:\n \"\"\"Read and parse a YANG module or submodule.\"\"\"\n for d in self.module_search_path:\n run = 0\n while run < 2:\n fn = f\"{d}/{name}\"\n if rev and run == 0:\n fn += \"@\" + rev\n fn += \".yang\"\n try:\n with open(fn, encoding='utf-8') as infile:\n res = ModuleParser(infile.read(), name, rev).parse()\n except (FileNotFoundError, PermissionError, ModuleContentMismatch):\n run += 1\n continue\n return res\n raise ModuleNotFound(name, rev)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef namespace(self, mid: ModuleId) -> YangIdentifier:\n try:\n mdata = self.modules[mid]\n except KeyError:\n raise ModuleNotRegistered(*mid) from None\n return mdata.main_module[0]", "response": "Return the namespace corresponding to a module or submodule."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn the last revision of a module or submodule.", "response": "def last_revision(self, mod: YangIdentifier) -> ModuleId:\n \"\"\"Return the last revision of a module that's part of the data model.\n\n Args:\n mod: Name of a module or submodule.\n\n 
Raises:\n ModuleNotRegistered: If the module `mod` is not present in the\n data model.\n \"\"\"\n revs = [mn for mn in self.modules if mn[0] == mod]\n if not revs:\n raise ModuleNotRegistered(mod)\n return sorted(revs, key=lambda x: x[1])[-1]"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef prefix2ns(self, prefix: YangIdentifier, mid: ModuleId) -> YangIdentifier:\n try:\n mdata = self.modules[mid]\n except KeyError:\n raise ModuleNotRegistered(*mid) from None\n try:\n return mdata.prefix_map[prefix][0]\n except KeyError:\n raise UnknownPrefix(prefix, mid) from None", "response": "Return the namespace corresponding to a prefix."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nresolve a name with an optional prefix.", "response": "def resolve_pname(self, pname: PrefName,\n mid: ModuleId) -> Tuple[YangIdentifier, ModuleId]:\n \"\"\"Return the name and module identifier in which the name is defined.\n\n Args:\n pname: Name with an optional prefix.\n mid: Identifier of the module in which `pname` appears.\n\n Raises:\n ModuleNotRegistered: If `mid` is not registered in the data model.\n UnknownPrefix: If the prefix specified in `pname` is not declared.\n \"\"\"\n p, s, loc = pname.partition(\":\")\n try:\n mdata = self.modules[mid]\n except KeyError:\n raise ModuleNotRegistered(*mid) from None\n try:\n return (loc, mdata.prefix_map[p]) if s else (p, mdata.main_module)\n except KeyError:\n raise UnknownPrefix(p, mid) from None"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntranslates a prefixed name to a qualified name.", "response": "def translate_pname(self, pname: PrefName, mid: ModuleId) -> QualName:\n \"\"\"Translate a prefixed name to a qualified name.\n Args:\n pname: Name with an optional prefix.\n mid: Identifier of the module in which `pname` appears.\n Raises:\n ModuleNotRegistered: If `mid` is not registered in the data model.\n 
UnknownPrefix: If the prefix specified in `pname` is not declared.\n \"\"\"\n loc, nid = self.resolve_pname(pname, mid)\n return (loc, self.namespace(nid))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef translate_node_id(self, ni: PrefName, sctx: SchemaContext) -> QualName:\n p, s, loc = ni.partition(\":\")\n if not s:\n return (ni, sctx.default_ns)\n try:\n mdata = self.modules[sctx.text_mid]\n except KeyError:\n raise ModuleNotRegistered(*sctx.text_mid) from None\n try:\n return (loc, self.namespace(mdata.prefix_map[p]))\n except KeyError:\n raise UnknownPrefix(p, sctx.text_mid) from None", "response": "Translate a node identifier to a qualified name."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef prefix(self, imod: YangIdentifier, mid: ModuleId) -> YangIdentifier:\n try:\n did = (imod, self.implement[imod])\n except KeyError:\n raise ModuleNotImplemented(imod) from None\n try:\n pmap = self.modules[mid].prefix_map\n except KeyError:\n raise ModuleNotRegistered(*mid) from None\n for p in pmap:\n if pmap[p] == did:\n return p\n raise ModuleNotImported(imod, mid)", "response": "Return the prefix corresponding to an implemented module."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef sni2route(self, sni: SchemaNodeId, sctx: SchemaContext) -> SchemaRoute:\n nlist = sni.split(\"/\")\n res = []\n for qn in (nlist[1:] if sni[0] == \"/\" else nlist):\n res.append(self.translate_node_id(qn, sctx))\n return res", "response": "Translate a schema node identifier to a schema route."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef path2route(path: SchemaPath) -> SchemaRoute:\n if path == \"/\" or path == \"\":\n return []\n nlist = path.split(\"/\")\n prevns = None\n res = []\n for n in (nlist[1:] if 
path[0] == \"/\" else nlist):\n p, s, loc = n.partition(\":\")\n if s:\n if p == prevns:\n raise InvalidSchemaPath(path)\n res.append((loc, p))\n prevns = p\n elif prevns:\n res.append((p, prevns))\n else:\n raise InvalidSchemaPath(path)\n return res", "response": "Translate a schema path to a schema route."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_definition(self, stmt: Statement,\n sctx: SchemaContext) -> Tuple[Statement, SchemaContext]:\n \"\"\"Find the statement defining a grouping or derived type.\n\n Args:\n stmt: YANG \"uses\" or \"type\" statement.\n sctx: Schema context where the definition is used.\n\n Returns:\n A tuple consisting of the definition statement ('grouping' or\n 'typedef') and schema context of the definition.\n\n Raises:\n ValueError: If `stmt` is neither \"uses\" nor \"type\" statement.\n ModuleNotRegistered: If `mid` is not registered in the data model.\n UnknownPrefix: If the prefix specified in the argument of `stmt`\n is not declared.\n DefinitionNotFound: If the corresponding definition is not found.\n \"\"\"\n if stmt.keyword == \"uses\":\n kw = \"grouping\"\n elif stmt.keyword == \"type\":\n kw = \"typedef\"\n else:\n raise ValueError(\"not a 'uses' or 'type' statement\")\n loc, did = self.resolve_pname(stmt.argument, sctx.text_mid)\n if did == sctx.text_mid:\n dstmt = stmt.get_definition(loc, kw)\n if dstmt:\n return (dstmt, sctx)\n else:\n dstmt = self.modules[did].statement.find1(kw, loc)\n if dstmt:\n return (dstmt, SchemaContext(sctx.schema_data, sctx.default_ns, did))\n for sid in self.modules[did].submodules:\n dstmt = self.modules[sid].statement.find1(kw, loc)\n if dstmt:\n return (\n dstmt, SchemaContext(sctx.schema_data, sctx.default_ns, sid))\n raise DefinitionNotFound(kw, stmt.argument)", "response": "Find the statement defining a grouping or derived type."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write 
the documentation\ndef is_derived_from(self, identity: QualName, base: QualName) -> bool:\n try:\n bases = self.identity_adjs[identity].bases\n except KeyError:\n return False\n if base in bases:\n return True\n for ib in bases:\n if self.is_derived_from(ib, base):\n return True\n return False", "response": "Return True if identity is derived from base."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef derived_from(self, identity: QualName) -> MutableSet[QualName]:\n try:\n res = self.identity_adjs[identity].derivs\n except KeyError:\n return set()\n for id in res.copy():\n res |= self.derived_from(id)\n return res", "response": "Return list of identities transitively derived from identity."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns list of identities transitively derived from all identity.", "response": "def derived_from_all(self, identities: List[QualName]) -> MutableSet[QualName]:\n \"\"\"Return list of identities transitively derived from all `identity`.\"\"\"\n if not identities:\n return set()\n res = self.derived_from(identities[0])\n for id in identities[1:]:\n res &= self.derived_from(id)\n return res"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef if_features(self, stmt: Statement, mid: ModuleId) -> bool:\n iffs = stmt.find_all(\"if-feature\")\n if not iffs:\n return True\n for i in iffs:\n if not FeatureExprParser(i.argument, self, mid).parse():\n return False\n return True", "response": "Evaluate if - feature substatements on a statement."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef parse(self) -> bool:\n self.skip_ws()\n res = self._feature_disj()\n self.skip_ws()\n if not self.at_end():\n raise InvalidFeatureExpression(self)\n return res", "response": "Parse and evaluate a complete if - feature 
expression."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef char(self, c: str) -> None:\n if self.peek() == c:\n self.offset += 1\n else:\n raise UnexpectedInput(self, f\"char '{c}'\")", "response": "Parse the specified character."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nrunning a DFA and return the final state.", "response": "def dfa(self, ttab: TransitionTable, init: int = 0) -> int:\n \"\"\"Run a DFA and return the final (negative) state.\n\n Args:\n ttab: Transition table (with possible side-effects).\n init: Initial state.\n\n Raises:\n EndOfInput: If past the end of `self.input`.\n \"\"\"\n state = init\n while True:\n disp = ttab[state]\n ch = self.peek()\n state = disp.get(ch, disp[\"\"])()\n if state < 0:\n return state\n self.offset += 1"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns line and column coordinates.", "response": "def line_column(self) -> Tuple[int, int]:\n \"\"\"Return line and column coordinates.\"\"\"\n ln = self.input.count(\"\\n\", 0, self.offset)\n c = (self.offset if ln == 0 else\n self.offset - self.input.rfind(\"\\n\", 0, self.offset) - 1)\n return (ln + 1, c)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef match_regex(self, regex: Pattern, required: bool = False,\n meaning: str = \"\") -> str:\n \"\"\"Parse input based on a regular expression .\n\n Args:\n regex: Compiled regular expression object.\n required: Should the exception be raised on unexpected input?\n meaning: Meaning of `regex` (for use in error messages).\n\n Raises:\n UnexpectedInput: If no syntactically correct keyword is found.\n \"\"\"\n mo = regex.match(self.input, self.offset)\n if mo:\n self.offset = mo.end()\n return mo.group()\n if required:\n raise UnexpectedInput(self, meaning)", "response": "Parse input based on a regular 
expression."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef one_of(self, chset: str) -> str:\n res = self.peek()\n if res in chset:\n self.offset += 1\n return res\n raise UnexpectedInput(self, \"one of \" + chset)", "response": "Parse one character form the specified set."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef peek(self) -> str:\n try:\n return self.input[self.offset]\n except IndexError:\n raise EndOfInput(self)", "response": "Return the next character without advancing offset."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nparse identifier with an optional colon - separated prefix.", "response": "def prefixed_name(self) -> Tuple[YangIdentifier, Optional[YangIdentifier]]:\n \"\"\"Parse identifier with an optional colon-separated prefix.\"\"\"\n i1 = self.yang_identifier()\n try:\n next = self.peek()\n except EndOfInput:\n return (i1, None)\n if next != \":\":\n return (i1, None)\n self.offset += 1\n return (self.yang_identifier(), i1)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the remaining part of the input string.", "response": "def remaining(self) -> str:\n \"\"\"Return the remaining part of the input string.\"\"\"\n res = self.input[self.offset:]\n self.offset = len(self.input)\n return res"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef up_to(self, term: str) -> str:\n end = self.input.find(term, self.offset)\n if end < 0:\n raise EndOfInput(self)\n res = self.input[self.offset:end]\n self.offset = end + 1\n return res", "response": "Parse and return the next occurrence of a string."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef restrict_with(self, expr: str, error_tag: str = None,\n error_message: str = 
None) -> None:\n \"\"\"Combine the receiver with new intervals.\n\n Args:\n expr: \"range\" or \"length\" expression.\n error_tag: error tag of the new expression.\n error_message: error message for the new expression.\n\n Raises:\n InvalidArgument: If parsing of `expr` fails.\n \"\"\"\n def parse(x: str) -> Number:\n res = self.parser(x)\n if res is None:\n raise InvalidArgument(expr)\n return res\n\n def simpl(rng: List[Number]) -> List[Number]:\n return ([rng[0]] if rng[0] == rng[1] else rng)\n\n def to_num(xs): return [parse(x) for x in xs]\n lo = self.intervals[0][0]\n hi = self.intervals[-1][-1]\n ran = []\n for p in [p.strip() for p in expr.split(\"|\")]:\n r = [i.strip() for i in p.split(\"..\")]\n if len(r) > 2:\n raise InvalidArgument(expr)\n ran.append(r)\n if ran[0][0] != \"min\":\n lo = parse(ran[0][0])\n if ran[-1][-1] != \"max\":\n hi = parse(ran[-1][-1])\n self.intervals = (\n [simpl([lo, hi])] if len(ran) == 1 else (\n [simpl([lo, parse(ran[0][-1])])] +\n [to_num(r) for r in ran[1:-1]] +\n [simpl([parse(ran[-1][0]), hi])]))\n if error_tag:\n self.error_tag = error_tag\n if error_message:\n self.error_message = error_message", "response": "Combine the receiver with new intervals."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add_child(self, n, parent, **attrs):\n '''\n API: add_child(self, n, parent, **attrs)\n Description:\n Adds child n to node parent and return Node n.\n Pre:\n Node with name parent should exist.\n Input:\n n: Child node name.\n parent: Parent node name.\n attrs: Attributes of node being added.\n Post:\n Updates Graph related graph data attributes.\n Return:\n Returns n Node instance.\n '''\n attrs['level'] = self.get_node(parent).get_attr('level') + 1\n attrs['parent'] = parent\n self.add_node(n, **attrs)\n self.add_edge(parent, n)\n return self.get_node(n)", "response": "Adds a child node n to a parent node and returns n."} {"SOURCE": "codesearchnet", "instruction": "Can you 
tell what is the following Python 3 function doing\ndef dfs(self, root = None, display = None):\n '''\n API: dfs(self, root = None, display = None)\n Description:\n Searches tree starting from node named root using depth-first\n strategy if root argument is provided. Starts search from root node\n of the tree otherwise.\n Pre:\n Node indicated by root argument should exist.\n Input:\n root: Starting node name.\n display: Display argument.\n '''\n if root == None:\n root = self.root\n if display == None:\n display = self.attr['display']\n self.traverse(root, display, Stack())", "response": "This function returns a tree of nodes with depth - first search."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef traverse(self, root = None, display = None, q = Stack()):\n '''\n API: traverse(self, root = None, display = None, q = Stack())\n Description:\n Traverses tree starting from node named root. Used strategy (BFS,\n DFS) is controlled by argument q. It is a DFS if q is Queue(), BFS\n if q is Stack(). Starts search from root argument if it is given.\n Starts from root node of the tree otherwise.\n Pre:\n Node indicated by root argument should exist.\n Input:\n root: Starting node name.\n display: Display argument.\n q: Queue data structure instance. 
It is either a Stack() or\n Queue().\n '''\n if root == None:\n root = self.root\n if display == None:\n display = self.attr['display']\n if isinstance(q, Queue):\n addToQ = q.enqueue\n removeFromQ = q.dequeue\n elif isinstance(q, Stack):\n addToQ = q.push\n removeFromQ = q.pop\n addToQ(root)\n while not q.isEmpty():\n current = removeFromQ()\n #print current\n if display:\n self.display(highlight = [current])\n for n in self.get_children(current):\n addToQ(n)", "response": "This function traverses the tree starting from the given root."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nadd a right child node n to parent.", "response": "def add_right_child(self, n, parent, **attrs):\n '''\n API: add_right_child(self, n, parent, **attrs)\n Description:\n Adds right child n to node parent.\n Pre:\n Right child of parent should not exist.\n Input:\n n: Node name.\n parent: Parent node name.\n attrs: Attributes of node n.\n '''\n if self.get_right_child(parent) is not None:\n msg = \"Right child already exists for node \" + str(parent)\n raise Exception(msg)\n attrs['direction'] = 'R'\n self.set_node_attr(parent, 'Rchild', n)\n self.add_child(n, parent, **attrs)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_left_child(self, n, parent, **attrs):\n '''\n API: add_left_child(self, n, parent, **attrs)\n Description:\n Adds left child n to node parent.\n Pre:\n Left child of parent should not exist.\n Input:\n n: Node name.\n parent: Parent node name.\n attrs: Attributes of node n.\n '''\n if self.get_left_child(parent) is not None:\n msg = \"Right child already exists for node \" + str(parent)\n raise Exception(msg)\n attrs['direction'] = 'L'\n self.set_node_attr(parent, 'Lchild', n)\n self.add_child(n, parent, **attrs)", "response": "Adds a left child node n to parent."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets the right 
child of a node n.", "response": "def get_right_child(self, n):\n '''\n API: get_right_child(self, n)\n Description:\n Returns right child of node n. n can be Node() instance or string\n (name of node).\n Pre:\n Node n should be present in the tree.\n Input:\n n: Node name or Node() instance.\n Return:\n Returns name of the right child of n.\n '''\n if isinstance(n, Node):\n return n.get_attr('Rchild')\n return self.get_node_attr(n, 'Rchild')"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_left_child(self, n):\n '''\n API: get_left_child(self, n)\n Description:\n Returns left child of node n. n can be Node() instance or string\n (name of node).\n Pre:\n Node n should be present in the tree.\n Input:\n n: Node name or Node() instance.\n Return:\n Returns name of the left child of n.\n '''\n if isinstance(n, Node):\n return n.get_attr('Lchild')\n return self.get_node_attr(n, 'Lchild')", "response": "Get the name of the left child of a node n."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef del_node(self, n):\n '''\n API: del_node(self, n)\n Description:\n Removes node n from tree.\n Pre:\n Node n should be present in the tree.\n Input:\n n: Node name.\n '''\n parent = self.get_node_attr(n, 'parent')\n if self.get_node_attr(n, 'direction') == 'R':\n self.set_node_attr(parent, 'Rchild', None)\n else:\n self.set_node_attr(parent, 'Lchild', None)\n Graph.del_node(self, n)", "response": "This method deletes a node from the tree."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef print_nodes(self, order = 'in', priority = 'L', display = None,\n root = None):\n '''\n API: print_nodes(self, order = 'in', priority = 'L', display = None,\n root = None)\n Description:\n A recursive function that prints nodes to stdout starting from\n root.\n Input:\n order: Order 
of printing. Acceptable arguments are 'pre', 'in',\n 'post'.\n priority: Priority of printing, acceptable arguments are 'L' and\n 'R'.\n display: Display mode.\n root: Starting node.\n '''\n old_display = None\n if root == None:\n root = self.root.name\n if display == None:\n display = self.attr['display']\n else:\n old_display = self.attr['display']\n self.attr['display'] = display\n if priority == 'L':\n first_child = self.get_left_child\n second_child = self.get_right_child\n else:\n first_child = self.get_right_child\n second_child = self.get_left_child\n if order == 'pre':\n print(root)\n if first_child(root) is not None:\n if display:\n self.display(highlight = [root])\n self.print_nodes(order, priority, display, first_child(root))\n if order == 'in':\n print(root)\n if second_child(root) is not None:\n if display:\n self.display(highlight = [root])\n self.print_nodes(order, priority, display, second_child(root))\n if order == 'post':\n print(root)\n if display:\n self.display(highlight = [root])\n if old_display:\n self.attr['display'] = old_display", "response": "A recursive function that prints nodes starting from a node."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds entry for one YANG module or submodule text.", "response": "def module_entry(yfile):\n \"\"\"Add entry for one file containing YANG module text.\n\n Args:\n yfile (file): File containing a YANG module or submodule.\n \"\"\"\n ytxt = yfile.read()\n mp = ModuleParser(ytxt)\n mst = mp.statement()\n submod = mst.keyword == \"submodule\"\n import_only = True\n rev = \"\"\n features = []\n includes = []\n rec = {}\n for sst in mst.substatements:\n if not rev and sst.keyword == \"revision\":\n rev = sst.argument\n elif import_only and sst.keyword in data_kws:\n import_only = False\n elif sst.keyword == \"feature\":\n features.append(sst.argument)\n elif submod:\n continue\n elif sst.keyword == \"namespace\":\n rec[\"namespace\"] = sst.argument\n elif 
sst.keyword == \"include\":\n rd = sst.find1(\"revision-date\")\n includes.append((sst.argument, rd.argument if rd else None))\n rec[\"import-only\"] = import_only\n rec[\"features\"] = features\n if submod:\n rec[\"revision\"] = rev\n submodmap[mst.argument] = rec\n else:\n rec[\"includes\"] = includes\n modmap[(mst.argument, rev)] = rec"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef nullable(self, ctype: ContentType) -> bool:\n return (not self.check_when() or self.pattern.nullable(ctype))", "response": "Override the superclass method."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef deriv(self, x: str, ctype: ContentType) -> SchemaPattern:\n return (self.pattern.deriv(x, ctype) if self.check_when() else\n NotAllowed())", "response": "Return the derivative of the receiver."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef deriv(self, x: str, ctype: ContentType) -> SchemaPattern:\n return (Empty() if\n self.name == x and self._active(ctype)\n else NotAllowed())", "response": "Return the derivative of the receiver."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef nullable(self, ctype: ContentType) -> bool:\n return self.left.nullable(ctype) or self.right.nullable(ctype)", "response": "Override the superclass method."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef deriv(self, x: str, ctype: ContentType) -> SchemaPattern:\n return Alternative.combine(self.left.deriv(x, ctype),\n self.right.deriv(x, ctype))", "response": "Return the derivative of the receiver."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef from_file(cls, name: str, mod_path: Tuple[str] = (\".\",),\n description: str = None) -> 
\"DataModel\":\n \"\"\"Initialize the data model from a file with YANG library data.\n\n Args:\n name: Name of a file with YANG library data.\n mod_path: Tuple of directories where to look for YANG modules.\n description: Optional description of the data model.\n\n Returns:\n The data model instance.\n\n Raises:\n The same exceptions as the class constructor above.\n \"\"\"\n with open(name, encoding=\"utf-8\") as infile:\n yltxt = infile.read()\n return cls(yltxt, mod_path, description)", "response": "Initialize the data model from a YANG library data file."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncompute unique id of YANG modules comprising the data model.", "response": "def module_set_id(self) -> str:\n \"\"\"Compute unique id of YANG modules comprising the data model.\n\n Returns:\n String consisting of hexadecimal digits.\n \"\"\"\n fnames = sorted([\"@\".join(m) for m in self.schema_data.modules])\n return hashlib.sha1(\"\".join(fnames).encode(\"ascii\")).hexdigest()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating an instance node from a raw data tree.", "response": "def from_raw(self, robj: RawObject) -> RootNode:\n \"\"\"Create an instance node from a raw data tree.\n\n Args:\n robj: Dictionary representing a raw data tree.\n\n Returns:\n Root instance node.\n \"\"\"\n cooked = self.schema.from_raw(robj)\n return RootNode(cooked, self.schema, cooked.timestamp)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the schema node addressed by a schema path.", "response": "def get_schema_node(self, path: SchemaPath) -> Optional[SchemaNode]:\n \"\"\"Return the schema node addressed by a schema path.\n\n Args:\n path: Schema path.\n\n Returns:\n Schema node if found in the schema, or ``None``.\n\n Raises:\n InvalidSchemaPath: If the schema path is invalid.\n \"\"\"\n return self.schema.get_schema_descendant(\n self.schema_data.path2route(path))"} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_data_node(self, path: DataPath) -> Optional[DataNode]:\n addr = self.schema_data.path2route(path)\n node = self.schema\n for p in addr:\n node = node.get_data_child(*p)\n if node is None:\n return None\n return node", "response": "Return the data node addressed by a data path."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef ascii_tree(self, no_types: bool = False, val_count: bool = False) -> str:\n return self.schema._ascii_tree(\"\", no_types, val_count)", "response": "Generate an ASCII art representation of the schema tree."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngenerates schema digest (to be used primarily by clients). Returns: Condensed information about the schema in JSON format.", "response": "def schema_digest(self) -> str:\n \"\"\"Generate schema digest (to be used primarily by clients).\n\n Returns:\n Condensed information about the schema in JSON format.\n \"\"\"\n res = self.schema._node_digest()\n res[\"config\"] = True\n return json.dumps(res)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef needs_quotes(s):\n # If the name is a reserved keyword it will need quotes but pydot\n # can't tell when it's being used as a keyword or when it's simply\n # a name. Hence the user needs to supply the quotes when an element\n # would use a reserved keyword as name. 
This function will return\n # false indicating that a keyword string, if provided as-is, won't\n # need quotes.\n if s in DOT_KEYWORDS:\n return False\n chars = [ord(c) for c in s if ord(c)>0x7f or ord(c)==0]\n if chars and not ID_RE_DBL_QUOTED.match(s) and not ID_RE_HTML.match(s):\n return True\n for test_re in [ID_RE_ALPHA_NUMS, ID_RE_NUM, ID_RE_DBL_QUOTED, ID_RE_HTML, ID_RE_ALPHA_NUMS_WITH_PORTS]:\n if test_re.match(s):\n return False\n m = ID_RE_WITH_PORT.match(s)\n if m:\n return needs_quotes(m.group(1)) or needs_quotes(m.group(2))\n return True", "response": "Checks whether a string is a dot language ID."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns string representation of the object in dot language.", "response": "def to_string(self):\n '''\n API: to_string(self)\n Description:\n Returns string representation of node in dot language.\n Return:\n String representation of node.\n '''\n node = list()\n node.append(quote_if_necessary(str(self.name)))\n node.append(' [')\n flag = False\n for a in self.attr:\n flag = True\n node.append(a)\n node.append('=')\n node.append(quote_if_necessary(str(self.attr[a])))\n node.append(', ')\n if flag is True:\n node = node[:-1]\n node.append(']')\n return ''.join(node)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_node(self, name, **attr):\n '''\n API: add_node(self, name, **attr)\n Description:\n Adds node to the graph.\n Pre:\n Graph should not contain a node with this name. 
We do not allow\n multiple nodes with the same name.\n Input:\n name: Name of the node.\n attr: Node attributes.\n Post:\n self.neighbors, self.nodes and self.in_neighbors are updated.\n Return:\n Node (a Node class instance) added to the graph.\n '''\n if name in self.neighbors:\n raise MultipleNodeException\n self.neighbors[name] = list()\n if self.graph_type is DIRECTED_GRAPH:\n self.in_neighbors[name] = list()\n self.nodes[name] = Node(name, **attr)\n return self.nodes[name]", "response": "Adds a node to the graph."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds an edge to the current node and sets edge attributes.", "response": "def add_edge(self, name1, name2, **attr):\n '''\n API: add_edge(self, name1, name2, **attr)\n Description:\n Adds edge to the graph. Sets edge attributes using attr argument.\n Input:\n name1: Name of the source node (if directed).\n name2: Name of the sink node (if directed).\n attr: Edge attributes.\n Pre:\n Graph should not already contain this edge. 
We do not allow\n multiple edges with same source and sink nodes.\n Post:\n self.edge_attr is updated.\n self.neighbors, self.nodes and self.in_neighbors are updated if\n graph was missing at least one of the nodes.\n '''\n if (name1, name2) in self.edge_attr:\n raise MultipleEdgeException\n if self.graph_type is UNDIRECTED_GRAPH and (name2,name1) in self.edge_attr:\n raise MultipleEdgeException\n self.edge_attr[(name1,name2)] = copy.deepcopy(DEFAULT_EDGE_ATTRIBUTES)\n for a in attr:\n self.edge_attr[(name1,name2)][a] = attr[a]\n if name1 not in self.nodes:\n self.add_node(name1)\n if name2 not in self.nodes:\n self.add_node(name2)\n self.neighbors[name1].append(name2)\n if self.graph_type is UNDIRECTED_GRAPH:\n self.neighbors[name2].append(name1)\n else:\n self.in_neighbors[name2].append(name1)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef del_edge(self, e):\n '''\n API: del_edge(self, e)\n Description:\n Removes edge from graph.\n Input:\n e: Tuple that represents edge, in (source,sink) form.\n Pre:\n Graph should contain this edge.\n Post:\n self.edge_attr, self.neighbors and self.in_neighbors are updated.\n '''\n if self.graph_type is DIRECTED_GRAPH:\n try:\n del self.edge_attr[e]\n except KeyError:\n raise Exception('Edge %s does not exists!' %str(e))\n self.neighbors[e[0]].remove(e[1])\n self.in_neighbors[e[1]].remove(e[0])\n else:\n try:\n del self.edge_attr[e]\n except KeyError:\n try:\n del self.edge_attr[(e[1],e[0])]\n except KeyError:\n raise Exception('Edge %s does not exists!' 
%str(e))\n self.neighbors[e[0]].remove(e[1])\n self.neighbors[e[1]].remove(e[0])", "response": "This method deletes an edge from the internal state."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef check_edge(self, name1, name2):\n '''\n API: check_edge(self, name1, name2)\n Description:\n Return True if edge exists, False otherwise.\n Input:\n name1: name of the source node.\n name2: name of the sink node.\n Return:\n Returns True if edge exists, False otherwise.\n '''\n if self.graph_type is DIRECTED_GRAPH:\n return (name1, name2) in self.edge_attr\n else:\n return ((name1, name2) in self.edge_attr or\n (name2, name1) in self.edge_attr)", "response": "Check if an edge exists in the current node."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_edge_attr(self, n, m, attr):\n '''\n API: get_edge_attr(self, n, m, attr)\n Description:\n Returns attribute attr of edge (n,m).\n Input:\n n: Source node name.\n m: Sink node name.\n attr: Attribute of edge.\n Pre:\n Graph should have this edge.\n Return:\n Value of edge attribute attr.\n '''\n if self.graph_type is DIRECTED_GRAPH:\n return self.edge_attr[(n,m)][attr]\n else:\n try:\n return self.edge_attr[(n,m)][attr]\n except KeyError:\n return self.edge_attr[(m,n)][attr]", "response": "Get the value of an attribute of an edge."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef set_node_attr(self, name, attr, value):\n '''\n API: set_node_attr(self, name, attr)\n Description:\n Sets attr attribute of node named name to value.\n Input:\n name: Name of node.\n attr: Attribute of node to set.\n Pre:\n Graph should have this node.\n Post:\n Node attribute will be updated.\n '''\n self.get_node(name).set_attr(attr, value)", "response": "Set node attribute of name to value."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nset edge 
attribute of a node.", "response": "def set_edge_attr(self, n, m, attr, value):\n '''\n API: set_edge_attr(self, n, m, attr, value)\n Description:\n Sets attr attribute of edge (n,m) to value.\n Input:\n n: Source node name.\n m: Sink node name.\n attr: Attribute of edge to set.\n value: New value of attribute.\n Pre:\n Graph should have this edge.\n Post:\n Edge attribute will be updated.\n '''\n if self.graph_type is DIRECTED_GRAPH:\n self.edge_attr[(n,m)][attr] = value\n else:\n try:\n self.edge_attr[(n,m)][attr] = value\n except KeyError:\n self.edge_attr[(m,n)][attr] = value"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef to_string(self):\n '''\n API: to_string(self)\n Description:\n This method is based on pydot Graph class with the same name.\n Returns a string representation of the graph in dot language.\n It will return the graph and all its subelements in string form.\n Return:\n String that represents graph in dot language.\n '''\n graph = list()\n processed_edges = {}\n graph.append('%s %s {\\n' %(self.graph_type, self.name))\n for a in self.attr:\n if a not in GRAPH_ATTRIBUTES:\n continue\n val = self.attr[a]\n if val is not None:\n graph.append( '%s=%s' % (a, quote_if_necessary(val)) )\n else:\n graph.append(a)\n graph.append( ';\\n' )\n # clusters\n for c in self.cluster:\n graph.append('subgraph cluster_%s {\\n' %c)\n for a in self.cluster[c]['attrs']:\n if a=='label':\n graph.append(a+'='+quote_if_necessary(self.cluster[c]['attrs'][a])+';\\n')\n continue\n graph.append(a+'='+self.cluster[c]['attrs'][a]+';\\n')\n if len(self.cluster[c]['node_attrs'])!=0:\n graph.append('node [')\n for a in self.cluster[c]['node_attrs']:\n graph.append(a+'='+self.cluster[c]['node_attrs'][a])\n graph.append(',')\n if len(self.cluster[c]['node_attrs'])!=0:\n graph.pop()\n graph.append('];\\n')\n # process cluster nodes\n for n in self.cluster[c]['node_list']:\n data = self.get_node(n).to_string()\n graph.append(data + 
';\\n')\n # process cluster edges\n for n in self.cluster[c]['node_list']:\n for m in self.cluster[c]['node_list']:\n if self.check_edge(n,m):\n data = self.edge_to_string((n,m))\n graph.append(data + ';\\n')\n processed_edges[(n,m)]=None\n graph.append('}\\n')\n # process remaining (non-cluster) nodes\n for n in self.neighbors:\n for c in self.cluster:\n if n in self.cluster[c]['node_list']:\n break\n else:\n data = self.get_node(n).to_string()\n graph.append(data + ';\\n')\n # process edges\n for e in self.edge_attr:\n if e in processed_edges:\n continue\n data = self.edge_to_string(e)\n graph.append(data + ';\\n')\n graph.append( '}\\n' )\n return ''.join(graph)", "response": "This method returns a string representation of the object in dot language. It will return the graph and all its subelements in string form."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef label_components(self, display = None):\n '''\n API: label_components(self, display=None)\n Description:\n This method labels the nodes of an undirected graph with component\n numbers so that each node has the same label as all nodes in the\n same component. 
It will display the algortihm if display argument is\n provided.\n Input:\n display: display method.\n Pre:\n self.graph_type should be UNDIRECTED_GRAPH.\n Post:\n Nodes will have 'component' attribute that will have component\n number as value.\n '''\n if self.graph_type == DIRECTED_GRAPH:\n raise Exception(\"label_components only works for \",\n \"undirected graphs\")\n self.num_components = 0\n for n in self.get_node_list():\n self.get_node(n).set_attr('component', None)\n for n in self.neighbors:\n self.get_node(n).set_attr('label', '-')\n for n in self.get_node_list():\n if self.get_node(n).get_attr('component') == None:\n self.search(n, display=display,\n component=self.num_components, algo='DFS')\n self.num_components += 1", "response": "This method labels the nodes of the undirected graph with the same label as all nodes in the undirected graph."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef tarjan(self):\n '''\n API: tarjan(self)\n Description:\n Implements Tarjan's algorithm for determining strongly connected set of\n nodes.\n Pre:\n self.graph_type should be DIRECTED_GRAPH.\n Post:\n Nodes will have 'component' attribute that will have component\n number as value. Changes 'index' attribute of nodes.\n '''\n index = 0\n component = 0\n q = []\n for n in self.get_node_list():\n if self.get_node_attr(n, 'index') is None:\n index, component = self.strong_connect(q, n, index, component)", "response": "This method is used to determine strongly connected set of nodes."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef strong_connect(self, q, node, index, component):\n '''\n API: strong_connect (self, q, node, index, component)\n Description:\n Used by tarjan method. 
This method should not be called directly by\n user.\n Input:\n q: Node list.\n node: Node that is being connected to nodes in q.\n index: Index used by tarjan method.\n component: Current component number.\n Pre:\n Should be called by tarjan and itself (recursive) only.\n Post:\n Nodes will have 'component' attribute that will have component\n number as value. Changes 'index' attribute of nodes.\n Return:\n Returns new index and component numbers.\n '''\n self.set_node_attr(node, 'index', index)\n self.set_node_attr(node, 'lowlink', index)\n index += 1\n q.append(node)\n for m in self.get_neighbors(node):\n if self.get_node_attr(m, 'index') is None:\n index, component = self.strong_connect(q, m, index, component)\n self.set_node_attr(node, 'lowlink',\n min([self.get_node_attr(node, 'lowlink'),\n self.get_node_attr(m, 'lowlink')]))\n elif m in q:\n self.set_node_attr(node, 'lowlink',\n min([self.get_node_attr(node, 'lowlink'),\n self.get_node_attr(m, 'index')]))\n if self.get_node_attr(node, 'lowlink') == self.get_node_attr(node, 'index'):\n m = q.pop()\n self.set_node_attr(m, 'component', component)\n while (node!=m):\n m = q.pop()\n self.set_node_attr(m, 'component', component)\n component += 1\n self.num_components = component\n\n return (index, component)", "response": "This method will strongly connect a node to a set of nodes."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef dfs(self, root, disc_count = 0, finish_count = 1, component = None,\n transpose = False, display = None, pred = None):\n '''\n API: dfs(self, root, disc_count = 0, finish_count = 1, component=None,\n transpose=False)\n Description:\n Make a depth-first search starting from node with name root.\n Input:\n root: Starting node name.\n disc_count: Discovery time.\n finish_count: Finishing time.\n component: component number.\n transpose: Goes in the reverse direction along edges if transpose\n is True.\n Post:\n Nodes will 
have 'component' attribute that will have component\n number as value. Updates 'disc_time' and 'finish_time' attributes\n of nodes which represents discovery time and finishing time.\n Return:\n Returns a tuple that has discovery time and finish time of the\n last node in the following form (disc_time,finish_time).\n '''\n if pred == None:\n pred = {}\n if display == None:\n display = self.attr['display']\n else:\n self.set_display_mode(display)\n neighbors = self.neighbors\n if self.graph_type == DIRECTED_GRAPH and transpose:\n neighbors = self.in_neighbors\n self.get_node(root).set_attr('component', component)\n disc_count += 1\n self.get_node(root).set_attr('disc_time', disc_count)\n self.get_node(root).set_attr('label', str(disc_count)+',-')\n self.get_node(root).set_attr('color', 'blue')\n if root in pred:\n self.set_edge_attr(pred[root], root, 'color', 'green')\n self.display()\n if transpose:\n fTime = []\n for n in neighbors[root]:\n fTime.append((n,self.get_node(n).get_attr('finish_time')))\n neighbor_list = sorted(fTime, key=operator.itemgetter(1))\n neighbor_list = list(t[0] for t in neighbor_list)\n neighbor_list.reverse()\n else:\n neighbor_list = neighbors[root]\n for i in neighbor_list:\n if not transpose:\n if self.get_node(i).get_attr('disc_time') is None:\n pred[i] = root\n disc_count, finish_count = self.dfs(i, disc_count,\n finish_count,\n component, transpose,\n pred = pred)\n else:\n if self.get_node(i).get_attr('component') is None:\n disc_count, finish_count = self.dfs(i, disc_count,\n finish_count,\n component, transpose,\n pred = pred)\n self.get_node(root).set_attr('finish_time', finish_count)\n d_time = self.get_node(root).get_attr('disc_time')\n label = '\"' + str(d_time) + ',' + str(finish_count) + '\"'\n self.get_node(root).set_attr('label', label)\n self.get_node(root).set_attr('color', 'green')\n self.display()\n finish_count += 1\n return disc_count, finish_count", "response": "This function returns a depth - first search of the 
tree with the given root name."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef search(self, source, destination = None, display = None,\n component = None, q = None,\n algo = 'DFS', reverse = False, **kargs):\n '''\n API: search(self, source, destination = None, display = None,\n component = None, q = Stack(),\n algo = 'DFS', reverse = False, **kargs)\n Description:\n Generic search method. Changes behavior (dfs,bfs,dijkstra,prim)\n according to algo argument.\n if destination is not specified:\n This method determines all nodes reachable from \"source\" ie. creates\n precedence tree and returns it (dictionary).\n if destionation is given:\n If there exists a path from \"source\" to \"destination\" it will return\n list of the nodes is this path. If there is no such path, it will\n return the precedence tree constructed from source (dictionary).\n Optionally, it marks all nodes reachable from \"source\" with a component\n number. The variable \"q\" determines the order in which the nodes are\n searched.\n Input:\n source: Search starts from node with this name.\n destination: Destination node name.\n display: Display method.\n algo: Algortihm that specifies search. Available algortihms are\n 'DFS', 'BFS', 'Dijkstra' and 'Prim'.\n reverse: Search goes in reverse arc directions if True.\n kargs: Additional keyword arguments.\n Post:\n Nodes will have 'component' attribute that will have component\n number as value (if component argument provided). 
Color attribute\n of nodes and edges may change.\n Return:\n Returns predecessor tree in dictionary form if destination is\n not specified, returns list of node names in the path from source\n to destionation if destionation is specified and there is a path.\n If there is no path returns predecessor tree in dictionary form.\n See description section.\n '''\n if display == None:\n display = self.attr['display']\n else:\n self.set_display_mode(display)\n if algo == 'DFS':\n if q is None:\n q = Stack()\n self.get_node(source).set_attr('component', component)\n elif algo == 'BFS' or algo == 'UnweightedSPT':\n if q is None:\n q = Queue()\n self.get_node(source).set_attr('component', component)\n elif algo == 'Dijkstra' or algo == 'Prim':\n if q is None:\n q = PriorityQueue()\n else:\n print(\"Unknown search algorithm...exiting\")\n return\n neighbors = self.neighbors\n if self.graph_type == DIRECTED_GRAPH and reverse:\n neighbors = self.in_neighbors\n for i in self.get_node_list():\n self.get_node(i).set_attr('label', '-')\n self.get_node(i).attr.pop('priority', None)\n self.get_node(i).set_attr('distance', None)\n self.get_node(i).set_attr('color', 'black')\n for j in neighbors[i]:\n if reverse:\n self.set_edge_attr(j, i, 'color', 'black')\n else:\n self.set_edge_attr(i, j, 'color', 'black')\n self.display()\n pred = {}\n self.process_edge_search(None, source, pred, q, component, algo,\n **kargs)\n found = True\n if source != destination:\n found = False\n while not q.isEmpty() and not found:\n current = q.peek()\n if self.get_node(current).get_attr('color') == 'green':\n q.remove(current)\n continue\n self.process_node_search(current, q, **kargs)\n self.get_node(current).set_attr('color', 'blue')\n if current != source:\n if reverse:\n self.set_edge_attr(current, pred[current], 'color', 'green')\n else:\n self.set_edge_attr(pred[current], current, 'color', 'green')\n if current == destination:\n found = True\n break\n self.display()\n for n in neighbors[current]:\n if 
self.get_node(n).get_attr('color') != 'green':\n if reverse:\n self.set_edge_attr(n, current, 'color', 'yellow')\n else:\n self.set_edge_attr(current, n, 'color', 'yellow')\n self.display()\n self.process_edge_search(current, n, pred, q, component,\n algo, **kargs)\n if reverse:\n self.set_edge_attr(n, current, 'color', 'black')\n else:\n self.set_edge_attr(current, n, 'color', 'black')\n q.remove(current)\n self.get_node(current).set_attr('color', 'green')\n self.display()\n if found:\n path = [destination]\n current = destination\n while current != source:\n path.insert(0, pred[current])\n current = pred[current]\n return path\n if destination == None:\n return pred\n else:\n return None", "response": "Search for a node in a node tree."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef minimum_spanning_tree_prim(self, source, display = None,\n q = PriorityQueue()):\n '''\n API: minimum_spanning_tree_prim(self, source, display = None,\n q = PriorityQueue())\n Description:\n Determines a minimum spanning tree of all nodes reachable\n from source using Prim's Algorithm.\n Input:\n source: Name of source node.\n display: Display method.\n q: Data structure that holds nodes to be processed in a queue.\n Post:\n 'color', 'distance', 'component' attribute of nodes and edges may\n change.\n Return:\n Returns predecessor tree in dictionary format.\n '''\n if display == None:\n display = self.attr['display']\n else:\n self.set_display_mode(display)\n if isinstance(q, PriorityQueue):\n addToQ = q.push\n removeFromQ = q.pop\n peek = q.peek\n isEmpty = q.isEmpty\n neighbors = self.get_neighbors\n pred = {}\n addToQ(source)\n done = False\n while not isEmpty() and not done:\n current = removeFromQ()\n self.set_node_attr(current, 'color', 'blue')\n if current != source:\n self.set_edge_attr(pred[current], current, 'color', 'green')\n self.display()\n for n in neighbors(current):\n if self.get_node_attr(n, 'color') != 'green':\n 
self.set_edge_attr(current, n, 'color', 'yellow')\n self.display()\n new_estimate = self.get_edge_attr(current, n, 'cost')\n if not n in pred or new_estimate < peek(n)[0]:\n pred[n] = current\n self.set_node_attr(n, 'color', 'red')\n self.set_node_attr(n, 'label', new_estimate)\n addToQ(n, new_estimate)\n self.display()\n self.set_node_attr(n, 'color', 'black')\n self.set_edge_attr(current, n, 'color', 'black')\n self.set_node_attr(current, 'color', 'green')\n self.display()\n return pred", "response": "This method returns a minimum spanning tree of all nodes reachable from source using Prim s Algorithm."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef minimum_spanning_tree_kruskal(self, display = None, components = None):\n '''\n API: minimum_spanning_tree_kruskal(self, display = None,\n components = None)\n Description:\n Determines a minimum spanning tree using Kruskal's Algorithm.\n Input:\n display: Display method.\n component: component number.\n Post:\n 'color' attribute of nodes and edges may change.\n Return:\n Returns list of edges where edges are tuples in (source,sink)\n format.\n '''\n if display == None:\n display = self.attr['display']\n else:\n self.set_display_mode(display)\n if components is None:\n components = DisjointSet(display = display, layout = 'dot',\n optimize = False)\n sorted_edge_list = sorted(self.get_edge_list(), key=self.get_edge_cost)\n edges = []\n for n in self.get_node_list():\n components.add([n])\n components.display()\n for e in sorted_edge_list:\n if len(edges) == len(self.get_node_list()) - 1:\n break\n self.set_edge_attr(e[0], e[1], 'color', 'yellow')\n self.display()\n if components.union(e[0], e[1]):\n self.set_edge_attr(e[0], e[1], 'color', 'green')\n self.display()\n edges.append(e)\n else:\n self.set_edge_attr(e[0], e[1], 'color', 'black')\n self.display()\n components.display()\n return edges", "response": "This method returns a list of edges where 
edges are tuples of source and sink nodes."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef relabel(self, i):\n '''\n API: relabel(self, i)\n Description:\n Used by max_flow_preflowpush() method for relabelling node i.\n Input:\n i: Node that is being relabelled.\n Post:\n 'distance' attribute of node i is updated.\n '''\n min_distance = 2*len(self.get_node_list()) + 1\n for j in self.get_neighbors(i):\n if (self.get_node_attr(j, 'distance') < min_distance and\n (self.get_edge_attr(i, j, 'flow') <\n self.get_edge_attr(i, j, 'capacity'))):\n min_distance = self.get_node_attr(j, 'distance')\n for j in self.get_in_neighbors(i):\n if (self.get_node_attr(j, 'distance') < min_distance and\n self.get_edge_attr(j, i, 'flow') > 0):\n min_distance = self.get_node_attr(j, 'distance')\n self.set_node_attr(i, 'distance', min_distance + 1)", "response": "This method relabels the node i to the flow of the node i."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef show_flow(self):\n '''\n API: relabel(self, i)\n Description:\n Used by max_flow_preflowpush() method for display purposed.\n Post:\n 'color' and 'label' attribute of edges/nodes are updated.\n '''\n for n in self.get_node_list():\n excess = self.get_node_attr(n, 'excess')\n distance = self.get_node_attr(n, 'distance')\n self.set_node_attr(n, 'label', str(excess)+'/'+str(distance))\n for neighbor in self.get_neighbors(n):\n capacity = self.get_edge_attr(n, neighbor, 'capacity')\n flow = self.get_edge_attr(n, neighbor, 'flow')\n if capacity == INF:\n self.set_edge_attr(n, neighbor, 'label',\n 'INF'+'/'+str(flow))\n else:\n self.set_edge_attr(n, neighbor, 'label',\n str(capacity)+'/'+str(flow))\n if capacity == flow:\n self.set_edge_attr(n, neighbor, 'color', 'red')\n elif flow > 0:\n self.set_edge_attr(n, neighbor, 'color', 'green')\n else:\n self.set_edge_attr(n, neighbor, 'color', 'black')\n self.display()", 
"response": "This method shows the flow of the current node."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates and return a residual graph for the current node.", "response": "def create_residual_graph(self):\n '''\n API: create_residual_graph(self)\n Description:\n Creates and returns residual graph, which is a Graph instance\n itself.\n Pre:\n (1) Arcs should have 'flow', 'capacity' and 'cost' attribute\n (2) Graph should be a directed graph\n Return:\n Returns residual graph, which is a Graph instance.\n '''\n if self.graph_type is UNDIRECTED_GRAPH:\n raise Exception('residual graph is defined for directed graphs.')\n residual_g = Graph(type = DIRECTED_GRAPH)\n for e in self.get_edge_list():\n capacity_e = self.get_edge_attr(e[0], e[1], 'capacity')\n flow_e = self.get_edge_attr(e[0], e[1], 'flow')\n cost_e = self.get_edge_attr(e[0], e[1], 'cost')\n if flow_e > 0:\n residual_g.add_edge(e[1], e[0], cost=-1*cost_e,\n capacity=flow_e)\n if capacity_e - flow_e > 0:\n residual_g.add_edge(e[0], e[1], cost=cost_e,\n capacity=capacity_e-flow_e)\n return residual_g"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef find_feasible_flow(self):\n '''\n API:\n find_feasible_flow(self)\n Description:\n Solves feasible flow problem, stores solution in 'flow' attribute\n or arcs. This method is used to get an initial feasible flow for\n simplex and cycle canceling algorithms. Uses max_flow() method.\n Other max flow methods can also be used. Returns True if a feasible\n flow is found, returns False, if the problem is infeasible. 
When\n the problem is infeasible 'flow' attributes of arcs should be\n considered as junk.\n Pre:\n (1) 'capacity' attribute of arcs\n (2) 'demand' attribute of nodes\n Post:\n Keeps solution in 'flow' attribute of arcs.\n Return:\n Returns True if a feasible flow is found, returns False, if the\n problem is infeasible\n '''\n # establish a feasible flow in the network, to do this add nodes s and\n # t and solve a max flow problem.\n nl = self.get_node_list()\n for i in nl:\n b_i = self.get_node(i).get_attr('demand')\n if b_i > 0:\n # i is a supply node, add (s,i) arc\n self.add_edge('s', i, capacity=b_i)\n elif b_i < 0:\n # i is a demand node, add (i,t) arc\n self.add_edge(i, 't', capacity=-1*b_i)\n # solve max flow on this modified graph\n self.max_flow('s', 't', 'off')\n # check if all demand is satisfied, i.e. the min cost problem is\n # feasible or not\n for i in self.neighbors['s']:\n flow = self.get_edge_attr('s', i, 'flow')\n capacity = self.get_edge_attr('s', i, 'capacity')\n if flow != capacity:\n self.del_node('s')\n self.del_node('t')\n return False\n # remove node 's' and node 't'\n self.del_node('s')\n self.del_node('t')\n return True", "response": "This method finds a feasible flow in the modified graph and returns True if it is feasible False otherwise."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nwrite the current object to disk using the specified layout and format.", "response": "def write(self, basename = 'graph', layout = None, format='png'):\n '''\n API:\n write(self, basename = 'graph', layout = None, format='png')\n Description:\n Writes graph to dist using layout and format.\n Input:\n basename: name of the file that will be written.\n layout: Dot layout for generating graph image.\n format: Image format, all format supported by Dot are wellcome.\n Post:\n File will be written to disk.\n '''\n if layout == None:\n layout = self.get_layout()\n f = open(basename, \"w+b\")\n if format == 'dot':\n 
f.write(self.to_string())\n else:\n f.write(self.create(layout, format))\n f.close()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create(self, layout, format, **args):\n '''\n API:\n create(self, layout, format, **args)\n Description:\n Returns postscript representation of graph.\n Input:\n layout: Dot layout for generating graph image.\n format: Image format, all format supported by Dot are wellcome.\n Return:\n Returns postscript representation of graph.\n '''\n tmp_fd, tmp_name = tempfile.mkstemp()\n tmp_file = os.fdopen(tmp_fd, 'w')\n tmp_file.write(self.to_string())\n # ne need for os.close(tmp_fd), since we have tmp_file.close(tmp_file)\n tmp_file.close()\n tmp_dir = os.path.dirname(tmp_name)\n try:\n p = subprocess.Popen([layout, '-T'+format, tmp_name],\n cwd=tmp_dir,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n except OSError:\n print('''Graphviz executable not found.\nGraphviz must be installed and in your search path.\nPlease visit http://www.graphviz.org/ for information on installation.\nAfter installation, ensure that the PATH variable is properly set.''')\n return\n stdout_output, stderr_output = p.communicate()\n if p.returncode != 0 :\n raise Exception('Graphviz executable terminated with status: %d. 
stderr follows: %s' % (\n p.returncode, stderr_output))\n elif stderr_output:\n print(stderr_output)\n return stdout_output", "response": "A method to create a new node in Graphviz."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ndisplays the graph according to the arguments provided.", "response": "def display(self, highlight = None, basename = 'graph', format = 'png',\n pause = True):\n '''\n API:\n display(self, highlight = None, basename = 'graph', format = 'png',\n pause = True)\n Description:\n Displays graph according to the arguments provided.\n Current display modes: 'off', 'file', 'pygame', 'PIL', 'xdot',\n 'svg'\n Current layout modes: Layouts provided by graphviz ('dot', 'fdp',\n 'circo', etc.) and 'dot2tex'.\n Current formats: Formats provided by graphviz ('ps', 'pdf', 'png',\n etc.)\n Input:\n highlight: List of nodes to be highlighted.\n basename: File name. It will be used if display mode is 'file'.\n format: Image format, all format supported by Dot are wellcome.\n pause: If display is 'pygame' and pause is True pygame will pause\n and wait for user input before closing the display. 
It will close\n display window straightaway otherwise.\n Post:\n A display window will pop up or a file will be written depending\n on display mode.\n '''\n if self.attr['display'] == 'off':\n return\n if highlight != None:\n for n in highlight:\n if not isinstance(n, Node):\n n = self.get_node(n)\n n.set_attr('color', 'red')\n if self.get_layout() == 'dot2tex':\n if self.attr['display'] != 'file':\n self.attr['display'] = 'file'\n print(\"Warning: Dot2tex layout can only be used with display mode 'file'\")\n print(\" Automatically changing setting\")\n if self.attr['display'] == 'file':\n if self.get_layout() == 'dot2tex':\n try:\n if DOT2TEX_INSTALLED:\n if format != 'pdf' or format != 'ps':\n print(\"Dot2tex only supports pdf and ps formats, falling back to pdf\")\n format = 'pdf'\n self.set_layout('dot')\n tex = dot2tex.dot2tex(self.to_string(), autosize=True, texmode = 'math', template = DOT2TEX_TEMPLATE)\n else:\n print(\"Error: Dot2tex not installed.\")\n except:\n try:\n self.set_layout('dot')\n self.write(basename+'.dot', self.get_layout(), 'dot')\n sp = subprocess.Popen('dot2tex -t math ' + basename + '.dot', stdout = subprocess.PIPE, \n stderr = subprocess.STDOUT)\n tex = sp.communicate()[0]\n except:\n print(\"There was an error running dot2tex.\")\n f = open(basename+'.tex', 'w')\n f.write(tex)\n f.close()\n try: \n subprocess.call(['latex', basename])\n if format == 'ps':\n subprocess.call(['dvips', basename])\n elif format == 'pdf':\n subprocess.call(['pdflatex', basename])\n self.set_layout('dot2tex')\n except:\n print(\"There was an error runing latex. 
Is it installed?\")\n else:\n self.write(basename+'.'+format, self.get_layout(), format)\n return\n elif self.attr['display'] == 'pygame':\n output = self.create(self.get_layout(), format)\n if output is not None:\n im = io.StringIO(self.create(self.get_layout(), format))\n picture = pygame.image.load(im, format)\n screen = pygame.display.set_mode(picture.get_size())\n screen.blit(picture, picture.get_rect())\n pygame.display.flip()\n while pause:\n e = pygame.event.poll()\n if e.type == pygame.KEYDOWN:\n break\n if e.type == pygame.QUIT:\n pause = False\n pygame.display.quit()\n # sys.exit() exits the whole program and I (aykut) guess it is\n # not appropriate here.\n #sys.exit()\n else:\n print('Error in displaying graph. Display disabled')\n self.set_display_mode('off')\n elif self.attr['display'] == 'PIL':\n im = io.StringIO(self.create(self.get_layout(), format))\n if PIL_INSTALLED:\n im2 = PIL_Image.open(im)\n im2.show()\n else:\n print('Error: PIL not installed. Display disabled.')\n self.attr['display'] = 'off'\n elif self.attr['display'] == 'xdot':\n if XDOT_INSTALLED:\n window = xdot.DotWindow()\n window.set_dotcode(self.to_string())\n window.connect('destroy', gtk.main_quit)\n gtk.main()\n else:\n print('Error: xdot not installed. Display disabled.')\n self.attr['display'] = 'off'\n elif self.attr['display'] == 'svg':\n if not ETREE_INSTALLED:\n print('Error: etree not installed (display mode: svg). 
Display disabled.')\n self.attr['display'] = 'off'\n else:\n print(\"Unknown display mode: \", end=' ')\n print(self.attr['display'])\n if highlight != None:\n for n in highlight:\n if not isinstance(n, Node):\n n = self.get_node(n)\n n.set_attr('color', 'black')"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef max_flow(self, source, sink, display = None, algo = 'DFS'):\n '''\n API: max_flow(self, source, sink, display=None)\n Description:\n Finds maximum flow from source to sink by a depth-first search based\n augmenting path algorithm.\n Pre:\n Assumes a directed graph in which each arc has a 'capacity'\n attribute and for which there does does not exist both arcs (i,j)\n and (j, i) for any pair of nodes i and j.\n Input:\n source: Source node name.\n sink: Sink node name.\n display: Display mode.\n Post:\n The 'flow\" attribute of each arc gives a maximum flow.\n '''\n if display is not None:\n old_display = self.attr['display']\n self.attr['display'] = display\n nl = self.get_node_list()\n # set flow of all edges to 0\n for e in self.edge_attr:\n self.edge_attr[e]['flow'] = 0\n if 'capacity' in self.edge_attr[e]:\n capacity = self.edge_attr[e]['capacity']\n self.edge_attr[e]['label'] = str(capacity)+'/0'\n else:\n self.edge_attr[e]['capacity'] = INF\n self.edge_attr[e]['label'] = 'INF/0'\n while True:\n # find an augmenting path from source to sink using DFS\n if algo == 'DFS':\n q = Stack()\n elif algo == 'BFS':\n q = Queue()\n q.push(source)\n pred = {source:None}\n explored = [source]\n for n in nl:\n self.get_node(n).set_attr('color', 'black')\n for e in self.edge_attr:\n if self.edge_attr[e]['flow'] == 0:\n self.edge_attr[e]['color'] = 'black'\n elif self.edge_attr[e]['flow']==self.edge_attr[e]['capacity']:\n self.edge_attr[e]['color'] = 'red'\n else:\n self.edge_attr[e]['color'] = 'green'\n self.display()\n while not q.isEmpty():\n current = q.peek()\n q.remove(current)\n if current == sink:\n 
break\n out_neighbor = self.neighbors[current]\n in_neighbor = self.in_neighbors[current]\n neighbor = out_neighbor+in_neighbor\n for m in neighbor:\n if m in explored:\n continue\n self.get_node(m).set_attr('color', 'yellow')\n if m in out_neighbor:\n self.set_edge_attr(current, m, 'color', 'yellow')\n available_capacity = (\n self.get_edge_attr(current, m, 'capacity')-\n self.get_edge_attr(current, m, 'flow'))\n else:\n self.set_edge_attr(m, current, 'color', 'yellow')\n available_capacity=self.get_edge_attr(m, current, 'flow')\n self.display()\n if available_capacity > 0:\n self.get_node(m).set_attr('color', 'blue')\n if m in out_neighbor:\n self.set_edge_attr(current, m, 'color', 'blue')\n else:\n self.set_edge_attr(m, current, 'color', 'blue')\n explored.append(m)\n pred[m] = current\n q.push(m)\n else:\n self.get_node(m).set_attr('color', 'black')\n if m in out_neighbor:\n if (self.get_edge_attr(current, m, 'flow') ==\n self.get_edge_attr(current, m, 'capacity')):\n self.set_edge_attr(current, m, 'color', 'red')\n elif self.get_edge_attr(current, m, 'flow') == 0:\n self.set_edge_attr(current, m, 'color', 'black')\n #else:\n # self.set_edge_attr(current, m, 'color', 'green')\n else:\n if (self.get_edge_attr(m, current, 'flow') ==\n self.get_edge_attr(m, current, 'capacity')):\n self.set_edge_attr(m, current, 'color', 'red')\n elif self.get_edge_attr(m, current, 'flow') == 0:\n self.set_edge_attr(m, current, 'color', 'black')\n #else:\n # self.set_edge_attr(m, current, 'color', 'green')\n self.display()\n # if no path with positive capacity from source sink exists, stop\n if sink not in pred:\n break\n # find capacity of the path\n current = sink\n min_capacity = 'infinite'\n while True:\n m = pred[current]\n if (m,current) in self.edge_attr:\n arc_capacity = self.edge_attr[(m, current)]['capacity']\n flow = self.edge_attr[(m, current)]['flow']\n potential = arc_capacity-flow\n if min_capacity == 'infinite':\n min_capacity = potential\n elif min_capacity > 
potential:\n min_capacity = potential\n else:\n potential = self.edge_attr[(current, m)]['flow']\n if min_capacity == 'infinite':\n min_capacity = potential\n elif min_capacity > potential:\n min_capacity = potential\n if m == source:\n break\n current = m\n # update flows on the path\n current = sink\n while True:\n m = pred[current]\n if (m, current) in self.edge_attr:\n flow = self.edge_attr[(m, current)]['flow']\n capacity = self.edge_attr[(m, current)]['capacity']\n new_flow = flow+min_capacity\n self.edge_attr[(m, current)]['flow'] = new_flow\n if capacity == INF:\n self.edge_attr[(m, current)]['label'] = \\\n 'INF' + '/'+str(new_flow)\n else:\n self.edge_attr[(m, current)]['label'] = \\\n str(capacity)+'/'+str(new_flow)\n if new_flow==capacity:\n self.edge_attr[(m, current)]['color'] = 'red'\n else:\n self.edge_attr[(m, current)]['color'] = 'green'\n self.display()\n else:\n flow = self.edge_attr[(current, m)]['flow']\n capacity = self.edge_attr[(current, m)]['capacity']\n new_flow = flow-min_capacity\n self.edge_attr[(current, m)]['flow'] = new_flow\n if capacity == INF:\n self.edge_attr[(current, m)]['label'] = \\\n 'INF' + '/'+str(new_flow)\n else:\n self.edge_attr[(current, m)]['label'] = \\\n str(capacity)+'/'+str(new_flow)\n if new_flow==0:\n self.edge_attr[(current, m)]['color'] = 'red'\n else:\n self.edge_attr[(current, m)]['color'] = 'green'\n self.display()\n if m == source:\n break\n current = m\n if display is not None:\n self.attr['display'] = old_display", "response": "This method finds the maximum flow from source to sink using a depth - first search based on the edge attributes."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_negative_cycle(self):\n '''\n API:\n get_negative_cycle(self)\n Description:\n Finds and returns negative cost cycle using 'cost' attribute of\n arcs. 
Return value is a list of nodes representing cycle it is in\n the following form; n_1-n_2-...-n_k, when the cycle has k nodes.\n Pre:\n Arcs should have 'cost' attribute.\n Return:\n Returns a list of nodes in the cycle if a negative cycle exists,\n returns None otherwise.\n '''\n nl = self.get_node_list()\n i = nl[0]\n (valid, distance, nextn) = self.floyd_warshall()\n if not valid:\n cycle = self.floyd_warshall_get_cycle(distance, nextn)\n return cycle\n else:\n return None", "response": "Returns a list of nodes in the cycle if a negative cycle exists returns None otherwise."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef floyd_warshall_get_path(self, distance, nextn, i, j):\n '''\n API:\n floyd_warshall_get_path(self, distance, nextn, i, j):\n Description:\n Finds shortest path between i and j using distance and nextn\n dictionaries.\n Pre:\n (1) distance and nextn are outputs of floyd_warshall method.\n (2) The graph does not have a negative cycle, , ie.\n distance[(i,i)] >=0 for all node i.\n Return:\n Returns the list of nodes on the path from i to j, ie. [i,...,j]\n '''\n if distance[(i,j)]=='infinity':\n return None\n k = nextn[(i,j)]\n path = self.floyd_warshall_get_path\n if i==k:\n return [i, j]\n else:\n return path(distance, nextn, i,k) + [k] + path(distance, nextn, k,j)", "response": "This method returns the shortest path between i and j using distance and nextn dictionaries."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef floyd_warshall_get_cycle(self, distance, nextn, element = None):\n '''\n API:\n floyd_warshall_get_cycle(self, distance, nextn, element = None)\n Description:\n Finds a negative cycle in the graph.\n Pre:\n (1) distance and nextn are outputs of floyd_warshall method.\n (2) The graph should have a negative cycle, , ie.\n distance[(i,i)] < 0 for some node i.\n Return:\n Returns the list of nodes on the cycle. 
Ex: [i,j,k,...,r], where\n (i,j), (j,k) and (r,i) are some edges in the cycle.\n '''\n nl = self.get_node_list()\n if element is None:\n for i in nl:\n if distance[(i,i)] < 0:\n # graph has a cycle on the path from i to i.\n element = i\n break\n else:\n raise Exception('Graph does not have a negative cycle!')\n elif distance[(element,element)] >= 0:\n raise Exception('Graph does not have a negative cycle that contains node '+str(element)+'!')\n # find the cycle on the path from i to i.\n cycle = [element]\n k = nextn[(element,element)]\n while k not in cycle:\n cycle.insert(1,k)\n k = nextn[(element,k)]\n if k==element:\n return cycle\n else:\n return self.floyd_warshall_get_cycle(distance, nextn, k)", "response": "This method returns a list of nodes on the cycle that are at a given distance from the next node."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef find_cycle_capacity(self, cycle):\n '''\n API:\n find_cycle_capacity(self, cycle):\n Description:\n Finds capacity of the cycle input.\n Pre:\n (1) Arcs should have 'capacity' attribute.\n Input:\n cycle: a list representing a cycle\n Return:\n Returns an integer number representing capacity of cycle.\n '''\n index = 0\n k = len(cycle)\n capacity = self.get_edge_attr(cycle[k-1], cycle[0], 'capacity')\n while index<(k-1):\n i = cycle[index]\n j = cycle[index+1]\n capacity_ij = self.get_edge_attr(i, j, 'capacity')\n if capacity > capacity_ij:\n capacity = capacity_ij\n index += 1\n return capacity", "response": "This function finds the capacity of a cycle."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef label_correcting_check_cycle(self, j, pred):\n '''\n API:\n label_correcting_check_cycle(self, j, pred)\n Description:\n Checks if predecessor dictionary has a cycle, j represents the node\n that predecessor is recently updated.\n Pre:\n (1) predecessor of source node should be None.\n Input:\n 
j: node that predecessor is recently updated.\n pred: predecessor dictionary\n Return:\n If there exists a cycle, returns the list that represents the\n cycle, otherwise it returns to None.\n '''\n labelled = {}\n for n in self.neighbors:\n labelled[n] = None\n current = j\n while current != None:\n if labelled[current]==j:\n cycle = self.label_correcting_get_cycle(j, pred)\n return cycle\n labelled[current] = j\n current = pred[current]\n return None", "response": "This function label_correcting_check_cycle returns the list of cycles that are in the predecessor dictionary j."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef augment_cycle(self, amount, cycle):\n '''\n API:\n augment_cycle(self, amount, cycle):\n Description:\n Augments 'amount' unit of flow along cycle.\n Pre:\n Arcs should have 'flow' attribute.\n Inputs:\n amount: An integer representing the amount to augment\n cycle: A list representing a cycle\n Post:\n Changes 'flow' attributes of arcs.\n '''\n index = 0\n k = len(cycle)\n while index<(k-1):\n i = cycle[index]\n j = cycle[index+1]\n if (i,j) in self.edge_attr:\n flow_ij = self.edge_attr[(i,j)]['flow']\n self.edge_attr[(i,j)]['flow'] = flow_ij+amount\n else:\n flow_ji = self.edge_attr[(j,i)]['flow']\n self.edge_attr[(j,i)]['flow'] = flow_ji-amount\n index += 1\n i = cycle[k-1]\n j = cycle[0]\n if (i,j) in self.edge_attr:\n flow_ij = self.edge_attr[(i,j)]['flow']\n self.edge_attr[(i,j)]['flow'] = flow_ij+amount\n else:\n flow_ji = self.edge_attr[(j,i)]['flow']\n self.edge_attr[(j,i)]['flow'] = flow_ji-amount", "response": "Augment the amount of flow along a cycle."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef network_simplex(self, display, pivot, root):\n '''\n API:\n network_simplex(self, display, pivot, root)\n Description:\n Solves minimum cost feasible flow problem using network simplex\n algorithm. 
It is recommended to use min_cost_flow(algo='simplex')\n instead of using network_simplex() directly. Returns True when an\n optimal solution is found, returns False otherwise. 'flow' attribute\n values of arcs should be considered as junk when returned False.\n Pre:\n (1) check Pre section of min_cost_flow()\n Input:\n pivot: specifies pivot rule. Check min_cost_flow()\n display: 'off' for no display, 'pygame' for live update of\n spanning tree.\n root: Root node for the underlying spanning trees that will be\n generated by network simplex algorthm.\n Post:\n (1) Changes 'flow' attribute of edges.\n Return:\n Returns True when an optimal solution is found, returns\n False otherwise.\n '''\n # ==== determine an initial tree structure (T,L,U)\n # find a feasible flow\n if not self.find_feasible_flow():\n return False\n t = self.simplex_find_tree()\n self.set_display_mode(display)\n # mark spanning tree arcs\n self.simplex_mark_st_arcs(t)\n # display initial spanning tree\n t.simplex_redraw(display, root)\n t.set_display_mode(display)\n #t.display()\n self.display()\n # set predecessor, depth and thread indexes\n t.simplex_search(root, 1)\n # compute potentials\n self.simplex_compute_potentials(t, root)\n # while some nontree arc violates optimality conditions\n while not self.simplex_optimal(t):\n self.display()\n # select an entering arc (k,l)\n (k,l) = self.simplex_select_entering_arc(t, pivot)\n self.simplex_mark_entering_arc(k, l)\n self.display()\n # determine leaving arc\n ((p,q), capacity, cycle)=self.simplex_determine_leaving_arc(t,k,l)\n # mark leaving arc\n self.simplex_mark_leaving_arc(p, q)\n self.display()\n self.simplex_remove_arc(t, p, q, capacity, cycle)\n # display after arc removed\n self.display()\n self.simplex_mark_st_arcs(t)\n self.display()\n # set predecessor, depth and thread indexes\n t.simplex_redraw(display, root)\n #t.display()\n t.simplex_search(root, 1)\n # compute potentials\n self.simplex_compute_potentials(t, root)\n return True", 
"response": "This function returns True when an optimal solution is found and False otherwise."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef simplex_mark_st_arcs(self, t):\n '''\n API:\n simplex_mark_st_arcs(self, t)\n Description:\n Marks spanning tree arcs.\n Case 1, Blue: Arcs that are at lower bound and in tree.\n Case 2, Red: Arcs that are at upper bound and in tree.\n Case 3, Green: Arcs that are between bounds are green.\n Case 4, Brown: Non-tree arcs at lower bound.\n Case 5, Violet: Non-tree arcs at upper bound.\n Input:\n t: t is the current spanning tree\n Post:\n (1) color attribute of edges.\n '''\n tel = list(t.edge_attr.keys())\n for e in self.get_edge_list():\n flow_e = self.edge_attr[e]['flow']\n capacity_e = self.edge_attr[e]['capacity']\n if e in tel:\n if flow_e == 0:\n self.edge_attr[e]['color'] = 'blue'\n elif flow_e == capacity_e:\n self.edge_attr[e]['color'] = 'blue'\n else:\n self.edge_attr[e]['color'] = 'blue'\n else:\n if flow_e == 0:\n self.edge_attr[e]['color'] = 'black'\n elif flow_e == capacity_e:\n self.edge_attr[e]['color'] = 'black'\n else:\n msg = \"Arc is not in ST but has flow between bounds.\"\n raise Exception(msg)", "response": "Mark all arcs in the current spanning tree as st."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nprint all positive flows to stdout.", "response": "def print_flow(self):\n '''\n API:\n print_flow(self)\n Description:\n Prints all positive flows to stdout. 
This method can be used for\n debugging purposes.\n '''\n print('printing current edge, flow, capacity')\n for e in self.edge_attr:\n if self.edge_attr[e]['flow']!=0:\n print(e, str(self.edge_attr[e]['flow']).ljust(4), end=' ')\n print(str(self.edge_attr[e]['capacity']).ljust(4))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef simplex_redraw(self, display, root):\n '''\n API:\n simplex_redraw(self, display, root)\n Description:\n Returns a new graph instance that is same as self but adds nodes\n and arcs in a way that the resulting tree will be displayed\n properly.\n Input:\n display: display mode\n root: root node in tree.\n Return:\n Returns a graph same as self.\n '''\n nl = self.get_node_list()\n el = self.get_edge_list()\n new = Graph(type=DIRECTED_GRAPH, layout='dot', display=display)\n pred_i = self.get_node(root).get_attr('pred')\n thread_i = self.get_node(root).get_attr('thread')\n depth_i = self.get_node(root).get_attr('depth')\n new.add_node(root, pred=pred_i, thread=thread_i, depth=depth_i)\n q = [root]\n visited = [root]\n while q:\n name = q.pop()\n visited.append(name)\n neighbors = self.neighbors[name] + self.in_neighbors[name]\n for n in neighbors:\n if n not in new.get_node_list():\n pred_i = self.get_node(n).get_attr('pred')\n thread_i = self.get_node(n).get_attr('thread')\n depth_i = self.get_node(n).get_attr('depth')\n new.add_node(n, pred=pred_i, thread=thread_i, depth=depth_i)\n if (name,n) in el:\n if (name,n) not in new.edge_attr:\n new.add_edge(name,n)\n else:\n if (n,name) not in new.edge_attr:\n new.add_edge(n,name)\n if n not in visited:\n q.append(n)\n for e in el:\n flow = self.edge_attr[e]['flow']\n capacity = self.edge_attr[e]['capacity']\n cost = self.edge_attr[e]['cost']\n new.edge_attr[e]['flow'] = flow\n new.edge_attr[e]['capacity'] = capacity\n new.edge_attr[e]['cost'] = cost\n new.edge_attr[e]['label'] = \"%d/%d/%d\" %(flow,capacity,cost)\n return new", 
"response": "Simplex redraws the current tree."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef simplex_optimal(self, t):\n '''\n API:\n simplex_optimal(self, t)\n Description:\n Checks if the current solution is optimal, if yes returns True,\n False otherwise.\n Pre:\n 'flow' attributes represents a solution.\n Input:\n t: Graph instance tat reperesents spanning tree solution.\n Return:\n Returns True if the current solution is optimal (optimality\n conditions are satisfied), else returns False\n '''\n for e in self.edge_attr:\n if e in t.edge_attr:\n continue\n flow_ij = self.edge_attr[e]['flow']\n potential_i = self.get_node(e[0]).get_attr('potential')\n potential_j = self.get_node(e[1]).get_attr('potential')\n capacity_ij = self.edge_attr[e]['capacity']\n c_ij = self.edge_attr[e]['cost']\n cpi_ij = c_ij - potential_i + potential_j\n if flow_ij==0:\n if cpi_ij < 0:\n return False\n elif flow_ij==capacity_ij:\n if cpi_ij > 0:\n return False\n return True", "response": "Simplex - optimal check for current solution."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef simplex_find_tree(self):\n '''\n API:\n simplex_find_tree(self)\n Description:\n Assumes a feasible flow solution stored in 'flow' attribute's of\n arcs and converts this solution to a feasible spanning tree\n solution.\n Pre:\n (1) 'flow' attributes represents a feasible flow solution.\n Post:\n (1) 'flow' attributes may change when eliminating cycles.\n Return:\n Return a Graph instance that is a spanning tree solution.\n '''\n # find a cycle\n solution_g = self.get_simplex_solution_graph()\n cycle = solution_g.simplex_find_cycle()\n while cycle is not None:\n # find amount to augment and direction\n amount = self.simplex_augment_cycle(cycle)\n # augment along the cycle\n self.augment_cycle(amount, cycle)\n # find a new cycle\n solution_g = 
self.get_simplex_solution_graph()\n cycle = solution_g.simplex_find_cycle()\n # check if the solution is connected\n while self.simplex_connect(solution_g):\n pass\n # add attributes\n for e in self.edge_attr:\n flow = self.edge_attr[e]['flow']\n capacity = self.edge_attr[e]['capacity']\n cost = self.edge_attr[e]['cost']\n self.edge_attr[e]['label'] = \"%d/%d/%d\" %(flow,capacity,cost)\n if e in solution_g.edge_attr:\n solution_g.edge_attr[e]['flow'] = flow\n solution_g.edge_attr[e]['capacity'] = capacity\n solution_g.edge_attr[e]['cost'] = cost\n solution_g.edge_attr[e]['label'] = \"%d/%d/%d\" %(flow,capacity,cost)\n return solution_g", "response": "A method that finds a feasible flow solution for a given set of nodes and edges."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef simplex_connect(self, solution_g):\n '''\n API:\n simplex_connect(self, solution_g)\n Description:\n At this point we assume that the solution does not have a cycle.\n We check if all the nodes are connected, if not we add an arc to\n solution_g that does not create a cycle and return True. Otherwise\n we do nothing and return False.\n Pre:\n (1) We assume there is no cycle in the solution.\n Input:\n solution_g: current spanning tree solution instance.\n Post:\n (1) solution_g is updated. 
An arc that does not create a cycle is\n added.\n (2) 'component' attribute of nodes are changed.\n Return:\n Returns True if an arc is added, returns False otherwise.\n '''\n nl = solution_g.get_node_list()\n current = nl[0]\n pred = solution_g.simplex_search(current, current)\n separated = list(pred.keys())\n for n in nl:\n if solution_g.get_node(n).get_attr('component') != current:\n # find an arc from n to seperated\n for m in separated:\n if (n,m) in self.edge_attr:\n solution_g.add_edge(n,m)\n return True\n elif (m,n) in self.edge_attr:\n solution_g.add_edge(m,n)\n return True\n return False", "response": "This function checks if all the nodes are connected to the current spanning tree solution instance and adds them to the solution_g."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef simplex_search(self, source, component_nr):\n '''\n API:\n simplex_search(self, source, component_nr)\n Description:\n Searches graph starting from source. Its difference from usual\n search is we can also go backwards along an arc. When the graph\n is a spanning tree it computes predecessor, thread and depth\n indexes and stores them as node attributes. These values should be\n considered as junk when the graph is not a spanning tree.\n Input:\n source: source node\n component_nr: component number\n Post:\n (1) Sets the component number of all reachable nodes to component.\n Changes 'component' attribute of nodes.\n (2) Sets 'pred', 'thread' and 'depth' attributes of nodes. 
These\n values are junk if the graph is not a tree.\n Return:\n Returns predecessor dictionary.\n '''\n q = [source]\n pred = {source:None}\n depth = {source:0}\n sequence = []\n for n in self.neighbors:\n self.get_node(n).set_attr('component', None)\n while q:\n current = q.pop()\n self.get_node(current).set_attr('component', component_nr)\n sequence.append(current)\n neighbors = self.in_neighbors[current] + self.neighbors[current]\n for n in neighbors:\n if n in pred:\n continue\n self.get_node(n).set_attr('component', component_nr)\n pred[n] = current\n depth[n] = depth[current]+1\n q.append(n)\n for i in range(len(sequence)-1):\n self.get_node(sequence[i]).set_attr('thread', int(sequence[i+1]))\n self.get_node(sequence[-1]).set_attr('thread', int(sequence[0]))\n for n in pred:\n self.get_node(n).set_attr('pred', pred[n])\n self.get_node(n).set_attr('depth', depth[n])\n return pred", "response": "Simplex search for a node in the graph starting from a given component."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\naugment the cycle with the minimum capacity.", "response": "def simplex_augment_cycle(self, cycle):\n '''\n API:\n simplex_augment_cycle(self, cycle)\n Description:\n Augments along the cycle to break it.\n Pre:\n 'flow', 'capacity' attributes on arcs.\n Input:\n cycle: list representing a cycle in the solution\n Post:\n 'flow' attribute will be modified.\n '''\n # find amount to augment\n index = 0\n k = len(cycle)\n el = list(self.edge_attr.keys())\n # check arc (cycle[k-1], cycle[0])\n if (cycle[k-1], cycle[0]) in el:\n min_capacity = self.edge_attr[(cycle[k-1], cycle[0])]['capacity']-\\\n self.edge_attr[(cycle[k-1], cycle[0])]['flow']\n else:\n min_capacity = self.edge_attr[(cycle[0], cycle[k-1])]['flow']\n # check rest of the arcs in the cycle\n while index<(k-1):\n i = cycle[index]\n j = cycle[index+1]\n if (i,j) in el:\n capacity_ij = self.edge_attr[(i,j)]['capacity'] -\\\n 
self.edge_attr[(i,j)]['flow']\n else:\n capacity_ij = self.edge_attr[(j,i)]['flow']\n if min_capacity > capacity_ij:\n min_capacity = capacity_ij\n index += 1\n return min_capacity"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef simplex_find_cycle(self):\n '''\n API:\n simplex_find_cycle(self)\n Description:\n Returns a cycle (list of nodes) if the graph has one, returns None\n otherwise. Uses DFS. During DFS checks existence of arcs to lower\n depth regions. Note that direction of the arcs are not important.\n Return:\n Returns list of nodes that represents cycle. Returns None if the\n graph does not have any cycle.\n '''\n # make a dfs, if you identify an arc to a lower depth node we have a\n # cycle\n nl = self.get_node_list()\n q = [nl[0]]\n visited = []\n depth = {nl[0]:0}\n pred = {nl[0]:None}\n for n in nl:\n self.get_node(n).set_attr('component', None)\n component_nr = int(nl[0])\n self.get_node(nl[0]).set_attr('component', component_nr)\n while True:\n while q:\n current = q.pop()\n visited.append(current)\n neighbors = self.in_neighbors[current] +\\\n self.neighbors[current]\n for n in neighbors:\n if n==pred[current]:\n continue\n self.get_node(n).set_attr('component', component_nr)\n if n in depth:\n # we have a cycle\n cycle1 = []\n cycle2 = []\n temp = n\n while temp is not None:\n cycle1.append(temp)\n temp = pred[temp]\n temp = current\n while temp is not None:\n cycle2.append(temp)\n temp = pred[temp]\n cycle1.pop()\n cycle1.reverse()\n cycle2.extend(cycle1)\n return cycle2\n else:\n pred[n] = current\n depth[n] = depth[current] + 1\n if n not in visited:\n q.append(n)\n flag = False\n for n in nl:\n if self.get_node(n).get_attr('component') is None:\n q.append(n)\n depth = {n:0}\n pred = {n:None}\n visited = []\n component_nr = int(n)\n self.get_node(n).set_attr('component', component_nr)\n flag = True\n break\n if not flag:\n break\n return None", "response": "This function 
finds a cycle in the graph."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_simplex_solution_graph(self):\n '''\n API:\n get_simplex_solution_graph(self):\n Description:\n Assumes a feasible flow solution stored in 'flow' attribute's of\n arcs. Returns the graph with arcs that have flow between 0 and\n capacity.\n Pre:\n (1) 'flow' attribute represents a feasible flow solution. See\n Pre section of min_cost_flow() for details.\n Return:\n Graph instance that only has the arcs that have flow strictly\n between 0 and capacity.\n '''\n simplex_g = Graph(type=DIRECTED_GRAPH)\n for i in self.neighbors:\n simplex_g.add_node(i)\n for e in self.edge_attr:\n flow_e = self.edge_attr[e]['flow']\n capacity_e = self.edge_attr[e]['capacity']\n if flow_e>0 and flow_e<capacity_e:\n simplex_g.add_edge(e[0], e[1])\n return simplex_g", "response": "Returns a graph that contains all the arcs that have flow between 0 and capacity."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef simplex_compute_potentials(self, t, root):\n '''\n API:\n simplex_compute_potentials(self, t, root)\n Description:\n Computes node potentials for a minimum cost flow problem and stores\n them as node attribute 'potential'. 
Based on pseudocode given in\n Network Flows by Ahuja et al.\n Pre:\n (1) Assumes a directed graph in which each arc has a 'cost'\n attribute.\n (2) Uses 'thread' and 'pred' attributes of nodes.\n Input:\n t: Current spanning tree solution, its type is Graph.\n root: root node of the tree.\n Post:\n Keeps the node potentials as 'potential' attribute.\n '''\n self.get_node(root).set_attr('potential', 0)\n j = t.get_node(root).get_attr('thread')\n while j is not root:\n i = t.get_node(j).get_attr('pred')\n potential_i = self.get_node(i).get_attr('potential')\n if (i,j) in self.edge_attr:\n c_ij = self.edge_attr[(i,j)]['cost']\n self.get_node(j).set_attr('potential', potential_i-c_ij)\n if (j,i) in self.edge_attr:\n c_ji = self.edge_attr[(j,i)]['cost']\n self.get_node(j).set_attr('potential', potential_i+c_ji)\n j = t.get_node(j).get_attr('thread')", "response": "This method computes the potentials for a minimum cost flow problem and stores them as node attributes."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef random(self, numnodes = 10, degree_range = (2, 4), length_range = (1, 10),\n density = None, edge_format = None, node_format = None,\n Euclidean = False, seedInput = 0, add_labels = True,\n parallel_allowed = False, node_selection = 'closest',\n scale = 10, scale_cost = 5):\n '''\n API:\n random(self, numnodes = 10, degree_range = None, length_range = None,\n density = None, edge_format = None, node_format = None,\n Euclidean = False, seedInput = 0)\n Description:\n Populates graph with random edges and nodes.\n Input:\n numnodes: Number of nodes to add.\n degree_range: A tuple that has lower and upper bounds of degree for\n a node.\n length_range: A tuple that has lower and upper bounds for 'cost'\n attribute of edges.\n density: Density of edges, ie. 
0.5 indicates a node will\n approximately have edge to half of the other nodes.\n edge_format: Dictionary that specifies attribute values for edges.\n node_format: Dictionary that specifies attribute values for nodes.\n Euclidean: Creates an Euclidean graph (Euclidean distance between\n nodes) if True.\n seedInput: Seed that will be used for random number generation.\n Pre:\n It is recommended to call this method on empty Graph objects.\n Post:\n Graph will be populated by nodes and edges.\n '''\n random.seed(seedInput)\n if edge_format == None:\n edge_format = {'fontsize':10,\n 'fontcolor':'blue'}\n if node_format == None:\n node_format = {'height':0.5,\n 'width':0.5,\n 'fixedsize':'true',\n 'fontsize':10,\n 'fontcolor':'red',\n 'shape':'circle',\n }\n if Euclidean == False:\n for m in range(numnodes):\n self.add_node(m, **node_format)\n if degree_range is not None and density is None:\n for m in range(numnodes):\n degree = random.randint(degree_range[0], degree_range[1])\n i = 0\n while i < degree:\n n = random.randint(1, numnodes-1)\n if (((m,n) not in self.edge_attr and m != n) and\n (parallel_allowed or (n, m) not in self.edge_attr)):\n if length_range is not None:\n length = random.randint(length_range[0],\n length_range[1])\n self.add_edge(m, n, cost = length, **edge_format)\n if add_labels:\n self.set_edge_attr(m, n, 'label', str(length))\n else:\n self.add_edge(m, n, **edge_format)\n i += 1\n elif density != None:\n for m in range(numnodes):\n if self.graph_type == DIRECTED_GRAPH:\n numnodes2 = numnodes\n else:\n numnodes2 = m\n for n in range(numnodes2):\n if ((parallel_allowed or (n, m) not in self.edge_attr)\n and m != n):\n if random.random() < density:\n if length_range is not None:\n length = random.randint(length_range[0],\n length_range[1])\n self.add_edge(m, n, cost = length,\n **edge_format)\n if add_labels:\n self.set_edge_attr(m, n, 'label', str(length))\n else:\n self.add_edge(m, n, **edge_format)\n else:\n print(\"Must set either degree range 
or density\")\n else:\n for m in range(numnodes):\n ''' Assigns random coordinates (between 1 and 20) to the nodes\n '''\n x = random.random()*scale\n y = random.random()*scale\n self.add_node(m, locationx = x, locationy = y,\n pos = '\"'+str(x) + \",\" + str(y)+'!\"',\n **node_format)\n if degree_range is not None and density is None:\n for m in range(numnodes):\n degree = random.randint(degree_range[0], degree_range[1])\n i = 0\n neighbors = []\n if node_selection is 'random':\n while i < degree:\n length = round((((self.get_node(n).get_attr('locationx') -\n self.get_node(m).get_attr('locationx')) ** 2 +\n (self.get_node(n).get_attr('locationy') -\n self.get_node(m).get_attr('locationy')) ** 2) ** 0.5)*scale_cost,\n 0)\n if (((m,n) not in self.edge_attr and m != n) and\n (parallel_allowed or (n, m) not in self.edge_attr)):\n neighbors.append(random.randint(0, numnodes-1))\n self.add_edge(m, n, cost = int(length), **edge_format)\n if add_labels:\n self.set_edge_attr(m, n, 'label', str(int(length)))\n i += 1\n elif node_selection is 'closest':\n lengths = []\n for n in range(numnodes):\n lengths.append((n, round((((self.get_node(n).get_attr('locationx') -\n self.get_node(m).get_attr('locationx')) ** 2 +\n (self.get_node(n).get_attr('locationy') -\n self.get_node(m).get_attr('locationy')) ** 2) ** 0.5)*scale_cost,\n 0)))\n lengths.sort(key = lambda l : l[1])\n for i in range(degree+1):\n if not (lengths[i][0] == m or self.check_edge(m, lengths[i][0])):\n self.add_edge(m, lengths[i][0], cost = int(lengths[i][1]), **edge_format)\n if add_labels:\n self.set_edge_attr(m, lengths[i][0], 'label', str(int(lengths[i][1])))\n else:\n print(\"Unknown node selection rule...exiting\")\n return\n elif density != None:\n for m in range(numnodes):\n if self.graph_type == DIRECTED_GRAPH:\n numnodes2 = numnodes\n else:\n numnodes2 = m\n for n in range(numnodes2):\n if ((parallel_allowed or (n, m) not in self.edge_attr)\n and m != n):\n if random.random() < density:\n if length_range 
is None:\n ''' calculates the euclidean norm and round it\n to an integer '''\n length = round((((self.get_node(n).get_attr('locationx') -\n self.get_node(m).get_attr('locationx')) ** 2 +\n (self.get_node(n).get_attr('locationy') -\n self.get_node(m).get_attr('locationy')) ** 2) ** 0.5), 0)\n self.add_edge(m, n, cost = int(length), **edge_format)\n if add_labels:\n self.set_edge_attr(m, n, 'label', str(int(length)))\n else:\n self.add_edge(m, n, **edge_format)\n else:\n print(\"Must set either degree range or density\")", "response": "This function creates a random graph with numnodes edges and nodes."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef page_rank(self, damping_factor=0.85, max_iterations=100,\n min_delta=0.00001):\n '''\n API:\n page_rank(self, damping_factor=0.85, max_iterations=100,\n min_delta=0.00001)\n Description:\n Compute and return the page-rank of a directed graph.\n This function was originally taken from here and modified for this\n graph class: http://code.google.com/p/python-graph/source/browse/\n trunk/core/pygraph/algorithms/pagerank.py\n Input:\n damping_factor: Damping factor.\n max_iterations: Maximum number of iterations.\n min_delta: Smallest variation required to have a new iteration.\n Pre:\n Graph should be a directed graph.\n Return:\n Returns dictionary of page-ranks. 
Keys are node names, values are\n corresponding page-ranks.\n '''\n nodes = self.get_node_list()\n graph_size = len(nodes)\n if graph_size == 0:\n return {}\n #value for nodes without inbound links\n min_value = old_div((1.0-damping_factor),graph_size)\n # itialize the page rank dict with 1/N for all nodes\n pagerank = dict.fromkeys(nodes, old_div(1.0,graph_size))\n for _ in range(max_iterations):\n diff = 0 #total difference compared to last iteraction\n # computes each node PageRank based on inbound links\n for node in nodes:\n rank = min_value\n for referring_page in self.get_in_neighbors(node):\n rank += (damping_factor * pagerank[referring_page] /\n len(self.get_neighbors(referring_page)))\n diff += abs(pagerank[node] - rank)\n pagerank[node] = rank\n #stop if PageRank has converged\n if diff < min_delta:\n break\n return pagerank", "response": "This function computes the page - rank of a directed graph and returns a dictionary of page - ranks corresponding to the nodes in the graph."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a dictionary of node degrees.", "response": "def get_degrees(self):\n '''\n API:\n get_degree(self)\n Description:\n Returns degrees of nodes in dictionary format.\n Return:\n Returns a dictionary of node degrees. Keys are node names, values\n are corresponding degrees.\n '''\n degree = {}\n if self.attr['type'] is not DIRECTED_GRAPH:\n for n in self.get_node_list():\n degree[n] = len(self.get_neighbors(n))\n return degree\n else:\n for n in self.get_node_list():\n degree[n] = (len(self.get_in_neighbors(n)) +\n len(self.get_out_neighbors(n)))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_in_degrees(self):\n '''\n API:\n get_degree(self)\n Description:\n Returns degrees of nodes in dictionary format.\n Return:\n Returns a dictionary of node degrees. 
Keys are node names, values\n are corresponding degrees.\n '''\n degree = {}\n if self.attr['type'] is not DIRECTED_GRAPH:\n print('This function only works for directed graphs')\n return\n for n in self.get_node_list():\n degree[n] = len(self.get_in_neighbors(n))\n return degree", "response": "Return a dictionary of node degrees."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_out_degrees(self):\n '''\n API:\n get_degree(self)\n Description:\n Returns degrees of nodes in dictionary format.\n Return:\n Returns a dictionary of node degrees. Keys are node names, values\n are corresponding degrees.\n '''\n degree = {}\n if self.attr['type'] is not DIRECTED_GRAPH:\n print('This function only works for directed graphs')\n return\n for n in self.get_node_list():\n degree[n] = len(self.get_out_neighbors(n))\n return degree", "response": "Return a dictionary of node degrees corresponding to the nodes in the graph."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the diameter of the undirected edge in the graph.", "response": "def get_diameter(self):\n '''\n API:\n get_diameter(self)\n Description:\n Returns diameter of the graph. 
Diameter is defined as follows.\n distance(n,m): shortest unweighted path from n to m\n eccentricity(n) = $\\max _m distance(n,m)$\n diameter = $\\min _n eccentricity(n) = \\min _n \\max _m distance(n,m)$\n Return:\n Returns diameter of the graph.\n '''\n\n if self.attr['type'] is not UNDIRECTED_GRAPH:\n print('This function only works for undirected graphs')\n return\n diameter = 'infinity'\n eccentricity_n = 0\n for n in self.get_node_list():\n for m in self.get_node_list():\n path_n_m = self.search(n, destination = m, algo = 'BFS')\n if path_n_m is None:\n # this indicates there is no path from n to m, no diameter\n # is defined, since the graph is not connected, return\n # 'infinity'\n return 'infinity'\n distance_n_m = len(path_n_m)-1\n if distance_n_m > eccentricity_n:\n eccentricity_n = distance_n_m\n if diameter is 'infinity' or eccentricity_n > diameter:\n diameter = eccentricity_n\n return diameter"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef add(self, aList):\n '''\n API:\n add(self, aList)\n Description:\n Adds items in the list to the set.\n Input:\n aList: List of items.\n Post:\n self.sizes will be updated.\n '''\n self.add_node(aList[0])\n for i in range(1, len(aList)):\n self.add_edge(aList[i], aList[0])\n self.sizes[aList[0]] = len(aList)", "response": "Adds items in aList to the set."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef find(self, i):\n '''\n API:\n find(self, i)\n Description:\n Returns root of set that has i.\n Input:\n i: Item.\n Return:\n Returns root of set that has i.\n '''\n current = i\n edge_list = []\n while len(self.get_neighbors(current)) != 0:\n successor = self.get_neighbors(current)[0]\n edge_list.append((current, successor))\n current = successor\n if self.optimize:\n for e in edge_list:\n if e[1] != current:\n self.del_edge((e[0], e[1]))\n self.add_edge(e[0], current)\n return current", "response": 
"This method finds the root of set that has i."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nevaluates the receiver and return the result.", "response": "def evaluate(self, node: InstanceNode) -> XPathValue:\n \"\"\"Evaluate the receiver and return the result.\n\n Args:\n node: Context node for XPath evaluation.\n\n Raises:\n XPathTypeError: If a subexpression of the receiver is of a wrong\n type.\n \"\"\"\n return self._eval(XPathContext(node, node, 1, 1))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef from_raw(self, raw: RawScalar) -> Optional[ScalarValue]:\n if isinstance(raw, str):\n return raw", "response": "Return a cooked value of the receiver type."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef from_yang(self, text: str) -> ScalarValue:\n res = self.parse_value(text)\n if res is None:\n raise InvalidArgument(text)\n return res", "response": "Parse a YANG value from a string representation of the value."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _type_digest(self, config: bool) -> Dict[str, Any]:\n res = {\"base\": self.yang_type()}\n if self.name is not None:\n res[\"derived\"] = self.name\n return res", "response": "Return receiver s type digest."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn list of bit items sorted by position.", "response": "def sorted_bits(self) -> List[Tuple[str, int]]:\n \"\"\"Return list of bit items sorted by position.\"\"\"\n return sorted(self.bit.items(), key=lambda x: x[1])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef as_int(self, val: Tuple[str]) -> int:\n res = 0\n try:\n for b in val:\n res += 1 << self.bit[b]\n except KeyError:\n return None\n return res", "response": "Transform a bits 
value to an integer."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nparses boolean value. Args: text: String representation of the value.", "response": "def parse_value(self, text: str) -> Optional[bool]:\n \"\"\"Parse boolean value.\n\n Args:\n text: String representation of the value.\n \"\"\"\n if text == \"true\":\n return True\n if text == \"false\":\n return False"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef sorted_enums(self) -> List[Tuple[str, int]]:\n return sorted(self.enum.items(), key=lambda x: x[1])", "response": "Return list of enum items sorted by value."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef from_yang(self, text: str) -> Optional[QualName]:\n try:\n return self.sctx.schema_data.translate_pname(text, self.sctx.text_mid)\n except (ModuleNotRegistered, UnknownPrefix):\n raise InvalidArgument(text)", "response": "Override the superclass method."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef from_yang(self, text: str) -> Optional[int]:\n if text.startswith(\"0\"):\n base = 16 if text.startswith(\"0x\") else 8\n else:\n base = 10\n try:\n return int(text, base)\n except (ValueError, TypeError):\n raise InvalidArgument(text)", "response": "Override the superclass method."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates the input files from the input nodes passed to this instance of the CalcJob and write them to disk.", "response": "def prepare_for_submission(self, folder):\n \"\"\"Create the input files from the input nodes passed to this instance of the `CalcJob`.\n\n :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk\n :return: `aiida.common.datastructures.CalcInfo` instance\n \"\"\"\n # create input structure\n if 'structure' in 
self.inputs:\n self.inputs.structure.export(folder.get_abs_path(self._DEFAULT_COORDS_FILE_NAME), fileformat=\"xyz\")\n\n # create cp2k input file\n inp = Cp2kInput(self.inputs.parameters.get_dict())\n inp.add_keyword(\"GLOBAL/PROJECT\", self._DEFAULT_PROJECT_NAME)\n if 'structure' in self.inputs:\n for i, letter in enumerate('ABC'):\n inp.add_keyword('FORCE_EVAL/SUBSYS/CELL/' + letter,\n '{:<15} {:<15} {:<15}'.format(*self.inputs.structure.cell[i]))\n topo = \"FORCE_EVAL/SUBSYS/TOPOLOGY\"\n inp.add_keyword(topo + \"/COORD_FILE_NAME\", self._DEFAULT_COORDS_FILE_NAME)\n inp.add_keyword(topo + \"/COORD_FILE_FORMAT\", \"XYZ\")\n\n with io.open(folder.get_abs_path(self._DEFAULT_INPUT_FILE), mode=\"w\", encoding=\"utf-8\") as fobj:\n fobj.write(inp.render())\n\n if 'settings' in self.inputs:\n settings = self.inputs.settings.get_dict()\n else:\n settings = {}\n\n # create code info\n codeinfo = CodeInfo()\n codeinfo.cmdline_params = settings.pop('cmdline', []) + [\"-i\", self._DEFAULT_INPUT_FILE]\n codeinfo.stdout_name = self._DEFAULT_OUTPUT_FILE\n codeinfo.join_files = True\n codeinfo.code_uuid = self.inputs.code.uuid\n\n # create calc info\n calcinfo = CalcInfo()\n calcinfo.stdin_name = self._DEFAULT_INPUT_FILE\n calcinfo.uuid = self.uuid\n calcinfo.cmdline_params = codeinfo.cmdline_params\n calcinfo.stdin_name = self._DEFAULT_INPUT_FILE\n calcinfo.stdout_name = self._DEFAULT_OUTPUT_FILE\n calcinfo.codes_info = [codeinfo]\n\n # file lists\n calcinfo.remote_symlink_list = []\n if 'file' in self.inputs:\n calcinfo.local_copy_list = []\n for fobj in self.inputs.file.values():\n calcinfo.local_copy_list.append((fobj.uuid, fobj.filename, fobj.filename))\n\n calcinfo.remote_copy_list = []\n calcinfo.retrieve_list = [self._DEFAULT_OUTPUT_FILE, self._DEFAULT_RESTART_FILE_NAME]\n calcinfo.retrieve_list += settings.pop('additional_retrieve_list', [])\n\n # symlinks\n if 'parent_calc_folder' in self.inputs:\n comp_uuid = self.inputs.parent_calc_folder.computer.uuid\n remote_path 
= self.inputs.parent_calc_folder.get_remote_path()\n symlink = (comp_uuid, remote_path, self._DEFAULT_PARENT_CALC_FLDR_NAME)\n calcinfo.remote_symlink_list.append(symlink)\n\n # check for left over settings\n if settings:\n raise InputValidationError(\"The following keys have been found \" +\n \"in the settings input node {}, \".format(self.pk) + \"but were not understood: \" +\n \",\".join(settings.keys()))\n\n return calcinfo"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _add_keyword_low(self, kwpath, value, params):\n if len(kwpath) == 1:\n params[kwpath[0]] = value\n elif kwpath[0] not in params.keys():\n new_subsection = {}\n params[kwpath[0]] = new_subsection\n self._add_keyword_low(kwpath[1:], value, new_subsection)\n else:\n self._add_keyword_low(kwpath[1:], value, params[kwpath[0]])", "response": "Adds keyword to params based on the path of the base class."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _render_section(self, output, params, indent=0):\n\n for key, val in sorted(params.items()):\n if key.upper() != key:\n raise InputValidationError(\"keyword '%s' not upper case\" % key)\n if key.startswith('@') or key.startswith('$'):\n raise InputValidationError(\"CP2K preprocessor not supported\")\n if isinstance(val, dict):\n output.append('%s&%s %s' % (' ' * indent, key, val.pop('_', '')))\n self._render_section(output, val, indent + 3)\n output.append('%s&END %s' % (' ' * indent, key))\n elif isinstance(val, list):\n for listitem in val:\n self._render_section(output, {key: listitem}, indent)\n elif isinstance(val, bool):\n val_str = '.true.' 
if val else '.false.'\n output.append('%s%s %s' % (' ' * indent, key, val_str))\n else:\n output.append('%s%s %s' % (' ' * indent, key, val))", "response": "This function takes a dictionary and recurses through the user - specified key - value pairs and writes the result to the output"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nscooping multiple model instances out of the DB at once given a query that returns all fields of each.", "response": "def multi_raw(query, params, models, model_to_fields):\n \"\"\"Scoop multiple model instances out of the DB at once, given a query that\n returns all fields of each.\n\n Return an iterable of sequences of model instances parallel to the\n ``models`` sequence of classes. For example::\n\n [(<User such-and-such>, <Watch such-and-such>), ...]\n\n \"\"\"\n cursor = connections[router.db_for_read(models[0])].cursor()\n cursor.execute(query, params)\n rows = cursor.fetchall()\n\n for row in rows:\n row_iter = iter(row)\n yield [model_class(**dict((a, next(row_iter))\n for a in model_to_fields[model_class]))\n for model_class in models]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef unsubscribe_url(self):\n server_relative = ('%s?s=%s' % (reverse('tidings.unsubscribe',\n args=[self.pk]),\n self.secret))\n return 'https://%s%s' % (Site.objects.get_current().domain,\n server_relative)", "response": "Return the absolute URL to visit to delete me."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nattach any anonymous watches having a user s email to that user.", "response": "def claim_watches(user):\n \"\"\"Attach any anonymous watches having a user's email to that user.\n\n Call this from your user registration process if you like.\n\n \"\"\"\n Watch.objects.filter(email=user.email).update(email=None, user=user)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef 
collate(*iterables, **kwargs):\n key = kwargs.pop('key', lambda a: a)\n reverse = kwargs.pop('reverse', False)\n min_or_max = max if reverse else min\n\n rows = [iter(iterable) for iterable in iterables if iterable]\n next_values = {}\n by_key = []\n\n def gather_next_value(row, index):\n try:\n next_value = next(row)\n except StopIteration:\n pass\n else:\n next_values[index] = next_value\n by_key.append((key(next_value), index))\n\n for index, row in enumerate(rows):\n gather_next_value(row, index)\n\n while by_key:\n key_value, index = min_or_max(by_key)\n by_key.remove((key_value, index))\n next_value = next_values.pop(index)\n yield next_value\n gather_next_value(rows[index], index)", "response": "Return an iterable ordered collation of the already - sorted items\n from each of iterables compared by kwarg key."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef hash_to_unsigned(data):\n if isinstance(data, string_types):\n # Return a CRC32 value identical across Python versions and platforms\n # by stripping the sign bit as on\n # http://docs.python.org/library/zlib.html.\n return crc32(data.encode('utf-8')) & 0xffffffff\n else:\n return int(data)", "response": "This function returns an unsigned 4 - byte int\n hash of the given data."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef emails_with_users_and_watches(\n subject, template_path, vars, users_and_watches,\n from_email=settings.TIDINGS_FROM_ADDRESS, **extra_kwargs):\n \"\"\"Return iterable of EmailMessages with user and watch values substituted.\n\n A convenience function for generating emails by repeatedly rendering a\n Django template with the given ``vars`` plus a ``user`` and ``watches`` key\n for each pair in ``users_and_watches``\n\n :arg template_path: path to template file\n :arg vars: a map which becomes the Context passed in to the template\n :arg extra_kwargs: additional 
kwargs to pass into EmailMessage constructor\n\n \"\"\"\n template = loader.get_template(template_path)\n context = Context(vars)\n for u, w in users_and_watches:\n context['user'] = u\n\n # Arbitrary single watch for compatibility with 0.1\n # TODO: remove.\n context['watch'] = w[0]\n\n context['watches'] = w\n yield EmailMessage(subject,\n template.render(context),\n from_email,\n [u.email],\n **extra_kwargs)", "response": "Returns an iterable of EmailMessages with user and watch values substituted."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef import_from_setting(setting_name, fallback):\n path = getattr(settings, setting_name, None)\n if path:\n try:\n return import_string(path)\n except ImportError:\n raise ImproperlyConfigured('%s: No such path.' % path)\n else:\n return fallback", "response": "Return the resolution of an import path stored in a Django setting."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef parse(self, **kwargs):\n from aiida.engine import ExitCode\n from aiida.common import NotExistent\n\n try:\n out_folder = self.retrieved\n except NotExistent:\n return self.exit_codes.ERROR_NO_RETRIEVED_FOLDER\n\n self._parse_stdout(out_folder)\n\n try:\n structure = self._parse_trajectory(out_folder)\n self.out('output_structure', structure)\n except Exception: # pylint: disable=broad-except\n pass\n\n return ExitCode(0)", "response": "Parses the retrieved nodes and outputs the structure of the retrieved nodes."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nparsing the output of the CP2K process.", "response": "def _parse_stdout(self, out_folder):\n \"\"\"CP2K output parser\"\"\"\n fname = self.node.load_process_class()._DEFAULT_OUTPUT_FILE # pylint: disable=protected-access\n if fname not in out_folder._repository.list_object_names(): # pylint: disable=protected-access\n 
raise OutputParsingError(\"Cp2k output file not retrieved\")\n\n result_dict = {'exceeded_walltime': False}\n abs_fn = os.path.join(out_folder._repository._get_base_folder().abspath, fname) # pylint: disable=protected-access\n with io.open(abs_fn, mode=\"r\", encoding=\"utf-8\") as fobj:\n lines = fobj.readlines()\n for i_line, line in enumerate(lines):\n if line.startswith(' ENERGY| '):\n result_dict['energy'] = float(line.split()[8])\n result_dict['energy_units'] = \"a.u.\"\n if 'The number of warnings for this run is' in line:\n result_dict['nwarnings'] = int(line.split()[-1])\n if 'exceeded requested execution time' in line:\n result_dict['exceeded_walltime'] = True\n if \"KPOINTS| Band Structure Calculation\" in line:\n from aiida.orm import BandsData\n bnds = BandsData()\n kpoints, labels, bands = self._parse_bands(lines, i_line)\n bnds.set_kpoints(kpoints)\n bnds.labels = labels\n bnds.set_bands(bands, units='eV')\n self.out('output_bands', bnds)\n\n if 'nwarnings' not in result_dict:\n raise OutputParsingError(\"CP2K did not finish properly.\")\n\n self.out('output_parameters', Dict(dict=result_dict))"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nparses band structure from cp2k output", "response": "def _parse_bands(lines, n_start):\n \"\"\"Parse band structure from cp2k output\"\"\"\n kpoints = []\n labels = []\n bands_s1 = []\n bands_s2 = []\n known_kpoints = {}\n pattern = re.compile(\".*?Nr.*?Spin.*?K-Point.*?\", re.DOTALL)\n\n selected_lines = lines[n_start:]\n for current_line, line in enumerate(selected_lines):\n splitted = line.split()\n if \"KPOINTS| Special K-Point\" in line:\n kpoint = tuple(map(float, splitted[-3:]))\n if \" \".join(splitted[-5:-3]) != \"not specified\":\n label = splitted[-4]\n known_kpoints[kpoint] = label\n elif pattern.match(line):\n spin = int(splitted[3])\n kpoint = tuple(map(float, splitted[-3:]))\n kpoint_n_lines = int(math.ceil(int(selected_lines[current_line + 1]) / 4.))\n 
band = list(\n map(float, ' '.join(selected_lines[current_line + 2:current_line + 2 + kpoint_n_lines]).split()))\n if spin == 1:\n if kpoint in known_kpoints:\n labels.append((len(kpoints), known_kpoints[kpoint]))\n kpoints.append(kpoint)\n bands_s1.append(band)\n elif spin == 2:\n bands_s2.append(band)\n if bands_s2:\n bands = [bands_s1, bands_s2]\n else:\n bands = bands_s1\n return np.array(kpoints), labels, np.array(bands)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef unsubscribe(request, watch_id):\n ext = getattr(settings, 'TIDINGS_TEMPLATE_EXTENSION', 'html')\n\n # Grab the watch and secret; complain if either is wrong:\n try:\n watch = Watch.objects.get(pk=watch_id)\n # 's' is for 'secret' but saves wrapping in mails\n secret = request.GET.get('s')\n if secret != watch.secret:\n raise Watch.DoesNotExist\n except Watch.DoesNotExist:\n return render(request, 'tidings/unsubscribe_error.' + ext)\n\n if request.method == 'POST':\n watch.delete()\n return render(request, 'tidings/unsubscribe_success.' + ext)\n\n return render(request, 'tidings/unsubscribe.' 
+ ext)", "response": "Unsubscribe from a watch."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nyield a list of unique User objects from a list of Users and Watches.", "response": "def _unique_by_email(users_and_watches):\n \"\"\"Given a sequence of (User/EmailUser, [Watch, ...]) pairs\n clustered by email address (which is never ''), yield from each\n cluster a single pair like this::\n\n (User/EmailUser, [Watch, Watch, ...]).\n\n The User/Email is that of...\n (1) the first incoming pair where the User has an email and is not\n anonymous, or, if there isn't such a user...\n (2) the first pair.\n\n The list of Watches consists of all those found in the cluster.\n\n Compares email addresses case-insensitively.\n\n \"\"\"\n def ensure_user_has_email(user, cluster_email):\n \"\"\"Make sure the user in the user-watch pair has an email address.\n\n The caller guarantees us an email from either the user or the watch. If\n the passed-in user has no email, we return an EmailUser instead having\n the email address from the watch.\n\n \"\"\"\n # Some of these cases shouldn't happen, but we're tolerant.\n if not getattr(user, 'email', ''):\n user = EmailUser(cluster_email)\n return user\n\n # TODO: Do this instead with clever SQL that somehow returns just the\n # best row for each email.\n\n cluster_email = '' # email of current cluster\n favorite_user = None # best user in cluster so far\n watches = [] # all watches in cluster\n for u, w in users_and_watches:\n # w always has at least 1 Watch. 
All the emails are the same.\n row_email = u.email or w[0].email\n if cluster_email.lower() != row_email.lower():\n # Starting a new cluster.\n if cluster_email != '':\n # Ship the favorites from the previous cluster:\n yield (ensure_user_has_email(favorite_user, cluster_email),\n watches)\n favorite_user, watches = u, []\n cluster_email = row_email\n elif ((not favorite_user.email or not u.is_authenticated) and\n u.email and u.is_authenticated):\n favorite_user = u\n watches.extend(w)\n if favorite_user is not None:\n yield ensure_user_has_email(favorite_user, cluster_email), watches"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nnotifying everyone watching the event.", "response": "def fire(self, exclude=None, delay=True):\n \"\"\"Notify everyone watching the event.\n\n We are explicit about sending notifications; we don't just key off\n creation signals, because the receiver of a ``post_save`` signal has no\n idea what just changed, so it doesn't know which notifications to send.\n Also, we could easily send mail accidentally: for instance, during\n tests. If we want implicit event firing, we can always register a\n signal handler that calls :meth:`fire()`.\n\n :arg exclude: If a saved user is passed in, that user will not be\n notified, though anonymous notifications having the same email\n address may still be sent. A sequence of users may also be passed in.\n\n :arg delay: If True (default), the event is handled asynchronously with\n Celery. This requires the pickle task serializer, which is no longer\n the default starting in Celery 4.0. 
If False, the event is processed\n immediately.\n \"\"\"\n if delay:\n # Tasks don't receive the `self` arg implicitly.\n self._fire_task.apply_async(\n args=(self,),\n kwargs={'exclude': exclude},\n serializer='pickle')\n else:\n self._fire_task(self, exclude=exclude)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _fire_task(self, exclude=None):\n connection = mail.get_connection(fail_silently=True)\n # Warning: fail_silently swallows errors thrown by the generators, too.\n connection.open()\n for m in self._mails(self._users_watching(exclude=exclude)):\n connection.send_messages([m])", "response": "Build and send the emails as a celery task."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nraise a TypeError if filters contains any keys inappropriate to this event class.", "response": "def _validate_filters(cls, filters):\n \"\"\"Raise a TypeError if ``filters`` contains any keys inappropriate to\n this event class.\"\"\"\n for k in iterkeys(filters):\n if k not in cls.filters:\n # Mirror \"unexpected keyword argument\" message:\n raise TypeError(\"%s got an unsupported filter type '%s'\" %\n (cls.__name__, k))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn an iterable of Users whose event_type is self. event_type and content_type is self. content_type and where the event_id is self. event_id and the exclude is None.", "response": "def _users_watching_by_filter(self, object_id=None, exclude=None,\n **filters):\n \"\"\"Return an iterable of (``User``/:class:`~tidings.models.EmailUser`,\n [:class:`~tidings.models.Watch` objects]) tuples watching the event.\n\n Of multiple Users/EmailUsers having the same email address, only one is\n returned. 
Users are favored over EmailUsers so we are sure to be able\n to, for example, include a link to a user profile in the mail.\n\n The list of :class:`~tidings.models.Watch` objects includes both\n those tied to the given User (if there is a registered user)\n and to any anonymous Watch having the same email address. This\n allows you to include all relevant unsubscribe URLs in a mail,\n for example. It also lets you make decisions in the\n :meth:`~tidings.events.EventUnion._mails()` method of\n :class:`~tidings.events.EventUnion` based on the kinds of\n watches found.\n\n \"Watching the event\" means having a Watch whose ``event_type`` is\n ``self.event_type``, whose ``content_type`` is ``self.content_type`` or\n ``NULL``, whose ``object_id`` is ``object_id`` or ``NULL``, and whose\n WatchFilter rows match as follows: each name/value pair given in\n ``filters`` must be matched by a related WatchFilter, or there must be\n no related WatchFilter having that name. If you find yourself wanting\n the lack of a particularly named WatchFilter to scuttle the match, use\n a different event_type instead.\n\n :arg exclude: If a saved user is passed in as this argument, that user\n will never be returned, though anonymous watches having the same\n email address may. A sequence of users may also be passed in.\n\n \"\"\"\n # I don't think we can use the ORM here, as there's no way to get a\n # second condition (name=whatever) into a left join. However, if we\n # were willing to have 2 subqueries run for every watch row--select\n # {are there any filters with name=x?} and select {is there a filter\n # with name=x and value=y?}--we could do it with extra(). 
Then we could\n # have EventUnion simply | the QuerySets together, which would avoid\n # having to merge in Python.\n\n if exclude is None:\n exclude = []\n elif not isinstance(exclude, Sequence):\n exclude = [exclude]\n\n def filter_conditions():\n \"\"\"Return joins, WHERE conditions, and params to bind to them in\n order to check a notification against all the given filters.\"\"\"\n # Not a one-liner. You're welcome. :-)\n self._validate_filters(filters)\n joins, wheres, join_params, where_params = [], [], [], []\n for n, (k, v) in enumerate(iteritems(filters)):\n joins.append(\n 'LEFT JOIN tidings_watchfilter f{n} '\n 'ON f{n}.watch_id=w.id '\n 'AND f{n}.name=%s'.format(n=n))\n join_params.append(k)\n wheres.append('(f{n}.value=%s '\n 'OR f{n}.value IS NULL)'.format(n=n))\n where_params.append(hash_to_unsigned(v))\n return joins, wheres, join_params + where_params\n\n # Apply watchfilter constraints:\n joins, wheres, params = filter_conditions()\n\n # Start off with event_type, which is always a constraint. 
These go in\n # the `wheres` list to guarantee that the AND after the {wheres}\n # substitution in the query is okay.\n wheres.append('w.event_type=%s')\n params.append(self.event_type)\n\n # Constrain on other 1-to-1 attributes:\n if self.content_type:\n wheres.append('(w.content_type_id IS NULL '\n 'OR w.content_type_id=%s)')\n params.append(ContentType.objects.get_for_model(\n self.content_type).id)\n if object_id:\n wheres.append('(w.object_id IS NULL OR w.object_id=%s)')\n params.append(object_id)\n if exclude:\n # Don't try excluding unsaved Users:1\n if not all(e.id for e in exclude):\n raise ValueError(\"Can't exclude an unsaved User.\")\n\n wheres.append('(u.id IS NULL OR u.id NOT IN (%s))' %\n ', '.join('%s' for e in exclude))\n params.extend(e.id for e in exclude)\n\n def get_fields(model):\n if hasattr(model._meta, '_fields'):\n # For django versions < 1.6\n return model._meta._fields()\n else:\n # For django versions >= 1.6\n return model._meta.fields\n\n User = get_user_model()\n model_to_fields = dict((m, [f.get_attname() for f in get_fields(m)])\n for m in [User, Watch])\n query_fields = [\n 'u.{0}'.format(field) for field in model_to_fields[User]]\n query_fields.extend([\n 'w.{0}'.format(field) for field in model_to_fields[Watch]])\n\n query = (\n 'SELECT {fields} '\n 'FROM tidings_watch w '\n 'LEFT JOIN {user_table} u ON u.id=w.user_id {joins} '\n 'WHERE {wheres} '\n 'AND (length(w.email)>0 OR length(u.email)>0) '\n 'AND w.is_active '\n 'ORDER BY u.email DESC, w.email DESC').format(\n fields=', '.join(query_fields),\n joins=' '.join(joins),\n wheres=' AND '.join(wheres),\n user_table=User._meta.db_table)\n # IIRC, the DESC ordering was something to do with the placement of\n # NULLs. Track this down and explain it.\n\n # Put watch in a list just for consistency. 
Once the pairs go through\n # _unique_by_email, watches will be in a list, and EventUnion uses the\n # same function to union already-list-enclosed pairs from individual\n # events.\n return _unique_by_email((u, [w]) for u, w in\n multi_raw(query, params, [User, Watch],\n model_to_fields))"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a QuerySet of all watches that are associated with a given user or email.", "response": "def _watches_belonging_to_user(cls, user_or_email, object_id=None,\n **filters):\n \"\"\"Return a QuerySet of watches having the given user or email, having\n (only) the given filters, and having the event_type and content_type\n attrs of the class.\n\n Matched Watches may be either confirmed and unconfirmed. They may\n include duplicates if the get-then-create race condition in\n :meth:`notify()` allowed them to be created.\n\n If you pass an email, it will be matched against only the email\n addresses of anonymous watches. 
At the moment, the only integration\n point planned between anonymous and registered watches is the claiming\n of anonymous watches of the same email address on user registration\n confirmation.\n\n If you pass the AnonymousUser, this will return an empty QuerySet.\n\n \"\"\"\n # If we have trouble distinguishing subsets and such, we could store a\n # number_of_filters on the Watch.\n cls._validate_filters(filters)\n\n if isinstance(user_or_email, string_types):\n user_condition = Q(email=user_or_email)\n elif user_or_email.is_authenticated:\n user_condition = Q(user=user_or_email)\n else:\n return Watch.objects.none()\n\n # Filter by stuff in the Watch row:\n watches = getattr(Watch, 'uncached', Watch.objects).filter(\n user_condition,\n Q(content_type=ContentType.objects.get_for_model(\n cls.content_type)) if cls.content_type else Q(),\n Q(object_id=object_id) if object_id else Q(),\n event_type=cls.event_type).extra(\n where=['(SELECT count(*) FROM tidings_watchfilter WHERE '\n 'tidings_watchfilter.watch_id='\n 'tidings_watch.id)=%s'],\n params=[len(filters)])\n # Optimization: If the subselect ends up being slow, store the number\n # of filters in each Watch row or try a GROUP BY.\n\n # Apply 1-to-many filters:\n for k, v in iteritems(filters):\n watches = watches.filter(filters__name=k,\n filters__value=hash_to_unsigned(v))\n\n return watches"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns whether the given user or email is watching this event.", "response": "def is_notifying(cls, user_or_email_, object_id=None, **filters):\n \"\"\"Return whether the user/email is watching this event (either\n active or inactive watches), conditional on meeting the criteria in\n ``filters``.\n\n Count only watches that match the given filters exactly--not ones which\n match merely a superset of them. This lets callers distinguish between\n watches which overlap in scope. 
Equivalently, this lets callers check\n whether :meth:`notify()` has been called with these arguments.\n\n Implementations in subclasses may take different arguments--for\n example, to assume certain filters--though most will probably just use\n this. However, subclasses should clearly document what filters they\n supports and the meaning of each.\n\n Passing this an ``AnonymousUser`` always returns ``False``. This means\n you can always pass it ``request.user`` in a view and get a sensible\n response.\n\n \"\"\"\n return cls._watches_belonging_to_user(user_or_email_,\n object_id=object_id,\n **filters).exists()"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nstart notifying the given user or email address when this event occurs and meets the criteria given in filters.", "response": "def notify(cls, user_or_email_, object_id=None, **filters):\n \"\"\"Start notifying the given user or email address when this event\n occurs and meets the criteria given in ``filters``.\n\n Return the created (or the existing matching) Watch so you can call\n :meth:`~tidings.models.Watch.activate()` on it if you're so inclined.\n\n Implementations in subclasses may take different arguments; see the\n docstring of :meth:`is_notifying()`.\n\n Send an activation email if an anonymous watch is created and\n :data:`~django.conf.settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES` is\n ``True``. 
If the activation request fails, raise a\n ActivationRequestFailed exception.\n\n Calling :meth:`notify()` twice for an anonymous user will send the\n email each time.\n\n \"\"\"\n # A test-for-existence-then-create race condition exists here, but it\n # doesn't matter: de-duplication on fire() and deletion of all matches\n # on stop_notifying() nullify its effects.\n try:\n # Pick 1 if >1 are returned:\n watch = cls._watches_belonging_to_user(\n user_or_email_,\n object_id=object_id,\n **filters)[0:1].get()\n except Watch.DoesNotExist:\n create_kwargs = {}\n if cls.content_type:\n create_kwargs['content_type'] = \\\n ContentType.objects.get_for_model(cls.content_type)\n create_kwargs['email' if isinstance(user_or_email_, string_types)\n else 'user'] = user_or_email_\n # Letters that can't be mistaken for other letters or numbers in\n # most fonts, in case people try to type these:\n distinguishable_letters = \\\n 'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRTUVWXYZ'\n secret = ''.join(random.choice(distinguishable_letters)\n for x in range(10))\n # Registered users don't need to confirm, but anonymous users do.\n is_active = ('user' in create_kwargs or\n not settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES)\n if object_id:\n create_kwargs['object_id'] = object_id\n watch = Watch.objects.create(\n secret=secret,\n is_active=is_active,\n event_type=cls.event_type,\n **create_kwargs)\n for k, v in iteritems(filters):\n WatchFilter.objects.create(watch=watch, name=k,\n value=hash_to_unsigned(v))\n # Send email for inactive watches.\n if not watch.is_active:\n email = watch.user.email if watch.user else watch.email\n message = cls._activation_email(watch, email)\n try:\n message.send()\n except SMTPException as e:\n watch.delete()\n raise ActivationRequestFailed(e.recipients)\n return watch"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef notify(cls, user_or_email, instance):\n return super(InstanceEvent, 
cls).notify(user_or_email,\n object_id=instance.pk)", "response": "Create save and return a watch which fires when something\n happens to instance."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef stop_notifying(cls, user_or_email, instance):\n super(InstanceEvent, cls).stop_notifying(user_or_email,\n object_id=instance.pk)", "response": "Delete the watch created by notify."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef is_notifying(cls, user_or_email, instance):\n return super(InstanceEvent, cls).is_notifying(user_or_email,\n object_id=instance.pk)", "response": "Check if the user is notifying the instance."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns users watching this instance.", "response": "def _users_watching(self, **kwargs):\n \"\"\"Return users watching this instance.\"\"\"\n return self._users_watching_by_filter(object_id=self.instance.pk,\n **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef decrypt(self):\n if not self._crypter:\n return b''\n try:\n plaintext = self._crypter.decrypt(self._ciphertext, **self._decrypt_params)\n return plaintext\n except Exception as e:\n exc_info = sys.exc_info()\n six.reraise(\n ValueError('Invalid ciphertext \"%s\", error: %s' % (self._ciphertext, e)),\n None,\n exc_info[2]\n )", "response": "Decrypt decrypts the secret and returns the plaintext."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef encrypt(self, msg, iv='', auth_data=None):\n if not iv:\n raise ValueError('Missing Nonce')\n\n return self.key.encrypt(iv, msg, auth_data)", "response": "Encrypts and authenticates the data provided as well as authenticating\n the associated_data."} {"SOURCE": "codesearchnet", "instruction": "Given the 
following Python 3 function, write the documentation\ndef enc_setup(self, enc_alg, msg, auth_data=b'', key=None, iv=\"\"):\n\n iv = self._generate_iv(enc_alg, iv)\n\n if enc_alg in [\"A192GCM\", \"A128GCM\", \"A256GCM\"]:\n aes = AES_GCMEncrypter(key=key)\n ctx, tag = split_ctx_and_tag(aes.encrypt(msg, iv, auth_data))\n elif enc_alg in [\"A128CBC-HS256\", \"A192CBC-HS384\", \"A256CBC-HS512\"]:\n aes = AES_CBCEncrypter(key=key)\n ctx, tag = aes.encrypt(msg, iv, auth_data)\n else:\n raise NotSupportedAlgorithm(enc_alg)\n\n return ctx, tag, aes.key", "response": "This method is used to encrypt the JWE content."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a signature over a message using a key.", "response": "def sign(self, msg, key):\n \"\"\"\n Create a signature over a message\n\n :param msg: The message\n :param key: The key\n :return: A signature\n \"\"\"\n hasher = hashes.Hash(self.hash_algorithm(), backend=default_backend())\n hasher.update(msg)\n digest = hasher.finalize()\n sig = key.sign(\n digest,\n padding.PSS(\n mgf=padding.MGF1(self.hash_algorithm()),\n salt_length=padding.PSS.MAX_LENGTH),\n utils.Prehashed(self.hash_algorithm()))\n return sig"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef verify(self, msg, signature, key):\n try:\n key.verify(signature, msg,\n padding.PSS(mgf=padding.MGF1(self.hash_algorithm()),\n salt_length=padding.PSS.MAX_LENGTH),\n self.hash_algorithm())\n except InvalidSignature as err:\n raise BadSignature(err)\n else:\n return True", "response": "Verify a message signature."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngenerates a JWE using the given key and IV.", "response": "def encrypt(self, key, iv=\"\", cek=\"\", **kwargs):\n \"\"\"\n Produces a JWE as defined in RFC7516 using symmetric keys\n\n :param key: Shared symmetric key\n :param iv: Initialization vector\n :param cek: Content 
master key\n :param kwargs: Extra keyword arguments, just ignore for now.\n :return:\n \"\"\"\n _msg = as_bytes(self.msg)\n\n _args = self._dict\n try:\n _args[\"kid\"] = kwargs[\"kid\"]\n except KeyError:\n pass\n\n jwe = JWEnc(**_args)\n\n # If no iv and cek are given generate them\n iv = self._generate_iv(self[\"enc\"], iv)\n cek = self._generate_key(self[\"enc\"], cek)\n if isinstance(key, SYMKey):\n try:\n kek = key.key.encode('utf8')\n except AttributeError:\n kek = key.key\n elif isinstance(key, bytes):\n kek = key\n else:\n kek = intarr2str(key)\n\n # The iv for this function must be 64 bit\n # Which is certainly different from the one above\n jek = aes_key_wrap(kek, cek, default_backend())\n\n _enc = self[\"enc\"]\n _auth_data = jwe.b64_encode_header()\n ctxt, tag, cek = self.enc_setup(_enc, _msg, auth_data=_auth_data,\n key=cek, iv=iv)\n return jwe.pack(parts=[jek, iv, ctxt, tag])"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef ec_construct_public(num):\n ecpn = ec.EllipticCurvePublicNumbers(num['x'], num['y'],\n NIST2SEC[as_unicode(num['crv'])]())\n return ecpn.public_key(default_backend())", "response": "Given a set of values on public attributes build an elliptic curve\n public key instance."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef ec_construct_private(num):\n pub_ecpn = ec.EllipticCurvePublicNumbers(num['x'], num['y'],\n NIST2SEC[as_unicode(num['crv'])]())\n priv_ecpn = ec.EllipticCurvePrivateNumbers(num['d'], pub_ecpn)\n return priv_ecpn.private_key(default_backend())", "response": "Given a set of values on public and private attributes build an elliptic curve private key instance."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreads a private Elliptic Curve key from a PEM file.", "response": "def import_private_key_from_file(filename, passphrase=None):\n \"\"\"\n Read a private 
Elliptic Curve key from a PEM file.\n\n :param filename: The name of the file\n :param passphrase: A pass phrase to use to unpack the PEM file.\n :return: A\n cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey\n instance\n \"\"\"\n with open(filename, \"rb\") as key_file:\n private_key = serialization.load_pem_private_key(\n key_file.read(),\n password=passphrase,\n backend=default_backend())\n\n return private_key"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef deserialize(self):\n\n if isinstance(self.x, (str, bytes)):\n _x = deser(self.x)\n else:\n raise ValueError('\"x\" MUST be a string')\n\n if isinstance(self.y, (str, bytes)):\n _y = deser(self.y)\n else:\n raise ValueError('\"y\" MUST be a string')\n\n if self.d:\n try:\n if isinstance(self.d, (str, bytes)):\n _d = deser(self.d)\n self.priv_key = ec_construct_private(\n {'x': _x, 'y': _y, 'crv': self.crv, 'd': _d})\n self.pub_key = self.priv_key.public_key()\n except ValueError as err:\n raise DeSerializationNotPossible(str(err))\n else:\n self.pub_key = ec_construct_public(\n {'x': _x, 'y': _y, 'crv': self.crv})", "response": "Deserializes the object from the on - the - wire representation."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef serialize(self, private=False):\n if self.priv_key:\n self._serialize(self.priv_key)\n else:\n self._serialize(self.pub_key)\n\n res = self.common()\n\n res.update({\n \"crv\": self.crv,\n \"x\": self.x,\n \"y\": self.y\n })\n\n if private and self.d:\n res[\"d\"] = self.d\n\n return res", "response": "Go from a\n or EllipticCurvePrivateKey\n instance to a dictionary representation."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nloading an elliptic curve key into the instance.", "response": "def load_key(self, key):\n \"\"\"\n Load an Elliptic curve key\n\n :param key: An elliptic curve key instance, private or public.\n 
:return: Reference to this instance\n \"\"\"\n self._serialize(key)\n if isinstance(key, ec.EllipticCurvePrivateKey):\n self.priv_key = key\n self.pub_key = key.public_key()\n else:\n self.pub_key = key\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef rsa_init(spec):\n\n try:\n size = spec['size']\n except KeyError:\n size = 2048\n\n kb = KeyBundle(keytype=\"RSA\")\n if 'use' in spec:\n for use in harmonize_usage(spec[\"use\"]):\n _key = new_rsa_key(use=use, key_size=size)\n kb.append(_key)\n else:\n _key = new_rsa_key(key_size=size)\n kb.append(_key)\n\n return kb", "response": "Initializes a KeyBundle instance containing newly minted RSA keys according to a spec."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef ec_init(spec):\n\n kb = KeyBundle(keytype=\"EC\")\n if 'use' in spec:\n for use in spec[\"use\"]:\n eck = new_ec_key(crv=spec['crv'], use=use)\n kb.append(eck)\n else:\n eck = new_ec_key(crv=spec['crv'])\n kb.append(eck)\n\n return kb", "response": "Initiate a key bundle with an elliptic curve key."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a KeyBundle based on the content in a local file", "response": "def keybundle_from_local_file(filename, typ, usage):\n \"\"\"\n Create a KeyBundle based on the content in a local file\n \n :param filename: Name of the file \n :param typ: Type of content\n :param usage: What the key should be used for\n :return: The created KeyBundle\n \"\"\"\n usage = harmonize_usage(usage)\n\n if typ.lower() == \"jwks\":\n kb = KeyBundle(source=filename, fileformat=\"jwks\", keyusage=usage)\n elif typ.lower() == 'der':\n kb = KeyBundle(source=filename, fileformat=\"der\", keyusage=usage)\n else:\n raise UnknownKeyType(\"Unsupported key type\")\n\n return kb"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following 
Python 3 code does\ndef build_key_bundle(key_conf, kid_template=\"\"):\n\n kid = 0\n\n tot_kb = KeyBundle()\n for spec in key_conf:\n typ = spec[\"type\"].upper()\n\n if typ == \"RSA\":\n if \"key\" in spec:\n error_to_catch = (OSError, IOError,\n DeSerializationNotPossible)\n try:\n kb = KeyBundle(source=\"file://%s\" % spec[\"key\"],\n fileformat=\"der\",\n keytype=typ, keyusage=spec[\"use\"])\n except error_to_catch:\n kb = rsa_init(spec)\n except Exception:\n raise\n else:\n kb = rsa_init(spec)\n elif typ == \"EC\":\n kb = ec_init(spec)\n else:\n continue\n\n if 'kid' in spec and len(kb) == 1:\n ks = kb.keys()\n ks[0].kid = spec['kid']\n else:\n for k in kb.keys():\n if kid_template:\n k.kid = kid_template % kid\n kid += 1\n else:\n k.add_kid()\n\n tot_kb.extend(kb.keys())\n\n return tot_kb", "response": "Builds a KeyBundle instance based on a list of key configuration."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncompares two key descriptions returning a tuple of - 1 0 1 depending on whether the first key description is le eq or gt then kd2", "response": "def sort_func(kd1, kd2):\n \"\"\"\n Compares 2 key descriptions\n :param kd1: First key description\n :param kd2: Second key description\n :return: -1,0,1 depending on whether kd1 le,eq or gt then kd2\n \"\"\"\n _l = _cmp(kd1['type'], kd2['type'])\n if _l:\n return _l\n\n if kd1['type'] == 'EC':\n _l = _cmp(kd1['crv'], kd2['crv'])\n if _l:\n return _l\n\n _l = _cmp(kd1['type'], kd2['type'])\n if _l:\n return _l\n\n _l = _cmp(kd1['use'][0], kd2['use'][0])\n if _l:\n return _l\n\n try:\n _kid1 = kd1['kid']\n except KeyError:\n _kid1 = None\n\n try:\n _kid2 = kd2['kid']\n except KeyError:\n _kid2 = None\n\n if _kid1 and _kid2:\n return _cmp(_kid1, _kid2)\n elif _kid1:\n return -1\n elif _kid2:\n return 1\n\n return 0"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a dictionary with keys that should be added and keys that should be deleted 
from a Key Bundle.", "response": "def key_diff(key_bundle, key_defs):\n \"\"\"\n Creates a difference dictionary with keys that should added and keys that\n should be deleted from a Key Bundle to get it updated to a state that\n mirrors What is in the key_defs specification.\n\n :param key_bundle: The original KeyBundle\n :param key_defs: A set of key definitions\n :return: A dictionary with possible keys 'add' and 'del'. The values\n for the keys are lists of :py:class:`cryptojwt.jwk.JWK` instances\n \"\"\"\n\n keys = key_bundle.get()\n diff = {}\n\n # My own sorted copy\n key_defs = order_key_defs(key_defs)[:]\n used = []\n\n for key in keys:\n match = False\n for kd in key_defs:\n if key.use not in kd['use']:\n continue\n\n if key.kty != kd['type']:\n continue\n\n if key.kty == 'EC':\n # special test only for EC keys\n if key.crv != kd['crv']:\n continue\n\n try:\n _kid = kd['kid']\n except KeyError:\n pass\n else:\n if key.kid != _kid:\n continue\n\n match = True\n used.append(kd)\n key_defs.remove(kd)\n break\n\n if not match:\n try:\n diff['del'].append(key)\n except KeyError:\n diff['del'] = [key]\n\n if key_defs:\n _kb = build_key_bundle(key_defs)\n diff['add'] = _kb.keys()\n\n return diff"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nupdating a KeyBundle with a diff specification.", "response": "def update_key_bundle(key_bundle, diff):\n \"\"\"\n Apply a diff specification to a KeyBundle.\n The keys that are to be added are added.\n The keys that should be deleted are marked as inactive.\n\n :param key_bundle: The original KeyBundle\n :param diff: The difference specification\n :return: An updated key_bundle\n \"\"\"\n try:\n _add = diff['add']\n except KeyError:\n pass\n else:\n key_bundle.extend(_add)\n\n try:\n _del = diff['del']\n except KeyError:\n pass\n else:\n _now = time.time()\n for k in _del:\n k.inactive_since = _now"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ngo 
from JWK description to binary keys", "response": "def do_keys(self, keys):\n \"\"\"\n Go from JWK description to binary keys\n\n :param keys:\n :return:\n \"\"\"\n for inst in keys:\n typ = inst[\"kty\"]\n try:\n _usage = harmonize_usage(inst['use'])\n except KeyError:\n _usage = ['']\n else:\n del inst['use']\n\n flag = 0\n for _use in _usage:\n for _typ in [typ, typ.lower(), typ.upper()]:\n try:\n _key = K2C[_typ](use=_use, **inst)\n except KeyError:\n continue\n except JWKException as err:\n logger.warning('While loading keys: {}'.format(err))\n else:\n if _key not in self._keys:\n self._keys.append(_key)\n flag = 1\n break\n if not flag:\n logger.warning(\n 'While loading keys, UnknownKeyType: {}'.format(typ))"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef do_local_jwk(self, filename):\n _info = json.loads(open(filename).read())\n if 'keys' in _info:\n self.do_keys(_info[\"keys\"])\n else:\n self.do_keys([_info])\n\n self.last_updated = time.time()", "response": "Load a JWKS from a local file"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nloading a DER encoded file amd create a key from it.", "response": "def do_local_der(self, filename, keytype, keyusage=None, kid=''):\n \"\"\"\n Load a DER encoded file amd create a key from it.\n \n :param filename: \n :param keytype: Presently only 'rsa' supported\n :param keyusage: encryption ('enc') or signing ('sig') or both\n \"\"\"\n _bkey = import_private_rsa_key_from_file(filename)\n\n if keytype.lower() != 'rsa':\n raise NotImplemented('No support for DER decoding of that key type')\n\n if not keyusage:\n keyusage = [\"enc\", \"sig\"]\n else:\n keyusage = harmonize_usage(keyusage)\n\n for use in keyusage:\n _key = RSAKey().load_key(_bkey)\n _key.use = use\n if kid:\n _key.kid = kid\n self._keys.append(_key)\n\n self.last_updated = time.time()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the 
documentation for the following Python 3 function\ndef do_remote(self):\n if self.verify_ssl:\n args = {\"verify\": self.verify_ssl}\n else:\n args = {}\n\n try:\n logging.debug('KeyBundle fetch keys from: {}'.format(self.source))\n r = self.httpc('GET', self.source, **args)\n except Exception as err:\n logger.error(err)\n raise UpdateFailed(\n REMOTE_FAILED.format(self.source, str(err)))\n\n if r.status_code == 200: # New content\n self.time_out = time.time() + self.cache_time\n\n self.imp_jwks = self._parse_remote_response(r)\n if not isinstance(self.imp_jwks,\n dict) or \"keys\" not in self.imp_jwks:\n raise UpdateFailed(MALFORMED.format(self.source))\n\n logger.debug(\"Loaded JWKS: %s from %s\" % (r.text, self.source))\n try:\n self.do_keys(self.imp_jwks[\"keys\"])\n except KeyError:\n logger.error(\"No 'keys' keyword in JWKS\")\n raise UpdateFailed(MALFORMED.format(self.source))\n\n else:\n raise UpdateFailed(\n REMOTE_FAILED.format(self.source, r.status_code))\n self.last_updated = time.time()\n return True", "response": "Load a JWKS from a webpage and update the cache"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nparses the JWKS from the HTTP response.", "response": "def _parse_remote_response(self, response):\n \"\"\"\n Parse JWKS from the HTTP response.\n\n Should be overriden by subclasses for adding support of e.g. 
signed\n JWKS.\n :param response: HTTP response from the 'jwks_uri' endpoint\n :return: response parsed as JSON\n \"\"\"\n # Check if the content type is the right one.\n try:\n if response.headers[\"Content-Type\"] != 'application/json':\n logger.warning('Wrong Content_type ({})'.format(\n response.headers[\"Content-Type\"]))\n except KeyError:\n pass\n\n logger.debug(\"Loaded JWKS: %s from %s\" % (response.text, self.source))\n try:\n return json.loads(response.text)\n except ValueError:\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update(self):\n res = True # An update was successful\n if self.source:\n _keys = self._keys # just in case\n\n # reread everything\n self._keys = []\n\n try:\n if self.remote is False:\n if self.fileformat in [\"jwks\", \"jwk\"]:\n self.do_local_jwk(self.source)\n elif self.fileformat == \"der\":\n self.do_local_der(self.source, self.keytype,\n self.keyusage)\n else:\n res = self.do_remote()\n except Exception as err:\n logger.error('Key bundle update failed: {}'.format(err))\n self._keys = _keys # restore\n return False\n\n now = time.time()\n for _key in _keys:\n if _key not in self._keys:\n if not _key.inactive_since: # If already marked don't mess\n _key.inactive_since = now\n self._keys.append(_key)\n\n return res", "response": "Reload the keys if necessary"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get(self, typ=\"\", only_active=True):\n self._uptodate()\n _typs = [typ.lower(), typ.upper()]\n\n if typ:\n _keys = [k for k in self._keys if k.kty in _typs]\n else:\n _keys = self._keys\n\n if only_active:\n return [k for k in _keys if not k.inactive_since]\n else:\n return _keys", "response": "Return a list of keys of a specific type. 
Either all keys or only keys of a specific type\n ."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef remove_keys_by_type(self, typ):\n _typs = [typ.lower(), typ.upper()]\n self._keys = [k for k in self._keys if not k.kty in _typs]", "response": "Removes keys that are of a specific type."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate a JWKS JSON document of the keys in this bundle.", "response": "def jwks(self, private=False):\n \"\"\"\n Create a JWKS as a JSON document\n \n :param private: Whether private key information should be included.\n :return: A JWKS JSON representation of the keys in this bundle\n \"\"\"\n self._uptodate()\n keys = list()\n for k in self._keys:\n if private:\n key = k.serialize(private)\n else:\n key = k.serialize()\n for k, v in key.items():\n key[k] = as_unicode(v)\n keys.append(key)\n return json.dumps({\"keys\": keys})"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_key_with_kid(self, kid):\n for key in self._keys:\n if key.kid == kid:\n return key\n\n # Try updating since there might have been an update to the key file\n self.update()\n\n for key in self._keys:\n if key.kid == kid:\n return key\n\n return None", "response": "Returns the key that has a specific key ID or None if no such key exists."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a list of all the key IDs that exist in this bundle", "response": "def kids(self):\n \"\"\"\n Return a list of key IDs. Note that this list may be shorter then\n the list of keys. 
The reason might be that there are some keys with\n no key ID.\n \n :return: A list of all the key IDs that exists in this bundle \n \"\"\"\n self._uptodate()\n return [key.kid for key in self._keys if key.kid != \"\"]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef mark_as_inactive(self, kid):\n k = self.get_key_with_kid(kid)\n k.inactive_since = time.time()", "response": "Mark a specific key as inactive based on the keys KeyID\n \n "} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef remove_outdated(self, after, when=0):\n if when:\n now = when\n else:\n now = time.time()\n\n if not isinstance(after, float):\n try:\n after = float(after)\n except TypeError:\n raise\n\n _kl = []\n for k in self._keys:\n if k.inactive_since and k.inactive_since + after < now:\n continue\n else:\n _kl.append(k)\n\n self._keys = _kl", "response": "Removes keys that should not be available any more."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef copy(self):\n kb = KeyBundle()\n kb._keys = self._keys[:]\n\n kb.cache_time = self.cache_time\n kb.verify_ssl = self.verify_ssl\n if self.source:\n kb.source = self.source\n kb.fileformat = self.fileformat\n kb.keytype = self.keytype\n kb.keyusage = self.keyusage\n kb.remote = self.remote\n\n return kb", "response": "Make deep copy of this KeyBundle"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nunpack a JWT into its parts and base64 decodes the parts individually and validates the header against the header.", "response": "def unpack(self, token, **kwargs):\n \"\"\"\n Unpacks a JWT into its parts and base64 decodes the parts\n individually\n\n :param token: The JWT\n :param kwargs: A possible empty set of claims to verify the header\n against.\n \"\"\"\n if isinstance(token, str):\n try:\n token = token.encode(\"utf-8\")\n except 
UnicodeDecodeError:\n pass\n\n part = split_token(token)\n self.b64part = part\n self.part = [b64d(p) for p in part]\n self.headers = json.loads(as_unicode(self.part[0]))\n for key,val in kwargs.items():\n if not val and key in self.headers:\n continue\n\n try:\n _ok = self.verify_header(key,val)\n except KeyError:\n raise\n else:\n if not _ok:\n raise HeaderError(\n 'Expected \"{}\" to be \"{}\", was \"{}\"'.format(\n key, val, self.headers[key]))\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\npacks components into a JWT", "response": "def pack(self, parts=None, headers=None):\n \"\"\"\n Packs components into a JWT\n\n :param parts: List of parts to pack\n :param headers: The JWT headers\n :return:\n \"\"\"\n if not headers:\n if self.headers:\n headers = self.headers\n else:\n headers = {'alg': 'none'}\n\n logging.debug('JWT header: {}'.format(headers))\n\n if not parts:\n return \".\".join([a.decode() for a in self.b64part])\n\n self.part = [headers] + parts\n _all = self.b64part = [b64encode_item(headers)]\n _all.extend([b64encode_item(p) for p in parts])\n\n return \".\".join([a.decode() for a in _all])"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the payload from the different parts of the signed or encrypted JSON Web Token.", "response": "def payload(self):\n \"\"\"\n Picks out the payload from the different parts of the signed/encrypted\n JSON Web Token. 
If the content type is said to be 'jwt' deserialize the\n payload into a Python object otherwise return as-is.\n\n :return: The payload\n \"\"\"\n _msg = as_unicode(self.part[1])\n\n # If not JSON web token assume JSON\n if \"cty\" in self.headers and self.headers[\"cty\"].lower() != \"jwt\":\n pass\n else:\n try:\n _msg = json.loads(_msg)\n except ValueError:\n pass\n\n return _msg"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nchecking that a particular header claim is present and has a specific value", "response": "def verify_header(self, key, val):\n \"\"\"\n Check that a particular header claim is present and has a specific value\n\n :param key: The claim\n :param val: The value of the claim\n :raises: KeyError if the claim is not present in the header\n :return: True if the claim exists in the header and has the prescribed\n value\n \"\"\"\n\n if isinstance(val, list):\n if self.headers[key] in val:\n return True\n else:\n return False\n else:\n if self.headers[key] == val:\n return True\n else:\n return False"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nverifying that a set of particular header claim is present and has the specified values.", "response": "def verify_headers(self, check_presence=True, **kwargs):\n \"\"\"\n Check that a set of particular header claim are present and has\n specific values\n\n :param kwargs: The claim/value sets as a dictionary\n :return: True if the claim that appears in the header has the\n prescribed values. 
If a claim is not present in the header and\n check_presence is True then False is returned.\n \"\"\"\n for key, val in kwargs.items():\n try:\n _ok = self.verify_header(key, val)\n except KeyError:\n if check_presence:\n return False\n else:\n pass\n else:\n if not _ok:\n return False\n return True"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a signature over a message using a symmetric key.", "response": "def sign(self, msg, key):\n \"\"\"\n Create a signature over a message as defined in RFC7515 using a\n symmetric key\n\n :param msg: The message\n :param key: The key\n :return: A signature\n \"\"\"\n h = hmac.HMAC(key, self.algorithm(), default_backend())\n h.update(msg)\n return h.finalize()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef verify(self, msg, sig, key):\n try:\n h = hmac.HMAC(key, self.algorithm(), default_backend())\n h.update(msg)\n h.verify(sig)\n return True\n except:\n return False", "response": "Verifies whether the message is valid for the given message and the given key."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef main():\n parser = argparse.ArgumentParser(description='JSON Web Key (JWK) Generator')\n\n parser.add_argument('--kty',\n dest='kty',\n metavar='type',\n help='Key type',\n required=True)\n parser.add_argument('--size',\n dest='keysize',\n type=int,\n metavar='size',\n help='Key size')\n parser.add_argument('--crv',\n dest='crv',\n metavar='curve',\n help='EC curve',\n choices=NIST2SEC.keys(),\n default=DEFAULT_EC_CURVE)\n parser.add_argument('--exp',\n dest='rsa_exp',\n type=int,\n metavar='exponent',\n help=f'RSA public key exponent (default {DEFAULT_RSA_EXP})',\n default=DEFAULT_RSA_EXP)\n parser.add_argument('--kid',\n dest='kid',\n metavar='id',\n help='Key ID')\n args = parser.parse_args()\n\n if args.kty.upper() == 'RSA':\n if 
args.keysize is None:\n args.keysize = DEFAULT_RSA_KEYSIZE\n jwk = new_rsa_key(public_exponent=args.rsa_exp, key_size=args.keysize, kid=args.kid)\n elif args.kty.upper() == 'EC':\n if args.crv not in NIST2SEC:\n print(\"Unknown curve: {0}\".format(args.crv), file=sys.stderr)\n exit(1)\n jwk = new_ec_key(crv=args.crv, kid=args.kid)\n elif args.kty.upper() == 'SYM':\n if args.keysize is None:\n args.keysize = DEFAULT_SYM_KEYSIZE\n randomkey = os.urandom(args.keysize)\n jwk = SYMKey(key=randomkey, kid=args.kid)\n else:\n print(f\"Unknown key type: {args.kty}\", file=sys.stderr)\n exit(1)\n\n jwk_dict = jwk.serialize(private=True)\n print(json.dumps(jwk_dict, sort_keys=True, indent=4))\n print(\"SHA-256: \" + jwk.thumbprint('SHA-256').decode(), file=sys.stderr)", "response": "Main function for the base class of the base class"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef ensure_ec_params(jwk_dict, private):\n provided = frozenset(jwk_dict.keys())\n if private is not None and private:\n required = EC_PUBLIC_REQUIRED | EC_PRIVATE_REQUIRED\n else:\n required = EC_PUBLIC_REQUIRED\n return ensure_params('EC', provided, required)", "response": "Ensure all required EC parameters are present in dictionary"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef ensure_rsa_params(jwk_dict, private):\n provided = frozenset(jwk_dict.keys())\n if private is not None and private:\n required = RSA_PUBLIC_REQUIRED | RSA_PRIVATE_REQUIRED\n else:\n required = RSA_PUBLIC_REQUIRED\n return ensure_params('RSA', provided, required)", "response": "Ensure all required RSA parameters are present in dictionary"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef ensure_params(kty, provided, required):\n if not required <= provided:\n missing = required - provided\n raise MissingValue('Missing properties for 
kty={}, {}'.format(kty, str(list(missing))))", "response": "Ensure all required parameters are present in the dictionary."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef key_from_jwk_dict(jwk_dict, private=None):\n\n # uncouple from the original item\n _jwk_dict = copy.copy(jwk_dict)\n\n if 'kty' not in _jwk_dict:\n raise MissingValue('kty missing')\n\n if _jwk_dict['kty'] == 'EC':\n ensure_ec_params(_jwk_dict, private)\n\n if private is not None and not private:\n # remove private components\n for v in EC_PRIVATE:\n _jwk_dict.pop(v, None)\n\n if _jwk_dict[\"crv\"] in NIST2SEC:\n curve = NIST2SEC[_jwk_dict[\"crv\"]]()\n else:\n raise UnsupportedAlgorithm(\n \"Unknown curve: %s\" % (_jwk_dict[\"crv\"]))\n\n if _jwk_dict.get(\"d\", None) is not None:\n # Ecdsa private key.\n _jwk_dict['priv_key'] = ec.derive_private_key(\n base64url_to_long(_jwk_dict[\"d\"]), curve,\n backends.default_backend())\n _jwk_dict['pub_key'] = _jwk_dict['priv_key'].public_key()\n else:\n # Ecdsa public key.\n ec_pub_numbers = ec.EllipticCurvePublicNumbers(\n base64url_to_long(_jwk_dict[\"x\"]),\n base64url_to_long(_jwk_dict[\"y\"]), curve)\n _jwk_dict['pub_key'] = ec_pub_numbers.public_key(\n backends.default_backend())\n return ECKey(**_jwk_dict)\n elif _jwk_dict['kty'] == 'RSA':\n ensure_rsa_params(_jwk_dict, private)\n\n if private is not None and not private:\n # remove private components\n for v in RSA_PRIVATE:\n _jwk_dict.pop(v, None)\n\n rsa_pub_numbers = rsa.RSAPublicNumbers(\n base64url_to_long(_jwk_dict[\"e\"]),\n base64url_to_long(_jwk_dict[\"n\"]))\n if _jwk_dict.get(\"p\", None) is not None:\n # Rsa private key. 
These MUST be present\n p_long = base64url_to_long(_jwk_dict[\"p\"])\n q_long = base64url_to_long(_jwk_dict[\"q\"])\n d_long = base64url_to_long(_jwk_dict[\"d\"])\n # If not present these can be calculated from the others\n if 'dp' not in _jwk_dict:\n dp_long = rsa_crt_dmp1(d_long, p_long)\n else:\n dp_long = base64url_to_long(_jwk_dict[\"dp\"])\n if 'dq' not in _jwk_dict:\n dq_long = rsa_crt_dmq1(d_long, q_long)\n else:\n dq_long = base64url_to_long(_jwk_dict[\"dq\"])\n if 'qi' not in _jwk_dict:\n qi_long = rsa_crt_iqmp(p_long, q_long)\n else:\n qi_long = base64url_to_long(_jwk_dict[\"qi\"])\n\n rsa_priv_numbers = rsa.RSAPrivateNumbers(\n p_long, q_long, d_long,\n dp_long, dq_long, qi_long, rsa_pub_numbers)\n _jwk_dict['priv_key'] = rsa_priv_numbers.private_key(\n backends.default_backend())\n _jwk_dict['pub_key'] = _jwk_dict['priv_key'].public_key()\n else:\n _jwk_dict['pub_key'] = rsa_pub_numbers.public_key(\n backends.default_backend())\n\n if _jwk_dict['kty'] != \"RSA\":\n raise WrongKeyType('\"{}\" should have been \"RSA\"'.format(_jwk_dict[\n 'kty']))\n return RSAKey(**_jwk_dict)\n elif _jwk_dict['kty'] == 'oct':\n if 'key' not in _jwk_dict and 'k' not in _jwk_dict:\n raise MissingValue(\n 'There has to be one of \"k\" or \"key\" in a symmetric key')\n\n return SYMKey(**_jwk_dict)\n else:\n raise UnknownKeyType", "response": "Load a JWK from a dictionary."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef jwk_wrap(key, use=\"\", kid=\"\"):\n if isinstance(key, rsa.RSAPublicKey) or isinstance(key, rsa.RSAPrivateKey):\n kspec = RSAKey(use=use, kid=kid).load_key(key)\n elif isinstance(key, str):\n kspec = SYMKey(key=key, use=use, kid=kid)\n elif isinstance(key, ec.EllipticCurvePublicKey):\n kspec = ECKey(use=use, kid=kid).load_key(key)\n else:\n raise Exception(\"Unknown key type:key=\" + str(type(key)))\n\n kspec.serialize()\n return kspec", "response": "Create a Key instance with the given key"} 
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef build_keyjar(key_conf, kid_template=\"\", keyjar=None, owner=''):\n\n if keyjar is None:\n keyjar = KeyJar()\n\n tot_kb = build_key_bundle(key_conf, kid_template)\n\n keyjar.add_kb(owner, tot_kb)\n\n return keyjar", "response": "Builds a KeyJar based on a key configuration."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef update_keyjar(keyjar):\n for iss, kbl in keyjar.items():\n for kb in kbl:\n kb.update()", "response": "Update all the items in the key jar by one."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a text representation of the keyjar.", "response": "def key_summary(keyjar, issuer):\n \"\"\"\n Return a text representation of the keyjar.\n\n :param keyjar: A :py:class:`oidcmsg.key_jar.KeyJar` instance\n :param issuer: Which key owner that we are looking at\n :return: A text representation of the keys\n \"\"\"\n try:\n kbl = keyjar[issuer]\n except KeyError:\n return ''\n else:\n key_list = []\n for kb in kbl:\n for key in kb.keys():\n if key.inactive_since:\n key_list.append(\n '*{}:{}:{}'.format(key.kty, key.use, key.kid))\n else:\n key_list.append(\n '{}:{}:{}'.format(key.kty, key.use, key.kid))\n return ', '.join(key_list)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef init_key_jar(public_path='', private_path='', key_defs='', owner='',\n read_only=True):\n \"\"\"\n A number of cases here:\n\n 1. A private path is given\n\n a. The file exists and a JWKS is found there.\n From that JWKS a KeyJar instance is built.\n b.\n If the private path file doesn't exit the key definitions are\n used to build a KeyJar instance. 
A JWKS with the private keys are\n written to the file named in private_path.\n\n If a public path is also provided a JWKS with public keys are written\n to that file.\n\n 2. A public path is given but no private path.\n\n a. If the public path file exists then the JWKS in that file is used to\n construct a KeyJar.\n b. If no such file exists then a KeyJar will be built\n based on the key_defs specification and a JWKS with the public keys\n will be written to the public path file.\n\n 3. If neither a public path nor a private path is given then a KeyJar is\n built based on the key_defs specification and no JWKS will be written\n to file.\n\n In all cases a KeyJar instance is returned\n\n The keys stored in the KeyJar will be stored under the '' identifier.\n\n :param public_path: A file path to a file that contains a JWKS with public\n keys\n :param private_path: A file path to a file that contains a JWKS with\n private keys.\n :param key_defs: A definition of what keys should be created if they are\n not already available\n :param owner: The owner of the keys\n :param read_only: This function should not attempt to write anything\n to a file system.\n :return: An instantiated :py:class;`oidcmsg.key_jar.KeyJar` instance\n \"\"\"\n\n if private_path:\n if os.path.isfile(private_path):\n _jwks = open(private_path, 'r').read()\n _kj = KeyJar()\n _kj.import_jwks(json.loads(_jwks), owner)\n if key_defs:\n _kb = _kj.issuer_keys[owner][0]\n _diff = key_diff(_kb, key_defs)\n if _diff:\n if read_only:\n logger.error('Not allowed to write to disc!')\n else:\n update_key_bundle(_kb, _diff)\n _kj.issuer_keys[owner] = [_kb]\n jwks = _kj.export_jwks(private=True, issuer=owner)\n fp = open(private_path, 'w')\n fp.write(json.dumps(jwks))\n fp.close()\n else:\n _kj = build_keyjar(key_defs, owner=owner)\n if not read_only:\n jwks = _kj.export_jwks(private=True, issuer=owner)\n head, tail = os.path.split(private_path)\n if head and not os.path.isdir(head):\n os.makedirs(head)\n fp = 
open(private_path, 'w')\n fp.write(json.dumps(jwks))\n fp.close()\n\n if public_path and not read_only:\n jwks = _kj.export_jwks(issuer=owner) # public part\n head, tail = os.path.split(public_path)\n if head and not os.path.isdir(head):\n os.makedirs(head)\n fp = open(public_path, 'w')\n fp.write(json.dumps(jwks))\n fp.close()\n elif public_path:\n if os.path.isfile(public_path):\n _jwks = open(public_path, 'r').read()\n _kj = KeyJar()\n _kj.import_jwks(json.loads(_jwks), owner)\n if key_defs:\n _kb = _kj.issuer_keys[owner][0]\n _diff = key_diff(_kb, key_defs)\n if _diff:\n if read_only:\n logger.error('Not allowed to write to disc!')\n else:\n update_key_bundle(_kb, _diff)\n _kj.issuer_keys[owner] = [_kb]\n jwks = _kj.export_jwks(issuer=owner)\n fp = open(private_path, 'w')\n fp.write(json.dumps(jwks))\n fp.close()\n else:\n _kj = build_keyjar(key_defs, owner=owner)\n if not read_only:\n _jwks = _kj.export_jwks(issuer=owner)\n head, tail = os.path.split(public_path)\n if head and not os.path.isdir(head):\n os.makedirs(head)\n fp = open(public_path, 'w')\n fp.write(json.dumps(_jwks))\n fp.close()\n else:\n _kj = build_keyjar(key_defs, owner=owner)\n\n return _kj", "response": "This function creates a KeyJar instance for the given public and private keys."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef add_url(self, issuer, url, **kwargs):\n\n if not url:\n raise KeyError(\"No url given\")\n\n if \"/localhost:\" in url or \"/localhost/\" in url:\n kb = self.keybundle_cls(source=url, verify_ssl=False,\n httpc=self.httpc, **kwargs)\n else:\n kb = self.keybundle_cls(source=url, verify_ssl=self.verify_ssl,\n httpc=self.httpc, **kwargs)\n\n kb.update()\n self.add_kb(issuer, kb)\n\n return kb", "response": "This method creates a new JWKS. 
KeyBundle instance with the given url."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nadd a symmetric key to the KeyJar.", "response": "def add_symmetric(self, issuer, key, usage=None):\n \"\"\"\n Add a symmetric key. This is done by wrapping it in a key bundle \n cloak since KeyJar does not handle keys directly but only through\n key bundles.\n \n :param issuer: Owner of the key\n :param key: The key \n :param usage: What the key can be used for signing/signature \n verification (sig) and/or encryption/decryption (enc)\n \"\"\"\n if issuer not in self.issuer_keys:\n self.issuer_keys[issuer] = []\n\n if usage is None:\n self.issuer_keys[issuer].append(\n self.keybundle_cls([{\"kty\": \"oct\", \"key\": key}]))\n else:\n for use in usage:\n self.issuer_keys[issuer].append(\n self.keybundle_cls([{\"kty\": \"oct\",\n \"key\": key,\n \"use\": use}]))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_kb(self, issuer, kb):\n try:\n self.issuer_keys[issuer].append(kb)\n except KeyError:\n self.issuer_keys[issuer] = [kb]", "response": "Adds a key bundle and bind it to an identifier\n \n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting all keys that match a set of search criteria", "response": "def get(self, key_use, key_type=\"\", owner=\"\", kid=None, **kwargs):\n \"\"\"\n Get all keys that matches a set of search criteria\n\n :param key_use: A key useful for this usage (enc, dec, sig, ver)\n :param key_type: Type of key (rsa, ec, oct, ..)\n :param owner: Who is the owner of the keys, \"\" == me (default)\n :param kid: A Key Identifier\n :return: A possibly empty list of keys\n \"\"\"\n\n if key_use in [\"dec\", \"enc\"]:\n use = \"enc\"\n else:\n use = \"sig\"\n\n _kj = None\n if owner != \"\":\n try:\n _kj = self.issuer_keys[owner]\n except KeyError:\n if owner.endswith(\"/\"):\n try:\n _kj = self.issuer_keys[owner[:-1]]\n except KeyError:\n pass\n 
else:\n try:\n _kj = self.issuer_keys[owner + \"/\"]\n except KeyError:\n pass\n else:\n try:\n _kj = self.issuer_keys[owner]\n except KeyError:\n pass\n\n if _kj is None:\n return []\n\n lst = []\n for bundle in _kj:\n if key_type:\n if key_use in ['ver', 'dec']:\n _bkeys = bundle.get(key_type, only_active=False)\n else:\n _bkeys = bundle.get(key_type)\n else:\n _bkeys = bundle.keys()\n for key in _bkeys:\n if key.inactive_since and key_use != \"sig\":\n # Skip inactive keys unless for signature verification\n continue\n if not key.use or use == key.use:\n if kid:\n if key.kid == kid:\n lst.append(key)\n break\n else:\n continue\n else:\n lst.append(key)\n\n # if elliptic curve, have to check if I have a key of the right curve\n if key_type == \"EC\" and \"alg\" in kwargs:\n name = \"P-{}\".format(kwargs[\"alg\"][2:]) # the type\n _lst = []\n for key in lst:\n if name != key.crv:\n continue\n _lst.append(key)\n lst = _lst\n\n if use == 'enc' and key_type == 'oct' and owner != '':\n # Add my symmetric keys\n for kb in self.issuer_keys['']:\n for key in kb.get(key_type):\n if key.inactive_since:\n continue\n if not key.use or key.use == use:\n lst.append(key)\n\n return lst"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_signing_key(self, key_type=\"\", owner=\"\", kid=None, **kwargs):\n return self.get(\"sig\", key_type, owner, kid, **kwargs)", "response": "Shortcut to use for signing keys only."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a list of keys that can be used for a specific crypto algorithm and usage by key owner.", "response": "def keys_by_alg_and_usage(self, issuer, alg, usage):\n \"\"\"\n Find all keys that can be used for a specific crypto algorithm and\n usage by key owner.\n\n :param issuer: Key owner\n :param alg: a crypto algorithm\n :param usage: What the key should be used for\n :return: A possibly empty list of keys\n \"\"\"\n if usage in 
[\"sig\", \"ver\"]:\n ktype = jws_alg2keytype(alg)\n else:\n ktype = jwe_alg2keytype(alg)\n\n return self.get(usage, ktype, issuer)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_issuer_keys(self, issuer):\n res = []\n for kbl in self.issuer_keys[issuer]:\n res.extend(kbl.keys())\n return res", "response": "Get all the keys that belong to an entity."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn the issue entity ID that matches the given URL.", "response": "def match_owner(self, url):\n \"\"\"\n Finds the first entity, with keys in the key jar, with an\n identifier that matches the given URL. The match is a leading\n substring match.\n\n :param url: A URL\n :return: An issue entity ID that exists in the Key jar\n \"\"\"\n for owner in self.issuer_keys.keys():\n if owner.startswith(url):\n return owner\n\n raise KeyError(\"No keys for '{}' in this keyjar\".format(url))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef load_keys(self, issuer, jwks_uri='', jwks=None, replace=False):\n\n logger.debug(\"Initiating key bundle for issuer: %s\" % issuer)\n\n if replace or issuer not in self.issuer_keys:\n self.issuer_keys[issuer] = []\n\n if jwks_uri:\n self.add_url(issuer, jwks_uri)\n elif jwks:\n # jwks should only be considered if no jwks_uri is present\n _keys = jwks['keys']\n self.issuer_keys[issuer].append(self.keybundle_cls(_keys))", "response": "Load keys from another server"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef find(self, source, issuer):\n try:\n for kb in self.issuer_keys[issuer]:\n if kb.source == source:\n return kb\n except KeyError:\n return None\n\n return None", "response": "Find a key bundle based on the source of the keys\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the 
following Python 3 function\ndef export_jwks(self, private=False, issuer=\"\"):\n keys = []\n for kb in self.issuer_keys[issuer]:\n keys.extend([k.serialize(private) for k in kb.keys() if\n k.inactive_since == 0])\n return {\"keys\": keys}", "response": "Generates a JWKS. a \n JSON string representing a JWKS."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nexports a JWKS as a JSON document.", "response": "def export_jwks_as_json(self, private=False, issuer=\"\"):\n \"\"\"\n Export a JWKS as a JSON document.\n\n :param private: Whether it should be the private keys or the public\n :param issuer: The entity ID.\n :return: A JSON representation of a JWKS\n \"\"\"\n return json.dumps(self.export_jwks(private, issuer))"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nimport all the keys that are represented in a JWKS and adds them to the issuer_keys attribute.", "response": "def import_jwks(self, jwks, issuer):\n \"\"\"\n Imports all the keys that are represented in a JWKS\n\n :param jwks: Dictionary representation of a JWKS\n :param issuer: Who 'owns' the JWKS\n \"\"\"\n try:\n _keys = jwks[\"keys\"]\n except KeyError:\n raise ValueError('Not a proper JWKS')\n else:\n try:\n self.issuer_keys[issuer].append(\n self.keybundle_cls(_keys, verify_ssl=self.verify_ssl))\n except KeyError:\n self.issuer_keys[issuer] = [self.keybundle_cls(\n _keys, verify_ssl=self.verify_ssl)]"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nimporting all the keys that are represented in a JWKS expressed as a JSON object and returns a JWKS object containing the keys that are represented in a JSON object.", "response": "def import_jwks_as_json(self, jwks, issuer):\n \"\"\"\n Imports all the keys that are represented in a JWKS expressed as a\n JSON object\n\n :param jwks: JSON representation of a JWKS\n :param issuer: Who 'owns' the JWKS\n \"\"\"\n return 
self.import_jwks(json.loads(jwks), issuer)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef remove_outdated(self, when=0):\n for iss in list(self.owners()):\n _kbl = []\n for kb in self.issuer_keys[iss]:\n kb.remove_outdated(self.remove_after, when=when)\n if len(kb):\n _kbl.append(kb)\n if _kbl:\n self.issuer_keys[iss] = _kbl\n else:\n del self.issuer_keys[iss]", "response": "Removes outdated keys from the key jar."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_jwt_decrypt_keys(self, jwt, **kwargs):\n\n try:\n _key_type = jwe_alg2keytype(jwt.headers['alg'])\n except KeyError:\n _key_type = ''\n\n try:\n _kid = jwt.headers['kid']\n except KeyError:\n logger.info('Missing kid')\n _kid = ''\n\n keys = self.get(key_use='enc', owner='', key_type=_key_type)\n\n try:\n _aud = kwargs['aud']\n except KeyError:\n _aud = ''\n\n if _aud:\n try:\n allow_missing_kid = kwargs['allow_missing_kid']\n except KeyError:\n allow_missing_kid = False\n\n try:\n nki = kwargs['no_kid_issuer']\n except KeyError:\n nki = {}\n\n keys = self._add_key(keys, _aud, 'enc', _key_type, _kid, nki,\n allow_missing_kid)\n\n # Only want the appropriate keys.\n keys = [k for k in keys if k.appropriate_for('decrypt')]\n return keys", "response": "Get the decryption keys from this keyjar based on information carried by the user."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting the keys that should be used to verify a signed JWT.", "response": "def get_jwt_verify_keys(self, jwt, **kwargs):\n \"\"\"\n Get keys from this key jar based on information in a JWS. 
These keys\n should be usable to verify the signed JWT.\n\n :param jwt: A cryptojwt.jwt.JWT instance\n :param kwargs: Other key word arguments\n :return: list of usable keys\n \"\"\"\n\n try:\n allow_missing_kid = kwargs['allow_missing_kid']\n except KeyError:\n allow_missing_kid = False\n\n try:\n _key_type = jws_alg2keytype(jwt.headers['alg'])\n except KeyError:\n _key_type = ''\n\n try:\n _kid = jwt.headers['kid']\n except KeyError:\n logger.info('Missing kid')\n _kid = ''\n\n try:\n nki = kwargs['no_kid_issuer']\n except KeyError:\n nki = {}\n\n _payload = jwt.payload()\n\n try:\n _iss = _payload['iss']\n except KeyError:\n try:\n _iss = kwargs['iss']\n except KeyError:\n _iss = ''\n\n if _iss:\n # First extend the key jar iff allowed\n if \"jku\" in jwt.headers and _iss:\n if not self.find(jwt.headers[\"jku\"], _iss):\n # This is really questionable\n try:\n if kwargs[\"trusting\"]:\n self.add_url(_iss, jwt.headers[\"jku\"])\n except KeyError:\n pass\n\n keys = self._add_key([], _iss, 'sig', _key_type,\n _kid, nki, allow_missing_kid)\n\n if _key_type == 'oct':\n keys.extend(self.get(key_use='sig', owner='',\n key_type=_key_type))\n else: # No issuer, just use all keys I have\n keys = self.get(key_use='sig', owner='', key_type=_key_type)\n\n # Only want the appropriate keys.\n keys = [k for k in keys if k.appropriate_for('verify')]\n return keys"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nmaking deep copy of this key jar.", "response": "def copy(self):\n \"\"\"\n Make deep copy of this key jar.\n\n :return: A :py:class:`oidcmsg.key_jar.KeyJar` instance\n \"\"\"\n kj = KeyJar()\n for issuer in self.owners():\n kj[issuer] = [kb.copy() for kb in self[issuer]]\n\n kj.verify_ssl = self.verify_ssl\n return kj"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef pick_key(keys, use, alg='', key_type='', kid=''):\n res = []\n if not key_type:\n if use == 'sig':\n key_type = 
jws_alg2keytype(alg)\n else:\n key_type = jwe_alg2keytype(alg)\n\n for key in keys:\n if key.use and key.use != use:\n continue\n\n if key.kty == key_type:\n if key.kid and kid:\n if key.kid == kid:\n res.append(key)\n else:\n continue\n\n if key.alg == '':\n if alg:\n if key_type == 'EC':\n if key.crv == 'P-{}'.format(alg[2:]):\n res.append(key)\n continue\n res.append(key)\n elif alg and key.alg == alg:\n res.append(key)\n else:\n res.append(key)\n return res", "response": "Given a list of keys pick out the keys that fulfill them from a base - key set."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\npacking the initial information for the payload.", "response": "def pack_init(self, recv, aud):\n \"\"\"\n Gather initial information for the payload.\n\n :return: A dictionary with claims and values\n \"\"\"\n argv = {'iss': self.iss, 'iat': utc_time_sans_frac()}\n if self.lifetime:\n argv['exp'] = argv['iat'] + self.lifetime\n\n _aud = self.put_together_aud(recv, aud)\n if _aud:\n argv['aud'] = _aud\n\n return argv"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef pack_key(self, owner_id='', kid=''):\n keys = pick_key(self.my_keys(owner_id, 'sig'), 'sig', alg=self.alg,\n kid=kid)\n\n if not keys:\n raise NoSuitableSigningKeys('kid={}'.format(kid))\n\n return keys[0]", "response": "Return a key that can be used for signing the Json Web Token."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nverify a signed JSON Web Token", "response": "def _verify(self, rj, token):\n \"\"\"\n Verify a signed JSON Web Token\n\n :param rj: A :py:class:`cryptojwt.jws.JWS` instance\n :param token: The signed JSON Web Token\n :return: A verified message\n \"\"\"\n keys = self.key_jar.get_jwt_verify_keys(rj.jwt)\n return rj.verify_compact(token, keys)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef 
_decrypt(self, rj, token):\n if self.iss:\n keys = self.key_jar.get_jwt_decrypt_keys(rj.jwt, aud=self.iss)\n else:\n keys = self.key_jar.get_jwt_decrypt_keys(rj.jwt)\n return rj.decrypt(token, keys=keys)", "response": "Decrypt an encrypted JsonWebToken"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nverify that the message is a valid A .", "response": "def verify_profile(msg_cls, info, **kwargs):\n \"\"\"\n If a message type is known for this JSON document. Verify that the\n document complies with the message specifications.\n\n :param msg_cls: The message class. A\n :py:class:`oidcmsg.message.Message` instance\n :param info: The information in the JSON document as a dictionary\n :param kwargs: Extra keyword arguments used when doing the verification.\n :return: The verified message as a msg_cls instance.\n \"\"\"\n _msg = msg_cls(**info)\n if not _msg.verify(**kwargs):\n raise VerificationError()\n return _msg"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef unpack(self, token):\n if not token:\n raise KeyError\n\n _jwe_header = _jws_header = None\n\n # Check if it's an encrypted JWT\n darg = {}\n if self.allowed_enc_encs:\n darg['enc'] = self.allowed_enc_encs\n if self.allowed_enc_algs:\n darg['alg'] = self.allowed_enc_algs\n try:\n _decryptor = jwe_factory(token, **darg)\n except (KeyError, HeaderError):\n _decryptor = None\n\n if _decryptor:\n # Yes, try to decode\n _info = self._decrypt(_decryptor, token)\n _jwe_header = _decryptor.jwt.headers\n # Try to find out if the information encrypted was a signed JWT\n try:\n _content_type = _decryptor.jwt.headers['cty']\n except KeyError:\n _content_type = ''\n else:\n _content_type = 'jwt'\n _info = token\n\n # If I have reason to believe the information I have is a signed JWT\n if _content_type.lower() == 'jwt':\n # Check that is a signed JWT\n if self.allowed_sign_algs:\n _verifier = jws_factory(_info, 
alg=self.allowed_sign_algs)\n else:\n _verifier = jws_factory(_info)\n\n if _verifier:\n _info = self._verify(_verifier, _info)\n else:\n raise Exception()\n _jws_header = _verifier.jwt.headers\n else:\n # So, not a signed JWT\n try:\n # A JSON document ?\n _info = json.loads(_info)\n except JSONDecodeError: # Oh, no ! Not JSON\n return _info\n except TypeError:\n try:\n _info = as_unicode(_info)\n _info = json.loads(_info)\n except JSONDecodeError: # Oh, no ! Not JSON\n return _info\n\n # If I know what message class the info should be mapped into\n if self.msg_cls:\n _msg_cls = self.msg_cls\n else:\n try:\n # try to find a issuer specific message class\n _msg_cls = self.iss2msg_cls[_info['iss']]\n except KeyError:\n _msg_cls = None\n\n if _msg_cls:\n vp_args = {'skew': self.skew}\n if self.iss:\n vp_args['aud'] = self.iss\n _info = self.verify_profile(_msg_cls, _info, **vp_args)\n _info.jwe_header = _jwe_header\n _info.jws_header = _jws_header\n return _info\n else:\n return _info", "response": "Unpack a received signed or encrypted Json Web Token into a Message instance."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nproduce a JWS using the JWS Compact Serialization", "response": "def sign_compact(self, keys=None, protected=None, **kwargs):\n \"\"\"\n Produce a JWS using the JWS Compact Serialization\n\n :param keys: A dictionary of keys\n :param protected: The protected headers (a dictionary)\n :param kwargs: claims you want to add to the standard headers\n :return: A signed JSON Web Token\n \"\"\"\n\n _headers = self._header\n _headers.update(kwargs)\n\n key, xargs, _alg = self.alg_keys(keys, 'sig', protected)\n\n if \"typ\" in self:\n xargs[\"typ\"] = self[\"typ\"]\n\n _headers.update(xargs)\n jwt = JWSig(**_headers)\n if _alg == \"none\":\n return jwt.pack(parts=[self.msg, \"\"])\n\n # All other cases\n try:\n _signer = SIGNER_ALGS[_alg]\n except KeyError:\n raise UnknownAlgorithm(_alg)\n\n _input = 
jwt.pack(parts=[self.msg])\n\n if isinstance(key, AsymmetricKey):\n sig = _signer.sign(_input.encode(\"utf-8\"), key.private_key())\n else:\n sig = _signer.sign(_input.encode(\"utf-8\"), key.key)\n\n logger.debug(\"Signed message using key with kid=%s\" % key.kid)\n return \".\".join([_input, b64encode_item(sig).decode(\"utf-8\")])"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nverifies a compact JWT signature", "response": "def verify_compact(self, jws=None, keys=None, allow_none=False,\n sigalg=None):\n \"\"\"\n Verify a JWT signature\n\n :param jws: A signed JSON Web Token\n :param keys: A list of keys that can possibly be used to verify the\n signature\n :param allow_none: If signature algorithm 'none' is allowed\n :param sigalg: Expected sigalg\n :return: Dictionary with 2 keys 'msg' required, 'key' optional\n \"\"\"\n return self.verify_compact_verbose(jws, keys, allow_none, sigalg)['msg']"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef verify_compact_verbose(self, jws=None, keys=None, allow_none=False,\n sigalg=None):\n \"\"\"\n Verify a JWT signature and return dict with validation results\n\n :param jws: A signed JSON Web Token\n :param keys: A list of keys that can possibly be used to verify the\n signature\n :param allow_none: If signature algorithm 'none' is allowed\n :param sigalg: Expected sigalg\n :return: Dictionary with 2 keys 'msg' required, 'key' optional.\n The value of 'msg' is the unpacked and verified message.\n The value of 'key' is the key used to verify the message\n \"\"\"\n if jws:\n jwt = JWSig().unpack(jws)\n if len(jwt) != 3:\n raise WrongNumberOfParts(len(jwt))\n\n self.jwt = jwt\n elif not self.jwt:\n raise ValueError('Missing singed JWT')\n else:\n jwt = self.jwt\n\n try:\n _alg = jwt.headers[\"alg\"]\n except KeyError:\n _alg = None\n else:\n if _alg is None or _alg.lower() == \"none\":\n if allow_none:\n self.msg = jwt.payload()\n return 
{'msg': self.msg}\n else:\n raise SignerAlgError(\"none not allowed\")\n\n if \"alg\" in self and self['alg'] and _alg:\n if isinstance(self['alg'], list):\n if _alg not in self[\"alg\"] :\n raise SignerAlgError(\n \"Wrong signing algorithm, expected {} got {}\".format(\n self['alg'], _alg))\n elif _alg != self['alg']:\n raise SignerAlgError(\n \"Wrong signing algorithm, expected {} got {}\".format(\n self['alg'], _alg))\n\n if sigalg and sigalg != _alg:\n raise SignerAlgError(\"Expected {0} got {1}\".format(\n sigalg, jwt.headers[\"alg\"]))\n\n self[\"alg\"] = _alg\n\n if keys:\n _keys = self.pick_keys(keys)\n else:\n _keys = self.pick_keys(self._get_keys())\n\n if not _keys:\n if \"kid\" in self:\n raise NoSuitableSigningKeys(\n \"No key with kid: %s\" % (self[\"kid\"]))\n elif \"kid\" in self.jwt.headers:\n raise NoSuitableSigningKeys(\n \"No key with kid: %s\" % (self.jwt.headers[\"kid\"]))\n else:\n raise NoSuitableSigningKeys(\"No key for algorithm: %s\" % _alg)\n\n verifier = SIGNER_ALGS[_alg]\n\n for key in _keys:\n if isinstance(key, AsymmetricKey):\n _key = key.public_key()\n else:\n _key = key.key\n\n try:\n if not verifier.verify(jwt.sign_input(), jwt.signature(), _key):\n continue\n except (BadSignature, IndexError):\n pass\n except (ValueError, TypeError) as err:\n logger.warning('Exception \"{}\" caught'.format(err))\n else:\n logger.debug(\n \"Verified message using key with kid=%s\" % key.kid)\n self.msg = jwt.payload()\n self.key = key\n return {'msg': self.msg, 'key': key}\n\n raise BadSignature()", "response": "Verify a compact JWT signature and return a dictionary with validation results."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef sign_json(self, keys=None, headers=None, flatten=False):\n\n def create_signature(protected, unprotected):\n protected_headers = protected or {}\n # always protect the signing alg header\n protected_headers.setdefault(\"alg\", self.alg)\n 
_jws = JWS(self.msg, **protected_headers)\n encoded_header, payload, signature = _jws.sign_compact(\n protected=protected,\n keys=keys).split(\".\")\n signature_entry = {\"signature\": signature}\n if unprotected:\n signature_entry[\"header\"] = unprotected\n if encoded_header:\n signature_entry[\"protected\"] = encoded_header\n\n return signature_entry\n\n res = {\"payload\": b64e_enc_dec(self.msg, \"utf-8\", \"ascii\")}\n\n if headers is None:\n headers = [(dict(alg=self.alg), None)]\n\n if flatten and len(\n headers) == 1: # Flattened JWS JSON Serialization Syntax\n signature_entry = create_signature(*headers[0])\n res.update(signature_entry)\n else:\n res[\"signatures\"] = []\n for protected, unprotected in headers:\n signature_entry = create_signature(protected, unprotected)\n res[\"signatures\"].append(signature_entry)\n\n return json.dumps(res)", "response": "Produce JWS using the JWS JSON Serialization format."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _is_json_serialized_jws(self, json_jws):\n json_ser_keys = {\"payload\", \"signatures\"}\n flattened_json_ser_keys = {\"payload\", \"signature\"}\n if not json_ser_keys.issubset(\n json_jws.keys()) and not flattened_json_ser_keys.issubset(\n json_jws.keys()):\n return False\n return True", "response": "Check if we have a JSON serialized JWT."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _is_compact_jws(self, jws):\n try:\n jwt = JWSig().unpack(jws)\n except Exception as err:\n logger.warning('Could not parse JWS: {}'.format(err))\n return False\n\n if \"alg\" not in jwt.headers:\n return False\n\n if jwt.headers[\"alg\"] is None:\n jwt.headers[\"alg\"] = \"none\"\n\n if jwt.headers[\"alg\"] not in SIGNER_ALGS:\n logger.debug(\"UnknownSignerAlg: %s\" % jwt.headers[\"alg\"])\n return False\n\n self.jwt = jwt\n return True", "response": "Check if we have a compact signed JWS"} 
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef appropriate_for(self, usage, alg='HS256'):\n try:\n _use = USE[usage]\n except:\n raise ValueError('Unknown key usage')\n else:\n if not self.use or self.use == _use:\n if _use == 'sig':\n return self.get_key()\n else:\n return self.encryption_key(alg)\n\n raise WrongUsage(\"This key can't be used for {}\".format(usage))", "response": "Returns the appropriate key for the specified usage."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn an encryption key as per .", "response": "def encryption_key(self, alg, **kwargs):\n \"\"\"\n Return an encryption key as per\n http://openid.net/specs/openid-connect-core-1_0.html#Encryption\n\n :param alg: encryption algorithm\n :param kwargs:\n :return: encryption key as byte string\n \"\"\"\n if not self.key:\n self.deserialize()\n\n try:\n tsize = ALG2KEYLEN[alg]\n except KeyError:\n raise UnsupportedAlgorithm(alg)\n\n if tsize <= 32:\n # SHA256\n _enc_key = sha256_digest(self.key)[:tsize]\n elif tsize <= 48:\n # SHA384\n _enc_key = sha384_digest(self.key)[:tsize]\n elif tsize <= 64:\n # SHA512\n _enc_key = sha512_digest(self.key)[:tsize]\n else:\n raise JWKException(\"No support for symmetric keys > 512 bits\")\n\n logger.debug('Symmetric encryption key: {}'.format(\n as_unicode(b64e(_enc_key))))\n\n return _enc_key"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef sign(self, msg, key):\n\n if not isinstance(key, rsa.RSAPrivateKey):\n raise TypeError(\n \"The key must be an instance of rsa.RSAPrivateKey\")\n sig = key.sign(msg, self.padding, self.hash)\n return sig", "response": "Create a signature over a message using an RSA key."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef verify(self, msg, signature, key):\n\n if not 
isinstance(key, rsa.RSAPublicKey):\n raise TypeError(\n \"The public key must be an instance of RSAPublicKey\")\n try:\n key.verify(signature, msg, self.padding, self.hash)\n except InvalidSignature as err:\n raise BadSignature(str(err))\n except AttributeError:\n return False\n else:\n return True", "response": "Verifies whether the signature is a valid signature for the message."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the appropriate key instance for the specified usage.", "response": "def appropriate_for(self, usage, **kwargs):\n \"\"\"\n Make sure there is a key instance present that can be used for\n the specified usage.\n\n :param usage: Usage specification, one of [sign, verify, decrypt,\n encrypt]\n :param kwargs: Extra keyword arguments\n :return: Suitable key or None\n \"\"\"\n try:\n _use = USE[usage]\n except KeyError:\n raise ValueError('Unknown key usage')\n else:\n if usage in ['sign', 'decrypt']:\n if not self.use or _use == self.use:\n if self.priv_key:\n return self.priv_key\n return None\n else: # has to be one of ['encrypt', 'verify']\n if not self.use or _use == self.use:\n if self.pub_key:\n return self.pub_key\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef generate_and_store_rsa_key(key_size=2048, filename='rsa.key',\n passphrase=''):\n \"\"\"\n Generate a private RSA key and store a PEM representation of it in a\n file.\n\n :param key_size: The size of the key, default 2048 bytes.\n :param filename: The name of the file to which the key should be written\n :param passphrase: If the PEM representation should be protected with a\n pass phrase.\n :return: A\n cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey instance\n \"\"\"\n private_key = rsa.generate_private_key(public_exponent=65537,\n key_size=key_size,\n backend=default_backend())\n\n with open(filename, \"wb\") as keyfile:\n if passphrase:\n pem 
= private_key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.PKCS8,\n encryption_algorithm=serialization.BestAvailableEncryption(\n passphrase))\n else:\n pem = private_key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.PKCS8,\n encryption_algorithm=serialization.NoEncryption())\n keyfile.write(pem)\n keyfile.close()\n return private_key", "response": "Generate a private RSA key and store it in a file."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef import_public_rsa_key_from_file(filename):\n with open(filename, \"rb\") as key_file:\n public_key = serialization.load_pem_public_key(\n key_file.read(),\n backend=default_backend())\n return public_key", "response": "Read a public RSA key from a PEM file."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef import_rsa_key(pem_data):\n if not pem_data.startswith(PREFIX):\n pem_data = bytes('{}\\n{}\\n{}'.format(PREFIX, pem_data, POSTFIX),\n 'utf-8')\n else:\n pem_data = bytes(pem_data, 'utf-8')\n cert = x509.load_pem_x509_certificate(pem_data, default_backend())\n return cert.public_key()", "response": "Extract an RSA key from a PEM - encoded X. 509 certificate and return a rsa. 
RSAPublicKey instance"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns True if the RSA keys are in fact the same", "response": "def rsa_eq(key1, key2):\n \"\"\"\n Only works for RSAPublic Keys\n\n :param key1:\n :param key2:\n :return:\n \"\"\"\n pn1 = key1.public_numbers()\n pn2 = key2.public_numbers()\n # Check if two RSA keys are in fact the same\n if pn1 == pn2:\n return True\n else:\n return False"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef der_cert(der_data):\n if isinstance(der_data, str):\n der_data = bytes(der_data, 'utf-8')\n return x509.load_der_x509_certificate(der_data, default_backend())", "response": "Load a DER encoded certificate\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nloading a X509 cert into a key.", "response": "def load_x509_cert(url, httpc, spec2key, **get_args):\n \"\"\"\n Get and transform a X509 cert into a key.\n\n :param url: Where the X509 cert can be found\n :param httpc: HTTP client to use for fetching\n :param spec2key: A dictionary over keys already seen\n :param get_args: Extra key word arguments to the HTTP GET request\n :return: List of 2-tuples (keytype, key)\n \"\"\"\n try:\n r = httpc('GET', url, allow_redirects=True, **get_args)\n if r.status_code == 200:\n cert = str(r.text)\n try:\n public_key = spec2key[cert] # If I've already seen it\n except KeyError:\n public_key = import_rsa_key(cert)\n spec2key[cert] = public_key\n if isinstance(public_key, rsa.RSAPublicKey):\n return {\"rsa\": public_key}\n else:\n raise Exception(\"HTTP Get error: %s\" % r.status_code)\n except Exception as err: # not a RSA key\n logger.warning(\"Can't load key: %s\" % err)\n return []"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef cmp_public_numbers(pn1, pn2):\n if pn1.n == pn2.n:\n if pn1.e == pn2.e:\n return True\n return False", 
"response": "Compare two sets of public numbers."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncompare 2 sets of private numbers.", "response": "def cmp_private_numbers(pn1, pn2):\n \"\"\"\n Compare 2 sets of private numbers. This is for comparing 2\n private RSA keys.\n\n :param pn1: The set of values belonging to the 1st key\n :param pn2: The set of values belonging to the 2nd key\n :return: True is the sets are the same otherwise False.\n \"\"\"\n if not cmp_public_numbers(pn1.public_numbers, pn2.public_numbers):\n return False\n\n for param in ['d', 'p', 'q']:\n if getattr(pn1, param) != getattr(pn2, param):\n return False\n return True"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef x5t_calculation(cert):\n if isinstance(cert, str):\n der_cert = base64.b64decode(cert.encode('ascii'))\n else:\n der_cert = base64.b64decode(cert)\n\n return b64e(hashlib.sha1(der_cert).digest())", "response": "Calculate the base64url - encoded SHA - 1 thumbprint of the DER\n encoding of an X.509 certificate."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates a new RSA key pair and wraps it in a new one.", "response": "def new_rsa_key(key_size=2048, kid='', use='', public_exponent=65537):\n \"\"\"\n Creates a new RSA key pair and wraps it in a\n :py:class:`cryptojwt.jwk.rsa.RSAKey` instance\n\n :param key_size: The size of the key\n :param kid: The key ID\n :param use: What the is supposed to be used for. 
2 choices 'sig'/'enc'\n :param public_exponent: The value of the public exponent.\n :return: A :py:class:`cryptojwt.jwk.rsa.RSAKey` instance\n \"\"\"\n\n _key = rsa.generate_private_key(public_exponent=public_exponent,\n key_size=key_size,\n backend=default_backend())\n\n _rk = RSAKey(priv_key=_key, use=use, kid=kid)\n if not kid:\n _rk.add_kid()\n\n return _rk"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef deserialize(self):\n\n # first look for the public parts of a RSA key\n if self.n and self.e:\n try:\n numbers = {}\n # loop over all the parameters that define a RSA key\n for param in self.longs:\n item = getattr(self, param)\n if not item:\n continue\n else:\n try:\n val = int(deser(item))\n except Exception:\n raise\n else:\n numbers[param] = val\n\n if 'd' in numbers:\n self.priv_key = rsa_construct_private(numbers)\n self.pub_key = self.priv_key.public_key()\n else:\n self.pub_key = rsa_construct_public(numbers)\n except ValueError as err:\n raise DeSerializationNotPossible(\"%s\" % err)\n\n if self.x5c:\n _cert_chain = []\n for der_data in self.x5c:\n _cert_chain.append(der_cert(base64.b64decode(der_data)))\n\n if self.x5t: # verify the cert thumbprint\n if isinstance(self.x5t, bytes):\n _x5t = self.x5t\n else:\n _x5t = self.x5t.encode('ascii')\n if _x5t != x5t_calculation(self.x5c[0]):\n raise DeSerializationNotPossible(\n \"The thumbprint 'x5t' does not match the certificate.\")\n\n if self.pub_key:\n if not rsa_eq(self.pub_key, _cert_chain[0].public_key()):\n raise ValueError(\n 'key described by components and key in x5c not equal')\n else:\n self.pub_key = _cert_chain[0].public_key()\n\n self._serialize(self.pub_key)\n if len(self.x5c) > 1: # verify chain\n pass\n\n if not self.priv_key and not self.pub_key:\n raise DeSerializationNotPossible()", "response": "This method takes a text based representation of an RSA key and instantiates an RSAPrivateKey or RSAPublicKey instance."} 
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef serialize(self, private=False):\n if not self.priv_key and not self.pub_key:\n raise SerializationNotPossible()\n\n res = self.common()\n\n public_longs = list(set(self.public_members) & set(self.longs))\n for param in public_longs:\n item = getattr(self, param)\n if item:\n res[param] = item\n\n if private:\n for param in self.longs:\n if not private and param in [\"d\", \"p\", \"q\", \"dp\", \"dq\", \"di\",\n \"qi\"]:\n continue\n item = getattr(self, param)\n if item:\n res[param] = item\n if self.x5c:\n res['x5c'] = [x.decode('utf-8') for x in self.x5c]\n\n return res", "response": "This method is used to serialize a JWK instance."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nloads a RSA key into this object.", "response": "def load_key(self, key):\n \"\"\"\n Load a RSA key. Try to serialize the key before binding it to this\n instance.\n\n :param key: An RSA key instance\n \"\"\"\n\n self._serialize(key)\n if isinstance(key, rsa.RSAPrivateKey):\n self.priv_key = key\n self.pub_key = key.public_key()\n else:\n self.pub_key = key\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef sign(self, msg, key):\n\n if not isinstance(key, ec.EllipticCurvePrivateKey):\n raise TypeError(\n \"The private key must be an instance of \"\n \"ec.EllipticCurvePrivateKey\")\n\n self._cross_check(key.public_key())\n num_bits = key.curve.key_size\n num_bytes = (num_bits + 7) // 8\n asn1sig = key.sign(msg, ec.ECDSA(self.hash_algorithm()))\n # Cryptography returns ASN.1-encoded signature data; decode as JWS\n # uses raw signatures (r||s)\n (r, s) = decode_dss_signature(asn1sig)\n return int_to_bytes(r, num_bytes) + int_to_bytes(s, num_bytes)", "response": "Create a signature over a message using an elliptic curve key."} {"SOURCE": "codesearchnet", 
"instruction": "Write a Python 3 script to\nverify a message signature and return True if the signature is valid.", "response": "def verify(self, msg, sig, key):\n \"\"\"\n Verify a message signature\n\n :param msg: The message\n :param sig: A signature\n :param key: A ec.EllipticCurvePublicKey to use for the verification.\n :raises: BadSignature if the signature can't be verified.\n :return: True\n \"\"\"\n if not isinstance(key, ec.EllipticCurvePublicKey):\n raise TypeError(\n \"The public key must be an instance of \"\n \"ec.EllipticCurvePublicKey\")\n self._cross_check(key)\n\n num_bits = key.curve.key_size\n num_bytes = (num_bits + 7) // 8\n if len(sig) != 2 * num_bytes:\n raise ValueError('Invalid signature')\n\n try:\n # cryptography uses ASN.1-encoded signature data; split JWS\n # signature (r||s) and encode before verification\n (r, s) = self._split_raw_signature(sig)\n asn1sig = encode_dss_signature(r, s)\n key.verify(asn1sig, msg, ec.ECDSA(self.hash_algorithm()))\n except InvalidSignature as err:\n raise BadSignature(err)\n else:\n return True"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncrossing check the curve in the private key and the algorithm define the curve.", "response": "def _cross_check(self, pub_key):\n \"\"\"\n In Ecdsa, both the key and the algorithm define the curve.\n Therefore, we must cross check them to make sure they're the same.\n\n :param key:\n :raises: ValueError is the curves are not the same\n \"\"\"\n if self.curve_name != pub_key.curve.name:\n raise ValueError(\n \"The curve in private key {} and in algorithm {} don't \"\n \"match\".format(pub_key.curve.name, self.curve_name))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _split_raw_signature(sig):\n c_length = len(sig) // 2\n r = int_from_bytes(sig[:c_length], byteorder='big')\n s = int_from_bytes(sig[c_length:], byteorder='big')\n return r, s", "response": "Split raw 
signature into components r s"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef left_hash(msg, func=\"HS256\"):\n if func == 'HS256':\n return as_unicode(b64e(sha256_digest(msg)[:16]))\n elif func == 'HS384':\n return as_unicode(b64e(sha384_digest(msg)[:24]))\n elif func == 'HS512':\n return as_unicode(b64e(sha512_digest(msg)[:32]))", "response": "Calculate the left hash of the message over which the hash should be calculated."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngoes from algorithm name to key type.", "response": "def alg2keytype(alg):\n \"\"\"\n Go from algorithm name to key type.\n\n :param alg: The algorithm name\n :return: The key type\n \"\"\"\n if not alg or alg.lower() == \"none\":\n return \"none\"\n elif alg.startswith(\"RS\") or alg.startswith(\"PS\"):\n return \"RSA\"\n elif alg.startswith(\"HS\") or alg.startswith(\"A\"):\n return \"oct\"\n elif alg.startswith(\"ES\") or alg.startswith(\"ECDH-ES\"):\n return \"EC\"\n else:\n return None"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef parse_rsa_algorithm(algorithm):\n\n if algorithm == \"RS256\":\n return hashes.SHA256(), padding.PKCS1v15()\n elif algorithm == \"RS384\":\n return hashes.SHA384(), padding.PKCS1v15()\n elif algorithm == \"RS512\":\n return hashes.SHA512(), padding.PKCS1v15()\n elif algorithm == \"PS256\":\n return (hashes.SHA256(),\n padding.PSS(\n mgf=padding.MGF1(hashes.SHA256()),\n salt_length=padding.PSS.MAX_LENGTH))\n elif algorithm == \"PS384\":\n return (hashes.SHA384(),\n padding.PSS(\n mgf=padding.MGF1(hashes.SHA384()),\n salt_length=padding.PSS.MAX_LENGTH))\n elif algorithm == \"PS512\":\n return (hashes.SHA512(),\n padding.PSS(\n mgf=padding.MGF1(hashes.SHA512()),\n salt_length=padding.PSS.MAX_LENGTH))\n else:\n raise UnsupportedAlgorithm(\"Unknown algorithm: {}\".format(algorithm))", "response": "Parses 
a RSA algorithm and returns a tuple."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef ecdh_derive_key(key, epk, apu, apv, alg, dk_len):\n # Compute shared secret\n shared_key = key.exchange(ec.ECDH(), epk)\n # Derive the key\n # AlgorithmID || PartyUInfo || PartyVInfo || SuppPubInfo\n otherInfo = bytes(alg) + \\\n struct.pack(\"!I\", len(apu)) + apu + \\\n struct.pack(\"!I\", len(apv)) + apv + \\\n struct.pack(\"!I\", dk_len)\n return concat_sha256(shared_key, dk_len, otherInfo)", "response": "Derive the key from the ECDH key."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nencrypts the message using an Elliptic curve key and returns the encrypted JWT.", "response": "def encrypt(self, key=None, iv=\"\", cek=\"\", **kwargs):\n \"\"\"\n Produces a JWE as defined in RFC7516 using an Elliptic curve key\n\n :param key: *Not used>, only there to present the same API as\n JWE_RSA and JWE_SYM\n :param iv: Initialization vector\n :param cek: Content master key\n :param kwargs: Extra keyword arguments\n :return: An encrypted JWT\n \"\"\"\n _msg = as_bytes(self.msg)\n\n _args = self._dict\n try:\n _args[\"kid\"] = kwargs[\"kid\"]\n except KeyError:\n pass\n\n if 'params' in kwargs:\n if 'apu' in kwargs['params']:\n _args['apu'] = kwargs['params']['apu']\n if 'apv' in kwargs['params']:\n _args['apv'] = kwargs['params']['apv']\n if 'epk' in kwargs['params']:\n _args['epk'] = kwargs['params']['epk']\n\n jwe = JWEnc(**_args)\n ctxt, tag, cek = super(JWE_EC, self).enc_setup(\n self[\"enc\"], _msg, auth_data=jwe.b64_encode_header(), key=cek,\n iv=iv)\n if 'encrypted_key' in kwargs:\n return jwe.pack(parts=[kwargs['encrypted_key'], iv, ctxt, tag])\n return jwe.pack(parts=[iv, ctxt, tag])"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef encrypt(self, key, iv=\"\", cek=\"\", **kwargs):\n\n _msg = as_bytes(self.msg)\n if 
\"zip\" in self:\n if self[\"zip\"] == \"DEF\":\n _msg = zlib.compress(_msg)\n else:\n raise ParameterError(\"Zip has unknown value: %s\" % self[\"zip\"])\n\n kwarg_cek = cek or None\n\n _enc = self[\"enc\"]\n iv = self._generate_iv(_enc, iv)\n cek = self._generate_key(_enc, cek)\n self[\"cek\"] = cek\n\n logger.debug(\"cek: %s, iv: %s\" % ([c for c in cek], [c for c in iv]))\n\n _encrypt = RSAEncrypter(self.with_digest).encrypt\n\n _alg = self[\"alg\"]\n if kwarg_cek:\n jwe_enc_key = ''\n elif _alg == \"RSA-OAEP\":\n jwe_enc_key = _encrypt(cek, key, 'pkcs1_oaep_padding')\n elif _alg == \"RSA-OAEP-256\":\n jwe_enc_key = _encrypt(cek, key, 'pkcs1_oaep_256_padding')\n elif _alg == \"RSA1_5\":\n jwe_enc_key = _encrypt(cek, key)\n else:\n raise NotSupportedAlgorithm(_alg)\n\n jwe = JWEnc(**self.headers())\n\n try:\n _auth_data = kwargs['auth_data']\n except KeyError:\n _auth_data = jwe.b64_encode_header()\n\n ctxt, tag, key = self.enc_setup(_enc, _msg, key=cek, iv=iv,\n auth_data=_auth_data)\n return jwe.pack(parts=[jwe_enc_key, iv, ctxt, tag])", "response": "This method encrypts the message and returns a JWE encoded payload."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef decrypt(self, token, key, cek=None):\n if not isinstance(token, JWEnc):\n jwe = JWEnc().unpack(token)\n else:\n jwe = token\n\n self.jwt = jwe.encrypted_key()\n jek = jwe.encrypted_key()\n\n _decrypt = RSAEncrypter(self.with_digest).decrypt\n\n _alg = jwe.headers[\"alg\"]\n if cek:\n pass\n elif _alg == \"RSA-OAEP\":\n cek = _decrypt(jek, key, 'pkcs1_oaep_padding')\n elif _alg == \"RSA-OAEP-256\":\n cek = _decrypt(jek, key, 'pkcs1_oaep_256_padding')\n elif _alg == \"RSA1_5\":\n cek = _decrypt(jek, key)\n else:\n raise NotSupportedAlgorithm(_alg)\n\n self[\"cek\"] = cek\n enc = jwe.headers[\"enc\"]\n if enc not in SUPPORTED[\"enc\"]:\n raise NotSupportedAlgorithm(enc)\n\n auth_data = jwe.b64_protected_header()\n\n msg = 
self._decrypt(enc, cek, jwe.ciphertext(),\n auth_data=auth_data,\n iv=jwe.initialization_vector(),\n tag=jwe.authentication_tag())\n\n if \"zip\" in jwe.headers and jwe.headers[\"zip\"] == \"DEF\":\n msg = zlib.decompress(msg)\n\n return msg", "response": "Decrypts a JWT and returns a decrypted message."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nencrypts a message with a set of keys.", "response": "def encrypt(self, keys=None, cek=\"\", iv=\"\", **kwargs):\n \"\"\"\n Encrypt a payload\n\n :param keys: A set of possibly usable keys\n :param cek: Content master key\n :param iv: Initialization vector\n :param kwargs: Extra key word arguments\n :return: Encrypted message\n \"\"\"\n\n _alg = self[\"alg\"]\n\n # Find Usable Keys\n if keys:\n keys = self.pick_keys(keys, use=\"enc\")\n else:\n keys = self.pick_keys(self._get_keys(), use=\"enc\")\n\n if not keys:\n logger.error(KEY_ERR.format(_alg))\n raise NoSuitableEncryptionKey(_alg)\n\n # Determine Encryption Class by Algorithm\n if _alg in [\"RSA-OAEP\", \"RSA-OAEP-256\", \"RSA1_5\"]:\n encrypter = JWE_RSA(self.msg, **self._dict)\n elif _alg.startswith(\"A\") and _alg.endswith(\"KW\"):\n encrypter = JWE_SYM(self.msg, **self._dict)\n else: # _alg.startswith(\"ECDH-ES\"):\n encrypter = JWE_EC(**self._dict)\n cek, encrypted_key, iv, params, eprivk = encrypter.enc_setup(\n self.msg, key=keys[0], **self._dict)\n kwargs[\"encrypted_key\"] = encrypted_key\n kwargs[\"params\"] = params\n\n if cek:\n kwargs[\"cek\"] = cek\n\n if iv:\n kwargs[\"iv\"] = iv\n\n for key in keys:\n if isinstance(key, SYMKey):\n _key = key.key\n elif isinstance(key, ECKey):\n _key = key.public_key()\n else: # isinstance(key, RSAKey):\n _key = key.public_key()\n\n if key.kid:\n encrypter[\"kid\"] = key.kid\n\n try:\n token = encrypter.encrypt(key=_key, **kwargs)\n self[\"cek\"] = encrypter.cek if 'cek' in encrypter else None\n except TypeError as err:\n raise err\n else:\n logger.debug(\n 
\"Encrypted message using key with kid={}\".format(key.kid))\n return token"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef b64d(b):\n\n cb = b.rstrip(b\"=\") # shouldn't but there you are\n\n # Python's base64 functions ignore invalid characters, so we need to\n # check for them explicitly.\n if not _b64_re.match(cb):\n raise BadSyntax(cb, \"base64-encoded data contains illegal characters\")\n\n if cb == b:\n b = add_padding(b)\n\n return base64.urlsafe_b64decode(b)", "response": "Decode some base64 - encoded bytes."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef deser(val):\n if isinstance(val, str):\n _val = val.encode(\"utf-8\")\n else:\n _val = val\n\n return base64_to_long(_val)", "response": "Deserialize from a string representation of an long integer to a python representation of a long integer."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef common(self):\n res = {\"kty\": self.kty}\n if self.use:\n res[\"use\"] = self.use\n if self.kid:\n res[\"kid\"] = self.kid\n if self.alg:\n res[\"alg\"] = self.alg\n return res", "response": "Return the set of parameters that are common to all types of keys."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nverifying that the information gathered from the on - the - wire representation is of the right type.", "response": "def verify(self):\n \"\"\"\n Verify that the information gathered from the on-the-wire\n representation is of the right type.\n This is supposed to be run before the info is deserialized.\n\n :return: True/False\n \"\"\"\n for param in self.longs:\n item = getattr(self, param)\n if not item or isinstance(item, str):\n continue\n\n if isinstance(item, bytes):\n item = item.decode('utf-8')\n setattr(self, param, item)\n\n try:\n _ = base64url_to_long(item)\n except Exception:\n 
return False\n else:\n if [e for e in ['+', '/', '='] if e in item]:\n return False\n\n if self.kid:\n if not isinstance(self.kid, str):\n raise ValueError(\"kid of wrong value type\")\n return True"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate a thumbprint of the Key instance.", "response": "def thumbprint(self, hash_function, members=None):\n \"\"\"\n Create a thumbprint of the key following the outline in\n https://tools.ietf.org/html/draft-jones-jose-jwk-thumbprint-01\n\n :param hash_function: A hash function to use for hashing the\n information\n :param members: Which attributes of the Key instance that should\n be included when computing the hash value. If members is undefined\n then all the required attributes are used.\n :return: A base64 encode hash over a set of Key attributes\n \"\"\"\n if members is None:\n members = self.required\n\n members.sort()\n ser = self.serialize()\n _se = []\n for elem in members:\n try:\n _val = ser[elem]\n except KeyError: # should never happen with the required set\n pass\n else:\n if isinstance(_val, bytes):\n _val = as_unicode(_val)\n _se.append('\"{}\":{}'.format(elem, json.dumps(_val)))\n _json = '{{{}}}'.format(','.join(_se))\n\n return b64e(DIGEST_HASH[hash_function](_json))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef concat_sha256(secret, dk_len, other_info):\n dkm = b''\n dk_bytes = int(ceil(dk_len / 8.0))\n counter = 0\n while len(dkm) < dk_bytes:\n counter += 1\n counter_bytes = struct.pack(\"!I\", counter)\n digest = hashes.Hash(hashes.SHA256(), backend=default_backend())\n digest.update(counter_bytes)\n digest.update(secret)\n digest.update(other_info)\n dkm += digest.finalize()\n return dkm[:dk_bytes]", "response": "This function calculates the concatenation of the secret and other_info."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef 
pick_keys(self, keys, use=\"\", alg=\"\"):\n if not alg:\n alg = self[\"alg\"]\n\n if alg == \"none\":\n return []\n\n _k = self.alg2keytype(alg)\n if _k is None:\n logger.error(\"Unknown algorithm '%s'\" % alg)\n raise ValueError('Unknown cryptography algorithm')\n\n logger.debug(\"Picking key by key type={0}\".format(_k))\n _kty = [_k.lower(), _k.upper(), _k.lower().encode(\"utf-8\"),\n _k.upper().encode(\"utf-8\")]\n _keys = [k for k in keys if k.kty in _kty]\n try:\n _kid = self[\"kid\"]\n except KeyError:\n try:\n _kid = self.jwt.headers[\"kid\"]\n except (AttributeError, KeyError):\n _kid = None\n\n logger.debug(\"Picking key based on alg={0}, kid={1} and use={2}\".format(\n alg, _kid, use))\n\n pkey = []\n for _key in _keys:\n logger.debug(\n \"Picked: kid:{}, use:{}, kty:{}\".format(\n _key.kid, _key.use, _key.kty))\n if _kid:\n if _kid != _key.kid:\n continue\n\n if use and _key.use and _key.use != use:\n continue\n\n if alg and _key.alg and _key.alg != alg:\n continue\n\n pkey.append(_key)\n\n return pkey", "response": "Returns a list of JWK instances that fulfill the requirements of the keys."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the current time in ms since the Unix Epoch.", "response": "def request_time(self, req):\n \"\"\"Return the current time in ms since the Unix Epoch.\"\"\"\n r = time.time()\n self._time_result.set_value(r)\n return (\"ok\", r)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef request_eval(self, req, expression):\n r = str(eval(expression))\n self._eval_result.set_value(r)\n return (\"ok\", r)", "response": "Evaluate a Python expression."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\npick a random fruit.", "response": "def request_pick_fruit(self, req):\n \"\"\"Pick a random fruit.\"\"\"\n r = random.choice(self.FRUIT + [None])\n if r is None:\n return 
(\"fail\", \"No fruit.\")\n delay = random.randrange(1,5)\n req.inform(\"Picking will take %d seconds\" % delay)\n\n def pick_handler():\n self._fruit_result.set_value(r)\n req.reply(\"ok\", r)\n\n handle_timer = threading.Timer(delay, pick_handler)\n handle_timer.start()\n\n raise AsyncReply"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting sensor status to inactive", "response": "def request_set_sensor_inactive(self, req, sensor_name):\n \"\"\"Set sensor status to inactive\"\"\"\n sensor = self.get_sensor(sensor_name)\n ts, status, value = sensor.read()\n sensor.set_value(value, sensor.INACTIVE, ts)\n return('ok',)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset sensor status to unreachable", "response": "def request_set_sensor_unreachable(self, req, sensor_name):\n \"\"\"Set sensor status to unreachable\"\"\"\n sensor = self.get_sensor(sensor_name)\n ts, status, value = sensor.read()\n sensor.set_value(value, sensor.UNREACHABLE, ts)\n return('ok',)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef request_raw_reverse(self, req, msg):\n # msg is a katcp.Message.request object\n reversed_args = msg.arguments[::-1]\n # req.make_reply() makes a katcp.Message.reply using the correct request\n # name and message ID\n return req.make_reply(*reversed_args)", "response": "This is the raw request handler that does not demonstrate the calling convention."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a new future that will resolve with a transformed value.", "response": "def transform_future(transformation, future):\n \"\"\"Returns a new future that will resolve with a transformed value\n\n Takes the resolution value of `future` and applies transformation(*future.result())\n to it before setting the result of the new future with the transformed value. 
If\n future() resolves with an exception, it is passed through to the new future.\n\n Assumes `future` is a tornado Future.\n\n \"\"\"\n new_future = tornado_Future()\n def _transform(f):\n assert f is future\n if f.exc_info() is not None:\n new_future.set_exc_info(f.exc_info())\n else:\n try:\n new_future.set_result(transformation(f.result()))\n except Exception:\n # An exception here idicates that the transformation was unsuccesful\n new_future.set_exc_info(sys.exc_info())\n\n future.add_done_callback(_transform)\n return new_future"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef list_sensors(parent_class, sensor_items, filter, strategy, status,\n use_python_identifiers, tuple, refresh):\n \"\"\"Helper for implementing :meth:`katcp.resource.KATCPResource.list_sensors`\n\n Parameters\n ----------\n\n sensor_items : tuple of sensor-item tuples\n As would be returned the items() method of a dict containing KATCPSensor objects\n keyed by Python-identifiers.\n parent_class: KATCPClientResource or KATCPClientResourceContainer\n Is used for prefix calculation\n Rest of parameters as for :meth:`katcp.resource.KATCPResource.list_sensors`\n \"\"\"\n filter_re = re.compile(filter)\n found_sensors = []\n none_strat = resource.normalize_strategy_parameters('none')\n sensor_dict = dict(sensor_items)\n for sensor_identifier in sorted(sensor_dict.keys()):\n sensor_obj = sensor_dict[sensor_identifier]\n search_name = (sensor_identifier if use_python_identifiers\n else sensor_obj.name)\n name_match = filter_re.search(search_name)\n # Only include sensors with strategies\n strat_match = not strategy or sensor_obj.sampling_strategy != none_strat\n if name_match and strat_match:\n if refresh:\n # First refresh the sensor reading\n yield sensor_obj.get_value()\n # Determine the sensorname prefix:\n # parent_name. 
except for aggs when in KATCPClientResourceContinaer\n prefix = \"\"\n if isinstance(parent_class, KATCPClientResourceContainer):\n if sensor_obj.name.startswith(\"agg_\"):\n prefix = \"\"\n else:\n prefix = sensor_obj.parent_name + \".\"\n if not status or (sensor_obj.reading.status in status):\n # Only include sensors of the given status\n if tuple:\n # (sensor.name, sensor.value, sensor.value_seconds, sensor.type, sensor.units, sensor.update_seconds, sensor.status, strategy_and_params)\n found_sensors.append((\n prefix+sensor_obj.name,\n sensor_obj.reading.value,\n sensor_obj.reading.timestamp,\n sensor_obj.type,\n sensor_obj.units,\n sensor_obj.reading.received_timestamp,\n sensor_obj.reading.status,\n sensor_obj.sampling_strategy\n ))\n else:\n found_sensors.append(resource.SensorResultTuple(\n object=sensor_obj,\n name=prefix+sensor_obj.name,\n python_identifier=sensor_identifier,\n description=sensor_obj.description,\n units=sensor_obj.units,\n type=sensor_obj.type,\n reading=sensor_obj.reading))\n raise tornado.gen.Return(found_sensors)", "response": "A generator method that returns a list of sensors for a given parent class and a given list of sensor - items."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef monitor_resource_sync_state(resource, callback, exit_event=None):\n exit_event = exit_event or AsyncEvent()\n callback(False) # Initial condition, assume resource is not connected\n\n while not exit_event.is_set():\n # Wait for resource to be synced\n yield until_any(resource.until_synced(), exit_event.until_set())\n if exit_event.is_set():\n break # If exit event is set we stop without calling callback\n else:\n callback(True)\n\n # Wait for resource to be un-synced\n yield until_any(resource.until_not_synced(), exit_event.until_set())\n if exit_event.is_set():\n break # If exit event is set we stop without calling callback\n else:\n callback(False)", "response": "Coroutine that monitors a 
KATCPResource s sync state."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef wrapped_request(self, request, *args, **kwargs):\n f = tornado_Future()\n try:\n use_mid = kwargs.get('use_mid')\n timeout = kwargs.get('timeout')\n mid = kwargs.get('mid')\n msg = Message.request(request, *args, mid=mid)\n except Exception:\n f.set_exc_info(sys.exc_info())\n return f\n return transform_future(self.reply_wrapper,\n self.katcp_client.future_request(msg, timeout, use_mid))", "response": "Create and send a request to the server."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef until_state(self, state, timeout=None):\n return self._state.until_state(state, timeout=timeout)", "response": "Returns a Future that resolves when a certain client state is attained."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef start(self):\n # TODO (NM 2015-03-12) Some checking to prevent multiple calls to start()\n host, port = self.address\n ic = self._inspecting_client = self.inspecting_client_factory(\n host, port, self._ioloop_set_to)\n self.ioloop = ic.ioloop\n if self._preset_protocol_flags:\n ic.preset_protocol_flags(self._preset_protocol_flags)\n ic.katcp_client.auto_reconnect_delay = self.auto_reconnect_delay\n ic.set_state_callback(self._inspecting_client_state_callback)\n ic.request_factory = self._request_factory\n self._sensor_manager = KATCPClientResourceSensorsManager(\n ic, self.name, logger=self._logger)\n ic.handle_sensor_value()\n ic.sensor_factory = self._sensor_manager.sensor_factory\n\n # Steal some methods from _sensor_manager\n self.reapply_sampling_strategies = self._sensor_manager.reapply_sampling_strategies\n log_future_exceptions(self._logger, ic.connect())", "response": "Start the client and connect to the KATCP server."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the 
documentation for the following Python 3 function\ndef inspecting_client_factory(self, host, port, ioloop_set_to):\n return ReplyWrappedInspectingClientAsync(\n host, port, ioloop=ioloop_set_to, auto_reconnect=self.auto_reconnect)", "response": "Return an instance of ReplyWrappedInspectingClientAsync or similar\n "} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef until_not_synced(self, timeout=None):\n not_synced_states = [state for state in self._state.valid_states\n if state != 'synced']\n not_synced_futures = [self._state.until_state(state)\n for state in not_synced_states]\n return until_any(*not_synced_futures, timeout=timeout)", "response": "Convenience method to wait until client is not synced"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset a strategy for all sensors matching the filter", "response": "def set_sampling_strategies(self, filter, strategy_and_parms):\n \"\"\"Set a strategy for all sensors matching the filter, including unseen sensors\n The strategy should persist across sensor disconnect/reconnect.\n\n filter : str\n Filter for sensor names\n strategy_and_params : seq of str or str\n As tuple contains (<strat_name>, [<strat_parm1>, ...]) where the strategy\n names and parameters are as defined by the KATCP spec. 
As str contains the\n same elements in space-separated form.\n\n Returns\n -------\n done : tornado Future\n Resolves when done\n \"\"\"\n sensor_list = yield self.list_sensors(filter=filter)\n sensor_dict = {}\n for sens in sensor_list:\n # Set the strategy on each sensor\n try:\n sensor_name = sens.object.normalised_name\n yield self.set_sampling_strategy(sensor_name, strategy_and_parms)\n sensor_dict[sensor_name] = strategy_and_parms\n except Exception as exc:\n self._logger.exception(\n 'Unhandled exception trying to set sensor strategies {!r} for {} ({})'\n .format(strategy_and_parms, sens, exc))\n sensor_dict[sensor_name] = None\n # Otherwise, depend on self._add_sensors() to handle it from the cache when the sensor appears\\\n raise tornado.gen.Return(sensor_dict)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsetting a strategy for a specific sensor.", "response": "def set_sampling_strategy(self, sensor_name, strategy_and_parms):\n \"\"\"Set a strategy for a sensor even if it is not yet known.\n The strategy should persist across sensor disconnect/reconnect.\n\n sensor_name : str\n Name of the sensor\n strategy_and_params : seq of str or str\n As tuple contains (<strat_name>, [<strat_parm1>, ...]) where the strategy\n names and parameters are as defined by the KATCP spec. As str contains the\n same elements in space-separated form.\n\n Returns\n -------\n done : tornado Future\n Resolves when done\n \"\"\"\n sensor_name = resource.escape_name(sensor_name)\n sensor_obj = dict.get(self._sensor, sensor_name)\n self._sensor_strategy_cache[sensor_name] = strategy_and_parms\n sensor_dict = {}\n self._logger.debug(\n 'Cached strategy {} for sensor {}'\n .format(strategy_and_parms, sensor_name))\n if sensor_obj:\n # The sensor exists, so set the strategy and continue. 
Log errors,\n # but don't raise anything\n try:\n yield sensor_obj.set_sampling_strategy(strategy_and_parms)\n sensor_dict[sensor_name] = strategy_and_parms\n except Exception as exc:\n self._logger.exception(\n 'Unhandled exception trying to set sensor strategy {!r} for sensor {} ({})'\n .format(strategy_and_parms, sensor_name, exc))\n sensor_dict[sensor_name] = str(exc)\n # Otherwise, depend on self._add_sensors() to handle it from the cache when the sensor appears\n raise tornado.gen.Return(sensor_dict)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ndropping the sampling strategy for the named sensor from the cache.", "response": "def drop_sampling_strategy(self, sensor_name):\n \"\"\"Drop the sampling strategy for the named sensor from the cache\n\n Calling :meth:`set_sampling_strategy` requires the requested strategy to\n be memorised so that it can automatically be reapplied. This method\n causes the strategy to be forgotten. There is no change to the current\n strategy. No error is raised if there is no strategy to drop.\n\n Parameters\n ----------\n\n sensor_name : str\n Name of the sensor\n\n \"\"\"\n sensor_name = resource.escape_name(sensor_name)\n # drop from both the internal cache, and the sensor manager's cache\n self._sensor_strategy_cache.pop(sensor_name, None)\n self._sensor_manager.drop_sampling_strategy(sensor_name)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset a sensor listener for a named sensor.", "response": "def set_sensor_listener(self, sensor_name, listener):\n \"\"\"Set a sensor listener for a sensor even if it is not yet known\n The listener registration should persist across sensor disconnect/reconnect.\n\n sensor_name : str\n Name of the sensor\n listener : callable\n Listening callable that will be registered on the named sensor when it becomes\n available. 
Callable as for :meth:`KATCPSensor.register_listener`\n\n \"\"\"\n\n sensor_name = resource.escape_name(sensor_name)\n sensor_obj = dict.get(self._sensor, sensor_name)\n self._sensor_listener_cache[sensor_name].append(listener)\n sensor_dict = {}\n self._logger.debug(\n 'Cached listener {} for sensor {}'\n .format(listener, sensor_name))\n if sensor_obj:\n # The sensor exists, so register the listener and continue.\n try:\n sensor_obj.register_listener(listener, reading=True)\n sensor_dict[sensor_name] = listener\n self._logger.debug(\n 'Registered listener {} for sensor {}'\n .format(listener, sensor_name))\n except Exception as exc:\n self._logger.exception(\n 'Unhandled exception trying to set sensor listener {} for sensor {} ({})'\n .format(listener, sensor_name, exc))\n sensor_dict[sensor_name] = str(exc)\n # Otherwise, depend on self._add_sensors() to handle it from the cache when the sensor appears\n raise tornado.gen.Return(sensor_dict)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _get_strategy_cache_key(self, sensor_name):\n # try for a direct match first, otherwise do full comparison\n if sensor_name in self._strategy_cache:\n return sensor_name\n else:\n escaped_name = resource.escape_name(sensor_name)\n for key in self._strategy_cache:\n escaped_key = resource.escape_name(key)\n if escaped_key == escaped_name:\n return key\n # no match\n return sensor_name", "response": "Lookup the cache key for a given sensor name in the strategy cache."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the current sampling strategy for the named sensor.", "response": "def get_sampling_strategy(self, sensor_name):\n \"\"\"Get the current sampling strategy for the named sensor\n\n Parameters\n ----------\n\n sensor_name : str\n Name of the sensor (normal or escaped form)\n\n Returns\n -------\n\n strategy : tuple of str\n contains (<strat_name>, [<strat_parm1>, ...]) where the strategy 
names and\n parameters are as defined by the KATCP spec\n \"\"\"\n cache_key = self._get_strategy_cache_key(sensor_name)\n cached = self._strategy_cache.get(cache_key)\n if not cached:\n return resource.normalize_strategy_parameters('none')\n else:\n return cached"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef set_sampling_strategy(self, sensor_name, strategy_and_params):\n try:\n strategy_and_params = resource.normalize_strategy_parameters(\n strategy_and_params)\n self._strategy_cache[sensor_name] = strategy_and_params\n reply = yield self._inspecting_client.wrapped_request(\n 'sensor-sampling', sensor_name, *strategy_and_params)\n if not reply.succeeded:\n raise KATCPSensorError('Error setting strategy for sensor {0}: \\n'\n '{1!s}'.format(sensor_name, reply))\n sensor_strategy = (True, strategy_and_params)\n except Exception as e:\n self._logger.exception('Exception found!')\n sensor_strategy = (False, str(e))\n raise tornado.gen.Return(sensor_strategy)", "response": "Set the sampling strategy for the named sensor."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef drop_sampling_strategy(self, sensor_name):\n cache_key = self._get_strategy_cache_key(sensor_name)\n self._strategy_cache.pop(cache_key, None)", "response": "Drop the sampling strategy for the named sensor from the cache."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef reapply_sampling_strategies(self):\n check_sensor = self._inspecting_client.future_check_sensor\n for sensor_name, strategy in list(self._strategy_cache.items()):\n try:\n sensor_exists = yield check_sensor(sensor_name)\n if not sensor_exists:\n self._logger.warn('Did not set strategy for non-existing sensor {}'\n .format(sensor_name))\n continue\n\n result = yield self.set_sampling_strategy(sensor_name, strategy)\n except KATCPSensorError as e:\n 
self._logger.error('Error reapplying strategy for sensor {0}: {1!s}'\n .format(sensor_name, e))\n except Exception:\n self._logger.exception('Unhandled exception reapplying strategy for '\n 'sensor {}'.format(sensor_name), exc_info=True)", "response": "Reapply all sensor strategies using cached values"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nissue the wrapped request to the server.", "response": "def issue_request(self, *args, **kwargs):\n \"\"\"Issue the wrapped request to the server.\n\n Parameters\n ----------\n *args : list of objects\n Arguments to pass on to the request.\n\n Keyword Arguments\n -----------------\n timeout : float or None, optional\n Timeout after this amount of seconds (keyword argument).\n mid : None or int, optional\n Message identifier to use for the request message. If None, use either\n auto-incrementing value or no mid depending on the KATCP protocol version\n (mid's were only introduced with KATCP v5) and the value of the `use_mid`\n argument. 
Defaults to None.\n use_mid : bool\n Use a mid for the request if True.\n\n Returns\n -------\n future object that resolves with an :class:`katcp.resource.KATCPReply`\n instance\n\n \"\"\"\n timeout = kwargs.pop('timeout', None)\n if timeout is None:\n timeout = self.timeout_hint\n kwargs['timeout'] = timeout\n return self._client.wrapped_request(self.name, *args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef set_sampling_strategies(self, filter, strategy_and_params):\n futures_dict = {}\n for res_obj in self.clients:\n futures_dict[res_obj.name] = res_obj.set_sampling_strategies(\n filter, strategy_and_params)\n sensors_strategies = yield futures_dict\n raise tornado.gen.Return(sensors_strategies)", "response": "Set sampling strategies for all the sensors of all the group s clients."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsetting the sampling strategy for the sensors of all the group s clients.", "response": "def set_sampling_strategy(self, sensor_name, strategy_and_params):\n \"\"\"Set sampling strategy for the sensors of all the group's clients.\n\n Only sensors that match the specified filter are considered. See the\n `KATCPResource.set_sampling_strategies` docstring for parameter\n definitions and more info.\n\n Returns\n -------\n sensors_strategies : tornado Future\n Resolves with a dict with client names as keys and with the value as\n another dict. 
The value dict is similar to the return value\n described in the `KATCPResource.set_sampling_strategies` docstring.\n \"\"\"\n futures_dict = {}\n for res_obj in self.clients:\n futures_dict[res_obj.name] = res_obj.set_sampling_strategy(\n sensor_name, strategy_and_params)\n sensors_strategies = yield futures_dict\n raise tornado.gen.Return(sensors_strategies)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nwaits for a sensor present on all group clients to satisfy a condition.", "response": "def wait(self, sensor_name, condition_or_value, timeout=5.0, quorum=None,\n max_grace_period=1.0):\n \"\"\"Wait for sensor present on all group clients to satisfy a condition.\n\n Parameters\n ----------\n sensor_name : string\n The name of the sensor to check\n condition_or_value : obj or callable, or seq of objs or callables\n If obj, sensor.value is compared with obj. If callable,\n condition_or_value(reading) is called, and must return True if its\n condition is satisfied. Since the reading is passed in, the value,\n status, timestamp or received_timestamp attributes can all be used\n in the check.\n timeout : float or None\n The total timeout in seconds (None means wait forever)\n quorum : None or int or float\n The number of clients that are required to satisfy the condition,\n as either an explicit integer or a float between 0 and 1 indicating\n a fraction of the total number of clients, rounded up. If None,\n this means that all clients are required (the default). 
Be warned\n that a value of 1.0 (float) indicates all clients while a value\n of 1 (int) indicates a single client...\n max_grace_period : float or None\n After a quorum or initial timeout is reached, wait up to this long\n in an attempt to get the rest of the clients to satisfy condition\n as well (achieving effectively a full quorum if all clients behave)\n\n Returns\n -------\n This command returns a tornado Future that resolves with True when a\n quorum of clients satisfy the sensor condition, or False if a quorum\n is not reached after a given timeout period (including a grace period).\n\n Raises\n ------\n :class:`KATCPSensorError`\n If any of the sensors do not have a strategy set, or if the named\n sensor is not present\n\n \"\"\"\n if quorum is None:\n quorum = len(self.clients)\n elif quorum > 1:\n if not isinstance(quorum, int):\n raise TypeError('Quorum parameter %r must be an integer '\n 'if outside range [0, 1]' % (quorum,))\n elif isinstance(quorum, float):\n quorum = int(math.ceil(quorum * len(self.clients)))\n if timeout and max_grace_period:\n # Avoid having a grace period longer than or equal to timeout\n grace_period = min(max_grace_period, timeout / 2.)\n initial_timeout = timeout - grace_period\n else:\n grace_period = max_grace_period\n initial_timeout = timeout\n # Build dict of futures instead of list as this will be easier to debug\n futures = {}\n for client in self.clients:\n f = client.wait(sensor_name, condition_or_value, initial_timeout)\n futures[client.name] = f\n # No timeout required here as all futures will resolve after timeout\n initial_results = yield until_some(done_at_least=quorum, **futures)\n results = dict(initial_results)\n # Identify stragglers and let them all respond within grace period\n stragglers = {}\n for client in self.clients:\n if not results.get(client.name, False):\n f = client.wait(sensor_name, condition_or_value, grace_period)\n stragglers[client.name] = f\n rest_of_results = yield 
until_some(**stragglers)\n results.update(dict(rest_of_results))\n class TestableDict(dict):\n \"\"\"Dictionary of results that can be tested for overall success.\"\"\"\n def __bool__(self):\n return sum(self.values()) >= quorum\n # Was not handled automatrically by futurize, see\n # https://github.com/PythonCharmers/python-future/issues/282\n if sys.version_info[0] == 2:\n __nonzero__ = __bool__\n raise tornado.gen.Return(TestableDict(results))"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns an instance of KATCPClientResource or similar", "response": "def client_resource_factory(self, res_spec, parent, logger):\n \"\"\"Return an instance of :class:`KATCPClientResource` or similar\n\n Provided to ease testing. Overriding this method allows deep brain surgery. See\n :func:`katcp.fake_clients.fake_KATCP_client_resource_factory`\n\n \"\"\"\n return KATCPClientResource(res_spec, parent=self, logger=logger)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding a new group to the container groups member.", "response": "def add_group(self, group_name, group_client_names):\n \"\"\"Add a new :class:`ClientGroup` to container groups member.\n\n Add the group named *group_name* with sequence of client names to the\n container groups member. From there it will be wrapped appropriately\n in the higher-level thread-safe container.\n \"\"\"\n group_configs = self._resources_spec.get('groups', {})\n group_configs[group_name] = group_client_names\n self._resources_spec['groups'] = group_configs\n self._init_groups()"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset the tornado ioloop to use.", "response": "def set_ioloop(self, ioloop=None):\n \"\"\"Set the tornado ioloop to use\n\n Defaults to tornado.ioloop.IOLoop.current() if set_ioloop() is not called or if\n ioloop=None. 
Must be called before start()\n \"\"\"\n ioloop = ioloop or tornado.ioloop.IOLoop.current()\n self.ioloop = ioloop\n for res in dict.values(self.children):\n res.set_ioloop(ioloop)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef until_synced(self, timeout=None):\n futures = [r.until_synced(timeout) for r in dict.values(self.children)]\n yield tornado.gen.multi(futures, quiet_exceptions=tornado.gen.TimeoutError)", "response": "Return a tornado Future ; resolves when all subordinate clients are synced"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef until_not_synced(self, timeout=None):\n yield until_any(*[r.until_not_synced() for r in dict.values(self.children)],\n timeout=timeout)", "response": "Return a tornado Future ; resolves when any subordinate client is not synced"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a tornado Future ; resolves when any client is in specified state", "response": "def until_any_child_in_state(self, state, timeout=None):\n \"\"\"Return a tornado Future; resolves when any client is in specified state\"\"\"\n return until_any(*[r.until_state(state) for r in dict.values(self.children)],\n timeout=timeout)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a tornado Future ; resolves when all clients are in specified state", "response": "def until_all_children_in_state(self, state, timeout=None):\n \"\"\"Return a tornado Future; resolves when all clients are in specified state\"\"\"\n futures = [r.until_state(state, timeout=timeout)\n for r in dict.values(self.children)]\n yield tornado.gen.multi(futures, quiet_exceptions=tornado.gen.TimeoutError)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting sampling strategies for filtered sensors", "response": "def set_sampling_strategies(self, filter, 
strategy_and_parms):\n \"\"\"Set sampling strategies for filtered sensors - these sensors have to exsist\"\"\"\n result_list = yield self.list_sensors(filter=filter)\n sensor_dict = {}\n for result in result_list:\n sensor_name = result.object.normalised_name\n resource_name = result.object.parent_name\n if resource_name not in sensor_dict:\n sensor_dict[resource_name] = {}\n try:\n resource_obj = self.children[resource_name]\n yield resource_obj.set_sampling_strategy(sensor_name, strategy_and_parms)\n sensor_dict[resource_name][sensor_name] = strategy_and_parms\n self._logger.debug(\n 'Set sampling strategy on resource %s for %s'\n % (resource_name, sensor_name))\n except Exception as exc:\n self._logger.error(\n 'Cannot set sampling strategy on resource %s for %s (%s)'\n % (resource_name, sensor_name, exc))\n sensor_dict[resource_name][sensor_name] = None\n raise tornado.gen.Return(sensor_dict)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding a child resource client to the container and start the resource connection", "response": "def add_child_resource_client(self, res_name, res_spec):\n \"\"\"Add a resource client to the container and start the resource connection\"\"\"\n res_spec = dict(res_spec)\n res_spec['name'] = res_name\n res = self.client_resource_factory(\n res_spec, parent=self, logger=self._logger)\n self.children[resource.escape_name(res_name)] = res;\n self._children_dirty = True\n res.set_ioloop(self.ioloop)\n res.start()\n return res"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef stop(self):\n for child_name, child in dict.items(self.children):\n # Catch child exceptions when stopping so we make sure to stop all children\n # that want to listen.\n try:\n child.stop()\n except Exception:\n self._logger.exception('Exception stopping child {!r}'\n .format(child_name))", "response": "Stop all child resources"} {"SOURCE": "codesearchnet", "instruction": "Can you 
create a Python 3 function that\nparses PNG bytes into multiple chunks. isnyme is the raw bytes of the PNG file.", "response": "def parse_chunks(b):\n\t\"\"\"Parse PNG bytes into multiple chunks. \n\t\n\t:arg bytes b: The raw bytes of the PNG file.\n\t:return: A generator yielding :class:`Chunk`.\n\t:rtype: Iterator[Chunk]\n\t\"\"\"\n\t# skip signature\n\ti = 8\n\t# yield chunks\n\twhile i < len(b):\n\t\tdata_len, = struct.unpack(\"!I\", b[i:i+4])\n\t\ttype_ = b[i+4:i+8].decode(\"latin-1\")\n\t\tyield Chunk(type_, b[i:i+data_len+12])\n\t\ti += data_len + 12"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef make_chunk(chunk_type, chunk_data):\n\tout = struct.pack(\"!I\", len(chunk_data))\n\tchunk_data = chunk_type.encode(\"latin-1\") + chunk_data\n\tout += chunk_data + struct.pack(\"!I\", binascii.crc32(chunk_data) & 0xffffffff)\n\treturn out", "response": "Create a raw PNG chunk by composing chunk type and data."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef make_text_chunk(\n\t\ttype=\"tEXt\", key=\"Comment\", value=\"\",\n\t\tcompression_flag=0, compression_method=0, lang=\"\", translated_key=\"\"):\n\t\"\"\"Create a text chunk with a key value pair.\n\tSee https://www.w3.org/TR/PNG/#11textinfo for text chunk information.\n\n\tUsage:\n\t\n\t.. code:: python\n\n\t\tfrom apng import APNG, make_text_chunk\n\n\t\tim = APNG.open(\"file.png\")\n\t\tpng, control = im.frames[0]\n\t\tpng.chunks.append(make_text_chunk(\"tEXt\", \"Comment\", \"some text\"))\n\t\tim.save(\"file.png\")\n\n\t:arg str type: Text chunk type: \"tEXt\", \"zTXt\", or \"iTXt\":\n\n\t\ttEXt uses Latin-1 characters.\n\t\tzTXt uses Latin-1 characters, compressed with zlib.\n\t\tiTXt uses UTF-8 characters.\n\n\t:arg str key: The key string, 1-79 characters.\n\t\n\t:arg str value: The text value. 
It would be encoded into\n\t\t:class:`bytes` and compressed if needed.\n\t\t\n\t:arg int compression_flag: The compression flag for iTXt.\n\t:arg int compression_method: The compression method for zTXt and iTXt.\n\t:arg str lang: The language tag for iTXt.\n\t:arg str translated_key: The translated keyword for iTXt.\n\t:rtype: Chunk\n\t\"\"\"\n\t# pylint: disable=redefined-builtin\n\tif type == \"tEXt\":\n\t\tdata = key.encode(\"latin-1\") + b\"\\0\" + value.encode(\"latin-1\")\n\telif type == \"zTXt\":\n\t\tdata = (\n\t\t\tkey.encode(\"latin-1\") + struct.pack(\"!xb\", compression_method) +\n\t\t\tzlib.compress(value.encode(\"latin-1\"))\n\t\t)\n\telif type == \"iTXt\":\n\t\tdata = (\n\t\t\tkey.encode(\"latin-1\") +\n\t\t\tstruct.pack(\"!xbb\", compression_flag, compression_method) +\n\t\t\tlang.encode(\"latin-1\") + b\"\\0\" +\n\t\t\ttranslated_key.encode(\"utf-8\") + b\"\\0\"\n\t\t)\n\t\tif compression_flag:\n\t\t\tdata += zlib.compress(value.encode(\"utf-8\"))\n\t\telse:\n\t\t\tdata += value.encode(\"utf-8\")\n\telse:\n\t\traise TypeError(\"unknown type {!r}\".format(type))\n\treturn Chunk(type, make_chunk(type, data))", "response": "Create a text chunk with a key value pair."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef read_file(file):\n\tif hasattr(file, \"read\"):\n\t\treturn file.read()\n\tif hasattr(file, \"read_bytes\"):\n\t\treturn file.read_bytes()\n\twith open(file, \"rb\") as f:\n\t\treturn f.read()", "response": "Read file into bytes."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef write_file(file, b):\n\tif hasattr(file, \"write_bytes\"):\n\t\tfile.write_bytes(b)\n\telif hasattr(file, \"write\"):\n\t\tfile.write(b)\n\telse:\n\t\twith open(file, \"wb\") as f:\n\t\t\tf.write(b)", "response": "Write b to file file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nopen a file. 
:arg file: file-like or path-like object. :arg str mode: ``mode`` argument for :func:`open`.", "response": "def open_file(file, mode):\n\t\"\"\"Open a file.\n\n\t:arg file: file-like or path-like object.\n\t:arg str mode: ``mode`` argument for :func:`open`.\n\t\"\"\"\n\tif hasattr(file, \"read\"):\n\t\treturn file\n\tif hasattr(file, \"open\"):\n\t\treturn file.open(mode)\n\treturn open(file, mode)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef file_to_png(fp):\n\timport PIL.Image # pylint: disable=import-error\n\twith io.BytesIO() as dest:\n\t\tPIL.Image.open(fp).save(dest, \"PNG\", optimize=True)\n\t\treturn dest.getvalue()", "response": "Convert an image to PNG format with Pillow."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nextract some info from chunks", "response": "def init(self):\n\t\t\"\"\"Extract some info from chunks\"\"\"\n\t\tfor type_, data in self.chunks:\n\t\t\tif type_ == \"IHDR\":\n\t\t\t\tself.hdr = data\n\t\t\telif type_ == \"IEND\":\n\t\t\t\tself.end = data\n\t\t\t\t\n\t\tif self.hdr:\n\t\t\t# grab w, h info\n\t\t\tself.width, self.height = struct.unpack(\"!II\", self.hdr[8:16])"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef open_any(cls, file):\n\t\twith open_file(file, \"rb\") as f:\n\t\t\theader = f.read(8)\n\t\t\tf.seek(0)\n\t\t\tif header != PNG_SIGN:\n\t\t\t\tb = file_to_png(f)\n\t\t\telse:\n\t\t\t\tb = f.read()\n\t\treturn cls.from_bytes(b)", "response": "Open an image file."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a new PNG object from raw bytes.", "response": "def from_bytes(cls, b):\n\t\t\"\"\"Create :class:`PNG` from raw bytes.\n\t\t\n\t\t:arg bytes b: The raw bytes of the PNG file.\n\t\t:rtype: :class:`PNG`\n\t\t\"\"\"\n\t\tim = cls()\n\t\tim.chunks = 
list(parse_chunks(b))\n\t\tim.init()\n\t\treturn im"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef from_chunks(cls, chunks):\n\t\tim = cls()\n\t\tim.chunks = chunks\n\t\tim.init()\n\t\treturn im", "response": "Construct PNG from raw chunks."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nconvert the entire image to bytes.", "response": "def to_bytes(self):\n\t\t\"\"\"Convert the entire image to bytes.\n\t\t\n\t\t:rtype: bytes\n\t\t\"\"\"\n\t\tchunks = [PNG_SIGN]\n\t\tchunks.extend(c[1] for c in self.chunks)\n\t\treturn b\"\".join(chunks)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nconvert to bytes. :rtype: bytes", "response": "def to_bytes(self):\n\t\t\"\"\"Convert to bytes.\n\t\t\n\t\t:rtype: bytes\n\t\t\"\"\"\n\t\treturn struct.pack(\n\t\t\t\"!IIIIHHbb\", self.width, self.height, self.x_offset, self.y_offset,\n\t\t\tself.delay, self.delay_den, self.depose_op, self.blend_op\n\t\t)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nappends one frame. :arg PNG png: Append a :class:`PNG` as a frame. 
:arg dict options: The options for :class:`FrameControl`.", "response": "def append(self, png, **options):\n\t\t\"\"\"Append one frame.\n\t\t\n\t\t:arg PNG png: Append a :class:`PNG` as a frame.\n\t\t:arg dict options: The options for :class:`FrameControl`.\n\t\t\"\"\"\n\t\tif not isinstance(png, PNG):\n\t\t\traise TypeError(\"Expect an instance of `PNG` but got `{}`\".format(png))\n\t\tcontrol = FrameControl(**options)\n\t\tif control.width is None:\n\t\t\tcontrol.width = png.width\n\t\tif control.height is None:\n\t\t\tcontrol.height = png.height\n\t\tself.frames.append((png, control))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef append_file(self, file, **options):\n\t\tself.append(PNG.open_any(file), **options)", "response": "Create a PNG from file and append it as a frame."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nconverting the entire image to bytes.", "response": "def to_bytes(self):\n\t\t\"\"\"Convert the entire image to bytes.\n\t\t\n\t\t:rtype: bytes\n\t\t\"\"\"\n\t\t\n\t\t# grab the chunks we needs\n\t\tout = [PNG_SIGN]\n\t\t# FIXME: it's tricky to define \"other_chunks\". 
HoneyView stop the \n\t\t# animation if it sees chunks other than fctl or idat, so we put other\n\t\t# chunks to the end of the file\n\t\tother_chunks = []\n\t\tseq = 0\n\t\t\n\t\t# for first frame\n\t\tpng, control = self.frames[0]\n\t\t\n\t\t# header\n\t\tout.append(png.hdr)\n\t\t\n\t\t# acTL\n\t\tout.append(make_chunk(\"acTL\", struct.pack(\"!II\", len(self.frames), self.num_plays)))\n\t\t\n\t\t# fcTL\n\t\tif control:\n\t\t\tout.append(make_chunk(\"fcTL\", struct.pack(\"!I\", seq) + control.to_bytes()))\n\t\t\tseq += 1\n\t\t\n\t\t# and others...\n\t\tidat_chunks = []\n\t\tfor type_, data in png.chunks:\n\t\t\tif type_ in (\"IHDR\", \"IEND\"):\n\t\t\t\tcontinue\n\t\t\tif type_ == \"IDAT\":\n\t\t\t\t# put at last\n\t\t\t\tidat_chunks.append(data)\n\t\t\t\tcontinue\n\t\t\tout.append(data)\n\t\tout.extend(idat_chunks)\n\t\t\n\t\t# FIXME: we should do some optimization to frames...\n\t\t# for other frames\n\t\tfor png, control in self.frames[1:]:\n\t\t\t# fcTL\n\t\t\tout.append(\n\t\t\t\tmake_chunk(\"fcTL\", struct.pack(\"!I\", seq) + control.to_bytes())\n\t\t\t)\n\t\t\tseq += 1\n\t\t\t\n\t\t\t# and others...\n\t\t\tfor type_, data in png.chunks:\n\t\t\t\tif type_ in (\"IHDR\", \"IEND\") or type_ in CHUNK_BEFORE_IDAT:\n\t\t\t\t\tcontinue\n\t\t\t\telif type_ == \"IDAT\":\n\t\t\t\t\t# convert IDAT to fdAT\n\t\t\t\t\tout.append(\n\t\t\t\t\t\tmake_chunk(\"fdAT\", struct.pack(\"!I\", seq) + data[8:-4])\n\t\t\t\t\t)\n\t\t\t\t\tseq += 1\n\t\t\t\telse:\n\t\t\t\t\tother_chunks.append(data)\n\t\t\n\t\t# end\n\t\tout.extend(other_chunks)\n\t\tout.append(png.end)\n\t\t\n\t\treturn b\"\".join(out)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef from_files(cls, files, **options):\n\t\tim = cls()\n\t\tfor file in files:\n\t\t\tim.append_file(file, **options)\n\t\treturn im", "response": "Create an APNG from multiple files."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates an 
APNG from raw bytes.", "response": "def from_bytes(cls, b):\n\t\t\"\"\"Create an APNG from raw bytes.\n\t\t\n\t\t:arg bytes b: The raw bytes of the APNG file.\n\t\t:rtype: APNG\n\t\t\"\"\"\n\t\thdr = None\n\t\thead_chunks = []\n\t\tend = (\"IEND\", make_chunk(\"IEND\", b\"\"))\n\t\t\n\t\tframe_chunks = []\n\t\tframes = []\n\t\tnum_plays = 0\n\t\tframe_has_head_chunks = False\n\t\t\n\t\tcontrol = None\n\t\t\n\t\tfor type_, data in parse_chunks(b):\n\t\t\tif type_ == \"IHDR\":\n\t\t\t\thdr = data\n\t\t\t\tframe_chunks.append((type_, data))\n\t\t\telif type_ == \"acTL\":\n\t\t\t\t_num_frames, num_plays = struct.unpack(\"!II\", data[8:-4])\n\t\t\t\tcontinue\n\t\t\telif type_ == \"fcTL\":\n\t\t\t\tif any(type_ == \"IDAT\" for type_, data in frame_chunks):\n\t\t\t\t\t# IDAT inside chunk, go to next frame\n\t\t\t\t\tframe_chunks.append(end)\n\t\t\t\t\tframes.append((PNG.from_chunks(frame_chunks), control))\n\t\t\t\t\tframe_has_head_chunks = False\n\t\t\t\t\tcontrol = FrameControl.from_bytes(data[12:-4])\n\t\t\t\t\t# https://github.com/PyCQA/pylint/issues/2072\n\t\t\t\t\t# pylint: disable=typecheck\n\t\t\t\t\thdr = make_chunk(\"IHDR\", struct.pack(\"!II\", control.width, control.height) + hdr[16:-4])\n\t\t\t\t\tframe_chunks = [(\"IHDR\", hdr)]\n\t\t\t\telse:\n\t\t\t\t\tcontrol = FrameControl.from_bytes(data[12:-4])\n\t\t\telif type_ == \"IDAT\":\n\t\t\t\tif not frame_has_head_chunks:\n\t\t\t\t\tframe_chunks.extend(head_chunks)\n\t\t\t\t\tframe_has_head_chunks = True\n\t\t\t\tframe_chunks.append((type_, data))\n\t\t\telif type_ == \"fdAT\":\n\t\t\t\t# convert to IDAT\n\t\t\t\tif not frame_has_head_chunks:\n\t\t\t\t\tframe_chunks.extend(head_chunks)\n\t\t\t\t\tframe_has_head_chunks = True\n\t\t\t\tframe_chunks.append((\"IDAT\", make_chunk(\"IDAT\", data[12:-4])))\n\t\t\telif type_ == \"IEND\":\n\t\t\t\t# end\n\t\t\t\tframe_chunks.append(end)\n\t\t\t\tframes.append((PNG.from_chunks(frame_chunks), control))\n\t\t\t\tbreak\n\t\t\telif type_ in 
CHUNK_BEFORE_IDAT:\n\t\t\t\thead_chunks.append((type_, data))\n\t\t\telse:\n\t\t\t\tframe_chunks.append((type_, data))\n\t\t\t\t\n\t\to = cls()\n\t\to.frames = frames\n\t\to.num_plays = num_plays\n\t\treturn o"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a sensor to the log", "response": "def request_add_sensor(self, sock, msg):\n \"\"\" add a sensor\n \"\"\"\n self.add_sensor(Sensor(int, 'int_sensor%d' % len(self._sensors),\n 'descr', 'unit', params=[-10, 10]))\n return Message.reply('add-sensor', 'ok')"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nnormalize the strategy parameters to be a list of strings or numbers.", "response": "def normalize_strategy_parameters(params):\n \"\"\"Normalize strategy parameters to be a list of strings.\n\n Parameters\n ----------\n params : (space-delimited) string or sequence of strings/numbers Parameters\n expected by :class:`SampleStrategy` object, in various forms, where the first\n parameter is the name of the strategy.\n\n Returns\n -------\n params : tuple of strings\n Strategy parameters as a list of strings\n\n \"\"\"\n def fixup_numbers(val):\n try:\n # See if it is a number\n return str(float(val))\n except ValueError:\n # ok, it is not a number we know of, perhaps a string\n return str(val)\n if isinstance(params, basestring):\n params = params.split(' ')\n # No number\n return tuple(fixup_numbers(p) for p in params)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef wait(self, sensor_name, condition_or_value, timeout=5):\n sensor_name = escape_name(sensor_name)\n sensor = self.sensor[sensor_name]\n try:\n yield sensor.wait(condition_or_value, timeout)\n except tornado.gen.TimeoutError:\n raise tornado.gen.Return(False)\n else:\n raise tornado.gen.Return(True)", "response": "Wait for a sensor in this resource to satisfy a condition."} {"SOURCE": "codesearchnet", "instruction": "How would you 
code a function in Python 3 to\nlist sensors available on this resource matching certain criteria.", "response": "def list_sensors(self, filter=\"\", strategy=False, status=\"\",\n use_python_identifiers=True, tuple=False, refresh=False):\n \"\"\"List sensors available on this resource matching certain criteria.\n\n Parameters\n ----------\n filter : string, optional\n Filter each returned sensor's name against this regexp if specified.\n To ease the dichotomy between Python identifier names and actual\n sensor names, the default is to search on Python identifier names\n rather than KATCP sensor names, unless `use_python_identifiers`\n below is set to False. Note that the sensors of subordinate\n KATCPResource instances may have inconsistent names and Python\n identifiers, better to always search on Python identifiers in this\n case.\n strategy : {False, True}, optional\n Only list sensors with a set strategy if True\n status : string, optional\n Filter each returned sensor's status against this regexp if given\n use_python_identifiers : {True, False}, optional\n Match on python identfiers even the the KATCP name is available.\n tuple : {True, False}, optional, Default: False\n Return backwards compatible tuple instead of SensorResultTuples\n refresh : {True, False}, optional, Default: False\n If set the sensor values will be refreshed with get_value before\n returning the results.\n\n Returns\n -------\n sensors : list of SensorResultTuples, or list of tuples\n List of matching sensors presented as named tuples. 
The `object`\n field is the :class:`KATCPSensor` object associated with the sensor.\n Note that the name of the object may not match `name` if it\n originates from a subordinate device.\n \"\"\""} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef set_sampling_strategies(self, filter, strategy_and_params):\n sensors_strategies = {}\n sensor_results = yield self.list_sensors(filter)\n for sensor_reslt in sensor_results:\n norm_name = sensor_reslt.object.normalised_name\n try:\n sensor_strat = yield self.set_sampling_strategy(norm_name, strategy_and_params)\n sensors_strategies[norm_name] = sensor_strat[norm_name]\n except Exception:\n sensors_strategies[norm_name] = (\n False, sys.exc_info())\n raise tornado.gen.Return(sensors_strategies)", "response": "Set a sampling strategy for all sensors that match the regular expression filter."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting a sampling strategy for a specific sensor.", "response": "def set_sampling_strategy(self, sensor_name, strategy_and_params):\n \"\"\"Set a sampling strategy for a specific sensor.\n\n Parameters\n ----------\n sensor_name : string\n The specific sensor.\n strategy_and_params : seq of str or str\n As tuple contains (<strat_name>, [<strat_parm1>, ...]) where the strategy\n names and parameters are as defined by the KATCP spec. 
As str contains the\n same elements in space-separated form.\n\n Returns\n -------\n sensors_strategies : tornado Future\n resolves with a dict with the Python identifier names of the sensors\n as keys and the value a tuple:\n\n (success, info) with\n\n sucess : bool\n True if setting succeeded for this sensor, else False\n info : tuple\n normalised sensor strategy and parameters as tuple if success == True\n else, sys.exc_info() tuple for the error that occured.\n \"\"\"\n sensors_strategies = {}\n try:\n sensor_obj = self.sensor.get(sensor_name)\n yield sensor_obj.set_sampling_strategy(strategy_and_params)\n sensors_strategies[sensor_obj.normalised_name] = (\n True, sensor_obj.sampling_strategy)\n except Exception:\n sensors_strategies[sensor_obj.normalised_name] = (\n False, sys.exc_info())\n raise tornado.gen.Return(sensors_strategies)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset the current sampling strategy for this sensor.", "response": "def set_strategy(self, strategy, params=None):\n \"\"\"Set current sampling strategy for sensor.\n Add this footprint for backwards compatibility.\n\n Parameters\n ----------\n\n strategy : seq of str or str\n As tuple contains (<strat_name>, [<strat_parm1>, ...]) where the strategy\n names and parameters are as defined by the KATCP spec. 
As str contains the\n same elements in space-separated form.\n params : seq of str or str\n (<strat_name>, [<strat_parm1>, ...])\n\n Returns\n -------\n done : tornado Future that resolves when done or raises KATCPSensorError\n\n \"\"\"\n if not params:\n param_args = []\n elif isinstance(params, basestring):\n param_args = [str(p) for p in params.split(' ')]\n else:\n if not isinstance(params, collections.Iterable):\n params = (params,)\n param_args = [str(p) for p in params]\n samp_strategy = \" \".join([strategy] + param_args)\n return self._manager.set_sampling_strategy(self.name, samp_strategy)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nregisters a callback function that is called when sensor value is updated.", "response": "def register_listener(self, listener, reading=False):\n \"\"\"Add a callback function that is called when sensor value is updated.\n The callback footprint is received_timestamp, timestamp, status, value.\n\n Parameters\n ----------\n listener : function\n Callback signature: if reading\n listener(katcp_sensor, reading) where\n `katcp_sensor` is this KATCPSensor instance\n `reading` is an instance of :class:`KATCPSensorReading`\n Callback signature: default, if not reading\n listener(received_timestamp, timestamp, status, value)\n \"\"\"\n listener_id = hashable_identity(listener)\n self._listeners[listener_id] = (listener, reading)\n logger.debug(\n 'Register listener for {}'\n .format(self.name))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef unregister_listener(self, listener):\n listener_id = hashable_identity(listener)\n self._listeners.pop(listener_id, None)", "response": "Unregister a callback function."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset sensor with a given received value matches a given status.", "response": "def set(self, timestamp, status, value):\n 
\"\"\"Set sensor with a given received value, matches :meth:`katcp.Sensor.set`\"\"\"\n received_timestamp = self._manager.time()\n reading = KATCPSensorReading(received_timestamp, timestamp, status, value)\n self._reading = reading\n self.call_listeners(reading)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsetting sensor value with optinal specification of status and timestamp", "response": "def set_value(self, value, status=Sensor.NOMINAL, timestamp=None):\n \"\"\"Set sensor value with optinal specification of status and timestamp\"\"\"\n if timestamp is None:\n timestamp = self._manager.time()\n self.set(timestamp, status, value)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets a fresh sensor reading from the KATCP resource", "response": "def get_reading(self):\n \"\"\"Get a fresh sensor reading from the KATCP resource\n\n Returns\n -------\n reply : tornado Future resolving with :class:`KATCPSensorReading` object\n\n Note\n ----\n\n As a side-effect this will update the reading stored in this object, and result in\n registered listeners being called.\n \"\"\"\n yield self._manager.poll_sensor(self._name)\n # By now the sensor manager should have set the reading\n raise Return(self._reading)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_value(self):\n yield self._manager.poll_sensor(self._name)\n # By now the sensor manager should have set the reading\n raise Return(self._reading.value)", "response": "Get a fresh sensor value from the KATCP resource\n "} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_status(self):\n yield self._manager.poll_sensor(self._name)\n # By now the sensor manager should have set the reading\n raise Return(self._reading.status)", "response": "Get a fresh sensor status from the KATCP resource\n "} {"SOURCE": "codesearchnet", 
"instruction": "Implement a function in Python 3 to\nwait for the current sensor to satisfy a condition.", "response": "def wait(self, condition_or_value, timeout=None):\n \"\"\"Wait for the sensor to satisfy a condition.\n\n Parameters\n ----------\n condition_or_value : obj or callable, or seq of objs or callables\n If obj, sensor.value is compared with obj. If callable,\n condition_or_value(reading) is called, and must return True if its\n condition is satisfied. Since the reading is passed in, the value,\n status, timestamp or received_timestamp attributes can all be used\n in the check.\n TODO: Sequences of conditions (use SensorTransitionWaiter thingum?)\n timeout : float or None\n The timeout in seconds (None means wait forever)\n\n Returns\n -------\n This command returns a tornado Future that resolves with True when the\n sensor value satisfies the condition. It will never resolve with False;\n if a timeout is given a TimeoutError happens instead.\n\n Raises\n ------\n :class:`KATCPSensorError`\n If the sensor does not have a strategy set\n :class:`tornado.gen.TimeoutError`\n If the sensor condition still fails after a stated timeout period\n\n \"\"\"\n if (isinstance(condition_or_value, collections.Sequence) and not\n isinstance(condition_or_value, basestring)):\n raise NotImplementedError(\n 'Currently only single conditions are supported')\n condition_test = (condition_or_value if callable(condition_or_value)\n else lambda s: s.value == condition_or_value)\n\n ioloop = tornado.ioloop.IOLoop.current()\n f = Future()\n if self.sampling_strategy == ('none', ):\n raise KATCPSensorError(\n 'Cannot wait on a sensor that does not have a strategy set')\n\n def handle_update(sensor, reading):\n # This handler is called whenever a sensor update is received\n try:\n assert sensor is self\n if condition_test(reading):\n self.unregister_listener(handle_update)\n # Try and be idempotent if called multiple times after the\n # condition is matched. 
This should not happen unless the\n # sensor object is being updated in a thread outside of the\n # ioloop.\n if not f.done():\n ioloop.add_callback(f.set_result, True)\n except Exception:\n f.set_exc_info(sys.exc_info())\n self.unregister_listener(handle_update)\n\n self.register_listener(handle_update, reading=True)\n # Handle case where sensor is already at the desired value\n ioloop.add_callback(handle_update, self, self._reading)\n\n if timeout:\n to = ioloop.time() + timeout\n timeout_f = with_timeout(to, f)\n # Make sure we stop listening if the wait times out to prevent a\n # buildup of listeners\n timeout_f.add_done_callback(\n lambda f: self.unregister_listener(handle_update))\n return timeout_f\n else:\n return f"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef fake_KATCP_client_resource_factory(\n KATCPClientResourceClass, fake_options, resource_spec, *args, **kwargs):\n \"\"\"Create a fake KATCPClientResource-like class and a fake-manager\n\n Parameters\n ----------\n KATCPClientResourceClass : class\n Subclass of :class:`katcp.resource_client.KATCPClientResource`\n fake_options : dict\n Options for the faking process. Keys:\n allow_any_request : bool, default False\n (TODO not implemented behaves as if it were True)\n resource_spec, *args, **kwargs : passed to KATCPClientResourceClass\n\n A subclass of the passed-in KATCPClientResourceClass is created that replaces the\n internal InspecingClient instances with fakes using fake_inspecting_client_factory()\n based on the InspectingClient class used by KATCPClientResourceClass.\n\n Returns\n -------\n (fake_katcp_client_resource, fake_katcp_client_resource_manager):\n\n fake_katcp_client_resource : instance of faked subclass of KATCPClientResourceClass\n fake_katcp_client_resource_manager : :class:`FakeKATCPClientResourceManager` instance\n Bound to the `fake_katcp_client_resource` instance.\n\n \"\"\"\n # TODO Implement allow_any_request functionality. 
When True, any unknown request (even\n # if there is no fake implementation) should succeed\n allow_any_request = fake_options.get('allow_any_request', False)\n\n class FakeKATCPClientResource(KATCPClientResourceClass):\n def inspecting_client_factory(self, host, port, ioloop_set_to):\n real_instance = (super(FakeKATCPClientResource, self)\n .inspecting_client_factory(host, port, ioloop_set_to) )\n fic, fic_manager = fake_inspecting_client_factory(\n real_instance.__class__, fake_options, host, port,\n ioloop=ioloop_set_to, auto_reconnect=self.auto_reconnect)\n self.fake_inspecting_client_manager = fic_manager\n return fic\n\n fkcr = FakeKATCPClientResource(resource_spec, *args, **kwargs)\n fkcr_manager = FakeKATCPClientResourceManager(fkcr)\n return (fkcr, fkcr_manager)", "response": "Create a fake KATCPClientResource - like class and a fake - manager for the given resource_spec."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef fake_KATCP_client_resource_container_factory(\n KATCPClientResourceContainerClass, fake_options, resources_spec,\n *args, **kwargs):\n \"\"\"Create a fake KATCPClientResourceContainer-like class and a fake-manager\n\n Parameters\n ----------\n KATCPClientResourceContainerClass : class\n Subclass of :class:`katcp.resource_client.KATCPClientResourceContainer`\n fake_options : dict\n Options for the faking process. 
Keys:\n allow_any_request : bool, default False\n (TODO not implemented behaves as if it were True)\n resources_spec, *args, **kwargs : passed to KATCPClientResourceContainerClass\n\n A subclass of the passed-in KATCPClientResourceClassContainer is created that replaces the\n KATCPClientResource child instances with fakes using fake_KATCP_client_resource_factory()\n based on the KATCPClientResource class used by `KATCPClientResourceContainerClass`.\n\n Returns\n -------\n (fake_katcp_client_resource_container, fake_katcp_client_resource_container_manager):\n\n fake_katcp_client_resource_container : instance of faked subclass of\n KATCPClientResourceContainerClass\n fake_katcp_client_resource_manager : :class:`FakeKATCPClientResourceContainerManager`\n instance\n Bound to the `fake_katcp_client_resource_container` instance.\n\n \"\"\"\n # TODO allow_any_request see comment in fake_KATCP_client_resource_factory()\n allow_any_request = fake_options.get('allow_any_request', False)\n\n class FakeKATCPClientResourceContainer(KATCPClientResourceContainerClass):\n def __init__(self, *args, **kwargs):\n self.fake_client_resource_managers = {}\n super(FakeKATCPClientResourceContainer, self).__init__(*args, **kwargs)\n\n def client_resource_factory(self, res_spec, parent, logger):\n real_instance = (super(FakeKATCPClientResourceContainer, self)\n .client_resource_factory(res_spec, parent, logger) )\n fkcr, fkcr_manager = fake_KATCP_client_resource_factory(\n real_instance.__class__, fake_options,\n res_spec, parent=self, logger=logger)\n self.fake_client_resource_managers[\n resource.escape_name(fkcr.name)] = fkcr_manager\n return fkcr\n\n fkcrc = FakeKATCPClientResourceContainer(resources_spec, *args, **kwargs)\n fkcrc_manager = FakeKATCPClientResourceContainerManager(fkcrc)\n return (fkcrc, fkcrc_manager)", "response": "Create a fake KATCPClientResourceContainer - like class and a fake - manager for the given resources_spec."} {"SOURCE": "codesearchnet", "instruction": 
"Implement a function in Python 3 to\nrequest a list of sensors.", "response": "def request_sensor_list(self, req, msg):\n \"\"\"Sensor list\"\"\"\n if msg.arguments:\n name = (msg.arguments[0],)\n keys = (name, )\n if name not in self.fake_sensor_infos:\n return (\"fail\", \"Unknown sensor name.\")\n else:\n keys = self.fake_sensor_infos.keys()\n\n num_informs = 0\n for sensor_name in keys:\n infos = self.fake_sensor_infos[sensor_name]\n num_informs += 1\n req.inform(sensor_name, *infos)\n\n return ('ok', num_informs)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_sensors(self, sensor_infos):\n # Check sensor validity. parse_type() and parse_params should raise if not OK\n for s_name, s_info in sensor_infos.items():\n s_type = Sensor.parse_type(s_info[2])\n s_params = Sensor.parse_params(s_type, s_info[3:])\n self.fake_sensor_infos.update(sensor_infos)\n self._fic._interface_changed.set()", "response": "Add fake sensors to the internal list of sensors."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_request_handlers_object(self, rh_obj):\n rh_dict = {}\n for name in dir(rh_obj):\n if not callable(getattr(rh_obj, name)):\n continue\n if name.startswith(\"request_\"):\n request_name = convert_method_name(\"request_\", name)\n req_meth = getattr(rh_obj, name)\n rh_dict[request_name] = req_meth\n\n self.add_request_handlers_dict(rh_dict)", "response": "Add fake request handlers from an object with request_ * method s."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds fake request handlers from a dict keyed by request name and request - handler interface.", "response": "def add_request_handlers_dict(self, rh_dict):\n \"\"\"Add fake request handler functions from a dict keyed by request name\n\n Note the keys must be the KATCP message name (i.e. 
\"the-request\", not\n \"the_request\")\n\n The request-handler interface is more or less compatible with request handler API\n in :class:`katcp.server.DeviceServerBase`. The fake ClientRequestConnection req\n object does not support mass_inform() or reply_with_message.\n\n \"\"\"\n # Check that all the callables have docstrings as strings\n for req_func in rh_dict.values():\n assert req_func.__doc__, \"Even fake request handlers must have docstrings\"\n self._fkc.request_handlers.update(rh_dict)\n self._fic._interface_changed.set()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsending a request message with future replies.", "response": "def future_request(self, msg, timeout=None, use_mid=None):\n \"\"\"Send a request messsage, with future replies.\n\n Parameters\n ----------\n msg : Message object\n The request Message to send.\n timeout : float in seconds\n How long to wait for a reply. The default is the\n the timeout set when creating the AsyncClient.\n use_mid : boolean, optional\n Whether to use message IDs. 
Default is to use message IDs\n if the server supports them.\n\n Returns\n -------\n A tornado.concurrent.Future that resolves with:\n\n reply : Message object\n The reply message received.\n informs : list of Message objects\n A list of the inform messages received.\n\n \"\"\"\n mid = self._get_mid_and_update_msg(msg, use_mid)\n\n if msg.name in self.request_handlers:\n req = FakeClientRequestConnection(self.client_connection, msg)\n reply_msg = yield tornado.gen.maybe_future(\n self.request_handlers[msg.name](req, msg))\n reply_informs = req.informs_sent\n else:\n reply_msg = Message.reply(msg.name, 'ok')\n reply_informs = []\n\n reply_msg.mid = mid\n raise Return((reply_msg, reply_informs))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncall whenever an action has failed", "response": "def failed(self):\n \"\"\"Call whenever an action has failed, grows delay exponentially\n\n After calling failed(), the `delay` property contains the next delay\n \"\"\"\n try:\n self._validate_parameters()\n self._base_delay = min(\n self._base_delay * self.exp_fac,\n self.delay_max)\n self._update_delay()\n except Exception:\n ic_logger.exception(\n 'Unhandled exception trying to calculate a retry timout')"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef hook_inform(self, inform_name, callback):\n # Do not hook the same callback multiple times\n if callback not in self._inform_hooks[inform_name]:\n self._inform_hooks[inform_name].append(callback)", "response": "Hookup a function to be called when an inform is received."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncalling callbacks on hooked informs followed by normal processing", "response": "def handle_inform(self, msg):\n \"\"\"Call callbacks on hooked informs followed by normal processing\"\"\"\n try:\n for func in self._inform_hooks.get(msg.name, []):\n func(msg)\n except Exception:\n 
self._logger.warning('Call to function \"{0}\" with message \"{1}\".'\n .format(func, msg), exc_info=True)\n super(_InformHookDeviceClient, self).handle_inform(msg)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn an instance of _InformHookDeviceClient or similar", "response": "def inform_hook_client_factory(self, host, port, *args, **kwargs):\n \"\"\"Return an instance of :class:`_InformHookDeviceClient` or similar\n\n Provided to ease testing. Dynamically overriding this method after instantiation\n but before start() is called allows for deep brain surgery. See\n :class:`katcp.fake_clients.TBD`\n\n \"\"\"\n return _InformHookDeviceClient(host, port, *args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nwaiting until state is desired_state", "response": "def until_state(self, desired_state, timeout=None):\n \"\"\"\n Wait until state is desired_state, InspectingClientStateType instance\n\n Returns a future\n\n \"\"\"\n return self._state.until_state(desired_state, timeout=timeout)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef connect(self, timeout=None):\n # Start KATCP device client.\n assert not self._running\n maybe_timeout = future_timeout_manager(timeout)\n\n self._logger.debug('Starting katcp client')\n self.katcp_client.start()\n try:\n yield maybe_timeout(self.katcp_client.until_running())\n self._logger.debug('Katcp client running')\n except tornado.gen.TimeoutError:\n self.katcp_client.stop()\n raise\n\n if timeout:\n yield maybe_timeout(self.katcp_client.until_connected())\n self._logger.debug('Katcp client connected')\n\n self._running = True\n self._state_loop()", "response": "Connect to KATCP interface."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef inspect(self):\n timeout_manager = future_timeout_manager(self.sync_timeout)\n 
sensor_index_before = copy.copy(self._sensors_index)\n request_index_before = copy.copy(self._requests_index)\n try:\n request_changes = yield self.inspect_requests(\n timeout=timeout_manager.remaining())\n sensor_changes = yield self.inspect_sensors(\n timeout=timeout_manager.remaining())\n except Exception:\n # Ensure atomicity of sensor and request updates ; if the one\n # fails, the other should act as if it has failed too.\n self._sensors_index = sensor_index_before\n self._requests_index = request_index_before\n raise\n\n model_changes = AttrDict()\n if request_changes:\n model_changes.requests = request_changes\n if sensor_changes:\n model_changes.sensors = sensor_changes\n if model_changes:\n raise Return(model_changes)", "response": "Inspect device requests and sensors update model\n Returns ------- Tornado future that resolves with :"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninspecting all or one requests on the device.", "response": "def inspect_requests(self, name=None, timeout=None):\n \"\"\"Inspect all or one requests on the device. Update requests index.\n\n Parameters\n ----------\n name : str or None, optional\n Name of the request or None to get all requests.\n timeout : float or None, optional\n Timeout for request inspection, None for no timeout\n\n Returns\n -------\n Tornado future that resolves with:\n\n changes : :class:`~katcp.core.AttrDict`\n AttrDict with keys ``added`` and ``removed`` (of type\n :class:`set`), listing the requests that have been added or removed\n respectively. Modified requests are listed in both. 
If there are\n no changes, returns ``None`` instead.\n\n Example structure:\n\n {'added': set(['req1', 'req2']),\n 'removed': set(['req10', 'req20'])}\n\n \"\"\"\n maybe_timeout = future_timeout_manager(timeout)\n if name is None:\n msg = katcp.Message.request('help')\n else:\n msg = katcp.Message.request('help', name)\n reply, informs = yield self.katcp_client.future_request(\n msg, timeout=maybe_timeout.remaining())\n if not reply.reply_ok():\n # If an unknown request is specified the desired result is to return\n # an empty list even though the request will fail\n if name is None or 'Unknown request' not in reply.arguments[1]:\n raise SyncError(\n 'Error reply during sync process for {}: {}'\n .format(self.bind_address_string, reply))\n\n # Get recommended timeouts hints for slow requests if the server\n # provides them\n timeout_hints_available = (\n self.katcp_client.protocol_flags.request_timeout_hints)\n if timeout_hints_available:\n timeout_hints = yield self._get_request_timeout_hints(\n name, timeout=maybe_timeout.remaining())\n else:\n timeout_hints = {}\n\n requests_old = set(self._requests_index.keys())\n requests_updated = set()\n for msg in informs:\n req_name = msg.arguments[0]\n req = {'name': req_name,\n 'description': msg.arguments[1],\n 'timeout_hint': timeout_hints.get(req_name)}\n requests_updated.add(req_name)\n self._update_index(self._requests_index, req_name, req)\n\n added, removed = self._difference(\n requests_old, requests_updated, name, self._requests_index)\n if added or removed:\n raise Return(AttrDict(added=added, removed=removed))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget request timeout hints from device.", "response": "def _get_request_timeout_hints(self, name=None, timeout=None):\n \"\"\"Get request timeout hints from device\n\n Parameters\n =========\n name : str or None, optional\n Name of the request or None to get all request timeout hints.\n timeout : float seconds\n Timeout for 
?request-timeout-hint\n\n Returns\n -------\n Tornado future that resolves with:\n\n timeout_hints : dict request_name -> timeout_hint\n\n where\n\n request_name : str\n Name of the request\n timeout_hint : float\n Suggested request timeout hint from device ?request-timeout_hint\n\n Note, if there is no request hint, there will be no entry in the\n dict. If you request the hint for a named request that has no hint, an\n empty dict will be returned.\n\n \"\"\"\n timeout_hints = {}\n req_msg_args = ['request-timeout-hint']\n if name:\n req_msg_args.append(name)\n req_msg = katcp.Message.request(*req_msg_args)\n reply, informs = yield self.katcp_client.future_request(\n req_msg, timeout=timeout)\n if not reply.reply_ok():\n raise SyncError('Error retrieving request timeout hints: \"{}\"\\n'\n 'in reply to request {}, continuing with sync'\n .format(reply, req_msg))\n for inform in informs:\n request_name = inform.arguments[0]\n timeout_hint = float(inform.arguments[1])\n if timeout_hint > 0:\n timeout_hints[request_name] = timeout_hint\n raise Return(timeout_hints)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ninspect all or one sensor on the device.", "response": "def inspect_sensors(self, name=None, timeout=None):\n \"\"\"Inspect all or one sensor on the device. Update sensors index.\n\n Parameters\n ----------\n name : str or None, optional\n Name of the sensor or None to get all sensors.\n timeout : float or None, optional\n Timeout for sensors inspection, None for no timeout\n\n Returns\n -------\n Tornado future that resolves with:\n\n changes : :class:`~katcp.core.AttrDict`\n AttrDict with keys ``added`` and ``removed`` (of type\n :class:`set`), listing the sensors that have been added or removed\n respectively. Modified sensors are listed in both. 
If there are no\n changes, returns ``None`` instead.\n\n Example structure:\n\n {'added': set(['sens1', 'sens2']),\n 'removed': set(['sens10', 'sens20'])}\n\n \"\"\"\n if name is None:\n msg = katcp.Message.request('sensor-list')\n else:\n msg = katcp.Message.request('sensor-list', name)\n\n reply, informs = yield self.katcp_client.future_request(\n msg, timeout=timeout)\n self._logger.debug('{} received {} sensor-list informs, reply: {}'\n .format(self.bind_address_string, len(informs), reply))\n if not reply.reply_ok():\n # If an unknown sensor is specified the desired result is to return\n # an empty list, even though the request will fail\n if name is None or 'Unknown sensor' not in reply.arguments[1]:\n raise SyncError('Error reply during sync process: {}'\n .format(reply))\n\n\n sensors_old = set(self._sensors_index.keys())\n sensors_updated = set()\n for msg in informs:\n sen_name = msg.arguments[0]\n sensors_updated.add(sen_name)\n sen = {'description': msg.arguments[1],\n 'units': msg.arguments[2],\n 'sensor_type': msg.arguments[3],\n 'params': msg.arguments[4:]}\n self._update_index(self._sensors_index, sen_name, sen)\n\n added, removed = self._difference(\n sensors_old, sensors_updated, name, self._sensors_index)\n\n for sensor_name in removed:\n if sensor_name in self._sensor_object_cache:\n del self._sensor_object_cache[sensor_name]\n\n if added or removed:\n raise Return(AttrDict(added=added, removed=removed))"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef future_check_sensor(self, name, update=None):\n exist = False\n yield self.until_data_synced()\n if name in self._sensors_index:\n exist = True\n else:\n if update or (update is None and self._update_on_lookup):\n yield self.inspect_sensors(name)\n exist = yield self.future_check_sensor(name, False)\n\n raise tornado.gen.Return(exist)", "response": "Check if the sensor exists."} {"SOURCE": "codesearchnet", "instruction": "How would 
you implement a function in Python 3 that\ngets the sensor object.", "response": "def future_get_sensor(self, name, update=None):\n \"\"\"Get the sensor object.\n\n Check if we have information for this sensor, if not connect to server\n and update (if allowed) to get information.\n\n Parameters\n ----------\n name : string\n Name of the sensor.\n update : bool or None, optional\n True allow inspect client to inspect katcp server if the sensor\n is not known.\n\n Returns\n -------\n Sensor created by :meth:`sensor_factory` or None if sensor not found.\n\n Notes\n -----\n Ensure that self.state.data_synced == True if yielding to future_get_sensor from\n a state-change callback, or a deadlock will occur.\n\n \"\"\"\n obj = None\n exist = yield self.future_check_sensor(name, update)\n if exist:\n sensor_info = self._sensors_index[name]\n obj = sensor_info.get('obj')\n if obj is None:\n sensor_type = katcp.Sensor.parse_type(\n sensor_info.get('sensor_type'))\n sensor_params = katcp.Sensor.parse_params(\n sensor_type,\n sensor_info.get('params'))\n obj = self.sensor_factory(\n name=name,\n sensor_type=sensor_type,\n description=sensor_info.get('description'),\n units=sensor_info.get('units'),\n params=sensor_params)\n self._sensors_index[name]['obj'] = obj\n self._sensor_object_cache[name] = obj\n\n raise tornado.gen.Return(obj)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef future_check_request(self, name, update=None):\n exist = False\n yield self.until_data_synced()\n if name in self._requests_index:\n exist = True\n else:\n if update or (update is None and self._update_on_lookup):\n yield self.inspect_requests(name)\n exist = yield self.future_check_request(name, False)\n raise tornado.gen.Return(exist)", "response": "Checks if the request exists."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef future_get_request(self, name, 
update=None):\n obj = None\n exist = yield self.future_check_request(name, update)\n if exist:\n request_info = self._requests_index[name]\n obj = request_info.get('obj')\n if obj is None:\n obj = self.request_factory(**request_info)\n self._requests_index[name]['obj'] = obj\n\n raise tornado.gen.Return(obj)", "response": "Get the request object."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _cb_inform_sensor_status(self, msg):\n timestamp = msg.arguments[0]\n num_sensors = int(msg.arguments[1])\n assert len(msg.arguments) == 2 + num_sensors * 3\n for n in xrange(num_sensors):\n name = msg.arguments[2 + n * 3]\n status = msg.arguments[3 + n * 3]\n value = msg.arguments[4 + n * 3]\n self.update_sensor(name, timestamp, status, value)", "response": "Update received for an sensor."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the sensors and requests available.", "response": "def _cb_inform_interface_change(self, msg):\n \"\"\"Update the sensors and requests available.\"\"\"\n self._logger.debug('cb_inform_interface_change(%s)', msg)\n self._interface_changed.set()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef simple_request(self, request, *args, **kwargs):\n # TODO (NM 2016-11-03) This method should really live on the lower level\n # katcp_client in client.py, is generally useful IMHO\n use_mid = kwargs.get('use_mid')\n timeout = kwargs.get('timeout')\n mid = kwargs.get('mid')\n msg = katcp.Message.request(request, *args, mid=mid)\n return self.katcp_client.future_request(msg, timeout, use_mid)", "response": "Create and send a request to the server."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncalculate the difference between the original and updated sets of keys.", "response": "def _difference(self, original_keys, updated_keys, name, item_index):\n 
\"\"\"Calculate difference between the original and updated sets of keys.\n\n Removed items will be removed from item_index, new items should have\n been added by the discovery process. (?help or ?sensor-list)\n\n This method is for use in inspect_requests and inspect_sensors only.\n\n Returns\n -------\n\n (added, removed)\n added : set of str\n Names of the keys that were added\n removed : set of str\n Names of the keys that were removed\n\n \"\"\"\n original_keys = set(original_keys)\n updated_keys = set(updated_keys)\n added_keys = updated_keys.difference(original_keys)\n removed_keys = set()\n if name is None:\n removed_keys = original_keys.difference(updated_keys)\n elif name not in updated_keys and name in original_keys:\n removed_keys = set([name])\n\n for key in removed_keys:\n if key in item_index:\n del(item_index[key])\n\n # Check the keys that was not added now or not lined up for removal,\n # and see if they changed.\n for key in updated_keys.difference(added_keys.union(removed_keys)):\n if item_index[key].get('_changed'):\n item_index[key]['_changed'] = False\n removed_keys.add(key)\n added_keys.add(key)\n\n return added_keys, removed_keys"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\npicking a random fruit.", "response": "def request_pick_fruit(self, req):\n \"\"\"Pick a random fruit.\"\"\"\n r = random.choice(self.FRUIT + [None])\n if r is None:\n return (\"fail\", \"No fruit.\")\n delay = random.randrange(1,5)\n req.inform(\"Picking will take %d seconds\" % delay)\n\n def pick_handler():\n self._fruit_result.set_value(r)\n req.reply(\"ok\", r)\n\n self.ioloop.add_callback(\n self.ioloop.call_later, delay, pick_handler)\n\n raise AsyncReply"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef update_in_ioloop(update):\n @wraps(update)\n def wrapped_update(self, sensor, reading):\n if get_thread_ident() == self._ioloop_thread_id:\n update(self, sensor, 
reading)\n else:\n self.ioloop.add_callback(update, self, sensor, reading)\n\n return wrapped_update", "response": "Decorator that ensures an update method is run in the tornado ioloop."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_strategy(cls, strategyName, inform_callback, sensor,\n *params, **kwargs):\n \"\"\"Factory method to create a strategy object.\n\n Parameters\n ----------\n strategyName : str\n Name of strategy.\n inform_callback : callable, signature inform_callback(sensor, reading)\n Callback to receive inform messages.\n sensor : Sensor object\n Sensor to sample.\n params : list of objects\n Custom sampling parameters for specified strategy.\n\n Keyword Arguments\n -----------------\n ioloop : tornado.ioloop.IOLoop instance, optional\n Tornado ioloop to use, otherwise tornado.ioloop.IOLoop.current()\n\n Returns\n -------\n strategy : :class:`SampleStrategy` object\n The created sampling strategy.\n\n \"\"\"\n if strategyName not in cls.SAMPLING_LOOKUP_REV:\n raise ValueError(\"Unknown sampling strategy '%s'. 
\"\n \"Known strategies are %s.\"\n % (strategyName, cls.SAMPLING_LOOKUP.values()))\n\n strategyType = cls.SAMPLING_LOOKUP_REV[strategyName]\n if strategyType == cls.NONE:\n return SampleNone(inform_callback, sensor, *params, **kwargs)\n elif strategyType == cls.AUTO:\n return SampleAuto(inform_callback, sensor, *params, **kwargs)\n elif strategyType == cls.EVENT:\n return SampleEvent(inform_callback, sensor, *params, **kwargs)\n elif strategyType == cls.DIFFERENTIAL:\n return SampleDifferential(inform_callback, sensor,\n *params, **kwargs)\n elif strategyType == cls.PERIOD:\n return SamplePeriod(inform_callback, sensor, *params, **kwargs)\n elif strategyType == cls.EVENT_RATE:\n return SampleEventRate(inform_callback, sensor, *params, **kwargs)\n elif strategyType == cls.DIFFERENTIAL_RATE:\n return SampleDifferentialRate(inform_callback, sensor,\n *params, **kwargs)", "response": "Returns an instance of the appropriate subclass of SampleStrategy."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_sampling_formatted(self):\n strategy = self.get_sampling()\n strategy = self.SAMPLING_LOOKUP[strategy]\n params = [str(p) for p in self._params]\n return strategy, params", "response": "Returns the current sampling strategy and parameters."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nattaches strategy to its sensor and send initial update.", "response": "def attach(self):\n \"\"\"Attach strategy to its sensor and send initial update.\"\"\"\n s = self._sensor\n self.update(s, s.read())\n self._sensor.attach(self)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ndetach strategy from its sensor and cancel ioloop callbacks.", "response": "def cancel(self):\n \"\"\"Detach strategy from its sensor and cancel ioloop callbacks.\"\"\"\n if self.OBSERVE_UPDATES:\n self.detach()\n 
self.ioloop.add_callback(self.cancel_timeouts)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nstart operating the strategy.", "response": "def start(self):\n \"\"\"Start operating the strategy.\n\n Subclasses that override start() should call the super method before\n it does anything that uses the ioloop. This will attach to the sensor\n as an observer if :attr:`OBSERVE_UPDATES` is True, and sets\n :attr:`_ioloop_thread_id` using `thread.get_ident()`.\n\n \"\"\"\n def first_run():\n self._ioloop_thread_id = get_thread_ident()\n if self.OBSERVE_UPDATES:\n self.attach()\n self.ioloop.add_callback(first_run)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ninforming strategy creator of the sensor status.", "response": "def inform(self, reading):\n \"\"\"Inform strategy creator of the sensor status.\"\"\"\n try:\n self._inform_callback(self._sensor, reading)\n except Exception:\n log.exception('Unhandled exception trying to send {!r} '\n 'for sensor {!r} of type {!r}'\n .format(reading, self._sensor.name, self._sensor.type))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef start(self, timeout=None):\n if not self._ioloop:\n raise RuntimeError('Call get_ioloop() or set_ioloop() first')\n\n self._ioloop.add_callback(self._running.set)\n\n if self._ioloop_managed:\n self._run_managed_ioloop()\n else: # TODO this seems inconsistent with what the docstring describes\n self._running.set()\n\n if timeout:\n return self._running.wait(timeout)", "response": "Start managed ioloop thread or do nothing if not managed."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nstop the ioloop and wait for it to finish.", "response": "def stop(self, timeout=None, callback=None):\n \"\"\"Stop ioloop (if managed) and call callback in ioloop before close.\n\n Parameters\n ----------\n timeout : float or None\n Seconds to 
wait for ioloop to have *started*.\n\n Returns\n -------\n stopped : thread-safe Future\n Resolves when the callback() is done\n\n \"\"\"\n if timeout:\n self._running.wait(timeout)\n\n stopped_future = Future()\n\n @gen.coroutine\n def _stop():\n if callback:\n try:\n yield gen.maybe_future(callback())\n except Exception:\n self._logger.exception('Unhandled exception calling stop callback')\n if self._ioloop_managed:\n self._logger.info('Stopping ioloop {0!r}'.format(self._ioloop))\n # Allow ioloop to run once before stopping so that callbacks\n # scheduled by callback() above get a chance to run.\n yield gen.moment\n self._ioloop.stop()\n self._running.clear()\n\n try:\n self._ioloop.add_callback(\n lambda: gen.chain_future(_stop(), stopped_future))\n except AttributeError:\n # Probably we have been shut-down already\n pass\n\n return stopped_future"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef join(self, timeout=None):\n if not self._ioloop_managed:\n # Do nothing if the loop is not managed\n return\n try:\n self._ioloop_thread.join(timeout)\n except AttributeError:\n raise RuntimeError('Cannot join if not started')", "response": "Join managed ioloop thread or do nothing if not managed."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef decorate_callable(self, callable_):\n @wraps(callable_)\n def decorated(*args, **kwargs):\n # Extract timeout from request itself or use default for ioloop wrapper\n timeout = kwargs.get('timeout')\n return self.call_in_ioloop(callable_, args, kwargs, timeout)\n\n decorated.__doc__ = '\\n\\n'.join((\n\"\"\"Wrapped async call. Will call in ioloop.\n\nThis call will block until the original callable has finished running on the ioloop, and\nwill pass on the return value. 
If the original callable returns a future, this call will\nwait for the future to resolve and return the value or raise the exception that the future\nresolves with.\n\nOriginal Callable Docstring\n---------------------------\n\"\"\",\n textwrap.dedent(decorated.__doc__ or '')))\n\n return decorated", "response": "Decorate a callable to use call_in_ioloop"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef update(self, sensor, reading):\n parents = list(self._child_to_parents[sensor])\n for parent in parents:\n self.recalculate(parent, (sensor,))", "response": "Update callback used by sensors to notify obervers of changes."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nadd a new sensor to the tree.", "response": "def _add_sensor(self, sensor):\n \"\"\"Add a new sensor to the tree.\n\n Parameters\n ----------\n sensor : :class:`katcp.Sensor` object\n New sensor to add to the tree.\n\n \"\"\"\n self._parent_to_children[sensor] = set()\n self._child_to_parents[sensor] = set()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_links(self, parent, children):\n new_sensors = []\n if parent not in self:\n self._add_sensor(parent)\n new_sensors.append(parent)\n for child in children:\n if child not in self:\n self._add_sensor(child)\n new_sensors.append(child)\n self._parent_to_children[parent].add(child)\n self._child_to_parents[child].add(parent)\n\n self.recalculate(parent, children)\n for sensor in new_sensors:\n sensor.attach(self)", "response": "Create dependency links from parent to child."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nremoving dependency links from parent to children.", "response": "def remove_links(self, parent, children):\n \"\"\"Remove dependency links from parent to child.\n\n Any sensors that have no dependency links are removed from the tree and\n the tree detaches from 
each sensor removed. After all dependency links\n have been removed the parent is recalculated. Links that don't exist\n are ignored.\n\n Parameters\n ----------\n parent : :class:`katcp.Sensor` object\n The sensor that used to depend on children.\n children : sequence of :class:`katcp.Sensor` objects\n The sensors that parent used to depend on.\n\n \"\"\"\n old_sensors = []\n if parent in self:\n for child in children:\n if child not in self:\n continue\n self._parent_to_children[parent].discard(child)\n self._child_to_parents[child].discard(parent)\n if not self._child_to_parents[child] and \\\n not self._parent_to_children[child]:\n self._remove_sensor(child)\n old_sensors.append(child)\n if not self._child_to_parents[parent] and \\\n not self._parent_to_children[parent]:\n self._remove_sensor(parent)\n old_sensors.append(parent)\n\n for sensor in old_sensors:\n sensor.detach(self)\n self.recalculate(parent, children)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef children(self, parent):\n if parent not in self._parent_to_children:\n raise ValueError(\"Parent sensor %r not in tree.\" % parent)\n return self._parent_to_children[parent].copy()", "response": "Return set of children of parent."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef parents(self, child):\n if child not in self._child_to_parents:\n raise ValueError(\"Child sensor %r not in tree.\" % child)\n return self._child_to_parents[child].copy()", "response": "Return set of parents of child."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding a pair of boolean sensors. 
Parent depends on child.", "response": "def add(self, parent, child):\n \"\"\"Add a pair of boolean sensors.\n\n Parent depends on child.\n\n Parameters\n ----------\n parent : boolean instance of :class:`katcp.Sensor`\n The sensor that depends on child.\n child : boolean instance of :class:`katcp.Sensor`\n The sensor parent depends on.\n\n \"\"\"\n if parent not in self:\n if parent.stype != \"boolean\":\n raise ValueError(\"Parent sensor %r is not boolean\" % child)\n self._parent_to_not_ok[parent] = set()\n if child not in self:\n if child.stype != \"boolean\":\n raise ValueError(\"Child sensor %r is not booelan\" % child)\n self._parent_to_not_ok[child] = set()\n self.add_links(parent, (child,))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nremove a dependency between parent and child.", "response": "def remove(self, parent, child):\n \"\"\"Remove a dependency between parent and child.\n\n Parameters\n ----------\n parent : boolean instance of :class:`katcp.Sensor`\n The sensor that used to depend on child.\n child : boolean instance of :class:`katcp.Sensor` or None\n The sensor parent used to depend on.\n\n \"\"\"\n self.remove_links(parent, (child,))\n if parent not in self and parent in self._parent_to_not_ok:\n del self._parent_to_not_ok[parent]\n if child not in self and child in self._parent_to_not_ok:\n del self._parent_to_not_ok[child]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd an aggregation rule.", "response": "def add(self, parent, rule_function, children):\n \"\"\"Create an aggregation rule.\n\n Parameters\n ----------\n parent : :class:`katcp.Sensor` object\n The aggregate sensor.\n rule_function : f(parent, children)\n Function to update the parent sensor value.\n children : sequence of :class:`katcp.Sensor` objects\n The sensors the aggregate sensor depends on.\n\n \"\"\"\n if parent in self._aggregates or parent in self._incomplete_aggregates:\n raise ValueError(\"Sensor %r 
already has an aggregate rule \"\n \"associated\" % parent)\n self._aggregates[parent] = (rule_function, children)\n self.add_links(parent, children)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nadd an aggregation rule before child sensors are present.", "response": "def add_delayed(self, parent, rule_function, child_names):\n \"\"\"Create an aggregation rule before child sensors are present.\n\n Parameters\n ----------\n parent : :class:`katcp.Sensor` object\n The aggregate sensor.\n rule_function : f(parent, children)\n Function to update the parent sensor value.\n child_names : sequence of str\n The names of the sensors the aggregate sensor depends on. These\n sensor must be registered using :meth:`register_sensor` to become\n active.\n\n \"\"\"\n if parent in self._aggregates or parent in self._incomplete_aggregates:\n raise ValueError(\"Sensor %r already has an aggregate rule\"\n \" associated\" % parent)\n reg = self._registered_sensors\n names = set(name for name in child_names if name not in reg)\n sensors = set(reg[name] for name in child_names if name in reg)\n if names:\n self._incomplete_aggregates[parent] = (rule_function, names,\n sensors)\n else:\n self.add(parent, rule_function, sensors)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef register_sensor(self, child):\n child_name = self._get_sensor_reference(child)\n if child_name in self._registered_sensors:\n raise ValueError(\"Sensor %r already registered with aggregate\"\n \" tree\" % child)\n self._registered_sensors[child_name] = child\n completed = []\n for parent, (_rule, names, sensors) in \\\n self._incomplete_aggregates.iteritems():\n if child_name in names:\n names.remove(child_name)\n sensors.add(child)\n if not names:\n completed.append(parent)\n for parent in completed:\n rule_function, _names, sensors = \\\n self._incomplete_aggregates[parent]\n del self._incomplete_aggregates[parent]\n 
self.add(parent, rule_function, sensors)", "response": "Register a sensor required by an aggregate sensor registered with add_delayed."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the child sensor from its reference.", "response": "def _child_from_reference(self, reference):\n \"\"\"Returns the child sensor from its reference.\n\n Parameters\n ----------\n reference : str\n Reference to sensor (typically its name).\n\n Returns\n -------\n child : :class:`katcp.Sensor` object\n A child sensor linked to one or more aggregate sensors.\n\n \"\"\"\n for child in self._child_to_parents:\n if self._get_sensor_reference(child) == reference:\n return child"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef remove(self, parent):\n if parent not in self._aggregates:\n raise ValueError(\"Sensor %r does not have an aggregate rule \"\n \"associated\" % parent)\n children = self.children(parent)\n try:\n self.remove_links(parent, children)\n except Exception:\n pass\n del self._aggregates[parent]", "response": "Removes an aggregation rule."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef request_timeout_hint(timeout_hint):\n if timeout_hint is not None:\n timeout_hint = float(timeout_hint)\n\n def decorator(handler):\n handler.request_timeout_hint = timeout_hint\n return handler\n\n return decorator", "response": "Decorator that adds recommended client timeout hint to a request for a specific KATCP server."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nwrapping future that will resolve with arguments needed by make_reply.", "response": "def async_make_reply(msgname, types, arguments_future, major):\n \"\"\"Wrap future that will resolve with arguments needed by make_reply().\"\"\"\n arguments = yield arguments_future\n raise gen.Return(make_reply(msgname, types, arguments, major))"} 
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef unpack_types(types, args, argnames, major):\n if len(types) > 0:\n multiple = types[-1]._multiple\n else:\n multiple = False\n\n if len(types) < len(args) and not multiple:\n raise FailReply(\"Too many parameters given.\")\n\n # Wrap the types in parameter objects\n params = []\n for i, kattype in enumerate(types):\n name = \"\"\n if i < len(argnames):\n name = argnames[i]\n params.append(Parameter(i+1, name, kattype, major))\n\n if len(args) > len(types) and multiple:\n for i in range(len(types), len(args)):\n params.append(Parameter(i+1, name, kattype, major))\n\n # if len(args) < len(types) this passes in None for missing args\n return map(lambda param, arg: param.unpack(arg), params, args)", "response": "Unpacks the types of the arguments according to types list."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef pack_types(types, args, major):\n if len(types) > 0:\n multiple = types[-1]._multiple\n else:\n multiple = False\n\n if len(types) < len(args) and not multiple:\n raise ValueError(\"Too many arguments to pack.\")\n\n if len(args) < len(types):\n # this passes in None for missing args\n retvals = map(lambda ktype, arg: ktype.pack(arg, major=major),\n types, args)\n else:\n retvals = [ktype.pack(arg, major=major)\n for ktype, arg in zip(types, args)]\n\n if len(args) > len(types) and multiple:\n last_ktype = types[-1]\n for arg in args[len(types):]:\n retvals.append(last_ktype.pack(arg, major=major))\n\n return retvals", "response": "Packs the types according to the types list."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef pack(self, value, nocheck=False, major=DEFAULT_KATCP_MAJOR):\n if value is None:\n value = self.get_default()\n if value is None:\n raise ValueError(\"Cannot pack a None value.\")\n if not nocheck:\n self.check(value, major)\n 
return self.encode(value, major)", "response": "Pack the value into a KATCP parameter."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef unpack(self, packed_value, major=DEFAULT_KATCP_MAJOR):\n if packed_value is None:\n value = self.get_default()\n else:\n try:\n value = self.decode(packed_value, major)\n except Exception:\n raise\n if value is not None:\n self.check(value, major)\n return value", "response": "Parse a KATCP parameter into an object."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef check(self, value, major):\n if self._min is not None and value < self._min:\n raise ValueError(\"Integer %d is lower than minimum %d.\"\n % (value, self._min))\n if self._max is not None and value > self._max:\n raise ValueError(\"Integer %d is higher than maximum %d.\"\n % (value, self._max))", "response": "Check whether the value is between the minimum and maximum. Raise a ValueError if it is not."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchecking whether the value is in the set of allowed values. 
Raise a ValueError if it is not.", "response": "def check(self, value, major):\n \"\"\"Check whether the value in the set of allowed values.\n\n Raise a ValueError if it is not.\n\n \"\"\"\n if self._case_insensitive:\n value = value.lower()\n values = self._valid_values_lower\n caseflag = \" (case-insensitive)\"\n else:\n values = self._valid_values\n caseflag = \"\"\n if value not in values:\n raise ValueError(\"Discrete value '%s' is not one of %s%s.\"\n % (value, list(self._values), caseflag))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef check(self, value, major):\n for v in value:\n super(DiscreteMulti, self).check(v, major)", "response": "Check that each item in the value list is in the allowed set."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nunpack the parameter using its kattype.", "response": "def unpack(self, value):\n \"\"\"Unpack the parameter using its kattype.\n\n Parameters\n ----------\n packed_value : str\n The unescaped KATCP string to unpack.\n\n Returns\n -------\n value : object\n The unpacked value.\n\n \"\"\"\n # Wrap errors in FailReplies with information identifying the parameter\n try:\n return self._kattype.unpack(value, self.major)\n except ValueError, message:\n raise FailReply(\"Error in parameter %s (%s): %s\" %\n (self.position, self.name, message))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef return_future(fn):\n @wraps(fn)\n def decorated(*args, **kwargs):\n return gen.maybe_future(fn(*args, **kwargs))\n\n return decorated", "response": "Decorator that turns a synchronous function into one returning a future."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nconstruct a function that returns True if a name matches a pattern.", "response": "def construct_name_filter(pattern):\n \"\"\"Return a function for filtering sensor 
names based on a pattern.\n\n Parameters\n ----------\n pattern : None or str\n If None, the returned function matches all names.\n If pattern starts and ends with '/' the text between the slashes\n is used as a regular expression to search the names.\n Otherwise the pattern must match the name of the sensor exactly.\n\n Returns\n -------\n exact : bool\n Return True if pattern is expected to match exactly. Used to\n determine whether having no matching sensors constitutes an error.\n filter_func : f(str) -> bool\n Function for determining whether a name matches the pattern.\n\n \"\"\"\n if pattern is None:\n return False, lambda name: True\n if pattern.startswith('/') and pattern.endswith('/'):\n name_re = re.compile(pattern[1:-1])\n return False, lambda name: name_re.search(name) is not None\n return True, lambda name: name == pattern"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef disconnect(self, reason):\n self._server._disconnect_client(self._conn_key, self, reason)", "response": "Disconnect this client connection for specified reason"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef inform(self, msg):\n assert (msg.mtype == Message.INFORM)\n return self._send_message(msg)", "response": "Send an inform message to a particular client."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef reply_inform(self, inform, orig_req):\n assert (inform.mtype == Message.INFORM)\n assert (inform.name == orig_req.name)\n inform.mid = orig_req.mid\n return self._send_message(inform)", "response": "Send an inform message to an earlier request."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nsend an inform message to all clients.", "response": "def mass_inform(self, msg):\n \"\"\"Send an inform message to all clients.\n\n Parameters\n ----------\n msg 
: Message object\n The inform message to send.\n\n \"\"\"\n assert (msg.mtype == Message.INFORM)\n return self._mass_send_message(msg)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nsend an asynchronous reply to an earlier request.", "response": "def reply(self, reply, orig_req):\n \"\"\"Send an asynchronous reply to an earlier request.\n\n Parameters\n ----------\n reply : Message object\n The reply message to send.\n orig_req : Message object\n The request message being replied to. The reply message's\n id is overridden with the id from orig_req before the\n reply is sent.\n\n \"\"\"\n assert (reply.mtype == Message.REPLY)\n assert reply.name == orig_req.name\n reply.mid = orig_req.mid\n return self._send_message(reply)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ninstalling the server on its IOLoop optionally starting the IOLoop.", "response": "def start(self, timeout=None):\n \"\"\"Install the server on its IOLoop, optionally starting the IOLoop.\n\n Parameters\n ----------\n timeout : float or None, optional\n Time in seconds to wait for server thread to start.\n\n \"\"\"\n if self._running.isSet():\n raise RuntimeError('Server already started')\n self._stopped.clear()\n # Make sure we have an ioloop\n self.ioloop = self._ioloop_manager.get_ioloop()\n self._ioloop_manager.start()\n # Set max_buffer_size to ensure streams are closed\n # if too-large messages are received\n self._tcp_server = tornado.tcpserver.TCPServer(\n self.ioloop, max_buffer_size=self.MAX_MSG_SIZE)\n self._tcp_server.handle_stream = self._handle_stream\n self._server_sock = self._bind_socket(self._bindaddr)\n self._bindaddr = self._server_sock.getsockname()\n\n self.ioloop.add_callback(self._install)\n if timeout:\n return self._running.wait(timeout)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef stop(self, timeout=1.0):\n if timeout:\n self._running.wait(timeout)\n return 
self._ioloop_manager.stop(callback=self._uninstall)", "response": "Stop a running server."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nrejoin the server thread.", "response": "def join(self, timeout=None):\n \"\"\"Rejoin the server thread.\n\n Parameters\n ----------\n timeout : float or None, optional\n Time in seconds to wait for the thread to finish.\n\n Notes\n -----\n If the ioloop is not managed, this function will block until the server\n port is closed, meaning a new server can be started on the same port.\n\n \"\"\"\n t0 = time.time()\n self._ioloop_manager.join(timeout=timeout)\n if timeout:\n self._stopped.wait(timeout - (time.time() - t0))"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a listening server socket.", "response": "def _bind_socket(self, bindaddr):\n \"\"\"Create a listening server socket.\"\"\"\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n sock.setblocking(0)\n try:\n sock.bind(bindaddr)\n except Exception:\n self._logger.exception(\"Unable to bind to %s\" % str(bindaddr))\n raise\n sock.listen(self.BACKLOG)\n return sock"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nhandle a new connection as a tornado. iostream. IOStream instance.", "response": "def _handle_stream(self, stream, address):\n \"\"\"Handle a new connection as a tornado.iostream.IOStream instance.\"\"\"\n try:\n assert get_thread_ident() == self.ioloop_thread_id\n stream.set_close_callback(partial(self._stream_closed_callback,\n stream))\n # Our message packets are small, don't delay sending them.\n stream.set_nodelay(True)\n stream.max_write_buffer_size = self.MAX_WRITE_BUFFER_SIZE\n\n # Abuse IOStream object slightly by adding 'address' and 'closing'\n # attributes. 
Use nasty prefix to prevent naming collisions.\n stream.KATCPServer_address = address\n # Flag to indicate that no more write should be accepted so that\n # we can flush the write buffer when closing a connection\n stream.KATCPServer_closing = False\n\n client_conn = self.client_connection_factory(self, stream)\n self._connections[stream] = client_conn\n try:\n yield gen.maybe_future(self._device.on_client_connect(client_conn))\n except Exception:\n # If on_client_connect fails there is no reason to continue\n # trying to handle this connection. Try and send exception info\n # to the client and disconnect\n e_type, e_value, trace = sys.exc_info()\n reason = \"\\n\".join(traceback.format_exception(\n e_type, e_value, trace, self._tb_limit))\n log_msg = 'Device error initialising connection {0}'.format(reason)\n self._logger.error(log_msg)\n stream.write(str(Message.inform('log', log_msg)))\n stream.close(exc_info=True)\n else:\n self._line_read_loop(stream, client_conn)\n except Exception:\n self._logger.error('Unhandled exception trying '\n 'to handle new connection', exc_info=True)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the network address of a connection stream.", "response": "def get_address(self, stream):\n \"\"\"Text representation of the network address of a connection stream.\n\n Notes\n -----\n This method is thread-safe\n\n \"\"\"\n try:\n addr = \":\".join(str(part) for part in stream.KATCPServer_address)\n except AttributeError:\n # Something weird happened, but keep trucking\n addr = '<error>'\n self._logger.warn('Could not determine address of stream',\n exc_info=True)\n return addr"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef send_message(self, stream, msg):\n assert get_thread_ident() == self.ioloop_thread_id\n try:\n if stream.KATCPServer_closing:\n raise RuntimeError('Stream is closing so we cannot '\n 'accept any more writes')\n 
return stream.write(str(msg) + '\\n')\n except Exception:\n addr = self.get_address(stream)\n self._logger.warn('Could not send message {0!r} to {1}'\n .format(str(msg), addr), exc_info=True)\n stream.close(exc_info=True)", "response": "Send an arbitrary message to a particular client."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef flush_on_close(self, stream):\n assert get_thread_ident() == self.ioloop_thread_id\n # Prevent futher writes\n stream.KATCPServer_closing = True\n # Write empty message to get future that resolves when buffer is flushed\n return stream.write('\\n')", "response": "Flush tornado iostream write buffer and prevent further writes."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nthread - safe version of send_message that returns a Future that resolves with the future s result.", "response": "def send_message_from_thread(self, stream, msg):\n \"\"\"Thread-safe version of send_message() returning a Future instance.\n\n Returns\n -------\n A Future that will resolve without raising an exception as soon as\n the call to send_message() completes. This does not guarantee that the\n message has been delivered yet. If the call to send_message() failed,\n the exception will be logged, and the future will resolve with the\n exception raised. Since a failed call to send_message() will result\n in the connection being closed, no real error handling apart from\n logging will be possible.\n\n Notes\n -----\n This method is thread-safe. 
If called from within the ioloop,\n send_message is called directly and a resolved tornado.concurrent.Future\n is returned, otherwise a callback is submitted to the ioloop that will\n resolve a thread-safe concurrent.futures.Future instance.\n\n \"\"\"\n return self.call_from_thread(partial(self.send_message, stream, msg))"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nsend a message to all connected clients.", "response": "def mass_send_message(self, msg):\n \"\"\"Send a message to all connected clients.\n\n Notes\n -----\n This method can only be called in the IOLoop thread.\n\n \"\"\"\n for stream in self._connections.keys():\n if not stream.closed():\n # Don't cause noise by trying to write to already closed streams\n self.send_message(stream, msg)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsends a reply message to the client connection.", "response": "def reply_with_message(self, rep_msg):\n \"\"\"Send a pre-created reply message to the client connection.\n\n Will check that rep_msg.name matches the bound request.\n\n \"\"\"\n self._post_reply()\n return self.client_connection.reply(rep_msg, self.msg)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef on_message(self, client_conn, msg):\n MAX_QUEUE_SIZE = 30\n if len(self._msg_queue) >= MAX_QUEUE_SIZE:\n # This should never happen if callers to handle_message wait\n # for its futures to resolve before sending another message.\n # NM 2014-10-06: Except when there are multiple clients. 
Oops.\n raise RuntimeError('MessageHandlerThread unhandled '\n 'message queue full, not handling message')\n ready_future = Future()\n self._msg_queue.append((ready_future, client_conn, msg))\n self._wake.set()\n return ready_future", "response": "Handle message.\n\n Returns\n -------\n ready : Future\n A future that will resolve once we're ready, else None.\n\n Notes\n -----\n *on_message* should not be called again until *ready* has resolved."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef stop(self, timeout=1.0):\n if timeout:\n self._running.wait(timeout)\n self._running.clear()\n # Make sure to wake the run thread.\n self._wake.set()", "response": "Stop the handler thread."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create_log_inform(self, level_name, msg, name, timestamp=None):\n if timestamp is None:\n timestamp = time.time()\n\n katcp_version = self.PROTOCOL_INFO.major\n timestamp_msg = ('%.6f' % timestamp\n if katcp_version >= SEC_TS_KATCP_MAJOR\n else str(int(timestamp*1000)))\n return Message.inform(\"log\", level_name, timestamp_msg, name, msg)", "response": "Create a katcp logging inform message."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef handle_message(self, client_conn, msg):\n # log messages received so that no one else has to\n self._logger.debug('received: {0!s}'.format(msg))\n\n if msg.mtype == msg.REQUEST:\n return self.handle_request(client_conn, msg)\n elif msg.mtype == msg.INFORM:\n return self.handle_inform(client_conn, msg)\n elif msg.mtype == msg.REPLY:\n return self.handle_reply(client_conn, msg)\n else:\n reason = \"Unexpected message type received by server ['%s'].\" \\\n % (msg,)\n client_conn.inform(self.create_log_inform(\"error\", reason, \"root\"))", "response": "Handle a single message from the server."} {"SOURCE": "codesearchnet", "instruction": "Write a 
Python 3 script for\ndispatching a request message to the appropriate method.", "response": "def handle_request(self, connection, msg):\n \"\"\"Dispatch a request message to the appropriate method.\n\n Parameters\n ----------\n connection : ClientConnection object\n The client connection the message was from.\n msg : Message object\n The request message to process.\n\n Returns\n -------\n done_future : Future or None\n Returns Future for async request handlers that will resolve when\n done, or None for sync request handlers once they have completed.\n\n \"\"\"\n send_reply = True\n # TODO Should check presence of Message-ids against protocol flags and\n # raise an error as needed.\n if msg.name in self._request_handlers:\n req_conn = ClientRequestConnection(connection, msg)\n handler = self._request_handlers[msg.name]\n try:\n reply = handler(self, req_conn, msg)\n # If we get a future, assume this is an async message handler\n # that will resolve the future with the reply message when it\n # is complete. 
Attach a message-sending callback to the future,\n # and return the future.\n if gen.is_future(reply):\n concurrent = getattr(handler, '_concurrent_reply', False)\n concurrent_str = ' CONCURRENT' if concurrent else ''\n\n done_future = Future()\n def async_reply(f):\n try:\n connection.reply(f.result(), msg)\n self._logger.debug(\"%s FUTURE%s replied\",\n msg.name, concurrent_str)\n except FailReply, e:\n reason = str(e)\n self._logger.error(\"Request %s FUTURE%s FAIL: %s\",\n msg.name, concurrent_str, reason)\n reply = Message.reply(msg.name, \"fail\", reason)\n connection.reply(reply, msg)\n except AsyncReply:\n self._logger.debug(\"%s FUTURE ASYNC OK\"\n % (msg.name,))\n except Exception:\n error_reply = self.create_exception_reply_and_log(\n msg, sys.exc_info())\n connection.reply(error_reply, msg)\n finally:\n done_future.set_result(None)\n\n # TODO When using the return_reply() decorator the future\n # returned is not currently threadsafe, must either deal\n # with it here, or in kattypes.py. Would be nice if we don't\n # have to always fall back to adding a callback, or wrapping\n # a thread-safe future. 
Supporting sync-with-thread and\n # async futures is turning out to be a pain in the ass ;)\n self.ioloop.add_callback(reply.add_done_callback, async_reply)\n # reply.add_done_callback(async_reply)\n\n if concurrent:\n # Return immediately if this is a concurrent handler\n self._logger.debug(\"%s FUTURE CONCURRENT OK\", msg.name)\n return\n else:\n self._logger.debug(\"%s FUTURE OK\", msg.name)\n return done_future\n else:\n assert (reply.mtype == Message.REPLY)\n assert (reply.name == msg.name)\n self._logger.debug(\"%s OK\" % (msg.name,))\n except AsyncReply, e:\n self._logger.debug(\"%s ASYNC OK\" % (msg.name,))\n send_reply = False\n except FailReply, e:\n reason = str(e)\n self._logger.error(\"Request %s FAIL: %s\" % (msg.name, reason))\n reply = Message.reply(msg.name, \"fail\", reason)\n except Exception:\n reply = self.create_exception_reply_and_log(msg, sys.exc_info())\n else:\n self._logger.error(\"%s INVALID: Unknown request.\" % (msg.name,))\n reply = Message.reply(msg.name, \"invalid\", \"Unknown request.\")\n\n if send_reply:\n connection.reply(reply, msg)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef handle_inform(self, connection, msg):\n if msg.name in self._inform_handlers:\n try:\n self._inform_handlers[msg.name](self, connection, msg)\n except Exception:\n e_type, e_value, trace = sys.exc_info()\n reason = \"\\n\".join(traceback.format_exception(\n e_type, e_value, trace, self._tb_limit))\n self._logger.error(\"Inform %s FAIL: %s\" % (msg.name, reason))\n else:\n self._logger.warn(\"%s INVALID: Unknown inform.\" % (msg.name,))", "response": "Dispatch an inform message to the appropriate method."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nsend an inform message to a particular client.", "response": "def inform(self, connection, msg):\n \"\"\"Send an inform message to a particular client.\n\n Should only be used for asynchronous 
informs. Informs\n that are part of the response to a request should use\n :meth:`reply_inform` so that the message identifier\n from the original request can be attached to the\n inform.\n\n Parameters\n ----------\n connection : ClientConnection object\n The client to send the message to.\n msg : Message object\n The inform message to send.\n\n \"\"\"\n if isinstance(connection, ClientRequestConnection):\n self._logger.warn(\n 'Deprecation warning: do not use self.inform() '\n 'within a reply handler context -- use req.inform()\\n'\n 'Traceback:\\n %s', \"\".join(traceback.format_stack()))\n # Get the underlying ClientConnection instance\n connection = connection.client_connection\n connection.inform(msg)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef mass_inform(self, msg):\n assert (msg.mtype == Message.INFORM)\n self._server.mass_send_message_from_thread(msg)", "response": "Send an inform message to all clients."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef reply(self, connection, reply, orig_req):\n if isinstance(connection, ClientRequestConnection):\n self._logger.warn(\n 'Deprecation warning: do not use self.reply() '\n 'within a reply handler context -- use req.reply(*msg_args)\\n'\n 'or req.reply_with_message(msg) Traceback:\\n %s',\n \"\".join(traceback.format_stack()))\n # Get the underlying ClientConnection instance\n connection = connection.client_connection\n connection.reply(reply, orig_req)", "response": "Sends an asynchronous reply to an earlier request."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef reply_inform(self, connection, inform, orig_req):\n if isinstance(connection, ClientRequestConnection):\n self._logger.warn(\n 'Deprecation warning: do not use self.reply_inform() '\n 'within a reply handler context -- '\n 'use req.inform(*inform_arguments)\\n'\n 'Traceback:\\n %s', 
\"\".join(traceback.format_stack()))\n # Get the underlying ClientConnection instance\n connection = connection.client_connection\n connection.reply_inform(inform, orig_req)", "response": "Send an inform to an earlier request."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_ioloop(self, ioloop=None):\n self._server.set_ioloop(ioloop)\n self.ioloop = self._server.ioloop", "response": "Sets the tornado. ioloop. IOLoop instance to use."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_concurrency_options(self, thread_safe=True, handler_thread=True):\n if handler_thread:\n assert thread_safe, \"handler_thread=True requires thread_safe=True\"\n self._server.client_connection_factory = (\n ThreadsafeClientConnection if thread_safe else ClientConnection)\n if handler_thread:\n self._handler_thread = MessageHandlerThread(\n self.handle_message, self.create_log_inform, self._logger)\n self.on_message = self._handler_thread.on_message\n else:\n self.on_message = return_future(self.handle_message)\n self._handler_thread = None\n\n self._concurrency_options = ObjectDict(\n thread_safe=thread_safe, handler_thread=handler_thread)", "response": "Sets the concurrency options for this device server."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef start(self, timeout=None):\n if self._handler_thread and self._handler_thread.isAlive():\n raise RuntimeError('Message handler thread already started')\n self._server.start(timeout)\n self.ioloop = self._server.ioloop\n if self._handler_thread:\n self._handler_thread.set_ioloop(self.ioloop)\n self._handler_thread.start(timeout)", "response": "Start the server in a new thread."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nrejoins the server thread.", "response": "def join(self, 
timeout=None):\n \"\"\"Rejoin the server thread.\n\n Parameters\n ----------\n timeout : float or None, optional\n Time in seconds to wait for the thread to finish.\n\n \"\"\"\n self._server.join(timeout)\n if self._handler_thread:\n self._handler_thread.join(timeout)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef stop(self, timeout=1.0):\n stopped = self._server.stop(timeout)\n if self._handler_thread:\n self._handler_thread.stop(timeout)\n\n return stopped", "response": "Stop a running server from another thread."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nblock for ioloop to complete a loop if called from another thread.", "response": "def sync_with_ioloop(self, timeout=None):\n \"\"\"Block for ioloop to complete a loop if called from another thread.\n\n Returns a future if called from inside the ioloop.\n\n Raises concurrent.futures.TimeoutError if timed out while blocking.\n\n \"\"\"\n in_ioloop = self._server.in_ioloop_thread()\n if in_ioloop:\n f = tornado_Future()\n else:\n f = Future()\n\n def cb():\n f.set_result(None)\n\n self.ioloop.add_callback(cb)\n\n if in_ioloop:\n return f\n else:\n f.result(timeout)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef on_client_connect(self, client_conn):\n assert get_thread_ident() == self._server.ioloop_thread_id\n self._client_conns.add(client_conn)\n self._strategies[client_conn] = {} # map sensors -> sampling strategies\n\n katcp_version = self.PROTOCOL_INFO.major\n if katcp_version >= VERSION_CONNECT_KATCP_MAJOR:\n client_conn.inform(Message.inform(\n \"version-connect\", \"katcp-protocol\", self.PROTOCOL_INFO))\n client_conn.inform(Message.inform(\n \"version-connect\", \"katcp-library\",\n \"katcp-python-%s\" % katcp.__version__))\n client_conn.inform(Message.inform(\n \"version-connect\", \"katcp-device\",\n self.version(), self.build_state()))\n else:\n 
client_conn.inform(Message.inform(\"version\", self.version()))\n client_conn.inform(Message.inform(\"build-state\", self.build_state()))", "response": "Inform client of build state and version on connect."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef clear_strategies(self, client_conn, remove_client=False):\n assert get_thread_ident() == self._server.ioloop_thread_id\n\n getter = (self._strategies.pop if remove_client\n else self._strategies.get)\n strategies = getter(client_conn, None)\n if strategies is not None:\n for sensor, strategy in list(strategies.items()):\n strategy.cancel()\n del strategies[sensor]", "response": "Clears the sensor strategies of a client connection."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef on_client_disconnect(self, client_conn, msg, connection_valid):\n f = tornado_Future()\n @gen.coroutine\n def remove_strategies():\n self.clear_strategies(client_conn, remove_client=True)\n if connection_valid:\n client_conn.inform(Message.inform(\"disconnect\", msg))\n yield client_conn.flush_on_close()\n\n try:\n self._client_conns.remove(client_conn)\n self.ioloop.add_callback(lambda: chain_future(remove_strategies(), f))\n except Exception:\n f.set_exc_info(sys.exc_info())\n return f", "response": "Called when a client connection is about to be disconnected."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef remove_sensor(self, sensor):\n if isinstance(sensor, basestring):\n sensor_name = sensor\n else:\n sensor_name = sensor.name\n sensor = self._sensors.pop(sensor_name)\n\n def cancel_sensor_strategies():\n for conn_strategies in self._strategies.values():\n strategy = conn_strategies.pop(sensor, None)\n if strategy:\n strategy.cancel()\n self.ioloop.add_callback(cancel_sensor_strategies)", "response": "Removes a sensor from the 
device."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nfetch the sensor with the given name.", "response": "def get_sensor(self, sensor_name):\n \"\"\"Fetch the sensor with the given name.\n\n Parameters\n ----------\n sensor_name : str\n Name of the sensor to retrieve.\n\n Returns\n -------\n sensor : Sensor object\n The sensor with the given name.\n\n \"\"\"\n sensor = self._sensors.get(sensor_name, None)\n if not sensor:\n raise ValueError(\"Unknown sensor '%s'.\" % (sensor_name,))\n return sensor"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nhalt the device server.", "response": "def request_halt(self, req, msg):\n \"\"\"Halt the device server.\n\n Returns\n -------\n success : {'ok', 'fail'}\n Whether scheduling the halt succeeded.\n\n Examples\n --------\n ::\n\n ?halt\n !halt ok\n\n \"\"\"\n f = Future()\n @gen.coroutine\n def _halt():\n req.reply(\"ok\")\n yield gen.moment\n self.stop(timeout=None)\n raise AsyncReply\n\n self.ioloop.add_callback(lambda: chain_future(_halt(), f))\n return f"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns help on the available requests.", "response": "def request_help(self, req, msg):\n \"\"\"Return help on the available requests.\n\n Return a description of the available requests using a sequence of\n #help informs.\n\n Parameters\n ----------\n request : str, optional\n The name of the request to return help for (the default is to\n return help for all requests).\n\n Informs\n -------\n request : str\n The name of a request.\n description : str\n Documentation for the named request.\n\n Returns\n -------\n success : {'ok', 'fail'}\n Whether sending the help succeeded.\n informs : int\n Number of #help inform messages sent.\n\n Examples\n --------\n ::\n\n ?help\n #help halt ...description...\n #help help ...description...\n ...\n !help ok 5\n\n ?help halt\n #help halt ...description...\n !help ok 1\n\n 
\"\"\"\n if not msg.arguments:\n for name, method in sorted(self._request_handlers.items()):\n doc = method.__doc__\n req.inform(name, doc)\n num_methods = len(self._request_handlers)\n return req.make_reply(\"ok\", str(num_methods))\n else:\n name = msg.arguments[0]\n if name in self._request_handlers:\n method = self._request_handlers[name]\n doc = method.__doc__.strip()\n req.inform(name, doc)\n return req.make_reply(\"ok\", \"1\")\n return req.make_reply(\"fail\", \"Unknown request method.\")"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef request_request_timeout_hint(self, req, request):\n timeout_hints = {}\n if request:\n if request not in self._request_handlers:\n raise FailReply('Unknown request method')\n timeout_hint = getattr(\n self._request_handlers[request], 'request_timeout_hint', None)\n timeout_hint = timeout_hint or 0\n timeout_hints[request] = timeout_hint\n else:\n for request_, handler in self._request_handlers.items():\n timeout_hint = getattr(handler, 'request_timeout_hint', None)\n if timeout_hint:\n timeout_hints[request_] = timeout_hint\n\n cnt = len(timeout_hints)\n for request_name, timeout_hint in sorted(timeout_hints.items()):\n req.inform(request_name, float(timeout_hint))\n\n return ('ok', cnt)", "response": "Return timeout hints for a specific request."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef request_log_level(self, req, msg):\n if msg.arguments:\n try:\n self.log.set_log_level_by_name(msg.arguments[0])\n except ValueError, e:\n raise FailReply(str(e))\n return req.make_reply(\"ok\", self.log.level_name())", "response": "Query or set the current logging level."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef request_restart(self, req, msg):\n if self._restart_queue is None:\n raise FailReply(\"No restart queue registered 
-- cannot restart.\")\n\n f = tornado_Future()\n\n @gen.coroutine\n def _restart():\n # .put should never block because queue should have no size limit\n self._restart_queue.put_nowait(self)\n req.reply('ok')\n raise AsyncReply\n\n self.ioloop.add_callback(lambda: chain_future(_restart(), f))\n return f", "response": "Restart the device server."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nrequest the list of connected clients.", "response": "def request_client_list(self, req, msg):\n \"\"\"Request the list of connected clients.\n\n The list of clients is sent as a sequence of #client-list informs.\n\n Informs\n -------\n addr : str\n The address of the client as host:port with host in dotted quad\n notation. If the address of the client could not be determined\n (because, for example, the client disconnected suddenly) then\n a unique string representing the client is sent instead.\n\n Returns\n -------\n success : {'ok', 'fail'}\n Whether sending the client list succeeded.\n informs : int\n Number of #client-list inform messages sent.\n\n Examples\n --------\n ::\n\n ?client-list\n #client-list 127.0.0.1:53600\n !client-list ok 1\n\n \"\"\"\n # TODO Get list of ClientConnection* instances and implement a standard\n # 'address-print' method in the ClientConnection class\n clients = self._client_conns\n num_clients = len(clients)\n for conn in clients:\n addr = conn.address\n req.inform(addr)\n return req.make_reply('ok', str(num_clients))"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef request_version_list(self, req, msg):\n versions = [\n (\"katcp-protocol\", (self.PROTOCOL_INFO, None)),\n (\"katcp-library\", (\"katcp-python-%s\" % katcp.__version__, katcp.__version__)),\n (\"katcp-device\", (self.version(), self.build_state())),\n ]\n extra_versions = sorted(self.extra_versions.items())\n\n for name, (version, build_state) in versions + extra_versions:\n if build_state 
is None:\n inform_args = (name, version)\n else:\n inform_args = (name, version, build_state)\n req.inform(*inform_args)\n\n num_versions = len(versions) + len(extra_versions)\n return req.make_reply(\"ok\", str(num_versions))", "response": "Request the list of versions of roles and subcomponents."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nrequests the list of sensors.", "response": "def request_sensor_list(self, req, msg):\n \"\"\"Request the list of sensors.\n\n The list of sensors is sent as a sequence of #sensor-list informs.\n\n Parameters\n ----------\n name : str, optional\n Name of the sensor to list (the default is to list all sensors).\n If name starts and ends with '/' it is treated as a regular\n expression and all sensors whose names contain the regular\n expression are returned.\n\n Informs\n -------\n name : str\n The name of the sensor being described.\n description : str\n Description of the named sensor.\n units : str\n Units for the value of the named sensor.\n type : str\n Type of the named sensor.\n params : list of str, optional\n Additional sensor parameters (type dependent). For integer and\n float sensors the additional parameters are the minimum and maximum\n sensor value. For discrete sensors the additional parameters are\n the allowed values. For all other types no additional parameters\n are sent.\n\n Returns\n -------\n success : {'ok', 'fail'}\n Whether sending the sensor list succeeded.\n informs : int\n Number of #sensor-list inform messages sent.\n\n Examples\n --------\n ::\n\n ?sensor-list\n #sensor-list psu.voltage PSU\\_voltage. V float 0.0 5.0\n #sensor-list cpu.status CPU\\_status. \\@ discrete on off error\n ...\n !sensor-list ok 5\n\n ?sensor-list cpu.power.on\n #sensor-list cpu.power.on Whether\\_CPU\\_hase\\_power. \\@ boolean\n !sensor-list ok 1\n\n ?sensor-list /voltage/\n #sensor-list psu.voltage PSU\\_voltage. 
V float 0.0 5.0\n #sensor-list cpu.voltage CPU\\_voltage. V float 0.0 3.0\n !sensor-list ok 2\n\n \"\"\"\n exact, name_filter = construct_name_filter(msg.arguments[0]\n if msg.arguments else None)\n sensors = [(name, sensor) for name, sensor in\n sorted(self._sensors.iteritems()) if name_filter(name)]\n\n if exact and not sensors:\n return req.make_reply(\"fail\", \"Unknown sensor name.\")\n\n self._send_sensor_value_informs(req, sensors)\n return req.make_reply(\"ok\", str(len(sensors)))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef request_sensor_value(self, req, msg):\n exact, name_filter = construct_name_filter(msg.arguments[0]\n if msg.arguments else None)\n sensors = [(name, sensor) for name, sensor in\n sorted(self._sensors.iteritems()) if name_filter(name)]\n\n if exact and not sensors:\n return req.make_reply(\"fail\", \"Unknown sensor name.\")\n\n katcp_version = self.PROTOCOL_INFO.major\n for name, sensor in sensors:\n timestamp, status, value = sensor.read_formatted(katcp_version)\n req.inform(timestamp, \"1\", name, status, value)\n return req.make_reply(\"ok\", str(len(sensors)))", "response": "Request the value of a sensor or sensors."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef request_sensor_sampling(self, req, msg):\n f = Future()\n self.ioloop.add_callback(lambda: chain_future(\n self._handle_sensor_sampling(req, msg), f))\n return f", "response": "Configure or query the way a sensor is sampled."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef request_sensor_sampling_clear(self, req):\n f = Future()\n @gen.coroutine\n def _clear_strategies():\n self.clear_strategies(req.client_connection)\n raise gen.Return(('ok',))\n\n self.ioloop.add_callback(lambda: chain_future(_clear_strategies(), f))\n return f", "response": "Clear all sampling 
strategies for this client."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef level_name(self, level=None):\n if level is None:\n level = self._log_level\n return self.LEVELS[level]", "response": "Return the name of the given level value."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef level_from_name(self, level_name):\n try:\n return self.LEVELS.index(level_name)\n except ValueError:\n raise ValueError(\"Unknown logging level name '%s'\" % (level_name,))", "response": "Return the logging level constant associated with a given name."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset the logging level.", "response": "def set_log_level(self, level):\n \"\"\"Set the logging level.\n\n Parameters\n ----------\n level : logging level constant\n The value to set the logging level to.\n\n \"\"\"\n self._log_level = level\n if self._python_logger:\n try:\n level = self.PYTHON_LEVEL.get(level)\n except ValueError as err:\n raise FailReply(\"Unknown logging level '%s'\" % (level))\n self._python_logger.setLevel(level)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nlogging a message at the specified level and inform all clients.", "response": "def log(self, level, msg, *args, **kwargs):\n \"\"\"Log a message and inform all clients.\n\n Parameters\n ----------\n level : logging level constant\n The level to log the message at.\n msg : str\n The text format for the log message.\n args : list of objects\n Arguments to pass to log format string. Final message text is\n created using: msg % args.\n kwargs : additional keyword parameters\n Allowed keywords are 'name' and 'timestamp'. The name is the name\n of the logger to log the message to. If not given the name defaults\n to the root logger. The timestamp is a float in seconds. 
If not\n given the timestamp defaults to the current time.\n\n \"\"\"\n timestamp = kwargs.get(\"timestamp\")\n python_msg = msg\n if self._python_logger is not None:\n if timestamp is not None:\n python_msg = ' '.join((\n 'katcp timestamp: %r' % timestamp,\n python_msg))\n self._python_logger.log(self.PYTHON_LEVEL[level], python_msg, *args)\n if level >= self._log_level:\n name = kwargs.get(\"name\")\n if name is None:\n name = self._root_logger_name\n try:\n inform_msg = msg % args\n except TypeError:\n # Catch the \"not enough arguments for format string\" exception.\n inform_msg = \"{} {}\".format(\n msg,\n args if args else '').strip()\n\n self._device_server.mass_inform(\n self._device_server.create_log_inform(\n self.level_name(level),\n inform_msg,\n name,\n timestamp=timestamp))"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nlogs an warning message.", "response": "def warn(self, msg, *args, **kwargs):\n \"\"\"Log an warning message.\"\"\"\n self.log(self.WARN, msg, *args, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nlog a KATCP logging message to a Python logger.", "response": "def log_to_python(cls, logger, msg):\n \"\"\"Log a KATCP logging message to a Python logger.\n\n Parameters\n ----------\n logger : logging.Logger object\n The Python logger to log the given message to.\n msg : Message object\n The #log message to create a log entry from.\n\n \"\"\"\n (level, timestamp, name, message) = tuple(msg.arguments)\n log_string = \"%s %s: %s\" % (timestamp, name, message)\n logger.log({\"trace\": 0,\n \"debug\": logging.DEBUG,\n \"info\": logging.INFO,\n \"warn\": logging.WARN,\n \"error\": logging.ERROR,\n \"fatal\": logging.FATAL}[level], log_string)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef request_echo(self, sock, msg):\n return katcp.Message.reply(msg.name, \"ok\", *msg.arguments)", "response": "Echo the arguments of the 
message sent."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef log_coroutine_exceptions(coro):\n def log_cb(self, f):\n try:\n f.result()\n except Exception:\n self._logger.exception('Unhandled exception calling coroutine {0!r}'\n .format(coro))\n\n @wraps(coro)\n def wrapped_coro(self, *args, **kwargs):\n try:\n f = coro(self, *args, **kwargs)\n except Exception:\n f = tornado_Future()\n f.set_exc_info(sys.exc_info())\n f.add_done_callback(partial(log_cb, self))\n return f\n\n return wrapped_coro", "response": "A coroutine decorator that logs exceptions and returns a tornado. Future."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef log_future_exceptions(logger, f, ignore=()):\n def log_cb(f):\n try:\n f.result()\n except ignore:\n pass\n except Exception:\n logger.exception('Unhandled exception returned by future')\n f.add_done_callback(log_cb)", "response": "Log any exceptions set to a future."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef steal_docstring_from(obj):\n def deco(fn):\n docs = [obj.__doc__]\n if fn.__doc__:\n docs.append(fn.__doc__)\n fn.__doc__ = '\\n\\n'.join(docs)\n return fn\n\n return deco", "response": "Decorator that lets you steal a docstring from another object"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngenerating a hashable ID that is stable for methods etc Approach borrowed from blinker Approach borrowed from blinker. Why it matters. Why it matters. Why it matters. Why it matters. Why it matters. Why it matters. Why it matters. Why it matters. Why it matters. Why it matters. Why it matters.", "response": "def hashable_identity(obj):\n\n \"\"\"Generate a hashable ID that is stable for methods etc\n\n Approach borrowed from blinker. 
Why it matters: see e.g.\n http://stackoverflow.com/questions/13348031/python-bound-and-unbound-method-object\n \"\"\"\n if hasattr(obj, '__func__'):\n return (id(obj.__func__), id(obj.__self__))\n elif hasattr(obj, 'im_func'):\n return (id(obj.im_func), id(obj.im_self))\n elif isinstance(obj, (basestring, unicode)):\n return obj\n else:\n return id(obj)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a tornado Future that resolves when any of the passed futures resolves.", "response": "def until_any(*futures, **kwargs):\n \"\"\"Return a future that resolves when any of the passed futures resolves.\n\n Resolves with the value yielded by the first future to resolve.\n\n Note, this will only work with tornado futures.\n\n \"\"\"\n timeout = kwargs.get('timeout', None)\n ioloop = kwargs.get('ioloop', None) or tornado.ioloop.IOLoop.current()\n any_future = tornado_Future()\n\n def handle_done(done_future):\n if not any_future.done():\n try:\n any_future.set_result(done_future.result())\n except Exception:\n any_future.set_exc_info(done_future.exc_info())\n # (NM) Nasty hack to remove handle_done from the callback list to prevent a\n # memory leak where one of the futures resolves quickly, particularly when\n # used together with AsyncState.until_state(). 
Also addresses Jira issue\n # CM-593\n for f in futures:\n if f._callbacks:\n try:\n f._callbacks.remove(handle_done)\n except ValueError:\n pass\n\n for f in futures:\n f.add_done_callback(handle_done)\n if any_future.done():\n break\n\n if timeout:\n return with_timeout(ioloop.time() + timeout, any_future, ioloop)\n else:\n return any_future"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef future_timeout_manager(timeout=None, ioloop=None):\n ioloop = ioloop or tornado.ioloop.IOLoop.current()\n t0 = ioloop.time()\n\n def _remaining():\n return timeout - (ioloop.time() - t0) if timeout else None\n\n def maybe_timeout(f):\n \"\"\"Applies timeout if timeout is not None\"\"\"\n if not timeout:\n return f\n else:\n remaining = _remaining()\n deadline = ioloop.time() + remaining\n return with_timeout(deadline, f, ioloop)\n\n maybe_timeout.remaining = _remaining\n\n return maybe_timeout", "response": "Create a coroutine that returns a future that can be used for timeout."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a tornado Future that resolves when some of the passed futures resolve.", "response": "def until_some(*args, **kwargs):\n \"\"\"Return a future that resolves when some of the passed futures resolve.\n\n The futures can be passed as either a sequence of *args* or a dict of\n *kwargs* (but not both). Some additional keyword arguments are supported,\n as described below. 
Once a specified number of underlying futures have\n resolved, the returned future resolves as well, or a timeout could be\n raised if specified.\n\n Parameters\n ----------\n done_at_least : None or int\n Number of futures that need to resolve before this resolves or None\n to wait for all (default None)\n timeout : None or float\n Timeout in seconds, or None for no timeout (the default)\n\n Returns\n -------\n This command returns a tornado Future that resolves with a list of\n (index, value) tuples containing the results of all futures that resolved,\n with corresponding indices (numbers for *args* futures or keys for *kwargs*\n futures).\n\n Raises\n ------\n :class:`tornado.gen.TimeoutError`\n If operation times out before the requisite number of futures resolve\n\n \"\"\"\n done_at_least = kwargs.pop('done_at_least', None)\n timeout = kwargs.pop('timeout', None)\n # At this point args and kwargs are either empty or contain futures only\n if done_at_least is None:\n done_at_least = len(args) + len(kwargs)\n wait_iterator = tornado.gen.WaitIterator(*args, **kwargs)\n maybe_timeout = future_timeout_manager(timeout)\n results = []\n while not wait_iterator.done():\n result = yield maybe_timeout(wait_iterator.next())\n results.append((wait_iterator.current_index, result))\n if len(results) >= done_at_least:\n break\n raise tornado.gen.Return(results)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nformat a Message argument to a string", "response": "def format_argument(self, arg):\n \"\"\"Format a Message argument to a string\"\"\"\n if isinstance(arg, float):\n return repr(arg)\n elif isinstance(arg, bool):\n return str(int(arg))\n else:\n try:\n return str(arg)\n except UnicodeEncodeError:\n # unicode characters will break the str cast, so\n # try to encode to ascii and replace the offending characters\n # with a '?' character\n logger.error(\"Error casting message argument to str! 
\"\n \"Trying to encode argument to ascii.\")\n if not isinstance(arg, unicode):\n arg = arg.decode('utf-8')\n return arg.encode('ascii', 'replace')"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn True if this is a reply and its first argument is ok.", "response": "def reply_ok(self):\n \"\"\"Return True if this is a reply and its first argument is 'ok'.\"\"\"\n return (self.mtype == self.REPLY and self.arguments and\n self.arguments[0] == self.OK)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef request(cls, name, *args, **kwargs):\n mid = kwargs.pop('mid', None)\n if len(kwargs) > 0:\n raise TypeError('Invalid keyword argument(s): %r' % kwargs)\n return cls(cls.REQUEST, name, args, mid)", "response": "Helper method for creating request messages."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a new reply message.", "response": "def reply(cls, name, *args, **kwargs):\n \"\"\"Helper method for creating reply messages.\n\n Parameters\n ----------\n name : str\n The name of the message.\n args : list of strings\n The message arguments.\n\n Keyword Arguments\n -----------------\n mid : str or None\n Message ID to use or None (default) for no Message ID\n\n \"\"\"\n mid = kwargs.pop('mid', None)\n if len(kwargs) > 0:\n raise TypeError('Invalid keyword argument(s): %r' % kwargs)\n return cls(cls.REPLY, name, args, mid)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a new reply message that is a specific request.", "response": "def reply_to_request(cls, req_msg, *args):\n \"\"\"Helper method for creating reply messages to a specific request.\n\n Copies the message name and message identifier from request message.\n\n Parameters\n ----------\n req_msg : katcp.core.Message instance\n The request message that this inform if in reply to\n args : list of strings\n The message arguments.\n\n 
\"\"\"\n return cls(cls.REPLY, req_msg.name, args, req_msg.mid)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate an inform message.", "response": "def inform(cls, name, *args, **kwargs):\n \"\"\"Helper method for creating inform messages.\n\n Parameters\n ----------\n name : str\n The name of the message.\n args : list of strings\n The message arguments.\n\n \"\"\"\n mid = kwargs.pop('mid', None)\n if len(kwargs) > 0:\n raise TypeError('Invalid keyword argument(s): %r' % kwargs)\n return cls(cls.INFORM, name, args, mid)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef reply_inform(cls, req_msg, *args):\n return cls(cls.INFORM, req_msg.name, args, req_msg.mid)", "response": "Create an inform message in reply to a request message."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _unescape_match(self, match):\n char = match.group(1)\n if char in self.ESCAPE_LOOKUP:\n return self.ESCAPE_LOOKUP[char]\n elif not char:\n raise KatcpSyntaxError(\"Escape slash at end of argument.\")\n else:\n raise KatcpSyntaxError(\"Invalid escape character %r.\" % (char,))", "response": "Given a re. 
Match unescape the escape code it represents."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef parse(self, line):\n # find command type and check validity\n if not line:\n raise KatcpSyntaxError(\"Empty message received.\")\n\n type_char = line[0]\n if type_char not in self.TYPE_SYMBOL_LOOKUP:\n raise KatcpSyntaxError(\"Bad type character %r.\" % (type_char,))\n\n mtype = self.TYPE_SYMBOL_LOOKUP[type_char]\n\n # find command and arguments name\n # (removing possible empty argument resulting from whitespace at end\n # of command)\n parts = self.WHITESPACE_RE.split(line)\n if not parts[-1]:\n del parts[-1]\n\n name = parts[0][1:]\n arguments = [self._parse_arg(x) for x in parts[1:]]\n\n # split out message id\n match = self.NAME_RE.match(name)\n if match:\n name = match.group('name')\n mid = match.group('id')\n else:\n raise KatcpSyntaxError(\"Bad message name (and possibly id) %r.\" %\n (name,))\n\n return Message(mtype, name, arguments, mid)", "response": "Parse a line of text into a Message object."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nparse a version string into a ProtocolFlags object.", "response": "def parse_version(cls, version_str):\n \"\"\"Create a :class:`ProtocolFlags` object from a version string.\n\n Parameters\n ----------\n version_str : str\n The version string from a #version-connect katcp-protocol\n message.\n\n \"\"\"\n match = cls.VERSION_RE.match(version_str)\n if match:\n major = int(match.group('major'))\n minor = int(match.group('minor'))\n flags = set(match.group('flags') or '')\n else:\n major, minor, flags = None, None, set()\n return cls(major, minor, flags)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ntrues if the current server s protocol version satisfies handler requirements", "response": "def check_protocol(mcs, handler):\n \"\"\"\n True if the current server's protocol flags 
satisfy handler requirements\n\n \"\"\"\n protocol_info = mcs.PROTOCOL_INFO\n protocol_version = (protocol_info.major, protocol_info.minor)\n protocol_flags = protocol_info.flags\n\n # Check if minimum protocol version requirement is met\n min_protocol_version = getattr(handler, '_minimum_katcp_version', None)\n protocol_version_ok = (min_protocol_version is None or\n protocol_version >= min_protocol_version)\n\n # Check if required optional protocol flags are present\n required_katcp_protocol_flags = getattr(\n handler, '_has_katcp_protocol_flags', None)\n protocol_flags_ok = (\n required_katcp_protocol_flags is None or\n all(flag in protocol_flags\n for flag in required_katcp_protocol_flags))\n\n return protocol_version_ok and protocol_flags_ok"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef integer(cls, name, description=None, unit='', params=None,\n default=None, initial_status=None):\n \"\"\"Instantiate a new integer sensor object.\n\n Parameters\n ----------\n name : str\n The name of the sensor.\n description : str\n A short description of the sensor.\n units : str\n The units of the sensor value. May be the empty string\n if there are no applicable units.\n params : list\n [min, max] -- miniumum and maximum values of the sensor\n default : int\n An initial value for the sensor. Defaults to 0.\n initial_status : int enum or None\n An initial status for the sensor. If None, defaults to\n Sensor.UNKNOWN. 
`initial_status` must be one of the keys in\n Sensor.STATUSES\n\n \"\"\"\n return cls(cls.INTEGER, name, description, unit, params,\n default, initial_status)", "response": "Instantiate a new integer sensor object."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef lru(cls, name, description=None, unit='',\n default=None, initial_status=None):\n \"\"\"Instantiate a new lru sensor object.\n\n Parameters\n ----------\n name : str\n The name of the sensor.\n description : str\n A short description of the sensor.\n units : str\n The units of the sensor value. May be the empty string\n if there are no applicable units.\n default : enum, Sensor.LRU_*\n An initial value for the sensor. Defaults to self.LRU_NOMINAL\n initial_status : int enum or None\n An initial status for the sensor. If None, defaults to\n Sensor.UNKNOWN. `initial_status` must be one of the keys in\n Sensor.STATUSES\n\n \"\"\"\n return cls(cls.LRU, name, description, unit, None,\n default, initial_status)", "response": "Instantiate a new lru sensor object."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef discrete(cls, name, description=None, unit='', params=None,\n default=None, initial_status=None):\n \"\"\"Instantiate a new discrete sensor object.\n\n Parameters\n ----------\n name : str\n The name of the sensor.\n description : str\n A short description of the sensor.\n units : str\n The units of the sensor value. May be the empty string\n if there are no applicable units.\n params : [str]\n Sequence of all allowable discrete sensor states\n default : str\n An initial value for the sensor. Defaults to the first item\n of params\n initial_status : int enum or None\n An initial status for the sensor. If None, defaults to\n Sensor.UNKNOWN. 
`initial_status` must be one of the keys in\n Sensor.STATUSES\n\n \"\"\"\n return cls(cls.DISCRETE, name, description, unit, params,\n default, initial_status)", "response": "Instantiate a new discrete sensor object."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef address(cls, name, description=None, unit='',\n default=None, initial_status=None):\n \"\"\"Instantiate a new IP address sensor object.\n\n Parameters\n ----------\n name : str\n The name of the sensor.\n description : str\n A short description of the sensor.\n units : str\n The units of the sensor value. May be the empty string\n if there are no applicable units.\n default : (string, int)\n An initial value for the sensor. Tuple contaning (host, port).\n default is (\"0.0.0.0\", None)\n initial_status : int enum or None\n An initial status for the sensor. If None, defaults to\n Sensor.UNKNOWN. `initial_status` must be one of the keys in\n Sensor.STATUSES\n\n \"\"\"\n return cls(cls.ADDRESS, name, description, unit, None,\n default, initial_status)", "response": "Instantiate a new IP address sensor object."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nnotifying all observers of changes to this sensor.", "response": "def notify(self, reading):\n \"\"\"Notify all observers of changes to this sensor.\"\"\"\n # copy list before iterating in case new observers arrive\n for o in list(self._observers):\n o.update(self, reading)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsetting the current value of the sensor.", "response": "def set(self, timestamp, status, value):\n \"\"\"Set the current value of the sensor.\n\n Parameters\n ----------\n timestamp : float in seconds\n The time at which the sensor value was determined.\n status : Sensor status constant\n Whether the value represents an error condition or not.\n value : object\n The value of the sensor (the type should be appropriate 
to the\n sensor's type).\n\n \"\"\"\n reading = self._current_reading = Reading(timestamp, status, value)\n self.notify(reading)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset the current value of the sensor.", "response": "def set_formatted(self, raw_timestamp, raw_status, raw_value,\n major=DEFAULT_KATCP_MAJOR):\n \"\"\"Set the current value of the sensor.\n\n Parameters\n ----------\n timestamp : str\n KATCP formatted timestamp string\n status : str\n KATCP formatted sensor status string\n value : str\n KATCP formatted sensor value\n major : int, default = 5\n KATCP major version to use for interpreting the raw values\n\n \"\"\"\n timestamp = self.TIMESTAMP_TYPE.decode(raw_timestamp, major)\n status = self.STATUS_NAMES[raw_status]\n value = self.parse_value(raw_value, major)\n self.set(timestamp, status, value)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nformats a sensor reading as a tuple of strings.", "response": "def format_reading(self, reading, major=DEFAULT_KATCP_MAJOR):\n \"\"\"Format sensor reading as (timestamp, status, value) tuple of strings.\n\n All values are strings formatted as specified in the Sensor Type\n Formats in the katcp specification.\n\n Parameters\n ----------\n reading : :class:`Reading` object\n Sensor reading as returned by :meth:`read`\n major : int\n Major version of KATCP to use when interpreting types.\n Defaults to latest implemented KATCP version.\n\n Returns\n -------\n timestamp : str\n KATCP formatted timestamp string\n status : str\n KATCP formatted sensor status string\n value : str\n KATCP formatted sensor value\n\n Note\n ----\n Should only be used for a reading obtained from the same sensor.\n\n \"\"\"\n\n timestamp, status, value = reading\n return (self.TIMESTAMP_TYPE.encode(timestamp, major),\n self.STATUSES[status],\n self._formatter(value, True, major))"} {"SOURCE": "codesearchnet", "instruction": "Can you 
generate the documentation for the following Python 3 function\ndef set_value(self, value, status=NOMINAL, timestamp=None,\n major=DEFAULT_KATCP_MAJOR):\n \"\"\"Check and then set the value of the sensor.\n\n Parameters\n ----------\n value : object\n Value of the appropriate type for the sensor.\n status : Sensor status constant\n Whether the value represents an error condition or not.\n timestamp : float in seconds or None\n The time at which the sensor value was determined.\n Uses current time if None.\n major : int\n Major version of KATCP to use when interpreting types.\n Defaults to latest implemented KATCP version.\n\n \"\"\"\n self._kattype.check(value, major)\n if timestamp is None:\n timestamp = time.time()\n self.set(timestamp, status, value)", "response": "Set the value of the object in the KATCP system."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef parse_type(cls, type_string):\n if type_string in cls.SENSOR_TYPE_LOOKUP:\n return cls.SENSOR_TYPE_LOOKUP[type_string]\n else:\n raise KatcpSyntaxError(\"Invalid sensor type string %s\" %\n type_string)", "response": "Parse a KATCP formatted type code into a Sensor type constant."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef parse_params(cls, sensor_type, formatted_params,\n major=DEFAULT_KATCP_MAJOR):\n \"\"\"Parse KATCP formatted parameters into Python values.\n\n Parameters\n ----------\n sensor_type : Sensor type constant\n The type of sensor the parameters are for.\n formatted_params : list of strings\n The formatted parameters that should be parsed.\n major : int\n Major version of KATCP to use when interpreting types.\n Defaults to latest implemented KATCP version.\n\n Returns\n -------\n params : list of objects\n The parsed parameters.\n\n \"\"\"\n typeclass, _value = cls.SENSOR_TYPES[sensor_type]\n if sensor_type == cls.DISCRETE:\n kattype = typeclass([])\n 
else:\n kattype = typeclass()\n return [kattype.decode(x, major) for x in formatted_params]", "response": "Parse KATCP formatted parameters into Python values."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef set(self):\n self._flag = True\n old_future = self._waiting_future\n # Replace _waiting_future with a fresh one incase someone woken up by set_result()\n # sets this AsyncEvent to False before waiting on it to be set again.\n self._waiting_future = tornado_Future()\n old_future.set_result(True)", "response": "Sets the event flag to true and resolve future(s returned by until_set."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef wait_with_ioloop(self, ioloop, timeout=None):\n f = Future()\n\n def cb():\n return gen.chain_future(self.until_set(), f)\n ioloop.add_callback(cb)\n try:\n f.result(timeout)\n return True\n except TimeoutError:\n return self._flag", "response": "Do blocking wait until condition is set in ioloop."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef until_state(self, state, timeout=None):\n if state not in self._valid_states:\n raise ValueError('State must be one of {0}, not {1}'\n .format(self._valid_states, state))\n if state != self._state:\n if timeout:\n return with_timeout(self._ioloop.time() + timeout,\n self._waiting_futures[state],\n self._ioloop)\n else:\n return self._waiting_futures[state]\n else:\n f = tornado_Future()\n f.set_result(True)\n return f", "response": "Return a tornado Future that will resolve when the requested state is set"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a tornado Future resolves when any of the requested states is set", "response": "def until_state_in(self, *states, **kwargs):\n \"\"\"Return a tornado Future, resolves when any of the requested states is 
set\"\"\"\n timeout = kwargs.get('timeout', None)\n state_futures = (self.until_state(s, timeout=timeout) for s in states)\n return until_any(*state_futures)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncalls with each future that is to be yielded on", "response": "def check_future(self, fut):\n \"\"\"Call with each future that is to be yielded on\"\"\"\n done = self.done = fut.done()\n if done and not self.prev_done:\n self.done_since = self.ioloop.time()\n self.prev_done = done"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck if we have time to yield tornado. gen. moment.", "response": "def time_to_yield(self):\n \"\"\"Call after check_future(). If True, it is time to yield tornado.gen.moment\"\"\"\n delta = self.ioloop.time() - self.done_since\n if self.done and delta > self.max_loop_latency:\n return True\n return False"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nmaking blocking request to client and raise exception if reply is not ok.", "response": "def request_check(client, exception, *msg_parms, **kwargs):\n \"\"\"Make blocking request to client and raise exception if reply is not ok.\n\n Parameters\n ----------\n client : DeviceClient instance\n exception: Exception class to raise\n *msg_parms : Message parameters sent to the Message.request() call\n **kwargs : Keyword arguments\n Forwards kwargs['timeout'] to client.blocking_request().\n Forwards kwargs['mid'] to Message.request().\n\n Returns\n -------\n reply, informs : as returned by client.blocking_request\n\n Raises\n ------\n *exception* passed as parameter is raised if reply.reply_ok() is False\n\n Notes\n -----\n A typical use-case for this function is to use functools.partial() to bind\n a particular client and exception. 
The resulting function can then be used\n instead of direct client.blocking_request() calls to automate error\n handling.\n\n \"\"\"\n timeout = kwargs.get('timeout', None)\n req_msg = Message.request(*msg_parms)\n if timeout is not None:\n reply, informs = client.blocking_request(req_msg, timeout=timeout)\n else:\n reply, informs = client.blocking_request(req_msg)\n\n if not reply.reply_ok():\n raise exception('Unexpected failure reply \"{2}\"\\n'\n ' with device at {0}, request \\n\"{1}\"'\n .format(client.bind_address_string, req_msg, reply))\n return reply, informs"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef convert_seconds(self, time_seconds):\n if self.protocol_flags.major >= SEC_TS_KATCP_MAJOR:\n return time_seconds\n else:\n device_time = time_seconds * SEC_TO_MS_FAC\n if self.protocol_flags.major < FLOAT_TS_KATCP_MAJOR:\n device_time = int(device_time)\n return device_time", "response": "Convert a time in seconds to the device timestamp units."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _next_id(self):\n assert get_thread_ident() == self.ioloop_thread_id\n self._last_msg_id += 1\n return str(self._last_msg_id)", "response": "Return the next available message id."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef inform_version_connect(self, msg):\n if len(msg.arguments) < 2:\n return\n # Store version information.\n name = msg.arguments[0]\n self.versions[name] = tuple(msg.arguments[1:])\n if msg.arguments[0] == \"katcp-protocol\":\n protocol_flags = ProtocolFlags.parse_version(msg.arguments[1])\n self._set_protocol_from_inform(protocol_flags, msg)", "response": "Process a version - connect message."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets message ID for current request and assign to msg. 
mid if needed.", "response": "def _get_mid_and_update_msg(self, msg, use_mid):\n \"\"\"Get message ID for current request and assign to msg.mid if needed.\n\n Parameters\n ----------\n msg : katcp.Message ?request message\n use_mid : bool or None\n\n If msg.mid is None, a new message ID will be created. msg.mid will be\n filled with this ID if use_mid is True or if use_mid is None and the\n server supports message ids. If msg.mid is already assigned, it will\n not be touched, and will be used as the active message ID.\n\n Return value\n ------------\n The active message ID\n\n \"\"\"\n if use_mid is None:\n use_mid = self._server_supports_ids\n\n if msg.mid is None:\n mid = self._next_id()\n if use_mid:\n msg.mid = mid\n # An internal mid may be needed for the request/inform/response\n # machinery to work, so we return it\n return mid\n else:\n return msg.mid"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef request(self, msg, use_mid=None):\n mid = self._get_mid_and_update_msg(msg, use_mid)\n self.send_request(msg)\n return mid", "response": "Send a request message to the server with automatic message ID assignment."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nsend a request message to the server.", "response": "def send_request(self, msg):\n \"\"\"Send a request messsage.\n\n Parameters\n ----------\n msg : Message object\n The request Message to send.\n\n \"\"\"\n assert(msg.mtype == Message.REQUEST)\n if msg.mid and not self._server_supports_ids:\n raise KatcpVersionError('Message IDs not supported by server')\n self.send_message(msg)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsends a message to the ioloop thread.", "response": "def send_message(self, msg):\n \"\"\"Send any kind of message.\n\n Parameters\n ----------\n msg : Message object\n The message to send.\n\n \"\"\"\n assert get_thread_ident() == 
self.ioloop_thread_id\n data = str(msg) + \"\\n\"\n # Log all sent messages here so no one else has to.\n if self._logger.isEnabledFor(logging.DEBUG):\n self._logger.debug(\"Sending to {}: {}\"\n .format(self.bind_address_string, repr(data)))\n if not self._connected.isSet():\n raise KatcpClientDisconnected('Not connected to device {0}'.format(\n self.bind_address_string))\n try:\n return self._stream.write(data)\n except Exception:\n self._logger.warn('Could not send message {0!r} to {1!r}'\n .format(str(msg), self._bindaddr), exc_info=True)\n self._disconnect(exc_info=True)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nconnect to the server.", "response": "def _connect(self):\n \"\"\"Connect to the server.\"\"\"\n assert get_thread_ident() == self.ioloop_thread_id\n if self._stream:\n self._logger.warn('Disconnecting existing connection to {0!r} '\n 'to create a new connection')\n self._disconnect()\n yield self._disconnected.until_set()\n\n stream = None\n try:\n host, port = self._bindaddr\n stream = self._stream = yield self._tcp_client.connect(\n host, port, max_buffer_size=self.MAX_MSG_SIZE)\n stream.set_close_callback(partial(self._stream_closed_callback,\n stream))\n # our message packets are small, don't delay sending them.\n stream.set_nodelay(True)\n stream.max_write_buffer_size = self.MAX_WRITE_BUFFER_SIZE\n self._logger.debug('Connected to {0} with client addr {1}'\n .format(self.bind_address_string,\n address_to_string(stream.socket.getsockname())))\n if self._connect_failures >= 5:\n self._logger.warn(\"Reconnected to {0}\"\n .format(self.bind_address_string))\n self._connect_failures = 0\n except Exception, e:\n if self._connect_failures % 5 == 0:\n # warn on every fifth failure\n\n # TODO (NM 2015-03-04) This can get a bit verbose, and typically we have\n # other mechanisms for tracking failed connections. 
Consider doing some\n # kind of exponential backoff starting at 5 times the reconnect time up to\n # once per 5 minutes\n self._logger.debug(\"Failed to connect to {0!r}: {1}\"\n .format(self._bindaddr, e))\n self._connect_failures += 1\n stream = None\n yield gen.moment\n # TODO some kind of error rate limiting?\n\n if self._stream:\n # Can't use _disconnect() and wait on self._disconnected, since\n # exception may have been raised before _stream_closed_callback\n # was attached to the iostream.\n self._logger.debug('stream was set even though connecting failed')\n self._stream.close()\n self._disconnected.set()\n\n if stream:\n self._disconnected.clear()\n self._connected.set()\n self.last_connect_time = self.ioloop.time()\n try:\n self.notify_connected(True)\n except Exception:\n self._logger.exception(\"Notify connect failed. Disconnecting.\")\n self._disconnect()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef handle_message(self, msg):\n # log messages received so that no one else has to\n if self._logger.isEnabledFor(logging.DEBUG):\n self._logger.debug(\n \"received from {}: {}\"\n .format(self.bind_address_string, repr(str(msg))))\n\n if msg.mtype == Message.INFORM:\n return self.handle_inform(msg)\n elif msg.mtype == Message.REPLY:\n return self.handle_reply(msg)\n elif msg.mtype == Message.REQUEST:\n return self.handle_request(msg)\n else:\n self._logger.error(\"Unexpected message type from server ['%s'].\"\n % (msg,))", "response": "Handle a message from the server."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef handle_inform(self, msg):\n method = self._inform_handlers.get(\n msg.name, self.__class__.unhandled_inform)\n\n try:\n return method(self, msg)\n except Exception:\n e_type, e_value, trace = sys.exc_info()\n reason = \"\\n\".join(traceback.format_exception(\n e_type, e_value, trace, self._tb_limit))\n self._logger.error(\"Inform 
%s FAIL: %s\" % (msg.name, reason))", "response": "Dispatch an inform message to the appropriate method."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef handle_reply(self, msg):\n method = self.__class__.unhandled_reply\n if msg.name in self._reply_handlers:\n method = self._reply_handlers[msg.name]\n\n try:\n return method(self, msg)\n except Exception:\n e_type, e_value, trace = sys.exc_info()\n reason = \"\\n\".join(traceback.format_exception(\n e_type, e_value, trace, self._tb_limit))\n self._logger.error(\"Reply %s FAIL: %s\" % (msg.name, reason))", "response": "Dispatch a reply message to the appropriate method."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndispatch a request to the appropriate method.", "response": "def handle_request(self, msg):\n \"\"\"Dispatch a request message to the appropriate method.\n\n Parameters\n ----------\n msg : Message object\n The request message to dispatch.\n\n \"\"\"\n method = self.__class__.unhandled_request\n if msg.name in self._request_handlers:\n method = self._request_handlers[msg.name]\n\n try:\n reply = method(self, msg)\n if isinstance(reply, Message):\n # If it is a message object, assume it is a reply.\n reply.mid = msg.mid\n assert (reply.mtype == Message.REPLY)\n assert (reply.name == msg.name)\n self._logger.info(\"%s OK\" % (msg.name,))\n self.send_message(reply)\n else:\n # Just pass on what is, potentially, a future. The implementor\n # of the request handler method must arrange for a reply to be\n # sent. 
Since clients have no business dealing with requests in\n # any case we don't do much to help them.\n return reply\n # We do want to catch everything that inherits from Exception\n # pylint: disable-msg = W0703\n except Exception:\n e_type, e_value, trace = sys.exc_info()\n reason = \"\\n\".join(traceback.format_exception(\n e_type, e_value, trace, self._tb_limit))\n self._logger.error(\"Request %s FAIL: %s\" % (msg.name, reason))"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsetting the tornado. ioloop. IOLoop instance to use.", "response": "def set_ioloop(self, ioloop=None):\n \"\"\"Set the tornado.ioloop.IOLoop instance to use.\n\n This defaults to IOLoop.current(). If set_ioloop() is never called the\n IOLoop is managed: started in a new thread, and will be stopped if\n self.stop() is called.\n\n Notes\n -----\n Must be called before start() is called\n\n \"\"\"\n self._ioloop_manager.set_ioloop(ioloop, managed=False)\n self.ioloop = ioloop"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef enable_thread_safety(self):\n if self.threadsafe:\n return # Already done!\n if self._running.isSet():\n raise RuntimeError('Cannot enable thread safety after start')\n\n def _getattr(obj, name):\n # use 'is True' so mock objects don't return true for everything\n return getattr(obj, name, False) is True\n\n for name in dir(self):\n try:\n meth = getattr(self, name)\n except AttributeError:\n # Subclasses may have computed attributes that don't work\n # before they are started, so let's ignore those\n pass\n if not callable(meth):\n continue\n make_threadsafe = _getattr(meth, 'make_threadsafe')\n make_threadsafe_blocking = _getattr(meth, 'make_threadsafe_blocking')\n\n if make_threadsafe:\n assert not make_threadsafe_blocking\n meth = self._make_threadsafe(meth)\n setattr(self, name, meth)\n elif make_threadsafe_blocking:\n meth = self._make_threadsafe_blocking(meth)\n setattr(self, name, 
meth)\n\n self._threadsafe = True", "response": "Enable thread - safety features."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef start(self, timeout=None):\n if self._running.isSet():\n raise RuntimeError(\"Device client already started.\")\n # Make sure we have an ioloop\n self.ioloop = self._ioloop_manager.get_ioloop()\n if timeout:\n t0 = self.ioloop.time()\n self._ioloop_manager.start(timeout)\n self.ioloop.add_callback(self._install)\n if timeout:\n remaining_timeout = timeout - (self.ioloop.time() - t0)\n self.wait_running(remaining_timeout)", "response": "Start the device client in a new thread."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nstopping a running client from another thread.", "response": "def stop(self, timeout=None):\n \"\"\"Stop a running client (from another thread).\n\n Parameters\n ----------\n timeout : float in seconds\n Seconds to wait for client thread to have *started*.\n\n \"\"\"\n ioloop = getattr(self, 'ioloop', None)\n if not ioloop:\n raise RuntimeError('Call start() before stop()')\n if timeout:\n if get_thread_ident() == self.ioloop_thread_id:\n raise RuntimeError('Cannot block inside ioloop')\n self._running.wait_with_ioloop(self.ioloop, timeout)\n\n def _cleanup():\n self._running.clear()\n self._disconnect()\n self._ioloop_manager.stop(timeout=timeout, callback=_cleanup)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nwaits until the client is running.", "response": "def wait_running(self, timeout=None):\n \"\"\"Wait until the client is running.\n\n Parameters\n ----------\n timeout : float in seconds\n Seconds to wait for the client to start running.\n\n Returns\n -------\n running : bool\n Whether the client is running\n\n Notes\n -----\n Do not call this from the ioloop, use until_running().\n\n \"\"\"\n ioloop = getattr(self, 'ioloop', None)\n if not ioloop:\n raise RuntimeError('Call 
start() before wait_running()')\n return self._running.wait_with_ioloop(ioloop, timeout)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef until_connected(self, timeout=None):\n t0 = self.ioloop.time()\n yield self.until_running(timeout=timeout)\n t1 = self.ioloop.time()\n if timeout:\n timedelta = timeout - (t1 - t0)\n else:\n timedelta = None\n assert get_thread_ident() == self.ioloop_thread_id\n yield self._connected.until_set(timeout=timedelta)", "response": "Return future that resolves when the client is connected."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a future that resolves after receipt of katcp protocol info.", "response": "def until_protocol(self, timeout=None):\n \"\"\"Return future that resolves after receipt of katcp protocol info.\n\n If the returned future resolves, the server's protocol information is\n available in the ProtocolFlags instance self.protocol_flags.\n\n \"\"\"\n t0 = self.ioloop.time()\n yield self.until_running(timeout=timeout)\n t1 = self.ioloop.time()\n if timeout:\n timedelta = timeout - (t1 - t0)\n else:\n timedelta = None\n assert get_thread_ident() == self.ioloop_thread_id\n yield self._received_protocol_info.until_set(timeout=timedelta)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _push_async_request(self, msg_id, request, reply_cb, inform_cb,\n user_data, timeout_handle):\n \"\"\"Store reply / inform callbacks for request we've sent.\"\"\"\n assert get_thread_ident() == self.ioloop_thread_id\n self._async_queue[msg_id] = (\n request, reply_cb, inform_cb, user_data, timeout_handle)\n if request.name in self._async_id_stack:\n self._async_id_stack[request.name].append(msg_id)\n else:\n self._async_id_stack[request.name] = [msg_id]", "response": "Push a request onto the async queue."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 
that\npops the set of callbacks for a request.", "response": "def _pop_async_request(self, msg_id, msg_name):\n \"\"\"Pop the set of callbacks for a request.\n\n Return tuple of Nones if callbacks already popped (or don't exist).\n\n \"\"\"\n assert get_thread_ident() == self.ioloop_thread_id\n if msg_id is None:\n msg_id = self._msg_id_for_name(msg_name)\n if msg_id in self._async_queue:\n callback_tuple = self._async_queue[msg_id]\n del self._async_queue[msg_id]\n self._async_id_stack[callback_tuple[0].name].remove(msg_id)\n return callback_tuple\n else:\n return None, None, None, None, None"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _peek_async_request(self, msg_id, msg_name):\n assert get_thread_ident() == self.ioloop_thread_id\n if msg_id is None:\n msg_id = self._msg_id_for_name(msg_name)\n if msg_id in self._async_queue:\n return self._async_queue[msg_id]\n else:\n return None, None, None, None, None", "response": "Return the set of callbacks for a request."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _msg_id_for_name(self, msg_name):\n if msg_name in self._async_id_stack and self._async_id_stack[msg_name]:\n return self._async_id_stack[msg_name][0]", "response": "Find the message id for a given request name."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef callback_request(self, msg, reply_cb=None, inform_cb=None,\n user_data=None, timeout=None, use_mid=None):\n \"\"\"Send a request messsage.\n\n Parameters\n ----------\n msg : Message object\n The request message to send.\n reply_cb : function\n The reply callback with signature reply_cb(msg)\n or reply_cb(msg, \\*user_data)\n inform_cb : function\n The inform callback with signature inform_cb(msg)\n or inform_cb(msg, \\*user_data)\n user_data : tuple\n Optional user data to send to the reply and 
inform\n callbacks.\n timeout : float in seconds\n How long to wait for a reply. The default is the\n the timeout set when creating the AsyncClient.\n use_mid : boolean, optional\n Whether to use message IDs. Default is to use message IDs\n if the server supports them.\n\n \"\"\"\n if timeout is None:\n timeout = self._request_timeout\n\n mid = self._get_mid_and_update_msg(msg, use_mid)\n\n if timeout is None: # deal with 'no timeout', i.e. None\n timeout_handle = None\n else:\n timeout_handle = self.ioloop.call_later(\n timeout, partial(self._handle_timeout, mid, self.ioloop.time()))\n\n self._push_async_request(\n mid, msg, reply_cb, inform_cb, user_data, timeout_handle)\n\n try:\n self.send_request(msg)\n except KatcpClientError, e:\n error_reply = Message.request(msg.name, \"fail\", str(e))\n error_reply.mid = mid\n self.handle_reply(error_reply)", "response": "Send a request to the server."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef future_request(self, msg, timeout=None, use_mid=None):\n if timeout is None:\n timeout = self._request_timeout\n\n f = tornado_Future()\n informs = []\n\n def reply_cb(msg):\n f.set_result((msg, informs))\n\n def inform_cb(msg):\n informs.append(msg)\n\n try:\n self.callback_request(msg, reply_cb=reply_cb, inform_cb=inform_cb,\n timeout=timeout, use_mid=use_mid)\n except Exception:\n f.set_exc_info(sys.exc_info())\n return f", "response": "Send a request to the server with future replies."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef blocking_request(self, msg, timeout=None, use_mid=None):\n assert (get_thread_ident() != self.ioloop_thread_id), (\n 'Cannot call blocking_request() in ioloop')\n if timeout is None:\n timeout = self._request_timeout\n\n f = Future() # for thread safety\n tf = [None] # Placeholder for tornado Future for exception tracebacks\n def blocking_request_callback():\n try:\n 
tf[0] = frf = self.future_request(msg, timeout=timeout,\n use_mid=use_mid)\n except Exception:\n tf[0] = frf = tornado_Future()\n frf.set_exc_info(sys.exc_info())\n gen.chain_future(frf, f)\n\n self.ioloop.add_callback(blocking_request_callback)\n\n # We wait on the future result that should be set by the reply\n # handler callback. If this does not occur within the\n # timeout it means something unexpected went wrong. We give it\n # an extra second to deal with (unlikely?) slowness in the\n # rest of the code.\n extra_wait = 1\n wait_timeout = timeout\n if wait_timeout is not None:\n wait_timeout = wait_timeout + extra_wait\n try:\n return f.result(timeout=wait_timeout)\n except TimeoutError:\n raise RuntimeError('Unexpected error: Async request handler did '\n 'not call reply handler within timeout period')\n except Exception:\n # Use the tornado future to give us a usable traceback\n tf[0].result()\n assert False", "response": "Send a request and wait for its reply."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nhandling the inform message related to any current requests.", "response": "def handle_inform(self, msg):\n \"\"\"Handle inform messages related to any current requests.\n\n Inform messages not related to the current request go up\n to the base class method.\n\n Parameters\n ----------\n msg : Message object\n The inform message to dispatch.\n\n \"\"\"\n # this may also result in inform_cb being None if no\n # inform_cb was passed to the request method.\n if msg.mid is not None:\n _request, _reply_cb, inform_cb, user_data, _timeout_handle = \\\n self._peek_async_request(msg.mid, None)\n else:\n request, _reply_cb, inform_cb, user_data, _timeout_handle = \\\n self._peek_async_request(None, msg.name)\n if request is not None and request.mid is not None:\n # we sent a mid but this inform doesn't have one\n inform_cb, user_data = None, None\n\n if inform_cb is None:\n inform_cb = super(AsyncClient, self).handle_inform\n # 
override user_data since handle_inform takes no user_data\n user_data = None\n\n try:\n if user_data is None:\n inform_cb(msg)\n else:\n inform_cb(msg, *user_data)\n except Exception:\n e_type, e_value, trace = sys.exc_info()\n reason = \"\\n\".join(traceback.format_exception(\n e_type, e_value, trace, self._tb_limit))\n self._logger.error(\"Callback inform %s FAIL: %s\" %\n (msg.name, reason))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _do_fail_callback(\n self, reason, msg, reply_cb, inform_cb, user_data, timeout_handle):\n \"\"\"Do callback for a failed request.\"\"\"\n # this may also result in reply_cb being None if no\n # reply_cb was passed to the request method\n\n if reply_cb is None:\n # this happens if no reply_cb was passed in to the request\n return\n\n reason_msg = Message.reply(msg.name, \"fail\", reason, mid=msg.mid)\n\n try:\n if user_data is None:\n reply_cb(reason_msg)\n else:\n reply_cb(reason_msg, *user_data)\n except Exception:\n e_type, e_value, trace = sys.exc_info()\n exc_reason = \"\\n\".join(traceback.format_exception(\n e_type, e_value, trace, self._tb_limit))\n self._logger.error(\"Callback reply during failure %s, %s FAIL: %s\" %\n (reason, msg.name, exc_reason))", "response": "Do callback for a failed request."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nhandles a timed - out callback request.", "response": "def _handle_timeout(self, msg_id, start_time):\n \"\"\"Handle a timed-out callback request.\n\n Parameters\n ----------\n msg_id : uuid.UUID for message\n The name of the reply which was expected.\n\n \"\"\"\n msg, reply_cb, inform_cb, user_data, timeout_handle = \\\n self._pop_async_request(msg_id, None)\n # We may have been racing with the actual reply handler if the reply\n # arrived close to the timeout expiry,\n # which means the self._pop_async_request() call gave us None's.\n # In this case, just bail.\n 
#\n # NM 2014-09-17 Not sure if this is true after porting to tornado,\n # but I'm too afraid to remove this code :-/\n if timeout_handle is None:\n return\n\n reason = \"Request {0.name} timed out after {1:f} seconds.\".format(\n msg, self.ioloop.time() - start_time)\n self._do_fail_callback(\n reason, msg, reply_cb, inform_cb, user_data, timeout_handle)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nhandle a reply message related to the current request.", "response": "def handle_reply(self, msg):\n \"\"\"Handle a reply message related to the current request.\n\n Reply messages not related to the current request go up\n to the base class method.\n\n Parameters\n ----------\n msg : Message object\n The reply message to dispatch.\n\n \"\"\"\n # this may also result in reply_cb being None if no\n # reply_cb was passed to the request method\n if msg.mid is not None:\n _request, reply_cb, _inform_cb, user_data, timeout_handle = \\\n self._pop_async_request(msg.mid, None)\n else:\n request, _reply_cb, _inform_cb, _user_data, timeout_handle = \\\n self._peek_async_request(None, msg.name)\n if request is not None and request.mid is None:\n # we didn't send a mid so this is the request we want\n _request, reply_cb, _inform_cb, user_data, timeout_handle = \\\n self._pop_async_request(None, msg.name)\n else:\n reply_cb, user_data = None, None\n\n if timeout_handle is not None:\n self.ioloop.remove_timeout(timeout_handle)\n\n if reply_cb is None:\n reply_cb = super(AsyncClient, self).handle_reply\n # override user_data since handle_reply takes no user_data\n user_data = None\n\n try:\n if user_data is None:\n reply_cb(msg)\n else:\n reply_cb(msg, *user_data)\n except Exception:\n e_type, e_value, trace = sys.exc_info()\n reason = \"\\n\".join(traceback.format_exception(\n e_type, e_value, trace, self._tb_limit))\n self._logger.error(\"Callback reply %s FAIL: %s\" %\n (msg.name, reason))"} {"SOURCE": "codesearchnet", "instruction": 
"How would you explain what the following Python 3 function does\ndef validate_answer(self, value):\n # Check that the answer is JSON-serializable\n try:\n serialized = json.dumps(value)\n except (ValueError, TypeError):\n raise serializers.ValidationError(\"Answer value must be JSON-serializable\")\n\n # Check the length of the serialized representation\n if len(serialized) > Submission.MAXSIZE:\n raise serializers.ValidationError(\"Maximum answer size exceeded.\")\n\n return value", "response": "Validate the answer value."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_annotations(self, obj):\n annotations = ScoreAnnotation.objects.filter(score_id=obj.id)\n return [\n ScoreAnnotationSerializer(instance=annotation).data\n for annotation in annotations\n ]", "response": "Inspect ScoreAnnotations to attach all relevant annotations."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a reset score.", "response": "def create_reset_score(cls, student_item):\n \"\"\"\n Create a \"reset\" score (a score with a null submission).\n\n Only scores created after the most recent \"reset\" score\n should be used to determine a student's effective score.\n\n Args:\n student_item (StudentItem): The student item model.\n\n Returns:\n Score: The newly created \"reset\" score.\n\n Raises:\n DatabaseError: An error occurred while creating the score\n\n \"\"\"\n # By setting the \"reset\" flag, we ensure that the \"highest\"\n # score in the score summary will point to this score.\n # By setting points earned and points possible to 0,\n # we ensure that this score will be hidden from the user.\n return cls.objects.create(\n student_item=student_item,\n submission=None,\n points_earned=0,\n points_possible=0,\n reset=True,\n )"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nlisten for new Scores and update the relevant ScoreSummary.", "response": "def 
update_score_summary(sender, **kwargs):\n \"\"\"\n Listen for new Scores and update the relevant ScoreSummary.\n\n Args:\n sender: not used\n\n Kwargs:\n instance (Score): The score model whose save triggered this receiver.\n\n \"\"\"\n score = kwargs['instance']\n try:\n score_summary = ScoreSummary.objects.get(\n student_item=score.student_item\n )\n score_summary.latest = score\n\n # A score with the \"reset\" flag set will always replace the current highest score\n if score.reset:\n score_summary.highest = score\n # The conversion to a float may return None if points possible is zero\n # In Python, None is always less than an integer, so any score\n # with non-null points possible will take precedence.\n elif score.to_float() > score_summary.highest.to_float():\n score_summary.highest = score\n score_summary.save()\n except ScoreSummary.DoesNotExist:\n ScoreSummary.objects.create(\n student_item=score.student_item,\n highest=score,\n latest=score,\n )\n except DatabaseError as err:\n logger.exception(\n u\"Error while updating score summary for student item {}\"\n .format(score.student_item)\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds the arguments to the command parser.", "response": "def add_arguments(self, parser):\n \"\"\"\n Add arguments to the command parser.\n\n Uses argparse syntax. See documentation at\n https://docs.python.org/3/library/argparse.html.\n \"\"\"\n parser.add_argument(\n '--start', '-s',\n default=0,\n type=int,\n help=u\"The Submission.id at which to begin updating rows. 0 by default.\"\n )\n parser.add_argument(\n '--chunk', '-c',\n default=1000,\n type=int,\n help=u\"Batch size, how many rows to update in a given transaction. Default 1000.\",\n )\n parser.add_argument(\n '--wait', '-w',\n default=2,\n type=int,\n help=u\"Wait time between transactions, in seconds. 
Default 2.\",\n )"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef handle(self, *args, **options):\n # Note that by taking last_id here, we're going to miss any submissions created *during* the command execution\n # But that's okay! All new entries have already been created using the new style, no acion needed there\n last_id = Submission._objects.all().aggregate(Max('id'))['id__max']\n log.info(\"Beginning uuid update\")\n\n current = options['start']\n while current < last_id:\n end_chunk = current + options['chunk'] if last_id - options['chunk'] >= current else last_id\n log.info(\"Updating entries in range [{}, {}]\".format(current, end_chunk))\n with transaction.atomic():\n for submission in Submission._objects.filter(id__gte=current, id__lte=end_chunk).iterator():\n submission.save(update_fields=['uuid'])\n time.sleep(options['wait'])\n current = end_chunk + 1", "response": "This is the main entry point for the command execution."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nretrieves all submissions associated with the given student item. Developer utility for accessing all the submissions associated with a student item. The student item is specified by the unique combination of course, student, and item. Args: request (dict): The request. course_id (str): The course id for this student item. student_id (str): The student id for this student item. item_id (str): The item id for this student item. Returns: HttpResponse: The response object for this request. Renders a simple development page with all the submissions related to the specified student item.", "response": "def get_submissions_for_student_item(request, course_id, student_id, item_id):\n \"\"\"Retrieve all submissions associated with the given student item.\n\n Developer utility for accessing all the submissions associated with a\n student item. 
The student item is specified by the unique combination of\n course, student, and item.\n\n Args:\n request (dict): The request.\n course_id (str): The course id for this student item.\n student_id (str): The student id for this student item.\n item_id (str): The item id for this student item.\n\n Returns:\n HttpResponse: The response object for this request. Renders a simple\n development page with all the submissions related to the specified\n student item.\n\n \"\"\"\n student_item_dict = dict(\n course_id=course_id,\n student_id=student_id,\n item_id=item_id,\n )\n context = dict(**student_item_dict)\n try:\n submissions = get_submissions(student_item_dict)\n context[\"submissions\"] = submissions\n except SubmissionRequestError:\n context[\"error\"] = \"The specified student item was not found.\"\n\n return render_to_response('submissions.html', context)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a submission for assessment.", "response": "def create_submission(student_item_dict, answer, submitted_at=None, attempt_number=None):\n \"\"\"Creates a submission for assessment.\n\n Generic means by which to submit an answer for assessment.\n\n Args:\n student_item_dict (dict): The student_item this\n submission is associated with. This is used to determine which\n course, student, and location this submission belongs to.\n\n answer (JSON-serializable): The answer given by the student to be assessed.\n\n submitted_at (datetime): The date in which this submission was submitted.\n If not specified, defaults to the current date.\n\n attempt_number (int): A student may be able to submit multiple attempts\n per question. This allows the designated attempt to be overridden.\n If the attempt is not specified, it will take the most recent\n submission, as specified by the submitted_at time, and use its\n attempt_number plus one.\n\n Returns:\n dict: A representation of the created Submission. 
The submission\n contains five attributes: student_item, attempt_number, submitted_at,\n created_at, and answer. 'student_item' is the ID of the related student\n item for the submission. 'attempt_number' is the attempt this submission\n represents for this question. 'submitted_at' represents the time this\n submission was submitted, which can be configured, versus the\n 'created_at' date, which is when the submission is first created.\n\n Raises:\n SubmissionRequestError: Raised when there are validation errors for the\n student item or submission. This can be caused by the student item\n missing required values, the submission being too long, the\n attempt_number is negative, or the given submitted_at time is invalid.\n SubmissionInternalError: Raised when submission access causes an\n internal error.\n\n Examples:\n >>> student_item_dict = dict(\n >>> student_id=\"Tim\",\n >>> item_id=\"item_1\",\n >>> course_id=\"course_1\",\n >>> item_type=\"type_one\"\n >>> )\n >>> create_submission(student_item_dict, \"The answer is 42.\", datetime.utcnow, 1)\n {\n 'student_item': 2,\n 'attempt_number': 1,\n 'submitted_at': datetime.datetime(2014, 1, 29, 17, 14, 52, 649284 tzinfo=<UTC>),\n 'created_at': datetime.datetime(2014, 1, 29, 17, 14, 52, 668850, tzinfo=<UTC>),\n 'answer': u'The answer is 42.'\n }\n\n \"\"\"\n student_item_model = _get_or_create_student_item(student_item_dict)\n if attempt_number is None:\n try:\n submissions = Submission.objects.filter(\n student_item=student_item_model)[:1]\n except DatabaseError:\n error_message = u\"An error occurred while filtering submissions for student item: {}\".format(\n student_item_dict)\n logger.exception(error_message)\n raise SubmissionInternalError(error_message)\n attempt_number = submissions[0].attempt_number + 1 if submissions else 1\n\n model_kwargs = {\n \"student_item\": student_item_model.pk,\n \"answer\": answer,\n \"attempt_number\": attempt_number,\n }\n if submitted_at:\n model_kwargs[\"submitted_at\"] = 
submitted_at\n\n try:\n submission_serializer = SubmissionSerializer(data=model_kwargs)\n if not submission_serializer.is_valid():\n raise SubmissionRequestError(field_errors=submission_serializer.errors)\n submission_serializer.save()\n\n sub_data = submission_serializer.data\n _log_submission(sub_data, student_item_dict)\n\n return sub_data\n\n except DatabaseError:\n error_message = u\"An error occurred while creating submission {} for student item: {}\".format(\n model_kwargs,\n student_item_dict\n )\n logger.exception(error_message)\n raise SubmissionInternalError(error_message)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_submission(submission_uuid, read_replica=False):\n if not isinstance(submission_uuid, six.string_types):\n if isinstance(submission_uuid, UUID):\n submission_uuid = six.text_type(submission_uuid)\n else:\n raise SubmissionRequestError(\n msg=\"submission_uuid ({!r}) must be serializable\".format(submission_uuid)\n )\n\n cache_key = Submission.get_cache_key(submission_uuid)\n try:\n cached_submission_data = cache.get(cache_key)\n except Exception:\n # The cache backend could raise an exception\n # (for example, memcache keys that contain spaces)\n logger.exception(\"Error occurred while retrieving submission from the cache\")\n cached_submission_data = None\n\n if cached_submission_data:\n logger.info(\"Get submission {} (cached)\".format(submission_uuid))\n return cached_submission_data\n\n try:\n submission = _get_submission_model(submission_uuid, read_replica)\n submission_data = SubmissionSerializer(submission).data\n cache.set(cache_key, submission_data)\n except Submission.DoesNotExist:\n logger.error(\"Submission {} not found.\".format(submission_uuid))\n raise SubmissionNotFoundError(\n u\"No submission matching uuid {}\".format(submission_uuid)\n )\n except Exception as exc:\n # Something very unexpected has just happened (like DB misconfig)\n err_msg = 
\"Could not get submission due to error: {}\".format(exc)\n logger.exception(err_msg)\n raise SubmissionInternalError(err_msg)\n\n logger.info(\"Get submission {}\".format(submission_uuid))\n return submission_data", "response": "Retrieves a single submission by uuid."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nretrieve a submission by its unique identifier including the associated student item.", "response": "def get_submission_and_student(uuid, read_replica=False):\n \"\"\"\n Retrieve a submission by its unique identifier, including the associated student item.\n\n Args:\n uuid (str): the unique identifier of the submission.\n\n Kwargs:\n read_replica (bool): If true, attempt to use the read replica database.\n If no read replica is available, use the default database.\n\n Returns:\n Serialized Submission model (dict) containing a serialized StudentItem model\n\n Raises:\n SubmissionNotFoundError: Raised if the submission does not exist.\n SubmissionRequestError: Raised if the search parameter is not a string.\n SubmissionInternalError: Raised for unknown errors.\n\n \"\"\"\n # This may raise API exceptions\n submission = get_submission(uuid, read_replica=read_replica)\n\n # Retrieve the student item from the cache\n cache_key = \"submissions.student_item.{}\".format(submission['student_item'])\n try:\n cached_student_item = cache.get(cache_key)\n except Exception:\n # The cache backend could raise an exception\n # (for example, memcache keys that contain spaces)\n logger.exception(\"Error occurred while retrieving student item from the cache\")\n cached_student_item = None\n\n if cached_student_item is not None:\n submission['student_item'] = cached_student_item\n else:\n # There is probably a more idiomatic way to do this using the Django REST framework\n try:\n student_item_qs = StudentItem.objects\n if read_replica:\n student_item_qs = _use_read_replica(student_item_qs)\n\n student_item = 
student_item_qs.get(id=submission['student_item'])\n submission['student_item'] = StudentItemSerializer(student_item).data\n cache.set(cache_key, submission['student_item'])\n except Exception as ex:\n err_msg = \"Could not get submission due to error: {}\".format(ex)\n logger.exception(err_msg)\n raise SubmissionInternalError(err_msg)\n\n return submission"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_submissions(student_item_dict, limit=None):\n student_item_model = _get_or_create_student_item(student_item_dict)\n try:\n submission_models = Submission.objects.filter(\n student_item=student_item_model)\n except DatabaseError:\n error_message = (\n u\"Error getting submission request for student item {}\"\n .format(student_item_dict)\n )\n logger.exception(error_message)\n raise SubmissionNotFoundError(error_message)\n\n if limit:\n submission_models = submission_models[:limit]\n\n return SubmissionSerializer(submission_models, many=True).data", "response": "Returns the submissions for the specified student item."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_all_submissions(course_id, item_id, item_type, read_replica=True):\n submission_qs = Submission.objects\n if read_replica:\n submission_qs = _use_read_replica(submission_qs)\n # We cannot use SELECT DISTINCT ON because it's PostgreSQL only, so unfortunately\n # our results will contain every entry of each student, not just the most recent.\n # We sort by student_id and primary key, so the reults will be grouped be grouped by\n # student, with the most recent submission being the first one in each group.\n query = submission_qs.select_related('student_item').filter(\n student_item__course_id=course_id,\n student_item__item_id=item_id,\n student_item__item_type=item_type,\n ).order_by('student_item__student_id', '-submitted_at', '-id').iterator()\n\n for unused_student_id, 
row_iter in itertools.groupby(query, operator.attrgetter('student_item.student_id')):\n submission = next(row_iter)\n data = SubmissionSerializer(submission).data\n data['student_id'] = submission.student_item.student_id\n yield data", "response": "For the given item get the most recent submission for every student who has submitted."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets all the information for the given course and item type.", "response": "def get_all_course_submission_information(course_id, item_type, read_replica=True):\n \"\"\" For the given course, get all student items of the given item type, all the submissions for those itemes,\n and the latest scores for each item. If a submission was given a score that is not the latest score for the\n relevant student item, it will still be included but without score.\n\n Args:\n course_id (str): The course that we are getting submissions from.\n item_type (str): The type of items that we are getting submissions for.\n read_replica (bool): Try to use the database's read replica if it's available.\n\n Yields:\n A tuple of three dictionaries representing:\n (1) a student item with the following fields:\n student_id\n course_id\n student_item\n item_type\n (2) a submission with the following fields:\n student_item\n attempt_number\n submitted_at\n created_at\n answer\n (3) a score with the following fields, if one exists and it is the latest score:\n (if both conditions are not met, an empty dict is returned here)\n student_item\n submission\n points_earned\n points_possible\n created_at\n submission_uuid\n \"\"\"\n\n submission_qs = Submission.objects\n if read_replica:\n submission_qs = _use_read_replica(submission_qs)\n\n query = submission_qs.select_related('student_item__scoresummary__latest__submission').filter(\n student_item__course_id=course_id,\n student_item__item_type=item_type,\n ).iterator()\n\n for submission in query:\n student_item = 
submission.student_item\n serialized_score = {}\n if hasattr(student_item, 'scoresummary'):\n latest_score = student_item.scoresummary.latest\n\n # Only include the score if it is not a reset score (is_hidden), and if the current submission is the same\n # as the student_item's latest score's submission. This matches the behavior of the API's get_score method.\n if (not latest_score.is_hidden()) and latest_score.submission.uuid == submission.uuid:\n serialized_score = ScoreSerializer(latest_score).data\n yield (\n StudentItemSerializer(student_item).data,\n SubmissionSerializer(submission).data,\n serialized_score\n )"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_top_submissions(course_id, item_id, item_type, number_of_top_scores, use_cache=True, read_replica=True):\n if number_of_top_scores < 1 or number_of_top_scores > MAX_TOP_SUBMISSIONS:\n error_msg = (\n u\"Number of top scores must be a number between 1 and {}.\".format(MAX_TOP_SUBMISSIONS)\n )\n logger.exception(error_msg)\n raise SubmissionRequestError(msg=error_msg)\n\n # First check the cache (unless caching is disabled)\n cache_key = \"submissions.top_submissions.{course}.{item}.{type}.{number}\".format(\n course=course_id,\n item=item_id,\n type=item_type,\n number=number_of_top_scores\n )\n top_submissions = cache.get(cache_key) if use_cache else None\n\n # If we can't find it in the cache (or caching is disabled), check the database\n # By default, prefer the read-replica.\n if top_submissions is None:\n try:\n query = ScoreSummary.objects.filter(\n student_item__course_id=course_id,\n student_item__item_id=item_id,\n student_item__item_type=item_type,\n latest__points_earned__gt=0\n ).select_related('latest', 'latest__submission').order_by(\"-latest__points_earned\")\n\n if read_replica:\n query = _use_read_replica(query)\n score_summaries = query[:number_of_top_scores]\n except DatabaseError:\n msg = u\"Could not fetch top score summaries for 
course {}, item {} of type {}\".format(\n course_id, item_id, item_type\n )\n logger.exception(msg)\n raise SubmissionInternalError(msg)\n\n # Retrieve the submission content for each top score\n top_submissions = [\n {\n \"score\": score_summary.latest.points_earned,\n \"content\": SubmissionSerializer(score_summary.latest.submission).data['answer']\n }\n for score_summary in score_summaries\n ]\n\n # Always store the retrieved list in the cache\n cache.set(cache_key, top_submissions, TOP_SUBMISSIONS_CACHE_TIMEOUT)\n\n return top_submissions", "response": "This function returns a number of top submissions for a particular student item within a particular assessment module."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting the score for a particular student item Each student item should have a unique score. This function will return the score if it is available. A score is only calculated for a student item if it has completed the workflow for a particular assessment module. Args: student_item (dict): The dictionary representation of a student item. Function returns the score related to this student item. Returns: score (dict): The score associated with this student item. None if there is no score found. Raises: SubmissionInternalError: Raised if a score cannot be retrieved because of an internal server error. Examples: >>> student_item = { >>> \"student_id\":\"Tim\", >>> \"course_id\":\"TestCourse\", >>> \"item_id\":\"u_67\", >>> \"item_type\":\"openassessment\" >>> } >>> >>> get_score(student_item) [{ 'student_item': 2, 'submission': 2, 'points_earned': 8, 'points_possible': 20, 'created_at': datetime.datetime(2014, 2, 7, 18, 30, 1, 807911, tzinfo=<UTC>) }]", "response": "def get_score(student_item):\n \"\"\"Get the score for a particular student item\n\n Each student item should have a unique score. This function will return the\n score if it is available. 
A score is only calculated for a student item if\n it has completed the workflow for a particular assessment module.\n\n Args:\n student_item (dict): The dictionary representation of a student item.\n Function returns the score related to this student item.\n\n Returns:\n score (dict): The score associated with this student item. None if there\n is no score found.\n\n Raises:\n SubmissionInternalError: Raised if a score cannot be retrieved because\n of an internal server error.\n\n Examples:\n >>> student_item = {\n >>> \"student_id\":\"Tim\",\n >>> \"course_id\":\"TestCourse\",\n >>> \"item_id\":\"u_67\",\n >>> \"item_type\":\"openassessment\"\n >>> }\n >>>\n >>> get_score(student_item)\n [{\n 'student_item': 2,\n 'submission': 2,\n 'points_earned': 8,\n 'points_possible': 20,\n 'created_at': datetime.datetime(2014, 2, 7, 18, 30, 1, 807911, tzinfo=<UTC>)\n }]\n\n \"\"\"\n try:\n student_item_model = StudentItem.objects.get(**student_item)\n score = ScoreSummary.objects.get(student_item=student_item_model).latest\n except (ScoreSummary.DoesNotExist, StudentItem.DoesNotExist):\n return None\n\n # By convention, scores are hidden if \"points possible\" is set to 0.\n # This can occur when an instructor has reset scores for a student.\n if score.is_hidden():\n return None\n else:\n return ScoreSerializer(score).data"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a dictionary mapping item_ids to scores.", "response": "def get_scores(course_id, student_id):\n \"\"\"Return a dict mapping item_ids to scores.\n\n Scores are represented by serialized Score objects in JSON-like dict\n format.\n\n This method would be used by an LMS to find all the scores for a given\n student in a given course.\n\n Scores that are \"hidden\" (because they have points earned set to zero)\n are excluded from the results.\n\n Args:\n course_id (str): Course ID, used to do a lookup on the `StudentItem`.\n student_id (str): Student ID, used to do a lookup on the 
`StudentItem`.\n\n Returns:\n dict: The keys are `item_id`s (`str`) and the values are tuples of\n `(points_earned, points_possible)`. All points are integer values and\n represent the raw, unweighted scores. Submissions does not have any\n concept of weights. If there are no entries matching the `course_id` or\n `student_id`, we simply return an empty dictionary. This is not\n considered an error because there might be many queries for the progress\n page of a person who has never submitted anything.\n\n Raises:\n SubmissionInternalError: An unexpected error occurred while resetting scores.\n \"\"\"\n try:\n score_summaries = ScoreSummary.objects.filter(\n student_item__course_id=course_id,\n student_item__student_id=student_id,\n ).select_related('latest', 'latest__submission', 'student_item')\n except DatabaseError:\n msg = u\"Could not fetch scores for course {}, student {}\".format(\n course_id, student_id\n )\n logger.exception(msg)\n raise SubmissionInternalError(msg)\n scores = {\n summary.student_item.item_id: UnannotatedScoreSerializer(summary.latest).data\n for summary in score_summaries if not summary.latest.is_hidden()\n }\n return scores"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreset the score of a specific student on a specific problem.", "response": "def reset_score(student_id, course_id, item_id, clear_state=False, emit_signal=True):\n \"\"\"\n Reset scores for a specific student on a specific problem.\n\n Note: this does *not* delete `Score` models from the database,\n since these are immutable. 
It simply creates a new score with\n the \"reset\" flag set to True.\n\n Args:\n student_id (unicode): The ID of the student for whom to reset scores.\n course_id (unicode): The ID of the course containing the item to reset.\n item_id (unicode): The ID of the item for which to reset scores.\n clear_state (bool): If True, will appear to delete any submissions associated with the specified StudentItem\n\n Returns:\n None\n\n Raises:\n SubmissionInternalError: An unexpected error occurred while resetting scores.\n\n \"\"\"\n # Retrieve the student item\n try:\n student_item = StudentItem.objects.get(\n student_id=student_id, course_id=course_id, item_id=item_id\n )\n except StudentItem.DoesNotExist:\n # If there is no student item, then there is no score to reset,\n # so we can return immediately.\n return\n\n # Create a \"reset\" score\n try:\n score = Score.create_reset_score(student_item)\n if emit_signal:\n # Send a signal out to any listeners who are waiting for scoring events.\n score_reset.send(\n sender=None,\n anonymous_user_id=student_id,\n course_id=course_id,\n item_id=item_id,\n created_at=score.created_at,\n )\n\n if clear_state:\n for sub in student_item.submission_set.all():\n # soft-delete the Submission\n sub.status = Submission.DELETED\n sub.save(update_fields=[\"status\"])\n\n # Also clear out cached values\n cache_key = Submission.get_cache_key(sub.uuid)\n cache.delete(cache_key)\n\n except DatabaseError:\n msg = (\n u\"Error occurred while reseting scores for\"\n u\" item {item_id} in course {course_id} for student {student_id}\"\n ).format(item_id=item_id, course_id=course_id, student_id=student_id)\n logger.exception(msg)\n raise SubmissionInternalError(msg)\n else:\n msg = u\"Score reset for item {item_id} in course {course_id} for student {student_id}\".format(\n item_id=item_id, course_id=course_id, student_id=student_id\n )\n logger.info(msg)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nset a score for 
a particular submission.", "response": "def set_score(submission_uuid, points_earned, points_possible,\n annotation_creator=None, annotation_type=None, annotation_reason=None):\n \"\"\"Set a score for a particular submission.\n\n Sets the score for a particular submission. This score is calculated\n externally to the API.\n\n Args:\n submission_uuid (str): UUID for the submission (must exist).\n points_earned (int): The earned points for this submission.\n points_possible (int): The total points possible for this particular student item.\n\n annotation_creator (str): An optional field for recording who gave this particular score\n annotation_type (str): An optional field for recording what type of annotation should be created,\n e.g. \"staff_override\".\n annotation_reason (str): An optional field for recording why this score was set to its value.\n\n Returns:\n None\n\n Raises:\n SubmissionInternalError: Thrown if there was an internal error while\n attempting to save the score.\n SubmissionRequestError: Thrown if the given student item or submission\n are not found.\n\n Examples:\n >>> set_score(\"a778b933-9fb3-11e3-9c0f-040ccee02800\", 11, 12)\n {\n 'student_item': 2,\n 'submission': 1,\n 'points_earned': 11,\n 'points_possible': 12,\n 'created_at': datetime.datetime(2014, 2, 7, 20, 6, 42, 331156, tzinfo=<UTC>)\n }\n\n \"\"\"\n try:\n submission_model = _get_submission_model(submission_uuid)\n except Submission.DoesNotExist:\n raise SubmissionNotFoundError(\n u\"No submission matching uuid {}\".format(submission_uuid)\n )\n except DatabaseError:\n error_msg = u\"Could not retrieve submission {}.\".format(\n submission_uuid\n )\n logger.exception(error_msg)\n raise SubmissionRequestError(msg=error_msg)\n\n score = ScoreSerializer(\n data={\n \"student_item\": submission_model.student_item.pk,\n \"submission\": submission_model.pk,\n \"points_earned\": points_earned,\n \"points_possible\": points_possible,\n }\n )\n if not score.is_valid():\n 
logger.exception(score.errors)\n raise SubmissionInternalError(score.errors)\n\n # When we save the score, a score summary will be created if\n # it does not already exist.\n # When the database's isolation level is set to repeatable-read,\n # it's possible for a score summary to exist for this student item,\n # even though we cannot retrieve it.\n # In this case, we assume that someone else has already created\n # a score summary and ignore the error.\n # TODO: once we're using Django 1.8, use transactions to ensure that these\n # two models are saved at the same time.\n try:\n score_model = score.save()\n _log_score(score_model)\n if annotation_creator is not None:\n score_annotation = ScoreAnnotation(\n score=score_model,\n creator=annotation_creator,\n annotation_type=annotation_type,\n reason=annotation_reason\n )\n score_annotation.save()\n # Send a signal out to any listeners who are waiting for scoring events.\n score_set.send(\n sender=None,\n points_possible=points_possible,\n points_earned=points_earned,\n anonymous_user_id=submission_model.student_item.student_id,\n course_id=submission_model.student_item.course_id,\n item_id=submission_model.student_item.item_id,\n created_at=score_model.created_at,\n )\n except IntegrityError:\n pass"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nlog the creation of a submission.", "response": "def _log_submission(submission, student_item):\n \"\"\"\n Log the creation of a submission.\n\n Args:\n submission (dict): The serialized submission model.\n student_item (dict): The serialized student item model.\n\n Returns:\n None\n \"\"\"\n logger.info(\n u\"Created submission uuid={submission_uuid} for \"\n u\"(course_id={course_id}, item_id={item_id}, \"\n u\"anonymous_student_id={anonymous_student_id})\"\n .format(\n submission_uuid=submission[\"uuid\"],\n course_id=student_item[\"course_id\"],\n item_id=student_item[\"item_id\"],\n 
anonymous_student_id=student_item[\"student_id\"]\n )\n )"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nlogs the creation of a score.", "response": "def _log_score(score):\n \"\"\"\n Log the creation of a score.\n\n Args:\n score (Score): The score model.\n\n Returns:\n None\n \"\"\"\n logger.info(\n \"Score of ({}/{}) set for submission {}\"\n .format(score.points_earned, score.points_possible, score.submission.uuid)\n )"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _get_or_create_student_item(student_item_dict):\n try:\n try:\n return StudentItem.objects.get(**student_item_dict)\n except StudentItem.DoesNotExist:\n student_item_serializer = StudentItemSerializer(\n data=student_item_dict\n )\n if not student_item_serializer.is_valid():\n logger.error(\n u\"Invalid StudentItemSerializer: errors:{} data:{}\".format(\n student_item_serializer.errors,\n student_item_dict\n )\n )\n raise SubmissionRequestError(field_errors=student_item_serializer.errors)\n return student_item_serializer.save()\n except DatabaseError:\n error_message = u\"An error occurred creating student item: {}\".format(\n student_item_dict\n )\n logger.exception(error_message)\n raise SubmissionInternalError(error_message)", "response": "Gets or creates a Student Item that matches the given dict."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _request(self,\n path,\n method=\"GET\",\n query=None,\n body=None,\n headers=None):\n \"\"\"Make a Manta request\n\n ...\n @returns (res, content)\n \"\"\"\n assert path.startswith('/'), \"bogus path: %r\" % path\n\n # Presuming utf-8 encoding here for requests. Not sure if that is\n # technically correct.\n if not isinstance(path, bytes):\n spath = path.encode('utf-8')\n else:\n spath = path\n\n qpath = urlquote(spath)\n if query:\n qpath += '?' 
+ urlencode(query)\n url = self.url + qpath\n http = self._get_http()\n\n ubody = body\n if body is not None and isinstance(body, dict):\n ubody = urlencode(body)\n if headers is None:\n headers = {}\n headers[\"User-Agent\"] = self.user_agent\n\n if self.signer:\n # Signature auth.\n if \"Date\" not in headers:\n headers[\"Date\"] = http_date()\n sigstr = 'date: ' + headers[\"Date\"]\n algorithm, fingerprint, signature = self.signer.sign(sigstr.encode(\n 'utf-8'))\n auth = 'Signature keyId=\"/%s/keys/%s\",algorithm=\"%s\",signature=\"%s\"'\\\n % ('/'.join(filter(None, [self.account, self.subuser])),\n fingerprint, algorithm, signature.decode('utf-8'))\n\n headers[\"Authorization\"] = auth\n\n if self.role:\n headers['Role'] = self.role\n\n # python 3\n try:\n url = url.decode('utf-8') # encoding='utf-8'\n except:\n pass\n\n return http.request(url, method, ubody, headers)", "response": "Make a Manta request to the specified path."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef put_directory(self, mdir):\n log.debug('PutDirectory %r', mdir)\n headers = {\"Content-Type\": \"application/json; type=directory\"}\n res, content = self._request(mdir, \"PUT\", headers=headers)\n if res[\"status\"] != \"204\":\n raise errors.MantaAPIError(res, content)", "response": "Put a directory into the Manta."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef list_directory(self, mdir, limit=None, marker=None):\n res, dirents = self.list_directory2(mdir, limit=limit, marker=marker)\n return dirents", "response": "List the directory entries in a Manta."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef list_directory2(self, mdir, limit=None, marker=None):\n log.debug('ListDirectory %r', mdir)\n\n query = {}\n if limit:\n query[\"limit\"] = limit\n if marker:\n query[\"marker\"] = marker\n\n res, content = 
self._request(mdir, \"GET\", query=query)\n if res[\"status\"] != \"200\":\n raise errors.MantaAPIError(res, content)\n lines = content.splitlines(False)\n dirents = []\n for line in lines:\n if not line.strip():\n continue\n try:\n dirents.append(json.loads(line.decode(\"utf-8\")))\n except ValueError:\n raise errors.MantaError('invalid directory entry: %r' % line)\n return res, dirents", "response": "A lower - level version of list_directory that returns the\n response object which includes the headers."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef head_directory(self, mdir):\n log.debug('HEAD ListDirectory %r', mdir)\n res, content = self._request(mdir, \"HEAD\")\n if res[\"status\"] != \"200\":\n raise errors.MantaAPIError(res, content)\n return res", "response": "HEAD method on ListDirectory"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_object(self, mpath, path=None, accept=\"*/*\"):\n res, content = self.get_object2(mpath, path=path, accept=accept)\n try:\n if isinstance(content, bytes):\n return content.decode(sys.stdout.encoding)\n else:\n return content\n except UnicodeDecodeError:\n return content", "response": "Get the object from the server."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndelete an object from the Manta.", "response": "def delete_object(self, mpath):\n \"\"\"DeleteObject\n https://apidocs.joyent.com/manta/api.html#DeleteObject\n\n @param mpath {str} Required. A manta path, e.g. 
'/trent/stor/myobj'.\n \"\"\"\n log.debug('DeleteObject %r', mpath)\n res, content = self._request(mpath, \"DELETE\")\n if res[\"status\"] != \"204\":\n raise errors.MantaAPIError(res, content)\n return res"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nputs a link to a specific object.", "response": "def put_snaplink(self, link_path, object_path):\n \"\"\"PutSnapLink\n https://mo.joyent.com/docs/muskie/master/api.html#putsnaplink\n\n @param link_path {str} Required. A manta path, e.g.\n '/trent/stor/mylink'.\n @param object_path {str} Required. The manta path to an existing target\n manta object.\n \"\"\"\n log.debug('PutLink %r -> %r', link_path, object_path)\n headers = {\n \"Content-Type\": \"application/json; type=link\",\n #\"Content-Length\": \"0\", #XXX Needed?\n \"Location\": object_path\n }\n res, content = self._request(link_path, \"PUT\", headers=headers)\n if res[\"status\"] != \"204\":\n raise errors.MantaAPIError(res, content)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncreate a new job in the Manta account.", "response": "def create_job(self, phases, name=None, input=None):\n \"\"\"CreateJob\n https://apidocs.joyent.com/manta/api.html#CreateJob\n \"\"\"\n log.debug('CreateJob')\n path = '/%s/jobs' % self.account\n body = {\"phases\": phases}\n if name:\n body[\"name\"] = name\n if input:\n body[\"input\"] = input\n headers = {\"Content-Type\": \"application/json\"}\n res, content = self._request(path,\n \"POST\",\n body=json.dumps(body),\n headers=headers)\n if res[\"status\"] != '201':\n raise errors.MantaAPIError(res, content)\n location = res[\"location\"]\n assert res[\"location\"]\n job_id = res[\"location\"].rsplit('/', 1)[-1]\n return job_id"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding the given keys to the job s live in list.", "response": "def add_job_inputs(self, job_id, keys):\n \"\"\"AddJobInputs\n 
https://apidocs.joyent.com/manta/api.html#AddJobInputs\n \"\"\"\n log.debug(\"AddJobInputs %r\", job_id)\n path = \"/%s/jobs/%s/live/in\" % (self.account, job_id)\n body = '\\r\\n'.join(keys) + '\\r\\n'\n headers = {\n \"Content-Type\": \"text/plain\",\n \"Content-Length\": str(len(body))\n }\n res, content = self._request(path, \"POST\", body=body, headers=headers)\n if res[\"status\"] != '204':\n raise errors.MantaAPIError(res, content)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef end_job_input(self, job_id):\n log.debug(\"EndJobInput %r\", job_id)\n path = \"/%s/jobs/%s/live/in/end\" % (self.account, job_id)\n headers = {\n # \"Content-Length\": \"0\" #XXX needed?\n }\n res, content = self._request(path, \"POST\", headers=headers)\n if res[\"status\"] != '202':\n raise errors.MantaAPIError(res, content)", "response": "End a running job input."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef list_jobs(self, state=None, limit=None, marker=None):\n log.debug('ListJobs')\n\n path = \"/%s/jobs\" % self.account\n query = {}\n if state:\n query[\"state\"] = state\n if limit:\n query[\"limit\"] = limit\n if marker:\n query[\"marker\"] = marker\n\n res, content = self._request(path, \"GET\", query=query)\n if res[\"status\"] != \"200\":\n raise errors.MantaAPIError(res, content)\n lines = content.splitlines(False)\n jobs = []\n for line in lines:\n if not line.strip():\n continue\n try:\n jobs.append(json.loads(line))\n except ValueError:\n raise errors.MantaError('invalid job entry: %r' % line)\n return jobs", "response": "List all jobs in the account."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting the current status of a job.", "response": "def get_job(self, job_id):\n \"\"\"GetJob\n https://apidocs.joyent.com/manta/api.html#GetJob\n \"\"\"\n log.debug(\"GetJob %r\", job_id)\n path = \"/%s/jobs/%s/live/status\" % (self.account, 
job_id)\n res, content = self._request(path, \"GET\")\n if res.status != 200:\n raise errors.MantaAPIError(res, content)\n try:\n return json.loads(content)\n except ValueError:\n raise errors.MantaError('invalid job data: %r' % content)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets the output of a job.", "response": "def get_job_output(self, job_id):\n \"\"\"GetJobOutput\n https://apidocs.joyent.com/manta/api.html#GetJobOutput\n \"\"\"\n log.debug(\"GetJobOutput %r\", job_id)\n path = \"/%s/jobs/%s/live/out\" % (self.account, job_id)\n res, content = self._request(path, \"GET\")\n if res[\"status\"] != \"200\":\n raise errors.MantaAPIError(res, content)\n keys = content.splitlines(False)\n return keys"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_job_errors(self, job_id):\n log.debug(\"GetJobErrors %r\", job_id)\n path = \"/%s/jobs/%s/live/err\" % (self.account, job_id)\n res, content = self._request(path, \"GET\")\n if res[\"status\"] != \"200\":\n raise errors.MantaAPIError(res, content)\n return self._job_errors_from_content(content)", "response": "Get the errors from a specific job."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nlisting a directory and return a dict mapping names to their directory entries.", "response": "def ls(self, mdir, limit=None, marker=None):\n \"\"\"List a directory.\n\n Dev Notes:\n - If `limit` and `marker` are *not* specified. This handles paging\n through a directory with more entries than Manta will return in\n one request (1000).\n - This returns a dict mapping name to dirent as a convenience.\n Note that that makes this inappropriate for streaming a huge\n listing. A streaming-appropriate `ls` will be a separate method\n if/when that is added.\n\n @param mdir {str} A manta directory, e.g. 
'/trent/stor/a-dir'.\n @returns {dict} A mapping of names to their directory entry (dirent).\n \"\"\"\n assert limit is None and marker is None, \"not yet implemented\"\n dirents = {}\n\n if limit or marker:\n entries = self.list_directory(mdir, limit=limit, marker=marker)\n for entry in entries:\n dirents[entry[\"name\"]] = entry\n\n else:\n marker = None\n while True:\n res, entries = self.list_directory2(mdir, marker=marker)\n if marker:\n entries.pop(0) # first one is a repeat (the marker)\n if not entries:\n # Only the marker was there, we've got them all.\n break\n for entry in entries:\n if \"id\" in entry: # GET /:account/jobs\n dirents[entry[\"id\"]] = entry\n else:\n dirents[entry[\"name\"]] = entry\n if marker is None:\n # See if got all results in one go (quick out).\n result_set_size = int(res.get(\"result-set-size\", 0))\n if len(entries) == result_set_size:\n break\n if \"id\" in entries[-1]:\n marker = entries[-1][\"id\"] # jobs\n else:\n marker = entries[-1][\"name\"]\n\n return dirents"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef mkdir(self, mdir, parents=False):\n assert mdir.startswith('/'), \"%s: invalid manta path\" % mdir\n parts = mdir.split('/')\n assert len(parts) > 3, \"%s: cannot create top-level dirs\" % mdir\n if not parents:\n self.put_directory(mdir)\n else:\n # Find the first non-existant dir: binary search. Because\n # PutDirectory doesn't error on 'mkdir .../already-exists' we\n # don't have a way to detect a miss on `start`. 
So basically we\n # keep doing the binary search until we hit and close the `start`\n # to `end` gap.\n # Example:\n # - mdir: /trent/stor/builds/a/b/c (need to mk a/b/c)\n # parts: ['', 'trent', 'stor', 'builds', 'a', 'b', 'c']\n # start: 4\n # end: 8\n # - idx: 6\n # d: /trent/stor/builds/a/b (put_directory fails)\n # end: 6\n # - idx: 5\n # d: /trent/stor/builds/a (put_directory succeeds)\n # start: 5\n # (break out of loop)\n # - for i in range(6, 8):\n # i=6 -> d: /trent/stor/builds/a/b\n # i=7 -> d: /trent/stor/builds/a/b/c\n end = len(parts) + 1\n start = 3 # Index of the first possible dir to create.\n while start < end - 1:\n idx = int((end - start) // 2 + start)\n d = '/'.join(parts[:idx])\n try:\n self.put_directory(d)\n except errors.MantaAPIError:\n _, ex, _ = sys.exc_info()\n if ex.code == 'DirectoryDoesNotExist':\n end = idx\n else:\n raise\n else:\n start = idx\n\n # Now need to create from (end-1, len(parts)].\n for i in range(end, len(parts) + 1):\n d = '/'.join(parts[:i])\n self.put_directory(d)", "response": "Make a directory.\n\n Note that this will not error out if the directory already exists\n (that is how the PutDirectory Manta API behaves).\n\n @param mdir {str} A manta path, e.g. '/trent/stor/mydir'.\n @param parents {bool} Optional. Default false. 
Like 'mkdir -p', this\n will create parent dirs as necessary."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning available dirent info for the given Manta path.", "response": "def stat(self, mpath):\n \"\"\"Return available dirent info for the given Manta path.\"\"\"\n parts = mpath.split('/')\n if len(parts) == 0:\n raise errors.MantaError(\"cannot stat empty manta path: %r\" % mpath)\n elif len(parts) <= 3:\n raise errors.MantaError(\"cannot stat special manta path: %r\" %\n mpath)\n mparent = udirname(mpath)\n name = ubasename(mpath)\n dirents = self.ls(mparent)\n if name in dirents:\n return dirents[name]\n else:\n raise errors.MantaResourceNotFoundError(\n \"%s: no such object or directory\" % mpath)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the type of the given manta path.", "response": "def type(self, mpath):\n \"\"\"Return the manta type for the given manta path.\n\n @param mpath {str} The manta path for which to get the type.\n @returns {str|None} The manta type, e.g. 
\"object\" or \"directory\",\n or None if the path doesn't exist.\n \"\"\"\n try:\n return self.stat(mpath)[\"type\"]\n except errors.MantaResourceNotFoundError:\n return None\n except errors.MantaAPIError:\n _, ex, _ = sys.exc_info()\n if ex.code in ('ResourceNotFound', 'DirectoryDoesNotExist'):\n return None\n else:\n raise"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_job(self, job_id):\n try:\n return RawMantaClient.get_job(self, job_id)\n except errors.MantaAPIError as ex:\n if ex.res.status != 404:\n raise\n # Job was archived, try to retrieve the archived data.\n mpath = \"/%s/jobs/%s/job.json\" % (self.account, job_id)\n content = self.get_object(mpath, accept='application/json')\n try:\n return json.loads(content)\n except ValueError:\n raise errors.MantaError('invalid job data: %r' % content)", "response": "Get the job with the given ID."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the input of a single job.", "response": "def get_job_input(self, job_id):\n \"\"\"GetJobInput\n https://apidocs.joyent.com/manta/api.html#GetJobInput\n with the added sugar that it will retrieve the archived job if it has\n been archived, per:\n https://apidocs.joyent.com/manta/jobs-reference.html#job-completion-and-archival\n \"\"\"\n try:\n return RawMantaClient.get_job_input(self, job_id)\n except errors.MantaAPIError as ex:\n if ex.res.status != 404:\n raise\n # Job was archived, try to retrieve the archived data.\n mpath = \"/%s/jobs/%s/in.txt\" % (self.account, job_id)\n content = self.get_object(mpath)\n keys = content.splitlines(False)\n return keys"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget the errors of a specific job.", "response": "def get_job_errors(self, job_id):\n \"\"\"GetJobErrors\n https://apidocs.joyent.com/manta/api.html#GetJobErrors\n with the added sugar that it will retrieve the archived job if it has\n been 
archived, per:\n https://apidocs.joyent.com/manta/jobs-reference.html#job-completion-and-archival\n \"\"\"\n try:\n return RawMantaClient.get_job_errors(self, job_id)\n except errors.MantaAPIError as ex:\n if ex.res.status != 404:\n raise\n # Job was archived, try to retrieve the archived data.\n mpath = \"/%s/jobs/%s/err.txt\" % (self.account, job_id)\n content = self.get_object(mpath)\n return self._job_errors_from_content(content)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef alias(*aliases):\n\n def decorate(f):\n if not hasattr(f, \"aliases\"):\n f.aliases = []\n f.aliases += aliases\n return f\n\n return decorate", "response": "Decorator to add aliases for Cmdln. do_* command handlers."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ngenerate the man page content for the given Cmdln instance.", "response": "def man_sections_from_cmdln(inst, summary=None, description=None, author=None):\n \"\"\"Return man page sections appropriate for the given Cmdln instance.\n Join these sections for man page content.\n\n The man page sections generated are:\n NAME\n SYNOPSIS\n DESCRIPTION (if `description` is given)\n OPTIONS\n COMMANDS\n HELP TOPICS (if any)\n\n @param inst {Cmdln} Instance of Cmdln subclass for which to generate\n man page content.\n @param summary {str} A one-liner summary of the command.\n @param description {str} A description of the command. 
If given,\n it will be used for a \"DESCRIPTION\" section.\n @param author {str} The author name and email for the AUTHOR secion\n of the man page.\n @raises {ValueError} if man page content cannot be generated for the\n given class.\n \"\"\"\n if not inst.__class__.name:\n raise ValueError(\"cannot generate man page content: `name` is not \"\n \"set on class %r\" % inst.__class__)\n data = {\n \"name\": inst.name,\n \"ucname\": inst.name.upper(),\n \"date\": datetime.date.today().strftime(\"%b %Y\"),\n \"cmdln_version\": __version__,\n \"version_str\": inst.version and \" %s\" % inst.version or \"\",\n \"summary_str\": summary and r\" \\- %s\" % summary or \"\",\n }\n\n sections = []\n sections.append(\n '.\\\\\" Automatically generated by cmdln %(cmdln_version)s\\n'\n '.TH %(ucname)s \"1\" \"%(date)s\" \"%(name)s%(version_str)s\" \"User Commands\"\\n'\n % data)\n sections.append(\".SH NAME\\n%(name)s%(summary_str)s\\n\" % data)\n sections.append(_dedent(r\"\"\"\n .SH SYNOPSIS\n .B %(name)s\n [\\fIGLOBALOPTS\\fR] \\fISUBCOMMAND \\fR[\\fIOPTS\\fR] [\\fIARGS\\fR...]\n .br\n .B %(name)s\n \\fIhelp SUBCOMMAND\\fR\n \"\"\") % data)\n if description:\n sections.append(\".SH DESCRIPTION\\n%s\\n\" % description)\n\n section = \".SH OPTIONS\\n\"\n if not hasattr(inst, \"optparser\") is None:\n #HACK: In case `.main()` hasn't been run.\n inst.optparser = inst.get_optparser()\n lines = inst._help_preprocess(\"${option_list}\", None).splitlines(False)\n for line in lines[1:]:\n line = line.lstrip()\n if not line:\n continue\n section += \".TP\\n\"\n opts, desc = line.split(' ', 1)\n section += \".B %s\\n\" % opts\n section += \"%s\\n\" % _dedent(desc.lstrip(), skip_first_line=True)\n sections.append(section)\n\n section = \".SH COMMANDS\\n\"\n cmds = inst._get_cmds_data()\n for cmdstr, doc in cmds:\n cmdname = cmdstr.split(' ')[0] # e.g. 
\"commit (ci)\" -> \"commit\"\n doc = inst._help_reindent(doc, indent=\"\")\n doc = inst._help_preprocess(doc, cmdname)\n doc = doc.rstrip() + \"\\n\" # trim down trailing space\n section += '.PP\\n.SS %s\\n%s\\n' % (cmdstr, doc)\n sections.append(section)\n\n help_names = inst._get_help_names()\n if help_names:\n section = \".SH HELP TOPICS\\n\"\n for help_name, help_meth in sorted(help_names.items()):\n help = help_meth(inst)\n help = inst._help_reindent(help, indent=\"\")\n section += '.PP\\n.SS %s\\n%s\\n' % (help_name, help)\n sections.append(section)\n\n if author:\n sections.append(\".SH AUTHOR\\n%s\\n\" % author)\n\n return sections"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nformat specific linedata into a pleasant layout.", "response": "def _format_linedata(linedata, indent, indent_width):\n \"\"\"Format specific linedata into a pleasant layout.\n\n \"linedata\" is a list of 2-tuples of the form:\n (<item-display-string>, <item-docstring>)\n \"indent\" is a string to use for one level of indentation\n \"indent_width\" is a number of columns by which the\n formatted data will be indented when printed.\n\n The <item-display-string> column is held to 30 columns.\n \"\"\"\n lines = []\n WIDTH = 78 - indent_width\n SPACING = 2\n NAME_WIDTH_LOWER_BOUND = 13\n NAME_WIDTH_UPPER_BOUND = 30\n NAME_WIDTH = max([len(s) for s, d in linedata])\n if NAME_WIDTH < NAME_WIDTH_LOWER_BOUND:\n NAME_WIDTH = NAME_WIDTH_LOWER_BOUND\n elif NAME_WIDTH > NAME_WIDTH_UPPER_BOUND:\n NAME_WIDTH = NAME_WIDTH_UPPER_BOUND\n\n DOC_WIDTH = WIDTH - NAME_WIDTH - SPACING\n for namestr, doc in linedata:\n line = indent + namestr\n if len(namestr) <= NAME_WIDTH:\n line += ' ' * (NAME_WIDTH + SPACING - len(namestr))\n else:\n lines.append(line)\n line = indent + ' ' * (NAME_WIDTH + SPACING)\n line += _summarize_doc(doc, DOC_WIDTH)\n lines.append(line.rstrip())\n return lines"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 
where it\nreturns the indentation of the specified marker from the given string.", "response": "def _get_indent(marker, s, tab_width=8):\n \"\"\"_get_indent(marker, s, tab_width=8) ->\n (<indentation-of-'marker'>, <indentation-width>)\"\"\"\n # Figure out how much the marker is indented.\n INDENT_CHARS = tuple(' \\t')\n start = s.index(marker)\n i = start\n while i > 0:\n if s[i - 1] not in INDENT_CHARS:\n break\n i -= 1\n indent = s[i:start]\n indent_width = 0\n for ch in indent:\n if ch == ' ':\n indent_width += 1\n elif ch == '\\t':\n indent_width += tab_width - (indent_width % tab_width)\n return indent, indent_width"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the whitespace content trailing the given marker in string s up to and including a newline.", "response": "def _get_trailing_whitespace(marker, s):\n \"\"\"Return the whitespace content trailing the given 'marker' in string 's',\n up to and including a newline.\n \"\"\"\n suffix = ''\n start = s.index(marker) + len(marker)\n i = start\n while i < len(s):\n if s[i] in ' \\t':\n suffix += s[i]\n elif s[i] in '\\r\\n':\n suffix += s[i]\n if s[i] == '\\r' and i + 1 < len(s) and s[i + 1] == '\\n':\n suffix += s[i + 1]\n break\n else:\n break\n i += 1\n return suffix"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nhooks for subclasses to set the option parser for the current top - level command.", "response": "def get_optparser(self):\n \"\"\"Hook for subclasses to set the option parser for the\n top-level command/shell.\n\n This option parser is used retrieved and used by `.main()' to\n handle top-level options.\n\n The default implements a single '-h|--help' option. Sub-classes\n can return None to have no options at the top-level. 
Typically\n an instance of CmdlnOptionParser should be returned.\n \"\"\"\n version = (self.version is not None and\n \"%s %s\" % (self._name_str, self.version) or None)\n return CmdlnOptionParser(self, version=version)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nrunning one command and exit.", "response": "def cmd(self, argv):\n \"\"\"Run one command and exit.\n\n \"argv\" is the arglist for the command to run. argv[0] is the\n command to run. If argv is an empty list then the\n 'emptyline' handler is run.\n\n Returns the return value from the command handler.\n \"\"\"\n assert isinstance(argv, (list, tuple)), \\\n \"'argv' is not a sequence: %r\" % argv\n retval = None\n try:\n argv = self.precmd(argv)\n retval = self.onecmd(argv)\n self.postcmd(argv)\n except:\n if not self.cmdexc(argv):\n raise\n retval = 1\n return retval"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef cmdexc(self, argv):\n import sys\n type, exc, traceback = sys.exc_info()\n if isinstance(exc, CmdlnUserError):\n msg = \"%s %s: %s\\nTry '%s help %s' for info.\\n\"\\\n % (self.name, argv[0], exc, self.name, argv[0])\n self.stderr.write(self._str(msg))\n self.stderr.flush()\n return True", "response": "Called by the command line interface to handle the exception raised by the command line."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef default(self, argv):\n errmsg = self._str(self.unknowncmd % (argv[0], ))\n if self.cmdlooping:\n self.stderr.write(errmsg + \"\\n\")\n else:\n self.stderr.write(\"%s: %s\\nTry '%s help' for info.\\n\" %\n (self._name_str, errmsg, self._name_str))\n self.stderr.flush()\n return 1", "response": "Default implementation for the unknown command."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef helpdefault(self, cmd, known):\n if known:\n msg = 
self._str(self.nohelp % (cmd, ))\n if self.cmdlooping:\n self.stderr.write(msg + '\\n')\n else:\n self.stderr.write(\"%s: %s\\n\" % (self.name, msg))\n else:\n msg = self.unknowncmd % (cmd, )\n if self.cmdlooping:\n self.stderr.write(msg + '\\n')\n else:\n self.stderr.write(\"%s: %s\\n\"\n \"Try '%s help' for info.\\n\" %\n (self.name, msg, self.name))\n self.stderr.flush()\n return 1", "response": "Hook called to handle help on a command for which there is no help handler."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef do_help(self, argv):\n if len(argv) > 1: # asking for help on a particular command\n doc = None\n cmdname = self._get_canonical_cmd_name(argv[1]) or argv[1]\n if not cmdname:\n return self.helpdefault(argv[1], False)\n else:\n helpfunc = getattr(self, \"help_\" + cmdname, None)\n if helpfunc:\n doc = helpfunc()\n else:\n handler = self._get_cmd_handler(cmdname)\n if handler:\n doc = handler.__doc__\n if doc is None:\n return self.helpdefault(argv[1], handler != None)\n else: # bare \"help\" command\n doc = self.__class__.__doc__ # try class docstring\n if doc is None:\n # Try to provide some reasonable useful default help.\n if self.cmdlooping: prefix = \"\"\n else: prefix = self.name + ' '\n doc = \"\"\"Usage:\n %sCOMMAND [ARGS...]\n %shelp [COMMAND]\n\n ${option_list}\n ${command_list}\n ${help_list}\n \"\"\" % (prefix, prefix)\n cmdname = None\n\n if doc: # *do* have help content, massage and print that\n doc = self._help_reindent(doc)\n doc = self._help_preprocess(doc, cmdname)\n doc = doc.rstrip() + '\\n' # trim down trailing space\n self.stdout.write(self._str(doc))\n self.stdout.flush()", "response": "gives detailed help on a specific sub - command"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nhook to re - indent the help string before writing to stdout.", "response": "def _help_reindent(self, help, indent=None):\n \"\"\"Hook to re-indent help strings 
before writing to stdout.\n\n \"help\" is the help content to re-indent\n \"indent\" is a string with which to indent each line of the\n help content after normalizing. If unspecified or None\n then the default is use: the 'self.helpindent' class\n attribute. By default this is the empty string, i.e.\n no indentation.\n\n By default, all common leading whitespace is removed and then\n the lot is indented by 'self.helpindent'. When calculating the\n common leading whitespace the first line is ignored -- hence\n help content for Conan can be written as follows and have the\n expected indentation:\n\n def do_crush(self, ...):\n '''${cmd_name}: crush your enemies, see them driven before you...\n\n c.f. Conan the Barbarian'''\n \"\"\"\n if indent is None:\n indent = self.helpindent\n lines = help.splitlines(0)\n _dedentlines(lines, skip_first_line=True)\n lines = [(indent + line).rstrip() for line in lines]\n return '\\n'.join(lines)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nhooking to preprocess a help string before writing to stdout.", "response": "def _help_preprocess(self, help, cmdname):\n \"\"\"Hook to preprocess a help string before writing to stdout.\n\n \"help\" is the help string to process.\n \"cmdname\" is the canonical sub-command name for which help\n is being given, or None if the help is not specific to a\n command.\n\n By default the following template variables are interpolated in\n help content. (Note: these are similar to Python 2.4's\n string.Template interpolation but not quite.)\n\n ${name}\n The tool's/shell's name, i.e. 'self.name'.\n ${option_list}\n A formatted table of options for this shell/tool.\n ${command_list}\n A formatted table of available sub-commands.\n ${help_list}\n A formatted table of additional help topics (i.e. 
'help_*'\n methods with no matching 'do_*' method).\n ${cmd_name}\n The name (and aliases) for this sub-command formatted as:\n \"NAME (ALIAS1, ALIAS2, ...)\".\n ${cmd_usage}\n A formatted usage block inferred from the command function\n signature.\n ${cmd_option_list}\n A formatted table of options for this sub-command. (This is\n only available for commands using the optparse integration,\n i.e. using @cmdln.option decorators or manually setting the\n 'optparser' attribute on the 'do_*' method.)\n\n Returns the processed help.\n \"\"\"\n preprocessors = {\n \"${name}\": self._help_preprocess_name,\n \"${option_list}\": self._help_preprocess_option_list,\n \"${command_list}\": self._help_preprocess_command_list,\n \"${help_list}\": self._help_preprocess_help_list,\n \"${cmd_name}\": self._help_preprocess_cmd_name,\n \"${cmd_usage}\": self._help_preprocess_cmd_usage,\n \"${cmd_option_list}\": self._help_preprocess_cmd_option_list,\n }\n\n for marker, preprocessor in preprocessors.items():\n if marker in help:\n help = preprocessor(help, cmdname)\n return help"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _get_help_names(self):\n # Determine the additional help topics, if any.\n help_names = {}\n token2cmdname = self._get_canonical_map()\n for attrname, attr in self._gen_names_and_attrs():\n if not attrname.startswith(\"help_\"): continue\n help_name = attrname[5:]\n if help_name not in token2cmdname:\n help_names[help_name] = attr\n return help_names", "response": "Return a mapping of help topic name to help_* method."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _get_canonical_map(self):\n cacheattr = \"_token2canonical\"\n if not hasattr(self, cacheattr):\n # Get the list of commands and their aliases, if any.\n token2canonical = {}\n cmd2funcname = {} # use a dict to strip duplicates\n for attr in self.get_names():\n if 
attr.startswith(\"do_\"): cmdname = attr[3:]\n elif attr.startswith(\"_do_\"): cmdname = attr[4:]\n else:\n continue\n cmd2funcname[cmdname] = attr\n token2canonical[cmdname] = cmdname\n for cmdname, funcname in cmd2funcname.items(): # add aliases\n func = getattr(self, funcname)\n aliases = getattr(func, \"aliases\", [])\n for alias in aliases:\n if alias in cmd2funcname:\n import warnings\n warnings.warn(\"'%s' alias for '%s' command conflicts \"\n \"with '%s' handler\" %\n (alias, cmdname, cmd2funcname[alias]))\n continue\n token2canonical[alias] = cmdname\n setattr(self, cacheattr, token2canonical)\n return getattr(self, cacheattr)", "response": "Return a mapping of available command names and aliases to their canonical command name."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef set_cmdln_info(self, cmdln, subcmd):\n self.cmdln = cmdln\n self.subcmd = subcmd", "response": "Called by Cmdln to pass relevant info about itself needed\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nwindowing allow application paths to be registered in the registry.", "response": "def _getRegisteredExecutable(exeName):\n \"\"\"Windows allow application paths to be registered in the registry.\"\"\"\n registered = None\n if sys.platform.startswith('win'):\n if os.path.splitext(exeName)[1].lower() != '.exe':\n exeName += '.exe'\n import _winreg\n try:\n key = \"SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\App Paths\\\\\" +\\\n exeName\n value = _winreg.QueryValue(_winreg.HKEY_LOCAL_MACHINE, key)\n registered = (value, \"from HKLM\\\\\"+key)\n except _winreg.error:\n pass\n if registered and not os.path.exists(registered[0]):\n registered = None\n return registered"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef whichgen(command, path=None, verbose=0, exts=None):\n matches = []\n if path is None:\n usingGivenPath = 0\n path = 
os.environ.get(\"PATH\", \"\").split(os.pathsep)\n if sys.platform.startswith(\"win\"):\n path.insert(0, os.curdir) # implied by Windows shell\n else:\n usingGivenPath = 1\n\n # Windows has the concept of a list of extensions (PATHEXT env var).\n if sys.platform.startswith(\"win\"):\n if exts is None:\n exts = os.environ.get(\"PATHEXT\", \"\").split(os.pathsep)\n # If '.exe' is not in exts then obviously this is Win9x and\n # or a bogus PATHEXT, then use a reasonable default.\n for ext in exts:\n if ext.lower() == \".exe\":\n break\n else:\n exts = ['.COM', '.EXE', '.BAT']\n elif not isinstance(exts, list):\n raise TypeError(\"'exts' argument must be a list or None\")\n else:\n if exts is not None:\n raise WhichError(\"'exts' argument is not supported on \"\\\n \"platform '%s'\" % sys.platform)\n exts = []\n\n # File name cannot have path separators because PATH lookup does not\n # work that way.\n if os.sep in command or os.altsep and os.altsep in command:\n if os.path.exists(command):\n match = _cull((command, \"explicit path given\"), matches, verbose)\n if verbose:\n yield match\n else:\n yield match[0]\n else:\n for i in range(len(path)):\n dirName = path[i]\n # On windows the dirName *could* be quoted, drop the quotes\n if sys.platform.startswith(\"win\") and len(dirName) >= 2\\\n and dirName[0] == '\"' and dirName[-1] == '\"':\n dirName = dirName[1:-1]\n for ext in ['']+exts:\n absName = os.path.abspath(\n os.path.normpath(os.path.join(dirName, command+ext)))\n if os.path.isfile(absName):\n if usingGivenPath:\n fromWhere = \"from given path element %d\" % i\n elif not sys.platform.startswith(\"win\"):\n fromWhere = \"from PATH element %d\" % i\n elif i == 0:\n fromWhere = \"from current directory\"\n else:\n fromWhere = \"from PATH element %d\" % (i-1)\n match = _cull((absName, fromWhere), matches, verbose)\n if match:\n if verbose:\n yield match\n else:\n yield match[0]\n match = _getRegisteredExecutable(command)\n if match is not None:\n match = 
_cull(match, matches, verbose)\n if match:\n if verbose:\n yield match\n else:\n yield match[0]", "response": "Return a generator of full paths to the given command."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef whichall(command, path=None, verbose=0, exts=None):\n return list( whichgen(command, path, verbose, exts) )", "response": "Return a list of full paths to all matches of the given command on the path."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting the python - manta version without having to import the manta package.", "response": "def get_version():\n \"\"\"Get the python-manta version without having to import the manta package,\n which requires deps to already be installed.\n \"\"\"\n _globals = {}\n _locals = {}\n exec(\n compile(\n open(TOP + \"/manta/version.py\").read(), TOP + \"/manta/version.py\",\n 'exec'), _globals, _locals)\n return _locals[\"__version__\"]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncalculate the fingerprint of SSH public key data.", "response": "def fingerprint_from_ssh_pub_key(data):\n \"\"\"Calculate the fingerprint of SSH public key data.\n\n >>> data = \"ssh-rsa AAAAB3NzaC1y...4IEAA1Z4wIWCuk8F9Tzw== my key comment\"\n >>> fingerprint_from_ssh_pub_key(data)\n '54:c7:4c:93:cf:ff:e3:32:68:bc:89:6e:5e:22:b5:9c'\n\n Adapted from <http://stackoverflow.com/questions/6682815/>\n and imgapi.js#fingerprintFromSshpubkey.\n \"\"\"\n data = data.strip()\n\n # Let's accept either:\n # - just the base64 encoded data part, e.g.\n # 'AAAAB3NzaC1yc2EAAAABIwAA...2l24uq9Lfw=='\n # - the full ssh pub key file content, e.g.:\n # 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAA...2l24uq9Lfw== my comment'\n if (re.search(r'^ssh-(?:rsa|dss) ', data) or\n re.search(r'^ecdsa-sha2-nistp(?:[0-9]+)', data)):\n data = data.split(None, 2)[1]\n\n key = base64.b64decode(data)\n fp_plain = hashlib.md5(key).hexdigest()\n return 
':'.join(a + b for a, b in zip(fp_plain[::2], fp_plain[1::2]))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef fingerprint_from_raw_ssh_pub_key(key):\n fp_plain = hashlib.md5(key).hexdigest()\n return ':'.join(a + b for a, b in zip(fp_plain[::2], fp_plain[1::2]))", "response": "Encode a raw SSH public key as from\n str to a fingerprint in the typical\n 54. c7. 4c93. cf. e3. 32. b5. 9c form."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nencode a raw SSH key as from str to a SHA256 fingerprint", "response": "def sha256_fingerprint_from_raw_ssh_pub_key(raw_key):\n \"\"\"Encode a raw SSH key (string of bytes, as from\n `str(paramiko.AgentKey)`) to a fingerprint in the SHA256 form:\n SHA256:j2WoSeOWhFy69BQ39fuafFAySp9qCZTSCEyT2vRKcL+s\n \"\"\"\n digest = hashlib.sha256(raw_key).digest()\n h = base64.b64encode(digest).decode('utf-8')\n h = h.rstrip().rstrip('=') # drop newline and possible base64 padding\n return 'SHA256:' + h"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nload a local ssh private key.", "response": "def load_ssh_key(key_id, skip_priv_key=False):\n \"\"\"\n Load a local ssh private key (in PEM format). PEM format is the OpenSSH\n default format for private keys.\n\n See similar code in imgapi.js#loadSSHKey.\n\n @param key_id {str} An ssh public key fingerprint or ssh private key path.\n @param skip_priv_key {boolean} Optional. Default false. 
If true, then this\n will skip loading the private key file and `priv_key` will be `None`\n in the retval.\n @returns {dict} with these keys:\n - pub_key_path\n - fingerprint\n - priv_key_path\n - priv_key\n - algorithm\n \"\"\"\n priv_key = None\n\n # If `key_id` is already a private key path, then easy.\n if not FINGERPRINT_RE.match(key_id):\n if not skip_priv_key:\n f = io.open(key_id, 'rb')\n try:\n priv_key = f.read()\n finally:\n f.close()\n pub_key_path = key_id + '.pub'\n f = io.open(pub_key_path, 'r')\n try:\n pub_key = f.read()\n finally:\n f.close()\n fingerprint = fingerprint_from_ssh_pub_key(pub_key)\n\n # XXX: pubkey should NOT be in PEM format.\n try:\n algo = ALGO_FROM_SSH_KEY_TYPE[pub_key.split()[0]]\n except KeyError:\n raise MantaError(\"Unsupported key type for: {}\".format(key_id))\n\n return dict(\n pub_key_path=pub_key_path,\n fingerprint=fingerprint,\n priv_key_path=key_id,\n priv_key=priv_key,\n algorithm=algo)\n\n # Else, look at all pub/priv keys in \"~/.ssh\" for a matching fingerprint.\n fingerprint = key_id\n\n pub_key_glob = expanduser('~/.ssh/*.pub')\n pub_key = None\n for pub_key_path in glob(pub_key_glob):\n try:\n f = io.open(pub_key_path, 'r')\n except IOError:\n # This can happen if the .pub file is a broken symlink.\n log.debug(\"could not open '%s', skip it\", pub_key_path)\n continue\n try:\n pub_key = f.read()\n finally:\n f.close()\n\n # The MD5 fingerprint functions return the hexdigest without the hash\n # algorithm prefix (\"MD5:\"), and the SHA256 functions return the\n # fingerprint with the prefix (\"SHA256:\"). 
Ideally we'd want to\n # normalize these, but more importantly we don't want to break backwards\n # compatibility for either the SHA or MD5 users.\n md5_fp = fingerprint_from_ssh_pub_key(pub_key)\n sha256_fp = sha256_fingerprint_from_ssh_pub_key(pub_key)\n if (sha256_fp == fingerprint or\n md5_fp == fingerprint or\n \"MD5:\" + md5_fp == fingerprint):\n\n # if the user has given us sha256 fingerprint, canonicalize\n # it to the md5 fingerprint\n fingerprint = md5_fp\n break\n else:\n raise MantaError(\n \"no '~/.ssh/*.pub' key found with fingerprint '%s'\"\n % fingerprint)\n\n # XXX: pubkey should NOT be in PEM format.\n try:\n algo = ALGO_FROM_SSH_KEY_TYPE[pub_key.split()[0]]\n except KeyError:\n raise MantaError(\"Unsupported key type for: {}\".format(key_id))\n\n priv_key_path = os.path.splitext(pub_key_path)[0]\n if not skip_priv_key:\n f = io.open(priv_key_path, 'rb')\n try:\n priv_key = f.read()\n finally:\n f.close()\n return dict(\n pub_key_path=pub_key_path,\n fingerprint=fingerprint,\n priv_key_path=priv_key_path,\n priv_key=priv_key,\n algorithm=algo)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nloads the SSH key info necessary for signing.", "response": "def ssh_key_info_from_key_data(key_id, priv_key=None):\n \"\"\"Get/load SSH key info necessary for signing.\n\n @param key_id {str} Either a private ssh key fingerprint, e.g.\n 'b3:f0:a1:6c:18:3b:42:63:fd:6e:57:42:74:17:d4:bc', or the path to\n an ssh private key file (like ssh's IdentityFile config option).\n @param priv_key {str} Optional. SSH private key file data (PEM format).\n @return {dict} with these keys:\n - type: \"agent\"\n - signer: Crypto signer class (a PKCS#1 v1.5 signer for RSA keys)\n - fingerprint: key md5 fingerprint\n - algorithm: See ALGO_FROM_SSH_KEY_TYPE for supported list.\n - ... 
some others added by `load_ssh_key()`\n \"\"\"\n if FINGERPRINT_RE.match(key_id) and priv_key:\n key_info = {\"fingerprint\": key_id, \"priv_key\": priv_key}\n else:\n # Otherwise, we attempt to load necessary details from ~/.ssh.\n key_info = load_ssh_key(key_id)\n\n # Load a key signer.\n key = None\n try:\n key = serialization.load_pem_private_key(\n key_info[\"priv_key\"],\n password=None,\n backend=default_backend())\n except TypeError as ex:\n log.debug(\"could not import key without passphrase (will \"\n \"try with passphrase): %s\", ex)\n if \"priv_key_path\" in key_info:\n prompt = \"Passphrase [%s]: \" % key_info[\"priv_key_path\"]\n else:\n prompt = \"Passphrase: \"\n for i in range(3):\n passphrase = getpass(prompt)\n if not passphrase:\n break\n try:\n key = serialization.load_pem_private_key(\n key_info[\"priv_key\"],\n password=passphrase,\n backend=default_backend())\n except ValueError:\n continue\n else:\n break\n if not key:\n details = \"\"\n if \"priv_key_path\" in key_info:\n details = \" (%s)\" % key_info[\"priv_key_path\"]\n raise MantaError(\"could not import key\" + details)\n\n # If load_ssh_key() wasn't run, set the algorithm here.\n if 'algorithm' not in key_info:\n if isinstance(key, ec.EllipticCurvePrivateKey):\n key_info['algorithm'] = ECDSA_ALGO_FROM_KEY_SIZE[str(key.key_size)]\n elif isinstance(key, rsa.RSAPrivateKey):\n key_info['algorithm'] = RSA_STR\n else:\n raise MantaError(\"Unsupported key type for: {}\".format(key_id))\n\n key_info[\"signer\"] = key\n key_info[\"type\"] = \"ssh_key\"\n return key_info"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns the agent key info from the given key id.", "response": "def agent_key_info_from_key_id(key_id):\n \"\"\"Find a matching key in the ssh-agent.\n\n @param key_id {str} Either a private ssh key fingerprint, e.g.\n 'b3:f0:a1:6c:18:3b:42:63:fd:6e:57:42:74:17:d4:bc', or the path to\n an ssh private key file (like ssh's IdentityFile config 
option).\n @return {dict} with these keys:\n - type: \"agent\"\n - agent_key: paramiko AgentKey\n - fingerprint: key fingerprint\n - algorithm: \"rsa-sha1\" Currently don't support DSA agent signing.\n \"\"\"\n # Need the fingerprint of the key we're using for signing. If it\n # is a path to a priv key, then we need to load it.\n if not FINGERPRINT_RE.match(key_id):\n ssh_key = load_ssh_key(key_id, True)\n fingerprint = ssh_key[\"fingerprint\"]\n else:\n fingerprint = key_id\n\n # Look for a matching fingerprint in the ssh-agent keys.\n keys = Agent().get_keys()\n\n for key in keys:\n raw_key = key.blob\n\n\n # The MD5 fingerprint functions return the hexdigest without the hash\n # algorithm prefix (\"MD5:\"), and the SHA256 functions return the\n # fingerprint with the prefix (\"SHA256:\"). Ideally we'd want to\n # normalize these, but more importantly we don't want to break backwards\n # compatibility for either the SHA or MD5 users.\n md5_fp = fingerprint_from_raw_ssh_pub_key(raw_key)\n sha_fp = sha256_fingerprint_from_raw_ssh_pub_key(raw_key)\n if (sha_fp == fingerprint or\n md5_fp == fingerprint or\n \"MD5:\" + md5_fp == fingerprint):\n\n # Canonicalize it to the md5 fingerprint.\n md5_fingerprint = md5_fp\n break\n else:\n raise MantaError('no ssh-agent key with fingerprint \"%s\"' %\n fingerprint)\n\n return {\n \"type\": \"agent\",\n \"agent_key\": key,\n \"fingerprint\": md5_fingerprint,\n \"algorithm\": ALGO_FROM_SSH_KEY_TYPE[key.name]\n }"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting key info appropriate for signing.", "response": "def _get_key_info(self):\n \"\"\"Get key info appropriate for signing.\"\"\"\n if self._key_info_cache is None:\n self._key_info_cache = ssh_key_info_from_key_data(self.key_id,\n self.priv_key)\n return self._key_info_cache"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _get_key_info(self):\n if self._key_info_cache is 
None:\n self._key_info_cache = agent_key_info_from_key_id(self.key_id)\n return self._key_info_cache", "response": "Get key info appropriate for signing."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the key info appropriate for signing.", "response": "def _get_key_info(self):\n \"\"\"Get key info appropriate for signing: either from the ssh agent\n or from a private key.\n \"\"\"\n if self._key_info_cache is not None:\n return self._key_info_cache\n\n errors = []\n\n # First try the agent.\n try:\n key_info = agent_key_info_from_key_id(self.key_id)\n except MantaError:\n _, ex, _ = sys.exc_info()\n errors.append(ex)\n else:\n self._key_info_cache = key_info\n return self._key_info_cache\n\n # Try loading from \"~/.ssh/*\".\n try:\n key_info = ssh_key_info_from_key_data(self.key_id)\n except MantaError:\n _, ex, _ = sys.exc_info()\n errors.append(ex)\n else:\n self._key_info_cache = key_info\n return self._key_info_cache\n\n raise MantaError(\"could not find key info for signing: %s\" %\n \"; \".join(map(str, errors)))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_channel(\n target: str,\n options: Optional[List[Tuple[str, Any]]] = None,\n interceptors: Optional[List[ClientInterceptor]] = None,\n) -> grpc.Channel:\n \"\"\"Creates a gRPC channel\n\n The gRPC channel is created with the provided options and intercepts each\n invocation via the provided interceptors.\n\n The created channel is configured with the following default options:\n - \"grpc.max_send_message_length\": 100MB,\n - \"grpc.max_receive_message_length\": 100MB.\n\n :param target: the server address.\n :param options: optional list of key-value pairs to configure the channel.\n :param interceptors: optional list of client interceptors.\n :returns: a gRPC channel.\n\n \"\"\"\n # The list of possible options is available here:\n # 
https://grpc.io/grpc/core/group__grpc__arg__keys.html\n options = (options or []) + [\n (\"grpc.max_send_message_length\", grpc_max_msg_size),\n (\"grpc.max_receive_message_length\", grpc_max_msg_size),\n ]\n interceptors = interceptors or []\n channel = grpc.insecure_channel(target, options)\n return grpc.intercept_channel(channel, *interceptors)", "response": "Creates a gRPC channel with the given options and intercepts each\n invocation."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_server(\n max_workers: int,\n options: Optional[List[Tuple[str, Any]]] = None,\n interceptors: Optional[List[grpc.ServerInterceptor]] = None,\n) -> grpc.Server:\n \"\"\"Creates a gRPC server\n\n The gRPC server is created with the provided options and intercepts each\n incoming RPCs via the provided interceptors.\n\n The created server is configured with the following default options:\n - \"grpc.max_send_message_length\": 100MB,\n - \"grpc.max_receive_message_length\": 100MB.\n\n :param max_workers: the maximum number of workers to use in the underlying\n futures.ThreadPoolExecutor to be used by the Server to execute RPC\n handlers.\n :param options: optional list of key-value pairs to configure the channel.\n :param interceptors: optional list of server interceptors.\n :returns: a gRPC server.\n\n \"\"\"\n # The list of possible options is available here:\n # https://grpc.io/grpc/core/group__grpc__arg__keys.html\n options = (options or []) + [\n (\"grpc.max_send_message_length\", grpc_max_msg_size),\n (\"grpc.max_receive_message_length\", grpc_max_msg_size),\n ]\n interceptors = [base.ServerInterceptorWrapper(i)\n for i in (interceptors or [])]\n server = grpc.server(ThreadPoolExecutor(max_workers=max_workers),\n options=options, interceptors=interceptors)\n\n for i in interceptors:\n i.bind(server)\n\n return server", "response": "Creates a new server for the given base - level cache."} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef to_grpc_address(target: str) -> str:\n u = urlparse(target)\n if u.scheme == \"dns\":\n raise ValueError(\"dns:// not supported\")\n\n if u.scheme == \"unix\":\n return \"unix:\"+u.path\n\n return u.netloc", "response": "Converts a standard gRPC target to one that is supported by grpcio\n ."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef one_storage_per_feeder(edisgo, storage_timeseries,\n storage_nominal_power=None, **kwargs):\n \"\"\"\n Allocates the given storage capacity to multiple smaller storages.\n\n For each feeder with load or voltage issues it is checked if integrating a\n storage will reduce peaks in the feeder, starting with the feeder with\n the highest theoretical grid expansion costs. A heuristic approach is used\n to estimate storage sizing and siting while storage operation is carried\n over from the given storage operation.\n\n Parameters\n -----------\n edisgo : :class:`~.grid.network.EDisGo`\n storage_timeseries : :pandas:`pandas.DataFrame<dataframe>`\n Total active and reactive power time series that will be allocated to\n the smaller storages in feeders with load or voltage issues. Columns of\n the dataframe are 'p' containing active power time series in kW and 'q'\n containing the reactive power time series in kvar. Index is a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`.\n storage_nominal_power : :obj:`float` or None\n Nominal power in kW that will be allocated to the smaller storages in\n feeders with load or voltage issues. 
If no nominal power is provided\n the maximum active power given in `storage_timeseries` is used.\n Default: None.\n debug : :obj:`Boolean`, optional\n If dedug is True a dataframe with storage size and path to storage of\n all installed and possibly discarded storages is saved to a csv file\n and a plot with all storage positions is created and saved, both to the\n current working directory with filename `storage_results_{MVgrid_id}`.\n Default: False.\n check_costs_reduction : :obj:`Boolean` or :obj:`str`, optional\n This parameter specifies when and whether it should be checked if a\n storage reduced grid expansion costs or not. It can be used as a safety\n check but can be quite time consuming. Possible options are:\n\n * 'each_feeder'\n Costs reduction is checked for each feeder. If the storage did not\n reduce grid expansion costs it is discarded.\n * 'once'\n Costs reduction is checked after the total storage capacity is\n allocated to the feeders. If the storages did not reduce grid\n expansion costs they are all discarded.\n * False\n Costs reduction is never checked.\n\n Default: False.\n\n \"\"\"\n\n def _feeder_ranking(grid_expansion_costs):\n \"\"\"\n Get feeder ranking from grid expansion costs DataFrame.\n\n MV feeders are ranked descending by grid expansion costs that are\n attributed to that feeder.\n\n Parameters\n ----------\n grid_expansion_costs : :pandas:`pandas.DataFrame<dataframe>`\n grid_expansion_costs DataFrame from :class:`~.grid.network.Results`\n of the copied edisgo object.\n\n Returns\n -------\n :pandas:`pandas.Series<series>`\n Series with ranked MV feeders (in the copied graph) of type\n :class:`~.grid.components.Line`. Feeders are ranked by total grid\n expansion costs of all measures conducted in the feeder. 
The\n feeder with the highest costs is in the first row and the feeder\n with the lowest costs in the last row.\n\n \"\"\"\n return grid_expansion_costs.groupby(\n ['mv_feeder'], sort=False).sum().reset_index().sort_values(\n by=['total_costs'], ascending=False)['mv_feeder']\n\n def _shortest_path(node):\n if isinstance(node, LVStation):\n return len(nx.shortest_path(\n node.mv_grid.graph, node.mv_grid.station, node))\n else:\n return len(nx.shortest_path(\n node.grid.graph, node.grid.station, node))\n\n def _find_battery_node(edisgo, critical_lines_feeder,\n critical_nodes_feeder):\n \"\"\"\n Evaluates where to install the storage.\n\n Parameters\n -----------\n edisgo : :class:`~.grid.network.EDisGo`\n The original edisgo object.\n critical_lines_feeder : :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded lines in MV feeder, their maximum\n relative over-loading and the corresponding time step. See\n :func:`edisgo.flex_opt.check_tech_constraints.mv_line_load` for\n more information.\n critical_nodes_feeder : :obj:`list`\n List with all nodes in MV feeder with voltage issues.\n\n Returns\n -------\n :obj:`float`\n Node where storage is installed.\n\n \"\"\"\n\n # if there are overloaded lines in the MV feeder the battery storage\n # will be installed at the node farthest away from the MV station\n if not critical_lines_feeder.empty:\n logger.debug(\"Storage positioning due to overload.\")\n # dictionary with nodes and their corresponding path length to\n # MV station\n path_length_dict = {}\n for l in critical_lines_feeder.index:\n nodes = l.grid.graph.nodes_from_line(l)\n for node in nodes:\n path_length_dict[node] = _shortest_path(node)\n # return node farthest away\n return [_ for _ in path_length_dict.keys()\n if path_length_dict[_] == max(\n path_length_dict.values())][0]\n\n # if there are voltage issues in the MV grid the battery storage will\n # be installed at the first node in path that exceeds 2/3 of the line\n # length from 
station to critical node with highest voltage deviation\n if critical_nodes_feeder:\n logger.debug(\"Storage positioning due to voltage issues.\")\n node = critical_nodes_feeder[0]\n\n # get path length from station to critical node\n get_weight = lambda u, v, data: data['line'].length\n path_length = dijkstra_shortest_path_length(\n edisgo.network.mv_grid.graph,\n edisgo.network.mv_grid.station,\n get_weight, target=node)\n\n # find first node in path that exceeds 2/3 of the line length\n # from station to critical node farthest away from the station\n path = nx.shortest_path(edisgo.network.mv_grid.graph,\n edisgo.network.mv_grid.station,\n node)\n return next(j for j in path\n if path_length[j] >= path_length[node] * 2 / 3)\n\n return None\n\n def _calc_storage_size(edisgo, feeder, max_storage_size):\n \"\"\"\n Calculates storage size that reduces residual load.\n\n Parameters\n -----------\n edisgo : :class:`~.grid.network.EDisGo`\n The original edisgo object.\n feeder : :class:`~.grid.components.Line`\n MV feeder the storage will be connected to. 
The line object is an\n object from the copied graph.\n\n Returns\n -------\n :obj:`float`\n Storage size that reduced the residual load in the feeder.\n\n \"\"\"\n step_size = 200\n sizes = [0] + list(np.arange(\n p_storage_min, max_storage_size + 0.5 * step_size, step_size))\n p_feeder = edisgo.network.results.pfa_p.loc[:, repr(feeder)]\n q_feeder = edisgo.network.results.pfa_q.loc[:, repr(feeder)]\n p_slack = edisgo.network.pypsa.generators_t.p.loc[\n :, 'Generator_slack'] * 1e3\n\n # get sign of p and q\n l = edisgo.network.pypsa.lines.loc[repr(feeder), :]\n mv_station_bus = 'bus0' if l.loc['bus0'] == 'Bus_'.format(\n repr(edisgo.network.mv_grid.station)) else 'bus1'\n if mv_station_bus == 'bus0':\n diff = edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - \\\n edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)]\n diff_q = edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - \\\n edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)]\n else:\n diff = edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - \\\n edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)]\n diff_q = edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - \\\n edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)]\n p_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff],\n index=p_feeder.index)\n q_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff_q],\n index=p_feeder.index)\n\n # get allowed load factors per case\n lf = {'feedin_case': edisgo.network.config[\n 'grid_expansion_load_factors']['mv_feedin_case_line'],\n 'load_case': network.config[\n 'grid_expansion_load_factors']['mv_load_case_line']}\n\n # calculate maximum apparent power for each storage size to find\n # storage size that minimizes apparent power in the feeder\n p_feeder = p_feeder.multiply(p_sign)\n q_feeder = q_feeder.multiply(q_sign)\n s_max = []\n for size in sizes:\n share = size / storage_nominal_power\n p_storage = storage_timeseries.p * share\n q_storage = storage_timeseries.q * share\n p_total = p_feeder + 
p_storage\n q_total = q_feeder + q_storage\n p_hv_mv_station = p_slack - p_storage\n lf_ts = p_hv_mv_station.apply(\n lambda _: lf['feedin_case'] if _ < 0 else lf['load_case'])\n s_max_ts = (p_total ** 2 + q_total ** 2).apply(\n sqrt).divide(lf_ts)\n s_max.append(max(s_max_ts))\n\n return sizes[pd.Series(s_max).idxmin()]\n\n def _critical_nodes_feeder(edisgo, feeder):\n \"\"\"\n Returns all nodes in MV feeder with voltage issues.\n\n Parameters\n -----------\n edisgo : :class:`~.grid.network.EDisGo`\n The original edisgo object.\n feeder : :class:`~.grid.components.Line`\n MV feeder the storage will be connected to. The line object is an\n object from the copied graph.\n\n Returns\n -------\n :obj:`list`\n List with all nodes in MV feeder with voltage issues.\n\n \"\"\"\n # get all nodes with voltage issues in MV grid\n critical_nodes = check_tech_constraints.mv_voltage_deviation(\n edisgo.network, voltage_levels='mv')\n if critical_nodes:\n critical_nodes = critical_nodes[edisgo.network.mv_grid]\n else:\n return []\n # filter nodes with voltage issues in feeder\n critical_nodes_feeder = []\n for n in critical_nodes.index:\n if repr(n.mv_feeder) == repr(feeder):\n critical_nodes_feeder.append(n)\n return critical_nodes_feeder\n\n def _critical_lines_feeder(edisgo, feeder):\n \"\"\"\n Returns all lines in MV feeder with overload issues.\n\n Parameters\n -----------\n edisgo : :class:`~.grid.network.EDisGo`\n The original edisgo object.\n feeder : :class:`~.grid.components.Line`\n MV feeder the storage will be connected to. The line object is an\n object from the copied graph.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded lines in MV feeder, their maximum\n relative over-loading and the corresponding time step. 
See\n :func:`edisgo.flex_opt.check_tech_constraints.mv_line_load` for\n more information.\n\n \"\"\"\n # get all overloaded MV lines\n critical_lines = check_tech_constraints.mv_line_load(edisgo.network)\n # filter overloaded lines in feeder\n critical_lines_feeder = []\n for l in critical_lines.index:\n if repr(tools.get_mv_feeder_from_line(l)) == repr(feeder):\n critical_lines_feeder.append(l)\n return critical_lines.loc[critical_lines_feeder, :]\n\n def _estimate_new_number_of_lines(critical_lines_feeder):\n number_parallel_lines = 0\n for crit_line in critical_lines_feeder.index:\n number_parallel_lines += ceil(critical_lines_feeder.loc[\n crit_line, 'max_rel_overload'] * crit_line.quantity) - \\\n crit_line.quantity\n return number_parallel_lines\n\n debug = kwargs.get('debug', False)\n check_costs_reduction = kwargs.get('check_costs_reduction', False)\n\n # global variables\n # minimum and maximum storage power to be connected to the MV grid\n p_storage_min = 300\n p_storage_max = 4500\n\n # remaining storage nominal power\n if storage_nominal_power is None:\n storage_nominal_power = max(abs(storage_timeseries.p))\n p_storage_remaining = storage_nominal_power\n\n if debug:\n feeder_repr = []\n storage_path = []\n storage_repr = []\n storage_size = []\n\n # rank MV feeders by grid expansion costs\n\n # conduct grid reinforcement on copied edisgo object on worst-case time\n # steps\n grid_expansion_results_init = edisgo.reinforce(\n copy_graph=True, timesteps_pfa='snapshot_analysis')\n\n # only analyse storage integration if there were any grid expansion needs\n if grid_expansion_results_init.equipment_changes.empty:\n logger.debug('No storage integration necessary since there are no '\n 'grid expansion needs.')\n return\n else:\n equipment_changes_reinforcement_init = \\\n grid_expansion_results_init.equipment_changes.loc[\n grid_expansion_results_init.equipment_changes.iteration_step >\n 0]\n total_grid_expansion_costs = \\\n 
grid_expansion_results_init.grid_expansion_costs.total_costs.sum()\n if equipment_changes_reinforcement_init.empty:\n logger.debug('No storage integration necessary since there are no '\n 'grid expansion needs.')\n return\n else:\n network = equipment_changes_reinforcement_init.index[\n 0].grid.network\n\n # calculate grid expansion costs without costs for new generators\n # to be used in feeder ranking\n grid_expansion_costs_feeder_ranking = costs.grid_expansion_costs(\n network, without_generator_import=True)\n\n ranked_feeders = _feeder_ranking(grid_expansion_costs_feeder_ranking)\n\n count = 1\n storage_obj_list = []\n total_grid_expansion_costs_new = 'not calculated'\n for feeder in ranked_feeders.values:\n logger.debug('Feeder: {}'.format(count))\n count += 1\n\n # first step: find node where storage will be installed\n\n critical_nodes_feeder = _critical_nodes_feeder(edisgo, feeder)\n critical_lines_feeder = _critical_lines_feeder(edisgo, feeder)\n\n # get node the storage will be connected to (in original graph)\n battery_node = _find_battery_node(edisgo, critical_lines_feeder,\n critical_nodes_feeder)\n\n if battery_node:\n\n # add to output lists\n if debug:\n feeder_repr.append(repr(feeder))\n storage_path.append(nx.shortest_path(\n edisgo.network.mv_grid.graph,\n edisgo.network.mv_grid.station,\n battery_node))\n\n # second step: calculate storage size\n\n max_storage_size = min(p_storage_remaining, p_storage_max)\n p_storage = _calc_storage_size(edisgo, feeder, max_storage_size)\n\n # if p_storage is greater than or equal to the minimum storage\n # power required, do storage integration\n if p_storage >= p_storage_min:\n\n # third step: integrate storage\n\n share = p_storage / storage_nominal_power\n edisgo.integrate_storage(\n timeseries=storage_timeseries.p * share,\n position=battery_node,\n voltage_level='mv',\n timeseries_reactive_power=storage_timeseries.q * share)\n tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid)\n\n # get new storage 
object\n storage_obj = [_\n for _ in\n edisgo.network.mv_grid.graph.nodes_by_attribute(\n 'storage') if _ in\n edisgo.network.mv_grid.graph.neighbors(\n battery_node)][0]\n storage_obj_list.append(storage_obj)\n\n logger.debug(\n 'Storage with nominal power of {} kW connected to '\n 'node {} (path to HV/MV station {}).'.format(\n p_storage, battery_node, nx.shortest_path(\n battery_node.grid.graph, battery_node.grid.station,\n battery_node)))\n\n # fourth step: check if storage integration reduced grid\n # reinforcement costs or number of issues\n\n if check_costs_reduction == 'each_feeder':\n\n # calculate new grid expansion costs\n\n grid_expansion_results_new = edisgo.reinforce(\n copy_graph=True, timesteps_pfa='snapshot_analysis')\n\n total_grid_expansion_costs_new = \\\n grid_expansion_results_new.grid_expansion_costs.\\\n total_costs.sum()\n\n costs_diff = total_grid_expansion_costs - \\\n total_grid_expansion_costs_new\n\n if costs_diff > 0:\n logger.debug(\n 'Storage integration in feeder {} reduced grid '\n 'expansion costs by {} kEuro.'.format(\n feeder, costs_diff))\n\n if debug:\n storage_repr.append(repr(storage_obj))\n storage_size.append(storage_obj.nominal_power)\n\n total_grid_expansion_costs = \\\n total_grid_expansion_costs_new\n\n else:\n logger.debug(\n 'Storage integration in feeder {} did not reduce '\n 'grid expansion costs (costs increased by {} '\n 'kEuro).'.format(feeder, -costs_diff))\n\n tools.disconnect_storage(edisgo.network, storage_obj)\n p_storage = 0\n\n if debug:\n storage_repr.append(None)\n storage_size.append(0)\n\n edisgo.integrate_storage(\n timeseries=storage_timeseries.p * 0,\n position=battery_node,\n voltage_level='mv',\n timeseries_reactive_power=\n storage_timeseries.q * 0)\n tools.assign_mv_feeder_to_nodes(\n edisgo.network.mv_grid)\n\n else:\n number_parallel_lines_before = \\\n _estimate_new_number_of_lines(critical_lines_feeder)\n edisgo.analyze()\n critical_lines_feeder_new = _critical_lines_feeder(\n edisgo, 
feeder)\n critical_nodes_feeder_new = _critical_nodes_feeder(\n edisgo, feeder)\n number_parallel_lines = _estimate_new_number_of_lines(\n critical_lines_feeder_new)\n\n # if there are critical lines check if number of parallel\n # lines was reduced\n if not critical_lines_feeder.empty:\n diff_lines = number_parallel_lines_before - \\\n number_parallel_lines\n # if it was not reduced check if there are critical\n # nodes and if the number was reduced\n if diff_lines <= 0:\n # if there are no critical nodes remove storage\n if not critical_nodes_feeder:\n logger.debug(\n 'Storage integration in feeder {} did not '\n 'reduce number of critical lines (number '\n 'increased by {}), storage '\n 'is therefore removed.'.format(\n feeder, -diff_lines))\n\n tools.disconnect_storage(edisgo.network,\n storage_obj)\n p_storage = 0\n\n if debug:\n storage_repr.append(None)\n storage_size.append(0)\n\n edisgo.integrate_storage(\n timeseries=storage_timeseries.p * 0,\n position=battery_node,\n voltage_level='mv',\n timeseries_reactive_power=\n storage_timeseries.q * 0)\n tools.assign_mv_feeder_to_nodes(\n edisgo.network.mv_grid)\n else:\n logger.debug(\n 'Critical nodes in feeder {} '\n 'before and after storage integration: '\n '{} vs. {}'.format(\n feeder, critical_nodes_feeder,\n critical_nodes_feeder_new))\n if debug:\n storage_repr.append(repr(storage_obj))\n storage_size.append(\n storage_obj.nominal_power)\n else:\n logger.debug(\n 'Storage integration in feeder {} reduced '\n 'number of critical lines.'.format(feeder))\n\n if debug:\n storage_repr.append(repr(storage_obj))\n storage_size.append(storage_obj.nominal_power)\n\n # if there are no critical lines\n else:\n logger.debug(\n 'Critical nodes in feeder {} '\n 'before and after storage integration: '\n '{} vs. 
{}'.format(\n feeder, critical_nodes_feeder,\n critical_nodes_feeder_new))\n if debug:\n storage_repr.append(repr(storage_obj))\n storage_size.append(storage_obj.nominal_power)\n\n # fifth step: if there is storage capacity left, rerun\n # the past steps for the next feeder in the ranking\n # list\n p_storage_remaining = p_storage_remaining - p_storage\n if not p_storage_remaining > p_storage_min:\n break\n\n else:\n logger.debug('No storage integration in feeder {}.'.format(\n feeder))\n\n if debug:\n storage_repr.append(None)\n storage_size.append(0)\n\n edisgo.integrate_storage(\n timeseries=storage_timeseries.p * 0,\n position=battery_node,\n voltage_level='mv',\n timeseries_reactive_power=storage_timeseries.q * 0)\n tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid)\n else:\n logger.debug('No storage integration in feeder {} because there '\n 'are neither overloading nor voltage issues.'.format(\n feeder))\n\n if debug:\n storage_repr.append(None)\n storage_size.append(0)\n feeder_repr.append(repr(feeder))\n storage_path.append([])\n\n if check_costs_reduction == 'once':\n # check costs reduction and discard all storages if costs were not\n # reduced\n grid_expansion_results_new = edisgo.reinforce(\n copy_graph=True, timesteps_pfa='snapshot_analysis')\n\n total_grid_expansion_costs_new = \\\n grid_expansion_results_new.grid_expansion_costs. 
\\\n total_costs.sum()\n\n costs_diff = total_grid_expansion_costs - \\\n total_grid_expansion_costs_new\n\n if costs_diff > 0:\n logger.info(\n 'Storage integration in grid {} reduced grid '\n 'expansion costs by {} kEuro.'.format(\n edisgo.network.id, costs_diff))\n else:\n logger.info(\n 'Storage integration in grid {} did not reduce '\n 'grid expansion costs (costs increased by {} '\n 'kEuro).'.format(edisgo.network.id, -costs_diff))\n\n for storage in storage_obj_list:\n tools.disconnect_storage(edisgo.network, storage)\n elif check_costs_reduction == 'each_feeder':\n # if costs redcution was checked after each storage only give out\n # total costs reduction\n if total_grid_expansion_costs_new == 'not calculated':\n costs_diff = 0\n else:\n total_grid_expansion_costs = grid_expansion_results_init.\\\n grid_expansion_costs.total_costs.sum()\n costs_diff = total_grid_expansion_costs - \\\n total_grid_expansion_costs_new\n\n logger.info(\n 'Storage integration in grid {} reduced grid '\n 'expansion costs by {} kEuro.'.format(\n edisgo.network.id, costs_diff))\n\n if debug:\n plots.storage_size(edisgo.network.mv_grid, edisgo.network.pypsa,\n filename='storage_results_{}.pdf'.format(\n edisgo.network.id), lopf=False)\n storages_df = pd.DataFrame({'path': storage_path,\n 'repr': storage_repr,\n 'p_nom': storage_size},\n index=feeder_repr)\n storages_df.to_csv('storage_results_{}.csv'.format(edisgo.network.id))\n\n edisgo.network.results.storages_costs_reduction = pd.DataFrame(\n {'grid_expansion_costs_initial': total_grid_expansion_costs,\n 'grid_expansion_costs_with_storages': total_grid_expansion_costs_new},\n index=[edisgo.network.id])", "response": "This function creates a new storage capacity for one feeder."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn a list of modus switch disconnectors in MV grid.", "response": "def position_switch_disconnectors(mv_grid, mode='load', status='open'):\n \"\"\"\n Determine 
position of switch disconnector in MV grid rings\n\n Determination of the switch disconnector location is motivated by placing\n it to minimized load flows in both parts of the ring (half-rings).\n The switch disconnecter will be installed to a LV station, unless none\n exists in a ring. In this case, a node of arbitrary type is chosen for the\n location of the switch disconnecter.\n\n Parameters\n ----------\n mv_grid : :class:`~.grid.grids.MVGrid`\n MV grid instance\n mode : str\n Define modus switch disconnector positioning: can be performed based of\n 'load', 'generation' or both 'loadgen'. Defaults to 'load'\n status : str\n Either 'open' or 'closed'. Define which status is should be set\n initially. Defaults to 'open' (which refers to conditions of normal\n grid operation).\n\n Returns\n -------\n tuple\n A tuple of size 2 specifying their pair of nodes between which the\n switch disconnector is located. The first node specifies the node that\n actually includes the switch disconnector.\n\n Notes\n -----\n This function uses `nx.algorithms.find_cycle()` to identify nodes that are\n part of the MV grid ring(s). Make sure grid topology data that is provided\n has closed rings. Otherwise, no location for a switch disconnector can be\n identified.\n\n \"\"\"\n\n def peak_load_gen_at_node(node):\n \"\"\"Return peak load and peak generation capacity for ``node``\n\n Parameters\n ----------\n node : object\n Node instance in the grid topology graph\n\n Returns\n -------\n tuple\n Tuple of size two. 
First item is the peak load at ``node``; second\n parameters reflects peak generation capacity at ``node``.\n Returned peak_load and generation capacity is given as apparent\n power in kVA.\n\n \"\"\"\n if isinstance(node, LVStation):\n node_peak_load = node.grid.peak_load\n node_peak_gen = node.grid.peak_generation\n elif isinstance(node, Generator):\n node_peak_load = 0\n node_peak_gen = node.nominal_capacity\n elif isinstance(node, Load):\n node_peak_load = node.peak_load.sum()\n node_peak_gen = 0\n elif isinstance(node, BranchTee):\n node_peak_load = 0\n node_peak_gen = 0\n\n return (node_peak_load / cos_phi_load, node_peak_gen / cos_phi_gen)\n\n def load_gen_from_subtree(graph, ring, node):\n \"\"\"\n Accumulate load and generation capacity in branches to connecting node\n on the ring.\n\n Includes peak_load and generation capacity at ``node```itself.\n\n Parameters\n ----------\n graph : networkx.Graph\n The graph representing the MV grid topology\n ring : list\n A list of ring nodes\n node : networkx.Node\n A member of the ring\n\n Returns\n -------\n tuple\n Tuple of size two. 
First item is the peak load of subtree at\n ``node``; second parameter reflects peak generation capacity from\n subtree at ``node``.\n\n \"\"\"\n ring_nodes_except_node = [_ for _ in ring if _ is not node]\n non_ring_nodes = [n for n in\n [_ for _ in graph.nodes()\n if _ is not mv_grid.station]\n if n not in ring_nodes_except_node]\n subgraph = graph.subgraph(non_ring_nodes)\n\n nodes_subtree = nx.dfs_tree(subgraph, source=node)\n\n if len(nodes_subtree) > 1:\n peak_load_subtree = 0\n peak_gen_subtree = 0\n for n in nodes_subtree.nodes():\n peak_load_subtree_tmp, peak_gen_subtree_tmp = \\\n peak_load_gen_at_node(n)\n peak_load_subtree += peak_load_subtree_tmp\n peak_gen_subtree += peak_gen_subtree_tmp\n return (peak_load_subtree, peak_gen_subtree)\n else:\n return (0, 0)\n\n cos_phi_load = mv_grid.network.config['reactive_power_factor']['mv_load']\n cos_phi_gen = mv_grid.network.config['reactive_power_factor']['mv_gen']\n\n # Identify position of switch disconnector (SD)\n rings = nx.algorithms.cycle_basis(mv_grid.graph, root=mv_grid.station)\n\n for ring in rings:\n ring = [_ for _ in ring if _ is not mv_grid.station]\n\n node_peak_load = []\n node_peak_gen = []\n\n # Collect peak load and generation along the ring\n for node in ring:\n if len(mv_grid.graph.edges(nbunch=node)) > 2:\n peak_load, peak_gen = load_gen_from_subtree(\n mv_grid.graph, ring, node)\n else:\n peak_load, peak_gen = peak_load_gen_at_node(node)\n\n node_peak_load.append(peak_load)\n node_peak_gen.append(peak_gen)\n\n # Choose if SD is placed 'load' or 'generation' oriented\n if mode == 'load':\n node_peak_data = node_peak_load\n elif mode == 'generation':\n node_peak_data = node_peak_gen\n elif mode == 'loadgen':\n node_peak_data = node_peak_load if sum(node_peak_load) > sum(\n node_peak_gen) else node_peak_gen\n else:\n raise ValueError(\"Mode {mode} is not known!\".format(mode=mode))\n\n # Set start value for difference in ring halfs\n diff_min = 10e9\n\n # if none of the nodes is of the 
type LVStation, a switch\n # disconnecter will be installed anyways.\n if any([isinstance(n, LVStation) for n in ring]):\n has_lv_station = True\n else:\n has_lv_station = False\n logging.debug(\"Ring {} does not have a LV station. \"\n \"Switch disconnecter is installed at arbitrary \"\n \"node.\".format(ring))\n\n # Identify nodes where switch disconnector is located in between\n for ctr in range(len(node_peak_data)):\n # check if node that owns the switch disconnector is of type\n # LVStation\n if isinstance(ring[ctr - 2], LVStation) or not has_lv_station:\n # Iteratively split route and calc peak load difference\n route_data_part1 = sum(node_peak_data[0:ctr])\n route_data_part2 = sum(node_peak_data[ctr:len(node_peak_data)])\n diff = abs(route_data_part1 - route_data_part2)\n\n # stop walking through the ring when load/generation is almost\n # equal\n if diff <= diff_min:\n diff_min = diff\n position = ctr\n else:\n break\n\n # find position of switch disconnector\n node1 = ring[position - 1]\n node2 = ring[position]\n\n implement_switch_disconnector(mv_grid, node1, node2)\n\n # open all switch disconnectors\n if status == 'open':\n for sd in mv_grid.graph.nodes_by_attribute('mv_disconnecting_point'):\n sd.open()\n elif status == 'close':\n for sd in mv_grid.graph.nodes_by_attribute('mv_disconnecting_point'):\n sd.close()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef implement_switch_disconnector(mv_grid, node1, node2):\n # Get disconnecting point's location\n line = mv_grid.graph.edge[node1][node2]['line']\n\n length_sd_line = .75e-3 # in km\n\n x_sd = node1.geom.x + (length_sd_line / line.length) * (\n node1.geom.x - node2.geom.x)\n y_sd = node1.geom.y + (length_sd_line / line.length) * (\n node1.geom.y - node2.geom.y)\n\n # Instantiate disconnecting point\n mv_dp_number = len(mv_grid.graph.nodes_by_attribute(\n 'mv_disconnecting_point'))\n disconnecting_point = MVDisconnectingPoint(\n 
id=mv_dp_number + 1,\n geom=Point(x_sd, y_sd),\n grid=mv_grid)\n mv_grid.graph.add_node(disconnecting_point, type='mv_disconnecting_point')\n\n # Replace original line by a new line\n new_line_attr = {\n 'line': Line(\n id=line.id,\n type=line.type,\n length=line.length - length_sd_line,\n grid=mv_grid),\n 'type': 'line'}\n mv_grid.graph.remove_edge(node1, node2)\n mv_grid.graph.add_edge(disconnecting_point, node2, new_line_attr)\n\n # Add disconnecting line segment\n switch_disconnector_line_attr = {\n 'line': Line(\n id=\"switch_disconnector_line_{}\".format(\n str(mv_dp_number + 1)),\n type=line.type,\n length=length_sd_line,\n grid=mv_grid),\n 'type': 'line'}\n\n mv_grid.graph.add_edge(node1, disconnecting_point,\n switch_disconnector_line_attr)\n\n # Set line to switch disconnector\n disconnecting_point.line = mv_grid.graph.line_from_nodes(\n disconnecting_point, node1)", "response": "This function creates a new switch disconnector in the MV grid."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef select_cable(network, level, apparent_power):\n\n cable_count = 1\n\n if level == 'mv':\n\n available_cables = network.equipment_data['mv_cables'][\n network.equipment_data['mv_cables']['U_n'] ==\n network.mv_grid.voltage_nom]\n\n suitable_cables = available_cables[\n available_cables['I_max_th'] *\n network.mv_grid.voltage_nom > apparent_power]\n\n # increase cable count until appropriate cable type is found\n while suitable_cables.empty and cable_count < 20:\n cable_count += 1\n suitable_cables = available_cables[\n available_cables['I_max_th'] *\n network.mv_grid.voltage_nom *\n cable_count > apparent_power]\n if suitable_cables.empty and cable_count == 20:\n raise exceptions.MaximumIterationError(\n \"Could not find a suitable cable for apparent power of \"\n \"{} kVA.\".format(apparent_power))\n\n cable_type = suitable_cables.ix[suitable_cables['I_max_th'].idxmin()]\n\n elif level == 'lv':\n\n suitable_cables = 
network.equipment_data['lv_cables'][\n network.equipment_data['lv_cables']['I_max_th'] *\n network.equipment_data['lv_cables']['U_n'] > apparent_power]\n\n # increase cable count until appropriate cable type is found\n while suitable_cables.empty and cable_count < 20:\n cable_count += 1\n suitable_cables = network.equipment_data['lv_cables'][\n network.equipment_data['lv_cables']['I_max_th'] *\n network.equipment_data['lv_cables']['U_n'] *\n cable_count > apparent_power]\n if suitable_cables.empty and cable_count == 20:\n raise exceptions.MaximumIterationError(\n \"Could not find a suitable cable for apparent power of \"\n \"{} kVA.\".format(apparent_power))\n\n cable_type = suitable_cables.ix[suitable_cables['I_max_th'].idxmin()]\n\n else:\n raise ValueError('Please supply a level (either \\'mv\\' or \\'lv\\').')\n\n return cable_type, cable_count", "response": "Selects an appropriate cable type and quantity using given apparent power."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn all generators with some additional information.", "response": "def get_gen_info(network, level='mvlv', fluctuating=False):\n \"\"\"\n Gets all the installed generators with some additional information.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n Network object holding the grid data.\n level : :obj:`str`\n Defines which generators are returned. Possible options are:\n\n * 'mv'\n Only generators connected to the MV grid are returned.\n * 'lv'\n Only generators connected to the LV grids are returned.\n * 'mvlv'\n All generators connected to the MV grid and LV grids are returned.\n\n Default: 'mvlv'.\n fluctuating : :obj:`bool`\n If True only returns fluctuating generators. Default: False.\n\n Returns\n --------\n :pandas:`pandas.DataFrame<dataframe>`\n Dataframe with all generators connected to the specified voltage\n level. Index of the dataframe are the generator objects of type\n :class:`~.grid.components.Generator`. 
Columns of the dataframe are:\n\n * 'gen_repr'\n The representative of the generator as :obj:`str`.\n * 'type'\n The generator type, e.g. 'solar' or 'wind' as :obj:`str`.\n * 'voltage_level'\n The voltage level the generator is connected to as :obj:`str`. Can\n either be 'mv' or 'lv'.\n * 'nominal_capacity'\n The nominal capacity of the generator as as :obj:`float`.\n * 'weather_cell_id'\n The id of the weather cell the generator is located in as :obj:`int`\n (only applies to fluctuating generators).\n\n \"\"\"\n gens_w_id = []\n if 'mv' in level:\n gens = network.mv_grid.generators\n gens_voltage_level = ['mv']*len(gens)\n gens_type = [gen.type for gen in gens]\n gens_rating = [gen.nominal_capacity for gen in gens]\n for gen in gens:\n try:\n gens_w_id.append(gen.weather_cell_id)\n except AttributeError:\n gens_w_id.append(np.nan)\n gens_grid = [network.mv_grid]*len(gens)\n else:\n gens = []\n gens_voltage_level = []\n gens_type = []\n gens_rating = []\n gens_grid = []\n\n if 'lv' in level:\n for lv_grid in network.mv_grid.lv_grids:\n gens_lv = lv_grid.generators\n gens.extend(gens_lv)\n gens_voltage_level.extend(['lv']*len(gens_lv))\n gens_type.extend([gen.type for gen in gens_lv])\n gens_rating.extend([gen.nominal_capacity for gen in gens_lv])\n for gen in gens_lv:\n try:\n gens_w_id.append(gen.weather_cell_id)\n except AttributeError:\n gens_w_id.append(np.nan)\n gens_grid.extend([lv_grid] * len(gens_lv))\n\n gen_df = pd.DataFrame({'gen_repr': list(map(lambda x: repr(x), gens)),\n 'generator': gens,\n 'type': gens_type,\n 'voltage_level': gens_voltage_level,\n 'nominal_capacity': gens_rating,\n 'weather_cell_id': gens_w_id,\n 'grid': gens_grid})\n\n gen_df.set_index('generator', inplace=True, drop=True)\n\n # filter fluctuating generators\n if fluctuating:\n gen_df = gen_df.loc[(gen_df.type == 'solar') | (gen_df.type == 'wind')]\n\n return gen_df"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 
code\ndef assign_mv_feeder_to_nodes(mv_grid):\n mv_station_neighbors = mv_grid.graph.neighbors(mv_grid.station)\n # get all nodes in MV grid and remove MV station to get separate subgraphs\n mv_graph_nodes = mv_grid.graph.nodes()\n mv_graph_nodes.remove(mv_grid.station)\n subgraph = mv_grid.graph.subgraph(mv_graph_nodes)\n\n for neighbor in mv_station_neighbors:\n # determine feeder\n mv_feeder = mv_grid.graph.line_from_nodes(mv_grid.station, neighbor)\n # get all nodes in that feeder by doing a DFS in the disconnected\n # subgraph starting from the node adjacent to the MVStation `neighbor`\n subgraph_neighbor = nx.dfs_tree(subgraph, source=neighbor)\n for node in subgraph_neighbor.nodes():\n # in case of an LV station assign feeder to all nodes in that LV\n # grid\n if isinstance(node, LVStation):\n for lv_node in node.grid.graph.nodes():\n lv_node.mv_feeder = mv_feeder\n else:\n node.mv_feeder = mv_feeder", "response": "Assign a MV feeder to every generator LV station load and branch tee."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_mv_feeder_from_line(line):\n try:\n # get nodes of line\n nodes = line.grid.graph.nodes_from_line(line)\n\n # get feeders\n feeders = {}\n for node in nodes:\n # if one of the nodes is an MV station the line is an MV feeder\n # itself\n if isinstance(node, MVStation):\n feeders[repr(node)] = None\n else:\n feeders[repr(node)] = node.mv_feeder\n\n # return feeder that is not None\n feeder_1 = feeders[repr(nodes[0])]\n feeder_2 = feeders[repr(nodes[1])]\n if not feeder_1 is None and not feeder_2 is None:\n if feeder_1 == feeder_2:\n return feeder_1\n else:\n logging.warning('Different feeders for line {}.'.format(line))\n return None\n else:\n return feeder_1 if feeder_1 is not None else feeder_2\n except Exception as e:\n logging.warning('Failed to get MV feeder: {}.'.format(e))\n return None", "response": "Returns MV feeder identifier for the given line."} {"SOURCE": 
"codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef disconnect_storage(network, storage):\n # does only remove from network.pypsa, not from network.pypsa_lopf\n # remove from pypsa (buses, storage_units, storage_units_t, lines)\n neighbor = storage.grid.graph.neighbors(storage)[0]\n if network.pypsa is not None:\n line = storage.grid.graph.line_from_nodes(storage, neighbor)\n network.pypsa.storage_units = network.pypsa.storage_units.loc[\n network.pypsa.storage_units.index.drop(\n repr(storage)), :]\n network.pypsa.storage_units_t.p_set.drop([repr(storage)], axis=1,\n inplace=True)\n network.pypsa.storage_units_t.q_set.drop([repr(storage)], axis=1,\n inplace=True)\n network.pypsa.buses = network.pypsa.buses.loc[\n network.pypsa.buses.index.drop(\n '_'.join(['Bus', repr(storage)])), :]\n network.pypsa.lines = network.pypsa.lines.loc[\n network.pypsa.lines.index.drop(\n repr(line)), :]\n # delete line\n neighbor = storage.grid.graph.neighbors(storage)[0]\n storage.grid.graph.remove_edge(storage, neighbor)\n # delete storage\n storage.grid.graph.remove_node(storage)", "response": "Removes storage from network and pypsa representation."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a list of weather cell ids contained in the grid.", "response": "def weather_cells(self):\n \"\"\"\n Weather cells contained in grid\n\n Returns\n -------\n list\n list of weather cell ids contained in grid\n \"\"\"\n if not self._weather_cells:\n\n # get all the weather cell ids\n self._weather_cells = []\n for gen in self.generators:\n if hasattr(gen, 'weather_cell_id'):\n self._weather_cells.append(gen.weather_cell_id)\n\n # drop duplicates\n self._weather_cells = list(set(self._weather_cells))\n # no need to check for Nones in the list because None in\n # gen.weather_cell_id is kicked out by the if hasattr() before\n\n return self._weather_cells"} {"SOURCE": "codesearchnet", 
"instruction": "Write a Python 3 function that can\nreturn the total peak generation capacity of generators in this grid.", "response": "def peak_generation(self):\n \"\"\"\n Cumulative peak generation capacity of generators of this grid\n\n Returns\n -------\n float\n Ad-hoc calculated or cached peak generation capacity\n \"\"\"\n if self._peak_generation is None:\n self._peak_generation = sum(\n [gen.nominal_capacity\n for gen in self.generators])\n\n return self._peak_generation"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef peak_generation_per_technology(self):\n peak_generation = defaultdict(float)\n for gen in self.generators:\n peak_generation[gen.type] += gen.nominal_capacity\n\n return pd.Series(peak_generation)", "response": "Returns a pandas. Series of peak generation of each technology in the grid."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef peak_generation_per_technology_and_weather_cell(self):\n peak_generation = defaultdict(float)\n for gen in self.generators:\n if hasattr(gen, 'weather_cell_id'):\n if (gen.type, gen.weather_cell_id) in peak_generation.keys():\n peak_generation[gen.type, gen.weather_cell_id] += gen.nominal_capacity\n else:\n peak_generation[gen.type, gen.weather_cell_id] = gen.nominal_capacity\n else:\n message = 'No weather cell ID found for ' \\\n 'generator {}.'.format(repr(gen))\n raise KeyError(message)\n\n series_index = pd.MultiIndex.from_tuples(list(peak_generation.keys()),\n names=['type', 'weather_cell_id'])\n\n return pd.Series(peak_generation, index=series_index)", "response": "Peak generation of each technology and the \n corresponding weather cell in the grid \n\n ."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the total peak load capacity of generators in this grid.", "response": "def peak_load(self):\n \"\"\"\n Cumulative peak load 
capacity of generators of this grid\n\n Returns\n -------\n float\n Ad-hoc calculated or cached peak load capacity\n \"\"\"\n if self._peak_load is None:\n self._peak_load = sum(\n [_.peak_load.sum()\n for _ in self.graph.nodes_by_attribute('load')])\n\n return self._peak_load"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a pandas. Series of all the load - level kWh per sector for whole grid", "response": "def consumption(self):\n \"\"\"\n Consumption in kWh per sector for whole grid\n\n Returns\n -------\n :pandas:`pandas.Series<series>`\n Indexed by demand sector\n \"\"\"\n consumption = defaultdict(float)\n for load in self.graph.nodes_by_attribute('load'):\n for sector, val in load.consumption.items():\n consumption[sector] += val\n\n return pd.Series(consumption)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a list of Connected Generators within the grid.", "response": "def generators(self):\n \"\"\"\n Connected Generators within the grid\n\n Returns\n -------\n list\n List of Generator Objects\n\n \"\"\"\n if not self._generators:\n generators = list(self.graph.nodes_by_attribute('generator'))\n generators.extend(list(self.graph.nodes_by_attribute(\n 'generator_aggr')))\n return generators\n else:\n return self._generators"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef draw(self):\n\n # get nodes' positions\n nodes_pos = {}\n for node in self.graph.nodes():\n nodes_pos[node] = (node.geom.x, node.geom.y)\n\n plt.figure()\n nx.draw_networkx(self.graph, nodes_pos, node_size=16, font_size=8)\n plt.show()", "response": "Draw MV grid using the geo data stored in nodes"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef nodes_from_line(self, line):\n\n return dict([(v, k) for k, v in\n nx.get_edge_attributes(self, 'line').items()])[line]", 
"response": "Returns a dict of nodes adjacent to a line."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef line_from_nodes(self, u, v):\n try:\n line = nx.get_edge_attributes(self, 'line')[(u, v)]\n except:\n try:\n line = nx.get_edge_attributes(self, 'line')[(v, u)]\n except:\n raise nx.NetworkXError('Line between ``u`` and ``v`` not '\n 'included in the graph.')\n\n return line", "response": "Get line between two nodes u and v."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef nodes_by_attribute(self, attr_val, attr='type'):\n\n temp_nodes = getattr(self, 'node')\n nodes = list(filter(None, map(lambda x: x if temp_nodes[x][attr] == attr_val else None,\n temp_nodes.keys())))\n\n return nodes", "response": "Select Graph s nodes by attribute value"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a generator for iterating over the lines in the network.", "response": "def lines_by_attribute(self, attr_val=None, attr='type'):\n \"\"\" Returns a generator for iterating over Graph's lines by attribute value.\n\n Get all lines that share the same attribute. By default, the attr 'type'\n is used to specify the lines' type (line, agg_line, etc.).\n\n The edge of a graph is described by the two adjacent nodes and the line\n object itself. 
Whereas the line object is used to hold all relevant\n power system parameters.\n\n Examples\n --------\n >>> import edisgo\n >>> G = edisgo.grids.Graph()\n >>> G.add_node(1, type='generator')\n >>> G.add_node(2, type='load')\n >>> G.add_edge(1, 2, type='line')\n >>> lines = G.lines_by_attribute('line')\n >>> list(lines)[0]\n <class 'tuple'>: ((node1, node2), line)\n\n Parameters\n ----------\n attr_val: str\n Value of the `attr` lines should be selected by\n attr: str, default: 'type'\n Attribute key which is 'type' by default\n\n Returns\n -------\n Generator of :obj:`dict`\n A list containing line elements that match the given attribute\n value\n\n Notes\n -----\n There are generator functions for nodes (`Graph.nodes()`) and edges\n (`Graph.edges()`) in NetworkX but unlike graph nodes, which can be\n represented by objects, branch objects can only be accessed by using an\n edge attribute ('line' is used here)\n\n To make access to attributes of the line objects simpler and more\n intuitive for the user, this generator yields a dictionary for each edge\n that contains information about adjacent nodes and the line object.\n\n Note, the construction of the dictionary highly depends on the structure\n of the in-going tuple (which is defined by the needs of networkX). 
If\n this changes, the code will break.\n\n Adapted from `Dingo <https://github.com/openego/dingo/blob/\\\n ee237e37d4c228081e1e246d7e6d0d431c6dda9e/dingo/core/network/\\\n __init__.py>`_.\n \"\"\"\n\n # get all lines that have the attribute 'type' set\n lines_attributes = nx.get_edge_attributes(self, attr).items()\n\n # attribute value provided?\n if attr_val:\n # extract lines where 'type' == attr_val\n lines_attributes = [(k, self[k[0]][k[1]]['line'])\n for k, v in lines_attributes if v == attr_val]\n else:\n # get all lines\n lines_attributes = [(k, self[k[0]][k[1]]['line'])\n for k, v in lines_attributes]\n\n # sort them according to connected nodes\n lines_sorted = sorted(list(lines_attributes), key=lambda _: repr(_[1]))\n\n for line in lines_sorted:\n yield {'adj_nodes': line[0], 'line': line[1]}"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef intercept_service(self, continuation, handler_call_details):\n rpc_method_handler = self._get_rpc_handler(handler_call_details)\n if rpc_method_handler.response_streaming:\n if self._wrapped.is_streaming:\n # `self._wrapped` is a `StreamServerInterceptor`\n return self._wrapped.intercept_service(\n continuation, handler_call_details)\n else:\n if not self._wrapped.is_streaming:\n # `self._wrapped` is a `UnaryServerInterceptor`\n return self._wrapped.intercept_service(\n continuation, handler_call_details)\n\n # skip the interceptor due to type mismatch\n return continuation(handler_call_details)", "response": "Intercepts incoming RPCs before handing them over to a handler\n "} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef to_pypsa(network, mode, timesteps):\n\n # check if timesteps is array-like, otherwise convert to list (necessary\n # to obtain a dataframe when using .loc in time series functions)\n if not hasattr(timesteps, \"__len__\"):\n timesteps = [timesteps]\n\n # get topology 
and time series data\n if mode is None:\n mv_components = mv_to_pypsa(network)\n lv_components = lv_to_pypsa(network)\n components = combine_mv_and_lv(mv_components, lv_components)\n\n if list(components['Load'].index.values):\n timeseries_load_p, timeseries_load_q = _pypsa_load_timeseries(\n network, mode=mode, timesteps=timesteps)\n\n if len(list(components['Generator'].index.values)) > 1:\n timeseries_gen_p, timeseries_gen_q = _pypsa_generator_timeseries(\n network, mode=mode, timesteps=timesteps)\n\n if list(components['Bus'].index.values):\n timeseries_bus_v_set = _pypsa_bus_timeseries(\n network, components['Bus'].index.tolist(), timesteps=timesteps)\n\n if len(list(components['StorageUnit'].index.values)) > 0:\n timeseries_storage_p, timeseries_storage_q = \\\n _pypsa_storage_timeseries(\n network, mode=mode, timesteps=timesteps)\n\n elif mode is 'mv':\n # the pypsa export works but NotImplementedError is raised since the\n # rest of edisgo (handling of results from pfa, grid expansion, etc.)\n # does not yet work\n raise NotImplementedError\n mv_components = mv_to_pypsa(network)\n components = add_aggregated_lv_components(network, mv_components)\n\n if list(components['Load'].index.values):\n timeseries_load_p, timeseries_load_q = _pypsa_load_timeseries(\n network, mode=mode, timesteps=timesteps)\n\n if len(list(components['Generator'].index.values)) > 1:\n timeseries_gen_p, timeseries_gen_q = _pypsa_generator_timeseries(\n network, mode=mode, timesteps=timesteps)\n\n if list(components['Bus'].index.values):\n timeseries_bus_v_set = _pypsa_bus_timeseries(\n network, components['Bus'].index.tolist(), timesteps=timesteps)\n\n if len(list(components['StorageUnit'].index.values)) > 0:\n timeseries_storage_p, timeseries_storage_q = \\\n _pypsa_storage_timeseries(\n network, mode=mode, timesteps=timesteps)\n\n elif mode is 'lv':\n raise NotImplementedError\n #lv_to_pypsa(network)\n else:\n raise ValueError(\"Provide proper mode or leave it empty to export \"\n 
\"entire grid topology.\")\n\n # check topology\n _check_topology(components)\n\n # create power flow problem\n pypsa_network = PyPSANetwork()\n pypsa_network.edisgo_mode = mode\n pypsa_network.set_snapshots(timesteps)\n\n # import grid topology to PyPSA network\n # buses are created first to avoid warnings\n pypsa_network.import_components_from_dataframe(components['Bus'], 'Bus')\n\n for k, comps in components.items():\n if k is not 'Bus' and not comps.empty:\n pypsa_network.import_components_from_dataframe(comps, k)\n\n # import time series to PyPSA network\n if len(list(components['Generator'].index.values)) > 1:\n import_series_from_dataframe(pypsa_network, timeseries_gen_p,\n 'Generator', 'p_set')\n import_series_from_dataframe(pypsa_network, timeseries_gen_q,\n 'Generator', 'q_set')\n\n if list(components['Load'].index.values):\n import_series_from_dataframe(pypsa_network, timeseries_load_p,\n 'Load', 'p_set')\n import_series_from_dataframe(pypsa_network, timeseries_load_q,\n 'Load', 'q_set')\n\n if list(components['Bus'].index.values):\n import_series_from_dataframe(pypsa_network, timeseries_bus_v_set,\n 'Bus', 'v_mag_pu_set')\n\n if len(list(components['StorageUnit'].index.values)) > 0:\n import_series_from_dataframe(pypsa_network, timeseries_storage_p,\n 'StorageUnit', 'p_set')\n import_series_from_dataframe(pypsa_network, timeseries_storage_q,\n 'StorageUnit', 'q_set')\n\n _check_integrity_of_pypsa(pypsa_network)\n\n return pypsa_network", "response": "Translate graph based grid representation to PyPSA representation."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntranslates MV grid topology representation to PyPSA format.", "response": "def mv_to_pypsa(network):\n \"\"\"Translate MV grid topology representation to PyPSA format\n\n MV grid topology translated here includes\n\n * MV station (no transformer, see :meth:`~.grid.network.EDisGo.analyze`)\n * Loads, Generators, Lines, Storages, Branch Tees of MV grid level 
as well\n as LV stations. LV stations do not have load and generation of LV level.\n\n Parameters\n ----------\n network : Network\n eDisGo grid container\n\n Returns\n -------\n dict of :pandas:`pandas.DataFrame<dataframe>`\n A DataFrame for each type of PyPSA components constituting the grid\n topology. Keys included\n\n * 'Generator'\n * 'Load'\n * 'Line'\n * 'BranchTee'\n * 'Transformer'\n * 'StorageUnit'\n\n .. warning::\n\n PyPSA takes resistance R and reactance X in p.u. The conversion from\n values in ohm to pu notation is performed by following equations\n\n .. math::\n\n r_{p.u.} = R_{\\Omega} / Z_{B}\n\n x_{p.u.} = X_{\\Omega} / Z_{B}\n\n with\n\n Z_{B} = V_B / S_B\n\n I'm quite sure, but its not 100 % clear if the base voltage V_B is\n chosen correctly. We take the primary side voltage of transformer as\n the transformers base voltage. See\n `#54 <https://github.com/openego/eDisGo/issues/54>`_ for discussion.\n \"\"\"\n\n generators = network.mv_grid.generators\n loads = network.mv_grid.graph.nodes_by_attribute('load')\n branch_tees = network.mv_grid.graph.nodes_by_attribute('branch_tee')\n lines = network.mv_grid.graph.lines()\n lv_stations = network.mv_grid.graph.nodes_by_attribute('lv_station')\n mv_stations = network.mv_grid.graph.nodes_by_attribute('mv_station')\n disconnecting_points = network.mv_grid.graph.nodes_by_attribute(\n 'mv_disconnecting_point')\n storages = network.mv_grid.graph.nodes_by_attribute(\n 'storage')\n\n omega = 2 * pi * 50\n\n # define required dataframe columns for components\n generator = {'name': [],\n 'bus': [],\n 'control': [],\n 'p_nom': [],\n 'type': []}\n\n bus = {'name': [], 'v_nom': [], 'x': [], 'y': []}\n\n load = {'name': [], 'bus': []}\n\n line = {'name': [],\n 'bus0': [],\n 'bus1': [],\n 'type': [],\n 'x': [],\n 'r': [],\n 's_nom': [],\n 'length': []}\n\n transformer = {'name': [],\n 'bus0': [],\n 'bus1': [],\n 'type': [],\n 'model': [],\n 'x': [],\n 'r': [],\n 's_nom': [],\n 'tap_ratio': []}\n\n storage = {\n 
'name': [],\n 'bus': [],\n 'p_nom': [],\n 'state_of_charge_initial': [],\n 'efficiency_store': [],\n 'efficiency_dispatch': [],\n 'standing_loss': []}\n\n # create dataframe representing generators and associated buses\n for gen in generators:\n bus_name = '_'.join(['Bus', repr(gen)])\n generator['name'].append(repr(gen))\n generator['bus'].append(bus_name)\n generator['control'].append('PQ')\n generator['p_nom'].append(gen.nominal_capacity / 1e3)\n generator['type'].append('_'.join([gen.type, gen.subtype]))\n\n bus['name'].append(bus_name)\n bus['v_nom'].append(gen.grid.voltage_nom)\n bus['x'].append(gen.geom.x)\n bus['y'].append(gen.geom.y)\n\n # create dataframe representing branch tees\n for bt in branch_tees:\n bus['name'].append('_'.join(['Bus', repr(bt)]))\n bus['v_nom'].append(bt.grid.voltage_nom)\n bus['x'].append(bt.geom.x)\n bus['y'].append(bt.geom.y)\n\n # create dataframes representing loads and associated buses\n for lo in loads:\n bus_name = '_'.join(['Bus', repr(lo)])\n load['name'].append(repr(lo))\n load['bus'].append(bus_name)\n\n bus['name'].append(bus_name)\n bus['v_nom'].append(lo.grid.voltage_nom)\n bus['x'].append(lo.geom.x)\n bus['y'].append(lo.geom.y)\n\n # create dataframe for lines\n for l in lines:\n line['name'].append(repr(l['line']))\n\n if l['adj_nodes'][0] in lv_stations:\n line['bus0'].append(\n '_'.join(['Bus', l['adj_nodes'][0].__repr__(side='mv')]))\n else:\n line['bus0'].append('_'.join(['Bus', repr(l['adj_nodes'][0])]))\n\n if l['adj_nodes'][1] in lv_stations:\n line['bus1'].append(\n '_'.join(['Bus', l['adj_nodes'][1].__repr__(side='mv')]))\n else:\n line['bus1'].append('_'.join(['Bus', repr(l['adj_nodes'][1])]))\n\n line['type'].append(\"\")\n line['x'].append(\n l['line'].type['L'] / l['line'].quantity * omega / 1e3 *\n l['line'].length)\n line['r'].append(l['line'].type['R'] / l['line'].quantity *\n l['line'].length)\n line['s_nom'].append(\n sqrt(3) * l['line'].type['I_max_th'] * l['line'].type['U_n'] *\n 
l['line'].quantity / 1e3)\n line['length'].append(l['line'].length)\n\n # create dataframe for LV stations incl. primary/secondary side bus\n for lv_st in lv_stations:\n transformer_count = 1\n # add primary side bus (bus0)\n bus0_name = '_'.join(['Bus', lv_st.__repr__(side='mv')])\n bus['name'].append(bus0_name)\n bus['v_nom'].append(lv_st.mv_grid.voltage_nom)\n bus['x'].append(lv_st.geom.x)\n bus['y'].append(lv_st.geom.y)\n\n # add secondary side bus (bus1)\n bus1_name = '_'.join(['Bus', lv_st.__repr__(side='lv')])\n bus['name'].append(bus1_name)\n bus['v_nom'].append(lv_st.transformers[0].voltage_op)\n bus['x'].append(None)\n bus['y'].append(None)\n\n # we choose voltage of transformers' primary side\n v_base = lv_st.mv_grid.voltage_nom\n\n for tr in lv_st.transformers:\n z_base = v_base ** 2 / tr.type.S_nom\n transformer['name'].append(\n '_'.join([repr(lv_st), 'transformer', str(transformer_count)]))\n transformer['bus0'].append(bus0_name)\n transformer['bus1'].append(bus1_name)\n transformer['type'].append(\"\")\n transformer['model'].append('pi')\n transformer['r'].append(tr.type.R / z_base)\n transformer['x'].append(tr.type.X / z_base)\n transformer['s_nom'].append(tr.type.S_nom / 1e3)\n transformer['tap_ratio'].append(1)\n\n transformer_count += 1\n\n # create dataframe for MV stations (only secondary side bus)\n for mv_st in mv_stations:\n # add secondary side bus (bus1)\n bus1_name = '_'.join(['Bus', mv_st.__repr__(side='mv')])\n bus['name'].append(bus1_name)\n bus['v_nom'].append(mv_st.transformers[0].voltage_op)\n bus['x'].append(mv_st.geom.x)\n bus['y'].append(mv_st.geom.y)\n\n # create dataframe representing disconnecting points\n for dp in disconnecting_points:\n bus['name'].append('_'.join(['Bus', repr(dp)]))\n bus['v_nom'].append(dp.grid.voltage_nom)\n bus['x'].append(dp.geom.x)\n bus['y'].append(dp.geom.y)\n\n # create dataframe representing storages\n for sto in storages:\n bus_name = '_'.join(['Bus', repr(sto)])\n\n 
storage['name'].append(repr(sto))\n storage['bus'].append(bus_name)\n storage['p_nom'].append(sto.nominal_power / 1e3)\n storage['state_of_charge_initial'].append(sto.soc_initial)\n storage['efficiency_store'].append(sto.efficiency_in)\n storage['efficiency_dispatch'].append(sto.efficiency_out)\n storage['standing_loss'].append(sto.standing_loss)\n\n bus['name'].append(bus_name)\n bus['v_nom'].append(sto.grid.voltage_nom)\n bus['x'].append(sto.geom.x)\n bus['y'].append(sto.geom.y)\n\n # Add separate slack generator at MV station secondary side bus bar\n generator['name'].append(\"Generator_slack\")\n generator['bus'].append(bus1_name)\n generator['control'].append('Slack')\n generator['p_nom'].append(0)\n generator['type'].append('Slack generator')\n\n components = {\n 'Generator': pd.DataFrame(generator).set_index('name'),\n 'Bus': pd.DataFrame(bus).set_index('name'),\n 'Load': pd.DataFrame(load).set_index('name'),\n 'Line': pd.DataFrame(line).set_index('name'),\n 'Transformer': pd.DataFrame(transformer).set_index('name'),\n 'StorageUnit': pd.DataFrame(storage).set_index('name')}\n\n return components"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncombines MV and LV grid topology in PyPSA format", "response": "def combine_mv_and_lv(mv, lv):\n \"\"\"Combine MV and LV grid topology in PyPSA format\n \"\"\"\n\n combined = {\n c: pd.concat([mv[c], lv[c]], axis=0) for c in list(lv.keys())\n }\n\n combined['Transformer'] = mv['Transformer']\n\n return combined"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding aggregated LV load and generation at LV stations to the overall container.", "response": "def add_aggregated_lv_components(network, components):\n \"\"\"\n Aggregates LV load and generation at LV stations\n\n Use this function if you aim for MV calculation only. 
The according\n DataFrames of `components` are extended by load and generators representing\n these aggregated respecting the technology type.\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n components : dict of :pandas:`pandas.DataFrame<dataframe>`\n PyPSA components in tabular format\n\n Returns\n -------\n :obj:`dict` of :pandas:`pandas.DataFrame<dataframe>`\n The dictionary components passed to the function is returned altered.\n \"\"\"\n generators = {}\n\n loads = {}\n\n # collect aggregated generation capacity by type and subtype\n # collect aggregated load grouped by sector\n for lv_grid in network.mv_grid.lv_grids:\n generators.setdefault(lv_grid, {})\n for gen in lv_grid.generators:\n generators[lv_grid].setdefault(gen.type, {})\n generators[lv_grid][gen.type].setdefault(gen.subtype, {})\n generators[lv_grid][gen.type][gen.subtype].setdefault(\n 'capacity', 0)\n generators[lv_grid][gen.type][gen.subtype][\n 'capacity'] += gen.nominal_capacity\n generators[lv_grid][gen.type][gen.subtype].setdefault(\n 'name',\n '_'.join([gen.type,\n gen.subtype,\n 'aggregated',\n 'LV_grid',\n str(lv_grid.id)]))\n loads.setdefault(lv_grid, {})\n for lo in lv_grid.graph.nodes_by_attribute('load'):\n for sector, val in lo.consumption.items():\n loads[lv_grid].setdefault(sector, 0)\n loads[lv_grid][sector] += val\n\n # define dict for DataFrame creation of aggr. 
generation and load\n generator = {'name': [],\n 'bus': [],\n 'control': [],\n 'p_nom': [],\n 'type': []}\n\n load = {'name': [], 'bus': []}\n\n # fill generators dictionary for DataFrame creation\n for lv_grid_obj, lv_grid in generators.items():\n for _, gen_type in lv_grid.items():\n for _, gen_subtype in gen_type.items():\n generator['name'].append(gen_subtype['name'])\n generator['bus'].append(\n '_'.join(['Bus', lv_grid_obj.station.__repr__('lv')]))\n generator['control'].append('PQ')\n generator['p_nom'].append(gen_subtype['capacity'])\n generator['type'].append(\"\")\n\n # fill loads dictionary for DataFrame creation\n for lv_grid_obj, lv_grid in loads.items():\n for sector, val in lv_grid.items():\n load['name'].append('_'.join(['Load', sector, repr(lv_grid_obj)]))\n load['bus'].append(\n '_'.join(['Bus', lv_grid_obj.station.__repr__('lv')]))\n\n components['Generator'] = pd.concat(\n [components['Generator'], pd.DataFrame(generator).set_index('name')])\n components['Load'] = pd.concat(\n [components['Load'], pd.DataFrame(load).set_index('name')])\n\n return components"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a pandas. 
DataFrame of load time series for MV and LV grids.", "response": "def _pypsa_load_timeseries(network, timesteps, mode=None):\n \"\"\"\n Time series in PyPSA compatible format for load instances\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n timesteps : array_like\n Timesteps is an array-like object with entries of type\n :pandas:`pandas.Timestamp<timestamp>` specifying which time steps\n to export to pypsa representation and use in power flow analysis.\n mode : str, optional\n Specifically retrieve load time series for MV or LV grid level or both.\n Either choose 'mv' or 'lv'.\n Defaults to None, which returns both timeseries for MV and LV in a\n single DataFrame.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Time series table in PyPSA format\n \"\"\"\n mv_load_timeseries_p = []\n mv_load_timeseries_q = []\n lv_load_timeseries_p = []\n lv_load_timeseries_q = []\n\n # add MV grid loads\n if mode is 'mv' or mode is None:\n for load in network.mv_grid.graph.nodes_by_attribute('load'):\n mv_load_timeseries_q.append(load.pypsa_timeseries('q').rename(\n repr(load)).to_frame().loc[timesteps])\n mv_load_timeseries_p.append(load.pypsa_timeseries('p').rename(\n repr(load)).to_frame().loc[timesteps])\n if mode is 'mv':\n lv_load_timeseries_p, lv_load_timeseries_q = \\\n _pypsa_load_timeseries_aggregated_at_lv_station(\n network, timesteps)\n\n # add LV grid's loads\n if mode is 'lv' or mode is None:\n for lv_grid in network.mv_grid.lv_grids:\n for load in lv_grid.graph.nodes_by_attribute('load'):\n lv_load_timeseries_q.append(load.pypsa_timeseries('q').rename(\n repr(load)).to_frame().loc[timesteps])\n lv_load_timeseries_p.append(load.pypsa_timeseries('p').rename(\n repr(load)).to_frame().loc[timesteps])\n\n load_df_p = pd.concat(mv_load_timeseries_p + lv_load_timeseries_p, axis=1)\n load_df_q = pd.concat(mv_load_timeseries_q + lv_load_timeseries_q, axis=1)\n\n return load_df_p, load_df_q"} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _pypsa_generator_timeseries(network, timesteps, mode=None):\n\n mv_gen_timeseries_q = []\n mv_gen_timeseries_p = []\n lv_gen_timeseries_q = []\n lv_gen_timeseries_p = []\n\n # MV generator timeseries\n if mode is 'mv' or mode is None:\n for gen in network.mv_grid.generators:\n mv_gen_timeseries_q.append(gen.pypsa_timeseries('q').rename(\n repr(gen)).to_frame().loc[timesteps])\n mv_gen_timeseries_p.append(gen.pypsa_timeseries('p').rename(\n repr(gen)).to_frame().loc[timesteps])\n if mode is 'mv':\n lv_gen_timeseries_p, lv_gen_timeseries_q = \\\n _pypsa_generator_timeseries_aggregated_at_lv_station(\n network, timesteps)\n\n # LV generator timeseries\n if mode is 'lv' or mode is None:\n for lv_grid in network.mv_grid.lv_grids:\n for gen in lv_grid.generators:\n lv_gen_timeseries_q.append(gen.pypsa_timeseries('q').rename(\n repr(gen)).to_frame().loc[timesteps])\n lv_gen_timeseries_p.append(gen.pypsa_timeseries('p').rename(\n repr(gen)).to_frame().loc[timesteps])\n\n gen_df_p = pd.concat(mv_gen_timeseries_p + lv_gen_timeseries_p, axis=1)\n gen_df_q = pd.concat(mv_gen_timeseries_q + lv_gen_timeseries_q, axis=1)\n\n return gen_df_p, gen_df_q", "response": "Returns a pandas. DataFrame of generator time series for the given number of timesteps."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a pandas. 
DataFrame of time series for storage instances in pypsa compatible format.", "response": "def _pypsa_storage_timeseries(network, timesteps, mode=None):\n \"\"\"\n Timeseries in PyPSA compatible format for storage instances\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n timesteps : array_like\n Timesteps is an array-like object with entries of type\n :pandas:`pandas.Timestamp<timestamp>` specifying which time steps\n to export to pypsa representation and use in power flow analysis.\n mode : str, optional\n Specifically retrieve generator time series for MV or LV grid level or\n both. Either choose 'mv' or 'lv'.\n Defaults to None, which returns both timeseries for MV and LV in a\n single DataFrame.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Time series table in PyPSA format\n \"\"\"\n\n mv_storage_timeseries_q = []\n mv_storage_timeseries_p = []\n lv_storage_timeseries_q = []\n lv_storage_timeseries_p = []\n\n # MV storage time series\n if mode is 'mv' or mode is None:\n for storage in network.mv_grid.graph.nodes_by_attribute('storage'):\n mv_storage_timeseries_q.append(\n storage.pypsa_timeseries('q').rename(\n repr(storage)).to_frame().loc[timesteps])\n mv_storage_timeseries_p.append(\n storage.pypsa_timeseries('p').rename(\n repr(storage)).to_frame().loc[timesteps])\n\n # LV storage time series\n if mode is 'lv' or mode is None:\n for lv_grid in network.mv_grid.lv_grids:\n for storage in lv_grid.graph.nodes_by_attribute('storage'):\n lv_storage_timeseries_q.append(\n storage.pypsa_timeseries('q').rename(\n repr(storage)).to_frame().loc[timesteps])\n lv_storage_timeseries_p.append(\n storage.pypsa_timeseries('p').rename(\n repr(storage)).to_frame().loc[timesteps])\n\n storage_df_p = pd.concat(\n mv_storage_timeseries_p + lv_storage_timeseries_p, axis=1)\n storage_df_q = pd.concat(\n mv_storage_timeseries_q + lv_storage_timeseries_q, axis=1)\n\n return storage_df_p, storage_df_q"} {"SOURCE": 
"codesearchnet", "instruction": "Write a Python 3 script for\nreturning a Pandas dataframe of time series for each bus in timesteps.", "response": "def _pypsa_bus_timeseries(network, buses, timesteps):\n \"\"\"\n Time series in PyPSA compatible format for bus instances\n\n Set all buses except for the slack bus to voltage of 1 pu (it is assumed\n this setting is entirely ignored during solving the power flow problem).\n This slack bus is set to an operational voltage which is typically greater\n than nominal voltage plus a control deviation.\n The control deviation is always added positively to the operational voltage.\n For example, the operational voltage (offset) is set to 1.025 pu plus the\n control deviation of 0.015 pu. This adds up to a set voltage of the slack\n bus of 1.04 pu.\n\n .. warning::\n\n Voltage settings for the slack bus defined by this function assume the\n feedin case (reverse power flow case) as the worst-case for the power\n system. Thus, the set point for the slack is always greater 1.\n\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n timesteps : array_like\n Timesteps is an array-like object with entries of type\n :pandas:`pandas.Timestamp<timestamp>` specifying which time steps\n to export to pypsa representation and use in power flow analysis.\n buses : list\n Buses names\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Time series table in PyPSA format\n \"\"\"\n\n # get slack bus label\n slack_bus = '_'.join(\n ['Bus', network.mv_grid.station.__repr__(side='mv')])\n\n # set all buses (except slack bus) to nominal voltage\n v_set_dict = {bus: 1 for bus in buses if bus != slack_bus}\n\n # Set slack bus to operational voltage (includes offset and control\n # deviation\n control_deviation = network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'hv_mv_trafo_control_deviation']\n if control_deviation != 0:\n control_deviation_ts = \\\n 
network.timeseries.timesteps_load_feedin_case.case.apply(\n lambda _: control_deviation if _ == 'feedin_case'\n else -control_deviation)\n else:\n control_deviation_ts = 0\n\n slack_voltage_pu = control_deviation_ts + 1 + \\\n network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'hv_mv_trafo_offset']\n\n v_set_dict.update({slack_bus: slack_voltage_pu})\n\n # Convert to PyPSA compatible dataframe\n v_set_df = pd.DataFrame(v_set_dict, index=timesteps)\n\n return v_set_df"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _pypsa_generator_timeseries_aggregated_at_lv_station(network, timesteps):\n\n generation_p = []\n generation_q = []\n\n for lv_grid in network.mv_grid.lv_grids:\n # Determine aggregated generation at LV stations\n generation = {}\n for gen in lv_grid.generators:\n # for type in gen.type:\n # for subtype in gen.subtype:\n gen_name = '_'.join([gen.type,\n gen.subtype,\n 'aggregated',\n 'LV_grid',\n str(lv_grid.id)])\n\n generation.setdefault(gen.type, {})\n generation[gen.type].setdefault(gen.subtype, {})\n generation[gen.type][gen.subtype].setdefault('timeseries_p', [])\n generation[gen.type][gen.subtype].setdefault('timeseries_q', [])\n generation[gen.type][gen.subtype]['timeseries_p'].append(\n gen.pypsa_timeseries('p').rename(gen_name).to_frame().loc[\n timesteps])\n generation[gen.type][gen.subtype]['timeseries_q'].append(\n gen.pypsa_timeseries('q').rename(gen_name).to_frame().loc[\n timesteps])\n\n for k_type, v_type in generation.items():\n for k_type, v_subtype in v_type.items():\n col_name = v_subtype['timeseries_p'][0].columns[0]\n generation_p.append(\n pd.concat(v_subtype['timeseries_p'],\n axis=1).sum(axis=1).rename(col_name).to_frame())\n generation_q.append(\n pd.concat(v_subtype['timeseries_q'], axis=1).sum(\n axis=1).rename(col_name).to_frame())\n\n return generation_p, generation_q", "response": "Aggregates generator time series per generator subtype and LV 
grid."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\naggregate load time series per sector and LV grid. Parameters ---------- network : Network The eDisGo grid topology model overall container timesteps : array_like Timesteps is an array-like object with entries of type :pandas:`pandas.Timestamp<timestamp>` specifying which time steps to export to pypsa representation and use in power flow analysis. Returns ------- tuple of :pandas:`pandas.DataFrame<dataframe>` Tuple of size two containing DataFrames that represent 1. 'p_set' of aggregated Load per sector at each LV station 2. 'q_set' of aggregated Load per sector at each LV station", "response": "def _pypsa_load_timeseries_aggregated_at_lv_station(network, timesteps):\n \"\"\"\n Aggregates load time series per sector and LV grid.\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n timesteps : array_like\n Timesteps is an array-like object with entries of type\n :pandas:`pandas.Timestamp<timestamp>` specifying which time steps\n to export to pypsa representation and use in power flow analysis.\n\n Returns\n -------\n tuple of :pandas:`pandas.DataFrame<dataframe>`\n Tuple of size two containing DataFrames that represent\n\n 1. 'p_set' of aggregated Load per sector at each LV station\n 2. 
'q_set' of aggregated Load per sector at each LV station\n\n \"\"\"\n # ToDo: Load.pypsa_timeseries is not differentiated by sector so this\n # function will not work (either change here and in\n # add_aggregated_lv_components or in Load class)\n \n load_p = []\n load_q = []\n\n for lv_grid in network.mv_grid.lv_grids:\n # Determine aggregated load at LV stations\n load = {}\n for lo in lv_grid.graph.nodes_by_attribute('load'):\n for sector, val in lo.consumption.items():\n load.setdefault(sector, {})\n load[sector].setdefault('timeseries_p', [])\n load[sector].setdefault('timeseries_q', [])\n\n load[sector]['timeseries_p'].append(\n lo.pypsa_timeseries('p').rename(repr(lo)).to_frame().loc[\n timesteps])\n load[sector]['timeseries_q'].append(\n lo.pypsa_timeseries('q').rename(repr(lo)).to_frame().loc[\n timesteps])\n\n for sector, val in load.items():\n load_p.append(\n pd.concat(val['timeseries_p'], axis=1).sum(axis=1).rename(\n '_'.join(['Load', sector, repr(lv_grid)])).to_frame())\n load_q.append(\n pd.concat(val['timeseries_q'], axis=1).sum(axis=1).rename(\n '_'.join(['Load', sector, repr(lv_grid)])).to_frame())\n\n return load_p, load_q"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef process_pfa_results(network, pypsa, timesteps):\n # get the absolute losses in the system\n # subtracting total generation (including slack) from total load\n grid_losses = {'p': 1e3 * (pypsa.generators_t['p'].sum(axis=1) -\n pypsa.loads_t['p'].sum(axis=1)),\n 'q': 1e3 * (pypsa.generators_t['q'].sum(axis=1) -\n pypsa.loads_t['q'].sum(axis=1))}\n\n network.results.grid_losses = pd.DataFrame(grid_losses).loc[timesteps, :]\n\n # get slack results\n grid_exchanges = {'p': 1e3 * (pypsa.generators_t['p']['Generator_slack']),\n 'q': 1e3 * (pypsa.generators_t['q']['Generator_slack'])}\n\n network.results.hv_mv_exchanges = pd.DataFrame(grid_exchanges).loc[timesteps, :]\n\n # get p and q of lines, LV transformers and MV Station 
(slack generator)\n # in absolute values\n q0 = pd.concat(\n [np.abs(pypsa.lines_t['q0']),\n np.abs(pypsa.transformers_t['q0']),\n np.abs(pypsa.generators_t['q']['Generator_slack'].rename(\n repr(network.mv_grid.station)))], axis=1).loc[timesteps, :]\n q1 = pd.concat(\n [np.abs(pypsa.lines_t['q1']),\n np.abs(pypsa.transformers_t['q1']),\n np.abs(pypsa.generators_t['q']['Generator_slack'].rename(\n repr(network.mv_grid.station)))], axis=1).loc[timesteps, :]\n p0 = pd.concat(\n [np.abs(pypsa.lines_t['p0']),\n np.abs(pypsa.transformers_t['p0']),\n np.abs(pypsa.generators_t['p']['Generator_slack'].rename(\n repr(network.mv_grid.station)))], axis=1).loc[timesteps, :]\n p1 = pd.concat(\n [np.abs(pypsa.lines_t['p1']),\n np.abs(pypsa.transformers_t['p1']),\n np.abs(pypsa.generators_t['p']['Generator_slack'].rename(\n repr(network.mv_grid.station)))], axis=1).loc[timesteps, :]\n\n # determine apparent power and line endings/transformers' side\n s0 = np.hypot(p0, q0)\n s1 = np.hypot(p1, q1)\n\n # choose p and q from line ending with max(s0,s1)\n network.results.pfa_p = p0.where(s0 > s1, p1) * 1e3\n network.results.pfa_q = q0.where(s0 > s1, q1) * 1e3\n\n lines_bus0 = pypsa.lines['bus0'].to_dict()\n bus0_v_mag_pu = pypsa.buses_t['v_mag_pu'].T.loc[\n list(lines_bus0.values()), :].copy()\n bus0_v_mag_pu.index = list(lines_bus0.keys())\n\n lines_bus1 = pypsa.lines['bus1'].to_dict()\n bus1_v_mag_pu = pypsa.buses_t['v_mag_pu'].T.loc[\n list(lines_bus1.values()), :].copy()\n bus1_v_mag_pu.index = list(lines_bus1.keys())\n\n line_voltage_avg = 0.5 * (bus0_v_mag_pu.loc[:, timesteps] +\n bus1_v_mag_pu.loc[:, timesteps])\n\n # Get voltage levels at line (avg. 
of buses at both sides)\n network.results._i_res = \\\n network.results.s_res()[pypsa.lines_t['q0'].columns].truediv(\n pypsa.lines['v_nom'] * line_voltage_avg.T,\n axis='columns') * sqrt(3)\n # process results at nodes\n generators_names = [repr(g) for g in network.mv_grid.generators]\n generators_mapping = {v: k for k, v in\n pypsa.generators.loc[generators_names][\n 'bus'].to_dict().items()}\n storages_names = [repr(g) for g in\n network.mv_grid.graph.nodes_by_attribute('storage')]\n storages_mapping = {v: k for k, v in\n pypsa.storage_units.loc[storages_names][\n 'bus'].to_dict().items()}\n branch_t_names = [repr(bt) for bt in\n network.mv_grid.graph.nodes_by_attribute('branch_tee')]\n branch_t_mapping = {'_'.join(['Bus', v]): v for v in branch_t_names}\n mv_station_names = [repr(m) for m in\n network.mv_grid.graph.nodes_by_attribute('mv_station')]\n mv_station_mapping_sec = {'_'.join(['Bus', v]): v for v in\n mv_station_names}\n mv_switch_disconnector_names = [repr(sd) for sd in\n network.mv_grid.graph.nodes_by_attribute(\n 'mv_disconnecting_point')]\n mv_switch_disconnector_mapping = {'_'.join(['Bus', v]): v for v in\n mv_switch_disconnector_names}\n\n lv_station_mapping_pri = {\n '_'.join(['Bus', l.__repr__('mv')]): repr(l)\n for l in network.mv_grid.graph.nodes_by_attribute('lv_station')}\n lv_station_mapping_sec = {\n '_'.join(['Bus', l.__repr__('lv')]): repr(l)\n for l in network.mv_grid.graph.nodes_by_attribute('lv_station')}\n loads_names = [repr(lo) for lo in\n network.mv_grid.graph.nodes_by_attribute('load')]\n loads_mapping = {v: k for k, v in\n pypsa.loads.loc[loads_names][\n 'bus'].to_dict().items()}\n\n lv_generators_names = []\n lv_storages_names = []\n lv_branch_t_names = []\n lv_loads_names = []\n for lv_grid in network.mv_grid.lv_grids:\n lv_generators_names.extend([repr(g) for g in\n lv_grid.graph.nodes_by_attribute(\n 'generator')])\n lv_storages_names.extend([repr(g) for g in\n lv_grid.graph.nodes_by_attribute(\n 'storage')])\n 
lv_branch_t_names.extend([repr(bt) for bt in\n lv_grid.graph.nodes_by_attribute('branch_tee')])\n lv_loads_names.extend([repr(lo) for lo in\n lv_grid.graph.nodes_by_attribute('load')])\n\n lv_generators_mapping = {v: k for k, v in\n pypsa.generators.loc[lv_generators_names][\n 'bus'].to_dict().items()}\n lv_storages_mapping = {v: k for k, v in\n pypsa.storage_units.loc[lv_storages_names][\n 'bus'].to_dict().items()}\n lv_branch_t_mapping = {'_'.join(['Bus', v]): v for v in lv_branch_t_names}\n lv_loads_mapping = {v: k for k, v in pypsa.loads.loc[lv_loads_names][\n 'bus'].to_dict().items()}\n\n names_mapping = {\n **generators_mapping,\n **storages_mapping,\n **branch_t_mapping,\n **mv_station_mapping_sec,\n **lv_station_mapping_pri,\n **lv_station_mapping_sec,\n **mv_switch_disconnector_mapping,\n **loads_mapping,\n **lv_generators_mapping,\n **lv_storages_mapping,\n **lv_loads_mapping,\n **lv_branch_t_mapping\n }\n\n # write voltage levels obtained from power flow to results object\n pfa_v_mag_pu_mv = (pypsa.buses_t['v_mag_pu'][\n list(generators_mapping) +\n list(storages_mapping) +\n list(branch_t_mapping) +\n list(mv_station_mapping_sec) +\n list(mv_switch_disconnector_mapping) +\n list(lv_station_mapping_pri) +\n list(loads_mapping)]).rename(columns=names_mapping)\n pfa_v_mag_pu_lv = (pypsa.buses_t['v_mag_pu'][\n list(lv_station_mapping_sec) +\n list(lv_generators_mapping) +\n list(lv_storages_mapping) +\n list(lv_branch_t_mapping) +\n list(lv_loads_mapping)]).rename(columns=names_mapping)\n network.results.pfa_v_mag_pu = pd.concat(\n {'mv': pfa_v_mag_pu_mv.loc[timesteps, :],\n 'lv': pfa_v_mag_pu_lv.loc[timesteps, :]}, axis=1)", "response": "Processes the results of a single PFA and returns a new set of results."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update_pypsa_generator_import(network):\n\n # get topology and time series data\n if network.pypsa.edisgo_mode is None:\n mv_components = 
mv_to_pypsa(network)\n lv_components = lv_to_pypsa(network)\n components = combine_mv_and_lv(mv_components, lv_components)\n elif network.pypsa.edisgo_mode is 'mv':\n raise NotImplementedError\n elif network.pypsa.edisgo_mode is 'lv':\n raise NotImplementedError\n else:\n raise ValueError(\"Provide proper mode or leave it empty to export \"\n \"entire grid topology.\")\n\n # check topology\n _check_topology(components)\n\n # create power flow problem\n pypsa_network = PyPSANetwork()\n pypsa_network.edisgo_mode = network.pypsa.edisgo_mode\n pypsa_network.set_snapshots(network.pypsa.snapshots)\n\n # import grid topology to PyPSA network\n # buses are created first to avoid warnings\n pypsa_network.import_components_from_dataframe(components['Bus'], 'Bus')\n\n for k, comps in components.items():\n if k is not 'Bus' and not comps.empty:\n pypsa_network.import_components_from_dataframe(comps, k)\n\n # import time series to PyPSA network\n pypsa_network.generators_t.p_set = network.pypsa.generators_t.p_set\n pypsa_network.generators_t.q_set = network.pypsa.generators_t.q_set\n pypsa_network.loads_t.p_set = network.pypsa.loads_t.p_set\n pypsa_network.loads_t.q_set = network.pypsa.loads_t.q_set\n pypsa_network.storage_units_t.p_set = network.pypsa.storage_units_t.p_set\n pypsa_network.storage_units_t.q_set = network.pypsa.storage_units_t.q_set\n pypsa_network.buses_t.v_mag_pu_set = network.pypsa.buses_t.v_mag_pu_set\n\n network.pypsa = pypsa_network\n\n if len(list(components['Generator'].index.values)) > 1:\n update_pypsa_generator_timeseries(network)\n if list(components['Bus'].index.values):\n update_pypsa_bus_timeseries(network)\n if len(list(components['StorageUnit'].index.values)) > 0:\n update_pypsa_storage_timeseries(network)\n\n _check_integrity_of_pypsa(pypsa_network)", "response": "Update PyPSA generator import code for a given network."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef 
update_pypsa_grid_reinforcement(network, equipment_changes):\n\n # Step 1: Update transformers\n transformers = equipment_changes[\n equipment_changes['equipment'].apply(isinstance, args=(Transformer,))]\n # HV/MV transformers are excluded because it's not part of the PFA\n removed_transformers = [repr(_) for _ in\n transformers[transformers['change'] == 'removed'][\n 'equipment'].tolist() if _.voltage_op < 10]\n added_transformers = transformers[transformers['change'] == 'added']\n\n transformer = {'name': [],\n 'bus0': [],\n 'bus1': [],\n 'type': [],\n 'model': [],\n 'x': [],\n 'r': [],\n 's_nom': [],\n 'tap_ratio': []}\n\n for idx, row in added_transformers.iterrows():\n\n if isinstance(idx, LVStation):\n # we choose voltage of transformers' primary side\n v_base = idx.mv_grid.voltage_nom\n z_base = v_base ** 2 / row['equipment'].type.S_nom\n\n transformer['bus0'].append('_'.join(['Bus', idx.__repr__(\n side='mv')]))\n transformer['bus1'].append('_'.join(['Bus', idx.__repr__(\n side='lv')]))\n transformer['name'].append(repr(row['equipment']))\n transformer['type'].append(\"\")\n transformer['model'].append('pi')\n transformer['r'].append(row['equipment'].type.R / z_base)\n transformer['x'].append(row['equipment'].type.X / z_base)\n transformer['s_nom'].append(row['equipment'].type.S_nom / 1e3)\n transformer['tap_ratio'].append(1)\n\n network.pypsa.transformers.drop(removed_transformers, inplace=True)\n\n if transformer['name']:\n network.pypsa.import_components_from_dataframe(\n pd.DataFrame(transformer).set_index('name'), 'Transformer')\n\n # Step 2: Update lines\n lines = equipment_changes.loc[equipment_changes.index[\n equipment_changes.reset_index()['index'].apply(\n isinstance, args=(Line,))]]\n changed_lines = lines[lines['change'] == 'changed']\n\n lv_stations = network.mv_grid.graph.nodes_by_attribute('lv_station')\n\n omega = 2 * pi * 50\n\n for idx, row in changed_lines.iterrows():\n # Update line parameters\n network.pypsa.lines.loc[repr(idx), 'r'] = 
(\n idx.type['R'] / idx.quantity * idx.length)\n network.pypsa.lines.loc[repr(idx), 'x'] = (\n idx.type['L'] / 1e3 * omega / idx.quantity * idx.length)\n network.pypsa.lines.loc[repr(idx), 's_nom'] = (\n sqrt(3) * idx.type['I_max_th'] * idx.type[\n 'U_n'] * idx.quantity / 1e3)\n network.pypsa.lines.loc[repr(idx), 'length'] = idx.length\n\n # Update buses line is connected to\n adj_nodes = idx.grid.graph.nodes_from_line(idx)\n\n if adj_nodes[0] in lv_stations:\n side = 'lv' if isinstance(idx.grid, LVGrid) else 'mv'\n bus0 = '_'.join(['Bus', adj_nodes[0].__repr__(side=side)])\n else:\n bus0 = '_'.join(['Bus', repr(adj_nodes[0])])\n\n if adj_nodes[1] in lv_stations:\n side = 'lv' if isinstance(idx.grid, LVGrid) else 'mv'\n bus1 = '_'.join(['Bus', adj_nodes[1].__repr__(side=side)])\n else:\n bus1 = '_'.join(['Bus', repr(adj_nodes[1])])\n\n network.pypsa.lines.loc[repr(idx), 'bus0'] = bus0\n network.pypsa.lines.loc[repr(idx), 'bus1'] = bus1", "response": "Updates the data of the current eDisGo grid with the changes in equipment_changes."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the pypsa representation of the edisgo graph with the given storages and lines.", "response": "def update_pypsa_storage(pypsa, storages, storages_lines):\n \"\"\"\n Adds storages and their lines to pypsa representation of the edisgo graph.\n\n This function effects the following attributes of the pypsa network:\n components ('StorageUnit'), storage_units, storage_units_t (p_set, q_set),\n buses, lines\n\n Parameters\n -----------\n pypsa : :pypsa:`pypsa.Network<network>`\n storages : :obj:`list`\n List with storages of type :class:`~.grid.components.Storage` to add\n to pypsa network.\n storages_lines : :obj:`list`\n List with lines of type :class:`~.grid.components.Line` that connect\n storages to the grid.\n\n \"\"\"\n bus = {'name': [], 'v_nom': [], 'x': [], 'y': []}\n\n line = {'name': [],\n 'bus0': [],\n 'bus1': [],\n 'type': [],\n 'x': [],\n 
'r': [],\n 's_nom': [],\n 'length': []}\n\n storage = {\n 'name': [],\n 'bus': [],\n 'p_nom': [],\n 'state_of_charge_initial': [],\n 'efficiency_store': [],\n 'efficiency_dispatch': [],\n 'standing_loss': []}\n\n for s in storages:\n bus_name = '_'.join(['Bus', repr(s)])\n\n storage['name'].append(repr(s))\n storage['bus'].append(bus_name)\n storage['p_nom'].append(s.nominal_power / 1e3)\n storage['state_of_charge_initial'].append(s.soc_initial)\n storage['efficiency_store'].append(s.efficiency_in)\n storage['efficiency_dispatch'].append(s.efficiency_out)\n storage['standing_loss'].append(s.standing_loss)\n\n bus['name'].append(bus_name)\n bus['v_nom'].append(s.grid.voltage_nom)\n bus['x'].append(s.geom.x)\n bus['y'].append(s.geom.y)\n\n omega = 2 * pi * 50\n for l in storages_lines:\n line['name'].append(repr(l))\n\n adj_nodes = l.grid.graph.nodes_from_line(l)\n if isinstance(l.grid, LVGrid):\n if isinstance(adj_nodes[0], LVStation):\n line['bus0'].append(\n '_'.join(['Bus', adj_nodes[0].__repr__(side='lv')]))\n else:\n line['bus0'].append('_'.join(['Bus', repr(adj_nodes[0])]))\n\n if isinstance(adj_nodes[1], LVStation):\n line['bus1'].append(\n '_'.join(['Bus', adj_nodes[1].__repr__(side='lv')]))\n else:\n line['bus1'].append('_'.join(['Bus', repr(adj_nodes[1])]))\n else:\n if isinstance(adj_nodes[0], LVStation):\n line['bus0'].append(\n '_'.join(['Bus', adj_nodes[0].__repr__(side='mv')]))\n elif isinstance(adj_nodes[0], MVStation):\n line['bus0'].append(\n '_'.join(['Bus', adj_nodes[0].__repr__(side='lv')]))\n else:\n line['bus0'].append('_'.join(['Bus', repr(adj_nodes[0])]))\n\n if isinstance(adj_nodes[1], LVStation):\n line['bus1'].append(\n '_'.join(['Bus', adj_nodes[1].__repr__(side='mv')]))\n elif isinstance(adj_nodes[1], MVStation):\n line['bus1'].append(\n '_'.join(['Bus', adj_nodes[1].__repr__(side='lv')]))\n else:\n line['bus1'].append('_'.join(['Bus', repr(adj_nodes[1])]))\n\n line['type'].append(\"\")\n line['x'].append(l.type['L'] * omega / 1e3 * 
l.length)\n line['r'].append(l.type['R'] * l.length)\n line['s_nom'].append(\n sqrt(3) * l.type['I_max_th'] * l.type['U_n'] / 1e3)\n line['length'].append(l.length)\n\n # import new components to pypsa\n pypsa.import_components_from_dataframe(\n pd.DataFrame(bus).set_index('name'), 'Bus')\n pypsa.import_components_from_dataframe(\n pd.DataFrame(storage).set_index('name'), 'StorageUnit')\n pypsa.import_components_from_dataframe(\n pd.DataFrame(line).set_index('name'), 'Line')\n\n # import time series of storages and buses to pypsa\n timeseries_storage_p = pd.DataFrame()\n timeseries_storage_q = pd.DataFrame()\n for s in storages:\n timeseries_storage_p[repr(s)] = s.pypsa_timeseries('p').loc[\n pypsa.storage_units_t.p_set.index]\n timeseries_storage_q[repr(s)] = s.pypsa_timeseries('q').loc[\n pypsa.storage_units_t.q_set.index]\n\n import_series_from_dataframe(pypsa, timeseries_storage_p,\n 'StorageUnit', 'p_set')\n import_series_from_dataframe(pypsa, timeseries_storage_q,\n 'StorageUnit', 'q_set')"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nupdating load generator storage and bus time series in pypsa representation.", "response": "def update_pypsa_timeseries(network, loads_to_update=None,\n generators_to_update=None, storages_to_update=None,\n timesteps=None):\n \"\"\"\n Updates load, generator, storage and bus time series in pypsa network.\n\n See functions :func:`update_pypsa_load_timeseries`,\n :func:`update_pypsa_generator_timeseries`,\n :func:`update_pypsa_storage_timeseries`, and\n :func:`update_pypsa_bus_timeseries` for more information.\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n loads_to_update : :obj:`list`, optional\n List with all loads (of type :class:`~.grid.components.Load`) that need\n to be updated. If None all loads are updated depending on mode. 
See\n :meth:`~.tools.pypsa_io.to_pypsa` for more information.\n generators_to_update : :obj:`list`, optional\n List with all generators (of type :class:`~.grid.components.Generator`)\n that need to be updated. If None all generators are updated depending\n on mode. See :meth:`~.tools.pypsa_io.to_pypsa` for more information.\n storages_to_update : :obj:`list`, optional\n List with all storages (of type :class:`~.grid.components.Storage`)\n that need to be updated. If None all storages are updated depending on\n mode. See :meth:`~.tools.pypsa_io.to_pypsa` for more information.\n timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Timesteps specifies which time steps of the load time series to export\n to pypsa representation and use in power flow analysis.\n If None all time steps currently existing in pypsa representation are\n updated. If not None current time steps are overwritten by given\n time steps. Default: None.\n\n \"\"\"\n update_pypsa_load_timeseries(\n network, loads_to_update=loads_to_update, timesteps=timesteps)\n update_pypsa_generator_timeseries(\n network, generators_to_update=generators_to_update,\n timesteps=timesteps)\n update_pypsa_storage_timeseries(\n network, storages_to_update=storages_to_update, timesteps=timesteps)\n update_pypsa_bus_timeseries(network, timesteps=timesteps)\n # update pypsa snapshots\n if timesteps is None:\n timesteps = network.pypsa.buses_t.v_mag_pu_set.index\n network.pypsa.set_snapshots(timesteps)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nupdating load time series in pypsa representation.", "response": "def update_pypsa_load_timeseries(network, loads_to_update=None,\n timesteps=None):\n \"\"\"\n Updates load time series in pypsa representation.\n\n This function overwrites p_set and q_set of loads_t attribute of\n pypsa network.\n Be aware that if you call this function with `timesteps` and thus overwrite\n current time steps it may 
lead to inconsistencies in the pypsa network\n since only load time series are updated but none of the other time series\n or the snapshots attribute of the pypsa network. Use the function\n :func:`update_pypsa_timeseries` to change the time steps you want to\n analyse in the power flow analysis.\n This function will also raise an error when a load that is currently not\n in the pypsa representation is added.\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n loads_to_update : :obj:`list`, optional\n List with all loads (of type :class:`~.grid.components.Load`) that need\n to be updated. If None all loads are updated depending on mode. See\n :meth:`~.tools.pypsa_io.to_pypsa` for more information.\n timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Timesteps specifies which time steps of the load time series to export\n to pypsa representation. If None all time steps currently existing in\n pypsa representation are updated. If not None current time steps are\n overwritten by given time steps. Default: None.\n\n \"\"\"\n _update_pypsa_timeseries_by_type(\n network, type='load', components_to_update=loads_to_update,\n timesteps=timesteps)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef update_pypsa_generator_timeseries(network, generators_to_update=None,\n timesteps=None):\n \"\"\"\n Updates generator time series in pypsa representation.\n\n This function overwrites p_set and q_set of generators_t attribute of\n pypsa network.\n Be aware that if you call this function with `timesteps` and thus overwrite\n current time steps it may lead to inconsistencies in the pypsa network\n since only generator time series are updated but none of the other time\n series or the snapshots attribute of the pypsa network. 
Use the function\n :func:`update_pypsa_timeseries` to change the time steps you want to\n analyse in the power flow analysis.\n This function will also raise an error when a generator that is currently\n not in the pypsa representation is added.\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n generators_to_update : :obj:`list`, optional\n List with all generators (of type :class:`~.grid.components.Generator`)\n that need to be updated. If None all generators are updated depending\n on mode. See :meth:`~.tools.pypsa_io.to_pypsa` for more information.\n timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Timesteps specifies which time steps of the generator time series to\n export to pypsa representation. If None all time steps currently\n existing in pypsa representation are updated. If not None current time\n steps are overwritten by given time steps. Default: None.\n\n \"\"\"\n _update_pypsa_timeseries_by_type(\n network, type='generator', components_to_update=generators_to_update,\n timesteps=timesteps)", "response": "Updates generator time series in pypsa representation."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef update_pypsa_storage_timeseries(network, storages_to_update=None,\n timesteps=None):\n \"\"\"\n Updates storage time series in pypsa representation.\n\n This function overwrites p_set and q_set of storage_unit_t attribute of\n pypsa network.\n Be aware that if you call this function with `timesteps` and thus overwrite\n current time steps it may lead to inconsistencies in the pypsa network\n since only storage time series are updated but none of the other time\n series or the snapshots attribute of the pypsa network. 
Use the function\n :func:`update_pypsa_timeseries` to change the time steps you want to\n analyse in the power flow analysis.\n This function will also raise an error when a storage that is currently\n not in the pypsa representation is added.\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n storages_to_update : :obj:`list`, optional\n List with all storages (of type :class:`~.grid.components.Storage`)\n that need to be updated. If None all storages are updated depending on\n mode. See :meth:`~.tools.pypsa_io.to_pypsa` for more information.\n timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Timesteps specifies which time steps of the storage time series to\n export to pypsa representation. If None all time steps currently\n existing in pypsa representation are updated. If not None current time\n steps are overwritten by given time steps. Default: None.\n\n \"\"\"\n _update_pypsa_timeseries_by_type(\n network, type='storage', components_to_update=storages_to_update,\n timesteps=timesteps)", "response": "Updates storage time series in pypsa representation."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nupdates the time series of the buses voltage in pypsa representation.", "response": "def update_pypsa_bus_timeseries(network, timesteps=None):\n \"\"\"\n Updates buses voltage time series in pypsa representation.\n\n This function overwrites v_mag_pu_set of buses_t attribute of\n pypsa network.\n Be aware that if you call this function with `timesteps` and thus overwrite\n current time steps it may lead to inconsistencies in the pypsa network\n since only bus time series are updated but none of the other time\n series or the snapshots attribute of the pypsa network. 
Use the function\n :func:`update_pypsa_timeseries` to change the time steps you want to\n analyse in the power flow analysis.\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Timesteps specifies which time steps of the time series to\n export to pypsa representation. If None all time steps currently\n existing in pypsa representation are updated. If not None current\n time steps are overwritten by given time steps. Default: None.\n\n \"\"\"\n if timesteps is None:\n timesteps = network.pypsa.buses_t.v_mag_pu_set.index\n # check if timesteps is array-like, otherwise convert to list\n if not hasattr(timesteps, \"__len__\"):\n timesteps = [timesteps]\n buses = network.pypsa.buses.index\n v_mag_pu_set = _pypsa_bus_timeseries(network, buses, timesteps)\n network.pypsa.buses_t.v_mag_pu_set = v_mag_pu_set"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _update_pypsa_timeseries_by_type(network, type, components_to_update=None,\n timesteps=None):\n \"\"\"\n Updates time series of specified component in pypsa representation.\n\n Be aware that if you call this function with `timesteps` and thus overwrite\n current time steps it may lead to inconsistencies in the pypsa network\n since only time series of the specified component are updated but none of\n the other time series or the snapshots attribute of the pypsa network.\n Use the function :func:`update_pypsa_timeseries` to change the time steps\n you want to analyse in the power flow analysis.\n This function will raise an error when a component that is currently not in\n the pypsa representation is added.\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n type : :obj:`str`\n Type specifies the type of component (load, generator or storage)\n that is 
updated.\n components_to_update : :obj:`list`, optional\n List with all components (either of type\n :class:`~.grid.components.Load`, :class:`~.grid.components.Generator`\n or :class:`~.grid.components.Storage`) that need to be updated.\n Possible options are 'load', 'generator' and 'storage'.\n Components in list must all be of the same type. If None all components\n specified by `type` are updated depending on the mode. See\n :meth:`~.tools.pypsa_io.to_pypsa` for more information on mode.\n timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Timesteps specifies which time steps of the time series to\n export to pypsa representation. If None all time steps currently\n existing in pypsa representation are updated. If not None current\n time steps are overwritten by given time steps. Default: None.\n\n \"\"\"\n\n # pypsa dataframe to update\n if type == 'load':\n pypsa_ts = network.pypsa.loads_t\n components_in_pypsa = network.pypsa.loads.index\n elif type == 'generator':\n pypsa_ts = network.pypsa.generators_t\n components_in_pypsa = network.pypsa.generators.index\n elif type == 'storage':\n pypsa_ts = network.pypsa.storage_units_t\n components_in_pypsa = network.pypsa.storage_units.index\n else:\n raise ValueError('{} is not a valid type.'.format(type))\n\n # MV and LV loads\n if network.pypsa.edisgo_mode is None:\n # if no components are specified get all components of specified type\n # in whole grid\n if components_to_update is None:\n grids = [network.mv_grid] + list(network.mv_grid.lv_grids)\n if type == 'generator':\n components_to_update = list(itertools.chain(\n *[grid.generators for grid in grids]))\n else:\n components_to_update = list(itertools.chain(\n *[grid.graph.nodes_by_attribute(type) for grid in grids]))\n # if no time steps are specified update all time steps currently\n # contained in pypsa representation\n if timesteps is None:\n timesteps = pypsa_ts.p_set.index\n # check if timesteps is 
array-like, otherwise convert to list\n # (necessary to avoid getting a scalar using .loc)\n if not hasattr(timesteps, \"__len__\"):\n timesteps = [timesteps]\n\n p_set = pd.DataFrame()\n q_set = pd.DataFrame()\n for comp in components_to_update:\n if repr(comp) in components_in_pypsa:\n p_set[repr(comp)] = comp.pypsa_timeseries('p').loc[timesteps]\n q_set[repr(comp)] = comp.pypsa_timeseries('q').loc[timesteps]\n else:\n raise KeyError(\"Tried to update component {} but could not \"\n \"find it in pypsa network.\".format(comp))\n # overwrite pypsa time series\n pypsa_ts.p_set = p_set\n pypsa_ts.q_set = q_set\n\n # MV and aggregated LV loads\n elif network.pypsa.edisgo_mode is 'mv':\n raise NotImplementedError\n\n # LV only\n elif network.pypsa.edisgo_mode is 'lv':\n raise NotImplementedError", "response": "Updates time series of specified type in pypsa representation."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef connect_mv_generators(network):\n\n # get params from config\n buffer_radius = int(network.config[\n 'grid_connection']['conn_buffer_radius'])\n buffer_radius_inc = int(network.config[\n 'grid_connection']['conn_buffer_radius_inc'])\n\n # get standard equipment\n std_line_type = network.equipment_data['mv_cables'].loc[\n network.config['grid_expansion_standard_equipment']['mv_line']]\n\n for geno in sorted(network.mv_grid.graph.nodes_by_attribute('generator'),\n key=lambda _: repr(_)):\n if nx.is_isolate(network.mv_grid.graph, geno):\n\n # ===== voltage level 4: generator has to be connected to MV station =====\n if geno.v_level == 4:\n\n line_length = calc_geo_dist_vincenty(network=network,\n node_source=geno,\n node_target=network.mv_grid.station)\n\n line = Line(id=random.randint(10**8, 10**9),\n type=std_line_type,\n kind='cable',\n quantity=1,\n length=line_length / 1e3,\n grid=network.mv_grid)\n\n network.mv_grid.graph.add_edge(network.mv_grid.station,\n geno,\n line=line,\n 
type='line')\n\n # add line to equipment changes to track costs\n _add_cable_to_equipment_changes(network=network,\n line=line)\n\n # ===== voltage level 5: generator has to be connected to MV grid (next-neighbor) =====\n elif geno.v_level == 5:\n\n # get branches within a the predefined radius `generator_buffer_radius`\n branches = calc_geo_lines_in_buffer(network=network,\n node=geno,\n grid=network.mv_grid,\n radius=buffer_radius,\n radius_inc=buffer_radius_inc)\n\n # calc distance between generator and grid's lines -> find nearest line\n conn_objects_min_stack = _find_nearest_conn_objects(network=network,\n node=geno,\n branches=branches)\n\n # connect!\n # go through the stack (from nearest to most far connection target object)\n generator_connected = False\n for dist_min_obj in conn_objects_min_stack:\n target_obj_result = _connect_mv_node(network=network,\n node=geno,\n target_obj=dist_min_obj)\n\n if target_obj_result is not None:\n generator_connected = True\n break\n\n if not generator_connected:\n logger.debug(\n 'Generator {0} could not be connected, try to '\n 'increase the parameter `conn_buffer_radius` in '\n 'config file `config_grid.cfg` to gain more possible '\n 'connection points.'.format(geno))", "response": "Connect MV generators to existing grids."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef connect_lv_generators(network, allow_multiple_genos_per_load=True):\n\n # get predefined random seed and initialize random generator\n seed = int(network.config['grid_connection']['random_seed'])\n #random.seed(a=seed)\n random.seed(a=1234)\n # ToDo: Switch back to 'seed' as soon as line ids are finished, #58\n\n # get standard equipment\n std_line_type = network.equipment_data['lv_cables'].loc[\n network.config['grid_expansion_standard_equipment']['lv_line']]\n std_line_kind = 'cable'\n\n # # TEMP: DEBUG STUFF\n # lv_grid_stats = pd.DataFrame(columns=('lv_grid',\n # 'load_count',\n 
# 'geno_count',\n # 'more_genos_than_loads')\n # )\n\n # iterate over all LV grids\n for lv_grid in network.mv_grid.lv_grids:\n\n lv_loads = lv_grid.graph.nodes_by_attribute('load')\n\n # counter for genos in v_level 7\n log_geno_count_vlevel7 = 0\n\n # generate random list (without replacement => unique elements)\n # of loads (residential) to connect genos (P <= 30kW) to.\n lv_loads_res = sorted([lv_load for lv_load in lv_loads\n if 'residential' in list(lv_load.consumption.keys())],\n key=lambda _: repr(_))\n\n if len(lv_loads_res) > 0:\n lv_loads_res_rnd = set(random.sample(lv_loads_res,\n len(lv_loads_res)))\n else:\n lv_loads_res_rnd = None\n\n # generate random list (without replacement => unique elements)\n # of loads (retail, industrial, agricultural) to connect genos\n # (30kW < P <= 100kW) to.\n lv_loads_ria = sorted([lv_load for lv_load in lv_loads\n if any([_ in list(lv_load.consumption.keys())\n for _ in ['retail', 'industrial', 'agricultural']])],\n key=lambda _: repr(_))\n\n if len(lv_loads_ria) > 0:\n lv_loads_ria_rnd = set(random.sample(lv_loads_ria,\n len(lv_loads_ria)))\n else:\n lv_loads_ria_rnd = None\n\n for geno in sorted(lv_grid.graph.nodes_by_attribute('generator'), key=lambda x: repr(x)):\n if nx.is_isolate(lv_grid.graph, geno):\n\n lv_station = lv_grid.station\n\n # generator is of v_level 6 -> connect to LV station\n if geno.v_level == 6:\n\n line_length = calc_geo_dist_vincenty(network=network,\n node_source=geno,\n node_target=lv_station)\n\n line = Line(id=random.randint(10 ** 8, 10 ** 9),\n length=line_length / 1e3,\n quantity=1,\n kind=std_line_kind,\n type=std_line_type,\n grid=lv_grid)\n\n lv_grid.graph.add_edge(geno,\n lv_station,\n line=line,\n type='line')\n\n # add line to equipment changes to track costs\n _add_cable_to_equipment_changes(network=network,\n line=line)\n\n # generator is of v_level 7 -> assign geno to load\n elif geno.v_level == 7:\n # counter for genos in v_level 7\n log_geno_count_vlevel7 += 1\n\n # connect 
genos with P <= 30kW to residential loads, if available\n if (geno.nominal_capacity <= 30) and (lv_loads_res_rnd is not None):\n if len(lv_loads_res_rnd) > 0:\n lv_load = lv_loads_res_rnd.pop()\n # if random load list is empty, create new one\n else:\n lv_loads_res_rnd = set(random.sample(lv_loads_res,\n len(lv_loads_res))\n )\n lv_load = lv_loads_res_rnd.pop()\n\n # get cable distributor of building\n lv_conn_target = lv_grid.graph.neighbors(lv_load)[0]\n\n if not allow_multiple_genos_per_load:\n # check if there's an existing generator connected to the load\n # if so, select next load. If no load is available, connect to station.\n while any([isinstance(_, Generator)\n for _ in lv_grid.graph.neighbors(\n lv_grid.graph.neighbors(lv_load)[0])]):\n if len(lv_loads_res_rnd) > 0:\n lv_load = lv_loads_res_rnd.pop()\n\n # get cable distributor of building\n lv_conn_target = lv_grid.graph.neighbors(lv_load)[0]\n else:\n lv_conn_target = lv_grid.station\n\n logger.debug(\n 'No valid conn. target found for {}. '\n 'Connected to {}.'.format(\n repr(geno),\n repr(lv_conn_target)\n )\n )\n break\n\n # connect genos with 30kW <= P <= 100kW to residential loads\n # to retail, industrial, agricultural loads, if available\n elif (geno.nominal_capacity > 30) and (lv_loads_ria_rnd is not None):\n if len(lv_loads_ria_rnd) > 0:\n lv_load = lv_loads_ria_rnd.pop()\n # if random load list is empty, create new one\n else:\n lv_loads_ria_rnd = set(random.sample(lv_loads_ria,\n len(lv_loads_ria))\n )\n lv_load = lv_loads_ria_rnd.pop()\n\n # get cable distributor of building\n lv_conn_target = lv_grid.graph.neighbors(lv_load)[0]\n\n if not allow_multiple_genos_per_load:\n # check if there's an existing generator connected to the load\n # if so, select next load. 
If no load is available, connect to station.\n while any([isinstance(_, Generator)\n for _ in lv_grid.graph.neighbors(\n lv_grid.graph.neighbors(lv_load)[0])]):\n if len(lv_loads_ria_rnd) > 0:\n lv_load = lv_loads_ria_rnd.pop()\n\n # get cable distributor of building\n lv_conn_target = lv_grid.graph.neighbors(lv_load)[0]\n else:\n lv_conn_target = lv_grid.station\n\n logger.debug(\n 'No valid conn. target found for {}. '\n 'Connected to {}.'.format(\n repr(geno),\n repr(lv_conn_target)\n )\n )\n break\n\n # fallback: connect to station\n else:\n lv_conn_target = lv_grid.station\n\n logger.debug(\n 'No valid conn. target found for {}. '\n 'Connected to {}.'.format(\n repr(geno),\n repr(lv_conn_target)\n )\n )\n\n line = Line(id=random.randint(10 ** 8, 10 ** 9),\n length=1e-3,\n quantity=1,\n kind=std_line_kind,\n type=std_line_type,\n grid=lv_grid)\n\n lv_grid.graph.add_edge(geno,\n lv_station,\n line=line,\n type='line')\n\n # add line to equipment changes to track costs\n _add_cable_to_equipment_changes(network=network,\n line=line)\n\n # warn if there're more genos than loads in LV grid\n if log_geno_count_vlevel7 > len(lv_loads):\n logger.debug('The count of newly connected generators in voltage level 7 ({}) '\n 'exceeds the count of loads ({}) in LV grid {}.'\n .format(str(log_geno_count_vlevel7),\n str(len(lv_loads)),\n repr(lv_grid)\n )\n )", "response": "Connect LV generators to existing grids."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _add_cable_to_equipment_changes(network, line):\n network.results.equipment_changes = \\\n network.results.equipment_changes.append(\n pd.DataFrame(\n {'iteration_step': [0],\n 'change': ['added'],\n 'equipment': [line.type.name],\n 'quantity': [1]\n },\n index=[line]\n )\n )", "response": "Adds cable to the equipment changes dataframe"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndeletes a cable from the equipment 
changes if existing", "response": "def _del_cable_from_equipment_changes(network, line):\n \"\"\"Delete cable from the equipment changes if existing\n\n This is needed if a cable was already added to network.results.equipment_changes\n but another node is connected later to this cable. Therefore, the cable needs to\n be split which changes the id (one cable id -> 2 new cable ids).\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n The eDisGo container object\n line : class:`~.grid.components.Line`\n Line instance which is to be deleted\n \"\"\"\n if line in network.results.equipment_changes.index:\n network.results.equipment_changes = \\\n network.results.equipment_changes.drop(line)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _find_nearest_conn_objects(network, node, branches):\n\n # threshold which is used to determine if 2 objects are on the same position (see below for details on usage)\n conn_diff_tolerance = network.config['grid_connection'][\n 'conn_diff_tolerance']\n\n conn_objects_min_stack = []\n\n node_shp = transform(proj2equidistant(network), node.geom)\n\n for branch in branches:\n stations = branch['adj_nodes']\n\n # create shapely objects for 2 stations and line between them, transform to equidistant CRS\n station1_shp = transform(proj2equidistant(network), stations[0].geom)\n station2_shp = transform(proj2equidistant(network), stations[1].geom)\n line_shp = LineString([station1_shp, station2_shp])\n\n # create dict with DING0 objects (line & 2 adjacent stations), shapely objects and distances\n conn_objects = {'s1': {'obj': stations[0],\n 'shp': station1_shp,\n 'dist': node_shp.distance(station1_shp) * 0.999},\n 's2': {'obj': stations[1],\n 'shp': station2_shp,\n 'dist': node_shp.distance(station2_shp) * 0.999},\n 'b': {'obj': branch,\n 'shp': line_shp,\n 'dist': node_shp.distance(line_shp)}}\n\n # Remove branch from the dict of possible conn. 
objects if it is too close to a node.\n # Without this solution, the target object is not unique for different runs (and so\n # were the topology)\n if (\n abs(conn_objects['s1']['dist'] - conn_objects['b']['dist']) < conn_diff_tolerance\n or abs(conn_objects['s2']['dist'] - conn_objects['b']['dist']) < conn_diff_tolerance\n ):\n del conn_objects['b']\n\n # remove MV station as possible connection point\n if isinstance(conn_objects['s1']['obj'], MVStation):\n del conn_objects['s1']\n elif isinstance(conn_objects['s2']['obj'], MVStation):\n del conn_objects['s2']\n\n # find nearest connection point on given triple dict (2 branch-adjacent stations + cable dist. on line)\n conn_objects_min = min(conn_objects.values(), key=lambda v: v['dist'])\n\n conn_objects_min_stack.append(conn_objects_min)\n\n # sort all objects by distance from node\n conn_objects_min_stack = [_ for _ in sorted(conn_objects_min_stack, key=lambda x: x['dist'])]\n\n return conn_objects_min_stack", "response": "Searches all branches for nearest possible connection objects."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _connect_mv_node(network, node, target_obj):\n\n # get standard equipment\n std_line_type = network.equipment_data['mv_cables'].loc[\n network.config['grid_expansion_standard_equipment']['mv_line']]\n std_line_kind = 'cable'\n\n target_obj_result = None\n\n node_shp = transform(proj2equidistant(network), node.geom)\n\n # MV line is nearest connection point\n if isinstance(target_obj['shp'], LineString):\n\n adj_node1 = target_obj['obj']['adj_nodes'][0]\n adj_node2 = target_obj['obj']['adj_nodes'][1]\n\n # find nearest point on MV line\n conn_point_shp = target_obj['shp'].interpolate(target_obj['shp'].project(node_shp))\n conn_point_shp = transform(proj2conformal(network), conn_point_shp)\n\n line = network.mv_grid.graph.edge[adj_node1][adj_node2]\n\n # target MV line does currently not connect a load area of type 
aggregated\n if not line['type'] == 'line_aggr':\n\n # create branch tee and add it to grid\n branch_tee = BranchTee(geom=conn_point_shp,\n grid=network.mv_grid,\n in_building=False)\n network.mv_grid.graph.add_node(branch_tee,\n type='branch_tee')\n\n # split old branch into 2 segments\n # (delete old branch and create 2 new ones along cable_dist)\n # ==========================================================\n\n # backup kind and type of branch\n line_kind = line['line'].kind\n line_type = line['line'].type\n\n # remove line from graph\n network.mv_grid.graph.remove_edge(adj_node1, adj_node2)\n\n # delete line from equipment changes if existing\n _del_cable_from_equipment_changes(network=network,\n line=line['line'])\n\n line_length = calc_geo_dist_vincenty(network=network,\n node_source=adj_node1,\n node_target=branch_tee)\n line = Line(id=random.randint(10 ** 8, 10 ** 9),\n length=line_length / 1e3,\n quantity=1,\n kind=line_kind,\n type=line_type,\n grid=network.mv_grid)\n\n network.mv_grid.graph.add_edge(adj_node1,\n branch_tee,\n line=line,\n type='line')\n\n # add line to equipment changes to track costs\n _add_cable_to_equipment_changes(network=network,\n line=line)\n\n line_length = calc_geo_dist_vincenty(network=network,\n node_source=adj_node2,\n node_target=branch_tee)\n line = Line(id=random.randint(10 ** 8, 10 ** 9),\n length=line_length / 1e3,\n quantity=1,\n kind=line_kind,\n type=line_type,\n grid=network.mv_grid)\n\n network.mv_grid.graph.add_edge(adj_node2,\n branch_tee,\n line=line,\n type='line')\n\n # add line to equipment changes to track costs\n _add_cable_to_equipment_changes(network=network,\n line=line)\n\n # add new branch for new node (node to branch tee)\n # ================================================\n line_length = calc_geo_dist_vincenty(network=network,\n node_source=node,\n node_target=branch_tee)\n line = Line(id=random.randint(10 ** 8, 10 ** 9),\n length=line_length / 1e3,\n quantity=1,\n kind=std_line_kind,\n 
type=std_line_type,\n grid=network.mv_grid)\n\n network.mv_grid.graph.add_edge(node,\n branch_tee,\n line=line,\n type='line')\n\n # add line to equipment changes to track costs\n _add_cable_to_equipment_changes(network=network,\n line=line)\n\n target_obj_result = branch_tee\n\n # node ist nearest connection point\n else:\n\n # what kind of node is to be connected? (which type is node of?)\n # LVStation: Connect to LVStation or BranchTee\n # Generator: Connect to LVStation, BranchTee or Generator\n if isinstance(node, LVStation):\n valid_conn_objects = (LVStation, BranchTee)\n elif isinstance(node, Generator):\n valid_conn_objects = (LVStation, BranchTee, Generator)\n else:\n raise ValueError('Oops, the node you are trying to connect is not a valid connection object')\n\n # if target is generator or Load, check if it is aggregated (=> connection not allowed)\n if isinstance(target_obj['obj'], (Generator, Load)):\n target_is_aggregated = any([_ for _ in network.mv_grid.graph.edge[target_obj['obj']].values()\n if _['type'] == 'line_aggr'])\n else:\n target_is_aggregated = False\n\n # target node is not a load area of type aggregated\n if isinstance(target_obj['obj'], valid_conn_objects) and not target_is_aggregated:\n\n # add new branch for satellite (station to station)\n line_length = calc_geo_dist_vincenty(network=network,\n node_source=node,\n node_target=target_obj['obj'])\n\n line = Line(id=random.randint(10 ** 8, 10 ** 9),\n type=std_line_type,\n kind=std_line_kind,\n quantity=1,\n length=line_length / 1e3,\n grid=network.mv_grid)\n\n network.mv_grid.graph.add_edge(node,\n target_obj['obj'],\n line=line,\n type='line')\n\n # add line to equipment changes to track costs\n _add_cable_to_equipment_changes(network=network,\n line=line)\n\n target_obj_result = target_obj['obj']\n\n return target_obj_result", "response": "Connects MV node to target object in MV grid"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef 
_get_griddistrict(ding0_filepath):\n grid_district = os.path.basename(ding0_filepath)\n grid_district_search = re.search('[_]+\\d+', grid_district)\n if grid_district_search:\n grid_district = int(grid_district_search.group(0)[2:])\n return grid_district\n else:\n raise (KeyError('Grid District not found in '.format(grid_district)))", "response": "Get the grid district number from the ding0 data file path"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nrun basic EDisGo algorithm.", "response": "def run_edisgo_basic(ding0_filepath,\n generator_scenario=None,\n analysis='worst-case',\n *edisgo_grid):\n \"\"\"\n Analyze edisgo grid extension cost as reference scenario\n\n Parameters\n ----------\n ding0_filepath : str\n Path to ding0 data ending typically\n `/path/to/ding0_data/\"ding0_grids__\" + str(``grid_district``) + \".xxx\"`\n\n analysis : str\n Either 'worst-case' or 'timeseries'\n\n generator_scenario : None or :obj:`str`\n If provided defines which scenario of future generator park to use\n and invokes import of these generators. 
Possible options are 'nep2035'\n and 'ego100'.\n\n Returns\n -------\n edisgo_grid : :class:`~.grid.network.EDisGo`\n eDisGo network container\n costs : :pandas:`pandas.Dataframe<dataframe>`\n Cost of grid extension\n grid_issues : dict\n Grids resulting in an error including error message\n\n \"\"\"\n\n grid_district = _get_griddistrict(ding0_filepath)\n\n grid_issues = {}\n\n logging.info('Grid expansion for MV grid district {}'.format(grid_district))\n\n if edisgo_grid: # if an edisgo_grid is passed in arg then ignore everything else\n edisgo_grid = edisgo_grid[0]\n else:\n try:\n if 'worst-case' in analysis:\n edisgo_grid = EDisGo(ding0_grid=ding0_filepath,\n worst_case_analysis=analysis)\n elif 'timeseries' in analysis:\n edisgo_grid = EDisGo(ding0_grid=ding0_filepath,\n timeseries_generation_fluctuating='oedb',\n timeseries_load='demandlib')\n except FileNotFoundError as e:\n return None, pd.DataFrame(), {'grid': grid_district, 'msg': str(e)}\n\n # Import generators\n if generator_scenario:\n logging.info('Grid expansion for scenario \\'{}\\'.'.format(generator_scenario))\n edisgo_grid.import_generators(generator_scenario=generator_scenario)\n else:\n logging.info('Grid expansion with no generator imports based on scenario')\n\n try:\n # Do grid reinforcement\n edisgo_grid.reinforce()\n\n # Get costs\n costs_grouped = \\\n edisgo_grid.network.results.grid_expansion_costs.groupby(\n ['type']).sum()\n costs = pd.DataFrame(costs_grouped.values,\n columns=costs_grouped.columns,\n index=[[edisgo_grid.network.id] * len(costs_grouped),\n costs_grouped.index]).reset_index()\n costs.rename(columns={'level_0': 'grid'}, inplace=True)\n\n grid_issues['grid'] = None\n grid_issues['msg'] = None\n\n logging.info('SUCCESS!')\n except MaximumIterationError:\n grid_issues['grid'] = edisgo_grid.network.id\n grid_issues['msg'] = str(edisgo_grid.network.results.unresolved_issues)\n costs = pd.DataFrame()\n logging.warning('Unresolved issues left after grid expansion.')\n except 
Exception as e:\n grid_issues['grid'] = edisgo_grid.network.id\n grid_issues['msg'] = repr(e)\n costs = pd.DataFrame()\n logging.exception()\n\n return edisgo_grid, costs, grid_issues"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef run_edisgo_twice(run_args):\n\n # base case with no generator import\n edisgo_grid, \\\n costs_before_geno_import, \\\n grid_issues_before_geno_import = run_edisgo_basic(*run_args)\n\n if edisgo_grid:\n # clear the pypsa object and results from edisgo_grid\n edisgo_grid.network.results = Results(edisgo_grid.network)\n edisgo_grid.network.pypsa = None\n\n # case after generator import\n # run_args = [ding0_filename]\n # run_args.extend(run_args_opt)\n run_args.append(edisgo_grid)\n\n _, costs, \\\n grid_issues = run_edisgo_basic(*run_args)\n\n return costs_before_geno_import, grid_issues_before_geno_import, \\\n costs, grid_issues\n else:\n return costs_before_geno_import, grid_issues_before_geno_import, \\\n costs_before_geno_import, grid_issues_before_geno_import", "response": "Run the EDI - GO on same grid and return the results."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nrunning the eDisGo parallelization of a list of Ding0 grid files in parallel and return the cost of each grid imported.", "response": "def run_edisgo_pool(ding0_file_list, run_args_opt,\n workers=mp.cpu_count(), worker_lifetime=1):\n \"\"\"\n Use python multiprocessing toolbox for parallelization\n\n Several grids are analyzed in parallel.\n\n Parameters\n ----------\n ding0_file_list : list\n Ding0 grid data file names\n run_args_opt : list\n eDisGo options, see :func:`run_edisgo_basic` and\n :func:`run_edisgo_twice`\n workers: int\n Number of parallel process\n worker_lifetime : int\n Bunch of grids sequentially analyzed by a worker\n\n Returns\n -------\n all_costs_before_geno_import : list\n Grid extension cost before grid connection of new generators\n 
all_grid_issues_before_geno_import : list\n Remaining overloading or over-voltage issues in grid\n all_costs : list\n Grid extension cost due to grid connection of new generators\n all_grid_issues : list\n Remaining overloading or over-voltage issues in grid\n \"\"\"\n def collect_pool_results(result):\n results.append(result)\n\n results = []\n\n pool = mp.Pool(workers,\n maxtasksperchild=worker_lifetime)\n\n for file in ding0_file_list:\n edisgo_args = [file] + run_args_opt\n pool.apply_async(func=run_edisgo_twice,\n args=(edisgo_args,),\n callback=collect_pool_results)\n\n pool.close()\n pool.join()\n\n # process results data\n all_costs_before_geno_import = [r[0] for r in results]\n all_grid_issues_before_geno_import = [r[1] for r in results]\n all_costs = [r[2] for r in results]\n all_grid_issues = [r[3] for r in results]\n\n return all_costs_before_geno_import, all_grid_issues_before_geno_import, \\\n all_costs, all_grid_issues"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef run_edisgo_pool_flexible(ding0_id_list, func, func_arguments,\n workers=mp2.cpu_count(), worker_lifetime=1):\n \"\"\"\n Use python multiprocessing toolbox for parallelization\n\n Several grids are analyzed in parallel based on your custom function that\n defines the specific application of eDisGo.\n\n Parameters\n ----------\n ding0_id_list : list of int\n List of ding0 grid data IDs (also known as HV/MV substation IDs)\n func : any function\n Your custom function that shall be parallelized\n func_arguments : tuple\n Arguments to custom function ``func``\n workers: int\n Number of parallel process\n worker_lifetime : int\n Bunch of grids sequentially analyzed by a worker\n\n Notes\n -----\n Please note, the following requirements for the custom function which is to\n be executed in parallel\n\n #. It must return an instance of the type :class:`~.edisgo.EDisGo`.\n #. 
The first positional argument is the MV grid district id (as int). It is\n prepended to the tuple of arguments ``func_arguments``\n\n\n Returns\n -------\n containers : dict of :class:`~.edisgo.EDisGo`\n Dict of EDisGo instances keyed by its ID\n \"\"\"\n def collect_pool_results(result):\n \"\"\"\n Store results from parallelized calculation in structured manner\n\n Parameters\n ----------\n result: :class:`~.edisgo.EDisGo`\n \"\"\"\n results.update({result.network.id: result})\n\n results = {}\n\n pool = mp2.Pool(workers,\n maxtasksperchild=worker_lifetime)\n\n def error_callback(key):\n return lambda o: results.update({key: o})\n \n for ding0_id in ding0_id_list:\n edisgo_args = (ding0_id, *func_arguments)\n pool.apply_async(func=func,\n args=edisgo_args,\n callback=collect_pool_results,\n error_callback=error_callback(ding0_id))\n\n pool.close()\n pool.join()\n\n return results", "response": "This function runs a custom function in parallel and returns a dict of all the containers in the main dict."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef reinforce_grid(edisgo, timesteps_pfa=None, copy_graph=False,\n max_while_iterations=10, combined_analysis=False):\n \"\"\"\n Evaluates grid reinforcement needs and performs measures.\n\n This function is the parent function for all grid reinforcements.\n\n Parameters\n ----------\n edisgo : :class:`~.grid.network.EDisGo`\n The eDisGo API object\n timesteps_pfa : :obj:`str` or :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n timesteps_pfa specifies for which time steps power flow analysis is\n conducted and therefore which time steps to consider when checking\n for over-loading and over-voltage issues.\n It defaults to None in which case all timesteps in\n timeseries.timeindex (see :class:`~.grid.network.TimeSeries`) are used.\n Possible options are:\n\n * None\n Time steps in timeseries.timeindex (see\n 
:class:`~.grid.network.TimeSeries`) are used.\n * 'snapshot_analysis'\n Reinforcement is conducted for two worst-case snapshots. See\n :meth:`edisgo.tools.tools.select_worstcase_snapshots()` for further\n explanation on how worst-case snapshots are chosen.\n Note: If you have large time series choosing this option will save\n calculation time since power flow analysis is only conducted for two\n time steps. If your time series already represents the worst-case\n keep the default value of None because finding the worst-case\n snapshots takes some time.\n * :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Use this option to explicitly choose which time steps to consider.\n\n copy_graph : :obj:`Boolean`\n If True reinforcement is conducted on a copied graph and discarded.\n Default: False.\n max_while_iterations : :obj:`int`\n Maximum number of times each while loop is conducted.\n combined_analysis : :obj:`Boolean`\n If True allowed voltage deviations for combined analysis of MV and LV\n grid are used. If False different allowed voltage deviations for MV\n and LV are used. See also config section\n `grid_expansion_allowed_voltage_deviations`. 
Default: False.\n\n Returns\n -------\n :class:`~.grid.network.Results`\n Returns the Results object holding grid expansion costs, equipment\n changes, etc.\n\n Notes\n -----\n See :ref:`features-in-detail` for more information on how grid\n reinforcement is conducted.\n\n \"\"\"\n\n def _add_lines_changes_to_equipment_changes():\n equipment, index, quantity = [], [], []\n for line, number_of_lines in lines_changes.items():\n equipment.append(line.type.name)\n index.append(line)\n quantity.append(number_of_lines)\n edisgo_reinforce.network.results.equipment_changes = \\\n edisgo_reinforce.network.results.equipment_changes.append(\n pd.DataFrame(\n {'iteration_step': [iteration_step] * len(\n lines_changes),\n 'change': ['changed'] * len(lines_changes),\n 'equipment': equipment,\n 'quantity': quantity},\n index=index))\n\n def _add_transformer_changes_to_equipment_changes(mode):\n for station, transformer_list in transformer_changes[mode].items():\n edisgo_reinforce.network.results.equipment_changes = \\\n edisgo_reinforce.network.results.equipment_changes.append(\n pd.DataFrame(\n {'iteration_step': [iteration_step] * len(\n transformer_list),\n 'change': [mode] * len(transformer_list),\n 'equipment': transformer_list,\n 'quantity': [1] * len(transformer_list)},\n index=[station] * len(transformer_list)))\n\n # assign MV feeder to every generator, LV station, load, and branch tee\n # to assign grid expansion costs to an MV feeder\n assign_mv_feeder_to_nodes(edisgo.network.mv_grid)\n\n # analyze for all time steps (advantage is that load and feed-in case can\n # be obtained more performant in case `timesteps_pfa` = 'snapshot_analysis'\n # plus edisgo and edisgo_reinforce will have pypsa representation in case\n # reinforcement needs to be conducted on a copied graph)\n edisgo.analyze()\n\n # in case reinforcement needs to be conducted on a copied graph the\n # edisgo object is deep copied\n if copy_graph is True:\n edisgo_reinforce = copy.deepcopy(edisgo)\n else:\n 
edisgo_reinforce = edisgo\n\n if timesteps_pfa is not None:\n # if timesteps_pfa = 'snapshot_analysis' get snapshots\n if (isinstance(timesteps_pfa, str) and\n timesteps_pfa == 'snapshot_analysis'):\n snapshots = tools.select_worstcase_snapshots(\n edisgo_reinforce.network)\n # drop None values in case any of the two snapshots does not exist\n timesteps_pfa = pd.DatetimeIndex(data=[\n snapshots['load_case'], snapshots['feedin_case']]).dropna()\n # if timesteps_pfa is not of type datetime or does not contain\n # datetimes throw an error\n elif not isinstance(timesteps_pfa, datetime.datetime):\n if hasattr(timesteps_pfa, '__iter__'):\n if not all(isinstance(_, datetime.datetime)\n for _ in timesteps_pfa):\n raise ValueError(\n 'Input {} for timesteps_pfa is not valid.'.format(\n timesteps_pfa))\n else:\n raise ValueError(\n 'Input {} for timesteps_pfa is not valid.'.format(\n timesteps_pfa))\n\n iteration_step = 1\n edisgo_reinforce.analyze(timesteps=timesteps_pfa)\n\n # REINFORCE OVERLOADED TRANSFORMERS AND LINES\n\n logger.debug('==> Check station load.')\n overloaded_mv_station = checks.hv_mv_station_load(edisgo_reinforce.network)\n overloaded_lv_stations = checks.mv_lv_station_load(\n edisgo_reinforce.network)\n logger.debug('==> Check line load.')\n crit_lines_lv = checks.lv_line_load(edisgo_reinforce.network)\n crit_lines_mv = checks.mv_line_load(edisgo_reinforce.network)\n crit_lines = crit_lines_lv.append(crit_lines_mv)\n\n while_counter = 0\n while ((not overloaded_mv_station.empty or not overloaded_lv_stations.empty\n or not crit_lines.empty) and\n while_counter < max_while_iterations):\n\n if not overloaded_mv_station.empty:\n # reinforce substations\n transformer_changes = \\\n reinforce_measures.extend_substation_overloading(\n edisgo_reinforce.network, overloaded_mv_station)\n # write added and removed transformers to results.equipment_changes\n _add_transformer_changes_to_equipment_changes('added')\n 
_add_transformer_changes_to_equipment_changes('removed')\n\n if not overloaded_lv_stations.empty:\n # reinforce distribution substations\n transformer_changes = \\\n reinforce_measures.extend_distribution_substation_overloading(\n edisgo_reinforce.network, overloaded_lv_stations)\n # write added and removed transformers to results.equipment_changes\n _add_transformer_changes_to_equipment_changes('added')\n _add_transformer_changes_to_equipment_changes('removed')\n\n if not crit_lines.empty:\n # reinforce lines\n lines_changes = reinforce_measures.reinforce_branches_overloading(\n edisgo_reinforce.network, crit_lines)\n # write changed lines to results.equipment_changes\n _add_lines_changes_to_equipment_changes()\n\n # run power flow analysis again (after updating pypsa object) and check\n # if all over-loading problems were solved\n logger.debug('==> Run power flow analysis.')\n pypsa_io.update_pypsa_grid_reinforcement(\n edisgo_reinforce.network,\n edisgo_reinforce.network.results.equipment_changes[\n edisgo_reinforce.network.results.equipment_changes.\n iteration_step==iteration_step])\n edisgo_reinforce.analyze(timesteps=timesteps_pfa)\n logger.debug('==> Recheck station load.')\n overloaded_mv_station = checks.hv_mv_station_load(\n edisgo_reinforce.network)\n overloaded_lv_stations = checks.mv_lv_station_load(\n edisgo_reinforce.network)\n logger.debug('==> Recheck line load.')\n crit_lines_lv = checks.lv_line_load(edisgo_reinforce.network)\n crit_lines_mv = checks.mv_line_load(edisgo_reinforce.network)\n crit_lines = crit_lines_lv.append(crit_lines_mv)\n\n iteration_step += 1\n while_counter += 1\n\n # check if all load problems were solved after maximum number of\n # iterations allowed\n if (while_counter == max_while_iterations and\n (not crit_lines.empty or not overloaded_mv_station.empty or\n not overloaded_lv_stations.empty)):\n edisgo_reinforce.network.results.unresolved_issues.update(crit_lines)\n 
edisgo_reinforce.network.results.unresolved_issues.update(\n overloaded_lv_stations)\n edisgo_reinforce.network.results.unresolved_issues.update(\n overloaded_mv_station)\n raise exceptions.MaximumIterationError(\n \"Overloading issues for the following lines could not be solved:\"\n \"{}\".format(crit_lines))\n else:\n logger.info('==> Load issues were solved in {} iteration '\n 'step(s).'.format(while_counter))\n\n # REINFORCE BRANCHES DUE TO VOLTAGE ISSUES\n iteration_step += 1\n\n # solve voltage problems in MV grid\n logger.debug('==> Check voltage in MV grid.')\n if combined_analysis:\n voltage_levels = 'mv_lv'\n else:\n voltage_levels = 'mv'\n crit_nodes = checks.mv_voltage_deviation(edisgo_reinforce.network,\n voltage_levels=voltage_levels)\n\n while_counter = 0\n while crit_nodes and while_counter < max_while_iterations:\n\n # reinforce lines\n lines_changes = reinforce_measures.reinforce_branches_overvoltage(\n edisgo_reinforce.network, edisgo_reinforce.network.mv_grid,\n crit_nodes[edisgo_reinforce.network.mv_grid])\n # write changed lines to results.equipment_changes\n _add_lines_changes_to_equipment_changes()\n\n # run power flow analysis again (after updating pypsa object) and check\n # if all over-voltage problems were solved\n logger.debug('==> Run power flow analysis.')\n pypsa_io.update_pypsa_grid_reinforcement(\n edisgo_reinforce.network,\n edisgo_reinforce.network.results.equipment_changes[\n edisgo_reinforce.network.results.equipment_changes.\n iteration_step == iteration_step])\n edisgo_reinforce.analyze(timesteps=timesteps_pfa)\n logger.debug('==> Recheck voltage in MV grid.')\n crit_nodes = checks.mv_voltage_deviation(edisgo_reinforce.network,\n voltage_levels=voltage_levels)\n\n iteration_step += 1\n while_counter += 1\n\n # check if all voltage problems were solved after maximum number of\n # iterations allowed\n if while_counter == max_while_iterations and crit_nodes:\n for k, v in crit_nodes.items():\n for node in v.index:\n 
edisgo_reinforce.network.results.unresolved_issues.update(\n {repr(node): v.loc[node, 'v_mag_pu']})\n raise exceptions.MaximumIterationError(\n \"Over-voltage issues for the following nodes in MV grid could \"\n \"not be solved: {}\".format(crit_nodes))\n else:\n logger.info('==> Voltage issues in MV grid were solved in {} '\n 'iteration step(s).'.format(while_counter))\n\n # solve voltage problems at secondary side of LV stations\n logger.debug('==> Check voltage at secondary side of LV stations.')\n if combined_analysis:\n voltage_levels = 'mv_lv'\n else:\n voltage_levels = 'lv'\n crit_stations = checks.lv_voltage_deviation(edisgo_reinforce.network,\n mode='stations',\n voltage_levels=voltage_levels)\n\n while_counter = 0\n while crit_stations and while_counter < max_while_iterations:\n # reinforce distribution substations\n transformer_changes = \\\n reinforce_measures.extend_distribution_substation_overvoltage(\n edisgo_reinforce.network, crit_stations)\n # write added transformers to results.equipment_changes\n _add_transformer_changes_to_equipment_changes('added')\n\n # run power flow analysis again (after updating pypsa object) and check\n # if all over-voltage problems were solved\n logger.debug('==> Run power flow analysis.')\n pypsa_io.update_pypsa_grid_reinforcement(\n edisgo_reinforce.network,\n edisgo_reinforce.network.results.equipment_changes[\n edisgo_reinforce.network.results.equipment_changes.\n iteration_step == iteration_step])\n edisgo_reinforce.analyze(timesteps=timesteps_pfa)\n logger.debug('==> Recheck voltage at secondary side of LV stations.')\n crit_stations = checks.lv_voltage_deviation(\n edisgo_reinforce.network, mode='stations',\n voltage_levels=voltage_levels)\n\n iteration_step += 1\n while_counter += 1\n\n # check if all voltage problems were solved after maximum number of\n # iterations allowed\n if while_counter == max_while_iterations and crit_stations:\n for k, v in crit_stations.items():\n for node in v.index:\n 
edisgo_reinforce.network.results.unresolved_issues.update(\n {repr(node): v.loc[node, 'v_mag_pu']})\n raise exceptions.MaximumIterationError(\n \"Over-voltage issues at busbar could not be solved for the \"\n \"following LV grids: {}\".format(crit_stations))\n else:\n logger.info('==> Voltage issues at busbars in LV grids were solved '\n 'in {} iteration step(s).'.format(while_counter))\n\n # solve voltage problems in LV grids\n logger.debug('==> Check voltage in LV grids.')\n crit_nodes = checks.lv_voltage_deviation(edisgo_reinforce.network,\n voltage_levels=voltage_levels)\n\n while_counter = 0\n while crit_nodes and while_counter < max_while_iterations:\n # for every grid in crit_nodes do reinforcement\n for grid in crit_nodes:\n # reinforce lines\n lines_changes = reinforce_measures.reinforce_branches_overvoltage(\n edisgo_reinforce.network, grid, crit_nodes[grid])\n # write changed lines to results.equipment_changes\n _add_lines_changes_to_equipment_changes()\n\n # run power flow analysis again (after updating pypsa object) and check\n # if all over-voltage problems were solved\n logger.debug('==> Run power flow analysis.')\n pypsa_io.update_pypsa_grid_reinforcement(\n edisgo_reinforce.network,\n edisgo_reinforce.network.results.equipment_changes[\n edisgo_reinforce.network.results.equipment_changes.\n iteration_step == iteration_step])\n edisgo_reinforce.analyze(timesteps=timesteps_pfa)\n logger.debug('==> Recheck voltage in LV grids.')\n crit_nodes = checks.lv_voltage_deviation(edisgo_reinforce.network,\n voltage_levels=voltage_levels)\n\n iteration_step += 1\n while_counter += 1\n\n # check if all voltage problems were solved after maximum number of\n # iterations allowed\n if while_counter == max_while_iterations and crit_nodes:\n for k, v in crit_nodes.items():\n for node in v.index:\n edisgo_reinforce.network.results.unresolved_issues.update(\n {repr(node): v.loc[node, 'v_mag_pu']})\n raise exceptions.MaximumIterationError(\n \"Over-voltage issues for 
the following nodes in LV grids could \"\n \"not be solved: {}\".format(crit_nodes))\n else:\n logger.info(\n '==> Voltage issues in LV grids were solved '\n 'in {} iteration step(s).'.format(while_counter))\n\n # RECHECK FOR OVERLOADED TRANSFORMERS AND LINES\n logger.debug('==> Recheck station load.')\n overloaded_mv_station = checks.hv_mv_station_load(edisgo_reinforce.network)\n overloaded_lv_stations = checks.mv_lv_station_load(\n edisgo_reinforce.network)\n logger.debug('==> Recheck line load.')\n crit_lines_lv = checks.lv_line_load(edisgo_reinforce.network)\n crit_lines_mv = checks.mv_line_load(edisgo_reinforce.network)\n crit_lines = crit_lines_lv.append(crit_lines_mv)\n\n while_counter = 0\n while ((not overloaded_mv_station.empty or not overloaded_lv_stations.empty\n or not crit_lines.empty) and while_counter < max_while_iterations):\n\n if not overloaded_mv_station.empty:\n # reinforce substations\n transformer_changes = \\\n reinforce_measures.extend_substation_overloading(\n edisgo_reinforce.network, overloaded_mv_station)\n # write added and removed transformers to results.equipment_changes\n _add_transformer_changes_to_equipment_changes('added')\n _add_transformer_changes_to_equipment_changes('removed')\n\n if not overloaded_lv_stations.empty:\n # reinforce substations\n transformer_changes = \\\n reinforce_measures.extend_distribution_substation_overloading(\n edisgo_reinforce.network, overloaded_lv_stations)\n # write added and removed transformers to results.equipment_changes\n _add_transformer_changes_to_equipment_changes('added')\n _add_transformer_changes_to_equipment_changes('removed')\n\n if not crit_lines.empty:\n # reinforce lines\n lines_changes = reinforce_measures.reinforce_branches_overloading(\n edisgo_reinforce.network, crit_lines)\n # write changed lines to results.equipment_changes\n _add_lines_changes_to_equipment_changes()\n\n # run power flow analysis again (after updating pypsa object) and check\n # if all over-loading problems 
were solved\n logger.debug('==> Run power flow analysis.')\n pypsa_io.update_pypsa_grid_reinforcement(\n edisgo_reinforce.network,\n edisgo_reinforce.network.results.equipment_changes[\n edisgo_reinforce.network.results.equipment_changes.\n iteration_step == iteration_step])\n edisgo_reinforce.analyze(timesteps=timesteps_pfa)\n logger.debug('==> Recheck station load.')\n overloaded_mv_station = checks.hv_mv_station_load(\n edisgo_reinforce.network)\n overloaded_lv_stations = checks.mv_lv_station_load(\n edisgo_reinforce.network)\n logger.debug('==> Recheck line load.')\n crit_lines_lv = checks.lv_line_load(edisgo_reinforce.network)\n crit_lines_mv = checks.mv_line_load(edisgo_reinforce.network)\n crit_lines = crit_lines_lv.append(crit_lines_mv)\n\n iteration_step += 1\n while_counter += 1\n\n # check if all load problems were solved after maximum number of\n # iterations allowed\n if (while_counter == max_while_iterations and\n (not crit_lines.empty or not overloaded_mv_station.empty or\n not overloaded_lv_stations.empty)):\n edisgo_reinforce.network.results.unresolved_issues.update(crit_lines)\n edisgo_reinforce.network.results.unresolved_issues.update(\n overloaded_lv_stations)\n edisgo_reinforce.network.results.unresolved_issues.update(\n overloaded_mv_station)\n raise exceptions.MaximumIterationError(\n \"Overloading issues (after solving over-voltage issues) for the\"\n \"following lines could not be solved: {}\".format(crit_lines))\n else:\n logger.info(\n '==> Load issues were rechecked and solved '\n 'in {} iteration step(s).'.format(while_counter))\n\n # final check 10% criteria\n checks.check_ten_percent_voltage_deviation(edisgo_reinforce.network)\n\n # calculate grid expansion costs\n edisgo_reinforce.network.results.grid_expansion_costs = \\\n grid_expansion_costs(edisgo_reinforce.network)\n\n return edisgo_reinforce.network.results", "response": "Evaluates grid reinforcement needs and performs measures."} {"SOURCE": "codesearchnet", "instruction": 
"Write a Python 3 script for\nimporting an eDisGo grid topology from a pickle file.", "response": "def import_from_ding0(file, network):\n \"\"\"\n Import an eDisGo grid topology from\n `Ding0 data <https://github.com/openego/ding0>`_.\n\n This import method is specifically designed to load grid topology data in\n the format as `Ding0 <https://github.com/openego/ding0>`_ provides it via\n pickles.\n\n The import of the grid topology includes\n\n * the topology itself\n * equipment parameter\n * generators incl. location, type, subtype and capacity\n * loads incl. location and sectoral consumption\n\n Parameters\n ----------\n file: :obj:`str` or :class:`ding0.core.NetworkDing0`\n If a str is provided it is assumed it points to a pickle with Ding0\n grid data. This file will be read.\n If an object of the type :class:`ding0.core.NetworkDing0` data will be\n used directly from this object.\n network: :class:`~.grid.network.Network`\n The eDisGo data container object\n\n Notes\n -----\n Assumes :class:`ding0.core.NetworkDing0` provided by `file` contains\n only data of one mv_grid_district.\n\n \"\"\"\n # when `file` is a string, it will be read by the help of pickle\n if isinstance(file, str):\n ding0_nd = load_nd_from_pickle(filename=file)\n # otherwise it is assumed the object is passed directly\n else:\n ding0_nd = file\n\n ding0_mv_grid = ding0_nd._mv_grid_districts[0].mv_grid\n\n # Make sure circuit breakers (respectively the rings) are closed\n ding0_mv_grid.close_circuit_breakers()\n\n # Import medium-voltage grid data\n network.mv_grid = _build_mv_grid(ding0_mv_grid, network)\n\n # Import low-voltage grid data\n lv_grids, lv_station_mapping, lv_grid_mapping = _build_lv_grid(\n ding0_mv_grid, network)\n\n # Assign lv_grids to network\n network.mv_grid.lv_grids = lv_grids\n\n # Integrate disconnecting points\n position_switch_disconnectors(network.mv_grid,\n mode=network.config['disconnecting_point'][\n 'position'])\n\n # Check data integrity\n 
_validate_ding0_grid_import(network.mv_grid, ding0_mv_grid,\n lv_grid_mapping)\n\n # Set data source\n network.set_data_source('grid', 'dingo')\n\n # Set more params\n network._id = network.mv_grid.id\n\n # Update the weather_cell_ids in mv_grid to include the ones in lv_grids\n # ToDo: maybe get a better solution to push the weather_cell_ids in lv_grids but not in mv_grid but into the\n # mv_grid.weather_cell_ids from within the Grid() object or the MVGrid() or LVGrid()\n mv_weather_cell_id = network.mv_grid.weather_cells\n for lvg in lv_grids:\n if lvg.weather_cells:\n for lv_w_id in lvg._weather_cells:\n if not (lv_w_id in mv_weather_cell_id):\n network.mv_grid._weather_cells.append(lv_w_id)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nbuilding LV grids from Ding0 data and creates LV grids.", "response": "def _build_lv_grid(ding0_grid, network):\n \"\"\"\n Build eDisGo LV grid from Ding0 data\n\n Parameters\n ----------\n ding0_grid: ding0.MVGridDing0\n Ding0 MV grid object\n\n Returns\n -------\n list of LVGrid\n LV grids\n dict\n Dictionary containing a mapping of LV stations in Ding0 to newly\n created eDisGo LV stations. 
This mapping is used to use the same\n instances of LV stations in the MV grid graph.\n \"\"\"\n\n lv_station_mapping = {}\n lv_grids = []\n lv_grid_mapping = {}\n\n for la in ding0_grid.grid_district._lv_load_areas:\n for lvgd in la._lv_grid_districts:\n ding0_lv_grid = lvgd.lv_grid\n if not ding0_lv_grid.grid_district.lv_load_area.is_aggregated:\n\n # Create LV grid instance\n lv_grid = LVGrid(\n id=ding0_lv_grid.id_db,\n geom=ding0_lv_grid.grid_district.geo_data,\n grid_district={\n 'geom': ding0_lv_grid.grid_district.geo_data,\n 'population': ding0_lv_grid.grid_district.population},\n voltage_nom=ding0_lv_grid.v_level / 1e3,\n network=network)\n\n station = {repr(_): _ for _ in\n network.mv_grid.graph.nodes_by_attribute(\n 'lv_station')}['LVStation_' + str(\n ding0_lv_grid._station.id_db)]\n\n station.grid = lv_grid\n for t in station.transformers:\n t.grid = lv_grid\n\n lv_grid.graph.add_node(station, type='lv_station')\n lv_station_mapping.update({ding0_lv_grid._station: station})\n\n # Create list of load instances and add these to grid's graph\n loads = {_: Load(\n id=_.id_db,\n geom=_.geo_data,\n grid=lv_grid,\n consumption=_.consumption) for _ in ding0_lv_grid.loads()}\n lv_grid.graph.add_nodes_from(loads.values(), type='load')\n\n # Create list of generator instances and add these to grid's\n # graph\n generators = {_: (GeneratorFluctuating(\n id=_.id_db,\n geom=_.geo_data,\n nominal_capacity=_.capacity,\n type=_.type,\n subtype=_.subtype,\n grid=lv_grid,\n weather_cell_id=_.weather_cell_id,\n v_level=_.v_level) if _.type in ['wind', 'solar'] else\n Generator(\n id=_.id_db,\n geom=_.geo_data,\n nominal_capacity=_.capacity,\n type=_.type,\n subtype=_.subtype,\n grid=lv_grid,\n v_level=_.v_level))\n for _ in ding0_lv_grid.generators()}\n lv_grid.graph.add_nodes_from(generators.values(),\n type='generator')\n\n # Create list of branch tee instances and add these to grid's\n # graph\n branch_tees = {\n _: BranchTee(id=_.id_db,\n geom=_.geo_data,\n 
grid=lv_grid,\n in_building=_.in_building)\n for _ in ding0_lv_grid._cable_distributors}\n lv_grid.graph.add_nodes_from(branch_tees.values(),\n type='branch_tee')\n\n # Merge node above defined above to a single dict\n nodes = {**loads,\n **generators,\n **branch_tees,\n **{ding0_lv_grid._station: station}}\n\n edges = []\n edges_raw = list(nx.get_edge_attributes(\n ding0_lv_grid._graph, 'branch').items())\n for edge in edges_raw:\n edges.append({'adj_nodes': edge[0], 'branch': edge[1]})\n\n # Create list of line instances and add these to grid's graph\n lines = [(nodes[_['adj_nodes'][0]], nodes[_['adj_nodes'][1]],\n {'line': Line(\n id=_['branch'].id_db,\n type=_['branch'].type,\n length=_['branch'].length / 1e3,\n kind=_['branch'].kind,\n grid=lv_grid)\n })\n for _ in edges]\n # convert voltage from V to kV\n for line in lines:\n # ToDo: remove work around once it's fixed in ding0\n if line[2]['line'].type['U_n'] >= 400:\n line[2]['line'].type['U_n'] = \\\n line[2]['line'].type['U_n'] / 1e3\n lv_grid.graph.add_edges_from(lines, type='line')\n\n # Add LV station as association to LV grid\n lv_grid._station = station\n\n # Add to lv grid mapping\n lv_grid_mapping.update({lv_grid: ding0_lv_grid})\n\n # Put all LV grid to a list of LV grids\n lv_grids.append(lv_grid)\n\n # ToDo: don't forget to adapt lv stations creation in MV grid\n return lv_grids, lv_station_mapping, lv_grid_mapping"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _determine_aggregated_nodes(la_centers):\n\n def aggregate_generators(gen, aggr):\n \"\"\"Aggregate generation capacity per voltage level\n\n Parameters\n ----------\n gen: ding0.core.GeneratorDing0\n Ding0 Generator object\n aggr: dict\n Aggregated generation capacity. 
For structure see\n `_determine_aggregated_nodes()`.\n\n Returns\n -------\n\n \"\"\"\n\n if gen.v_level not in aggr['generation']:\n aggr['generation'][gen.v_level] = {}\n if gen.type not in aggr['generation'][gen.v_level]:\n aggr['generation'][gen.v_level][gen.type] = {}\n if gen.subtype not in aggr['generation'][gen.v_level][gen.type]:\n aggr['generation'][gen.v_level][gen.type].update(\n {gen.subtype: {'ids': [gen.id_db],\n 'capacity': gen.capacity}})\n else:\n aggr['generation'][gen.v_level][gen.type][gen.subtype][\n 'ids'].append(gen.id_db)\n aggr['generation'][gen.v_level][gen.type][gen.subtype][\n 'capacity'] += gen.capacity\n\n return aggr\n\n def aggregate_loads(la_center, aggr):\n \"\"\"Aggregate consumption in load area per sector\n\n Parameters\n ----------\n la_center: LVLoadAreaCentreDing0\n Load area center object from Ding0\n\n Returns\n -------\n\n \"\"\"\n for s in ['retail', 'industrial', 'agricultural', 'residential']:\n if s not in aggr['load']:\n aggr['load'][s] = 0\n\n aggr['load']['retail'] += sum(\n [_.sector_consumption_retail\n for _ in la_center.lv_load_area._lv_grid_districts])\n aggr['load']['industrial'] += sum(\n [_.sector_consumption_industrial\n for _ in la_center.lv_load_area._lv_grid_districts])\n aggr['load']['agricultural'] += sum(\n [_.sector_consumption_agricultural\n for _ in la_center.lv_load_area._lv_grid_districts])\n aggr['load']['residential'] += sum(\n [_.sector_consumption_residential\n for _ in la_center.lv_load_area._lv_grid_districts])\n\n return aggr\n\n aggregated = {}\n aggr_stations = []\n\n # ToDo: The variable generation_aggr is further used -> delete this code\n generation_aggr = {}\n for la in la_centers[0].grid.grid_district._lv_load_areas:\n for lvgd in la._lv_grid_districts:\n for gen in lvgd.lv_grid.generators():\n if la.is_aggregated:\n generation_aggr.setdefault(gen.type, {})\n generation_aggr[gen.type].setdefault(gen.subtype, {'ding0': 0})\n generation_aggr[gen.type][gen.subtype].setdefault('ding0', 
0)\n generation_aggr[gen.type][gen.subtype]['ding0'] += gen.capacity\n\n dingo_import_data = pd.DataFrame(columns=('id',\n 'capacity',\n 'agg_geno')\n )\n\n for la_center in la_centers:\n aggr = {'generation': {}, 'load': {}, 'aggregates': []}\n\n # Determine aggregated generation in LV grid\n for lvgd in la_center.lv_load_area._lv_grid_districts:\n weather_cell_ids = {}\n for gen in lvgd.lv_grid.generators():\n aggr = aggregate_generators(gen, aggr)\n\n # Get the aggregated weather cell id of the area\n # b\n if isinstance(gen, GeneratorFluctuatingDing0):\n if gen.weather_cell_id not in weather_cell_ids.keys():\n weather_cell_ids[gen.weather_cell_id] = 1\n else:\n weather_cell_ids[gen.weather_cell_id] += 1\n\n dingo_import_data.loc[len(dingo_import_data)] = \\\n [int(gen.id_db),\n gen.capacity,\n None]\n\n # Get the weather cell id that occurs the most if there are any generators\n if not(list(lvgd.lv_grid.generators())):\n weather_cell_id = None\n else:\n if weather_cell_ids:\n weather_cell_id = list(weather_cell_ids.keys())[\n list(weather_cell_ids.values()).index(\n max(weather_cell_ids.values()))]\n else:\n weather_cell_id = None\n\n for v_level in aggr['generation']:\n for type in aggr['generation'][v_level]:\n for subtype in aggr['generation'][v_level][type]:\n # make sure to check if there are any generators before assigning\n # a weather cell id\n if not(list(lvgd.lv_grid.generators())):\n pass\n else:\n aggr['generation'][v_level][type][subtype]['weather_cell_id'] = \\\n weather_cell_id\n\n # Determine aggregated load in MV grid\n # -> Implement once laods in Ding0 MV grids exist\n\n # Determine aggregated load in LV grid\n aggr = aggregate_loads(la_center, aggr)\n\n # Collect metadata of aggregated load areas\n aggr['aggregates'] = {\n 'population': la_center.lv_load_area.zensus_sum,\n 'geom': la_center.lv_load_area.geo_area}\n\n # Determine LV grids/ stations that are aggregated\n for _ in la_center.lv_load_area._lv_grid_districts:\n 
aggr_stations.append(_.lv_grid.station())\n\n # add elements to lists\n aggregated.update({la_center.id_db: aggr})\n\n return aggregated, aggr_stations, dingo_import_data", "response": "This function determines the generation and load within load areas and aggregates the nodes in the LV grid."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding generators and loads to MV station representing aggregated load and generation capacity and load.", "response": "def _attach_aggregated(network, grid, aggregated, ding0_grid):\n \"\"\"Add Generators and Loads to MV station representing aggregated generation\n capacity and load\n\n Parameters\n ----------\n grid: MVGrid\n MV grid object\n aggregated: dict\n Information about aggregated load and generation capacity. For\n information about the structure of the dict see ... .\n ding0_grid: ding0.Network\n Ding0 network container\n Returns\n -------\n MVGrid\n Altered instance of MV grid including aggregated load and generation\n \"\"\"\n\n aggr_line_type = ding0_grid.network._static_data['MV_cables'].iloc[\n ding0_grid.network._static_data['MV_cables']['I_max_th'].idxmax()]\n\n for la_id, la in aggregated.items():\n # add aggregated generators\n for v_level, val in la['generation'].items():\n for type, val2 in val.items():\n for subtype, val3 in val2.items():\n if type in ['solar', 'wind']:\n gen = GeneratorFluctuating(\n id='agg-' + str(la_id) + '-' + '_'.join(\n [str(_) for _ in val3['ids']]),\n nominal_capacity=val3['capacity'],\n weather_cell_id=val3['weather_cell_id'],\n type=type,\n subtype=subtype,\n geom=grid.station.geom,\n grid=grid,\n v_level=4)\n else:\n gen = Generator(\n id='agg-' + str(la_id) + '-' + '_'.join(\n [str(_) for _ in val3['ids']]),\n nominal_capacity=val3['capacity'],\n type=type,\n subtype=subtype,\n geom=grid.station.geom,\n grid=grid,\n v_level=4)\n grid.graph.add_node(gen, type='generator_aggr')\n\n # backup reference of geno to LV geno list (save geno\n # where the 
former LV genos are aggregated in)\n network.dingo_import_data.set_value(network.dingo_import_data['id'].isin(val3['ids']),\n 'agg_geno',\n gen)\n\n # connect generator to MV station\n line = Line(id='line_aggr_generator_la_' + str(la_id) + '_vlevel_{v_level}_'\n '{subtype}'.format(\n v_level=v_level,\n subtype=subtype),\n type=aggr_line_type,\n kind='cable',\n length=1e-3,\n grid=grid)\n grid.graph.add_edge(grid.station,\n gen,\n line=line,\n type='line_aggr')\n\n for sector, sectoral_load in la['load'].items():\n load = Load(\n geom=grid.station.geom,\n consumption={sector: sectoral_load},\n grid=grid,\n id='_'.join(['Load_aggregated', sector, repr(grid), str(la_id)]))\n\n grid.graph.add_node(load, type='load')\n\n # connect aggregated load to MV station\n line = Line(id='_'.join(['line_aggr_load_la_' + str(la_id), sector, str(la_id)]),\n type=aggr_line_type,\n kind='cable',\n length=1e-3,\n grid=grid)\n\n grid.graph.add_edge(grid.station,\n load,\n line=line,\n type='line_aggr')"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _validate_ding0_grid_import(mv_grid, ding0_mv_grid, lv_grid_mapping):\n\n # Check number of components in MV grid\n _validate_ding0_mv_grid_import(mv_grid, ding0_mv_grid)\n\n # Check number of components in LV grid\n _validate_ding0_lv_grid_import(mv_grid.lv_grids, ding0_mv_grid,\n lv_grid_mapping)\n\n # Check cumulative load and generation in MV grid district\n _validate_load_generation(mv_grid, ding0_mv_grid)", "response": "Cross - check imported data with original data source"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _validate_ding0_mv_grid_import(grid, ding0_grid):\n\n integrity_checks = ['branch_tee',\n 'disconnection_point', 'mv_transformer',\n 'lv_station'#,'line',\n ]\n\n data_integrity = {}\n data_integrity.update({_: {'ding0': None, 'edisgo': None, 'msg': None}\n for _ in integrity_checks})\n\n # 
Check number of branch tees\n data_integrity['branch_tee']['ding0'] = len(ding0_grid._cable_distributors)\n data_integrity['branch_tee']['edisgo'] = len(\n grid.graph.nodes_by_attribute('branch_tee'))\n\n # Check number of disconnecting points\n data_integrity['disconnection_point']['ding0'] = len(\n ding0_grid._circuit_breakers)\n data_integrity['disconnection_point']['edisgo'] = len(\n grid.graph.nodes_by_attribute('mv_disconnecting_point'))\n\n # Check number of MV transformers\n data_integrity['mv_transformer']['ding0'] = len(\n list(ding0_grid.station().transformers()))\n data_integrity['mv_transformer']['edisgo'] = len(\n grid.station.transformers)\n\n # Check number of LV stations in MV grid (graph)\n data_integrity['lv_station']['edisgo'] = len(grid.graph.nodes_by_attribute(\n 'lv_station'))\n data_integrity['lv_station']['ding0'] = len(\n [_ for _ in ding0_grid._graph.nodes()\n if (isinstance(_, LVStationDing0) and\n not _.grid.grid_district.lv_load_area.is_aggregated)])\n\n # Check number of lines outside aggregated LA\n # edges_w_la = grid.graph.lines()\n # data_integrity['line']['edisgo'] = len([_ for _ in edges_w_la\n # if not (_['adj_nodes'][0] == grid.station or\n # _['adj_nodes'][1] == grid.station) and\n # _['line']._length > .5])\n # data_integrity['line']['ding0'] = len(\n # [_ for _ in ding0_grid.lines()\n # if not _['branch'].connects_aggregated])\n\n # raise an error if data does not match\n for c in integrity_checks:\n if data_integrity[c]['edisgo'] != data_integrity[c]['ding0']:\n raise ValueError(\n 'Unequal number of objects for {c}. 
'\n '\\n\\tDing0:\\t{ding0_no}'\n '\\n\\teDisGo:\\t{edisgo_no}'.format(\n c=c,\n ding0_no=data_integrity[c]['ding0'],\n edisgo_no=data_integrity[c]['edisgo']))\n\n return data_integrity", "response": "Verify imported data with original data from MV grid"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _validate_ding0_lv_grid_import(grids, ding0_grid, lv_grid_mapping):\n\n integrity_checks = ['branch_tee', 'lv_transformer',\n 'generator', 'load','line']\n\n data_integrity = {}\n\n for grid in grids:\n\n data_integrity.update({grid:{_: {'ding0': None, 'edisgo': None, 'msg': None}\n for _ in integrity_checks}})\n\n # Check number of branch tees\n data_integrity[grid]['branch_tee']['ding0'] = len(\n lv_grid_mapping[grid]._cable_distributors)\n data_integrity[grid]['branch_tee']['edisgo'] = len(\n grid.graph.nodes_by_attribute('branch_tee'))\n\n # Check number of LV transformers\n data_integrity[grid]['lv_transformer']['ding0'] = len(\n list(lv_grid_mapping[grid].station().transformers()))\n data_integrity[grid]['lv_transformer']['edisgo'] = len(\n grid.station.transformers)\n\n # Check number of generators\n data_integrity[grid]['generator']['edisgo'] = len(\n grid.generators)\n data_integrity[grid]['generator']['ding0'] = len(\n list(lv_grid_mapping[grid].generators()))\n\n # Check number of loads\n data_integrity[grid]['load']['edisgo'] = len(\n grid.graph.nodes_by_attribute('load'))\n data_integrity[grid]['load']['ding0'] = len(\n list(lv_grid_mapping[grid].loads()))\n\n # Check number of lines outside aggregated LA\n data_integrity[grid]['line']['edisgo'] = len(\n list(grid.graph.lines()))\n data_integrity[grid]['line']['ding0'] = len(\n [_ for _ in lv_grid_mapping[grid].graph_edges()\n if not _['branch'].connects_aggregated])\n\n # raise an error if data does not match\n for grid in grids:\n for c in integrity_checks:\n if data_integrity[grid][c]['edisgo'] != data_integrity[grid][c]['ding0']:\n raise ValueError(\n 
'Unequal number of objects in grid {grid} for {c}. '\n '\\n\\tDing0:\\t{ding0_no}'\n '\\n\\teDisGo:\\t{edisgo_no}'.format(\n grid=grid,\n c=c,\n ding0_no=data_integrity[grid][c]['ding0'],\n edisgo_no=data_integrity[grid][c]['edisgo']))", "response": "Verify imported data with original data from LV grid"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef import_generators(network, data_source=None, file=None):\n\n if data_source == 'oedb':\n logging.warning('Right now only solar and wind generators can be '\n 'imported from the oedb.')\n _import_genos_from_oedb(network=network)\n network.mv_grid._weather_cells = None\n if network.pypsa is not None:\n pypsa_io.update_pypsa_generator_import(network)\n elif data_source == 'pypsa':\n _import_genos_from_pypsa(network=network, file=file)\n else:\n logger.error(\"Invalid option {} for generator import. Must either be \"\n \"'oedb' or 'pypsa'.\".format(data_source))\n raise ValueError('The option you specified is not supported.')", "response": "Imports the generator data from source."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nimports the generator data from the Open Energy Database.", "response": "def _import_genos_from_oedb(network):\n \"\"\"Import generator data from the Open Energy Database (OEDB).\n\n The importer uses SQLAlchemy ORM objects.\n These are defined in ego.io,\n see https://github.com/openego/ego.io/tree/dev/egoio/db_tables\n\n Parameters\n ----------\n network: :class:`~.grid.network.Network`\n The eDisGo container object\n\n Notes\n ------\n Right now only solar and wind generators can be imported.\n\n \"\"\"\n\n def _import_conv_generators():\n \"\"\"Import conventional (conv) generators\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n List of medium-voltage generators\n\n Notes\n -----\n You can find a full list of columns in\n 
:func:`edisgo.data.import_data._update_grids`\n \"\"\"\n\n # build query\n generators_sqla = session.query(\n orm_conv_generators.columns.id,\n orm_conv_generators.columns.subst_id,\n orm_conv_generators.columns.la_id,\n orm_conv_generators.columns.capacity,\n orm_conv_generators.columns.type,\n orm_conv_generators.columns.voltage_level,\n orm_conv_generators.columns.fuel,\n func.ST_AsText(func.ST_Transform(\n orm_conv_generators.columns.geom, srid))\n ). \\\n filter(orm_conv_generators.columns.subst_id == network.mv_grid.id). \\\n filter(orm_conv_generators.columns.voltage_level.in_([4, 5, 6, 7])). \\\n filter(orm_conv_generators_version)\n\n # read data from db\n generators_mv = pd.read_sql_query(generators_sqla.statement,\n session.bind,\n index_col='id')\n\n return generators_mv\n\n def _import_res_generators(types_filter):\n \"\"\"Import renewable (res) generators\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n List of medium-voltage generators\n :pandas:`pandas.DataFrame<dataframe>`\n List of low-voltage generators\n\n Notes\n -----\n You can find a full list of columns in\n :func:`edisgo.data.import_data._update_grids`\n\n If subtype is not specified it's set to 'unknown'.\n \"\"\"\n\n # build basic query\n generators_sqla = session.query(\n orm_re_generators.columns.id,\n orm_re_generators.columns.subst_id,\n orm_re_generators.columns.la_id,\n orm_re_generators.columns.mvlv_subst_id,\n orm_re_generators.columns.electrical_capacity,\n orm_re_generators.columns.generation_type,\n orm_re_generators.columns.generation_subtype,\n orm_re_generators.columns.voltage_level,\n orm_re_generators.columns.w_id,\n func.ST_AsText(func.ST_Transform(\n orm_re_generators.columns.rea_geom_new, srid)).label('geom'),\n func.ST_AsText(func.ST_Transform(\n orm_re_generators.columns.geom, srid)).label('geom_em')). \\\n filter(orm_re_generators.columns.subst_id == network.mv_grid.id). \\\n filter(orm_re_generators_version). 
\\\n filter(types_filter)\n\n # extend basic query for MV generators and read data from db\n generators_mv_sqla = generators_sqla. \\\n filter(orm_re_generators.columns.voltage_level.in_([4, 5]))\n generators_mv = pd.read_sql_query(generators_mv_sqla.statement,\n session.bind,\n index_col='id')\n\n # define generators with unknown subtype as 'unknown'\n generators_mv.loc[generators_mv[\n 'generation_subtype'].isnull(),\n 'generation_subtype'] = 'unknown'\n\n # extend basic query for LV generators and read data from db\n generators_lv_sqla = generators_sqla. \\\n filter(orm_re_generators.columns.voltage_level.in_([6, 7]))\n generators_lv = pd.read_sql_query(generators_lv_sqla.statement,\n session.bind,\n index_col='id')\n\n # define generators with unknown subtype as 'unknown'\n generators_lv.loc[generators_lv[\n 'generation_subtype'].isnull(),\n 'generation_subtype'] = 'unknown'\n\n return generators_mv, generators_lv\n\n def _update_grids(network, generators_mv, generators_lv, remove_missing=True):\n \"\"\"Update imported status quo DINGO-grid according to new generator dataset\n\n It\n * adds new generators to grid if they do not exist\n * updates existing generators if parameters have changed\n * removes existing generators from grid which do not exist in the imported dataset\n\n Steps:\n\n * Step 1: MV generators: Update existing, create new, remove decommissioned\n * Step 2: LV generators (single units): Update existing, remove decommissioned\n * Step 3: LV generators (in aggregated MV generators): Update existing,\n remove decommissioned\n (aggregated MV generators = originally LV generators from aggregated Load\n Areas which were aggregated during import from ding0.)\n * Step 4: LV generators (single units + aggregated MV generators): Create new\n\n Parameters\n ----------\n network: :class:`~.grid.network.Network`\n The eDisGo container object\n\n generators_mv: :pandas:`pandas.DataFrame<dataframe>`\n List of MV generators\n Columns:\n * id: :obj:`int` 
(index column)\n * electrical_capacity: :obj:`float` (unit: kW)\n * generation_type: :obj:`str` (e.g. 'solar')\n * generation_subtype: :obj:`str` (e.g. 'solar_roof_mounted')\n * voltage level: :obj:`int` (range: 4..7,)\n * geom: :shapely:`Shapely Point object<points>`\n (CRS see config_grid.cfg)\n * geom_em: :shapely:`Shapely Point object<points>`\n (CRS see config_grid.cfg)\n\n generators_lv: :pandas:`pandas.DataFrame<dataframe>`\n List of LV generators\n Columns:\n * id: :obj:`int` (index column)\n * mvlv_subst_id: :obj:`int` (id of MV-LV substation in grid\n = grid which the generator will be connected to)\n * electrical_capacity: :obj:`float` (unit: kW)\n * generation_type: :obj:`str` (e.g. 'solar')\n * generation_subtype: :obj:`str` (e.g. 'solar_roof_mounted')\n * voltage level: :obj:`int` (range: 4..7,)\n * geom: :shapely:`Shapely Point object<points>`\n (CRS see config_grid.cfg)\n * geom_em: :shapely:`Shapely Point object<points>`\n (CRS see config_grid.cfg)\n\n remove_missing: :obj:`bool`\n If true, remove generators from grid which are not included in the imported dataset.\n \"\"\"\n\n # set capacity difference threshold\n cap_diff_threshold = 10 ** -4\n\n # get existing generators in MV and LV grids\n g_mv, g_lv, g_mv_agg = _build_generator_list(network=network)\n\n # print current capacity\n capacity_grid = 0\n capacity_grid += sum([row['obj'].nominal_capacity for id, row in g_mv.iterrows()])\n capacity_grid += sum([row['obj'].nominal_capacity for id, row in g_lv.iterrows()])\n capacity_grid += sum([row['obj'].nominal_capacity for id, row in g_mv_agg.iterrows()])\n logger.debug('Cumulative generator capacity (existing): {} kW'\n .format(str(round(capacity_grid, 1)))\n )\n\n # ======================================\n # Step 1: MV generators (existing + new)\n # ======================================\n logger.debug('==> MV generators')\n logger.debug('{} generators imported.'\n .format(str(len(generators_mv))))\n # get existing genos (status quo DF 
format)\n g_mv_existing = g_mv[g_mv['id'].isin(list(generators_mv.index.values))]\n # get existing genos (new genos DF format)\n generators_mv_existing = generators_mv[generators_mv.index.isin(list(g_mv_existing['id']))]\n # remove existing ones from grid's geno list\n g_mv = g_mv[~g_mv.isin(g_mv_existing)].dropna()\n\n # TEMP: BACKUP 1 GENO FOR TESTING\n #temp_geno = generators_mv_existing.iloc[0]\n #temp_geno['geom_em'] = temp_geno['geom_em'].replace('10.667', '10.64')\n\n # iterate over exiting generators and check whether capacity has changed\n log_geno_count = 0\n log_geno_cap = 0\n for id, row in generators_mv_existing.iterrows():\n\n geno_existing = g_mv_existing[g_mv_existing['id'] == id]['obj'].iloc[0]\n\n # check if capacity equals; if not: update capacity\n if abs(row['electrical_capacity'] - \\\n geno_existing.nominal_capacity) < cap_diff_threshold:\n continue\n else:\n log_geno_cap += row['electrical_capacity'] - geno_existing.nominal_capacity\n log_geno_count += 1\n geno_existing.nominal_capacity = row['electrical_capacity']\n\n # check if cap=0 (this may happen if dp is buggy)\n if row['electrical_capacity'] <= 0:\n geno_existing.grid.graph.remove_node(geno_existing)\n logger.warning('Capacity of generator {} is <=0, generator removed. 
'\n 'Check your data source.'\n .format(repr(geno_existing))\n )\n\n logger.debug('Capacities of {} of {} existing generators updated ({} kW).'\n .format(str(log_geno_count),\n str(len(generators_mv_existing) - log_geno_count),\n str(round(log_geno_cap, 1))\n )\n )\n\n # new genos\n log_geno_count = 0\n log_geno_cap = 0\n generators_mv_new = generators_mv[~generators_mv.index.isin(\n list(g_mv_existing['id']))]\n\n # remove them from grid's geno list\n g_mv = g_mv[~g_mv.isin(list(generators_mv_new.index.values))].dropna()\n\n # TEMP: INSERT BACKUPPED GENO IN DF FOR TESTING\n #generators_mv_new = generators_mv_new.append(temp_geno)\n\n # iterate over new generators and create them\n for id, row in generators_mv_new.iterrows():\n # check if geom is available, skip otherwise\n geom = _check_geom(id, row)\n if not geom:\n logger.warning('Generator {} has no geom entry at all and will'\n 'not be imported!'.format(id))\n continue\n\n # create generator object and add it to MV grid's graph\n if row['generation_type'] in ['solar', 'wind']:\n network.mv_grid.graph.add_node(\n GeneratorFluctuating(\n id=id,\n grid=network.mv_grid,\n nominal_capacity=row['electrical_capacity'],\n type=row['generation_type'],\n subtype=row['generation_subtype'],\n v_level=int(row['voltage_level']),\n weather_cell_id=row['w_id'],\n geom=wkt_loads(geom)),\n type='generator')\n else:\n network.mv_grid.graph.add_node(\n Generator(id=id,\n grid=network.mv_grid,\n nominal_capacity=row['electrical_capacity'],\n type=row['generation_type'],\n subtype=row['generation_subtype'],\n v_level=int(row['voltage_level']),\n geom=wkt_loads(geom)\n ),\n type='generator')\n log_geno_cap += row['electrical_capacity']\n log_geno_count += 1\n\n logger.debug('{} of {} new generators added ({} kW).'\n .format(str(log_geno_count),\n str(len(generators_mv_new)),\n str(round(log_geno_cap, 1))\n )\n )\n\n # remove decommissioned genos\n # (genos which exist in grid but not in the new dataset)\n log_geno_cap = 0\n if not 
g_mv.empty and remove_missing:\n log_geno_count = 0\n for _, row in g_mv.iterrows():\n log_geno_cap += row['obj'].nominal_capacity\n row['obj'].grid.graph.remove_node(row['obj'])\n log_geno_count += 1\n logger.debug('{} of {} decommissioned generators removed ({} kW).'\n .format(str(log_geno_count),\n str(len(g_mv)),\n str(round(log_geno_cap, 1))\n )\n )\n\n # =============================================\n # Step 2: LV generators (single existing units)\n # =============================================\n logger.debug('==> LV generators')\n logger.debug('{} generators imported.'.format(str(len(generators_lv))))\n # get existing genos (status quo DF format)\n g_lv_existing = g_lv[g_lv['id'].isin(list(generators_lv.index.values))]\n # get existing genos (new genos DF format)\n generators_lv_existing = generators_lv[generators_lv.index.isin(list(g_lv_existing['id']))]\n\n # TEMP: BACKUP 1 GENO FOR TESTING\n # temp_geno = g_lv.iloc[0]\n\n # remove existing ones from grid's geno list\n g_lv = g_lv[~g_lv.isin(g_lv_existing)].dropna()\n\n # iterate over exiting generators and check whether capacity has changed\n log_geno_count = 0\n log_geno_cap = 0\n for id, row in generators_lv_existing.iterrows():\n\n geno_existing = g_lv_existing[g_lv_existing['id'] == id]['obj'].iloc[0]\n\n # check if capacity equals; if not: update capacity\n if abs(row['electrical_capacity'] - \\\n geno_existing.nominal_capacity) < cap_diff_threshold:\n continue\n else:\n log_geno_cap += row['electrical_capacity'] - geno_existing.nominal_capacity\n log_geno_count += 1\n geno_existing.nominal_capacity = row['electrical_capacity']\n\n logger.debug('Capacities of {} of {} existing generators (single units) updated ({} kW).'\n .format(str(log_geno_count),\n str(len(generators_lv_existing) - log_geno_count),\n str(round(log_geno_cap, 1))\n )\n )\n\n # TEMP: INSERT BACKUPPED GENO IN DF FOR TESTING\n # g_lv.loc[len(g_lv)] = temp_geno\n\n # remove decommissioned genos\n # (genos which exist in grid but not 
in the new dataset)\n log_geno_cap = 0\n if not g_lv.empty and remove_missing:\n log_geno_count = 0\n for _, row in g_lv.iterrows():\n log_geno_cap += row['obj'].nominal_capacity\n row['obj'].grid.graph.remove_node(row['obj'])\n log_geno_count += 1\n logger.debug('{} of {} decommissioned generators (single units) removed ({} kW).'\n .format(str(log_geno_count),\n str(len(g_lv)),\n str(round(log_geno_cap, 1))\n )\n )\n\n # ====================================================================================\n # Step 3: LV generators (existing in aggregated units (originally from aggregated LA))\n # ====================================================================================\n g_lv_agg = network.dingo_import_data\n g_lv_agg_existing = g_lv_agg[g_lv_agg['id'].isin(list(generators_lv.index.values))]\n generators_lv_agg_existing = generators_lv[generators_lv.index.isin(list(g_lv_agg_existing['id']))]\n\n # TEMP: BACKUP 1 GENO FOR TESTING\n # temp_geno = g_lv_agg.iloc[0]\n\n g_lv_agg = g_lv_agg[~g_lv_agg.isin(g_lv_agg_existing)].dropna()\n\n log_geno_count = 0\n log_agg_geno_list = []\n log_geno_cap = 0\n for id, row in generators_lv_agg_existing.iterrows():\n\n # check if capacity equals; if not: update capacity off agg. 
geno\n cap_diff = row['electrical_capacity'] - \\\n g_lv_agg_existing[g_lv_agg_existing['id'] == id]['capacity'].iloc[0]\n if abs(cap_diff) < cap_diff_threshold:\n continue\n else:\n agg_geno = g_lv_agg_existing[g_lv_agg_existing['id'] == id]['agg_geno'].iloc[0]\n agg_geno.nominal_capacity += cap_diff\n log_geno_cap += cap_diff\n\n log_geno_count += 1\n log_agg_geno_list.append(agg_geno)\n\n logger.debug('Capacities of {} of {} existing generators (in {} of {} aggregated units) '\n 'updated ({} kW).'\n .format(str(log_geno_count),\n str(len(generators_lv_agg_existing) - log_geno_count),\n str(len(set(log_agg_geno_list))),\n str(len(g_lv_agg_existing['agg_geno'].unique())),\n str(round(log_geno_cap, 1))\n )\n )\n\n # TEMP: INSERT BACKUPPED GENO IN DF FOR TESTING\n # g_lv_agg.loc[len(g_lv_agg)] = temp_geno\n\n # remove decommissioned genos\n # (genos which exist in grid but not in the new dataset)\n log_geno_cap = 0\n if not g_lv_agg.empty and remove_missing:\n log_geno_count = 0\n for _, row in g_lv_agg.iterrows():\n row['agg_geno'].nominal_capacity -= row['capacity']\n log_geno_cap += row['capacity']\n # remove LV geno id from id string of agg. geno\n id = row['agg_geno'].id.split('-')\n ids = id[2].split('_')\n ids.remove(str(int(row['id'])))\n row['agg_geno'].id = '-'.join([id[0], id[1], '_'.join(ids)])\n\n # after removing the LV geno from agg geno, is the agg. 
geno empty?\n # if yes, remove it from grid\n if not ids:\n row['agg_geno'].grid.graph.remove_node(row['agg_geno'])\n\n log_geno_count += 1\n logger.debug('{} of {} decommissioned generators in aggregated generators removed ({} kW).'\n .format(str(log_geno_count),\n str(len(g_lv_agg)),\n str(round(log_geno_cap, 1))\n )\n )\n\n # ====================================================================\n # Step 4: LV generators (new single units + genos in aggregated units)\n # ====================================================================\n # new genos\n log_geno_count =\\\n log_agg_geno_new_count =\\\n log_agg_geno_upd_count = 0\n\n # TEMP: BACKUP 1 GENO FOR TESTING\n #temp_geno = generators_lv[generators_lv.index == g_lv_existing.iloc[0]['id']]\n\n generators_lv_new = generators_lv[~generators_lv.index.isin(list(g_lv_existing['id'])) &\n ~generators_lv.index.isin(list(g_lv_agg_existing['id']))]\n\n # TEMP: INSERT BACKUPPED GENO IN DF FOR TESTING\n #generators_lv_new = generators_lv_new.append(temp_geno)\n\n # dict for new agg. generators\n agg_geno_new = {}\n # get LV grid districts\n lv_grid_dict = _build_lv_grid_dict(network)\n\n # get predefined random seed and initialize random generator\n seed = int(network.config['grid_connection']['random_seed'])\n random.seed(a=seed)\n\n # check if none of new generators can be allocated to an existing LV grid\n if not any([_ in lv_grid_dict.keys()\n for _ in list(generators_lv_new['mvlv_subst_id'])]):\n logger.warning('None of the imported generators can be allocated '\n 'to an existing LV grid. Check compatibility of grid '\n 'and generator datasets.')\n\n # iterate over new (single unit or part of agg. unit) generators and create them\n log_geno_cap = 0\n for id, row in generators_lv_new.iterrows():\n lv_geno_added_to_agg_geno = False\n\n # new unit is part of agg. LA (mvlv_subst_id is different from existing\n # ones in LV grids of non-agg. 
load areas)\n if (row['mvlv_subst_id'] not in lv_grid_dict.keys() and\n row['la_id'] and not isnan(row['la_id']) and\n row['mvlv_subst_id'] and not isnan(row['mvlv_subst_id'])):\n\n # check if new unit can be added to existing agg. generator\n # (LA id, type and subtype match) -> update existing agg. generator.\n # Normally, this case should not occur since `subtype` of new genos\n # is set to a new value (e.g. 'solar')\n for _, agg_row in g_mv_agg.iterrows():\n if (agg_row['la_id'] == int(row['la_id']) and\n agg_row['obj'].type == row['generation_type'] and\n agg_row['obj'].subtype == row['generation_subtype']):\n\n agg_row['obj'].nominal_capacity += row['electrical_capacity']\n agg_row['obj'].id += '_{}'.format(str(id))\n log_agg_geno_upd_count += 1\n lv_geno_added_to_agg_geno = True\n\n if not lv_geno_added_to_agg_geno:\n la_id = int(row['la_id'])\n if la_id not in agg_geno_new:\n agg_geno_new[la_id] = {}\n if row['voltage_level'] not in agg_geno_new[la_id]:\n agg_geno_new[la_id][row['voltage_level']] = {}\n if row['generation_type'] not in agg_geno_new[la_id][row['voltage_level']]:\n agg_geno_new[la_id][row['voltage_level']][row['generation_type']] = {}\n if row['generation_subtype'] not in \\\n agg_geno_new[la_id][row['voltage_level']][row['generation_type']]:\n agg_geno_new[la_id][row['voltage_level']][row['generation_type']]\\\n .update({row['generation_subtype']: {'ids': [int(id)],\n 'capacity': row['electrical_capacity']\n }\n }\n )\n else:\n agg_geno_new[la_id][row['voltage_level']][row['generation_type']] \\\n [row['generation_subtype']]['ids'].append(int(id))\n agg_geno_new[la_id][row['voltage_level']][row['generation_type']] \\\n [row['generation_subtype']]['capacity'] += row['electrical_capacity']\n\n # new generator is a single (non-aggregated) unit\n else:\n # check if geom is available\n geom = _check_geom(id, row)\n\n if row['generation_type'] in ['solar', 'wind']:\n gen = GeneratorFluctuating(\n id=id,\n grid=None,\n 
nominal_capacity=row['electrical_capacity'],\n type=row['generation_type'],\n subtype=row['generation_subtype'],\n v_level=int(row['voltage_level']),\n weather_cell_id=row['w_id'],\n geom=wkt_loads(geom) if geom else geom)\n else:\n gen = Generator(id=id,\n grid=None,\n nominal_capacity=row[\n 'electrical_capacity'],\n type=row['generation_type'],\n subtype=row['generation_subtype'],\n v_level=int(row['voltage_level']),\n geom=wkt_loads(geom) if geom else geom)\n\n # TEMP: REMOVE MVLV SUBST ID FOR TESTING\n #row['mvlv_subst_id'] = None\n\n # check if MV-LV substation id exists. if not, allocate to\n # random one\n lv_grid = _check_mvlv_subst_id(\n generator=gen,\n mvlv_subst_id=row['mvlv_subst_id'],\n lv_grid_dict=lv_grid_dict)\n\n gen.grid = lv_grid\n\n lv_grid.graph.add_node(gen, type='generator')\n\n log_geno_count += 1\n log_geno_cap += row['electrical_capacity']\n\n # there are new agg. generators to be created\n if agg_geno_new:\n\n pfac_mv_gen = network.config['reactive_power_factor']['mv_gen']\n\n # add aggregated generators\n for la_id, val in agg_geno_new.items():\n for v_level, val2 in val.items():\n for type, val3 in val2.items():\n for subtype, val4 in val3.items():\n if type in ['solar', 'wind']:\n gen = GeneratorFluctuating(\n id='agg-' + str(la_id) + '-' + '_'.join([\n str(_) for _ in val4['ids']]),\n grid=network.mv_grid,\n nominal_capacity=val4['capacity'],\n type=type,\n subtype=subtype,\n v_level=4,\n # ToDo: get correct w_id\n weather_cell_id=row['w_id'],\n geom=network.mv_grid.station.geom)\n else:\n gen = Generator(\n id='agg-' + str(la_id) + '-' + '_'.join([\n str(_) for _ in val4['ids']]),\n nominal_capacity=val4['capacity'],\n type=type,\n subtype=subtype,\n geom=network.mv_grid.station.geom,\n grid=network.mv_grid,\n v_level=4)\n\n network.mv_grid.graph.add_node(\n gen, type='generator_aggr')\n\n # select cable type\n line_type, line_count = select_cable(\n network=network,\n level='mv',\n apparent_power=gen.nominal_capacity /\n 
pfac_mv_gen)\n\n # connect generator to MV station\n line = Line(id='line_aggr_generator_la_' + str(la_id) + '_vlevel_{v_level}_'\n '{subtype}'.format(\n v_level=v_level,\n subtype=subtype),\n type=line_type,\n kind='cable',\n quantity=line_count,\n length=1e-3,\n grid=network.mv_grid)\n\n network.mv_grid.graph.add_edge(network.mv_grid.station,\n gen,\n line=line,\n type='line_aggr')\n\n log_agg_geno_new_count += len(val4['ids'])\n log_geno_cap += val4['capacity']\n\n logger.debug('{} of {} new generators added ({} single units, {} to existing '\n 'agg. generators and {} units as new aggregated generators) '\n '(total: {} kW).'\n .format(str(log_geno_count +\n log_agg_geno_new_count +\n log_agg_geno_upd_count),\n str(len(generators_lv_new)),\n str(log_geno_count),\n str(log_agg_geno_upd_count),\n str(log_agg_geno_new_count),\n str(round(log_geno_cap, 1))\n )\n )\n\n def _check_geom(id, row):\n \"\"\"Checks if a valid geom is available in dataset\n\n If yes, this geom will be used.\n If not:\n\n * MV generators: use geom from EnergyMap.\n * LV generators: set geom to None. It is re-set in\n :func:`edisgo.data.import_data._check_mvlv_subst_id`\n to MV-LV station's geom. EnergyMap's geom is not used\n since it is more inaccurate than the station's geom.\n\n Parameters\n ----------\n id : :obj:`int`\n Id of generator\n row : :pandas:`pandas.Series<series>`\n Generator dataset\n\n Returns\n -------\n :shapely:`Shapely Point object<points>` or None\n Geom of generator. 
None, if no geom is available.\n \"\"\"\n\n geom = None\n\n # check if geom is available\n if row['geom']:\n geom = row['geom']\n else:\n # MV generators: set geom to EnergyMap's geom, if available\n if int(row['voltage_level']) in [4,5]:\n # check if original geom from Energy Map is available\n if row['geom_em']:\n geom = row['geom_em']\n logger.debug('Generator {} has no geom entry, EnergyMap\\'s geom entry will be used.'\n .format(id)\n )\n\n return geom\n\n def _check_mvlv_subst_id(generator, mvlv_subst_id, lv_grid_dict):\n \"\"\"Checks if MV-LV substation id of single LV generator is missing or invalid.\n If so, a random one from existing stations in LV grids will be assigned.\n\n Parameters\n ----------\n generator : :class:`~.grid.components.Generator`\n LV generator\n mvlv_subst_id : :obj:`int`\n MV-LV substation id\n lv_grid_dict : :obj:`dict`\n Dict of existing LV grids\n Format: {:obj:`int`: :class:`~.grid.grids.LVGrid`}\n\n Returns\n -------\n :class:`~.grid.grids.LVGrid`\n LV grid of generator\n \"\"\"\n\n if mvlv_subst_id and not isnan(mvlv_subst_id):\n # assume that given LA exists\n try:\n # get LV grid\n lv_grid = lv_grid_dict[mvlv_subst_id]\n\n # if no geom, use geom of station\n if not generator.geom:\n generator.geom = lv_grid.station.geom\n logger.debug('Generator {} has no geom entry, stations\\' geom will be used.'\n .format(generator.id)\n )\n\n return lv_grid\n\n # if LA/LVGD does not exist, choose random LVGD and move generator to station of LVGD\n # this occurs due to exclusion of LA with peak load < 1kW\n except:\n lv_grid = random.choice(list(lv_grid_dict.values()))\n\n generator.geom = lv_grid.station.geom\n\n logger.warning('Generator {} cannot be assigned to '\n 'non-existent LV Grid and was '\n 'allocated to a random LV Grid ({}); '\n 'geom was set to stations\\' geom.'\n .format(repr(generator),\n repr(lv_grid)))\n pass\n return lv_grid\n\n else:\n lv_grid = random.choice(list(lv_grid_dict.values()))\n\n generator.geom = 
lv_grid.station.geom\n\n logger.warning('Generator {} has no mvlv_subst_id and was '\n 'allocated to a random LV Grid ({}); '\n 'geom was set to stations\\' geom.'\n .format(repr(generator),\n repr(lv_grid)))\n pass\n return lv_grid\n\n def _validate_generation():\n \"\"\"Validate generators in updated grids\n\n The validation uses the cumulative capacity of all generators.\n \"\"\"\n # ToDo: Valdate conv. genos too!\n\n # set capacity difference threshold\n cap_diff_threshold = 10 ** -4\n\n capacity_imported = generators_res_mv['electrical_capacity'].sum() + \\\n generators_res_lv['electrical_capacity'].sum() #+ \\\n #generators_conv_mv['capacity'].sum()\n\n capacity_grid = 0\n # MV genos\n for geno in network.mv_grid.generators:\n capacity_grid += geno.nominal_capacity\n\n # LV genos\n for lv_grid in network.mv_grid.lv_grids:\n for geno in lv_grid.generators:\n capacity_grid += geno.nominal_capacity\n\n logger.debug('Cumulative generator capacity (updated): {} kW'\n .format(str(round(capacity_imported, 1)))\n )\n\n if abs(capacity_imported - capacity_grid) > cap_diff_threshold:\n raise ValueError('Cumulative capacity of imported generators ({} kW) '\n 'differ from cumulative capacity of generators '\n 'in updated grid ({} kW) by {} kW.'\n .format(str(round(capacity_imported, 1)),\n str(round(capacity_grid, 1)),\n str(round(capacity_imported - capacity_grid, 1))\n )\n )\n else:\n logger.debug('Cumulative capacity of imported generators validated.')\n\n def _validate_sample_geno_location():\n if all(generators_res_lv['geom'].notnull()) \\\n and all(generators_res_mv['geom'].notnull()) \\\n and not generators_res_lv['geom'].empty \\\n and not generators_res_mv['geom'].empty:\n # get geom of 1 random MV and 1 random LV generator and transform\n sample_mv_geno_geom_shp = transform(proj2equidistant(network),\n wkt_loads(generators_res_mv['geom']\n .dropna()\n .sample(n=1)\n .item())\n )\n sample_lv_geno_geom_shp = transform(proj2equidistant(network),\n 
wkt_loads(generators_res_lv['geom']\n .dropna()\n .sample(n=1)\n .item())\n )\n\n # get geom of MV grid district\n mvgd_geom_shp = transform(proj2equidistant(network),\n network.mv_grid.grid_district['geom']\n )\n\n # check if MVGD contains geno\n if not (mvgd_geom_shp.contains(sample_mv_geno_geom_shp) and\n mvgd_geom_shp.contains(sample_lv_geno_geom_shp)):\n raise ValueError('At least one imported generator is not located '\n 'in the MV grid area. Check compatibility of '\n 'grid and generator datasets.')\n\n # make DB session\n conn = connection(section=network.config['db_connection']['section'])\n Session = sessionmaker(bind=conn)\n session = Session()\n\n srid = int(network.config['geo']['srid'])\n\n oedb_data_source = network.config['data_source']['oedb_data_source']\n scenario = network.generator_scenario\n\n if oedb_data_source == 'model_draft':\n\n # load ORM names\n orm_conv_generators_name = network.config['model_draft']['conv_generators_prefix'] + \\\n scenario + \\\n network.config['model_draft']['conv_generators_suffix']\n orm_re_generators_name = network.config['model_draft']['re_generators_prefix'] + \\\n scenario + \\\n network.config['model_draft']['re_generators_suffix']\n\n # import ORMs\n orm_conv_generators = model_draft.__getattribute__(orm_conv_generators_name)\n orm_re_generators = model_draft.__getattribute__(orm_re_generators_name)\n\n # set dummy version condition (select all generators)\n orm_conv_generators_version = 1 == 1\n orm_re_generators_version = 1 == 1\n\n elif oedb_data_source == 'versioned':\n\n # load ORM names\n orm_conv_generators_name = network.config['versioned']['conv_generators_prefix'] + \\\n scenario + \\\n network.config['versioned']['conv_generators_suffix']\n orm_re_generators_name = network.config['versioned']['re_generators_prefix'] + \\\n scenario + \\\n network.config['versioned']['re_generators_suffix']\n data_version = network.config['versioned']['version']\n\n # import ORMs\n orm_conv_generators = 
supply.__getattribute__(orm_conv_generators_name)\n orm_re_generators = supply.__getattribute__(orm_re_generators_name)\n\n # set version condition\n orm_conv_generators_version = orm_conv_generators.columns.version == data_version\n orm_re_generators_version = orm_re_generators.columns.version == data_version\n\n # Create filter for generation technologies\n # ToDo: This needs to be removed when all generators can be imported\n # (all generators in a scenario should be imported)\n types_condition = orm_re_generators.columns.generation_type.in_(\n ['solar', 'wind'])\n\n # get conventional and renewable generators\n #generators_conv_mv = _import_conv_generators()\n generators_res_mv, generators_res_lv = _import_res_generators(\n types_condition)\n\n #generators_mv = generators_conv_mv.append(generators_res_mv)\n\n _validate_sample_geno_location()\n\n _update_grids(network=network,\n #generators_mv=generators_mv,\n generators_mv=generators_res_mv,\n generators_lv=generators_res_lv)\n\n _validate_generation()\n\n connect_mv_generators(network=network)\n connect_lv_generators(network=network)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nbuild DataFrames with all generators in MV and LV grids", "response": "def _build_generator_list(network):\n \"\"\"Builds DataFrames with all generators in MV and LV grids\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n A DataFrame with id of and reference to MV generators\n :pandas:`pandas.DataFrame<dataframe>`\n A DataFrame with id of and reference to LV generators\n :pandas:`pandas.DataFrame<dataframe>`\n A DataFrame with id of and reference to aggregated LV generators\n \"\"\"\n\n genos_mv = pd.DataFrame(columns=\n ('id', 'obj'))\n genos_lv = pd.DataFrame(columns=\n ('id', 'obj'))\n genos_lv_agg = pd.DataFrame(columns=\n ('la_id', 'id', 'obj'))\n\n # MV genos\n for geno in network.mv_grid.graph.nodes_by_attribute('generator'):\n genos_mv.loc[len(genos_mv)] = [int(geno.id), geno]\n 
for geno in network.mv_grid.graph.nodes_by_attribute('generator_aggr'):\n la_id = int(geno.id.split('-')[1].split('_')[-1])\n genos_lv_agg.loc[len(genos_lv_agg)] = [la_id, geno.id, geno]\n\n # LV genos\n for lv_grid in network.mv_grid.lv_grids:\n for geno in lv_grid.generators:\n genos_lv.loc[len(genos_lv)] = [int(geno.id), geno]\n\n return genos_mv, genos_lv, genos_lv_agg"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nbuilds a dict of LV grids containing LV grids of the specified network.", "response": "def _build_lv_grid_dict(network):\n \"\"\"Creates dict of LV grids\n\n LV grid ids are used as keys, LV grid references as values.\n\n Parameters\n ----------\n network: :class:`~.grid.network.Network`\n The eDisGo container object\n\n Returns\n -------\n :obj:`dict`\n Format: {:obj:`int`: :class:`~.grid.grids.LVGrid`}\n \"\"\"\n\n lv_grid_dict = {}\n for lv_grid in network.mv_grid.lv_grids:\n lv_grid_dict[lv_grid.id] = lv_grid\n return lv_grid_dict"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef import_feedin_timeseries(config_data, weather_cell_ids):\n\n def _retrieve_timeseries_from_oedb(config_data, weather_cell_ids):\n \"\"\"Retrieve time series from oedb\n\n Parameters\n ----------\n config_data : dict\n Dictionary containing config data from config files.\n weather_cell_ids : :obj:`list`\n List of weather cell id's (integers) to obtain feed-in data for.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Feedin time series\n \"\"\"\n if config_data['data_source']['oedb_data_source'] == 'model_draft':\n orm_feedin_name = config_data['model_draft']['res_feedin_data']\n orm_feedin = model_draft.__getattribute__(orm_feedin_name)\n orm_feedin_version = 1 == 1\n else:\n orm_feedin_name = config_data['versioned']['res_feedin_data']\n orm_feedin = supply.__getattribute__(orm_feedin_name)\n orm_feedin_version = orm_feedin.version == 
config_data['versioned']['version']\n\n conn = connection(section=config_data['db_connection']['section'])\n Session = sessionmaker(bind=conn)\n session = Session()\n\n # ToDo: add option to retrieve subset of time series\n # ToDo: find the reference power class for mvgrid/w_id and insert instead of 4\n feedin_sqla = session.query(\n orm_feedin.w_id,\n orm_feedin.source,\n orm_feedin.feedin). \\\n filter(orm_feedin.w_id.in_(weather_cell_ids)). \\\n filter(orm_feedin.power_class.in_([0, 4])). \\\n filter(orm_feedin_version)\n\n feedin = pd.read_sql_query(feedin_sqla.statement,\n session.bind,\n index_col=['source', 'w_id'])\n\n feedin.sort_index(axis=0, inplace=True)\n\n timeindex = pd.date_range('1/1/2011', periods=8760, freq='H')\n\n recasted_feedin_dict = {}\n for type_w_id in feedin.index:\n recasted_feedin_dict[type_w_id] = feedin.loc[\n type_w_id, :].values[0]\n\n feedin = pd.DataFrame(recasted_feedin_dict, index=timeindex)\n\n # rename 'wind_onshore' and 'wind_offshore' to 'wind'\n new_level = [_ if _ not in ['wind_onshore']\n else 'wind' for _ in feedin.columns.levels[0]]\n feedin.columns.set_levels(new_level, level=0, inplace=True)\n\n feedin.columns.rename('type', level=0, inplace=True)\n feedin.columns.rename('weather_cell_id', level=1, inplace=True)\n\n return feedin\n\n feedin = _retrieve_timeseries_from_oedb(config_data, weather_cell_ids)\n\n return feedin", "response": "Imports RES feed - in time series data and processes it."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nimport load time series from oedb.", "response": "def import_load_timeseries(config_data, data_source, mv_grid_id=None,\n year=None):\n \"\"\"\n Import load time series\n\n Parameters\n ----------\n config_data : dict\n Dictionary containing config data from config files.\n data_source : str\n Specify type of data source. 
Available data sources are\n\n * 'demandlib'\n Determine a load time series with the use of the demandlib.\n This calculates standard load profiles for 4 different sectors.\n\n mv_grid_id : :obj:`str`\n MV grid ID as used in oedb. Provide this if `data_source` is 'oedb'.\n Default: None.\n year : int\n Year for which to generate load time series. Provide this if\n `data_source` is 'demandlib'. Default: None.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Load time series\n\n \"\"\"\n\n def _import_load_timeseries_from_oedb(config_data, mv_grid_id):\n \"\"\"\n Retrieve load time series from oedb\n\n Parameters\n ----------\n config_data : dict\n Dictionary containing config data from config files.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Load time series\n\n Notes\n ------\n This is currently not a valid option to retrieve load time series\n since time series in the oedb are not differentiated by sector. An\n issue concerning this has been created.\n\n \"\"\"\n\n if config_data['versioned']['version'] == 'model_draft':\n orm_load_name = config_data['model_draft']['load_data']\n orm_load = model_draft.__getattribute__(orm_load_name)\n orm_load_areas_name = config_data['model_draft']['load_areas']\n orm_load_areas = model_draft.__getattribute__(orm_load_areas_name)\n orm_load_version = 1 == 1\n else:\n orm_load_name = config_data['versioned']['load_data']\n # orm_load = supply.__getattribute__(orm_load_name)\n # ToDo: remove workaround\n orm_load = model_draft.__getattribute__(orm_load_name)\n # orm_load_version = orm_load.version == config.data['versioned']['version']\n\n orm_load_areas_name = config_data['versioned']['load_areas']\n # orm_load_areas = supply.__getattribute__(orm_load_areas_name)\n # ToDo: remove workaround\n orm_load_areas = model_draft.__getattribute__(orm_load_areas_name)\n # orm_load_areas_version = orm_load.version == config.data['versioned']['version']\n\n orm_load_version = 1 == 1\n\n conn = 
connection(section=config_data['db_connection']['section'])\n Session = sessionmaker(bind=conn)\n session = Session()\n\n load_sqla = session.query( # orm_load.id,\n orm_load.p_set,\n orm_load.q_set,\n orm_load_areas.subst_id). \\\n join(orm_load_areas, orm_load.id == orm_load_areas.otg_id). \\\n filter(orm_load_areas.subst_id == mv_grid_id). \\\n filter(orm_load_version). \\\n distinct()\n\n load = pd.read_sql_query(load_sqla.statement,\n session.bind,\n index_col='subst_id')\n return load\n\n def _load_timeseries_demandlib(config_data, year):\n \"\"\"\n Get normalized sectoral load time series\n\n Time series are normalized to 1 kWh consumption per year\n\n Parameters\n ----------\n config_data : dict\n Dictionary containing config data from config files.\n year : int\n Year for which to generate load time series.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Load time series\n\n \"\"\"\n\n sectoral_consumption = {'h0': 1, 'g0': 1, 'i0': 1, 'l0': 1}\n\n cal = Germany()\n holidays = dict(cal.holidays(year))\n\n e_slp = bdew.ElecSlp(year, holidays=holidays)\n\n # multiply given annual demand with timeseries\n elec_demand = e_slp.get_profile(sectoral_consumption)\n\n # Add the slp for the industrial group\n ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,\n holidays=holidays)\n\n # Beginning and end of workday, weekdays and weekend days, and scaling\n # factors by default\n elec_demand['i0'] = ilp.simple_profile(\n sectoral_consumption['i0'],\n am=datetime.time(config_data['demandlib']['day_start'].hour,\n config_data['demandlib']['day_start'].minute, 0),\n pm=datetime.time(config_data['demandlib']['day_end'].hour,\n config_data['demandlib']['day_end'].minute, 0),\n profile_factors=\n {'week': {'day': config_data['demandlib']['week_day'],\n 'night': config_data['demandlib']['week_night']},\n 'weekend': {'day': config_data['demandlib']['weekend_day'],\n 'night': config_data['demandlib']['weekend_night']}})\n\n # Resample 15-minute values 
to hourly values and sum across sectors\n elec_demand = elec_demand.resample('H').mean()\n\n return elec_demand\n\n if data_source == 'oedb':\n load = _import_load_timeseries_from_oedb(config_data, mv_grid_id)\n elif data_source == 'demandlib':\n load = _load_timeseries_demandlib(config_data, year)\n load.rename(columns={'g0': 'retail', 'h0': 'residential',\n 'l0': 'agricultural', 'i0': 'industrial'},\n inplace=True)\n return load"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nformulating and solves linear problem to find linear relation between curtailment and node voltage. Parameters ------------ feedin : :pandas:`pandas.DataFrame<dataframe>` See `feedin` parameter in :func:`edisgo.flex_opt.curtailment.voltage_based` for more information. voltage_pu : :pandas:`pandas.DataFrame<dataframe> Dataframe containing voltages in p.u. at the generator nodes. Index of the dataframe is a :pandas:`pandas.DatetimeIndex<datetimeindex>`, columns are the generator representatives. total_curtailment : :pandas:`pandas.Series<series>` Series containing the specific curtailment in kW to be allocated to the generators. The index is a :pandas:`pandas.DatetimeIndex<datetimeindex>`. voltage_threshold : :pandas:`pandas.Series<series>` Series containing the voltage thresholds in p.u. below which no generator curtailment will occur. The index is a :pandas:`pandas.DatetimeIndex<datetimeindex>`. solver : :obj:`str` The solver used to optimize the linear problem. Default: 'cbc'. Returns ------- :pandas:`pandas:DataFrame<dataframe>` Dataframe containing the curtailment in kW per generator and time step feed-in was provided for in `feedin` parameter. 
Index is a :pandas:`pandas.DatetimeIndex<datetimeindex>`, columns are the generator representatives.", "response": "def _optimize_voltage_based_curtailment(feedin, voltage_pu, total_curtailment,\n voltage_threshold, timeindex, solver):\n \"\"\"\n Formulates and solves linear problem to find linear relation between\n curtailment and node voltage.\n\n Parameters\n ------------\n feedin : :pandas:`pandas.DataFrame<dataframe>`\n See `feedin` parameter in\n :func:`edisgo.flex_opt.curtailment.voltage_based` for more information.\n voltage_pu : :pandas:`pandas.DataFrame<dataframe>\n Dataframe containing voltages in p.u. at the generator nodes. Index\n of the dataframe is a :pandas:`pandas.DatetimeIndex<datetimeindex>`,\n columns are the generator representatives.\n total_curtailment : :pandas:`pandas.Series<series>`\n Series containing the specific curtailment in kW to be allocated to the\n generators. The index is a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`.\n voltage_threshold : :pandas:`pandas.Series<series>`\n Series containing the voltage thresholds in p.u. below which no\n generator curtailment will occur. The index is a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`.\n solver : :obj:`str`\n The solver used to optimize the linear problem. Default: 'cbc'.\n\n Returns\n -------\n :pandas:`pandas:DataFrame<dataframe>`\n Dataframe containing the curtailment in kW per generator and time step\n feed-in was provided for in `feedin` parameter. 
Index is a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`, columns are the\n generator representatives.\n\n \"\"\"\n\n logging.debug(\"Start curtailment optimization.\")\n \n v_max = voltage_pu.max(axis=1)\n generators = feedin.columns\n\n # additional curtailment factors\n cf_add = pd.DataFrame(index=timeindex)\n for gen in generators:\n cf_add[gen] = abs(\n (voltage_pu.loc[timeindex, gen] - v_max[timeindex]) / (\n voltage_threshold[timeindex] - v_max[timeindex]))\n\n # curtailment factors\n cf = pd.DataFrame(index=timeindex)\n for gen in generators:\n cf[gen] = abs(\n (voltage_pu.loc[timeindex, gen] - voltage_threshold[timeindex]) / (\n v_max[timeindex] - voltage_threshold[timeindex]))\n\n # initialize model\n model = ConcreteModel()\n\n # add sets\n model.T = Set(initialize=timeindex)\n model.G = Set(initialize=generators)\n\n # add parameters\n def feedin_init(model, t, g):\n return feedin.loc[t, g]\n\n model.feedin = Param(model.T, model.G, initialize=feedin_init)\n\n def voltage_pu_init(model, t, g):\n return voltage_pu.loc[t, g]\n\n model.voltage_pu = Param(model.T, model.G, initialize=voltage_pu_init)\n\n def cf_add_init(model, t, g):\n return cf_add.loc[t, g]\n\n model.cf_add = Param(model.T, model.G, initialize=cf_add_init)\n\n def cf_init(model, t, g):\n return cf.loc[t, g]\n\n model.cf = Param(model.T, model.G, initialize=cf_init)\n\n def total_curtailment_init(model, t):\n return total_curtailment.loc[t]\n\n model.total_curtailment = Param(model.T, initialize=total_curtailment_init)\n\n # add variables\n model.offset = Var(model.T, bounds=(0, 1))\n model.cf_max = Var(model.T, bounds=(0, 1))\n\n def curtailment_init(model, t, g):\n return (0, feedin.loc[t, g])\n\n model.c = Var(model.T, model.G, bounds=curtailment_init)\n\n # add objective\n def obj_rule(model):\n expr = (sum(model.offset[t] * 100\n for t in model.T))\n return expr\n\n model.obj = Objective(rule=obj_rule, sense=minimize)\n\n # add constraints\n # curtailment per generator constraints\n 
def curtail(model, t, g):\n return (\n model.cf[t, g] * model.cf_max[t] * model.feedin[t, g] + model.cf_add[\n t, g] * model.offset[t] * model.feedin[t, g] - model.c[t, g] == 0)\n\n model.curtailment = Constraint(model.T, model.G, rule=curtail)\n\n # total curtailment constraint\n def total_curtailment(model, t):\n return (\n sum(model.c[t, g] for g in model.G) == model.total_curtailment[t])\n\n model.sum_curtailment = Constraint(model.T, rule=total_curtailment)\n\n # solve\n solver = SolverFactory(solver)\n results = solver.solve(model, tee=False)\n\n # load results back into model\n model.solutions.load_from(results)\n\n return pd.DataFrame({g: [model.c[t, g].value for t in model.T]\n for g in model.G}, index=model.T.value)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nimplementing curtailment methodology 'feedin-proportional'. The curtailment that has to be met in each time step is allocated equally to all generators depending on their share of total feed-in in that time step. Parameters ---------- feedin : :pandas:`pandas.DataFrame<dataframe>` Dataframe holding the feed-in of each generator in kW for the technology (and weather cell) specified in `curtailment_key` parameter. Index of the dataframe is a :pandas:`pandas.DatetimeIndex<datetimeindex>`. Columns are the representatives of the fluctuating generators. generators : :pandas:`pandas.DataFrame<dataframe>` Dataframe with all generators of the type (and in weather cell) specified in `curtailment_key` parameter. See return value of :func:`edisgo.grid.tools.get_gen_info` for more information. curtailment_timeseries : :pandas:`pandas.Series<series>` The curtailment in kW to be distributed amongst the generators in `generators` parameter. Index of the series is a :pandas:`pandas.DatetimeIndex<datetimeindex>`. 
edisgo : :class:`edisgo.grid.network.EDisGo` curtailment_key::obj:`str` or :obj:`tuple` with :obj:`str` The technology and weather cell ID if :obj:`tuple` or only the technology if :obj:`str` the curtailment is specified for.", "response": "def feedin_proportional(feedin, generators, curtailment_timeseries, edisgo,\n curtailment_key, **kwargs):\n \"\"\"\n Implements curtailment methodology 'feedin-proportional'.\n\n The curtailment that has to be met in each time step is allocated\n equally to all generators depending on their share of total\n feed-in in that time step.\n\n Parameters\n ----------\n feedin : :pandas:`pandas.DataFrame<dataframe>`\n Dataframe holding the feed-in of each generator in kW for the\n technology (and weather cell) specified in `curtailment_key` parameter.\n Index of the dataframe is a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`. Columns are the\n representatives of the fluctuating generators.\n generators : :pandas:`pandas.DataFrame<dataframe>`\n Dataframe with all generators of the type (and in weather cell)\n specified in `curtailment_key` parameter. See return value of\n :func:`edisgo.grid.tools.get_gen_info` for more information.\n curtailment_timeseries : :pandas:`pandas.Series<series>`\n The curtailment in kW to be distributed amongst the generators in\n `generators` parameter. Index of the series is a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`.\n edisgo : :class:`edisgo.grid.network.EDisGo`\n curtailment_key::obj:`str` or :obj:`tuple` with :obj:`str`\n The technology and weather cell ID if :obj:`tuple` or only\n the technology if :obj:`str` the curtailment is specified for.\n\n \"\"\"\n # calculate curtailment in each time step of each generator\n curtailment = feedin.divide(feedin.sum(axis=1), axis=0). 
\\\n multiply(curtailment_timeseries, axis=0)\n\n # substitute NaNs from division with 0 by 0\n curtailment.fillna(0, inplace=True)\n\n # check if curtailment target was met\n _check_curtailment_target(curtailment, curtailment_timeseries,\n curtailment_key)\n\n # assign curtailment to individual generators\n _assign_curtailment(curtailment, edisgo, generators, curtailment_key)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _check_curtailment_target(curtailment, curtailment_target,\n curtailment_key):\n \"\"\"\n Raises an error if curtailment target was not met in any time step.\n\n Parameters\n -----------\n curtailment : :pandas:`pandas:DataFrame<dataframe>`\n Dataframe containing the curtailment in kW per generator and time step.\n Index is a :pandas:`pandas.DatetimeIndex<datetimeindex>`, columns are\n the generator representatives.\n curtailment_target : :pandas:`pandas.Series<series>`\n The curtailment in kW that was to be distributed amongst the\n generators. 
Index of the series is a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`.\n curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str`\n The technology and weather cell ID if :obj:`tuple` or only\n the technology if :obj:`str` the curtailment was specified for.\n\n \"\"\"\n if not (abs(curtailment.sum(axis=1) - curtailment_target) < 1e-1).all():\n message = 'Curtailment target not met for {}.'.format(curtailment_key)\n logging.error(message)\n raise TypeError(message)", "response": "Checks that the curtailment target is met in any time step."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _assign_curtailment(curtailment, edisgo, generators, curtailment_key):\n\n gen_object_list = []\n for gen in curtailment.columns:\n # get generator object from representative\n gen_object = generators.loc[generators.gen_repr == gen].index[0]\n # assign curtailment to individual generators\n gen_object.curtailment = curtailment.loc[:, gen]\n gen_object_list.append(gen_object)\n\n # set timeseries.curtailment\n if edisgo.network.timeseries._curtailment:\n edisgo.network.timeseries._curtailment.extend(gen_object_list)\n edisgo.network.results._curtailment[curtailment_key] = \\\n gen_object_list\n else:\n edisgo.network.timeseries._curtailment = gen_object_list\n # list needs to be copied, otherwise it will be extended every time\n # a new key is added to results._curtailment\n edisgo.network.results._curtailment = \\\n {curtailment_key: gen_object_list.copy()}", "response": "Private function to assign curtailment to individual generators and time series objects."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nextending distribution substation overloading issues.", "response": "def extend_distribution_substation_overloading(network, critical_stations):\n \"\"\"\n Reinforce MV/LV substations due to overloading issues.\n\n In a first step a parallel transformer of the same kind is 
installed.\n If this is not sufficient as many standard transformers as needed are\n installed.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n critical_stations : :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded MV/LV stations, their apparent power\n at maximal over-loading and the corresponding time step.\n Index of the dataframe are the over-loaded stations of type\n :class:`~.grid.components.LVStation`. Columns are 's_pfa'\n containing the apparent power at maximal over-loading as float and\n 'time_index' containing the corresponding time step the over-loading\n occured in as :pandas:`pandas.Timestamp<timestamp>`. See\n :func:`~.flex_opt.check_tech_constraints.mv_lv_station_load` for more\n information.\n\n Returns\n -------\n dict\n Dictionary with lists of added and removed transformers.\n\n \"\"\"\n\n # get parameters for standard transformer\n try:\n standard_transformer = network.equipment_data['lv_trafos'].loc[\n network.config['grid_expansion_standard_equipment'][\n 'mv_lv_transformer']]\n except KeyError:\n print('Standard MV/LV transformer is not in equipment list.')\n\n transformers_changes = {'added': {}, 'removed': {}}\n for station in critical_stations.index:\n\n # list of maximum power of each transformer in the station\n s_max_per_trafo = [_.type.S_nom for _ in station.transformers]\n\n # maximum station load from power flow analysis\n s_station_pfa = critical_stations.s_pfa[station]\n\n # determine missing transformer power to solve overloading issue\n case = network.timeseries.timesteps_load_feedin_case.case[\n critical_stations.time_index[station]]\n load_factor = network.config['grid_expansion_load_factors'][\n 'lv_{}_transformer'.format(case)]\n s_trafo_missing = s_station_pfa - (sum(s_max_per_trafo) * load_factor)\n\n # check if second transformer of the same kind is sufficient\n # if true install second transformer, otherwise install as many\n # standard transformers as needed\n if 
max(s_max_per_trafo) >= s_trafo_missing:\n # if station has more than one transformer install a new\n # transformer of the same kind as the transformer that best\n # meets the missing power demand\n duplicated_transformer = min(\n [_ for _ in station.transformers\n if _.type.S_nom > s_trafo_missing],\n key=lambda j: j.type.S_nom - s_trafo_missing)\n\n new_transformer = Transformer(\n id='LVStation_{}_transformer_{}'.format(\n str(station.id), str(len(station.transformers) + 1)),\n geom=duplicated_transformer.geom,\n mv_grid=duplicated_transformer.mv_grid,\n grid=duplicated_transformer.grid,\n voltage_op=duplicated_transformer.voltage_op,\n type=copy.deepcopy(duplicated_transformer.type))\n\n # add transformer to station and return value\n station.add_transformer(new_transformer)\n transformers_changes['added'][station] = [new_transformer]\n\n else:\n # get any transformer to get attributes for new transformer from\n station_transformer = station.transformers[0]\n\n # calculate how many parallel standard transformers are needed\n number_transformers = math.ceil(\n s_station_pfa / standard_transformer.S_nom)\n\n # add transformer to station\n new_transformers = []\n for i in range(number_transformers):\n new_transformer = Transformer(\n id='LVStation_{}_transformer_{}'.format(\n str(station.id), str(i + 1)),\n geom=station_transformer.geom,\n mv_grid=station_transformer.mv_grid,\n grid=station_transformer.grid,\n voltage_op=station_transformer.voltage_op,\n type=copy.deepcopy(standard_transformer))\n new_transformers.append(new_transformer)\n transformers_changes['added'][station] = new_transformers\n transformers_changes['removed'][station] = station.transformers\n station.transformers = new_transformers\n return transformers_changes"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef extend_distribution_substation_overvoltage(network, critical_stations):\n\n # get parameters for standard transformer\n try:\n 
standard_transformer = network.equipment_data['lv_trafos'].loc[\n network.config['grid_expansion_standard_equipment'][\n 'mv_lv_transformer']]\n except KeyError:\n print('Standard MV/LV transformer is not in equipment list.')\n\n transformers_changes = {'added': {}}\n for grid in critical_stations.keys():\n\n # get any transformer to get attributes for new transformer from\n station_transformer = grid.station.transformers[0]\n\n new_transformer = Transformer(\n id='LVStation_{}_transformer_{}'.format(\n str(grid.station.id), str(len(grid.station.transformers) + 1)),\n geom=station_transformer.geom,\n mv_grid=station_transformer.mv_grid,\n grid=station_transformer.grid,\n voltage_op=station_transformer.voltage_op,\n type=copy.deepcopy(standard_transformer))\n\n # add standard transformer to station and return value\n grid.station.add_transformer(new_transformer)\n transformers_changes['added'][grid.station] = [new_transformer]\n\n if transformers_changes['added']:\n logger.debug(\"==> {} LV station(s) has/have been reinforced \".format(\n str(len(transformers_changes['added']))) +\n \"due to overloading issues.\")\n\n return transformers_changes", "response": "Extend distribution substation with standard transformers that is installed for over - voltage issues."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef reinforce_branches_overvoltage(network, grid, crit_nodes):\n\n # load standard line data\n if isinstance(grid, LVGrid):\n try:\n standard_line = network.equipment_data['lv_cables'].loc[\n network.config['grid_expansion_standard_equipment']['lv_line']]\n except KeyError:\n print('Chosen standard LV line is not in equipment list.')\n else:\n try:\n standard_line = network.equipment_data['mv_cables'].loc[\n network.config['grid_expansion_standard_equipment']['mv_line']]\n except KeyError:\n print('Chosen standard MV line is not in equipment list.')\n\n # find first nodes of every main line as 
representatives\n rep_main_line = list(\n nx.predecessor(grid.graph, grid.station, cutoff=1).keys())\n # list containing all representatives of main lines that have already been\n # reinforced\n main_line_reinforced = []\n\n lines_changes = {}\n for node in crit_nodes.index:\n path = nx.shortest_path(grid.graph, grid.station, node)\n # raise exception if voltage issue occurs at station's secondary side\n # because voltage issues should have been solved during extension of\n # distribution substations due to overvoltage issues.\n if len(path) == 1:\n logging.error(\"Voltage issues at busbar in LV grid {} should have \"\n \"been solved in previous steps.\".format(grid))\n else:\n # check if representative of line is already in list\n # main_line_reinforced; if it is, the main line the critical node\n # is connected to has already been reinforced in this iteration\n # step\n if not path[1] in main_line_reinforced:\n\n main_line_reinforced.append(path[1])\n # get path length from station to critical node\n get_weight = lambda u, v, data: data['line'].length\n path_length = dijkstra_shortest_path_length(\n grid.graph, grid.station, get_weight, target=node)\n # find first node in path that exceeds 2/3 of the line length\n # from station to critical node farthest away from the station\n node_2_3 = next(j for j in path if\n path_length[j] >= path_length[node] * 2 / 3)\n\n # if LVGrid: check if node_2_3 is outside of a house\n # and if not find next BranchTee outside the house\n if isinstance(grid, LVGrid):\n if isinstance(node_2_3, BranchTee):\n if node_2_3.in_building:\n # ToDo more generic (new function)\n try:\n node_2_3 = path[path.index(node_2_3) - 1]\n except IndexError:\n print('BranchTee outside of building is not ' +\n 'in path.')\n elif (isinstance(node_2_3, Generator) or\n isinstance(node_2_3, Load)):\n pred_node = path[path.index(node_2_3) - 1]\n if isinstance(pred_node, BranchTee):\n if pred_node.in_building:\n # ToDo more generic (new function)\n try:\n 
node_2_3 = path[path.index(node_2_3) - 2]\n except IndexError:\n print('BranchTee outside of building is ' +\n 'not in path.')\n else:\n logging.error(\"Not implemented for {}.\".format(\n str(type(node_2_3))))\n # if MVGrid: check if node_2_3 is LV station and if not find\n # next LV station\n else:\n if not isinstance(node_2_3, LVStation):\n next_index = path.index(node_2_3) + 1\n try:\n # try to find LVStation behind node_2_3\n while not isinstance(node_2_3, LVStation):\n node_2_3 = path[next_index]\n next_index += 1\n except IndexError:\n # if no LVStation between node_2_3 and node with\n # voltage problem, connect node directly to\n # MVStation\n node_2_3 = node\n\n # if node_2_3 is a representative (meaning it is already\n # directly connected to the station), line cannot be\n # disconnected and must therefore be reinforced\n if node_2_3 in rep_main_line:\n crit_line = grid.graph.get_edge_data(\n grid.station, node_2_3)['line']\n\n # if critical line is already a standard line install one\n # more parallel line\n if crit_line.type.name == standard_line.name:\n crit_line.quantity += 1\n lines_changes[crit_line] = 1\n\n # if critical line is not yet a standard line replace old\n # line by a standard line\n else:\n # number of parallel standard lines could be calculated\n # following [2] p.103; for now number of parallel\n # standard lines is iterated\n crit_line.type = standard_line.copy()\n crit_line.quantity = 1\n crit_line.kind = 'cable'\n lines_changes[crit_line] = 1\n\n # if node_2_3 is not a representative, disconnect line\n else:\n # get line between node_2_3 and predecessor node (that is\n # closer to the station)\n pred_node = path[path.index(node_2_3) - 1]\n crit_line = grid.graph.get_edge_data(\n node_2_3, pred_node)['line']\n # add new edge between node_2_3 and station\n new_line_data = {'line': crit_line,\n 'type': 'line'}\n grid.graph.add_edge(grid.station, node_2_3, new_line_data)\n # remove old edge\n grid.graph.remove_edge(pred_node, 
node_2_3)\n # change line length and type\n crit_line.length = path_length[node_2_3]\n crit_line.type = standard_line.copy()\n crit_line.kind = 'cable'\n crit_line.quantity = 1\n lines_changes[crit_line] = 1\n # add node_2_3 to representatives list to not further\n # reinforce this part off the grid in this iteration step\n rep_main_line.append(node_2_3)\n main_line_reinforced.append(node_2_3)\n\n else:\n logger.debug(\n '==> Main line of node {} in grid {} '.format(\n repr(node), str(grid)) +\n 'has already been reinforced.')\n\n if main_line_reinforced:\n logger.debug('==> {} branche(s) was/were reinforced '.format(\n str(len(lines_changes))) + 'due to over-voltage issues.')\n\n return lines_changes", "response": "Reinforce MV and LV grids with over - voltage issues."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef reinforce_branches_overloading(network, crit_lines):\n\n # load standard line data\n try:\n standard_line_lv = network.equipment_data['lv_cables'].loc[\n network.config['grid_expansion_standard_equipment']['lv_line']]\n except KeyError:\n print('Chosen standard LV line is not in equipment list.')\n try:\n standard_line_mv = network.equipment_data['mv_cables'].loc[\n network.config['grid_expansion_standard_equipment']['mv_line']]\n except KeyError:\n print('Chosen standard MV line is not in equipment list.')\n\n lines_changes = {}\n for crit_line in crit_lines.index:\n rel_overload = crit_lines.max_rel_overload[crit_line]\n # check if line is in LV or MV and set standard line accordingly\n if isinstance(crit_line.grid, LVGrid):\n standard_line = standard_line_lv\n else:\n standard_line = standard_line_mv\n\n if crit_line.type.name == standard_line.name:\n # check how many parallel standard lines are needed\n number_parallel_lines = math.ceil(\n rel_overload * crit_line.quantity)\n lines_changes[crit_line] = (number_parallel_lines -\n crit_line.quantity)\n crit_line.quantity = number_parallel_lines\n 
else:\n # check if parallel line of the same kind is sufficient\n if (crit_line.quantity == 1 and rel_overload <= 2\n and crit_line.kind == 'cable'):\n crit_line.quantity = 2\n lines_changes[crit_line] = 1\n else:\n number_parallel_lines = math.ceil(\n crit_line.type['I_max_th'] * rel_overload /\n standard_line['I_max_th'])\n lines_changes[crit_line] = number_parallel_lines\n crit_line.type = standard_line.copy()\n crit_line.quantity = number_parallel_lines\n crit_line.kind = 'cable'\n\n if not crit_lines.empty:\n logger.debug('==> {} branche(s) was/were reinforced '.format(\n crit_lines.shape[0]) + 'due to over-loading issues.')\n\n return lines_changes", "response": "Reinforce MV or LV grid due to overloading."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nfunctions to create histogram for a given set of data.", "response": "def histogram(data, **kwargs):\n \"\"\"\n Function to create histogram, e.g. for voltages or currents.\n\n Parameters\n ----------\n data : :pandas:`pandas.DataFrame<dataframe>`\n Data to be plotted, e.g. voltage or current (`v_res` or `i_res` from\n :class:`edisgo.grid.network.Results`). Index of the dataframe must be\n a :pandas:`pandas.DatetimeIndex<datetimeindex>`.\n timeindex : :pandas:`pandas.Timestamp<timestamp>` or None, optional\n Specifies time step histogram is plotted for. If timeindex is None all\n time steps provided in dataframe are used. Default: None.\n directory : :obj:`str` or None, optional\n Path to directory the plot is saved to. Is created if it does not\n exist. Default: None.\n filename : :obj:`str` or None, optional\n Filename the plot is saved as. File format is specified by ending. If\n filename is None, the plot is shown. Default: None.\n color : :obj:`str` or None, optional\n Color used in plot. If None it defaults to blue. Default: None.\n alpha : :obj:`float`, optional\n Transparency of the plot. 
Must be a number between 0 and 1,\n where 0 is see through and 1 is opaque. Default: 1.\n title : :obj:`str` or None, optional\n Plot title. Default: None.\n x_label : :obj:`str`, optional\n Label for x-axis. Default: \"\".\n y_label : :obj:`str`, optional\n Label for y-axis. Default: \"\".\n normed : :obj:`bool`, optional\n Defines if histogram is normed. Default: False.\n x_limits : :obj:`tuple` or None, optional\n Tuple with x-axis limits. First entry is the minimum and second entry\n the maximum value. Default: None.\n y_limits : :obj:`tuple` or None, optional\n Tuple with y-axis limits. First entry is the minimum and second entry\n the maximum value. Default: None.\n fig_size : :obj:`str` or :obj:`tuple`, optional\n Size of the figure in inches or a string with the following options:\n * 'a4portrait'\n * 'a4landscape'\n * 'a5portrait'\n * 'a5landscape'\n\n Default: 'a5landscape'.\n binwidth : :obj:`float`\n Width of bins. Default: None.\n\n \"\"\"\n timeindex = kwargs.get('timeindex', None)\n directory = kwargs.get('directory', None)\n filename = kwargs.get('filename', None)\n title = kwargs.get('title', \"\")\n x_label = kwargs.get('x_label', \"\")\n y_label = kwargs.get('y_label', \"\")\n\n color = kwargs.get('color', None)\n alpha = kwargs.get('alpha', 1)\n normed = kwargs.get('normed', False)\n\n x_limits = kwargs.get('x_limits', None)\n y_limits = kwargs.get('y_limits', None)\n binwidth = kwargs.get('binwidth', None)\n\n fig_size = kwargs.get('fig_size', 'a5landscape')\n standard_sizes = {'a4portrait': (8.27, 11.69),\n 'a4landscape': (11.69, 8.27),\n 'a5portrait': (5.8, 8.3),\n 'a5landscape': (8.3, 5.8)}\n try:\n fig_size = standard_sizes[fig_size]\n except:\n fig_size = standard_sizes['a5landscape']\n\n if timeindex is not None:\n plot_data = data.loc[timeindex, :]\n else:\n plot_data = data.T.stack()\n\n if binwidth is not None:\n if x_limits is not None:\n lowerlimit = x_limits[0] - binwidth / 2\n upperlimit = x_limits[1] + binwidth / 2\n else:\n 
lowerlimit = plot_data.min() - binwidth / 2\n upperlimit = plot_data.max() + binwidth / 2\n bins = np.arange(lowerlimit, upperlimit, binwidth)\n else:\n bins = 10\n\n plt.figure(figsize=fig_size)\n ax = plot_data.hist(\n normed=normed, color=color, alpha=alpha, bins=bins, grid=True)\n plt.minorticks_on()\n\n if x_limits is not None:\n ax.set_xlim(x_limits[0], x_limits[1])\n if y_limits is not None:\n ax.set_ylim(y_limits[0], y_limits[1])\n if title is not None:\n plt.title(title)\n plt.xlabel(x_label)\n plt.ylabel(y_label)\n\n if filename is None:\n plt.show()\n else:\n if directory is not None:\n os.makedirs(directory, exist_ok=True)\n filename = os.path.join(directory, filename)\n plt.savefig(filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nadd basemap to a plot.", "response": "def add_basemap(ax, zoom=12):\n \"\"\"\n Adds map to a plot.\n\n \"\"\"\n url = ctx.sources.ST_TONER_LITE\n xmin, xmax, ymin, ymax = ax.axis()\n basemap, extent = ctx.bounds2img(xmin, ymin, xmax, ymax,\n zoom=zoom, url=url)\n ax.imshow(basemap, extent=extent, interpolation='bilinear')\n # restore original x/y limits\n ax.axis((xmin, xmax, ymin, ymax))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting MV grid district polygon from oedb for plotting.", "response": "def get_grid_district_polygon(config, subst_id=None, projection=4326):\n \"\"\"\n Get MV grid district polygon from oedb for plotting.\n\n \"\"\"\n\n # make DB session\n conn = connection(section=config['db_connection']['section'])\n Session = sessionmaker(bind=conn)\n session = Session()\n\n # get polygon from versioned schema\n if config['data_source']['oedb_data_source'] == 'versioned':\n\n version = config['versioned']['version']\n query = session.query(EgoDpMvGriddistrict.subst_id,\n EgoDpMvGriddistrict.geom)\n Regions = [(subst_id, shape.to_shape(geom)) for subst_id, geom in\n query.filter(EgoDpMvGriddistrict.version == version,\n 
EgoDpMvGriddistrict.subst_id == subst_id).all()\n ]\n\n # get polygon from model_draft\n else:\n query = session.query(EgoGridMvGriddistrict.subst_id,\n EgoGridMvGriddistrict.geom)\n Regions = [(subst_id, shape.to_shape(geom)) for subst_id, geom in\n query.filter(EgoGridMvGriddistrict.subst_id.in_(\n subst_id)).all()]\n\n crs = {'init': 'epsg:3035'}\n region = gpd.GeoDataFrame(\n Regions, columns=['subst_id', 'geometry'], crs=crs)\n region = region.to_crs(epsg=projection)\n\n return region"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef mv_grid_topology(pypsa_network, configs, timestep=None,\n line_color=None, node_color=None,\n line_load=None, grid_expansion_costs=None,\n filename=None, arrows=False,\n grid_district_geom=True, background_map=True,\n voltage=None, limits_cb_lines=None, limits_cb_nodes=None,\n xlim=None, ylim=None, lines_cmap='inferno_r',\n title='', scaling_factor_line_width=None):\n \"\"\"\n Plot line loading as color on lines.\n\n Displays line loading relative to nominal capacity.\n\n Parameters\n ----------\n pypsa_network : :pypsa:`pypsa.Network<network>`\n configs : :obj:`dict`\n Dictionary with used configurations from config files. See\n :class:`~.grid.network.Config` for more information.\n timestep : :pandas:`pandas.Timestamp<timestamp>`\n Time step to plot analysis results for. If `timestep` is None maximum\n line load and if given, maximum voltage deviation, is used. In that\n case arrows cannot be drawn. Default: None.\n line_color : :obj:`str`\n Defines whereby to choose line colors (and implicitly size). Possible\n options are:\n\n * 'loading'\n Line color is set according to loading of the line. Loading of MV\n lines must be provided by parameter `line_load`.\n * 'expansion_costs'\n Line color is set according to investment costs of the line. 
This\n option also effects node colors and sizes by plotting investment in\n stations and setting `node_color` to 'storage_integration' in order\n to plot storage size of integrated storages. Grid expansion costs\n must be provided by parameter `grid_expansion_costs`.\n * None (default)\n Lines are plotted in black. Is also the fallback option in case of\n wrong input.\n\n node_color : :obj:`str`\n Defines whereby to choose node colors (and implicitly size). Possible\n options are:\n\n * 'technology'\n Node color as well as size is set according to type of node\n (generator, MV station, etc.).\n * 'voltage'\n Node color is set according to voltage deviation from 1 p.u..\n Voltages of nodes in MV grid must be provided by parameter `voltage`.\n * 'storage_integration'\n Only storages are plotted. Size of node corresponds to size of\n storage.\n * None (default)\n Nodes are not plotted. Is also the fallback option in case of wrong\n input.\n\n line_load : :pandas:`pandas.DataFrame<dataframe>`\n Dataframe with current results from power flow analysis in A. Index of\n the dataframe is a :pandas:`pandas.DatetimeIndex<datetimeindex>`,\n columns are the line representatives.\n grid_expansion_costs : :pandas:`pandas.DataFrame<dataframe>`\n Dataframe with grid expansion costs in kEUR. See `grid_expansion_costs`\n in :class:`~.grid.network.Results` for more information.\n filename : :obj:`str`\n Filename to save plot under. If not provided, figure is shown directly.\n Default: None.\n arrows : :obj:`Boolean`\n If True draws arrows on lines in the direction of the power flow. Does\n only work when `line_color` option 'loading' is used and a time step\n is given.\n Default: False.\n limits_cb_lines : :obj:`tuple`\n Tuple with limits for colorbar of line color. First entry is the\n minimum and second entry the maximum value. Default: None.\n limits_cb_nodes : :obj:`tuple`\n Tuple with limits for colorbar of nodes. First entry is the\n minimum and second entry the maximum value. 
Default: None.\n xlim : :obj:`tuple`\n Limits of x-axis. Default: None.\n ylim : :obj:`tuple`\n Limits of y-axis. Default: None.\n lines_cmap : :obj:`str`\n Colormap to use for lines in case `line_color` is 'loading' or\n 'expansion_costs'. Default: 'inferno_r'.\n title : :obj:`str`\n Title of the plot. Default: ''.\n scaling_factor_line_width : :obj:`float` or None\n If provided line width is set according to the nominal apparent power\n of the lines. If line width is None a default line width of 2 is used\n for each line. Default: None.\n\n \"\"\"\n\n def get_color_and_size(name, colors_dict, sizes_dict):\n if 'BranchTee' in name:\n return colors_dict['BranchTee'], sizes_dict['BranchTee']\n elif 'LVStation' in name:\n return colors_dict['LVStation'], sizes_dict['LVStation']\n elif 'GeneratorFluctuating' in name:\n return (colors_dict['GeneratorFluctuating'],\n sizes_dict['GeneratorFluctuating'])\n elif 'Generator' in name:\n return colors_dict['Generator'], sizes_dict['Generator']\n elif 'DisconnectingPoint' in name:\n return (colors_dict['DisconnectingPoint'],\n sizes_dict['DisconnectingPoint'])\n elif 'MVStation' in name:\n return colors_dict['MVStation'], sizes_dict['MVStation']\n elif 'Storage' in name:\n return colors_dict['Storage'], sizes_dict['Storage']\n else:\n return colors_dict['else'], sizes_dict['else']\n\n def nodes_by_technology(buses):\n bus_sizes = {}\n bus_colors = {}\n colors_dict = {'BranchTee': 'b',\n 'GeneratorFluctuating': 'g',\n 'Generator': 'k',\n 'LVStation': 'c',\n 'MVStation': 'r',\n 'Storage': 'y',\n 'DisconnectingPoint': '0.75',\n 'else': 'orange'}\n sizes_dict = {'BranchTee': 10,\n 'GeneratorFluctuating': 100,\n 'Generator': 100,\n 'LVStation': 50,\n 'MVStation': 120,\n 'Storage': 100,\n 'DisconnectingPoint': 50,\n 'else': 200}\n for bus in buses:\n bus_colors[bus], bus_sizes[bus] = get_color_and_size(\n bus, colors_dict, sizes_dict)\n return bus_sizes, bus_colors\n\n def nodes_by_voltage(buses, voltage):\n bus_colors = {}\n 
bus_sizes = {}\n for bus in buses:\n if 'primary' in bus:\n bus_tmp = bus[12:]\n else:\n bus_tmp = bus[4:]\n if timestep is not None:\n bus_colors[bus] = 100 * abs(1 - voltage.loc[timestep,\n ('mv', bus_tmp)])\n else:\n bus_colors[bus] = 100 * max(abs(1 - voltage.loc[\n :, ('mv', bus_tmp)]))\n bus_sizes[bus] = 50\n return bus_sizes, bus_colors\n\n def nodes_storage_integration(buses):\n bus_sizes = {}\n for bus in buses:\n if not 'storage' in bus:\n bus_sizes[bus] = 0\n else:\n tmp = bus.split('_')\n storage_repr = '_'.join(tmp[1:])\n bus_sizes[bus] = pypsa_network.storage_units.loc[\n storage_repr, 'p_nom'] * 1000 / 3\n return bus_sizes\n\n def nodes_by_costs(buses, grid_expansion_costs):\n # sum costs for each station\n costs_lv_stations = grid_expansion_costs[\n grid_expansion_costs.index.str.contains(\"LVStation\")]\n costs_lv_stations['station'] = \\\n costs_lv_stations.reset_index()['index'].apply(\n lambda _: '_'.join(_.split('_')[0:2])).values\n costs_lv_stations = costs_lv_stations.groupby('station').sum()\n costs_mv_station = grid_expansion_costs[\n grid_expansion_costs.index.str.contains(\"MVStation\")]\n costs_mv_station['station'] = \\\n costs_mv_station.reset_index()['index'].apply(\n lambda _: '_'.join(_.split('_')[0:2])).values\n costs_mv_station = costs_mv_station.groupby('station').sum()\n\n bus_sizes = {}\n bus_colors = {}\n for bus in buses:\n if 'LVStation' in bus:\n try:\n tmp = bus.split('_')\n lv_st = '_'.join(tmp[2:])\n bus_colors[bus] = costs_lv_stations.loc[\n lv_st, 'total_costs']\n bus_sizes[bus] = 100\n except:\n bus_colors[bus] = 0\n bus_sizes[bus] = 0\n elif 'MVStation' in bus:\n try:\n tmp = bus.split('_')\n mv_st = '_'.join(tmp[2:])\n bus_colors[bus] = costs_mv_station.loc[\n mv_st, 'total_costs']\n bus_sizes[bus] = 100\n except:\n bus_colors[bus] = 0\n bus_sizes[bus] = 0\n else:\n bus_colors[bus] = 0\n bus_sizes[bus] = 0\n\n return bus_sizes, bus_colors\n\n # set font and font size\n font = {'family': 'serif',\n 'size': 15}\n 
matplotlib.rc('font', **font)\n\n # create pypsa network only containing MV buses and lines\n pypsa_plot = PyPSANetwork()\n pypsa_plot.buses = pypsa_network.buses.loc[pypsa_network.buses.v_nom >= 10]\n # filter buses of aggregated loads and generators\n pypsa_plot.buses = pypsa_plot.buses[\n ~pypsa_plot.buses.index.str.contains(\"agg\")]\n pypsa_plot.lines = pypsa_network.lines[\n pypsa_network.lines.bus0.isin(pypsa_plot.buses.index)][\n pypsa_network.lines.bus1.isin(pypsa_plot.buses.index)]\n\n # line colors\n if line_color == 'loading':\n # calculate relative line loading\n # get load factor\n residual_load = tools.get_residual_load_from_pypsa_network(\n pypsa_network)\n case = residual_load.apply(\n lambda _: 'feedin_case' if _ < 0 else 'load_case')\n if timestep is not None:\n timeindex = [timestep]\n else:\n timeindex = line_load.index\n load_factor = pd.DataFrame(\n data={'s_nom': [float(configs[\n 'grid_expansion_load_factors'][\n 'mv_{}_line'.format(case.loc[_])])\n for _ in timeindex]},\n index=timeindex)\n # get allowed line load\n s_allowed = load_factor.dot(\n pypsa_plot.lines.s_nom.to_frame().T * 1e3)\n # get line load from pf\n line_colors = line_load.loc[:, pypsa_plot.lines.index].divide(\n s_allowed).max()\n elif line_color == 'expansion_costs':\n node_color = 'expansion_costs'\n line_costs = pypsa_plot.lines.join(\n grid_expansion_costs, rsuffix='costs', how='left')\n line_colors = line_costs.total_costs.fillna(0)\n else:\n line_colors = pd.Series('black', index=pypsa_plot.lines.index)\n\n # bus colors and sizes\n if node_color == 'technology':\n bus_sizes, bus_colors = nodes_by_technology(pypsa_plot.buses.index)\n bus_cmap = None\n elif node_color == 'voltage':\n bus_sizes, bus_colors = nodes_by_voltage(\n pypsa_plot.buses.index, voltage)\n bus_cmap = plt.cm.Blues\n elif node_color == 'storage_integration':\n bus_sizes = nodes_storage_integration(pypsa_plot.buses.index)\n bus_colors = 'orangered'\n bus_cmap = None\n elif node_color == 
'expansion_costs':\n bus_sizes, bus_colors = nodes_by_costs(pypsa_plot.buses.index,\n grid_expansion_costs)\n bus_cmap = None\n elif node_color is None:\n bus_sizes = 0\n bus_colors = 'r'\n bus_cmap = None\n else:\n logging.warning('Choice for `node_color` is not valid. Default is '\n 'used instead.')\n bus_sizes = 0\n bus_colors = 'r'\n bus_cmap = None\n\n # convert bus coordinates to Mercator\n if contextily and background_map:\n inProj = Proj(init='epsg:4326')\n outProj = Proj(init='epsg:3857')\n x2, y2 = transform(inProj, outProj,\n list(pypsa_plot.buses.loc[:, 'x']),\n list(pypsa_plot.buses.loc[:, 'y']))\n pypsa_plot.buses.loc[:, 'x'] = x2\n pypsa_plot.buses.loc[:, 'y'] = y2\n\n # plot\n plt.figure(figsize=(12, 8))\n ax = plt.gca()\n\n # plot grid district\n if grid_district_geom and geopandas:\n try:\n subst = pypsa_network.buses[\n pypsa_network.buses.index.str.contains(\"MVStation\")].index[0]\n subst_id = subst.split('_')[-1]\n projection = 3857 if contextily and background_map else 4326\n region = get_grid_district_polygon(configs, subst_id=subst_id,\n projection=projection)\n region.plot(ax=ax, color='white', alpha=0.2,\n edgecolor='red', linewidth=2)\n except Exception as e:\n logging.warning(\"Grid district geometry could not be plotted due \"\n \"to the following error: {}\".format(e))\n\n # if scaling factor is given s_nom is plotted as line width\n if scaling_factor_line_width is not None:\n line_width = pypsa_plot.lines.s_nom * scaling_factor_line_width\n else:\n line_width = 2\n cmap = plt.cm.get_cmap(lines_cmap)\n ll = pypsa_plot.plot(line_colors=line_colors, line_cmap=cmap, ax=ax,\n title=title, line_widths=line_width,\n branch_components=['Line'], basemap=True,\n bus_sizes=bus_sizes, bus_colors=bus_colors,\n bus_cmap=bus_cmap)\n\n # color bar line loading\n if line_color == 'loading':\n if limits_cb_lines is None:\n limits_cb_lines = (min(line_colors), max(line_colors))\n v = np.linspace(limits_cb_lines[0], limits_cb_lines[1], 101)\n cb = 
plt.colorbar(ll[1], boundaries=v, ticks=v[0:101:10])\n cb.set_clim(vmin=limits_cb_lines[0], vmax=limits_cb_lines[1])\n cb.set_label('Line loading in p.u.')\n # color bar grid expansion costs\n elif line_color == 'expansion_costs':\n if limits_cb_lines is None:\n limits_cb_lines = (min(min(line_colors), min(bus_colors.values())),\n max(max(line_colors), max(bus_colors.values())))\n v = np.linspace(limits_cb_lines[0], limits_cb_lines[1], 101)\n cb = plt.colorbar(ll[1], boundaries=v, ticks=v[0:101:10])\n cb.set_clim(vmin=limits_cb_lines[0], vmax=limits_cb_lines[1])\n cb.set_label('Grid expansion costs in kEUR')\n\n # color bar voltage\n if node_color == 'voltage':\n if limits_cb_nodes is None:\n limits_cb_nodes = (min(bus_colors.values()),\n max(bus_colors.values()))\n v_voltage = np.linspace(limits_cb_nodes[0], limits_cb_nodes[1], 101)\n cb_voltage = plt.colorbar(ll[0], boundaries=v_voltage,\n ticks=v_voltage[0:101:10])\n cb_voltage.set_clim(vmin=limits_cb_nodes[0],\n vmax=limits_cb_nodes[1])\n cb_voltage.set_label('Voltage deviation in %')\n\n # storages\n if node_color == 'expansion_costs':\n ax.scatter(\n pypsa_plot.buses.loc[\n pypsa_network.storage_units.loc[:, 'bus'], 'x'],\n pypsa_plot.buses.loc[\n pypsa_network.storage_units.loc[:, 'bus'], 'y'],\n c='orangered',\n s=pypsa_network.storage_units.loc[:, 'p_nom'] * 1000 / 3)\n # add legend for storage size and line capacity\n if (node_color == 'storage_integration' or\n node_color == 'expansion_costs') and \\\n pypsa_network.storage_units.loc[:, 'p_nom'].any() > 0:\n scatter_handle = plt.scatter(\n [], [], c='orangered', s=100, label='= 300 kW battery storage')\n else:\n scatter_handle = None\n if scaling_factor_line_width is not None:\n line_handle = plt.plot(\n [], [], c='black', linewidth=scaling_factor_line_width*10,\n label='= 10 MVA')\n else:\n line_handle = None\n if scatter_handle and line_handle:\n plt.legend(handles=[scatter_handle, line_handle[0]], labelspacing=1,\n title='Storage size and line 
capacity', borderpad=0.5,\n loc=2, framealpha=0.5, fontsize='medium')\n elif scatter_handle:\n plt.legend(handles=[scatter_handle], labelspacing=1,\n title='Storage size', borderpad=0.5,\n loc=2, framealpha=0.5, fontsize='medium')\n elif line_handle:\n plt.legend(handles=[line_handle[0]], labelspacing=1,\n title='Line capacity', borderpad=0.5,\n loc=2, framealpha=0.5, fontsize='medium')\n\n # axes limits\n if xlim is not None:\n ax.set_xlim(xlim[0], xlim[1])\n if ylim is not None:\n ax.set_ylim(ylim[0], ylim[1])\n\n # hide axes labels\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n\n # draw arrows on lines\n if arrows and timestep and line_color=='loading':\n path = ll[1].get_segments()\n colors = cmap(ll[1].get_array() / 100)\n for i in range(len(path)):\n if pypsa_network.lines_t.p0.loc[timestep,\n line_colors.index[i]] > 0:\n arrowprops = dict(arrowstyle=\"->\", color='b')#colors[i])\n else:\n arrowprops = dict(arrowstyle=\"<-\", color='b')#colors[i])\n ax.annotate(\n \"\",\n xy=abs(\n (path[i][0] - path[i][1]) * 0.51 - path[i][0]),\n xytext=abs(\n (path[i][0] - path[i][1]) * 0.49 - path[i][0]),\n arrowprops=arrowprops,\n size=10)\n\n # plot map data in background\n if contextily and background_map:\n try:\n add_basemap(ax, zoom=12)\n except Exception as e:\n logging.warning(\"Background map could not be plotted due to the \"\n \"following error: {}\".format(e))\n\n if filename is None:\n plt.show()\n else:\n plt.savefig(filename)\n plt.close()", "response": "Plots MV grid topology for given network and configurations."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef to_pypsa(network, mode, timesteps):\n\n # check if timesteps is array-like, otherwise convert to list (necessary\n # to obtain a dataframe when using .loc in time series functions)\n if not hasattr(timesteps, \"__len__\"):\n timesteps = [timesteps]\n\n # get topology and time series data\n if mode is 
None:\n mv_components = mv_to_pypsa(network)\n lv_components = lv_to_pypsa(network)\n components = combine_mv_and_lv(mv_components, lv_components)\n\n if list(components['Load'].index.values):\n timeseries_load_p_set = _pypsa_load_timeseries(\n network, mode=mode, timesteps=timesteps)\n\n if len(list(components['Generator'].index.values)) > 1:\n timeseries_gen_p_min, timeseries_gen_p_max = \\\n _pypsa_generator_timeseries(\n network, mode=mode, timesteps=timesteps)\n timeseries_storage_p_min, timeseries_storage_p_max = \\\n _pypsa_storage_timeseries(\n network, mode=mode, timesteps=timesteps)\n\n if list(components['Bus'].index.values):\n timeseries_bus_v_set = _pypsa_bus_timeseries(\n network, components['Bus'].index.tolist(), timesteps=timesteps)\n else:\n raise ValueError(\"Provide proper mode or leave it empty to export \"\n \"entire grid topology.\")\n\n # check topology\n _check_topology(components)\n\n # create power flow problem\n pypsa_network = PyPSANetwork()\n pypsa_network.edisgo_mode = mode\n pypsa_network.set_snapshots(timesteps)\n\n # import grid topology to PyPSA network\n # buses are created first to avoid warnings\n pypsa_network.import_components_from_dataframe(components['Bus'], 'Bus')\n\n for k, comps in components.items():\n if k is not 'Bus' and not comps.empty:\n pypsa_network.import_components_from_dataframe(comps, k)\n\n # import time series to PyPSA network\n if len(list(components['Generator'].index.values)) > 1:\n import_series_from_dataframe(pypsa_network, timeseries_gen_p_min,\n 'Generator', 'p_min_pu')\n import_series_from_dataframe(pypsa_network, timeseries_gen_p_max,\n 'Generator', 'p_max_pu')\n import_series_from_dataframe(pypsa_network, timeseries_storage_p_min,\n 'Generator', 'p_min_pu')\n import_series_from_dataframe(pypsa_network, timeseries_storage_p_max,\n 'Generator', 'p_max_pu')\n\n if list(components['Load'].index.values):\n import_series_from_dataframe(pypsa_network, timeseries_load_p_set,\n 'Load', 'p_set')\n\n if 
list(components['Bus'].index.values):\n import_series_from_dataframe(pypsa_network, timeseries_bus_v_set,\n 'Bus', 'v_mag_pu_set')\n\n _check_integrity_of_pypsa(pypsa_network)\n\n return pypsa_network", "response": "Translate graph based grid representation to PyPSA representation."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef lv_to_pypsa(network):\n\n generators = []\n loads = []\n branch_tees = []\n lines = []\n lv_stations = []\n storages = []\n\n for lv_grid in network.mv_grid.lv_grids:\n generators.extend(lv_grid.graph.nodes_by_attribute('generator'))\n loads.extend(lv_grid.graph.nodes_by_attribute('load'))\n branch_tees.extend(lv_grid.graph.nodes_by_attribute('branch_tee'))\n lines.extend(lv_grid.graph.lines())\n lv_stations.extend(lv_grid.graph.nodes_by_attribute('lv_station'))\n storages.extend(lv_grid.graph.nodes_by_attribute('storage'))\n\n omega = 2 * pi * 50\n\n generator = {'name': [],\n 'bus': [],\n 'control': [],\n 'p_nom': [],\n 'type': [],\n 'p_nom_extendable': [],\n 'p_nom_min': [],\n 'p_nom_max': [],\n 'capital_cost': []\n }\n\n bus = {'name': [], 'v_nom': [], 'x': [], 'y': []}\n\n load = {'name': [], 'bus': []}\n\n line = {'name': [],\n 'bus0': [],\n 'bus1': [],\n 'type': [],\n 'x': [],\n 'r': [],\n 's_nom': [],\n 's_nom_min': [],\n 's_max_pu': [],\n 's_nom_extendable': [],\n 'capital_cost': [],\n 'length': []}\n\n storage = {\n 'name': [],\n 'bus': [],\n 'p_nom': [],\n 'p_nom_extendable': [],\n 'p_nom_min': [],\n 'p_nom_max': [],\n 'capital_cost': [],\n 'max_hours': []}\n\n # create dictionary representing generators and associated buses\n for gen in generators:\n bus_name = '_'.join(['Bus', repr(gen)])\n generator['name'].append(repr(gen))\n generator['bus'].append(bus_name)\n generator['control'].append('PQ')\n generator['p_nom'].append(gen.nominal_capacity / 1e3)\n generator['type'].append('_'.join([gen.type, gen.subtype]))\n generator['p_nom_extendable'].append(False)\n 
generator['p_nom_min'].append(0) # 0.3\n generator['p_nom_max'].append(0)\n generator['capital_cost'].append(0)\n\n bus['name'].append(bus_name)\n bus['v_nom'].append(gen.grid.voltage_nom)\n bus['x'].append(None)\n bus['y'].append(None)\n\n # create dictionary representing branch tees\n for bt in branch_tees:\n bus['name'].append('_'.join(['Bus', repr(bt)]))\n bus['v_nom'].append(bt.grid.voltage_nom)\n bus['x'].append(None)\n bus['y'].append(None)\n\n # create dataframes representing loads and associated buses\n for lo in loads:\n bus_name = '_'.join(['Bus', repr(lo)])\n load['name'].append(repr(lo))\n load['bus'].append(bus_name)\n\n bus['name'].append(bus_name)\n bus['v_nom'].append(lo.grid.voltage_nom)\n bus['x'].append(None)\n bus['y'].append(None)\n\n # create dataframe for lines\n for l in lines:\n line['name'].append(repr(l['line']))\n\n if l['adj_nodes'][0] in lv_stations:\n line['bus0'].append(\n '_'.join(['Bus', l['adj_nodes'][0].__repr__(side='lv')]))\n else:\n line['bus0'].append('_'.join(['Bus', repr(l['adj_nodes'][0])]))\n\n if l['adj_nodes'][1] in lv_stations:\n line['bus1'].append(\n '_'.join(['Bus', l['adj_nodes'][1].__repr__(side='lv')]))\n else:\n line['bus1'].append('_'.join(['Bus', repr(l['adj_nodes'][1])]))\n\n line['type'].append(\"\")\n line['x'].append(\n l['line'].type['L'] * omega / 1e3 * l['line'].length)\n line['r'].append(l['line'].type['R'] * l['line'].length)\n s_nom = sqrt(3) * l['line'].type['I_max_th'] * \\\n l['line'].type['U_n'] / 1e3\n line['s_nom'].append(s_nom)\n line['s_nom_min'].append(s_nom)\n line['s_max_pu'].append(0.6)\n line['s_nom_extendable'].append(True)\n line['capital_cost'].append(100)\n line['length'].append(l['line'].length)\n\n lv_components = {\n 'Generator': pd.DataFrame(generator).set_index('name'),\n 'Bus': pd.DataFrame(bus).set_index('name'),\n 'Load': pd.DataFrame(load).set_index('name'),\n 'Line': pd.DataFrame(line).set_index('name'),\n 'StorageUnit': pd.DataFrame(storage).set_index('name')}\n\n return 
lv_components", "response": "Convert LV grid topology to PyPSA representation."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a pandas. DataFrame of load time series for the given timesteps and mode.", "response": "def _pypsa_load_timeseries(network, timesteps, mode=None):\n \"\"\"\n Time series in PyPSA compatible format for load instances\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n timesteps : array_like\n Timesteps is an array-like object with entries of type\n :pandas:`pandas.Timestamp<timestamp>` specifying which time steps\n to export to pypsa representation and use in power flow analysis.\n mode : str, optional\n Specifically retrieve load time series for MV or LV grid level or both.\n Either choose 'mv' or 'lv'.\n Defaults to None, which returns both timeseries for MV and LV in a\n single DataFrame.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Time series table in PyPSA format\n \"\"\"\n mv_load_timeseries_p = []\n lv_load_timeseries_p = []\n\n # add MV grid loads\n if mode is 'mv' or mode is None:\n for load in network.mv_grid.graph.nodes_by_attribute('load'):\n mv_load_timeseries_p.append(load.pypsa_timeseries('p').rename(\n repr(load)).to_frame().loc[timesteps])\n\n # add LV grid's loads\n if mode is 'lv' or mode is None:\n for lv_grid in network.mv_grid.lv_grids:\n for load in lv_grid.graph.nodes_by_attribute('load'):\n for sector in list(load.consumption.keys()):\n # for sector in list(list(load.consumption.keys())[0]):\n # ToDo: remove consideration of only industrial sector\n # now, if a load object has consumption in multiple sectors\n # (like currently only industrial/retail) the consumption is\n # implicitly assigned to the industrial sector when being\n # exported to pypsa.\n # ToDo: resolve this in the importer\n if sector != 'retail':\n # lv_load_timeseries_q.append(\n # load.pypsa_timeseries('q').rename(\n # 
repr(load)).to_frame().loc[timesteps])\n lv_load_timeseries_p.append(\n load.pypsa_timeseries('p').rename(\n repr(load)).to_frame().loc[timesteps])\n\n load_df_p = pd.concat(mv_load_timeseries_p + lv_load_timeseries_p, axis=1)\n\n return load_df_p"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a pandas. DataFrame of generator time series for MV and LV grid level.", "response": "def _pypsa_generator_timeseries(network, timesteps, mode=None):\n \"\"\"Timeseries in PyPSA compatible format for generator instances\n\n Parameters\n ----------\n network : Network\n The eDisGo grid topology model overall container\n timesteps : array_like\n Timesteps is an array-like object with entries of type\n :pandas:`pandas.Timestamp<timestamp>` specifying which time steps\n to export to pypsa representation and use in power flow analysis.\n mode : str, optional\n Specifically retrieve generator time series for MV or LV grid level or\n both. Either choose 'mv' or 'lv'.\n Defaults to None, which returns both timeseries for MV and LV in a\n single DataFrame.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Time series table in PyPSA format\n \"\"\"\n\n mv_gen_timeseries_p_min = []\n mv_gen_timeseries_p_max = []\n lv_gen_timeseries_p_min = []\n lv_gen_timeseries_p_max = []\n\n # MV generator timeseries\n if mode is 'mv' or mode is None:\n for gen in network.mv_grid.graph.nodes_by_attribute('generator') + \\\n network.mv_grid.graph.nodes_by_attribute('generator_aggr'):\n mv_gen_timeseries_p_min.append(gen.pypsa_timeseries('p').rename(\n repr(gen)).to_frame().loc[timesteps] / gen.nominal_capacity)\n mv_gen_timeseries_p_max.append(gen.pypsa_timeseries('p').rename(\n repr(gen)).to_frame().loc[timesteps] / gen.nominal_capacity)\n\n # LV generator timeseries\n if mode is 'lv' or mode is None:\n for lv_grid in network.mv_grid.lv_grids:\n for gen in lv_grid.graph.nodes_by_attribute('generator'):\n 
lv_gen_timeseries_p_min.append(gen.pypsa_timeseries('p').rename(\n repr(gen)).to_frame().loc[timesteps] / gen.nominal_capacity)\n lv_gen_timeseries_p_max.append(gen.pypsa_timeseries('p').rename(\n repr(gen)).to_frame().loc[timesteps] / gen.nominal_capacity)\n\n # Slack time series\n lv_gen_timeseries_p_min.append(\n pd.Series([-1] * len(timesteps), index=timesteps).rename(\n \"Generator_slack\").to_frame())\n lv_gen_timeseries_p_max.append(\n pd.Series([1] * len(timesteps), index=timesteps).rename(\n \"Generator_slack\").to_frame())\n\n gen_df_p_max = pd.concat(\n mv_gen_timeseries_p_max + lv_gen_timeseries_p_max, axis=1)\n gen_df_p_min = pd.concat(\n mv_gen_timeseries_p_min + lv_gen_timeseries_p_min, axis=1)\n\n return gen_df_p_min, gen_df_p_max"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _pypsa_storage_timeseries(network, timesteps, mode=None):\n\n mv_storage_timeseries_p_min = []\n mv_storage_timeseries_p_max = []\n\n # MV storage time series\n if mode is 'mv' or mode is None:\n for storage in network.mv_grid.graph.nodes_by_attribute('storage'):\n mv_storage_timeseries_p_min.append(\n storage.timeseries.p.rename(repr(\n storage)).to_frame().loc[timesteps])\n mv_storage_timeseries_p_max.append(\n storage.timeseries.p.rename(repr(\n storage)).to_frame().loc[timesteps])\n\n storage_df_p_min = pd.concat(\n mv_storage_timeseries_p_min, axis=1)\n storage_df_p_max = pd.concat(\n mv_storage_timeseries_p_max, axis=1)\n\n return storage_df_p_min, storage_df_p_max", "response": "Returns a pandas. DataFrame of time series for storage instances in pypsa compatible format."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the actual time series used in power flow analysis.", "response": "def timeseries(self):\n \"\"\"\n Load time series\n\n It returns the actual time series used in power flow analysis. If\n :attr:`_timeseries` is not :obj:`None`, it is returned. 
Otherwise,\n :meth:`timeseries()` looks for time series of the according sector in\n :class:`~.grid.network.TimeSeries` object.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n DataFrame containing active power in kW in column 'p' and\n reactive power in kVA in column 'q'.\n\n \"\"\"\n if self._timeseries is None:\n\n if isinstance(self.grid, MVGrid):\n voltage_level = 'mv'\n elif isinstance(self.grid, LVGrid):\n voltage_level = 'lv'\n\n ts_total = None\n for sector in self.consumption.keys():\n consumption = self.consumption[sector]\n\n # check if load time series for MV and LV are differentiated\n try:\n ts = self.grid.network.timeseries.load[\n sector, voltage_level].to_frame('p')\n except KeyError:\n try:\n ts = self.grid.network.timeseries.load[\n sector].to_frame('p')\n except KeyError:\n logger.exception(\n \"No timeseries for load of type {} \"\n \"given.\".format(sector))\n raise\n ts = ts * consumption\n ts_q = self.timeseries_reactive\n if ts_q is not None:\n ts['q'] = ts_q.q\n else:\n ts['q'] = ts['p'] * self.q_sign * tan(\n acos(self.power_factor))\n\n if ts_total is None:\n ts_total = ts\n else:\n ts_total.p += ts.p\n ts_total.q += ts.q\n\n return ts_total\n else:\n return self._timeseries"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef timeseries_reactive(self):\n if self._timeseries_reactive is None:\n # if normalized reactive power time series are given, they are\n # scaled by the annual consumption; if none are given reactive\n # power time series are calculated timeseries getter using a given\n # power factor\n if self.grid.network.timeseries.load_reactive_power is not None:\n self.power_factor = 'not_applicable'\n self.reactive_power_mode = 'not_applicable'\n ts_total = None\n for sector in self.consumption.keys():\n consumption = self.consumption[sector]\n\n try:\n ts = self.grid.network.timeseries.load_reactive_power[\n sector].to_frame('q')\n except KeyError:\n 
logger.exception(\n \"No timeseries for load of type {} \"\n \"given.\".format(sector))\n raise\n ts = ts * consumption\n if ts_total is None:\n ts_total = ts\n else:\n ts_total.q += ts.q\n return ts_total\n else:\n return None\n\n else:\n return self._timeseries_reactive", "response": "Returns a pandas. Series containing the time series of the Reactive power in kvar."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef peak_load(self):\n peak_load = pd.Series(self.consumption).mul(pd.Series(\n self.grid.network.config['peakload_consumption_ratio']).astype(\n float), fill_value=0)\n\n return peak_load", "response": "Get sectoral peak load of the current user"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the power factor of load", "response": "def power_factor(self):\n \"\"\"\n Power factor of load\n\n Parameters\n -----------\n power_factor : :obj:`float`\n Ratio of real power to apparent power.\n\n Returns\n --------\n :obj:`float`\n Ratio of real power to apparent power. If power factor is not set\n it is retrieved from the network config object depending on the\n grid level the load is in.\n\n \"\"\"\n if self._power_factor is None:\n if isinstance(self.grid, MVGrid):\n self._power_factor = self.grid.network.config[\n 'reactive_power_factor']['mv_load']\n elif isinstance(self.grid, LVGrid):\n self._power_factor = self.grid.network.config[\n 'reactive_power_factor']['lv_load']\n return self._power_factor"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the reactive power mode of load.", "response": "def reactive_power_mode(self):\n \"\"\"\n Power factor mode of Load.\n\n This information is necessary to make the load behave in an inductive\n or capacitive manner. 
Essentially this changes the sign of the reactive\n power.\n\n The convention used here in a load is that:\n - when `reactive_power_mode` is 'inductive' then Q is positive\n - when `reactive_power_mode` is 'capacitive' then Q is negative\n\n Parameters\n ----------\n reactive_power_mode : :obj:`str` or None\n Possible options are 'inductive', 'capacitive' and\n 'not_applicable'. In the case of 'not_applicable' a reactive\n power time series must be given.\n\n Returns\n -------\n :obj:`str`\n In the case that this attribute is not set, it is retrieved from\n the network config object depending on the voltage level the load\n is in.\n\n \"\"\"\n if self._reactive_power_mode is None:\n if isinstance(self.grid, MVGrid):\n self._reactive_power_mode = self.grid.network.config[\n 'reactive_power_mode']['mv_load']\n elif isinstance(self.grid, LVGrid):\n self._reactive_power_mode = self.grid.network.config[\n 'reactive_power_mode']['lv_load']\n\n return self._reactive_power_mode"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef timeseries(self):\n if self._timeseries is None:\n # calculate time series for active and reactive power\n try:\n timeseries = \\\n self.grid.network.timeseries.generation_dispatchable[\n self.type].to_frame('p')\n except KeyError:\n try:\n timeseries = \\\n self.grid.network.timeseries.generation_dispatchable[\n 'other'].to_frame('p')\n except KeyError:\n logger.exception(\"No time series for type {} \"\n \"given.\".format(self.type))\n raise\n\n timeseries = timeseries * self.nominal_capacity\n if self.timeseries_reactive is not None:\n timeseries['q'] = self.timeseries_reactive\n else:\n timeseries['q'] = timeseries['p'] * self.q_sign * tan(acos(\n self.power_factor))\n\n return timeseries\n else:\n return self._timeseries.loc[\n self.grid.network.timeseries.timeindex, :]", "response": "Returns the actual dispatch time series used in power flow analysis."} {"SOURCE": "codesearchnet", "instruction": 
"Explain what the following Python 3 code does\ndef timeseries_reactive(self):\n if self._timeseries_reactive is None:\n if self.grid.network.timeseries.generation_reactive_power \\\n is not None:\n try:\n timeseries = \\\n self.grid.network.timeseries.generation_reactive_power[\n self.type].to_frame('q')\n except (KeyError, TypeError):\n try:\n timeseries = \\\n self.grid.network.timeseries.generation_reactive_power[\n 'other'].to_frame('q')\n except:\n logger.warning(\n \"No reactive power time series for type {} given. \"\n \"Reactive power time series will be calculated from \"\n \"assumptions in config files and active power \"\n \"timeseries.\".format(self.type))\n return None\n self.power_factor = 'not_applicable'\n self.reactive_power_mode = 'not_applicable'\n return timeseries * self.nominal_capacity\n else:\n return None\n else:\n return self._timeseries_reactive.loc[\n self.grid.network.timeseries.timeindex, :]", "response": "Returns the time series in kvar that contains the reactive power."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the actual time series used in power flow analysis.", "response": "def timeseries(self):\n \"\"\"\n Feed-in time series of generator\n\n It returns the actual time series used in power flow analysis. If\n :attr:`_timeseries` is not :obj:`None`, it is returned. 
Otherwise,\n :meth:`timeseries` looks for generation and curtailment time series\n of the according type of technology (and weather cell) in\n :class:`~.grid.network.TimeSeries`.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n DataFrame containing active power in kW in column 'p' and\n reactive power in kVA in column 'q'.\n\n \"\"\"\n if self._timeseries is None:\n\n # get time series for active power depending on if they are\n # differentiated by weather cell ID or not\n if isinstance(self.grid.network.timeseries.generation_fluctuating.\n columns, pd.MultiIndex):\n if self.weather_cell_id:\n try:\n timeseries = self.grid.network.timeseries.\\\n generation_fluctuating[\n self.type, self.weather_cell_id].to_frame('p')\n except KeyError:\n logger.exception(\"No time series for type {} and \"\n \"weather cell ID {} given.\".format(\n self.type, self.weather_cell_id))\n raise\n else:\n logger.exception(\"No weather cell ID provided for \"\n \"fluctuating generator {}.\".format(\n repr(self)))\n raise KeyError\n else:\n try:\n timeseries = self.grid.network.timeseries.\\\n generation_fluctuating[self.type].to_frame('p')\n except KeyError:\n logger.exception(\"No time series for type {} \"\n \"given.\".format(self.type))\n raise\n\n timeseries = timeseries * self.nominal_capacity\n\n # subtract curtailment\n if self.curtailment is not None:\n timeseries = timeseries.join(\n self.curtailment.to_frame('curtailment'), how='left')\n timeseries.p = timeseries.p - timeseries.curtailment.fillna(0)\n\n if self.timeseries_reactive is not None:\n timeseries['q'] = self.timeseries_reactive\n else:\n timeseries['q'] = timeseries['p'] * self.q_sign * tan(acos(\n self.power_factor))\n\n return timeseries\n else:\n return self._timeseries.loc[\n self.grid.network.timeseries.timeindex, :]"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a pandas. 
DataFrame containing the time series for the reactive power of the given generator.", "response": "def timeseries_reactive(self):\n \"\"\"\n Reactive power time series in kvar.\n\n Parameters\n -------\n :pandas:`pandas.Series<series>`\n Series containing reactive power time series in kvar.\n\n Returns\n ----------\n :pandas:`pandas.DataFrame<dataframe>` or None\n Series containing reactive power time series in kvar. If it is not\n set it is tried to be retrieved from `generation_reactive_power`\n attribute of global TimeSeries object. If that is not possible\n None is returned.\n\n \"\"\"\n\n if self._timeseries_reactive is None:\n # try to get time series for reactive power depending on if they\n # are differentiated by weather cell ID or not\n # raise warning if no time series for generator type (and weather\n # cell ID) can be retrieved\n if self.grid.network.timeseries.generation_reactive_power \\\n is not None:\n if isinstance(\n self.grid.network.timeseries.generation_reactive_power.\n columns, pd.MultiIndex):\n if self.weather_cell_id:\n try:\n timeseries = self.grid.network.timeseries. \\\n generation_reactive_power[\n self.type, self.weather_cell_id].to_frame('q')\n return timeseries * self.nominal_capacity\n except (KeyError, TypeError):\n logger.warning(\"No time series for type {} and \"\n \"weather cell ID {} given. \"\n \"Reactive power time series will \"\n \"be calculated from assumptions \"\n \"in config files and active power \"\n \"timeseries.\".format(\n self.type, self.weather_cell_id))\n return None\n else:\n raise ValueError(\n \"No weather cell ID provided for fluctuating \"\n \"generator {}, but reactive power is given as a \"\n \"MultiIndex suggesting that it is differentiated \"\n \"by weather cell ID.\".format(repr(self)))\n else:\n try:\n timeseries = self.grid.network.timeseries. 
\\\n generation_reactive_power[self.type].to_frame('q')\n return timeseries * self.nominal_capacity\n except (KeyError, TypeError):\n logger.warning(\"No reactive power time series for \"\n \"type {} given. Reactive power time \"\n \"series will be calculated from \"\n \"assumptions in config files and \"\n \"active power timeseries.\".format(\n self.type))\n return None\n else:\n return None\n else:\n return self._timeseries_reactive.loc[\n self.grid.network.timeseries.timeindex, :]"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the curtailment time series for the given technology type and weather cell ID.", "response": "def curtailment(self):\n \"\"\"\n Parameters\n ----------\n curtailment_ts : :pandas:`pandas.Series<series>`\n See class definition for details.\n\n Returns\n -------\n :pandas:`pandas.Series<series>`\n If self._curtailment is set it returns that. Otherwise, if\n curtailment in :class:`~.grid.network.TimeSeries` for the\n corresponding technology type (and if given, weather cell ID)\n is set this is returned.\n\n \"\"\"\n if self._curtailment is not None:\n return self._curtailment\n elif isinstance(self.grid.network.timeseries._curtailment,\n pd.DataFrame):\n if isinstance(self.grid.network.timeseries.curtailment.\n columns, pd.MultiIndex):\n if self.weather_cell_id:\n try:\n return self.grid.network.timeseries.curtailment[\n self.type, self.weather_cell_id]\n except KeyError:\n logger.exception(\"No curtailment time series for type \"\n \"{} and weather cell ID {} \"\n \"given.\".format(self.type,\n self.weather_cell_id))\n raise\n else:\n logger.exception(\"No weather cell ID provided for \"\n \"fluctuating generator {}.\".format(\n repr(self)))\n raise KeyError\n else:\n try:\n return self.grid.network.timeseries.curtailment[self.type]\n except KeyError:\n logger.exception(\"No curtailment time series for type \"\n \"{} given.\".format(self.type))\n raise\n else:\n return None"} {"SOURCE": 
"codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a pandas. DataFrame containing the time series of storage operation.", "response": "def timeseries(self):\n \"\"\"\n Time series of storage operation\n\n Parameters\n ----------\n ts : :pandas:`pandas.DataFrame<dataframe>`\n DataFrame containing active power the storage is charged (negative)\n and discharged (positive) with (on the grid side) in kW in column \n 'p' and reactive power in kvar in column 'q'. When 'q' is positive,\n reactive power is supplied (behaving as a capacitor) and when 'q'\n is negative reactive power is consumed (behaving as an inductor).\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n See parameter `timeseries`.\n\n \"\"\"\n # check if time series for reactive power is given, otherwise\n # calculate it\n if 'q' in self._timeseries.columns:\n return self._timeseries\n else:\n self._timeseries['q'] = abs(self._timeseries.p) * self.q_sign * \\\n tan(acos(self.power_factor))\n return self._timeseries.loc[\n self.grid.network.timeseries.timeindex, :]"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the sign reactive power based on the _reactive_power_mode attribute.", "response": "def q_sign(self):\n \"\"\"\n Get the sign reactive power based on the\n :attr: `_reactive_power_mode`\n\n Returns\n -------\n :obj: `int` : +1 or -1\n \"\"\"\n if self.reactive_power_mode.lower() == 'inductive':\n return -1\n elif self.reactive_power_mode.lower() == 'capacitive':\n return 1\n else:\n raise ValueError(\"Unknown value {} in reactive_power_mode\".format(\n self.reactive_power_mode))"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef open(self):\n if self._state != 'open':\n if self._line is not None:\n self._state = 'open'\n self._nodes = self.grid.graph.nodes_from_line(self._line)\n self.grid.graph.remove_edge(\n self._nodes[0], self._nodes[1])\n 
else:\n raise ValueError('``line`` is not set')", "response": "Toggle state to open switch disconnector"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ntoggling state to closed switch disconnector", "response": "def close(self):\n \"\"\"Toggle state to closed switch disconnector\"\"\"\n self._state = 'closed'\n self.grid.graph.add_edge(\n self._nodes[0], self._nodes[1], {'line': self._line})"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nprovides : shallowely LineString object<linestrings > geometry of", "response": "def geom(self):\n \"\"\"Provide :shapely:`Shapely LineString object<linestrings>` geometry of\n :class:`Line`\"\"\"\n adj_nodes = self._grid._graph.nodes_from_line(self)\n\n return LineString([adj_nodes[0].geom, adj_nodes[1].geom])"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef wrap_context(func):\n @functools.wraps(func)\n def wrapper(self, request, context):\n return func(self, request, LogFieldsContext(context))\n\n return wrapper", "response": "Wraps the provided servicer method by passing a wrapped context"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_log_fields(self, fields: Dict[str, Any]):\n self._log_fields.add_fields(fields)", "response": "Add the provided log fields to the log_fields attribute of this object."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\npacks the log fields and the invocation metadata into a new metadata list", "response": "def pack_metadata(self) -> List[Tuple[str, Any]]:\n \"\"\"Packs the log fields and the invocation metadata into a new metadata\n\n The log fields are added in the new metadata with the key\n `LOG_FIELDS_KEY_META`.\n\n \"\"\"\n metadata = [(k, v) for k, v in self._invocation_metadata.items()\n if k != LOG_FIELDS_KEY_META]\n 
metadata.append((LOG_FIELDS_KEY_META, self._log_fields.dumps()))\n return metadata"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef from_metadata(cls, metadata: Dict[str, Any]) -> 'LogFields':\n return cls(fields=json.loads(metadata.get(LOG_FIELDS_KEY_META, '{}')))", "response": "Initialize the log fields from the provided metadata dictionary."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nadd the provided log fields to the log.", "response": "def add_fields(self, fields):\n \"\"\"Add the provided log fields\n\n If a key is already present, then it is ignored.\n\n :param fields: the log fields to add\n\n \"\"\"\n for k, v in fields.items():\n if k not in self._fields:\n self._fields[k] = v"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncalculate the grid expansion costs for each reinforced transformer and line in kEUR.", "response": "def grid_expansion_costs(network, without_generator_import=False):\n \"\"\"\n Calculates grid expansion costs for each reinforced transformer and line\n in kEUR.\n\n Attributes\n ----------\n network : :class:`~.grid.network.Network`\n without_generator_import : Boolean\n If True excludes lines that were added in the generator import to\n connect new generators to the grid from calculation of grid expansion\n costs. Default: False.\n\n Returns\n -------\n `pandas.DataFrame<dataframe>`\n DataFrame containing type and costs plus in the case of lines the\n line length and number of parallel lines of each reinforced\n transformer and line. Index of the DataFrame is the respective object\n that can either be a :class:`~.grid.components.Line` or a\n :class:`~.grid.components.Transformer`. Columns are the following:\n\n type: String\n Transformer size or cable name\n\n total_costs: float\n Costs of equipment in kEUR. 
For lines the line length and number of\n parallel lines is already included in the total costs.\n\n quantity: int\n For transformers quantity is always one, for lines it specifies the\n number of parallel lines.\n\n line_length: float\n Length of line or in case of parallel lines all lines in km.\n\n voltage_level : :obj:`str` {'lv' | 'mv' | 'mv/lv'}\n Specifies voltage level the equipment is in.\n\n mv_feeder : :class:`~.grid.components.Line`\n First line segment of half-ring used to identify in which\n feeder the grid expansion was conducted in.\n\n Notes\n -------\n Total grid expansion costs can be obtained through\n self.grid_expansion_costs.total_costs.sum().\n\n \"\"\"\n def _get_transformer_costs(transformer):\n if isinstance(transformer.grid, LVGrid):\n return float(network.config['costs_transformers']['lv'])\n elif isinstance(transformer.grid, MVGrid):\n return float(network.config['costs_transformers']['mv'])\n\n def _get_line_costs(line, quantity):\n # get voltage level\n if isinstance(line.grid, LVGrid):\n voltage_level = 'lv'\n elif isinstance(line.grid, MVGrid):\n voltage_level = 'mv'\n else:\n raise KeyError(\"Grid must be LVGrid or MVGrid.\")\n # get population density in people/km^2\n # transform area to calculate area in km^2\n projection = partial(\n pyproj.transform,\n pyproj.Proj(init='epsg:{}'.format(\n int(network.config['geo']['srid']))),\n pyproj.Proj(init='epsg:3035'))\n sqm2sqkm = 1e6\n population_density = (line.grid.grid_district['population'] /\n (transform(projection,\n line.grid.grid_district['geom']).area /\n sqm2sqkm))\n if population_density <= 500:\n population_density = 'rural'\n else:\n population_density = 'urban'\n # get costs from config\n costs_cable = float(network.config['costs_cables']['{}_cable'.format(\n voltage_level)])\n costs_cable_earthwork = float(network.config['costs_cables'][\n '{}_cable_incl_earthwork_{}'.format(\n voltage_level,\n population_density)])\n return (costs_cable_earthwork * l.length +\n 
costs_cable * l.length * (quantity - 1))\n\n costs = pd.DataFrame()\n\n if without_generator_import:\n equipment_changes = network.results.equipment_changes.loc[\n network.results.equipment_changes.iteration_step > 0]\n else:\n equipment_changes = network.results.equipment_changes\n\n # costs for transformers\n if not equipment_changes.empty:\n transformers = equipment_changes[equipment_changes['equipment'].apply(\n isinstance, args=(Transformer,))]\n added_transformers = transformers[transformers['change'] == 'added']\n removed_transformers = transformers[\n transformers['change'] == 'removed']\n # check if any of the added transformers were later removed\n added_removed_transformers = added_transformers.loc[\n added_transformers['equipment'].isin(\n removed_transformers['equipment'])]\n added_transformers = added_transformers[\n ~added_transformers['equipment'].isin(\n added_removed_transformers.equipment)]\n # calculate costs for each transformer\n for t in added_transformers['equipment']:\n costs = costs.append(pd.DataFrame(\n {'type': t.type.name,\n 'total_costs': _get_transformer_costs(t),\n 'quantity': 1,\n 'voltage_level': 'mv/lv',\n 'mv_feeder': t.grid.station.mv_feeder if isinstance(\n t.grid, LVGrid) else None},\n index=[t]))\n\n # costs for lines\n # get changed lines\n lines = equipment_changes.loc[equipment_changes.index[\n equipment_changes.reset_index()['index'].apply(\n isinstance, args=(Line,))]]\n # calculate costs for each reinforced line\n for l in list(lines.index.unique()):\n # check if line connects aggregated units\n aggr_lines = []\n aggr_lines_generator = l.grid.graph.lines_by_attribute('line_aggr')\n for aggr_line in aggr_lines_generator:\n aggr_lines.append(repr(aggr_line['line']))\n if not repr(l) in aggr_lines:\n number_lines_added = equipment_changes[\n (equipment_changes.index == l) &\n (equipment_changes.equipment ==\n l.type.name)]['quantity'].sum()\n costs = costs.append(pd.DataFrame(\n {'type': l.type.name,\n 'total_costs': 
_get_line_costs(l, number_lines_added),\n 'length': l.length * number_lines_added,\n 'quantity': number_lines_added,\n 'voltage_level': ('lv' if isinstance(l.grid, LVGrid)\n else 'mv'),\n 'mv_feeder': get_mv_feeder_from_line(l)},\n index=[l]))\n\n # if no costs incurred write zero costs to DataFrame\n if costs.empty:\n costs = costs.append(pd.DataFrame(\n {'type': ['N/A'],\n 'total_costs': [0],\n 'length': [0],\n 'quantity': [0],\n 'voltage_level': '',\n 'mv_feeder': ''\n },\n index=['No reinforced equipment.']))\n\n return costs"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef plot_mv_grid_topology(self, technologies=False, **kwargs):\n if self.network.pypsa is None:\n try:\n timesteps = self.network.timeseries.timeindex\n self.network.pypsa = pypsa_io.to_pypsa(\n self.network, mode=None, timesteps=timesteps)\n except:\n logging.warning(\n \"pypsa representation of MV grid needed to plot MV \"\n \"grid topology.\")\n\n if self.network.pypsa is not None:\n plots.mv_grid_topology(\n self.network.pypsa, self.network.config,\n node_color='technology' if technologies is True else None,\n filename=kwargs.get('filename', None),\n grid_district_geom=kwargs.get('grid_district_geom', True),\n background_map=kwargs.get('background_map', True),\n xlim=kwargs.get('xlim', None), ylim=kwargs.get('ylim', None),\n title=kwargs.get('title', ''))", "response": "Plots a MV grid topology."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nplotting voltages in MV grid on grid topology plot.", "response": "def plot_mv_voltages(self, **kwargs):\n \"\"\"\n Plots voltages in MV grid on grid topology plot.\n\n For more information see :func:`edisgo.tools.plots.mv_grid_topology`.\n\n \"\"\"\n if self.network.pypsa is not None:\n try:\n v_res = self.network.results.v_res()\n except:\n logging.warning(\"Voltages `pfa_v_mag_pu` from power flow \"\n \"analysis must be available to plot them.\")\n return\n 
plots.mv_grid_topology(\n self.network.pypsa, self.network.config,\n timestep=kwargs.get('timestep', None),\n node_color='voltage',\n filename=kwargs.get('filename', None),\n grid_district_geom=kwargs.get('grid_district_geom', True),\n background_map=kwargs.get('background_map', True),\n voltage=v_res,\n limits_cb_nodes=kwargs.get('limits_cb_nodes', None),\n xlim=kwargs.get('xlim', None), ylim=kwargs.get('ylim', None),\n title=kwargs.get('title', ''))\n else:\n logging.warning(\"pypsa representation of MV grid needed to \"\n \"plot voltages.\")"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nplot relative line loading of MV lines.", "response": "def plot_mv_line_loading(self, **kwargs):\n \"\"\"\n Plots relative line loading (current from power flow analysis to\n allowed current) of MV lines.\n\n For more information see :func:`edisgo.tools.plots.mv_grid_topology`.\n\n \"\"\"\n if self.network.pypsa is not None and \\\n self.network.results.i_res is not None:\n plots.mv_grid_topology(\n self.network.pypsa, self.network.config,\n timestep=kwargs.get('timestep', None),\n line_color='loading',\n node_color=kwargs.get('node_color', None),\n line_load=self.network.results.s_res(),\n filename=kwargs.get('filename', None),\n arrows=kwargs.get('arrows', None),\n grid_district_geom=kwargs.get('grid_district_geom', True),\n background_map=kwargs.get('background_map', True),\n voltage=self.network.results.v_res(),\n limits_cb_lines=kwargs.get('limits_cb_lines', None),\n limits_cb_nodes=kwargs.get('limits_cb_nodes', None),\n xlim=kwargs.get('xlim', None), ylim=kwargs.get('ylim', None),\n lines_cmap=kwargs.get('lines_cmap', 'inferno_r'),\n title=kwargs.get('title', ''),\n scaling_factor_line_width=kwargs.get(\n 'scaling_factor_line_width', None))\n else:\n if self.network.pypsa is None:\n logging.warning(\"pypsa representation of MV grid needed to \"\n \"plot line loading.\")\n if self.network.results.i_res is None:\n 
logging.warning(\"Currents `i_res` from power flow analysis \"\n \"must be available to plot line loading.\")"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef plot_mv_grid_expansion_costs(self, **kwargs):\n if self.network.pypsa is not None and \\\n self.network.results.grid_expansion_costs is not None:\n if isinstance(self, EDisGo):\n # convert index of grid expansion costs to str\n grid_expansion_costs = \\\n self.network.results.grid_expansion_costs.reset_index()\n grid_expansion_costs['index'] = \\\n grid_expansion_costs['index'].apply(lambda _: repr(_))\n grid_expansion_costs.set_index('index', inplace=True)\n else:\n grid_expansion_costs = \\\n self.network.results.grid_expansion_costs\n\n plots.mv_grid_topology(\n self.network.pypsa, self.network.config,\n line_color='expansion_costs',\n grid_expansion_costs=grid_expansion_costs,\n filename=kwargs.get('filename', None),\n grid_district_geom=kwargs.get('grid_district_geom', True),\n background_map=kwargs.get('background_map', True),\n limits_cb_lines=kwargs.get('limits_cb_lines', None),\n xlim=kwargs.get('xlim', None), ylim=kwargs.get('ylim', None),\n lines_cmap=kwargs.get('lines_cmap', 'inferno_r'),\n title=kwargs.get('title', ''),\n scaling_factor_line_width=kwargs.get(\n 'scaling_factor_line_width', None)\n )\n else:\n if self.network.pypsa is None:\n logging.warning(\"pypsa representation of MV grid needed to \"\n \"plot grid expansion costs.\")\n if self.network.results.grid_expansion_costs is None:\n logging.warning(\"Grid expansion cost results needed to plot \"\n \"them.\")", "response": "Plots the expansion costs per MV line."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef plot_mv_storage_integration(self, **kwargs):\n if self.network.pypsa is not None:\n plots.mv_grid_topology(\n self.network.pypsa, self.network.config,\n node_color='storage_integration',\n filename=kwargs.get('filename', 
None),\n grid_district_geom=kwargs.get('grid_district_geom', True),\n background_map=kwargs.get('background_map', True),\n xlim=kwargs.get('xlim', None), ylim=kwargs.get('ylim', None),\n title=kwargs.get('title', ''))\n else:\n if self.network.pypsa is None:\n logging.warning(\"pypsa representation of MV grid needed to \"\n \"plot storage integration in MV grid.\")", "response": "Plots storage integration in MV grid."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef histogram_voltage(self, timestep=None, title=True, **kwargs):\n data = self.network.results.v_res()\n if title is True:\n if timestep is not None:\n title = \"Voltage histogram for time step {}\".format(timestep)\n else:\n title = \"Voltage histogram \\nfor time steps {} to {}\".format(\n data.index[0], data.index[-1])\n elif title is False:\n title = None\n plots.histogram(data=data, title=title, timeindex=timestep, **kwargs)", "response": "Plots histogram of voltage for a specific time step."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef histogram_relative_line_load(self, timestep=None, title=True,\n voltage_level='mv_lv', **kwargs):\n \"\"\"\n Plots histogram of relative line loads.\n\n For more information see :func:`edisgo.tools.plots.histogram`.\n\n Parameters\n ----------\n Parameters\n ----------\n timestep : :pandas:`pandas.Timestamp<timestamp>` or None, optional\n Specifies time step histogram is plotted for. If timestep is None\n all time steps voltages are calculated for are used. Default: None.\n title : :obj:`str` or :obj:`bool`, optional\n Title for plot. If True title is auto generated. If False plot has\n no title. If :obj:`str`, the provided title is used. Default: True.\n voltage_level : :obj:`str`\n Specifies which voltage level to plot voltage histogram for.\n Possible options are 'mv', 'lv' and 'mv_lv'. 
'mv_lv' is also the\n fallback option in case of wrong input. Default: 'mv_lv'\n\n \"\"\"\n residual_load = tools.get_residual_load_from_pypsa_network(\n self.network.pypsa)\n case = residual_load.apply(\n lambda _: 'feedin_case' if _ < 0 else 'load_case')\n if timestep is not None:\n timeindex = [timestep]\n else:\n timeindex = self.network.results.s_res().index\n load_factor = pd.DataFrame(\n data={'s_nom': [float(self.network.config[\n 'grid_expansion_load_factors'][\n 'mv_{}_line'.format(case.loc[_])])\n for _ in timeindex]},\n index=timeindex)\n if voltage_level == 'mv':\n lines = self.network.pypsa.lines.loc[\n self.network.pypsa.lines.v_nom > 1]\n elif voltage_level == 'lv':\n lines = self.network.pypsa.lines.loc[\n self.network.pypsa.lines.v_nom < 1]\n else:\n lines = self.network.pypsa.lines\n s_res = self.network.results.s_res().loc[\n timeindex, lines.index]\n # get allowed line load\n s_allowed = load_factor.dot(\n self.network.pypsa.lines.s_nom.to_frame().T * 1e3)\n # get line load from pf\n data = s_res.divide(s_allowed)\n\n if title is True:\n if timestep is not None:\n title = \"Relative line load histogram for time step {}\".format(\n timestep)\n else:\n title = \"Relative line load histogram \\nfor time steps \" \\\n \"{} to {}\".format(data.index[0], data.index[-1])\n elif title is False:\n title = None\n plots.histogram(data=data, title=title, **kwargs)", "response": "Plots a histogram of relative line loads."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef curtail(self, methodology, curtailment_timeseries, **kwargs):\n CurtailmentControl(edisgo=self, methodology=methodology,\n curtailment_timeseries=curtailment_timeseries,\n **kwargs)", "response": "Sets up curtailment time series."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef import_from_ding0(self, file, **kwargs):\n import_from_ding0(file=file, network=self.network)", "response": 
"Import grid data from DINGO file"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef import_generators(self, generator_scenario=None):\n if generator_scenario:\n self.network.generator_scenario = generator_scenario\n data_source = 'oedb'\n import_generators(network=self.network, data_source=data_source)", "response": "Import generators from oedb."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nanalyzing the grid by power flow analysis.", "response": "def analyze(self, mode=None, timesteps=None):\n \"\"\"Analyzes the grid by power flow analysis\n\n Analyze the grid for violations of hosting capacity. Means, perform a\n power flow analysis and obtain voltages at nodes (load, generator,\n stations/transformers and branch tees) and active/reactive power at\n lines.\n\n The power flow analysis can currently only be performed for both grid\n levels MV and LV. See ToDos section for more information.\n\n A static `non-linear power flow analysis is performed using PyPSA\n <https://www.pypsa.org/doc/power_flow.html#full-non-linear-power-flow>`_.\n The high-voltage to medium-voltage transformer are not included in the\n analysis. The slack bus is defined at secondary side of these\n transformers assuming an ideal tap changer. Hence, potential\n overloading of the transformers is not studied here.\n\n Parameters\n ----------\n mode : str\n Allows to toggle between power flow analysis (PFA) on the whole\n grid topology (MV + LV), only MV or only LV. Defaults to None which\n equals power flow analysis for MV + LV which is the only\n implemented option at the moment. See ToDos section for\n more information.\n timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Timesteps specifies for which time steps to conduct the power flow\n analysis. 
It defaults to None in which case the time steps in\n timeseries.timeindex (see :class:`~.grid.network.TimeSeries`) are\n used.\n\n Notes\n -----\n The current implementation always translates the grid topology\n representation to the PyPSA format and stores it to\n :attr:`self.network.pypsa`.\n\n ToDos\n ------\n The option to export only the edisgo MV grid (mode = 'mv') to conduct\n a power flow analysis is implemented in\n :func:`~.tools.pypsa_io.to_pypsa` but NotImplementedError is raised\n since the rest of edisgo does not handle this option yet. The analyze\n function will throw an error since\n :func:`~.tools.pypsa_io.process_pfa_results`\n does not handle aggregated loads and generators in the LV grids. Also,\n grid reinforcement, pypsa update of time series, and probably other\n functionalities do not work when only the MV grid is analysed.\n\n Further ToDos are:\n * explain how power plants are modeled, if possible use a link\n * explain where to find and adjust power flow analysis defining\n parameters\n\n See Also\n --------\n :func:`~.tools.pypsa_io.to_pypsa`\n Translator to PyPSA data format\n\n \"\"\"\n if timesteps is None:\n timesteps = self.network.timeseries.timeindex\n # check if timesteps is array-like, otherwise convert to list\n if not hasattr(timesteps, \"__len__\"):\n timesteps = [timesteps]\n\n if self.network.pypsa is None:\n # Translate eDisGo grid topology representation to PyPSA format\n self.network.pypsa = pypsa_io.to_pypsa(\n self.network, mode, timesteps)\n else:\n if self.network.pypsa.edisgo_mode is not mode:\n # Translate eDisGo grid topology representation to PyPSA format\n self.network.pypsa = pypsa_io.to_pypsa(\n self.network, mode, timesteps)\n\n # check if all timesteps are in pypsa.snapshots, if not update time\n # series\n if False in [True if _ in self.network.pypsa.snapshots else False\n for _ in timesteps]:\n pypsa_io.update_pypsa_timeseries(self.network, timesteps=timesteps)\n # run power flow analysis\n pf_results = 
self.network.pypsa.pf(timesteps)\n\n if all(pf_results['converged']['0'].tolist()):\n pypsa_io.process_pfa_results(\n self.network, self.network.pypsa, timesteps)\n else:\n raise ValueError(\"Power flow analysis did not converge.\")"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef analyze_lopf(self, mode=None, timesteps=None,\n etrago_max_storage_size=None):\n \"\"\"Analyzes the grid by power flow analysis\n\n Analyze the grid for violations of hosting capacity. Means, perform a\n power flow analysis and obtain voltages at nodes (load, generator,\n stations/transformers and branch tees) and active/reactive power at\n lines.\n\n The power flow analysis can currently only be performed for both grid\n levels MV and LV. See ToDos section for more information.\n\n A static `non-linear power flow analysis is performed using PyPSA\n <https://www.pypsa.org/doc/power_flow.html#full-non-linear-power-flow>`_.\n The high-voltage to medium-voltage transformer are not included in the\n analysis. The slack bus is defined at secondary side of these\n transformers assuming an ideal tap changer. Hence, potential\n overloading of the transformers is not studied here.\n\n Parameters\n ----------\n mode : str\n Allows to toggle between power flow analysis (PFA) on the whole\n grid topology (MV + LV), only MV or only LV. Defaults to None which\n equals power flow analysis for MV + LV which is the only\n implemented option at the moment. See ToDos section for\n more information.\n timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`\n Timesteps specifies for which time steps to conduct the power flow\n analysis. 
It defaults to None in which case the time steps in\n timeseries.timeindex (see :class:`~.grid.network.TimeSeries`) are\n used.\n\n Notes\n -----\n The current implementation always translates the grid topology\n representation to the PyPSA format and stores it to\n :attr:`self.network.pypsa`.\n\n ToDos\n ------\n The option to export only the edisgo MV grid (mode = 'mv') to conduct\n a power flow analysis is implemented in\n :func:`~.tools.pypsa_io.to_pypsa` but NotImplementedError is raised\n since the rest of edisgo does not handle this option yet. The analyze\n function will throw an error since\n :func:`~.tools.pypsa_io.process_pfa_results`\n does not handle aggregated loads and generators in the LV grids. Also,\n grid reinforcement, pypsa update of time series, and probably other\n functionalities do not work when only the MV grid is analysed.\n\n Further ToDos are:\n * explain how power plants are modeled, if possible use a link\n * explain where to find and adjust power flow analysis defining\n parameters\n\n See Also\n --------\n :func:`~.tools.pypsa_io.to_pypsa`\n Translator to PyPSA data format\n\n \"\"\"\n if timesteps is None:\n timesteps = self.network.timeseries.timeindex\n # check if timesteps is array-like, otherwise convert to list\n if not hasattr(timesteps, \"__len__\"):\n timesteps = [timesteps]\n\n # Translate eDisGo grid topology representation to PyPSA format\n logging.debug('Translate eDisGo grid topology representation to '\n 'PyPSA format.')\n self.network.pypsa_lopf = pypsa_io_lopf.to_pypsa(\n self.network, mode, timesteps)\n logging.debug('Translating eDisGo grid topology representation to '\n 'PyPSA format finished.')\n\n # add total storage capacity constraint\n def extra_functionality(network, snapshots):\n model = network.model\n # total installed capacity\n model.storages_p_nom = Constraint(\n rule=lambda model: sum(\n model.generator_p_nom[s]\n for s in self.network.pypsa_lopf.generators[\n self.network.pypsa_lopf.generators.type 
==\n 'Storage'].index) <= etrago_max_storage_size)\n\n # run power flow analysis\n self.network.pypsa_lopf.lopf(\n snapshots=timesteps, solver_name='cbc', keep_files=False,\n extra_functionality=extra_functionality,\n solver_options={'tee': True})\n\n # self.network.pypsa.model.write(\n # io_options={'symbolic_solver_labels': True})\n\n print('objective: {}'.format(self.network.pypsa_lopf.objective))\n\n # relevant outputs\n # plot MV grid\n plots.storage_size(self.network.mv_grid, self.network.pypsa_lopf,\n filename='storage_results_{}.pdf'.format(\n self.network.id))\n\n storages = self.network.mv_grid.graph.nodes_by_attribute('storage')\n storages_repr = [repr(_) for _ in storages]\n print('Installed storage capacity: {} MW'.format(\n self.network.pypsa_lopf.generators.loc[\n storages_repr, 'p_nom_opt'].sum()))\n\n # export storage results (pypsa and path to storage)\n pypsa_storages_df = self.network.pypsa_lopf.generators.loc[\n storages_repr, :].sort_values(by=['p_nom_opt'], ascending=False)\n\n storage_repr = []\n storage_path = []\n for s in storages:\n storage_repr.append(repr(s))\n storage_path.append(nx.shortest_path(self.network.mv_grid.graph,\n self.network.mv_grid.station, s))\n graph_storages_df = pd.DataFrame({'path': storage_path},\n index=storage_repr)\n pypsa_storages_df.join(graph_storages_df).to_csv(\n 'storage_results_{}.csv'.format(self.network.id))\n\n # take largest 8 storages and remove the rest\n keep_storages = pypsa_storages_df.iloc[:8, :].index\n remove_storages = pypsa_storages_df.iloc[8:, :].index\n # write time series to kept storages\n for s in keep_storages:\n keep_storage_obj = [_ for _ in storages if repr(_)==s][0]\n ts = self.network.pypsa_lopf.generators_t.p.loc[:, s]\n keep_storage_obj.timeseries = pd.DataFrame({'p': ts * 1000,\n 'q': [0] * len(ts)},\n index=ts.index)\n # delete small storages\n for s in remove_storages:\n disconnect_storage(self.network,\n [_ for _ in storages if repr(_)==s][0])", "response": "Analyzes the 
grid by power flow analysis."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreinforce the object and calculates grid expansion costs.", "response": "def reinforce(self, **kwargs):\n \"\"\"\n Reinforces the grid and calculates grid expansion costs.\n\n See :meth:`edisgo.flex_opt.reinforce_grid` for more information.\n\n \"\"\"\n results = reinforce_grid(\n self, max_while_iterations=kwargs.get(\n 'max_while_iterations', 10),\n copy_graph=kwargs.get('copy_graph', False),\n timesteps_pfa=kwargs.get('timesteps_pfa', None),\n combined_analysis=kwargs.get('combined_analysis', False))\n\n # add measure to Results object\n if not kwargs.get('copy_graph', False):\n self.network.results.measures = 'grid_expansion'\n\n return results"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nintegrate storage into grid.", "response": "def integrate_storage(self, timeseries, position, **kwargs):\n \"\"\"\n Integrates storage into grid.\n\n See :class:`~.grid.network.StorageControl` for more information.\n\n \"\"\"\n StorageControl(edisgo=self, timeseries=timeseries,\n position=position, **kwargs)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _load_equipment_data(self):\n\n package_path = edisgo.__path__[0]\n equipment_dir = self.config['system_dirs']['equipment_dir']\n\n data = {}\n equipment = {'mv': ['trafos', 'lines', 'cables'],\n 'lv': ['trafos', 'cables']}\n\n for voltage_level, eq_list in equipment.items():\n for i in eq_list:\n equipment_parameters = self.config['equipment'][\n 'equipment_{}_parameters_{}'.format(voltage_level, i)]\n data['{}_{}'.format(voltage_level, i)] = pd.read_csv(\n os.path.join(package_path, equipment_dir,\n equipment_parameters),\n comment='#', index_col='name',\n delimiter=',', decimal='.')\n\n return data", "response": "Load the data for transformers cables etc."} {"SOURCE": "codesearchnet", "instruction": "Explain what the 
following Python 3 code does\ndef _load_config(config_path=None):\n\n config_files = ['config_db_tables', 'config_grid',\n 'config_grid_expansion', 'config_timeseries']\n\n # load configs\n if isinstance(config_path, dict):\n for conf in config_files:\n config.load_config(filename='{}.cfg'.format(conf),\n config_dir=config_path[conf],\n copy_default_config=False)\n else:\n for conf in config_files:\n config.load_config(filename='{}.cfg'.format(conf),\n config_dir=config_path)\n\n config_dict = config.cfg._sections\n\n # convert numeric values to float\n for sec, subsecs in config_dict.items():\n for subsec, val in subsecs.items():\n # try str -> float conversion\n try:\n config_dict[sec][subsec] = float(val)\n except:\n pass\n\n # convert to time object\n config_dict['demandlib']['day_start'] = datetime.datetime.strptime(\n config_dict['demandlib']['day_start'], \"%H:%M\")\n config_dict['demandlib']['day_start'] = datetime.time(\n config_dict['demandlib']['day_start'].hour,\n config_dict['demandlib']['day_start'].minute)\n config_dict['demandlib']['day_end'] = datetime.datetime.strptime(\n config_dict['demandlib']['day_end'], \"%H:%M\")\n config_dict['demandlib']['day_end'] = datetime.time(\n config_dict['demandlib']['day_end'].hour,\n config_dict['demandlib']['day_end'].minute)\n\n return config_dict", "response": "Load config files and return a dictionary of the configuration data."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _check_timeindex(self):\n try:\n self.timeseries.generation_fluctuating\n self.timeseries.generation_dispatchable\n self.timeseries.load\n self.timeseries.generation_reactive_power\n self.timeseries.load_reactive_power\n except:\n message = 'Time index of feed-in and load time series does ' \\\n 'not match.'\n logging.error(message)\n raise KeyError(message)", "response": "Check if all feed - in and load time series contain the specified time index."} {"SOURCE": 
"codesearchnet", "instruction": "Write a Python 3 function for\ndefining time series for worst case generation.", "response": "def _worst_case_generation(self, worst_case_scale_factors, modes):\n \"\"\"\n Define worst case generation time series for fluctuating and\n dispatchable generators.\n\n Parameters\n ----------\n worst_case_scale_factors : dict\n Scale factors defined in config file 'config_timeseries.cfg'.\n Scale factors describe actual power to nominal power ratio of in\n worst-case scenarios.\n modes : list\n List with worst-cases to generate time series for. Can be\n 'feedin_case', 'load_case' or both.\n\n \"\"\"\n\n self.timeseries.generation_fluctuating = pd.DataFrame(\n {'solar': [worst_case_scale_factors[\n '{}_feedin_pv'.format(mode)] for mode in modes],\n 'wind': [worst_case_scale_factors[\n '{}_feedin_other'.format(mode)] for mode in modes]},\n index=self.timeseries.timeindex)\n\n self.timeseries.generation_dispatchable = pd.DataFrame(\n {'other': [worst_case_scale_factors[\n '{}_feedin_other'.format(mode)] for mode in modes]},\n index=self.timeseries.timeindex)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _worst_case_load(self, worst_case_scale_factors,\n peakload_consumption_ratio, modes):\n \"\"\"\n Define worst case load time series for each sector.\n\n Parameters\n ----------\n worst_case_scale_factors : dict\n Scale factors defined in config file 'config_timeseries.cfg'.\n Scale factors describe actual power to nominal power ratio of in\n worst-case scenarios.\n peakload_consumption_ratio : dict\n Ratios of peak load to annual consumption per sector, defined in\n config file 'config_timeseries.cfg'\n modes : list\n List with worst-cases to generate time series for. 
Can be\n 'feedin_case', 'load_case' or both.\n\n \"\"\"\n\n sectors = ['residential', 'retail', 'industrial', 'agricultural']\n lv_power_scaling = np.array(\n [worst_case_scale_factors['lv_{}_load'.format(mode)]\n for mode in modes])\n mv_power_scaling = np.array(\n [worst_case_scale_factors['mv_{}_load'.format(mode)]\n for mode in modes])\n\n lv = {(sector, 'lv'): peakload_consumption_ratio[sector] *\n lv_power_scaling\n for sector in sectors}\n mv = {(sector, 'mv'): peakload_consumption_ratio[sector] *\n mv_power_scaling\n for sector in sectors}\n self.timeseries.load = pd.DataFrame({**lv, **mv},\n index=self.timeseries.timeindex)", "response": "Generate worst case load time series for each sector."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _check_timeindex(self, curtailment_timeseries, network):\n if curtailment_timeseries is None:\n message = 'No curtailment given.'\n logging.error(message)\n raise KeyError(message)\n try:\n curtailment_timeseries.loc[network.timeseries.timeindex]\n except:\n message = 'Time index of curtailment time series does not match ' \\\n 'with load and feed-in time series.'\n logging.error(message)\n raise KeyError(message)", "response": "Raises an error if time index of curtailment time series does not comply with the time index of load and feed - in time series."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck that the curtailment at any time step exceeds the total feed - in of all generators and curtailment can be distributed among all generators. 
Raises an error if the curtailment at any time step exceeds the total feed - in of all generators and curtailment can be distributed among all generators.", "response": "def _precheck(self, curtailment_timeseries, feedin_df, curtailment_key):\n \"\"\"\n Raises an error if the curtailment at any time step exceeds the\n total feed-in of all generators curtailment can be distributed among\n at that time.\n\n Parameters\n -----------\n curtailment_timeseries : :pandas:`pandas.Series<series>`\n Curtailment time series in kW for the technology (and weather\n cell) specified in `curtailment_key`.\n feedin_df : :pandas:`pandas.Series<series>`\n Feed-in time series in kW for all generators of type (and in\n weather cell) specified in `curtailment_key`.\n curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str`\n Technology (and weather cell) curtailment is given for.\n\n \"\"\"\n if not feedin_df.empty:\n feedin_selected_sum = feedin_df.sum(axis=1)\n diff = feedin_selected_sum - curtailment_timeseries\n # add tolerance (set small negative values to zero)\n diff[diff.between(-1, 0)] = 0\n if not (diff >= 0).all():\n bad_time_steps = [_ for _ in diff.index\n if diff[_] < 0]\n message = 'Curtailment demand exceeds total feed-in in time ' \\\n 'steps {}.'.format(bad_time_steps)\n logging.error(message)\n raise ValueError(message)\n else:\n bad_time_steps = [_ for _ in curtailment_timeseries.index\n if curtailment_timeseries[_] > 0]\n if bad_time_steps:\n message = 'Curtailment given for time steps {} but there ' \\\n 'are no generators to meet the curtailment target ' \\\n 'for {}.'.format(bad_time_steps, curtailment_key)\n logging.error(message)\n raise ValueError(message)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _postcheck(self, network, feedin):\n curtailment = network.timeseries.curtailment\n gen_repr = [repr(_) for _ in curtailment.columns]\n feedin_repr = feedin.loc[:, gen_repr]\n 
curtailment_repr = curtailment\n curtailment_repr.columns = gen_repr\n if not ((feedin_repr - curtailment_repr) > -1e-1).all().all():\n message = 'Curtailment exceeds feed-in.'\n logging.error(message)\n raise TypeError(message)", "response": "Checks that the curtailment of a generator exceeds the curtailment of a feed - in of a generator at any time step. Raises an error if the curtailment exceeds the feed - in of that generator at any time step."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nintegrating storage units in the grid.", "response": "def _integrate_storage(self, timeseries, position, params, voltage_level,\n reactive_power_timeseries, **kwargs):\n \"\"\"\n Integrate storage units in the grid.\n\n Parameters\n ----------\n timeseries : :obj:`str` or :pandas:`pandas.Series<series>`\n Parameter used to obtain time series of active power the storage\n storage is charged (negative) or discharged (positive) with. Can\n either be a given time series or an operation strategy. See class\n definition for more information\n position : :obj:`str` or :class:`~.grid.components.Station` or :class:`~.grid.components.BranchTee` or :class:`~.grid.components.Generator` or :class:`~.grid.components.Load`\n Parameter used to place the storage. See class definition for more\n information.\n params : :obj:`dict`\n Dictionary with storage parameters for one storage. See class\n definition for more information on what parameters must be\n provided.\n voltage_level : :obj:`str` or None\n `voltage_level` defines which side of the LV station the storage is\n connected to. Valid options are 'lv' and 'mv'. Default: None. 
See\n class definition for more information.\n reactive_power_timeseries : :pandas:`pandas.Series<series>` or None\n Reactive power time series in kvar (generator sign convention).\n Index of the series needs to be a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`.\n\n \"\"\"\n # place storage\n params = self._check_nominal_power(params, timeseries)\n if isinstance(position, Station) or isinstance(position, BranchTee) \\\n or isinstance(position, Generator) \\\n or isinstance(position, Load):\n storage = storage_integration.set_up_storage(\n node=position, parameters=params, voltage_level=voltage_level)\n line = storage_integration.connect_storage(storage, position)\n elif isinstance(position, str) \\\n and position == 'hvmv_substation_busbar':\n storage, line = storage_integration.storage_at_hvmv_substation(\n self.edisgo.network.mv_grid, params)\n elif isinstance(position, str) \\\n and position == 'distribute_storages_mv':\n # check active power time series\n if not isinstance(timeseries, pd.Series):\n raise ValueError(\n \"Storage time series needs to be a pandas Series if \"\n \"`position` is 'distribute_storages_mv'.\")\n else:\n timeseries = pd.DataFrame(data={'p': timeseries},\n index=timeseries.index)\n self._check_timeindex(timeseries)\n # check reactive power time series\n if reactive_power_timeseries is not None:\n self._check_timeindex(reactive_power_timeseries)\n timeseries['q'] = reactive_power_timeseries.loc[\n timeseries.index]\n else:\n timeseries['q'] = 0\n # start storage positioning method\n storage_positioning.one_storage_per_feeder(\n edisgo=self.edisgo, storage_timeseries=timeseries,\n storage_nominal_power=params['nominal_power'], **kwargs)\n return\n else:\n message = 'Provided storage position option {} is not ' \\\n 'valid.'.format(timeseries)\n logging.error(message)\n raise KeyError(message)\n\n # implement operation strategy (active power)\n if isinstance(timeseries, pd.Series):\n timeseries = pd.DataFrame(data={'p': timeseries},\n 
index=timeseries.index)\n self._check_timeindex(timeseries)\n storage.timeseries = timeseries\n elif isinstance(timeseries, str) and timeseries == 'fifty-fifty':\n storage_operation.fifty_fifty(self.edisgo.network, storage)\n else:\n message = 'Provided storage timeseries option {} is not ' \\\n 'valid.'.format(timeseries)\n logging.error(message)\n raise KeyError(message)\n\n # reactive power\n if reactive_power_timeseries is not None:\n self._check_timeindex(reactive_power_timeseries)\n storage.timeseries = pd.DataFrame(\n {'p': storage.timeseries.p,\n 'q': reactive_power_timeseries.loc[storage.timeseries.index]},\n index=storage.timeseries.index)\n\n # update pypsa representation\n if self.edisgo.network.pypsa is not None:\n pypsa_io.update_pypsa_storage(\n self.edisgo.network.pypsa,\n storages=[storage], storages_lines=[line])"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _check_nominal_power(self, storage_parameters, timeseries):\n if storage_parameters.get('nominal_power', None) is None:\n try:\n storage_parameters['nominal_power'] = max(abs(timeseries))\n except:\n raise ValueError(\"Could not assign a nominal power to the \"\n \"storage. Please provide either a nominal \"\n \"power or an active power time series.\")\n return storage_parameters", "response": "Checks if nominal power is provided through storage_parameters and returns the absolute maximum of timeseries. 
Raises a ValueError if it cannot assign a nominal power to the storage."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nraise an error if the time index of storage time series does not comply with the time index of load and feed - in time series.", "response": "def _check_timeindex(self, timeseries):\n \"\"\"\n Raises an error if time index of storage time series does not\n comply with the time index of load and feed-in time series.\n\n Parameters\n -----------\n timeseries : :pandas:`pandas.DataFrame<dataframe>`\n DataFrame containing active power the storage is charged (negative)\n and discharged (positive) with in kW in column 'p' and\n reactive power in kVA in column 'q'.\n\n \"\"\"\n try:\n timeseries.loc[self.edisgo.network.timeseries.timeindex]\n except:\n message = 'Time index of storage time series does not match ' \\\n 'with load and feed-in time series.'\n logging.error(message)\n raise KeyError(message)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a pandas. DataFrame of the dispatchable generators.", "response": "def generation_dispatchable(self):\n \"\"\"\n Get generation time series of dispatchable generators (only active\n power)\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n See class definition for details.\n\n \"\"\"\n try:\n return self._generation_dispatchable.loc[[self.timeindex], :]\n except:\n return self._generation_dispatchable.loc[self.timeindex, :]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef generation_fluctuating(self):\n try:\n return self._generation_fluctuating.loc[[self.timeindex], :]\n except:\n return self._generation_fluctuating.loc[self.timeindex, :]", "response": "Returns a pandas. 
DataFrame of the active generation time series of fluctuating renewables."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget load time series", "response": "def load(self):\n \"\"\"\n Get load time series (only active power)\n\n Returns\n -------\n dict or :pandas:`pandas.DataFrame<dataframe>`\n See class definition for details.\n\n \"\"\"\n try:\n return self._load.loc[[self.timeindex], :]\n except:\n return self._load.loc[self.timeindex, :]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the curtailment time series of all curtailed generators and curtailed generators.", "response": "def curtailment(self):\n \"\"\"\n Get curtailment time series of dispatchable generators (only active\n power)\n\n Parameters\n ----------\n curtailment : list or :pandas:`pandas.DataFrame<dataframe>`\n See class definition for details.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n In the case curtailment is applied to all solar and wind generators\n curtailment time series either aggregated by technology type or by\n type and weather cell ID are returnded. 
In the first case columns\n of the DataFrame are 'solar' and 'wind'; in the second case columns\n need to be a :pandas:`pandas.MultiIndex<multiindex>` with the\n first level containing the type and the second level the weather\n cell ID.\n In the case curtailment is only applied to specific generators,\n curtailment time series of all curtailed generators, specified in\n by the column name are returned.\n\n \"\"\"\n if self._curtailment is not None:\n if isinstance(self._curtailment, pd.DataFrame):\n try:\n return self._curtailment.loc[[self.timeindex], :]\n except:\n return self._curtailment.loc[self.timeindex, :]\n elif isinstance(self._curtailment, list):\n try:\n curtailment = pd.DataFrame()\n for gen in self._curtailment:\n curtailment[gen] = gen.curtailment\n return curtailment\n except:\n raise\n else:\n return None"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef curtailment(self):\n if self._curtailment is not None:\n result_dict = {}\n for key, gen_list in self._curtailment.items():\n curtailment_df = pd.DataFrame()\n for gen in gen_list:\n curtailment_df[gen] = gen.curtailment\n result_dict[key] = curtailment_df\n return result_dict\n else:\n return None", "response": "Returns a dictionary with curtailment assigned to each generator per curtailed power in the curtailed power in the curtailed power in the generator."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef storages(self):\n grids = [self.network.mv_grid] + list(self.network.mv_grid.lv_grids)\n storage_results = {}\n storage_results['storage_id'] = []\n storage_results['nominal_power'] = []\n storage_results['voltage_level'] = []\n storage_results['grid_connection_point'] = []\n for grid in grids:\n for storage in grid.graph.nodes_by_attribute('storage'):\n storage_results['storage_id'].append(repr(storage))\n storage_results['nominal_power'].append(storage.nominal_power)\n 
storage_results['voltage_level'].append(\n 'mv' if isinstance(grid, MVGrid) else 'lv')\n storage_results['grid_connection_point'].append(\n grid.graph.neighbors(storage)[0])\n\n return pd.DataFrame(storage_results).set_index('storage_id')", "response": "Returns a pandas. DataFrame containing all storages installed in MV and LV grids."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a dataframe with storage time series.", "response": "def storages_timeseries(self):\n \"\"\"\n Returns a dataframe with storage time series.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n\n Dataframe containing time series of all storages installed in the\n MV grid and LV grids. Index of the dataframe is a\n :pandas:`pandas.DatetimeIndex<datetimeindex>`. Columns are the\n storage representatives.\n\n \"\"\"\n storages_p = pd.DataFrame()\n storages_q = pd.DataFrame()\n grids = [self.network.mv_grid] + list(self.network.mv_grid.lv_grids)\n for grid in grids:\n for storage in grid.graph.nodes_by_attribute('storage'):\n ts = storage.timeseries\n storages_p[repr(storage)] = ts.p\n storages_q[repr(storage)] = ts.q\n\n return storages_p, storages_q"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef s_res(self, components=None):\n\n if components is not None:\n labels_included = []\n labels_not_included = []\n labels = [repr(l) for l in components]\n for label in labels:\n if (label in list(self.pfa_p.columns) and\n label in list(self.pfa_q.columns)):\n labels_included.append(label)\n else:\n labels_not_included.append(label)\n if labels_not_included:\n logging.warning(\n \"Apparent power for {lines} are not returned from \"\n \"PFA\".format(lines=labels_not_included))\n else:\n labels_included = self.pfa_p.columns\n\n s_res = ((self.pfa_p[labels_included] ** 2 + self.pfa_q[\n labels_included] ** 2)).applymap(sqrt)\n\n return s_res", "response": "Returns the apparent power in kVA 
at a line or transformer."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsaving the results of the current curtailment to disk.", "response": "def save(self, directory, parameters='all'):\n \"\"\"\n Saves results to disk.\n\n Depending on which results are selected and if they exist, the\n following directories and files are created:\n\n * `powerflow_results` directory\n\n * `voltages_pu.csv`\n\n See :py:attr:`~pfa_v_mag_pu` for more information.\n * `currents.csv`\n\n See :func:`~i_res` for more information.\n * `active_powers.csv`\n\n See :py:attr:`~pfa_p` for more information.\n * `reactive_powers.csv`\n\n See :py:attr:`~pfa_q` for more information.\n * `apparent_powers.csv`\n\n See :func:`~s_res` for more information.\n * `grid_losses.csv`\n\n See :py:attr:`~grid_losses` for more information.\n * `hv_mv_exchanges.csv`\n\n See :py:attr:`~hv_mv_exchanges` for more information.\n\n * `pypsa_network` directory\n\n See :py:func:`pypsa.Network.export_to_csv_folder`\n\n * `grid_expansion_results` directory\n\n * `grid_expansion_costs.csv`\n\n See :py:attr:`~grid_expansion_costs` for more information.\n * `equipment_changes.csv`\n\n See :py:attr:`~equipment_changes` for more information.\n * `unresolved_issues.csv`\n\n See :py:attr:`~unresolved_issues` for more information.\n\n * `curtailment_results` directory\n\n Files depend on curtailment specifications. There will be one file\n for each curtailment specification, that is for every key in\n :py:attr:`~curtailment` dictionary.\n\n * `storage_integration_results` directory\n\n * `storages.csv`\n\n See :func:`~storages` for more information.\n\n Parameters\n ----------\n directory : :obj:`str`\n Directory to save the results in.\n parameters : :obj:`str` or :obj:`list` of :obj:`str`\n Specifies which results will be saved. By default all results are\n saved. 
To only save certain results set `parameters` to one of the\n following options or choose several options by providing a list:\n\n * 'pypsa_network'\n * 'powerflow_results'\n * 'grid_expansion_results'\n * 'curtailment_results'\n * 'storage_integration_results'\n\n \"\"\"\n def _save_power_flow_results(target_dir):\n if self.pfa_v_mag_pu is not None:\n # create directory\n os.makedirs(target_dir, exist_ok=True)\n\n # voltage\n self.pfa_v_mag_pu.to_csv(\n os.path.join(target_dir, 'voltages_pu.csv'))\n\n # current\n self.i_res.to_csv(\n os.path.join(target_dir, 'currents.csv'))\n\n # active power\n self.pfa_p.to_csv(\n os.path.join(target_dir, 'active_powers.csv'))\n\n # reactive power\n self.pfa_q.to_csv(\n os.path.join(target_dir, 'reactive_powers.csv'))\n\n # apparent power\n self.s_res().to_csv(\n os.path.join(target_dir, 'apparent_powers.csv'))\n\n # grid losses\n self.grid_losses.to_csv(\n os.path.join(target_dir, 'grid_losses.csv'))\n\n # grid exchanges\n self.hv_mv_exchanges.to_csv(os.path.join(\n target_dir, 'hv_mv_exchanges.csv'))\n\n def _save_pypsa_network(target_dir):\n if self.network.pypsa:\n # create directory\n os.makedirs(target_dir, exist_ok=True)\n self.network.pypsa.export_to_csv_folder(target_dir)\n\n def _save_grid_expansion_results(target_dir):\n if self.grid_expansion_costs is not None:\n # create directory\n os.makedirs(target_dir, exist_ok=True)\n\n # grid expansion costs\n self.grid_expansion_costs.to_csv(os.path.join(\n target_dir, 'grid_expansion_costs.csv'))\n\n # unresolved issues\n pd.DataFrame(self.unresolved_issues).to_csv(os.path.join(\n target_dir, 'unresolved_issues.csv'))\n\n # equipment changes\n self.equipment_changes.to_csv(os.path.join(\n target_dir, 'equipment_changes.csv'))\n\n def _save_curtailment_results(target_dir):\n if self.curtailment is not None:\n # create directory\n os.makedirs(target_dir, exist_ok=True)\n\n for key, curtailment_df in self.curtailment.items():\n if type(key) == tuple:\n type_prefix = 
'-'.join([key[0], str(key[1])])\n elif type(key) == str:\n type_prefix = key\n else:\n raise KeyError(\"Unknown key type {} for key {}\".format(\n type(key), key))\n\n filename = os.path.join(\n target_dir, '{}.csv'.format(type_prefix))\n\n curtailment_df.to_csv(filename, index_label=type_prefix)\n\n def _save_storage_integration_results(target_dir):\n storages = self.storages\n if not storages.empty:\n # create directory\n os.makedirs(target_dir, exist_ok=True)\n\n # general storage information\n storages.to_csv(os.path.join(target_dir, 'storages.csv'))\n\n # storages time series\n ts_p, ts_q = self.storages_timeseries()\n ts_p.to_csv(os.path.join(\n target_dir, 'storages_active_power.csv'))\n ts_q.to_csv(os.path.join(\n target_dir, 'storages_reactive_power.csv'))\n\n if not self.storages_costs_reduction is None:\n self.storages_costs_reduction.to_csv(\n os.path.join(target_dir,\n 'storages_costs_reduction.csv'))\n\n # dictionary with function to call to save each parameter\n func_dict = {\n 'powerflow_results': _save_power_flow_results,\n 'pypsa_network': _save_pypsa_network,\n 'grid_expansion_results': _save_grid_expansion_results,\n 'curtailment_results': _save_curtailment_results,\n 'storage_integration_results': _save_storage_integration_results\n }\n\n # if string is given convert to list\n if isinstance(parameters, str):\n if parameters == 'all':\n parameters = ['powerflow_results', 'pypsa_network',\n 'grid_expansion_results', 'curtailment_results',\n 'storage_integration_results']\n else:\n parameters = [parameters]\n\n # save each parameter\n for parameter in parameters:\n try:\n func_dict[parameter](os.path.join(directory, parameter))\n except KeyError:\n message = \"Invalid input {} for `parameters` when saving \" \\\n \"results. 
Must be any or a list of the following: \" \\\n \"'pypsa_network', 'powerflow_results', \" \\\n \"'grid_expansion_results', 'curtailment_results', \" \\\n \"'storage_integration_results'.\".format(parameter)\n logger.error(message)\n raise KeyError(message)\n except:\n raise\n # save measures\n pd.DataFrame(data={'measure': self.measures}).to_csv(\n os.path.join(directory, 'measures.csv'))\n # save configs\n with open(os.path.join(directory, 'configs.csv'), 'w') as f:\n writer = csv.writer(f)\n rows = [\n ['{}'.format(key)] + [value for item in values.items()\n for value in item]\n for key, values in self.network.config._data.items()]\n writer.writerows(rows)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef v_res(self, nodes=None, level=None):\n # check if voltages are available:\n if hasattr(self, 'pfa_v_mag_pu'):\n self.pfa_v_mag_pu.sort_index(axis=1, inplace=True)\n else:\n message = \"No voltage results available.\"\n raise AttributeError(message)\n\n if level is None:\n level = ['mv', 'lv']\n\n if nodes is None:\n return self.pfa_v_mag_pu.loc[:, (level, slice(None))]\n else:\n not_included = [_ for _ in nodes\n if _ not in list(self.pfa_v_mag_pu[level].columns)]\n labels_included = [_ for _ in nodes if _ not in not_included]\n\n if not_included:\n logging.warning(\"Voltage levels for {nodes} are not returned \"\n \"from PFA\".format(nodes=not_included))\n return self.pfa_v_mag_pu[level][labels_included]", "response": "Returns a pandas. 
DataFrame of voltage levels obtained at a given grid level."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef s_res(self, components=None):\n if components is None:\n return self.apparent_power\n else:\n not_included = [_ for _ in components\n if _ not in self.apparent_power.index]\n labels_included = [_ for _ in components if _ not in not_included]\n\n if not_included:\n logging.warning(\n \"No apparent power results available for: {}\".format(\n not_included))\n return self.apparent_power.loc[:, labels_included]", "response": "Returns the apparent power in kVA at lines and transformers."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef storage_at_hvmv_substation(mv_grid, parameters, mode=None):\n storage = set_up_storage(node=mv_grid.station, parameters=parameters,\n operational_mode=mode)\n line = connect_storage(storage, mv_grid.station)\n return storage, line", "response": "Place storage at HV or MV substation bus bar."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nsets up a storage instance for the given node and parameters.", "response": "def set_up_storage(node, parameters,\n voltage_level=None, operational_mode=None):\n \"\"\"\n Sets up a storage instance.\n\n Parameters\n ----------\n node : :class:`~.grid.components.Station` or :class:`~.grid.components.BranchTee`\n Node the storage will be connected to.\n parameters : :obj:`dict`, optional\n Dictionary with storage parameters. Must at least contain\n 'nominal_power'. See :class:`~.grid.network.StorageControl` for more\n information.\n voltage_level : :obj:`str`, optional\n This parameter only needs to be provided if `node` is of type\n :class:`~.grid.components.LVStation`. In that case `voltage_level`\n defines which side of the LV station the storage is connected to. Valid\n options are 'lv' and 'mv'. 
Default: None.\n operational_mode : :obj:`str`, optional\n Operational mode. See :class:`~.grid.network.StorageControl` for\n possible options and more information. Default: None.\n\n \"\"\"\n\n # if node the storage is connected to is an LVStation voltage_level\n # defines which side the storage is connected to\n if isinstance(node, LVStation):\n if voltage_level == 'lv':\n grid = node.grid\n elif voltage_level == 'mv':\n grid = node.mv_grid\n else:\n raise ValueError(\n \"{} is not a valid option for voltage_level.\".format(\n voltage_level))\n else:\n grid = node.grid\n\n return Storage(operation=operational_mode,\n id='{}_storage_{}'.format(grid,\n len(grid.graph.nodes_by_attribute(\n 'storage')) + 1),\n grid=grid,\n geom=node.geom,\n **parameters)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nconnects storage to a given node.", "response": "def connect_storage(storage, node):\n \"\"\"\n Connects storage to the given node.\n\n The storage is connected by a cable\n The cable the storage is connected with is selected to be able to carry\n the storages nominal power and equal amount of reactive power.\n No load factor is considered.\n\n Parameters\n ----------\n storage : :class:`~.grid.components.Storage`\n Storage instance to be integrated into the grid.\n node : :class:`~.grid.components.Station` or :class:`~.grid.components.BranchTee`\n Node the storage will be connected to.\n\n Returns\n -------\n :class:`~.grid.components.Line`\n Newly added line to connect storage.\n\n \"\"\"\n\n # add storage itself to graph\n storage.grid.graph.add_node(storage, type='storage')\n\n # add 1m connecting line to node the storage is connected to\n if isinstance(storage.grid, MVGrid):\n voltage_level = 'mv'\n else:\n voltage_level = 'lv'\n\n # necessary apparent power the line must be able to carry is set to be\n # the storages nominal power and equal amount of reactive power devided by\n # the minimum load factor\n lf_dict = 
storage.grid.network.config['grid_expansion_load_factors']\n lf = min(lf_dict['{}_feedin_case_line'.format(voltage_level)],\n lf_dict['{}_load_case_line'.format(voltage_level)])\n apparent_power_line = sqrt(2) * storage.nominal_power / lf\n line_type, line_count = select_cable(storage.grid.network, voltage_level,\n apparent_power_line)\n line = Line(\n id=storage.id,\n type=line_type,\n kind='cable',\n length=1e-3,\n grid=storage.grid,\n quantity=line_count)\n\n storage.grid.graph.add_edge(node, storage, line=line, type='line')\n\n return line"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_request_log_fields(\n self, log_fields: LogFields,\n call_details: Union[grpc.HandlerCallDetails,\n grpc.ClientCallDetails]\n ):\n \"\"\"Add log fields related to a request to the provided log fields\n\n :param log_fields: log fields instance to which to add the fields\n :param call_details: some information regarding the call\n\n \"\"\"\n service, method = call_details.method[1:].split(\"/\")\n log_fields.add_fields({\n \"system\": \"grpc\",\n \"span.kind\": self.KIND,\n \"grpc.service\": service,\n \"grpc.method\": method,\n })", "response": "Add log fields related to a request to the provided log fields instance."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_response_log_fields(self, log_fields: LogFields,\n start_time: datetime, err: Exception):\n \"\"\"Add log fields related to a response to the provided log fields\n\n :param log_fields: log fields instnace to which to add the fields\n :param start_time: start time of the request\n :param err: exception raised during the handling of the request.\n\n \"\"\"\n code = \"Unknown\" if err is not None else \"OK\"\n duration = (datetime.utcnow() - start_time).total_seconds() * 1000\n log_fields.add_fields({\n \"grpc.start_time\": start_time.isoformat() + \"Z\",\n \"grpc.code\": code,\n 
\"duration\": \"{duration}ms\".format(duration=duration),\n })", "response": "Add log fields related to a response to the provided log fields"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nselecting two worst - case snapshots from time series.", "response": "def select_worstcase_snapshots(network):\n \"\"\"\n Select two worst-case snapshots from time series\n\n Two time steps in a time series represent worst-case snapshots. These are\n\n 1. Load case: refers to the point in the time series where the\n (load - generation) achieves its maximum and is greater than 0.\n 2. Feed-in case: refers to the point in the time series where the\n (load - generation) achieves its minimum and is smaller than 0.\n\n These two points are identified based on the generation and load time\n series. In case load or feed-in case don't exist None is returned.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n Network for which worst-case snapshots are identified.\n\n Returns\n -------\n :obj:`dict`\n Dictionary with keys 'load_case' and 'feedin_case'. 
Values are\n corresponding worst-case snapshots of type\n :pandas:`pandas.Timestamp<timestamp>` or None.\n\n \"\"\"\n timeseries_load_feedin_case = network.timeseries.timesteps_load_feedin_case\n\n timestamp = {}\n timestamp['load_case'] = (\n timeseries_load_feedin_case.residual_load.idxmax()\n if max(timeseries_load_feedin_case.residual_load) > 0 else None)\n timestamp['feedin_case'] = (\n timeseries_load_feedin_case.residual_load.idxmin()\n if min(timeseries_load_feedin_case.residual_load) < 0 else None)\n return timestamp"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncalculating the residual load in MW in MV grid and underlying LV grids.", "response": "def get_residual_load_from_pypsa_network(pypsa_network):\n \"\"\"\n Calculates residual load in MW in MV grid and underlying LV grids.\n\n Parameters\n ----------\n pypsa_network : :pypsa:`pypsa.Network<network>`\n The `PyPSA network\n <https://www.pypsa.org/doc/components.html#network>`_ container,\n containing load flow results.\n\n Returns\n -------\n :pandas:`pandas.Series<series>`\n Series with residual load in MW for each time step. Positiv values\n indicate a higher demand than generation and vice versa. 
Index of the\n series is a :pandas:`pandas.DatetimeIndex<datetimeindex>`\n\n \"\"\"\n residual_load = \\\n pypsa_network.loads_t.p_set.sum(axis=1) - (\n pypsa_network.generators_t.p_set.loc[\n :, pypsa_network.generators_t.p_set.columns !=\n 'Generator_slack'].sum(axis=1) +\n pypsa_network.storage_units_t.p_set.sum(axis=1))\n return residual_load"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef assign_load_feedin_case(network):\n\n if network.pypsa is not None:\n residual_load = get_residual_load_from_pypsa_network(network.pypsa) * \\\n 1e3\n\n else:\n grids = [network.mv_grid] + list(network.mv_grid.lv_grids)\n\n gens = []\n loads = []\n for grid in grids:\n gens.extend(grid.generators)\n gens.extend(list(grid.graph.nodes_by_attribute('storage')))\n loads.extend(list(grid.graph.nodes_by_attribute('load')))\n\n generation_timeseries = pd.Series(\n 0, index=network.timeseries.timeindex)\n for gen in gens:\n generation_timeseries += gen.timeseries.p\n\n load_timeseries = pd.Series(0, index=network.timeseries.timeindex)\n for load in loads:\n load_timeseries += load.timeseries.p\n\n residual_load = load_timeseries - generation_timeseries\n\n timeseries_load_feedin_case = residual_load.rename(\n 'residual_load').to_frame()\n\n timeseries_load_feedin_case['case'] = \\\n timeseries_load_feedin_case.residual_load.apply(\n lambda _: 'feedin_case' if _ < 0 else 'load_case')\n\n return timeseries_load_feedin_case", "response": "Assign load - case to load - case."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef load_config(filename, config_dir=None, copy_default_config=True):\n if not config_dir:\n config_file = os.path.join(get_default_config_path(), filename)\n else:\n config_file = os.path.join(config_dir, filename)\n\n # config file does not exist -> copy default\n if not os.path.isfile(config_file):\n if copy_default_config:\n logger.info('Config file {} not 
found, I will create a '\n 'default version'.format(config_file))\n make_directory(config_dir)\n shutil.copy(os.path.join(package_path, 'config', filename.\n replace('.cfg', '_default.cfg')),\n config_file)\n else:\n message = 'Config file {} not found.'.format(config_file)\n logger.error(message)\n raise FileNotFoundError(message)\n\n if len(cfg.read(config_file)) == 0:\n message = 'Config file {} not found or empty.'.format(config_file)\n logger.error(message)\n raise FileNotFoundError(message)\n global _loaded\n _loaded = True", "response": "Loads the specified config file into the config_dir."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_default_config_path():\n config_dir = get('user_dirs', 'config_dir')\n root_dir = get('user_dirs', 'root_dir')\n root_path = os.path.join(os.path.expanduser('~'), root_dir)\n config_path = os.path.join(root_path, config_dir)\n\n # root directory does not exist\n if not os.path.isdir(root_path):\n # create it\n logger.info('eDisGo root path {} not found, I will create it.'\n .format(root_path))\n make_directory(root_path)\n\n # config directory does not exist\n if not os.path.isdir(config_path):\n # create it\n config_path = os.path.join(root_path, config_dir)\n make_directory(config_path)\n\n # copy default config files\n logger.info('eDisGo config path {} not found, I will create it.'\n .format(config_path))\n\n # copy default config files if they don't exist\n internal_config_dir = os.path.join(package_path, 'config')\n for file in glob(os.path.join(internal_config_dir, '*.cfg')):\n filename = os.path.join(config_path,\n os.path.basename(file).replace('_default', ''))\n if not os.path.isfile(filename):\n logger.info('I will create a default config file {} in {}'\n .format(file, config_path))\n shutil.copy(file, filename)\n return config_path", "response": "Returns the basic edisgo config path. 
If it does not exist it creates it and copies all default config files into it."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nmakes directory if it does not exist.", "response": "def make_directory(directory):\n \"\"\"\n Makes directory if it does not exist.\n\n Parameters\n -----------\n directory : :obj:`str`\n Directory path\n\n \"\"\"\n if not os.path.isdir(directory):\n os.mkdir(directory)\n logger.info('Path {} not found, I will create it.'\n .format(directory))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncheck for over - loading issues in MV grid.", "response": "def mv_line_load(network):\n \"\"\"\n Checks for over-loading issues in MV grid.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded MV lines, their maximum relative\n over-loading and the corresponding time step.\n Index of the dataframe are the over-loaded lines of type\n :class:`~.grid.components.Line`. 
Columns are 'max_rel_overload'\n containing the maximum relative over-loading as float and 'time_index'\n containing the corresponding time step the over-loading occured in as\n :pandas:`pandas.Timestamp<timestamp>`.\n\n Notes\n -----\n Line over-load is determined based on allowed load factors for feed-in and\n load cases that are defined in the config file 'config_grid_expansion' in\n section 'grid_expansion_load_factors'.\n\n \"\"\"\n\n crit_lines = pd.DataFrame()\n crit_lines = _line_load(network, network.mv_grid, crit_lines)\n\n if not crit_lines.empty:\n logger.debug('==> {} line(s) in MV grid has/have load issues.'.format(\n crit_lines.shape[0]))\n else:\n logger.debug('==> No line load issues in MV grid.')\n\n return crit_lines"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef lv_line_load(network):\n\n crit_lines = pd.DataFrame()\n\n for lv_grid in network.mv_grid.lv_grids:\n crit_lines = _line_load(network, lv_grid, crit_lines)\n\n if not crit_lines.empty:\n logger.debug('==> {} line(s) in LV grids has/have load issues.'.format(\n crit_lines.shape[0]))\n else:\n logger.debug('==> No line load issues in LV grids.')\n\n return crit_lines", "response": "Checks for over - loading issues in LV grids."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck for over - loading issues of lines.", "response": "def _line_load(network, grid, crit_lines):\n \"\"\"\n Checks for over-loading issues of lines.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n grid : :class:`~.grid.grids.LVGrid` or :class:`~.grid.grids.MVGrid`\n crit_lines : :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded lines, their maximum relative\n over-loading and the corresponding time step.\n Index of the dataframe are the over-loaded lines of type\n :class:`~.grid.components.Line`. 
Columns are 'max_rel_overload'\n containing the maximum relative over-loading as float and 'time_index'\n containing the corresponding time step the over-loading occured in as\n :pandas:`pandas.Timestamp<timestamp>`.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded lines, their maximum relative\n over-loading and the corresponding time step.\n Index of the dataframe are the over-loaded lines of type\n :class:`~.grid.components.Line`. Columns are 'max_rel_overload'\n containing the maximum relative over-loading as float and 'time_index'\n containing the corresponding time step the over-loading occured in as\n :pandas:`pandas.Timestamp<timestamp>`.\n\n \"\"\"\n if isinstance(grid, LVGrid):\n grid_level = 'lv'\n else:\n grid_level = 'mv'\n\n for line in list(grid.graph.lines()):\n i_line_allowed_per_case = {}\n i_line_allowed_per_case['feedin_case'] = \\\n line['line'].type['I_max_th'] * line['line'].quantity * \\\n network.config['grid_expansion_load_factors'][\n '{}_feedin_case_line'.format(grid_level)]\n i_line_allowed_per_case['load_case'] = \\\n line['line'].type['I_max_th'] * line['line'].quantity * \\\n network.config['grid_expansion_load_factors'][\n '{}_load_case_line'.format(grid_level)]\n # maximum allowed line load in each time step\n i_line_allowed = \\\n network.timeseries.timesteps_load_feedin_case.case.apply(\n lambda _: i_line_allowed_per_case[_])\n try:\n # check if maximum current from power flow analysis exceeds\n # allowed maximum current\n i_line_pfa = network.results.i_res[repr(line['line'])]\n if any((i_line_allowed - i_line_pfa) < 0):\n # find out largest relative deviation\n relative_i_res = i_line_pfa / i_line_allowed\n crit_lines = crit_lines.append(pd.DataFrame(\n {'max_rel_overload': relative_i_res.max(),\n 'time_index': relative_i_res.idxmax()},\n index=[line['line']]))\n except KeyError:\n logger.debug('No results for line {} '.format(str(line)) +\n 'to check overloading.')\n\n return 
crit_lines"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef hv_mv_station_load(network):\n crit_stations = pd.DataFrame()\n crit_stations = _station_load(network, network.mv_grid.station,\n crit_stations)\n if not crit_stations.empty:\n logger.debug('==> HV/MV station has load issues.')\n else:\n logger.debug('==> No HV/MV station load issues.')\n\n return crit_stations", "response": "Checks for over - loading of HV or MV station."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck for over - loading of MV and LV stations.", "response": "def mv_lv_station_load(network):\n \"\"\"\n Checks for over-loading of MV/LV stations.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded MV/LV stations, their apparent power\n at maximal over-loading and the corresponding time step.\n Index of the dataframe are the over-loaded stations of type\n :class:`~.grid.components.LVStation`. 
Columns are 's_pfa'\n containing the apparent power at maximal over-loading as float and\n 'time_index' containing the corresponding time step the over-loading\n occured in as :pandas:`pandas.Timestamp<timestamp>`.\n\n Notes\n -----\n Over-load is determined based on allowed load factors for feed-in and\n load cases that are defined in the config file 'config_grid_expansion' in\n section 'grid_expansion_load_factors'.\n\n \"\"\"\n\n crit_stations = pd.DataFrame()\n\n for lv_grid in network.mv_grid.lv_grids:\n crit_stations = _station_load(network, lv_grid.station,\n crit_stations)\n if not crit_stations.empty:\n logger.debug('==> {} MV/LV station(s) has/have load issues.'.format(\n crit_stations.shape[0]))\n else:\n logger.debug('==> No MV/LV station load issues.')\n\n return crit_stations"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nchecks for over - loading of stations.", "response": "def _station_load(network, station, crit_stations):\n \"\"\"\n Checks for over-loading of stations.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n station : :class:`~.grid.components.LVStation` or :class:`~.grid.components.MVStation`\n crit_stations : :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded stations, their apparent power at\n maximal over-loading and the corresponding time step.\n Index of the dataframe are the over-loaded stations either of type\n :class:`~.grid.components.LVStation` or\n :class:`~.grid.components.MVStation`. 
Columns are 's_pfa'\n containing the apparent power at maximal over-loading as float and\n 'time_index' containing the corresponding time step the over-loading\n occured in as :pandas:`pandas.Timestamp<timestamp>`.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Dataframe containing over-loaded stations, their apparent power at\n maximal over-loading and the corresponding time step.\n Index of the dataframe are the over-loaded stations either of type\n :class:`~.grid.components.LVStation` or\n :class:`~.grid.components.MVStation`. Columns are 's_pfa'\n containing the apparent power at maximal over-loading as float and\n 'time_index' containing the corresponding time step the over-loading\n occured in as :pandas:`pandas.Timestamp<timestamp>`.\n\n \"\"\"\n if isinstance(station, LVStation):\n grid_level = 'lv'\n else:\n grid_level = 'mv'\n\n # maximum allowed apparent power of station for feed-in and load case\n s_station = sum([_.type.S_nom for _ in station.transformers])\n s_station_allowed_per_case = {}\n s_station_allowed_per_case['feedin_case'] = s_station * network.config[\n 'grid_expansion_load_factors']['{}_feedin_case_transformer'.format(\n grid_level)]\n s_station_allowed_per_case['load_case'] = s_station * network.config[\n 'grid_expansion_load_factors']['{}_load_case_transformer'.format(\n grid_level)]\n # maximum allowed apparent power of station in each time step\n s_station_allowed = \\\n network.timeseries.timesteps_load_feedin_case.case.apply(\n lambda _: s_station_allowed_per_case[_])\n\n try:\n if isinstance(station, LVStation):\n s_station_pfa = network.results.s_res(\n station.transformers).sum(axis=1)\n else:\n s_station_pfa = network.results.s_res([station]).iloc[:, 0]\n s_res = s_station_allowed - s_station_pfa\n s_res = s_res[s_res < 0]\n # check if maximum allowed apparent power of station exceeds\n # apparent power from power flow analysis at any time step\n if not s_res.empty:\n # find out largest relative deviation\n 
load_factor = \\\n network.timeseries.timesteps_load_feedin_case.case.apply(\n lambda _: network.config[\n 'grid_expansion_load_factors'][\n '{}_{}_transformer'.format(grid_level, _)])\n relative_s_res = load_factor * s_res\n crit_stations = crit_stations.append(pd.DataFrame(\n {'s_pfa': s_station_pfa.loc[relative_s_res.idxmin()],\n 'time_index': relative_s_res.idxmin()},\n index=[station]))\n\n except KeyError:\n logger.debug('No results for {} station to check overloading.'.format(\n grid_level.upper()))\n\n return crit_stations"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef mv_voltage_deviation(network, voltage_levels='mv_lv'):\n\n crit_nodes = {}\n\n v_dev_allowed_per_case = {}\n v_dev_allowed_per_case['feedin_case_lower'] = 0.9\n v_dev_allowed_per_case['load_case_upper'] = 1.1\n offset = network.config[\n 'grid_expansion_allowed_voltage_deviations']['hv_mv_trafo_offset']\n control_deviation = network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'hv_mv_trafo_control_deviation']\n if voltage_levels == 'mv_lv':\n v_dev_allowed_per_case['feedin_case_upper'] = \\\n 1 + offset + control_deviation + network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'mv_lv_feedin_case_max_v_deviation']\n v_dev_allowed_per_case['load_case_lower'] = \\\n 1 + offset - control_deviation - network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'mv_lv_load_case_max_v_deviation']\n elif voltage_levels == 'mv':\n v_dev_allowed_per_case['feedin_case_upper'] = \\\n 1 + offset + control_deviation + network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'mv_feedin_case_max_v_deviation']\n v_dev_allowed_per_case['load_case_lower'] = \\\n 1 + offset - control_deviation - network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'mv_load_case_max_v_deviation']\n else:\n raise ValueError(\n 'Specified mode {} is not a valid option.'.format(voltage_levels))\n # maximum 
allowed apparent power of station in each time step\n v_dev_allowed_upper = \\\n network.timeseries.timesteps_load_feedin_case.case.apply(\n lambda _: v_dev_allowed_per_case['{}_upper'.format(_)])\n v_dev_allowed_lower = \\\n network.timeseries.timesteps_load_feedin_case.case.apply(\n lambda _: v_dev_allowed_per_case['{}_lower'.format(_)])\n\n nodes = network.mv_grid.graph.nodes()\n\n crit_nodes_grid = _voltage_deviation(\n network, nodes, v_dev_allowed_upper, v_dev_allowed_lower,\n voltage_level='mv')\n\n if not crit_nodes_grid.empty:\n crit_nodes[network.mv_grid] = crit_nodes_grid.sort_values(\n by=['v_mag_pu'], ascending=False)\n logger.debug(\n '==> {} node(s) in MV grid has/have voltage issues.'.format(\n crit_nodes[network.mv_grid].shape[0]))\n else:\n logger.debug('==> No voltage issues in MV grid.')\n\n return crit_nodes", "response": "Returns a new MV grid with over - voltage issues."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nchecks for voltage stability issues in LV grids.", "response": "def lv_voltage_deviation(network, mode=None, voltage_levels='mv_lv'):\n \"\"\"\n Checks for voltage stability issues in LV grids.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n mode : None or String\n If None voltage at all nodes in LV grid is checked. If mode is set to\n 'stations' only voltage at busbar is checked.\n voltage_levels : :obj:`str`\n Specifies which allowed voltage deviations to use. Possible options\n are:\n\n * 'mv_lv'\n This is the default. The allowed voltage deviation for nodes in the\n MV grid is the same as for nodes in the LV grid. Further load and\n feed-in case are not distinguished.\n * 'lv'\n Use this to handle allowed voltage deviations in the MV and LV grid\n differently. 
Here, load and feed-in case are differentiated as well.\n\n Returns\n -------\n :obj:`dict`\n Dictionary with :class:`~.grid.grids.LVGrid` as key and a\n :pandas:`pandas.DataFrame<dataframe>` with its critical nodes, sorted\n descending by voltage deviation, as value.\n Index of the dataframe are all nodes (of type\n :class:`~.grid.components.Generator`, :class:`~.grid.components.Load`,\n etc.) with over-voltage issues. Columns are 'v_mag_pu' containing the\n maximum voltage deviation as float and 'time_index' containing the\n corresponding time step the over-voltage occured in as\n :pandas:`pandas.Timestamp<timestamp>`.\n\n Notes\n -----\n Over-voltage is determined based on allowed voltage deviations defined in\n the config file 'config_grid_expansion' in section\n 'grid_expansion_allowed_voltage_deviations'.\n\n \"\"\"\n\n crit_nodes = {}\n\n v_dev_allowed_per_case = {}\n if voltage_levels == 'mv_lv':\n offset = network.config[\n 'grid_expansion_allowed_voltage_deviations']['hv_mv_trafo_offset']\n control_deviation = network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'hv_mv_trafo_control_deviation']\n v_dev_allowed_per_case['feedin_case_upper'] = \\\n 1 + offset + control_deviation + network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'mv_lv_feedin_case_max_v_deviation']\n v_dev_allowed_per_case['load_case_lower'] = \\\n 1 + offset - control_deviation - network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'mv_lv_load_case_max_v_deviation']\n\n v_dev_allowed_per_case['feedin_case_lower'] = 0.9\n v_dev_allowed_per_case['load_case_upper'] = 1.1\n\n v_dev_allowed_upper = \\\n network.timeseries.timesteps_load_feedin_case.case.apply(\n lambda _: v_dev_allowed_per_case['{}_upper'.format(_)])\n v_dev_allowed_lower = \\\n network.timeseries.timesteps_load_feedin_case.case.apply(\n lambda _: v_dev_allowed_per_case['{}_lower'.format(_)])\n elif voltage_levels == 'lv':\n pass\n else:\n raise ValueError(\n 'Specified mode {} is not 
a valid option.'.format(voltage_levels))\n\n for lv_grid in network.mv_grid.lv_grids:\n\n if mode:\n if mode == 'stations':\n nodes = [lv_grid.station]\n else:\n raise ValueError(\n \"{} is not a valid option for input variable 'mode' in \"\n \"function lv_voltage_deviation. Try 'stations' or \"\n \"None\".format(mode))\n else:\n nodes = lv_grid.graph.nodes()\n\n if voltage_levels == 'lv':\n if mode == 'stations':\n # get voltage at primary side to calculate upper bound for\n # feed-in case and lower bound for load case\n v_lv_station_primary = network.results.v_res(\n nodes=[lv_grid.station], level='mv').iloc[:, 0]\n timeindex = v_lv_station_primary.index\n v_dev_allowed_per_case['feedin_case_upper'] = \\\n v_lv_station_primary + network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'mv_lv_station_feedin_case_max_v_deviation']\n v_dev_allowed_per_case['load_case_lower'] = \\\n v_lv_station_primary - network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'mv_lv_station_load_case_max_v_deviation']\n else:\n # get voltage at secondary side to calculate upper bound for\n # feed-in case and lower bound for load case\n v_lv_station_secondary = network.results.v_res(\n nodes=[lv_grid.station], level='lv').iloc[:, 0]\n timeindex = v_lv_station_secondary.index\n v_dev_allowed_per_case['feedin_case_upper'] = \\\n v_lv_station_secondary + network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'lv_feedin_case_max_v_deviation']\n v_dev_allowed_per_case['load_case_lower'] = \\\n v_lv_station_secondary - network.config[\n 'grid_expansion_allowed_voltage_deviations'][\n 'lv_load_case_max_v_deviation']\n v_dev_allowed_per_case['feedin_case_lower'] = pd.Series(\n 0.9, index=timeindex)\n v_dev_allowed_per_case['load_case_upper'] = pd.Series(\n 1.1, index=timeindex)\n # maximum allowed voltage deviation in each time step\n v_dev_allowed_upper = []\n v_dev_allowed_lower = []\n for t in timeindex:\n case = \\\n 
network.timeseries.timesteps_load_feedin_case.loc[\n t, 'case']\n v_dev_allowed_upper.append(\n v_dev_allowed_per_case[\n '{}_upper'.format(case)].loc[t])\n v_dev_allowed_lower.append(\n v_dev_allowed_per_case[\n '{}_lower'.format(case)].loc[t])\n v_dev_allowed_upper = pd.Series(v_dev_allowed_upper,\n index=timeindex)\n v_dev_allowed_lower = pd.Series(v_dev_allowed_lower,\n index=timeindex)\n\n crit_nodes_grid = _voltage_deviation(\n network, nodes, v_dev_allowed_upper, v_dev_allowed_lower,\n voltage_level='lv')\n\n if not crit_nodes_grid.empty:\n crit_nodes[lv_grid] = crit_nodes_grid.sort_values(\n by=['v_mag_pu'], ascending=False)\n\n if crit_nodes:\n if mode == 'stations':\n logger.debug(\n '==> {} LV station(s) has/have voltage issues.'.format(\n len(crit_nodes)))\n else:\n logger.debug(\n '==> {} LV grid(s) has/have voltage issues.'.format(\n len(crit_nodes)))\n else:\n if mode == 'stations':\n logger.debug('==> No voltage issues in LV stations.')\n else:\n logger.debug('==> No voltage issues in LV grids.')\n\n return crit_nodes"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchecking for voltage stability issues in LV grids.", "response": "def _voltage_deviation(network, nodes, v_dev_allowed_upper,\n v_dev_allowed_lower, voltage_level):\n \"\"\"\n Checks for voltage stability issues in LV grids.\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n nodes : :obj:`list`\n List of nodes (of type :class:`~.grid.components.Generator`,\n :class:`~.grid.components.Load`, etc.) 
to check voltage deviation for.\n v_dev_allowed_upper : :pandas:`pandas.Series<series>`\n Series with time steps (of type :pandas:`pandas.Timestamp<timestamp>`)\n power flow analysis was conducted for and the allowed upper limit of\n voltage deviation for each time step as float.\n v_dev_allowed_lower : :pandas:`pandas.Series<series>`\n Series with time steps (of type :pandas:`pandas.Timestamp<timestamp>`)\n power flow analysis was conducted for and the allowed lower limit of\n voltage deviation for each time step as float.\n voltage_levels : :obj:`str`\n Specifies which voltage level to retrieve power flow analysis results\n for. Possible options are 'mv' and 'lv'.\n\n Returns\n -------\n :pandas:`pandas.DataFrame<dataframe>`\n Dataframe with critical nodes, sorted descending by voltage deviation.\n Index of the dataframe are all nodes (of type\n :class:`~.grid.components.Generator`, :class:`~.grid.components.Load`,\n etc.) with over-voltage issues. Columns are 'v_mag_pu' containing the\n maximum voltage deviation as float and 'time_index' containing the\n corresponding time step the over-voltage occured in as\n :pandas:`pandas.Timestamp<timestamp>`.\n\n \"\"\"\n\n def _append_crit_node(series):\n return pd.DataFrame({'v_mag_pu': series.max(),\n 'time_index': series.idxmax()},\n index=[node])\n\n crit_nodes_grid = pd.DataFrame()\n\n v_mag_pu_pfa = network.results.v_res(nodes=nodes, level=voltage_level)\n\n for node in nodes:\n # check for over- and under-voltage\n overvoltage = v_mag_pu_pfa[repr(node)][\n (v_mag_pu_pfa[repr(node)] > (v_dev_allowed_upper.loc[\n v_mag_pu_pfa.index]))]\n undervoltage = v_mag_pu_pfa[repr(node)][\n (v_mag_pu_pfa[repr(node)] < (v_dev_allowed_lower.loc[\n v_mag_pu_pfa.index]))]\n\n # write greatest voltage deviation to dataframe\n if not overvoltage.empty:\n overvoltage_diff = overvoltage - v_dev_allowed_upper.loc[\n overvoltage.index]\n if not undervoltage.empty:\n undervoltage_diff = v_dev_allowed_lower.loc[\n undervoltage.index] - 
undervoltage\n if overvoltage_diff.max() > undervoltage_diff.max():\n crit_nodes_grid = crit_nodes_grid.append(\n _append_crit_node(overvoltage_diff))\n else:\n crit_nodes_grid = crit_nodes_grid.append(\n _append_crit_node(undervoltage_diff))\n else:\n crit_nodes_grid = crit_nodes_grid.append(\n _append_crit_node(overvoltage_diff))\n elif not undervoltage.empty:\n undervoltage_diff = v_dev_allowed_lower.loc[\n undervoltage.index] - undervoltage\n crit_nodes_grid = crit_nodes_grid.append(\n _append_crit_node(undervoltage_diff))\n\n return crit_nodes_grid"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncheck if 10% criteria is exceeded.", "response": "def check_ten_percent_voltage_deviation(network):\n \"\"\"\n Checks if 10% criteria is exceeded.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n\n \"\"\"\n\n v_mag_pu_pfa = network.results.v_res()\n if (v_mag_pu_pfa > 1.1).any().any() or (v_mag_pu_pfa < 0.9).any().any():\n message = \"Maximum allowed voltage deviation of 10% exceeded.\"\n raise ValueError(message)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef proj2equidistant(network):\n srid = int(network.config['geo']['srid'])\n\n return partial(pyproj.transform,\n pyproj.Proj(init='epsg:{}'\n .format(str(srid))), # source coordinate system\n pyproj.Proj(init='epsg:3035') # destination coordinate system\n )", "response": "Defines conformal ( e. g. WGS84 to ETRS"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncalculate lines in nodes associated graph that are at least partly partly within buffer of radius from node.", "response": "def calc_geo_lines_in_buffer(network, node, grid, radius, radius_inc):\n \"\"\"Determines lines in nodes' associated graph that are at least partly\n within buffer of radius from node. 
If there are no lines, the buffer is\n successively extended by radius_inc until lines are found.\n\n Parameters\n ----------\n network : :class:`~.grid.network.Network`\n The eDisGo container object\n node : :class:`~.grid.components.Component`\n Origin node the buffer is created around (e.g. :class:`~.grid.components.Generator`).\n Node must be a member of grid's graph (grid.graph)\n grid : :class:`~.grid.grids.Grid`\n Grid whose lines are searched\n radius : :obj:`float`\n Buffer radius in m\n radius_inc : :obj:`float`\n Buffer radius increment in m\n\n Returns\n -------\n :obj:`list` of :class:`~.grid.components.Line`\n Sorted (by repr()) list of lines\n\n Notes\n -----\n Adapted from `Ding0 <https://github.com/openego/ding0/blob/\\\n 21a52048f84ec341fe54e0204ac62228a9e8a32a/\\\n ding0/tools/geo.py#L53>`_.\n \"\"\"\n\n lines = []\n\n while not lines:\n node_shp = transform(proj2equidistant(network), node.geom)\n buffer_zone_shp = node_shp.buffer(radius)\n for line in grid.graph.lines():\n nodes = line['adj_nodes']\n branch_shp = transform(proj2equidistant(network), LineString([nodes[0].geom, nodes[1].geom]))\n if buffer_zone_shp.intersects(branch_shp):\n lines.append(line)\n radius += radius_inc\n\n return sorted(lines, key=lambda _: repr(_))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef calc_geo_dist_vincenty(network, node_source, node_target):\n\n branch_detour_factor = network.config['grid_connection'][\n 'branch_detour_factor']\n\n # notice: vincenty takes (lat,lon)\n branch_length = branch_detour_factor * vincenty((node_source.geom.y, node_source.geom.x),\n (node_target.geom.y, node_target.geom.x)).m\n\n # ========= BUG: LINE LENGTH=0 WHEN CONNECTING GENERATORS ===========\n # When importing generators, the geom_new field is used as position. 
If it is empty, EnergyMap's geom\n # is used and so there are a couple of generators at the same position => length of interconnecting\n # line is 0. See issue #76\n if branch_length == 0:\n branch_length = 1\n logger.debug('Geo distance is zero, check objects\\' positions. '\n 'Distance is set to 1m')\n # ===================================================================\n\n return branch_length", "response": "Calculates the geodesic distance between node_source and node_target and returns the distance in m"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_username(details, backend, response, *args, **kwargs):\n\n user = details.get('user')\n if not user:\n user_uuid = kwargs.get('uid')\n if not user_uuid:\n return\n\n username = uuid_to_username(user_uuid)\n else:\n username = user.username\n\n return {\n 'username': username\n }", "response": "Sets the username argument."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate user based on token information.", "response": "def pre_social_login(self, request, sociallogin):\n \"\"\"Update user based on token information.\"\"\"\n\n user = sociallogin.user\n # If the user hasn't been saved yet, it will be updated\n # later on in the sign-up flow.\n if not user.pk:\n return\n\n data = sociallogin.account.extra_data\n oidc = sociallogin.account.provider == 'helsinki_oidc'\n update_user(user, data, oidc)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef sync_groups_from_ad(self):\n\n ad_list = ADGroupMapping.objects.values_list('ad_group', 'group')\n mappings = {ad_group: group for ad_group, group in ad_list}\n\n user_ad_groups = set(self.ad_groups.filter(groups__isnull=False).values_list(flat=True))\n all_mapped_groups = set(mappings.values())\n old_groups = set(self.groups.filter(id__in=all_mapped_groups).values_list(flat=True))\n new_groups 
= set([mappings[x] for x in user_ad_groups])\n\n groups_to_delete = old_groups - new_groups\n if groups_to_delete:\n self.groups.remove(*groups_to_delete)\n groups_to_add = new_groups - old_groups\n if groups_to_add:\n self.groups.add(*groups_to_add)", "response": "Determine which Django groups to add or remove based on AD groups."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\npatch rest_framework_jwt authentication settings from allauth", "response": "def patch_jwt_settings():\n \"\"\"Patch rest_framework_jwt authentication settings from allauth\"\"\"\n defaults = api_settings.defaults\n defaults['JWT_PAYLOAD_GET_USER_ID_HANDLER'] = (\n __name__ + '.get_user_id_from_payload_handler')\n\n if 'allauth.socialaccount' not in settings.INSTALLED_APPS:\n return\n\n from allauth.socialaccount.models import SocialApp\n try:\n app = SocialApp.objects.get(provider='helsinki')\n except SocialApp.DoesNotExist:\n return\n\n defaults['JWT_SECRET_KEY'] = app.secret\n defaults['JWT_AUDIENCE'] = app.client_id"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef uuid_to_username(uuid):\n uuid_data = getattr(uuid, 'bytes', None) or UUID(uuid).bytes\n b32coded = base64.b32encode(uuid_data)\n return 'u-' + b32coded.decode('ascii').replace('=', '').lower()", "response": "Convert UUID to username."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nconvert username to UUID.", "response": "def username_to_uuid(username):\n \"\"\"\n Convert username to UUID.\n\n >>> username_to_uuid('u-ad52zgilvnpgnduefzlh5jgr6y')\n UUID('00fbac99-0bab-5e66-8e84-2e567ea4d1f6')\n \"\"\"\n if not username.startswith('u-') or len(username) != 28:\n raise ValueError('Not an UUID based username: %r' % (username,))\n decoded = base64.b32decode(username[2:].upper() + '======')\n return UUID(bytes=decoded)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in 
Python 3 to\nmap OIDC claims to Django user fields.", "response": "def oidc_to_user_data(payload):\n \"\"\"\n Map OIDC claims to Django user fields.\n \"\"\"\n payload = payload.copy()\n\n field_map = {\n 'given_name': 'first_name',\n 'family_name': 'last_name',\n 'email': 'email',\n }\n ret = {}\n for token_attr, user_attr in field_map.items():\n if token_attr not in payload:\n continue\n ret[user_attr] = payload.pop(token_attr)\n ret.update(payload)\n\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef has_api_scopes(self, *api_scopes):\n if self._authorized_api_scopes is None:\n return None\n return all((x in self._authorized_api_scopes) for x in api_scopes)", "response": "Test if all given API scopes are authorized."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ntests if there is an API scope with the given prefix.", "response": "def has_api_scope_with_prefix(self, prefix):\n \"\"\"\n Test if there is an API scope with the given prefix.\n\n :rtype: bool|None\n \"\"\"\n if self._authorized_api_scopes is None:\n return None\n return any(\n x == prefix or x.startswith(prefix + '.')\n for x in self._authorized_api_scopes)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_function_by_path(dotted_function_module_path):\n # Separate the module path from the function name.\n module_path, function_name = tuple(dotted_function_module_path.rsplit('.', 1))\n\n module = import_module(module_path)\n return getattr(module, function_name)", "response": "Returns the function for a given path."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nwrite a waypoint to the specified IANA top level domain.", "response": "def write_waypoint(\n self, name, shortname, country, latitude, longitude, elevation=u'',\n style=WaypointStyle.NORMAL, runway_direction=u'', 
runway_length=u'',\n frequency=u'', description=u''):\n\n \"\"\"\n Write a waypoint::\n\n writer.write_waypoint(\n 'Meiersberg',\n 'MEIER',\n 'DE',\n (51 + 7.345 / 60.),\n (6 + 24.765 / 60.),\n )\n # -> \"Meiersberg\",\"MEIER\",DE,5107.345N,00624.765E,,1,,,,\n\n :param name: name of the waypoint (must not be empty)\n :param shortname: short name for depending GPS devices\n :param country: IANA top level domain country code (see\n http://www.iana.org/cctld/cctld-whois.htm)\n :param latitude: latitude of the point (between -90 and 90 degrees)\n :param longitude: longitude of the point (between -180 and 180 degrees)\n :param elevation: elevation of the waypoint in meters or as\n ``(elevation, unit)`` tuple\n :param style: the waypoint type (see official specification for the\n list of valid styles, defaults to \"Normal\")\n :param runway_direction: heading of the runway in degrees if the\n waypoint is landable\n :param runway_length: length of the runway in meters or as ``(length,\n unit)`` tuple if the waypoint is landable\n :param frequency: radio frequency of the airport\n :param description: optional description of the waypoint (no length\n limit)\n \"\"\"\n\n if self.in_task_section:\n raise RuntimeError(u'Waypoints must be written before any tasks')\n\n if not name:\n raise ValueError(u'Waypoint name must not be empty')\n\n fields = [\n self.escape(name),\n self.escape(shortname),\n country,\n self.format_latitude(latitude),\n self.format_longitude(longitude),\n self.format_distance(elevation),\n str(style),\n str(runway_direction),\n self.format_distance(runway_length),\n self.escape(frequency),\n self.escape(description),\n ]\n\n self.write_fields(fields)\n\n self.wps.add(name)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nwrites a task definition to file.", "response": "def write_task(self, description, waypoints):\n\n \"\"\"\n Write a task definition::\n\n writer.write_task('500 km FAI', [\n 'MEIER',\n 
'BRILO',\n 'AILER',\n 'MEIER',\n ])\n # -> \"500 km FAI\",\"MEIER\",\"BRILO\",\"AILER\",\"MEIER\"\n\n Make sure that the referenced waypoints have been written with\n :meth:`~aerofiles.seeyou.Writer.write_waypoint` before writing the\n task. The task section divider will be written to automatically when\n :meth:`~aerofiles.seeyou.Writer.write_task` is called the first time.\n After the first task is written\n :meth:`~aerofiles.seeyou.Writer.write_waypoint` must not be called\n anymore.\n\n :param description: description of the task (may be blank)\n :param waypoints: list of waypoints in the task (names must match the\n long names of previously written waypoints)\n \"\"\"\n\n if not self.in_task_section:\n self.write_line()\n self.write_line(self.DIVIDER)\n self.in_task_section = True\n\n fields = [self.escape(description)]\n\n for waypoint in waypoints:\n if waypoint not in self.wps:\n raise ValueError(u'Waypoint \"%s\" was not found' % waypoint)\n\n fields.append(self.escape(waypoint))\n\n self.write_fields(fields)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nwrites an options line for a specific task.", "response": "def write_task_options(self, **kw):\n\n \"\"\"\n Write an options line for a task definition::\n\n writer.write_task_options(\n start_time=time(12, 34, 56),\n task_time=timedelta(hours=1, minutes=45, seconds=12),\n waypoint_distance=False,\n distance_tolerance=(0.7, 'km'),\n altitude_tolerance=300.0,\n )\n # -> Options,NoStart=12:34:56,TaskTime=01:45:12,WpDis=False,NearDis=0.7km,NearAlt=300.0m\n\n :param start_time: opening time of the start line as\n :class:`datetime.time` or string\n :param task_time: designated time for the task as\n :class:`datetime.timedelta` or string\n :param waypoint_distance: task distance calculation (``False``: use\n fixes, ``True``: use waypoints)\n :param distance_tolerance: distance tolerance in meters or as\n ``(distance, unit)`` tuple\n :param altitude_tolerance: 
altitude tolerance in meters or as\n ``(distance, unit)`` tuple\n :param min_distance: \"uncompleted leg (``False``: calculate maximum\n distance from last observation zone)\"\n :param random_order: if ``True``, then Random order of waypoints is\n checked\n :param max_points: maximum number of points\n :param before_points: number of mandatory waypoints at the beginning.\n ``1`` means start line only, ``2`` means start line plus first\n point in task sequence (Task line).\n :param after_points: number of mandatory waypoints at the end. ``1``\n means finish line only, ``2`` means finish line and one point\n before finish in task sequence (Task line).\n :param bonus: bonus for crossing the finish line\n \"\"\"\n\n if not self.in_task_section:\n raise RuntimeError(\n u'Task options have to be written in task section')\n\n fields = ['Options']\n\n if 'start_time' in kw:\n fields.append(u'NoStart=' + self.format_time(kw['start_time']))\n\n if 'task_time' in kw:\n fields.append(u'TaskTime=' + self.format_timedelta(kw['task_time']))\n\n if 'waypoint_distance' in kw:\n fields.append(u'WpDis=%s' % kw['waypoint_distance'])\n\n if 'distance_tolerance' in kw:\n fields.append(\n u'NearDis=' + self.format_distance(kw['distance_tolerance']))\n\n if 'altitude_tolerance' in kw:\n fields.append(\n u'NearAlt=' + self.format_distance(kw['altitude_tolerance']))\n\n if 'min_distance' in kw:\n fields.append(u'MinDis=%s' % kw['min_distance'])\n\n if 'random_order' in kw:\n fields.append(u'RandomOrder=%s' % kw['random_order'])\n\n if 'max_points' in kw:\n fields.append(u'MaxPts=%d' % kw['max_points'])\n\n if 'before_points' in kw:\n fields.append(u'BeforePts=%d' % kw['before_points'])\n\n if 'after_points' in kw:\n fields.append(u'AfterPts=%d' % kw['after_points'])\n\n if 'bonus' in kw:\n fields.append(u'Bonus=%d' % kw['bonus'])\n\n self.write_fields(fields)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef 
write_observation_zone(self, num, **kw):\n\n \"\"\"\n Write observation zone information for a taskpoint::\n\n writer.write_task_options(\n start_time=time(12, 34, 56),\n task_time=timedelta(hours=1, minutes=45, seconds=12),\n waypoint_distance=False,\n distance_tolerance=(0.7, 'km'),\n altitude_tolerance=300.0,\n )\n # -> Options,NoStart=12:34:56,TaskTime=01:45:12,WpDis=False,NearDis=0.7km,NearAlt=300.0m\n\n :param num: consecutive number of a waypoint (``0``: Start)\n :param style: direction (``0``: Fixed value, ``1``: Symmetrical, ``2``:\n To next point, ``3``: To previous point, ``4``: To start point\n :param radius: radius 1 in meter or as ``(radius, unit)`` tuple\n :param angle: angle 1 in degrees\n :param radius2: radius 2 in meter or as ``(radius, unit)`` tuple\n :param angle 2: angle 2 in degrees\n :param angle12: angle 12 in degress\n :param line: should be ``True`` if start or finish line\n \"\"\"\n\n if not self.in_task_section:\n raise RuntimeError(\n u'Observation zones have to be written in task section')\n\n fields = [u'ObsZone=%d' % num]\n\n if 'style' in kw:\n fields.append(u'Style=%d' % kw['style'])\n\n if 'radius' in kw:\n fields.append(u'R1=' + self.format_distance(kw['radius']))\n\n if 'angle' in kw:\n fields.append(u'A1=' + self.format_angle(kw['angle']))\n\n if 'radius2' in kw:\n fields.append(u'R2=' + self.format_distance(kw['radius2']))\n\n if 'angle2' in kw:\n fields.append(u'A2=' + self.format_angle(kw['angle2']))\n\n if 'angle12' in kw:\n fields.append(u'A12=' + self.format_angle(kw['angle12']))\n\n if 'line' in kw:\n fields.append(u'Line=' + ('1' if kw['line'] else '0'))\n\n self.write_fields(fields)", "response": "Write observation zone information for a taskpoint."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef register_site(self):\n if self.oxd_id:\n logger.info('Client is already registered. 
ID: %s', self.oxd_id)\n return self.oxd_id\n\n # add required params for the command\n params = {\n \"authorization_redirect_uri\": self.authorization_redirect_uri,\n\t\t\t\"oxd_rp_programming_language\": \"python\",\n }\n\n # add other optional params if they exist in config\n for op in self.opt_params:\n if self.config.get(\"client\", op):\n params[op] = self.config.get(\"client\", op)\n\n for olp in self.opt_list_params:\n if self.config.get(\"client\", olp):\n params[olp] = self.config.get(\"client\", olp).split(\",\")\n\n logger.debug(\"Sending command `register_site` with params %s\", params)\n response = self.msgr.request(\"register_site\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n\n self.oxd_id = response[\"data\"][\"oxd_id\"]\n self.config.set(\"oxd\", \"id\", self.oxd_id)\n logger.info(\"Site registration successful. Oxd ID: %s\", self.oxd_id)\n return self.oxd_id", "response": "Function to register the site and generate a unique ID for the site."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nfunctioning to get the authorization url that the user must access for authentication and authentication.", "response": "def get_authorization_url(self, acr_values=None, prompt=None, scope=None,\n custom_params=None):\n \"\"\"Function to get the authorization url that can be opened in the\n browser for the user to provide authorization and authentication\n\n Parameters:\n * **acr_values (list, optional):** acr values in the order of priority\n * **prompt (string, optional):** prompt=login is required if you want to force alter current user session (in case user is already logged in from site1 and site2 constructs authorization request and want to force alter current user session)\n * **scope (list, optional):** scopes required, takes the one provided during site registrations by default\n * **custom_params (dict, optional):** Any custom 
arguments that the client wishes to pass on to the OP can be passed on as extra parameters to the function\n\n Returns:\n **string:** The authorization url that the user must access for authentication and authorization\n\n Raises:\n **OxdServerError:** If the oxd throws an error for any reason.\n \"\"\"\n params = {\"oxd_id\": self.oxd_id}\n\n if scope and isinstance(scope, list):\n params[\"scope\"] = scope\n\n if acr_values and isinstance(acr_values, list):\n params[\"acr_values\"] = acr_values\n\n if prompt and isinstance(prompt, str):\n params[\"prompt\"] = prompt\n\n if custom_params:\n params[\"custom_parameters\"] = custom_params\n\n logger.debug(\"Sending command `get_authorization_url` with params %s\",\n params)\n response = self.msgr.request(\"get_authorization_url\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n return response['data']['authorization_url']"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_tokens_by_code(self, code, state):\n params = dict(oxd_id=self.oxd_id, code=code, state=state)\n\n logger.debug(\"Sending command `get_tokens_by_code` with params %s\",\n params)\n response = self.msgr.request(\"get_tokens_by_code\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n return response['data']", "response": "Function to get the user details from the user details."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_access_token_by_refresh_token(self, refresh_token, scope=None):\n params = {\n \"oxd_id\": self.oxd_id,\n \"refresh_token\": refresh_token\n }\n\n if scope:\n params['scope'] = scope\n\n logger.debug(\"Sending command `get_access_token_by_refresh_token` with\"\n \" params %s\", params)\n response = 
self.msgr.request(\"get_access_token_by_refresh_token\",\n **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n return response['data']", "response": "Function that is used to get a new access token using a refresh token."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nfunctions to get the user info from the OP", "response": "def get_user_info(self, access_token):\n \"\"\"Function to get the information about the user using the access code\n obtained from the OP\n\n Note:\n Refer to the /.well-known/openid-configuration URL of your OP for\n the complete list of the claims for different scopes.\n\n Parameters:\n * **access_token (string):** access token from the get_tokens_by_code function\n\n Returns:\n **dict:** The user data claims that are returned by the OP in format\n\n Example response::\n\n {\n \"sub\": [\"248289761001\"],\n \"name\": [\"Jane Doe\"],\n \"given_name\": [\"Jane\"],\n \"family_name\": [\"Doe\"],\n \"preferred_username\": [\"j.doe\"],\n \"email\": [\"janedoe@example.com\"],\n \"picture\": [\"http://example.com/janedoe/me.jpg\"]\n }\n\n Raises:\n **OxdServerError:** If the param access_token is empty OR if the oxd\n Server returns an error.\n \"\"\"\n params = dict(oxd_id=self.oxd_id, access_token=access_token)\n params[\"access_token\"] = access_token\n logger.debug(\"Sending command `get_user_info` with params %s\",\n params)\n response = self.msgr.request(\"get_user_info\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n return response['data']['claims']"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nfunctions to logout the user.", "response": "def get_logout_uri(self, id_token_hint=None, post_logout_redirect_uri=None,\n state=None, session_state=None):\n 
\"\"\"Function to logout the user.\n\n Parameters:\n * **id_token_hint (string, optional):** oxd server will use last used ID Token, if not provided\n * **post_logout_redirect_uri (string, optional):** URI to redirect, this uri would override the value given in the site-config\n * **state (string, optional):** site state\n * **session_state (string, optional):** session state\n\n Returns:\n **string:** The URI to which the user must be directed in order to\n perform the logout\n \"\"\"\n params = {\"oxd_id\": self.oxd_id}\n if id_token_hint:\n params[\"id_token_hint\"] = id_token_hint\n\n if post_logout_redirect_uri:\n params[\"post_logout_redirect_uri\"] = post_logout_redirect_uri\n\n if state:\n params[\"state\"] = state\n\n if session_state:\n params[\"session_state\"] = session_state\n\n logger.debug(\"Sending command `get_logout_uri` with params %s\", params)\n response = self.msgr.request(\"get_logout_uri\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n return response['data']['uri']"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_site(self, client_secret_expires_at=None):\n params = {\n \"oxd_id\": self.oxd_id,\n \"authorization_redirect_uri\": self.authorization_redirect_uri\n }\n\n if client_secret_expires_at:\n params[\"client_secret_expires_at\"] = client_secret_expires_at\n\n for param in self.opt_params:\n if self.config.get(\"client\", param):\n value = self.config.get(\"client\", param)\n params[param] = value\n\n for param in self.opt_list_params:\n if self.config.get(\"client\", param):\n value = self.config.get(\"client\", param).split(\",\")\n params[param] = value\n\n logger.debug(\"Sending `update_site` with params %s\",\n params)\n response = self.msgr.request(\"update_site\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n 
raise OxdServerError(response['data'])\n\n return True", "response": "Function to update the site s information with OpenID Provider. This function should be called after changing the values in the cfg file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nfunctioning to be used in a UMA Resource Server to protect resources.", "response": "def uma_rs_protect(self, resources, overwrite=None):\n \"\"\"Function to be used in a UMA Resource Server to protect resources.\n\n Parameters:\n * **resources (list):** list of resource to protect. See example at `here <https://gluu.org/docs/oxd/3.1.2/api/#uma-rs-protect-resources>`_\n * **overwrite (bool):** If true, Allows to update existing resources\n\n Returns:\n **bool:** The status of the request.\n \"\"\"\n params = dict(oxd_id=self.oxd_id, resources=resources)\n\n if overwrite:\n params[\"overwrite\"] = overwrite\n\n logger.debug(\"Sending `uma_rs_protect` with params %s\", params)\n response = self.msgr.request(\"uma_rs_protect\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n return True"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nfunctioning to be used in a UMA Resource Server to check access.", "response": "def uma_rs_check_access(self, rpt, path, http_method):\n \"\"\"Function to be used in a UMA Resource Server to check access.\n\n Parameters:\n * **rpt (string):** RPT or blank value if absent (not send by RP)\n * **path (string):** Path of resource (e.g. 
for http://rs.com/phones, /phones should be passed)\n * **http_method (string):** Http method of RP request (GET, POST, PUT, DELETE)\n\n Returns:\n **dict:** The access information received in the format below.\n\n If the access is granted::\n\n { \"access\": \"granted\" }\n\n If the access is denied with ticket response::\n\n {\n \"access\": \"denied\",\n \"www-authenticate_header\": \"UMA realm='example',\n as_uri='https://as.example.com',\n error='insufficient_scope',\n ticket='016f84e8-f9b9-11e0-bd6f-0021cc6004de'\",\n \"ticket\": \"016f84e8-f9b9-11e0-bd6f-0021cc6004de\"\n }\n\n If the access is denied without ticket response::\n\n { \"access\": \"denied\" }\n\n Raises:\n ``oxdpython.exceptions.InvalidRequestError`` if the resource is not\n protected\n \"\"\"\n params = {\"oxd_id\": self.oxd_id,\n \"rpt\": rpt,\n \"path\": path,\n \"http_method\": http_method}\n\n logger.debug(\"Sending command `uma_rs_check_access` with params %s\",\n params)\n response = self.msgr.request(\"uma_rs_check_access\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n if response['data']['error'] == 'invalid_request':\n raise InvalidRequestError(response['data'])\n else:\n raise OxdServerError(response['data'])\n return response['data']"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef uma_rp_get_rpt(self, ticket, claim_token=None, claim_token_format=None,\n pct=None, rpt=None, scope=None, state=None):\n \"\"\"Function to be used by a UMA Requesting Party to get RPT token.\n\n Parameters:\n * **ticket (str, REQUIRED):** ticket\n * **claim_token (str, OPTIONAL):** claim token\n * **claim_token_format (str, OPTIONAL):** claim token format\n * **pct (str, OPTIONAL):** pct\n * **rpt (str, OPTIONAL):** rpt\n * **scope (list, OPTIONAL):** scope\n * **state (str, OPTIONAL):** state that is returned from `uma_rp_get_claims_gathering_url` command\n\n Returns:\n **dict:** The 
response from the OP.\n\n Success response::\n\n {\n \"status\":\"ok\",\n \"data\":{\n \"access_token\":\"SSJHBSUSSJHVhjsgvhsgvshgsv\",\n \"token_type\":\"Bearer\",\n \"pct\":\"c2F2ZWRjb25zZW50\",\n \"upgraded\":true\n }\n }\n\n NeedInfoError response::\n\n {\n\t\t\t\t\t\"error\":\"need_info\",\n\t\t\t\t\t\"ticket\":\"ZXJyb3JfZGV0YWlscw==\",\n\t\t\t\t\t\"required_claims\":[\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\t\"claim_token_format\":[\n\t\t\t\t\t\t\t\t\"http://openid.net/specs/openid-connect-core-1_0.html#IDToken\"\n\t\t\t\t\t\t\t],\n\t\t\t\t\t\t\t\"claim_type\":\"urn:oid:0.9.2342.19200300.100.1.3\",\n\t\t\t\t\t\t\t\"friendly_name\":\"email\",\n\t\t\t\t\t\t\t\"issuer\":[\"https://example.com/idp\"],\n\t\t\t\t\t\t\t\"name\":\"email23423453ou453\"\n\t\t\t\t\t\t}\n\t\t\t\t\t],\n\t\t\t\t\t\"redirect_user\":\"https://as.example.com/rqp_claims?id=2346576421\"\n\t\t\t\t}\n\t\t\t\t\n Raises:\n **OxdServerError:** When oxd-server reports a generic internal_error\n **InvalidTicketError:** When the oxd server returns a \"invalid_ticket\" error\n\t\t\t\n \"\"\"\n params = {\n \"oxd_id\": self.oxd_id,\n \"ticket\": ticket\n }\n if claim_token:\n params[\"claim_token\"] = claim_token\n if claim_token_format:\n params[\"claim_token_format\"] = claim_token_format\n if pct:\n params[\"pct\"] = pct\n if rpt:\n params[\"rpt\"] = rpt\n if scope:\n params[\"scope\"] = scope\n if state:\n params[\"state\"] = state\n\n logger.debug(\"Sending command `uma_rp_get_rpt` with params %s\", params)\n response = self.msgr.request(\"uma_rp_get_rpt\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'ok':\n return response['data']\n\n if response['data']['error'] == 'internal_error':\n raise OxdServerError(response['data'])\n\n if response['data']['error'] == 'need_info':\n return response['data']\n\n if response['data']['error'] == 'invalid_ticket':\n raise InvalidTicketError(response['data'])", "response": "Function to be used by a UMA Requesting Party to get a 
RPT token."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef setup_client(self):\n # add required params for the command\n params = {\n \"authorization_redirect_uri\": self.authorization_redirect_uri,\n\t\t\t\"oxd_rp_programming_language\": \"python\",\n }\n\n # add other optional params if they exist in config\n for op in self.opt_params:\n if self.config.get(\"client\", op):\n params[op] = self.config.get(\"client\", op)\n\n for olp in self.opt_list_params:\n if self.config.get(\"client\", olp):\n params[olp] = self.config.get(\"client\", olp).split(\",\")\n\n logger.debug(\"Sending command `setup_client` with params %s\", params)\n\n response = self.msgr.request(\"setup_client\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n data = response[\"data\"]\n\n self.oxd_id = data[\"oxd_id\"]\n self.config.set(\"oxd\", \"id\", data[\"oxd_id\"])\n self.config.set(\"client\", \"client_id\", data[\"client_id\"])\n self.config.set(\"client\", \"client_secret\", data[\"client_secret\"])\n if data[\"client_registration_access_token\"]:\n self.config.set(\"client\", \"client_registration_access_token\",\n data[\"client_registration_access_token\"])\n if data[\"client_registration_client_uri\"]:\n self.config.set(\"client\", \"client_registration_client_uri\",\n data[\"client_registration_client_uri\"])\n self.config.set(\"client\", \"client_id_issued_at\",\n str(data[\"client_id_issued_at\"]))\n\n return data", "response": "This command registers the client for communication protection."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nfunctions to get the client token which can be used for protection in a specific client.", "response": "def get_client_token(self, client_id=None, client_secret=None,\n op_host=None, op_discovery_path=None, scope=None,\n auto_update=True):\n 
\"\"\"Function to get the client token which can be used for protection in\n all future communication. The access token received by this method is\n stored in the config file and used as the `protection_access_token`\n for all subsequent calls to oxd.\n\n Parameters:\n * **client_id (str, optional):** client id from OP or from previous `setup_client` call\n * **client_secret (str, optional):** client secret from the OP or from `setup_client` call\n * **op_host (str, optional):** OP Host URL, default is read from the site configuration file\n * **op_discovery_path (str, optional):** op discovery path provided by OP\n * **scope (list, optional):** scopes of access required, default values are obtained from the config file\n * **auto_update(bool, optional):** automatically get a new access_token when the current one expires. If this is set to False, then the application must call `get_client_token` when the token expires to update the client with a new access token.\n\n Returns:\n **dict:** The client token and the refresh token in the form.\n\n Example response ::\n\n {\n \"access_token\":\"6F9619FF-8B86-D011-B42D-00CF4FC964FF\",\n \"expires_in\": 399,\n \"refresh_token\": \"fr459f\",\n \"scope\": \"openid\"\n }\n\n \"\"\"\n # override the values from config\n params = dict(client_id=client_id, client_secret=client_secret,\n op_host=op_host)\n\n if op_discovery_path:\n params['op_discovery_path'] = op_discovery_path\n if scope and isinstance(scope, list):\n params['scope'] = scope\n\n # If client id and secret aren't passed, then just read from the config\n if not client_id:\n params[\"client_id\"] = self.config.get(\"client\", \"client_id\")\n if not client_secret:\n params[\"client_secret\"] = self.config.get(\"client\",\n \"client_secret\")\n if not op_host:\n params[\"op_host\"] = self.config.get(\"client\", \"op_host\")\n logger.debug(\"Sending command `get_client_token` with params %s\",\n params)\n\n response = self.msgr.request(\"get_client_token\", 
**params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n\n self.config.set(\"client\", \"protection_access_token\",\n response[\"data\"][\"access_token\"])\n self.msgr.access_token = response[\"data\"][\"access_token\"]\n\n # Setup a new timer thread to refresh the access token.\n if auto_update:\n interval = int(response['data']['expires_in'])\n args = [client_id, client_secret, op_host, op_discovery_path,\n scope, auto_update]\n logger.info(\"Setting up a threading.Timer to get_client_token in \"\n \"%s seconds\", interval)\n t = Timer(interval, self.get_client_token, args)\n t.start()\n\n return response['data']"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef remove_site(self):\n params = dict(oxd_id=self.oxd_id)\n\n logger.debug(\"Sending command `remove_site` with params %s\",\n params)\n response = self.msgr.request(\"remove_site\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n return response['data']['oxd_id']", "response": "Cleans up the data for the site."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef introspect_rpt(self, rpt):\n params = dict(oxd_id=self.oxd_id)\n\n params['rpt'] = rpt\n\n logger.debug(\"Sending command `introspect_rpt` with params %s\",\n params)\n response = self.msgr.request(\"introspect_rpt\", **params)\n logger.debug(\"Received response: %s\", response)\n\n if response['status'] == 'error':\n raise OxdServerError(response['data'])\n return response['data']", "response": "This function returns the information about an RPT."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nwrite the main task to the file.", "response": "def write_task(self, **kw):\n \"\"\"\n Write the main task to the file::\n\n with 
writer.write_task(type=TaskType.RACING):\n ...\n\n # <Task type=\"RT\"> ... </Task>\n\n Inside the with clause the :meth:`~aerofiles.xcsoar.Writer.write_point`\n method should be used to write the individual task points. All\n parameters are optional.\n\n :param type: type of the task (one of the constants in\n :class:`~aerofiles.xcsoar.constants.TaskType`)\n :param start_requires_arm: ``True``: start has to be armed manually,\n ``False``: task will be started automatically\n :param start_max_height: maximum altitude when the task is started\n (in m)\n :param start_max_height_ref: altitude reference of\n ``start_max_height`` (one of the constants in\n :class:`~aerofiles.xcsoar.constants.AltitudeReference`)\n :param start_max_speed: maximum speed when the task is started (in m/s)\n :param start_open_time: time that the start line opens as\n :class:`datetime.time`\n :param start_close_time: time that the start line is closing as\n :class:`datetime.time`\n :param aat_min_time: AAT time as :class:`datetime.timedelta`\n :param finish_min_height: minimum altitude when the task is finished\n (in m)\n :param finish_min_height_ref: altitude reference of\n ``finish_min_height`` (one of the constants in\n :class:`~aerofiles.xcsoar.constants.AltitudeReference`)\n :param fai_finish: ``True``: FAI finish rules apply\n \"\"\"\n\n self.convert_timedelta(kw, 'aat_min_time')\n self.convert_time(kw, 'start_open_time')\n self.convert_time(kw, 'start_close_time')\n self.convert_bool(kw, 'fai_finish')\n self.convert_bool(kw, 'start_requires_arm')\n\n return self.write_tag_with_content('Task', **kw)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef write_point(self, **kw):\n\n assert 'type' in kw\n\n self.convert_bool(kw, 'score_exit')\n\n return self.write_tag_with_content('Point', **kw)", "response": "Writes a task point to the file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nwrite a waypoint 
to the file.", "response": "def write_waypoint(self, **kw):\n \"\"\"\n Write a waypoint to the file::\n\n writer.write_waypoint(\n name='Meiersberg',\n latitude=51.4,\n longitude=7.1\n )\n\n # <Waypoint name=\"Meiersberg\">\n # <Location latitude=\"51.4\" longitude=\"7.1\"/>\n # </Waypoint>\n\n :param name: name of the waypoint\n :param latitude: latitude of the waypoint (in WGS84)\n :param longitude: longitude of the waypoint (in WGS84)\n :param altitude: altitude of the waypoint (in m, optional)\n :param id: internal id of the waypoint (optional)\n :param comment: extended description of the waypoint (optional)\n \"\"\"\n\n assert 'name' in kw\n assert 'latitude' in kw\n assert 'longitude' in kw\n\n location_kw = {\n 'latitude': kw['latitude'],\n 'longitude': kw['longitude'],\n }\n\n del kw['latitude']\n del kw['longitude']\n\n with self.write_tag_with_content('Waypoint', **kw):\n self.write_tag('Location', **location_kw)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef write_observation_zone(self, **kw):\n\n assert 'type' in kw\n\n if kw['type'] == ObservationZoneType.LINE:\n assert 'length' in kw\n\n elif kw['type'] == ObservationZoneType.CYLINDER:\n assert 'radius' in kw\n\n elif kw['type'] == ObservationZoneType.SECTOR:\n assert 'radius' in kw\n assert 'start_radial' in kw\n assert 'end_radial' in kw\n\n elif kw['type'] == ObservationZoneType.SYMMETRIC_QUADRANT:\n assert 'radius' in kw\n\n elif kw['type'] == ObservationZoneType.CUSTOM_KEYHOLE:\n assert 'radius' in kw\n assert 'inner_radius' in kw\n assert 'angle' in kw\n\n self.write_tag('ObservationZone', **kw)", "response": "Writes an observation zone declaration to the file::\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a list with the names of all PlaceholderFields.", "response": "def get_placeholder_field_names(self):\n \"\"\"\n Returns a list with the names of all PlaceholderFields.\n \"\"\"\n return 
[field.name for field in self._meta.fields if field.get_internal_type() == 'PlaceholderField']"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef logout_callback():\n resp = make_response('Logging Out')\n resp.set_cookie('sub', 'null', expires=0)\n resp.set_cookie('session_id', 'null', expires=0)\n return resp", "response": "Route called by OpenID provider when user logs out."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef __connect(self):\n try:\n logger.debug(\"Socket connecting to %s:%s\", self.host, self.port)\n self.sock.connect((self.host, self.port))\n except socket.error as e:\n logger.exception(\"socket error %s\", e)\n logger.error(\"Closing socket and recreating a new one.\")\n self.sock.close()\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.sock.connect((self.host, self.port))", "response": "A helper function to make connection to the ACS."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsending the command to the oxd server and recieves the response.", "response": "def send(self, command):\n \"\"\"send function sends the command to the oxd server and recieves the\n response.\n\n Parameters:\n * **command (dict):** Dict representation of the JSON command string\n\n Returns:\n **response (dict):** The JSON response from the oxd Server as a dict\n \"\"\"\n cmd = json.dumps(command)\n cmd = \"{:04d}\".format(len(cmd)) + cmd\n msg_length = len(cmd)\n\n # make the first time connection\n if not self.firstDone:\n logger.info('Initiating first time socket connection.')\n self.__connect()\n self.firstDone = True\n\n # Send the message the to the server\n totalsent = 0\n while totalsent < msg_length:\n try:\n logger.debug(\"Sending: %s\", cmd[totalsent:])\n sent = self.sock.send(cmd[totalsent:])\n totalsent = totalsent + sent\n except socket.error as e:\n 
logger.exception(\"Reconneting due to socket error. %s\", e)\n self.__connect()\n logger.info(\"Reconnected to socket.\")\n\n # Check and receive the response if available\n parts = []\n resp_length = 0\n received = 0\n done = False\n while not done:\n part = self.sock.recv(1024)\n if part == \"\":\n logger.error(\"Socket connection broken, read empty.\")\n self.__connect()\n logger.info(\"Reconnected to socket.\")\n\n # Find out the length of the response\n if len(part) > 0 and resp_length == 0:\n resp_length = int(part[0:4])\n part = part[4:]\n\n # Set Done flag\n received = received + len(part)\n if received >= resp_length:\n done = True\n\n parts.append(part)\n\n response = \"\".join(parts)\n # return the JSON as a namedtuple object\n return json.loads(response)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef request(self, command, **kwargs):\n payload = {\n \"command\": command,\n \"params\": dict()\n }\n for item in kwargs.keys():\n payload[\"params\"][item] = kwargs.get(item)\n\n if self.access_token:\n payload[\"params\"][\"protection_access_token\"] = self.access_token\n\n return self.send(payload)", "response": "Function that builds the request and returns the response from the oxd - server"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef request(self, command, **kwargs):\n url = self.base + command.replace(\"_\", \"-\")\n\n req = urllib2.Request(url, json.dumps(kwargs))\n req.add_header(\"User-Agent\", \"oxdpython/%s\" % __version__)\n req.add_header(\"Content-type\", \"application/json; charset=UTF-8\")\n\n\n # add the protection token if available\n if self.access_token:\n req.add_header(\"Authorization\",\n \"Bearer {0}\".format(self.access_token))\n\n gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)\n resp = urllib2.urlopen(req, context=gcontext)\n\n return json.load(resp)", "response": "Function that builds the request and 
returns the response from the oxd - server"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn mass of the current object", "response": "def get_mass(self):\n '''Returns mass'''\n mass = parsers.get_mass(self.__chebi_id)\n\n if math.isnan(mass):\n mass = parsers.get_mass(self.get_parent_id())\n\n if math.isnan(mass):\n for parent_or_child_id in self.__get_all_ids():\n mass = parsers.get_mass(parent_or_child_id)\n\n if not math.isnan(mass):\n break\n\n return mass"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_charge(self):\n '''Returns charge'''\n charge = parsers.get_charge(self.__chebi_id)\n\n if math.isnan(charge):\n charge = parsers.get_charge(self.get_parent_id())\n\n if math.isnan(charge):\n for parent_or_child_id in self.__get_all_ids():\n charge = parsers.get_charge(parent_or_child_id)\n\n if not math.isnan(charge):\n break\n\n return charge", "response": "Returns the current charge"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn name of the current object", "response": "def get_name(self):\n '''Returns name'''\n name = parsers.get_name(self.__chebi_id)\n\n if name is None:\n name = parsers.get_name(self.get_parent_id())\n\n if name is None:\n for parent_or_child_id in self.__get_all_ids():\n name = parsers.get_name(parent_or_child_id)\n\n if name is not None:\n break\n\n return name"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning definition of the current object", "response": "def get_definition(self):\n '''Returns definition'''\n definition = parsers.get_definition(self.__chebi_id)\n\n if definition is None:\n definition = parsers.get_definition(self.get_parent_id())\n\n if definition is None:\n for parent_or_child_id in self.__get_all_ids():\n definition = parsers.get_definition(parent_or_child_id)\n\n if definition is not None:\n break\n\n return definition"} 
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_created_by(self):\n '''Returns created by'''\n created_by = parsers.get_created_by(self.__chebi_id)\n\n if created_by is None:\n created_by = parsers.get_created_by(self.get_parent_id())\n\n if created_by is None:\n for parent_or_child_id in self.__get_all_ids():\n created_by = parsers.get_created_by(parent_or_child_id)\n\n if created_by is not None:\n break\n\n return created_by", "response": "Returns the created by of the current object"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_inchi(self):\n '''Returns inchi'''\n inchi = parsers.get_inchi(self.__chebi_id)\n\n if inchi is None:\n inchi = parsers.get_inchi(self.get_parent_id())\n\n if inchi is None:\n for parent_or_child_id in self.__get_all_ids():\n inchi = parsers.get_inchi(parent_or_child_id)\n\n if inchi is not None:\n break\n\n return inchi", "response": "Returns inchi of the current ChEBI."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn mol if it exists else None", "response": "def get_mol(self):\n '''Returns mol'''\n structure = parsers.get_mol(self.__chebi_id)\n\n if structure is None:\n structure = parsers.get_mol(self.get_parent_id())\n\n if structure is None:\n for parent_or_child_id in self.__get_all_ids():\n structure = parsers.get_mol(parent_or_child_id)\n\n if structure is not None:\n break\n\n return None if structure is None else structure.get_structure()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set_scope(self, http_method, scope):\n\n for con in self.conditions:\n if http_method in con['httpMethods']:\n if isinstance(scope, list):\n con['scopes'] = scope\n elif isinstance(scope, str) or isinstance(scope, unicode):\n con['scopes'].append(scope)\n return\n # If not present, then 
create a new condition\n if isinstance(scope, list):\n self.conditions.append({'httpMethods': [http_method],\n 'scopes': scope})\n elif isinstance(scope, str) or isinstance(scope, unicode):\n self.conditions.append({'httpMethods': [http_method],\n 'scopes': [scope]})", "response": "Set a scope condition for the resource for a given http_method."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding a new resource with the given path to the resource set.", "response": "def add(self, path):\n \"\"\"Adds a new resource with the given path to the resource set.\n\n Parameters:\n * **path (str, unicode):** path of the resource to be protected\n\n Raises:\n TypeError when the path is not a string or a unicode string\n \"\"\"\n if not isinstance(path, str) and not isinstance(path, unicode):\n raise TypeError('The value passed for parameter path is not a str'\n ' or unicode')\n\n resource = Resource(path)\n self.resources[path] = resource\n return resource"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nsearch ChEBI via ols.", "response": "def search(term, exact=False, rows=1e6):\n '''Searches ChEBI via ols.'''\n url = 'https://www.ebi.ac.uk/ols/api/search?ontology=chebi' + \\\n '&exact=' + str(exact) + '&q=' + term + \\\n '&rows=' + str(int(rows))\n\n response = requests.get(url)\n data = response.json()\n\n return [ChebiEntity(doc['obo_id']) for doc in data['response']['docs']]"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets function reads the config value for the requested section and key and returns it", "response": "def get(self, section, key):\n \"\"\"get function reads the config value for the requested section and\n key and returns it\n\n Parameters:\n * **section (string):** the section to look for the config value either - oxd, client\n * **key (string):** the key for the config value required\n\n Returns:\n **value (string):** the function returns the 
value of the key in the appropriate format if found or returns None if such a section or key couldnot be found\n\n Example:\n config = Configurer(location)\n oxd_port = config.get('oxd', 'port') # returns the port of the oxd\n \"\"\"\n try:\n return self.parser.get(section, key)\n except (NoOptionError, NoSectionError) as e:\n logger.warning(\"%s\", e)\n return None"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set(self, section, key, value):\n if not self.parser.has_section(section):\n logger.warning(\"Invalid config section: %s\", section)\n return False\n\n self.parser.set(section, key, value)\n\n with open(self.config_file, 'wb') as cfile:\n self.parser.write(cfile)\n\n return True", "response": "set function sets a particular value for the specified key in the specified section and writes it to the config file."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the mass of a given ChEBI ID", "response": "def get_mass(chebi_id):\n '''Returns mass'''\n if len(__MASSES) == 0:\n __parse_chemical_data()\n\n return __MASSES[chebi_id] if chebi_id in __MASSES else float('NaN')"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_charge(chebi_id):\n '''Returns charge'''\n if len(__CHARGES) == 0:\n __parse_chemical_data()\n\n return __CHARGES[chebi_id] if chebi_id in __CHARGES else float('NaN')", "response": "Returns the charge of a given ChEBI ID"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef __parse_chemical_data():\n '''Gets and parses file'''\n filename = get_file('chemical_data.tsv')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split('\\t')\n\n if tokens[3] == 'FORMULA':\n # Many seemingly contradictory formulae exist,\n # depending upon the source database\n chebi_id = 
int(tokens[1])\n\n if chebi_id not in __FORMULAE:\n __FORMULAE[chebi_id] = []\n\n # Append formula:\n form = Formula(tokens[4], tokens[2])\n __FORMULAE[chebi_id].append(form)\n\n elif tokens[3] == 'MASS':\n __MASSES[int(tokens[1])] = float(tokens[4])\n\n elif tokens[3] == 'CHARGE':\n __CHARGES[int(tokens[1])] = int(tokens[4]\n if tokens[4][-1] != '-'\n else '-' + tokens[4][:-1])", "response": "Gets and parses the chemical data file"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning all comments in a single ChEBI.", "response": "def get_all_comments(chebi_ids):\n '''Returns all comments'''\n all_comments = [get_comments(chebi_id) for chebi_id in chebi_ids]\n return [x for sublist in all_comments for x in sublist]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef __parse_comments():\n '''Gets and parses file'''\n filename = get_file('comments.tsv')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split('\\t')\n chebi_id = int(tokens[1])\n\n if chebi_id not in __COMMENTS:\n __COMMENTS[chebi_id] = []\n\n # Append Comment:\n com = Comment(tokens[3],\n tokens[4],\n tokens[5],\n datetime.datetime.strptime(tokens[2], '%Y-%M-%d'))\n\n __COMMENTS[chebi_id].append(com)", "response": "Gets and parses file and adds comments to the list of comments."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_all_compound_origins(chebi_ids):\n '''Returns all compound origins'''\n all_compound_origins = [get_compound_origins(chebi_id)\n for chebi_id in chebi_ids]\n return [x for sublist in all_compound_origins for x in sublist]", "response": "Returns all compound origins"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget and parses file for compound_origins. 
tsv", "response": "def __parse_compound_origins():\n '''Gets and parses file'''\n filename = get_file('compound_origins.tsv')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split('\\t')\n\n if len(tokens) > 10:\n chebi_id = int(tokens[1])\n\n if chebi_id not in __COMPOUND_ORIGINS:\n __COMPOUND_ORIGINS[chebi_id] = []\n\n # Append CompoundOrigin:\n comp_orig = CompoundOrigin(tokens[2], tokens[3],\n tokens[4], tokens[5],\n tokens[6], tokens[7],\n tokens[8], tokens[9],\n tokens[10])\n __COMPOUND_ORIGINS[chebi_id].append(comp_orig)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn parent id of given chebi id", "response": "def get_parent_id(chebi_id):\n '''Returns parent id'''\n if len(__PARENT_IDS) == 0:\n __parse_compounds()\n\n return __PARENT_IDS[chebi_id] if chebi_id in __PARENT_IDS else float('NaN')"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn all modified on", "response": "def get_all_modified_on(chebi_ids):\n '''Returns all modified on'''\n all_modified_ons = [get_modified_on(chebi_id) for chebi_id in chebi_ids]\n all_modified_ons = [modified_on for modified_on in all_modified_ons\n if modified_on is not None]\n return None if len(all_modified_ons) == 0 else sorted(all_modified_ons)[-1]"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the star of a given chebi_id", "response": "def get_star(chebi_id):\n '''Returns created by'''\n if len(__STARS) == 0:\n __parse_compounds()\n\n return __STARS[chebi_id] if chebi_id in __STARS else float('NaN')"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef __parse_compounds():\n '''Gets and parses file'''\n filename = get_file('compounds.tsv.gz')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for 
line in textfile:\n tokens = line.strip().split('\\t')\n chebi_id = int(tokens[0])\n\n __STATUSES[chebi_id] = tokens[1]\n __SOURCES[chebi_id] = tokens[3]\n\n parent_id_token = tokens[4]\n __PARENT_IDS[chebi_id] = float('NaN') \\\n if parent_id_token == 'null' \\\n else int(parent_id_token)\n __put_all_ids(chebi_id, chebi_id)\n\n if parent_id_token != 'null':\n parent_id = int(parent_id_token)\n __put_all_ids(parent_id, chebi_id)\n\n __NAMES[chebi_id] = None if tokens[5] == 'null' else tokens[5]\n __DEFINITIONS[chebi_id] = None if tokens[6] == 'null' \\\n else tokens[6]\n __MODIFIED_ONS[chebi_id] = None if tokens[7] == 'null' \\\n else datetime.datetime.strptime(tokens[7], '%Y-%m-%d')\n __CREATED_BYS[chebi_id] = None if tokens[8] == 'null' \\\n or len(tokens) == 9 else tokens[8]\n __STARS[chebi_id] = float('NaN') \\\n if tokens[9 if len(tokens) > 9 else 8] == 'null' \\\n else int(tokens[9 if len(tokens) > 9 else 8])", "response": "Gets and parses the file of the components."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds a child_id to the list of all IDs of the parent_id.", "response": "def __put_all_ids(parent_id, child_id):\n '''COMMENT'''\n if parent_id in __ALL_IDS:\n __ALL_IDS[parent_id].append(child_id)\n else:\n __ALL_IDS[parent_id] = [child_id]"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_all_database_accessions(chebi_ids):\n '''Returns all database accessions'''\n all_database_accessions = [get_database_accessions(chebi_id)\n for chebi_id in chebi_ids]\n return [x for sublist in all_database_accessions for x in sublist]", "response": "Returns all database accessions in a list of ChEBI IDs."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget and parses file for database accessions", "response": "def __parse_database_accessions():\n '''Gets and parses file'''\n filename = 
get_file('database_accession.tsv')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split('\\t')\n chebi_id = int(tokens[1])\n\n if chebi_id not in __DATABASE_ACCESSIONS:\n __DATABASE_ACCESSIONS[chebi_id] = []\n\n # Append DatabaseAccession:\n dat_acc = DatabaseAccession(tokens[3], tokens[4], tokens[2])\n\n __DATABASE_ACCESSIONS[chebi_id].append(dat_acc)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting and parses file for ChEBI ID INCHIS", "response": "def __parse_inchi():\n '''Gets and parses file'''\n filename = get_file('chebiId_inchi.tsv')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split('\\t')\n __INCHIS[int(tokens[0])] = tokens[1]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_all_names(chebi_ids):\n '''Returns all names'''\n all_names = [get_names(chebi_id) for chebi_id in chebi_ids]\n return [x for sublist in all_names for x in sublist]", "response": "Returns all names in a given list of chebi ids"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets and parses file for all names", "response": "def __parse_names():\n '''Gets and parses file'''\n filename = get_file('names.tsv.gz')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split('\\t')\n chebi_id = int(tokens[1])\n\n if chebi_id not in __ALL_NAMES:\n __ALL_NAMES[chebi_id] = []\n\n # Append Name:\n nme = Name(tokens[4],\n tokens[2],\n tokens[3],\n tokens[5] == 'T',\n tokens[6])\n\n __ALL_NAMES[chebi_id].append(nme)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning a list of Reference objects for the given list of ChEBIs.", "response": "def 
get_references(chebi_ids):\n '''Returns references'''\n references = []\n chebi_ids = [str(chebi_id) for chebi_id in chebi_ids]\n\n filename = get_file('reference.tsv.gz')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split('\\t')\n\n if tokens[0] in chebi_ids:\n # Append Reference:\n if len(tokens) > 3:\n ref = Reference(tokens[1], tokens[2], tokens[3],\n tokens[4])\n else:\n ref = Reference(tokens[1], tokens[2])\n\n references.append(ref)\n return references"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_all_outgoings(chebi_ids):\n '''Returns all outgoings'''\n all_outgoings = [get_outgoings(chebi_id) for chebi_id in chebi_ids]\n return [x for sublist in all_outgoings for x in sublist]", "response": "Returns all outgoings for a list of chebi_ids"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_all_incomings(chebi_ids):\n '''Returns all incomings'''\n all_incomings = [get_incomings(chebi_id) for chebi_id in chebi_ids]\n return [x for sublist in all_incomings for x in sublist]", "response": "Returns all incomings in a given list of chebi_ids."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nget and parses the relation. 
tsv file and returns the resulting set of relations.", "response": "def __parse_relation():\n '''Gets and parses file'''\n relation_filename = get_file('relation.tsv')\n vertice_filename = get_file('vertice.tsv')\n relation_textfile = open(relation_filename, 'r')\n vertice_textfile = open(vertice_filename, 'r')\n\n # Parse vertice:\n vertices = {}\n\n next(vertice_textfile)\n\n for line in vertice_textfile:\n tokens = line.strip().split('\\t')\n vertices[tokens[0]] = tokens[1]\n\n next(relation_textfile)\n\n for line in relation_textfile:\n tokens = line.strip().split('\\t')\n\n source_chebi_id = int(vertices[tokens[3]])\n target_chebi_id = int(vertices[tokens[2]])\n typ = tokens[1]\n\n if source_chebi_id not in __OUTGOINGS:\n __OUTGOINGS[source_chebi_id] = []\n\n if target_chebi_id not in __INCOMINGS:\n __INCOMINGS[target_chebi_id] = []\n\n target_relation = Relation(typ, str(target_chebi_id), tokens[4])\n source_relation = Relation(typ, str(source_chebi_id), tokens[4])\n\n __OUTGOINGS[source_chebi_id].append(target_relation)\n __INCOMINGS[target_chebi_id].append(source_relation)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_mol(chebi_id):\n '''Returns mol'''\n chebi_id_regexp = '^\\\\d+\\\\,' + str(chebi_id) + '\\\\,.*'\n mol_file_end_regexp = '\\\",mol,\\\\dD'\n this_structure = []\n\n filename = get_file('structures.csv.gz')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n in_chebi_id = False\n\n next(textfile)\n\n for line in textfile:\n if in_chebi_id or line[0].isdigit():\n if re.match(chebi_id_regexp, line) \\\n and int(line.split(',')[0]) \\\n in __get_default_structure_ids():\n tokens = line.strip().split(',')\n in_chebi_id = True\n this_structure = []\n this_structure.append(','.join(tokens[2:])\n .replace('\\\"', ''))\n this_structure.append('\\n')\n elif in_chebi_id:\n if re.match(mol_file_end_regexp, line):\n tokens = line.strip().split(',')\n 
this_structure.append(tokens[0].replace('\\\"', ''))\n return Structure(''.join(this_structure),\n Structure.mol,\n int(tokens[2][0]))\n else:\n # In Molfile:\n this_structure.append(line)\n\n return None", "response": "Returns mol if the ChEBI ID is not in the default structure ids."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nparse the structures. csv file and stores them in the internal structures dictionary.", "response": "def __parse_structures():\n '''COMMENT'''\n filename = get_file('structures.csv.gz')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split(',')\n\n if len(tokens) == 5:\n if tokens[3] == 'InChIKey':\n __INCHI_KEYS[int(tokens[1])] = \\\n Structure(tokens[2],\n Structure.InChIKey,\n int(tokens[4][0]))\n elif tokens[3] == 'SMILES':\n __SMILES[int(tokens[1])] = \\\n Structure(tokens[2],\n Structure.SMILES,\n int(tokens[4][0]))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef __get_default_structure_ids():\n '''COMMENT'''\n if len(__DEFAULT_STRUCTURE_IDS) == 0:\n filename = get_file('default_structures.tsv')\n\n with io.open(filename, 'r', encoding='cp1252') as textfile:\n next(textfile)\n\n for line in textfile:\n tokens = line.strip().split('\\t')\n __DEFAULT_STRUCTURE_IDS.append(int(tokens[1]))\n\n return __DEFAULT_STRUCTURE_IDS", "response": "COMMENT | Get the default structure ids."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndownloads filename from ChEBI FTP site", "response": "def get_file(filename):\n '''Downloads filename from ChEBI FTP site'''\n destination = __DOWNLOAD_PARAMS['path']\n filepath = os.path.join(destination, filename)\n\n if not __is_current(filepath):\n\n if not os.path.exists(destination):\n os.makedirs(destination)\n\n url = 'ftp://ftp.ebi.ac.uk/pub/databases/chebi/' + \\\n 
'Flat_file_tab_delimited/'\n urlretrieve(urlparse.urljoin(url, filename), filepath)\n urlcleanup()\n\n if filepath.endswith('.zip'):\n zfile = zipfile.ZipFile(filepath, 'r')\n filepath = os.path.join(destination, zfile.namelist()[0])\n zfile.extractall(destination)\n elif filepath.endswith('.gz'):\n unzipped_filepath = filepath[:-len('.gz')]\n\n if os.path.exists(unzipped_filepath) \\\n and __is_current(unzipped_filepath):\n filepath = unzipped_filepath\n else:\n input_file = gzip.open(filepath, 'rb')\n filepath = os.path.join(destination, input_file.name[:-len('.gz')])\n output_file = open(filepath, 'wb')\n\n for line in input_file:\n output_file.write(line)\n\n input_file.close()\n output_file.close()\n\n return filepath"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchecking whether file is current", "response": "def __is_current(filepath):\n '''Checks whether file is current'''\n if not __DOWNLOAD_PARAMS['auto_update']:\n return True\n\n if not os.path.isfile(filepath):\n return False\n\n return datetime.datetime.utcfromtimestamp(os.path.getmtime(filepath)) \\\n > __get_last_update_time()"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef __get_last_update_time():\n '''Returns last FTP site update time'''\n now = datetime.datetime.utcnow()\n\n # Get the first Tuesday of the month\n first_tuesday = __get_first_tuesday(now)\n\n if first_tuesday < now:\n return first_tuesday\n else:\n first_of_month = datetime.datetime(now.year, now.month, 1)\n last_month = first_of_month + datetime.timedelta(days=-1)\n return __get_first_tuesday(last_month)", "response": "Returns last FTP site update time"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the first Tuesday of the month", "response": "def __get_first_tuesday(this_date):\n '''Get the first Tuesday of the month'''\n month_range = calendar.monthrange(this_date.year, this_date.month)\n 
first_of_month = datetime.datetime(this_date.year, this_date.month, 1)\n first_tuesday_day = (calendar.TUESDAY - month_range[0]) % 7\n first_tuesday = first_of_month + datetime.timedelta(days=first_tuesday_day)\n return first_tuesday"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nwrite the logger id to the log file.", "response": "def write_logger_id(self, manufacturer, logger_id, extension=None,\n validate=True):\n \"\"\"\n Write the manufacturer and logger id header line::\n\n writer.write_logger_id('XXX', 'ABC', extension='FLIGHT:1')\n # -> AXXXABCFLIGHT:1\n\n Some older loggers have decimal logger ids which can be written like\n this::\n\n writer.write_logger_id('FIL', '13961', validate=False)\n # -> AFIL13961\n\n :param manufacturer: the three-letter-code of the manufacturer\n :param logger_id: the logger id as three-letter-code\n :param extension: anything else that should be appended to this header\n (e.g. ``FLIGHT:1``)\n :param validate: whether to validate the manufacturer and logger_id\n three-letter-codes\n \"\"\"\n\n if validate:\n if not patterns.MANUFACTURER_CODE.match(manufacturer):\n raise ValueError('Invalid manufacturer code')\n if not patterns.LOGGER_ID.match(logger_id):\n raise ValueError('Invalid logger id')\n\n record = '%s%s' % (manufacturer, logger_id)\n if extension:\n record = record + extension\n\n self.write_record('A', record)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef write_fix_accuracy(self, accuracy=None):\n\n if accuracy is None:\n accuracy = 500\n\n accuracy = int(accuracy)\n if not 0 < accuracy < 1000:\n raise ValueError('Invalid fix accuracy')\n\n self.write_fr_header('FXA', '%03d' % accuracy)", "response": "Writes the GPS fix accuracy header."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nwriting the GPS datum in the required format.", "response": "def write_gps_datum(self, code=None, 
gps_datum=None):\n \"\"\"\n Write the mandatory GPS datum header::\n\n writer.write_gps_datum()\n # -> HFDTM100GPSDATUM:WGS-1984\n\n writer.write_gps_datum(33, 'Guam-1963')\n # -> HFDTM033GPSDATUM:Guam-1963\n\n Note that the default GPS datum is WGS-1984 and you should use that\n unless you have very good reasons against it.\n\n :param code: the GPS datum code as defined in the IGC file\n specification, section A8\n :param gps_datum: the GPS datum in written form\n \"\"\"\n\n if code is None:\n code = 100\n\n if gps_datum is None:\n gps_datum = 'WGS-1984'\n\n self.write_fr_header(\n 'DTM',\n '%03d' % code,\n subtype_long='GPSDATUM',\n value_long=gps_datum,\n )"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nwrites the headers to the file.", "response": "def write_headers(self, headers):\n \"\"\"\n Write all the necessary headers in the correct order::\n\n writer.write_headers({\n 'manufacturer_code': 'XCS',\n 'logger_id': 'TBX',\n 'date': datetime.date(1987, 2, 24),\n 'fix_accuracy': 50,\n 'pilot': 'Tobias Bieniek',\n 'copilot': 'John Doe',\n 'glider_type': 'Duo Discus',\n 'glider_id': 'D-KKHH',\n 'firmware_version': '2.2',\n 'hardware_version': '2',\n 'logger_type': 'LXNAVIGATION,LX8000F',\n 'gps_receiver': 'uBLOX LEA-4S-2,16,max9000m',\n 'pressure_sensor': 'INTERSEMA,MS5534A,max10000m',\n 'competition_id': '2H',\n 'competition_class': 'Doubleseater',\n })\n\n # -> AXCSTBX\n # -> HFDTE870224\n # -> HFFXA050\n # -> HFPLTPILOTINCHARGE:Tobias Bieniek\n # -> HFCM2CREW2:John Doe\n # -> HFGTYGLIDERTYPE:Duo Discus\n # -> HFGIDGLIDERID:D-KKHH\n # -> HFDTM100GPSDATUM:WGS-1984\n # -> HFRFWFIRMWAREVERSION:2.2\n # -> HFRHWHARDWAREVERSION:2\n # -> HFFTYFRTYPE:LXNAVIGATION,LX8000F\n # -> HFGPSuBLOX LEA-4S-2,16,max9000m\n # -> HFPRSPRESSALTSENSOR:INTERSEMA,MS5534A,max10000m\n # -> HFCIDCOMPETITIONID:2H\n # -> HFCCLCOMPETITIONCLASS:Doubleseater\n\n This method will throw a :class:`ValueError` if a mandatory header is\n missing 
and will fill others up with empty strings if no value was\n given. The optional headers are only written if they are part of the\n specified :class:`dict`.\n\n .. admonition:: Note\n\n The use of this method is encouraged compared to calling all the\n other header-writing methods manually!\n\n :param headers: a :class:`dict` of all the headers that should be\n written.\n \"\"\"\n\n for header in self.REQUIRED_HEADERS:\n if header not in headers:\n raise ValueError('%s header missing' % header)\n\n self.write_logger_id(\n headers['manufacturer_code'],\n headers['logger_id'],\n extension=headers.get('logger_id_extension')\n )\n\n self.write_date(headers['date'])\n self.write_fix_accuracy(headers.get('fix_accuracy'))\n\n self.write_pilot(headers.get('pilot', ''))\n if 'copilot' in headers:\n self.write_copilot(headers['copilot'])\n\n self.write_glider_type(headers.get('glider_type', ''))\n self.write_glider_id(headers.get('glider_id', ''))\n\n self.write_gps_datum(\n code=headers.get('gps_datum_code'),\n gps_datum=headers.get('gps_datum'),\n )\n\n self.write_firmware_version(headers.get('firmware_version', ''))\n self.write_hardware_version(headers.get('hardware_version', ''))\n self.write_logger_type(headers['logger_type'])\n self.write_gps_receiver(headers['gps_receiver'])\n self.write_pressure_sensor(headers.get('pressure_sensor', ''))\n\n if 'competition_id' in headers:\n self.write_competition_id(headers['competition_id'])\n\n if 'competition_class' in headers:\n self.write_competition_class(headers['competition_class'])\n\n if 'club' in headers:\n self.write_club(headers['club'])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef write_task_metadata(\n self, declaration_datetime=None, flight_date=None,\n task_number=None, turnpoints=None, text=None):\n \"\"\"\n Write the task declaration metadata record::\n\n writer.write_task_metadata(\n datetime.datetime(2014, 4, 13, 12, 53, 02),\n 
task_number=42,\n turnpoints=3,\n )\n # -> C140413125302000000004203\n\n There are sensible defaults in place for all parameters except for the\n ``turnpoints`` parameter. If you don't pass that parameter the method\n will raise a :class:`ValueError`. The other parameter defaults are\n mentioned in the list below.\n\n :param declaration_datetime: a :class:`datetime.datetime` instance of\n the UTC date and time at the time of declaration (default: current\n date and time)\n :param flight_date: a :class:`datetime.date` instance of the intended\n date of the flight (default: ``000000``, which means \"use\n declaration date\")\n :param task_number: task number for the flight date or an integer-based\n identifier (default: ``0001``)\n :param turnpoints: the number of turnpoints in the task (not counting\n start and finish points!)\n :param text: optional text to append to the metadata record\n \"\"\"\n\n if declaration_datetime is None:\n declaration_datetime = datetime.datetime.utcnow()\n\n if isinstance(declaration_datetime, datetime.datetime):\n declaration_datetime = (\n self.format_date(declaration_datetime) +\n self.format_time(declaration_datetime)\n )\n elif not patterns.DATETIME.match(declaration_datetime):\n raise ValueError('Invalid declaration datetime: %s' %\n declaration_datetime)\n\n if flight_date is None:\n flight_date = '000000'\n else:\n flight_date = self.format_date(flight_date)\n\n if task_number is None:\n task_number = 1\n elif not isinstance(task_number, int):\n raise ValueError('Invalid task number: %s' % task_number)\n\n if not isinstance(turnpoints, int):\n raise ValueError('Invalid turnpoints: %s' % turnpoints)\n\n record = '{0}{1}{2:04d}{3:02d}'.format(\n declaration_datetime,\n flight_date,\n task_number,\n turnpoints,\n )\n\n if text:\n record += text\n\n self.write_record('C', record)", "response": "Writes the task declaration metadata record."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where 
it\nwrites a task point to the log file.", "response": "def write_task_point(self, latitude=None, longitude=None, text='',\n distance_min=None, distance_max=None,\n bearing1=None, bearing2=None):\n \"\"\"\n Write a task declaration point::\n\n writer.write_task_point(\n latitude=(51 + 7.345 / 60.),\n longitude=(6 + 24.765 / 60.),\n text='Meiersberg',\n )\n # -> C5107345N00624765EMeiersberg\n\n If no ``latitude`` or ``longitude`` is passed, the fields will be\n filled with zeros (i.e. unknown coordinates). This however should only\n be used for ``TAKEOFF`` and ``LANDING`` points.\n\n For area tasks there are some additional parameters that can be used\n to specify the relevant areas::\n\n writer.write_task_point(\n -(12 + 32.112 / 60.),\n -(178 + .001 / 60.),\n 'TURN AREA',\n distance_min=12.0,\n distance_max=32.0,\n bearing1=122.0,\n bearing2=182.0,\n )\n # -> C1232112S17800001W00120000032000122000182000TURN AREA\n\n :param latitude: latitude of the point (between -90 and 90 degrees)\n :param longitude: longitude of the point (between -180 and 180 degrees)\n :param text: type and/or name of the waypoint (e.g. 
``TAKEOFF``,\n ``START``, ``TURN 1``, ``TURN 2``, ``FINISH`` or ``LANDING``)\n \"\"\"\n\n latitude = self.format_latitude(latitude)\n longitude = self.format_longitude(longitude)\n\n record = latitude + longitude\n\n if None not in [distance_min, distance_max, bearing1, bearing2]:\n record += '%04d' % int(distance_min)\n record += '%03d' % int((distance_min - int(distance_min)) * 1000)\n record += '%04d' % int(distance_max)\n record += '%03d' % int((distance_max - int(distance_max)) * 1000)\n record += '%03d' % int(bearing1)\n record += '%03d' % int((bearing1 - int(bearing1)) * 1000)\n record += '%03d' % int(bearing2)\n record += '%03d' % int((bearing2 - int(bearing2)) * 1000)\n\n if text:\n record += text\n\n self.write_record('C', record)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef write_task_points(self, points):\n for args in points:\n if len(args) not in [3, 7]:\n raise ValueError('Invalid number of task point tuple items')\n\n self.write_task_point(*args)", "response": "Writes a list of points to the internal file."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nwrites the security signature to the IGC file.", "response": "def write_security(self, security, bytes_per_line=75):\n \"\"\"\n Write the security signature::\n\n writer.write_security('ABCDEF')\n # -> GABCDEF\n\n If a signature of more than 75 bytes is used the G record will be\n broken into multiple lines according to the IGC file specification.\n This rule can be configured with the ``bytes_per_line`` parameter if\n necessary.\n\n :param security: the security signature\n :param bytes_per_line: the maximum number of bytes per line\n (default: ``75``)\n \"\"\"\n\n for start in range(0, len(security), bytes_per_line):\n self.write_record('G', security[start:start + bytes_per_line])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following 
Python 3 function\ndef write_fix(self, time=None, latitude=None, longitude=None, valid=False,\n pressure_alt=None, gps_alt=None, extensions=None):\n \"\"\"\n Write a fix record::\n\n writer.write_fix(\n datetime.time(12, 34, 56),\n latitude=51.40375,\n longitude=6.41275,\n valid=True,\n pressure_alt=1234,\n gps_alt=1432,\n )\n # -> B1234565124225N00624765EA0123401432\n\n :param time: UTC time of the fix record (default:\n :meth:`~datetime.datetime.utcnow`)\n :param latitude: longitude of the last GPS fix\n :param longitude: latitude of the last GPS fix\n :param valid: ``True`` if the current GPS fix is 3D\n :param pressure_alt: altitude to the ICAO ISA above the 1013.25 hPa\n sea level datum\n :param gps_alt: altitude above the WGS84 ellipsoid\n :param extensions: a list of extension values according to previous\n declaration through\n :meth:`~aerofiles.igc.Writer.write_fix_extensions`\n \"\"\"\n\n if time is None:\n time = datetime.datetime.utcnow()\n\n record = self.format_time(time)\n record += self.format_latitude(latitude)\n record += self.format_longitude(longitude)\n record += 'A' if valid else 'V'\n record += '%05d' % (pressure_alt or 0)\n record += '%05d' % (gps_alt or 0)\n\n if self.fix_extensions or extensions:\n if not (isinstance(extensions, list) and\n isinstance(self.fix_extensions, list)):\n raise ValueError('Invalid extensions list')\n\n if len(extensions) != len(self.fix_extensions):\n raise ValueError(\n 'Number of extensions does not match declaration')\n\n for type_length, value in zip(self.fix_extensions, extensions):\n length = type_length[1]\n\n if isinstance(value, (int, float)):\n value = ('%0' + str(length) + 'd') % value\n\n if len(value) != length:\n raise ValueError('Extension value has wrong length')\n\n record += value\n\n self.write_record('B', record)", "response": "Write a fix record for the current ICAO sea level."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef 
write_event(self, *args):\n\n num_args = len(args)\n if not (1 <= num_args <= 3):\n raise ValueError('Invalid number of parameters received')\n\n if num_args == 3:\n time, code, text = args\n elif num_args == 1:\n code = args[0]\n time = text = None\n elif isinstance(args[0], (datetime.time, datetime.datetime)):\n time, code = args\n text = None\n else:\n code, text = args\n time = None\n\n if time is None:\n time = datetime.datetime.utcnow()\n\n if not patterns.THREE_LETTER_CODE.match(code):\n raise ValueError('Invalid event code')\n\n record = self.format_time(time)\n record += code\n if text:\n record += text\n\n self.write_record('E', record)", "response": "Write an event record."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef write_satellites(self, *args):\n\n num_args = len(args)\n if num_args not in (1, 2):\n raise ValueError('Invalid number of parameters received')\n\n if num_args == 1:\n satellites = args[0]\n time = None\n else:\n time, satellites = args\n\n if time is None:\n time = datetime.datetime.utcnow()\n\n record = self.format_time(time)\n\n for satellite in satellites:\n if isinstance(satellite, int):\n satellite = '%02d' % satellite\n\n if len(satellite) != 2:\n raise ValueError('Invalid satellite ID')\n\n record += satellite\n\n self.write_record('F', record)", "response": "Writes a satellite constellation record."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nwrites a K record to the file.", "response": "def write_k_record(self, *args):\n \"\"\"\n Write a K record::\n\n writer.write_k_record_extensions([\n ('FXA', 3), ('SIU', 2), ('ENL', 3),\n ])\n\n writer.write_k_record(datetime.time(2, 3, 4), ['023', 13, 2])\n\n # -> J030810FXA1112SIU1315ENL\n # -> K02030402313002\n\n :param time: UTC time of the k record (default:\n :meth:`~datetime.datetime.utcnow`)\n :param extensions: a list of extension values according to previous\n declaration 
through\n :meth:`~aerofiles.igc.Writer.write_k_record_extensions`\n \"\"\"\n\n num_args = len(args)\n if num_args not in (1, 2):\n raise ValueError('Invalid number of parameters received')\n\n if num_args == 1:\n extensions = args[0]\n time = None\n else:\n time, extensions = args\n\n if time is None:\n time = datetime.datetime.utcnow()\n\n record = self.format_time(time)\n\n if not (isinstance(extensions, list) and\n isinstance(self.k_record_extensions, list)):\n raise ValueError('Invalid extensions list')\n\n if len(extensions) != len(self.k_record_extensions):\n raise ValueError(\n 'Number of extensions does not match declaration')\n\n for type_length, value in zip(self.k_record_extensions, extensions):\n length = type_length[1]\n\n if isinstance(value, (int, float)):\n value = ('%0' + str(length) + 'd') % value\n\n if len(value) != length:\n raise ValueError('Extension value has wrong length')\n\n record += value\n\n self.write_record('K', record)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef write_comment(self, code, text):\n\n if not patterns.THREE_LETTER_CODE.match(code):\n raise ValueError('Invalid source')\n\n self.write_record('L', code + text)", "response": "Write a comment record."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_frontend_data_dict_for_cms_page(cms_page, cms_page_title, request, editable=False):\n placeholders = list(cms_page.placeholders.all())\n placeholder_frontend_data_dict = get_frontend_data_dict_for_placeholders(\n placeholders=placeholders,\n request=request,\n editable=editable\n )\n global_placeholder_data_dict = get_global_placeholder_data(placeholder_frontend_data_dict)\n data = {\n 'containers': placeholder_frontend_data_dict,\n 'meta': {\n 'title': cms_page_title.page_title if cms_page_title.page_title else cms_page_title.title,\n 'description': cms_page_title.meta_description or '',\n 
}\n }\n\n language_links = get_language_links(cms_page=cms_page, request=request)\n if language_links:\n data['meta']['languages'] = language_links\n\n if placeholder_frontend_data_dict:\n data['containers'] = placeholder_frontend_data_dict\n\n if global_placeholder_data_dict:\n data['global_placeholder_data'] = global_placeholder_data_dict\n\n post_processer = settings.DJANGOCMS_SPA_CMS_PAGE_DATA_POST_PROCESSOR\n if post_processer:\n func = get_function_by_path(post_processer)\n data = func(cms_page=cms_page, data=data, request=request)\n\n return data", "response": "Returns the data dictionary of a CMS page that is used by the frontend."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a dictionary that contains the data that is used by the frontend to render all contents of the given list of placeholders.", "response": "def get_frontend_data_dict_for_placeholders(placeholders, request, editable=False):\n \"\"\"\n Takes a list of placeholder instances and returns the data that is used by the frontend to render all contents.\n The returned dict is grouped by placeholder slots.\n \"\"\"\n data_dict = {}\n for placeholder in placeholders:\n if placeholder:\n plugins = []\n\n # We don't use the helper method `placeholder.get_plugins()` because of the wrong order by path.\n placeholder_plugins = placeholder.cmsplugin_set.filter(language=request.LANGUAGE_CODE).order_by(\n settings.DJANGOCMS_SPA_PLUGIN_ORDER_FIELD)\n\n for plugin in placeholder_plugins:\n # We need the complete cascading structure of the plugins in the frontend. 
This is why we ignore the\n # children here and add them later in the loop.\n if not plugin.parent:\n plugins.append(get_frontend_data_dict_for_plugin(\n request=request,\n plugin=plugin,\n editable=editable)\n )\n\n if plugins or editable:\n data_dict[placeholder.slot] = {\n 'type': 'cmp-%s' % placeholder.slot,\n 'plugins': plugins,\n }\n\n if editable:\n # This is the structure of the template `cms/toolbar/placeholder.html` that is used to register\n # the frontend editing.\n from cms.plugin_pool import plugin_pool\n plugin_types = [cls.__name__ for cls in plugin_pool.get_all_plugins(placeholder.slot, placeholder.page)]\n allowed_plugins = plugin_types + plugin_pool.get_system_plugins()\n\n data_dict[placeholder.slot]['cms'] = [\n 'cms-placeholder-{}'.format(placeholder.pk),\n {\n 'type': 'placeholder',\n 'name': str(placeholder.get_label()),\n 'page_language': request.LANGUAGE_CODE,\n 'placeholder_id': placeholder.pk,\n 'plugin_language': request.LANGUAGE_CODE,\n 'plugin_restriction': [module for module in allowed_plugins],\n 'addPluginHelpTitle': 'Add plugin to placeholder {}'.format(placeholder.get_label()),\n 'urls': {\n 'add_plugin': placeholder.get_add_url(),\n 'copy_plugin': placeholder.get_copy_url()\n }\n }\n ]\n\n return data_dict"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a serializable data dict of a CMS plugin and all its children.", "response": "def get_frontend_data_dict_for_plugin(request, plugin, editable):\n \"\"\"\n Returns a serializable data dict of a CMS plugin and all its children. It expects a `render_json_plugin()` method\n from each plugin. 
Make sure you implement it for your custom plugins and monkey patch all third-party plugins.\n \"\"\"\n json_data = {}\n instance, plugin = plugin.get_plugin_instance()\n\n if not instance:\n return json_data\n\n renderer = renderer_pool.renderer_for_plugin(plugin)\n if renderer:\n json_data = renderer.render(request=request, plugin=plugin, instance=instance, editable=editable)\n\n if hasattr(plugin, 'parse_child_plugins') and plugin.parse_child_plugins:\n children = []\n for child_plugin in instance.get_children().order_by(settings.DJANGOCMS_SPA_PLUGIN_ORDER_FIELD):\n # Parse all children\n children.append(\n get_frontend_data_dict_for_plugin(\n request=request,\n plugin=child_plugin,\n editable=editable\n )\n )\n\n if children:\n json_data['plugins'] = children\n\n return json_data"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the frontend data dictionary for all partials.", "response": "def get_frontend_data_dict_for_partials(partials, request, editable=False, renderer=None):\n \"\"\"\n We call global page elements that are used to render a template `partial`. The contents of a partial do not\n change from one page to another. In a django CMS project partials are implemented as static placeholders. But\n there are usually other parts (e.g. menu) that work pretty much the same way. Because we don't have a template\n that allows us to render template tags, we need to have a custom implementation for those needs. 
We decided to\n use a `callback` approach that allows developers to bring custom data into the partial list.\n\n To prevent infinite recursions when using callbacks we have the `renderer` parameter.\n \"\"\"\n\n # Split static placeholders from partials that have a custom callback.\n static_placeholder_names = []\n custom_callback_partials = []\n for partial in partials:\n if partial in settings.DJANGOCMS_SPA_PARTIAL_CALLBACKS.keys():\n custom_callback_partials.append(partial)\n else:\n static_placeholder_names.append(partial)\n\n # Get the data of all static placeholders\n use_static_placeholder_draft = (hasattr(request, 'toolbar') and request.toolbar.edit_mode and\n request.user.has_perm('cms.edit_static_placeholder'))\n static_placeholders = []\n for static_placeholder_name in static_placeholder_names:\n static_placeholders.append(get_static_placeholder(static_placeholder_name, use_static_placeholder_draft))\n\n partial_data = get_frontend_data_dict_for_placeholders(\n placeholders=static_placeholders,\n request=request,\n editable=editable\n )\n\n # Get the data of all partials that have a custom callback.\n for partial_settings_key in custom_callback_partials:\n dotted_function_module_path = settings.DJANGOCMS_SPA_PARTIAL_CALLBACKS[partial_settings_key]\n callback_function = get_function_by_path(dotted_function_module_path)\n partial_data[partial_settings_key] = callback_function(request, renderer)\n\n return partial_data"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_global_placeholder_data(placeholder_frontend_data_dict):\n post_processer = settings.DJANGOCMS_SPA_PLACEHOLDER_DATA_POST_PROCESSOR\n if not post_processer:\n return {}\n\n func = get_function_by_path(post_processer)\n return func(placeholder_frontend_data_dict=placeholder_frontend_data_dict)", "response": "Get the global placeholder data for a route."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nwrite a 
waypoint to the current task declaration.", "response": "def write_waypoint(self, latitude=None, longitude=None, description=None):\n \"\"\"\n Adds a waypoint to the current task declaration. The first and the\n last waypoint added will be treated as takeoff and landing location,\n respectively.\n\n ::\n\n writer.write_waypoint(\n latitude=(51 + 7.345 / 60.),\n longitude=(6 + 24.765 / 60.),\n text='Meiersberg',\n )\n # -> $PFLAC,S,ADDWP,5107345N,00624765E,Meiersberg\n\n If no ``latitude`` or ``longitude`` is passed, the fields will be\n filled with zeros (i.e. unknown coordinates). This however should only\n be used for takeoff and landing points.\n\n :param latitude: latitude of the point (between -90 and 90 degrees)\n :param longitude: longitude of the point (between -180 and 180 degrees)\n :param description: arbitrary text description of waypoint\n \"\"\"\n\n if not description:\n description = ''\n\n latitude = self.format_latitude(latitude)\n longitude = self.format_longitude(longitude)\n\n self.write_config(\n 'ADDWP', '%s,%s,%s' % (latitude, longitude, description[0:50])\n )"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nwrite a list of task declaration points to the file.", "response": "def write_waypoints(self, points):\n \"\"\"\n Write multiple task declaration points with one call::\n\n writer.write_waypoints([\n (None, None, 'TAKEOFF'),\n (51.40375, 6.41275, 'START'),\n (50.38210, 8.82105, 'TURN 1'),\n (50.59045, 7.03555, 'TURN 2'),\n (51.40375, 6.41275, 'FINISH'),\n (None, None, 'LANDING'),\n ])\n # -> $PFLAC,S,ADDWP,0000000N,00000000E,TAKEOFF\n # -> $PFLAC,S,ADDWP,5124225N,00624765E,START\n # -> $PFLAC,S,ADDWP,5022926N,00849263E,TURN 1\n # -> $PFLAC,S,ADDWP,5035427N,00702133E,TURN 2\n # -> $PFLAC,S,ADDWP,5124225N,00624765E,FINISH\n # -> $PFLAC,S,ADDWP,0000000N,00000000E,LANDING\n\n see the :meth:`~aerofiles.flarmcfg.Writer.write_waypoint` method for\n more information.\n\n :param points: a list of ``(latitude, longitude, 
text)`` tuples.\n \"\"\"\n for args in points:\n if len(args) != 3:\n raise ValueError('Invalid number of task point tuple items')\n\n self.write_waypoint(*args)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef read(self, file_obj):\n self.reader = LowLevelReader(file_obj)\n\n logger_id = [[], None]\n fix_records = [[], []]\n task = [[], {\"waypoints\": []}]\n dgps_records = [[], []]\n event_records = [[], []]\n satellite_records = [[], []]\n security_records = [[], []]\n header = [[], {}]\n fix_record_extensions = [[], []]\n k_record_extensions = [[], []]\n k_records = [[], []]\n comment_records = [[], []]\n\n for record_type, line, error in self.reader:\n\n if record_type == 'A':\n if error:\n logger_id[0].append(error)\n else:\n logger_id[1] = line\n elif record_type == 'B':\n if error:\n if MissingRecordsError not in fix_records[0]:\n fix_records[0].append(MissingRecordsError)\n else:\n\n # add MissingExtensionsError when fix_record_extensions contains error\n if len(fix_record_extensions[0]) > 0 and MissingExtensionsError not in fix_records[0]:\n fix_records[0].append(MissingExtensionsError)\n\n fix_record = LowLevelReader.process_B_record(line, fix_record_extensions[1])\n fix_records[1].append(fix_record)\n elif record_type == 'C':\n task_item = line\n\n if error:\n if MissingRecordsError not in task[0]:\n task[0].append(MissingRecordsError)\n else:\n if task_item['subtype'] == 'task_info':\n del task_item['subtype']\n task[1].update(task_item)\n elif task_item['subtype'] == 'waypoint_info':\n del task_item['subtype']\n task[1]['waypoints'].append(task_item)\n\n elif record_type == 'D':\n if error:\n if MissingRecordsError not in dgps_records[0]:\n dgps_records[0].append(MissingRecordsError)\n else:\n dgps_records[1].append(line)\n elif record_type == 'E':\n if error:\n if MissingRecordsError not in event_records[0]:\n event_records[0].append(MissingRecordsError)\n else:\n 
event_records[1].append(line)\n elif record_type == 'F':\n if error:\n if MissingRecordsError not in satellite_records[0]:\n satellite_records[0].append(MissingRecordsError)\n else:\n satellite_records[1].append(line)\n elif record_type == 'G':\n if error:\n if MissingRecordsError not in security_records[0]:\n security_records[0].append(MissingRecordsError)\n else:\n security_records[1].append(line)\n elif record_type == 'H':\n header_item = line\n\n if error:\n if MissingRecordsError not in header[0]:\n header[0].append(MissingRecordsError)\n else:\n del header_item['source']\n header[1].update(header_item)\n elif record_type == 'I':\n if error:\n fix_record_extensions[0].append(error)\n else:\n fix_record_extensions[1] = line\n elif record_type == 'J':\n if error:\n k_record_extensions[0].append(error)\n else:\n k_record_extensions[1] = line\n elif record_type == 'K':\n if error:\n if MissingRecordsError not in k_records[0]:\n k_records[0].append(MissingRecordsError)\n else:\n\n # add MissingExtensionsError when fix_record_extensions contains error\n if len(k_record_extensions[0]) > 0 and MissingExtensionsError not in k_records[0]:\n k_records[0].append(MissingExtensionsError)\n\n k_record = LowLevelReader.process_K_record(line, k_record_extensions[1])\n k_records[1].append(k_record)\n elif record_type == 'L':\n if error:\n if MissingRecordsError not in comment_records[0]:\n comment_records[0].append(MissingRecordsError)\n else:\n comment_records[1].append(line)\n\n return dict(logger_id=logger_id, # A record\n fix_records=fix_records, # B records\n task=task, # C records\n dgps_records=dgps_records, # D records\n event_records=event_records, # E records\n satellite_records=satellite_records, # F records\n security_records=security_records, # G records\n header=header, # H records\n fix_record_extensions=fix_record_extensions, # I records\n k_record_extensions=k_record_extensions, # J records\n k_records=k_records, # K records\n comment_records=comment_records, # 
L records\n )", "response": "Reads the specified file object and returns a dictionary with the parsed data."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndefines the paths associated with this model.", "response": "def set_model_pathnames(self):\n \"\"\"Define the paths associated with this model.\"\"\"\n self.control_path = self.expt.control_path\n self.input_basepath = self.expt.lab.input_basepath\n self.work_path = self.expt.work_path\n self.codebase_path = self.expt.lab.codebase_path\n\n if len(self.expt.models) > 1:\n\n self.control_path = os.path.join(self.control_path, self.name)\n self.work_path = os.path.join(self.work_path, self.name)\n self.codebase_path = os.path.join(self.codebase_path, self.name)\n\n # NOTE: Individual models may override the work subdirectories\n self.work_input_path = self.work_path\n self.work_restart_path = self.work_path\n self.work_output_path = self.work_path\n self.work_init_path = self.work_path\n\n self.exec_prefix = self.config.get('exe_prefix', '')\n self.exec_name = self.config.get('exe', self.default_exec)\n if self.exec_name:\n # By default os.path.join will not prepend the lab bin_path\n # to an absolute path\n self.exec_path = os.path.join(self.expt.lab.bin_path,\n self.exec_name)\n else:\n self.exec_path = None\n\n if self.exec_path:\n # Make exec_name consistent for models with fully qualified path.\n # In all cases it will just be the name of the executable without a\n # path\n self.exec_name = os.path.basename(self.exec_path)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef archive(self):\n\n # Traverse the model directory deleting symlinks, zero length files\n # and empty directories\n for path, dirs, files in os.walk(self.work_path, topdown=False):\n for f_name in files:\n f_path = os.path.join(path, f_name)\n if os.path.islink(f_path) or os.path.getsize(f_path) == 0:\n os.remove(f_path)\n if 
len(os.listdir(path)) == 0:\n os.rmdir(path)", "response": "Store model output to laboratory archive."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef parse():\n\n # Build the list of subcommand modules\n modnames = [mod for (_, mod, _)\n in pkgutil.iter_modules(payu.subcommands.__path__,\n prefix=payu.subcommands.__name__ + '.')\n if mod.endswith('_cmd')]\n\n subcmds = [importlib.import_module(mod) for mod in modnames]\n\n # Construct the subcommand parser\n parser = argparse.ArgumentParser()\n parser.add_argument('--version', action='version',\n version='payu {0}'.format(payu.__version__))\n\n subparsers = parser.add_subparsers()\n\n for cmd in subcmds:\n cmd_parser = subparsers.add_parser(cmd.title, **cmd.parameters)\n cmd_parser.set_defaults(run_cmd=cmd.runcmd)\n\n for arg in cmd.arguments:\n cmd_parser.add_argument(*arg['flags'], **arg['parameters'])\n\n # Display help if no arguments are provided\n if len(sys.argv) == 1:\n parser.print_help()\n else:\n args = vars(parser.parse_args())\n run_cmd = args.pop('run_cmd')\n run_cmd(**args)", "response": "Parse the command line inputs and execute the subcommand."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ndetermine and validate the active model type.", "response": "def get_model_type(model_type, config):\n \"\"\"Determine and validate the active model type.\"\"\"\n\n # If no model type is given, then check the config file\n if not model_type:\n model_type = config.get('model')\n\n # If there is still no model type, try the parent directory\n if not model_type:\n model_type = os.path.basename(os.path.abspath(os.pardir))\n print('payu: warning: Assuming model is {0} based on parent directory '\n 'name.'.format(model_type))\n\n if model_type not in supported_models:\n print('payu: error: Unknown model {0}'.format(model_type))\n sys.exit(-1)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following 
Python 3 function does\ndef set_env_vars(init_run=None, n_runs=None, lab_path=None, dir_path=None,\n reproduce=None):\n \"\"\"Construct the environment variables used by payu for resubmissions.\"\"\"\n payu_env_vars = {}\n\n # Setup Python dynamic library link\n lib_paths = sysconfig.get_config_vars('LIBDIR')\n payu_env_vars['LD_LIBRARY_PATH'] = ':'.join(lib_paths)\n if 'PYTHONPATH' in os.environ:\n payu_env_vars['PYTHONPATH'] = os.environ['PYTHONPATH']\n\n # Set (or import) the path to the PAYU scripts (PAYU_PATH)\n # NOTE: We may be able to use sys.path[0] here.\n payu_binpath = os.environ.get('PAYU_PATH')\n\n if not payu_binpath or not os.path.isdir(payu_binpath):\n payu_binpath = os.path.dirname(sys.argv[0])\n\n payu_env_vars['PAYU_PATH'] = payu_binpath\n\n # Set the run counters\n if init_run:\n init_run = int(init_run)\n assert init_run >= 0\n payu_env_vars['PAYU_CURRENT_RUN'] = init_run\n\n if n_runs:\n n_runs = int(n_runs)\n assert n_runs > 0\n payu_env_vars['PAYU_N_RUNS'] = n_runs\n\n # Import explicit project paths\n if lab_path:\n payu_env_vars['PAYU_LAB_PATH'] = os.path.normpath(lab_path)\n\n if dir_path:\n payu_env_vars['PAYU_DIR_PATH'] = os.path.normpath(dir_path)\n\n if reproduce:\n payu_env_vars['PAYU_REPRODUCE'] = reproduce\n\n return payu_env_vars", "response": "Set environment variables used by payu for resubmissions."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsubmits a userscript the scheduler.", "response": "def submit_job(pbs_script, pbs_config, pbs_vars=None):\n \"\"\"Submit a userscript the scheduler.\"\"\"\n\n # Initialisation\n if pbs_vars is None:\n pbs_vars = {}\n\n pbs_flags = []\n\n pbs_queue = pbs_config.get('queue', 'normal')\n pbs_flags.append('-q {queue}'.format(queue=pbs_queue))\n\n pbs_project = pbs_config.get('project', os.environ['PROJECT'])\n pbs_flags.append('-P {project}'.format(project=pbs_project))\n\n pbs_resources = ['walltime', 'ncpus', 'mem', 'jobfs']\n\n for res_key in 
pbs_resources:\n res_flags = []\n res_val = pbs_config.get(res_key)\n if res_val:\n res_flags.append('{key}={val}'.format(key=res_key, val=res_val))\n\n if res_flags:\n pbs_flags.append('-l {res}'.format(res=','.join(res_flags)))\n\n # TODO: Need to pass lab.config_path somehow...\n pbs_jobname = pbs_config.get('jobname', os.path.basename(os.getcwd()))\n if pbs_jobname:\n # PBSPro has a 15-character jobname limit\n pbs_flags.append('-N {name}'.format(name=pbs_jobname[:15]))\n\n pbs_priority = pbs_config.get('priority')\n if pbs_priority:\n pbs_flags.append('-p {priority}'.format(priority=pbs_priority))\n\n pbs_flags.append('-l wd')\n\n pbs_join = pbs_config.get('join', 'n')\n if pbs_join not in ('oe', 'eo', 'n'):\n print('payu: error: unknown qsub IO stream join setting.')\n sys.exit(-1)\n else:\n pbs_flags.append('-j {join}'.format(join=pbs_join))\n\n # Append environment variables to qsub command\n # TODO: Support full export of environment variables: `qsub -V`\n pbs_vstring = ','.join('{0}={1}'.format(k, v)\n for k, v in pbs_vars.items())\n pbs_flags.append('-v ' + pbs_vstring)\n\n # Append any additional qsub flags here\n pbs_flags_extend = pbs_config.get('qsub_flags')\n if pbs_flags_extend:\n pbs_flags.append(pbs_flags_extend)\n\n if not os.path.isabs(pbs_script):\n # NOTE: PAYU_PATH is always set if `set_env_vars` was always called.\n # This is currently always true, but is not explicitly enforced.\n # So this conditional check is a bit redundant.\n payu_bin = pbs_vars.get('PAYU_PATH', os.path.dirname(sys.argv[0]))\n pbs_script = os.path.join(payu_bin, pbs_script)\n assert os.path.isfile(pbs_script)\n\n # Set up environment modules here for PBS.\n envmod.setup()\n envmod.module('load', 'pbs')\n\n # Construct job submission command\n cmd = 'qsub {flags} -- {python} {script}'.format(\n flags=' '.join(pbs_flags),\n python=sys.executable,\n script=pbs_script\n )\n print(cmd)\n\n subprocess.check_call(shlex.split(cmd))"} {"SOURCE": "codesearchnet", "instruction": 
"Create a Python 3 function for\nsubstituting a new timestep value using regex.", "response": "def substitute_timestep(self, regex, timestep):\n \"\"\"\n Substitute a new timestep value using regex.\n \"\"\"\n\n # Make one change at a time, each change affects subsequent matches.\n timestep_changed = False\n while True:\n matches = re.finditer(regex, self.str, re.MULTILINE | re.DOTALL)\n none_updated = True\n for m in matches:\n if m.group(1) == timestep:\n continue\n else:\n self.str = (self.str[:m.start(1)] + timestep +\n self.str[m.end(1):])\n none_updated = False\n timestep_changed = True\n break\n\n if none_updated:\n break\n\n if not timestep_changed:\n sys.stderr.write('WARNING: no update with {0}.\\n'.format(regex))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef int_to_date(date):\n\n year = date // 10**4\n month = date % 10**4 // 10**2\n day = date % 10**2\n\n return datetime.date(year, month, day)", "response": "Convert an int of form yyyymmdd to a python date object."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget the number of seconds from start date to end date", "response": "def runtime_from_date(start_date, years, months, days, seconds, caltype):\n \"\"\"\n Get the number of seconds from start date to start date + date_delta.\n\n Ignores Feb 29 for caltype == NOLEAP.\n \"\"\"\n\n end_date = start_date + relativedelta(years=years, months=months,\n days=days)\n runtime = end_date - start_date\n\n if caltype == NOLEAP:\n runtime -= get_leapdays(start_date, end_date)\n\n return int(runtime.total_seconds() + seconds)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef date_plus_seconds(init_date, seconds, caltype):\n\n end_date = init_date + datetime.timedelta(seconds=seconds)\n\n if caltype == NOLEAP:\n end_date += get_leapdays(init_date, end_date)\n if end_date.month == 2 and end_date.day == 29:\n end_date += 
datetime.timedelta(days=1)\n\n return end_date", "response": "Get a new date for a given calendar type plus a number of seconds."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_leapdays(init_date, final_date):\n\n curr_date = init_date\n leap_days = 0\n\n while curr_date != final_date:\n\n if curr_date.month == 2 and curr_date.day == 29:\n leap_days += 1\n\n curr_date += datetime.timedelta(days=1)\n\n return datetime.timedelta(days=leap_days)", "response": "Find the number of leap days between arbitrary dates. Returns a timedelta object."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncalculate the number of days between two dates.", "response": "def calculate_leapdays(init_date, final_date):\n \"\"\"Currently unsupported, it only works for differences in years.\"\"\"\n\n leap_days = (final_date.year - 1) // 4 - (init_date.year - 1) // 4\n leap_days -= (final_date.year - 1) // 100 - (init_date.year - 1) // 100\n leap_days += (final_date.year - 1) // 400 - (init_date.year - 1) // 400\n\n # TODO: Internal date correction (e.g. 
init_date is 1-March or later)\n\n return datetime.timedelta(days=leap_days)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngenerating a default laboratory path based on user environment.", "response": "def get_default_lab_path(self, config):\n \"\"\"Generate a default laboratory path based on user environment.\"\"\"\n # Default path settings\n\n # Append project name if present (NCI-specific)\n default_project = os.environ.get('PROJECT', '')\n default_short_path = os.path.join('/short', default_project)\n default_user = pwd.getpwuid(os.getuid()).pw_name\n\n short_path = config.get('shortpath', default_short_path)\n lab_name = config.get('laboratory', self.model_type)\n\n if os.path.isabs(lab_name):\n lab_path = lab_name\n else:\n user_name = config.get('user', default_user)\n lab_path = os.path.join(short_path, user_name, lab_name)\n\n return lab_path"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates the laboratory directories.", "response": "def initialize(self):\n \"\"\"Create the laboratory directories.\"\"\"\n mkdir_p(self.archive_path)\n mkdir_p(self.bin_path)\n mkdir_p(self.codebase_path)\n mkdir_p(self.input_basepath)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_job_id(short=True):\n\n jobid = os.environ.get('PBS_JOBID', '')\n\n if short:\n # Strip off '.rman2'\n jobid = jobid.split('.')[0]\n\n return(jobid)", "response": "Return the PBS job id from the environment variable PBS_JOBID."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget information about the job from the PBS server", "response": "def get_job_info():\n \"\"\"\n Get information about the job from the PBS server\n \"\"\"\n\n jobid = get_job_id()\n\n if jobid == '':\n return None\n\n info = get_qstat_info('-ft {0}'.format(jobid), 'Job Id:')\n\n # Select the dict for this job (there should only be one entry in any case)\n 
info = info['Job Id: {}'.format(jobid)]\n\n # Add the jobid to the dict and then return\n info['Job_ID'] = jobid\n\n return info"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsubmitting a postprocessing script after collation", "response": "def postprocess(self):\n \"\"\"Submit a postprocessing script after collation\"\"\"\n assert self.postscript\n envmod.setup()\n envmod.module('load', 'pbs')\n\n cmd = 'qsub {script}'.format(script=self.postscript)\n\n cmd = shlex.split(cmd)\n rc = sp.call(cmd)\n assert rc == 0, 'Postprocessing script submission failed.'"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nconverting a time date object to a um dump format date which is", "response": "def date_to_um_dump_date(date):\n \"\"\"\n Convert a time date object to a um dump format date which is\n <decade><year><month><day>0\n\n To accommodate two digit months and days the UM uses letters. e.g. 1st oct\n is writing 01a10.\n \"\"\"\n\n assert(date.month <= 12)\n\n decade = date.year // 10\n # UM can only handle 36 decades then goes back to the beginning.\n decade = decade % 36\n year = date.year % 10\n month = date.month\n day = date.day\n\n um_d = string.digits + string.ascii_letters[:26]\n\n um_dump_date = (\n '{decade}{year}{month}{day}0'.format(\n decade=um_d[decade],\n year=um_d[year],\n month=um_d[month],\n day=um_d[day]\n )\n )\n return um_dump_date"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nconverting a date object to a um date.", "response": "def date_to_um_date(date):\n \"\"\"\n Convert a date object to 'year, month, day, hour, minute, second.'\n \"\"\"\n\n assert date.hour == 0 and date.minute == 0 and date.second == 0\n\n return [date.year, date.month, date.day, 0, 0, 0]"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef um_date_to_date(d):\n\n return datetime.datetime(year=d[0], month=d[1], day=d[2],\n hour=d[3], minute=d[4], 
second=d[5])", "response": "Convert a string with format year month day hour minute second to a datetime date."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsets the environment modules used by the Environment Module system.", "response": "def setup(basepath=DEFAULT_BASEPATH):\n \"\"\"Set the environment modules used by the Environment Module system.\"\"\"\n\n module_version = os.environ.get('MODULE_VERSION', DEFAULT_VERSION)\n moduleshome = os.path.join(basepath, module_version)\n\n # Abort if MODULESHOME does not exist\n if not os.path.isdir(moduleshome):\n print('payu: warning: MODULESHOME does not exist; disabling '\n 'environment modules.')\n os.environ['MODULESHOME'] = ''\n return\n\n os.environ['MODULE_VERSION'] = module_version\n os.environ['MODULE_VERSION_STACK'] = module_version\n os.environ['MODULESHOME'] = moduleshome\n\n if 'MODULEPATH' not in os.environ:\n module_initpath = os.path.join(moduleshome, 'init', '.modulespath')\n with open(module_initpath) as initpaths:\n modpaths = [\n line.partition('#')[0].strip()\n for line in initpaths.readlines() if not line.startswith('#')\n ]\n\n os.environ['MODULEPATH'] = ':'.join(modpaths)\n\n os.environ['LOADEDMODULES'] = os.environ.get('LOADEDMODULES', '')\n\n # Environment modules with certain characters will cause corruption\n # when MPI jobs get launched on other nodes (possibly a PBS issue).\n #\n # Bash processes obscure the issue on Raijin, since it occurs in an\n # environment module function, and bash moves those to the end of\n # the environment variable list.\n #\n # Raijin's mpirun wrapper is a bash script, and therefore \"fixes\" by doing\n # the shuffle and limiting the damage to other bash functions, but some\n # wrappers (e.g. OpenMPI 2.1.x) may not be present. So we manually patch\n # the problematic variable here. 
But a more general solution would be nice\n # someday.\n\n if 'BASH_FUNC_module()' in os.environ:\n bash_func_module = os.environ['BASH_FUNC_module()']\n os.environ['BASH_FUNC_module()'] = bash_func_module.replace('\\n', ';')"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nrun the modulecmd tool and use its Python - formatted output to set the environment variables.", "response": "def module(command, *args):\n \"\"\"Run the modulecmd tool and use its Python-formatted output to set the\n environment variables.\"\"\"\n\n if 'MODULESHOME' not in os.environ:\n print('payu: warning: No Environment Modules found; skipping {0} call.'\n ''.format(command))\n return\n\n modulecmd = ('{0}/bin/modulecmd'.format(os.environ['MODULESHOME']))\n\n cmd = '{0} python {1} {2}'.format(modulecmd, command, ' '.join(args))\n\n envs, _ = subprocess.Popen(shlex.split(cmd),\n stdout=subprocess.PIPE).communicate()\n exec(envs)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef init_config(self):\n\n input_fpath = os.path.join(self.work_path, 'input.nml')\n\n input_nml = f90nml.read(input_fpath)\n\n if self.expt.counter == 0 or self.expt.repeat_run:\n input_type = 'n'\n else:\n input_type = 'r'\n input_nml['MOM_input_nml']['input_filename'] = input_type\n\n f90nml.write(input_nml, input_fpath, force=True)", "response": "Patch input. 
nml as a new or restart run."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a new directory ; ignore if it already exists.", "response": "def mkdir_p(path):\n \"\"\"Create a new directory; ignore if it already exists.\"\"\"\n\n try:\n os.makedirs(path)\n except EnvironmentError as exc:\n if exc.errno != errno.EEXIST:\n raise"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef read_config(config_fname=None):\n\n if not config_fname:\n config_fname = DEFAULT_CONFIG_FNAME\n\n try:\n with open(config_fname, 'r') as config_file:\n config = yaml.load(config_file)\n except IOError as exc:\n if exc.errno == errno.ENOENT:\n print('payu: warning: Configuration file {0} not found!'\n .format(config_fname))\n config = {}\n else:\n raise\n\n collate_config = config.pop('collate', {})\n\n # Transform legacy collate config options\n if type(collate_config) is bool:\n collate_config = {'enable': collate_config}\n\n collatestr = 'collate_'\n foundkeys = []\n # Cycle through old collate config and convert to newer dict format\n for key in list(config.keys()):\n if key.startswith(collatestr):\n foundkeys.append(key)\n collate_config[key[len(collatestr):]] = config.pop(key)\n if foundkeys:\n print(\"Use of these keys is deprecated: {}.\".format(\n \", \".join(foundkeys)))\n print(\"Instead use collate dictionary and subkey \"\n \"without 'collate_' prefix\")\n\n config['collate'] = collate_config\n\n return config", "response": "Parse input configuration file and return a config dict."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef make_symlink(src_path, lnk_path):\n\n # Check for Lustre 60-character symbolic link path bug\n if CHECK_LUSTRE_PATH_LEN:\n src_path = patch_lustre_path(src_path)\n lnk_path = patch_lustre_path(lnk_path)\n\n # os.symlink will happily make a symlink to a non-existent\n # file, but we don't want 
that behaviour\n # XXX: Do we want to be doing this?\n if not os.path.exists(src_path):\n return\n\n try:\n os.symlink(src_path, lnk_path)\n except EnvironmentError as exc:\n if exc.errno != errno.EEXIST:\n raise\n elif not os.path.islink(lnk_path):\n # Warn the user, but do not interrupt the job\n print(\"Warning: Cannot create symbolic link to {p}; a file named \"\n \"{f} already exists.\".format(p=src_path, f=lnk_path))\n else:\n # Overwrite any existing symbolic link\n if os.path.realpath(lnk_path) != src_path:\n os.remove(lnk_path)\n os.symlink(src_path, lnk_path)", "response": "Safely create a symbolic link to an input field."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\npatch any 60 - character pathnames to avoid a current Lustre bug.", "response": "def patch_lustre_path(f_path):\n \"\"\"Patch any 60-character pathnames, to avoid a current Lustre bug.\"\"\"\n\n if CHECK_LUSTRE_PATH_LEN and len(f_path) == 60:\n if os.path.isabs(f_path):\n f_path = '/.' 
+ f_path\n else:\n f_path = './' + f_path\n\n return f_path"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef check_fast(self, reproduce=False, **args):\n hashvals = {}\n\n fast_check = self.check_file(\n filepaths=self.data.keys(),\n hashvals=hashvals,\n hashfn=fast_hashes,\n shortcircuit=True,\n **args\n )\n\n if not fast_check:\n\n # Save all the fast hashes for failed files that we've already\n # calculated\n for filepath in hashvals:\n for hash, val in hashvals[filepath].items():\n self.data[filepath]['hashes'][hash] = val\n\n if reproduce:\n for filepath in hashvals:\n print('Check failed for {0} {1}'\n ''.format(filepath, hashvals[filepath]))\n tmphash = {}\n full_check = self.check_file(\n filepaths=filepath,\n hashfn=full_hashes,\n hashvals=tmphash,\n shortcircuit=False,\n **args\n )\n\n if full_check:\n # File is still ok, so replace fast hashes\n print('Full hashes ({0}) checked ok'\n ''.format(full_hashes))\n print('Updating fast hashes for {0} in {1}'\n ''.format(filepath, self.path))\n self.add_fast(filepath, force=True)\n print('Saving updated manifest')\n self.needsync = True\n else:\n sys.stderr.write(\n 'Run cannot reproduce: manifest {0} is not '\n 'correct\\n'.format(self.path)\n )\n for path, hashdict in tmphash.items():\n print(' {0}:'.format(path))\n for hash, val in hashdict.items():\n hash_table = self.data[path]['hashes']\n hash_table_val = hash_table.get(hash, None)\n print(' {0}: {1} != {2}'\n ''.format(hash, val, hash_table_val))\n sys.exit(1)\n else:\n # Not relevant if full hashes are correct. Regenerate full\n # hashes for all filepaths that failed fast check.\n print('Updating full hashes for {0} files in {1}'\n ''.format(len(hashvals), self.path))\n\n # Add all full hashes at once -- much faster. Definitely want\n # to force the full hash to be updated. 
In the specific case of\n # an empty hash the value will be None, without force it will\n # be written as null.\n self.add(\n filepaths=list(hashvals.keys()),\n hashfn=full_hashes,\n force=True,\n fullpaths=[self.fullpath(fpath) for fpath\n in list(hashvals.keys())]\n )\n\n # Flag need to update version on disk\n self.needsync = True", "response": "Check if all filepaths have the same hash value and replace the fast hashes with the new one."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_filepath(self, filepath, fullpath, copy=False):\n\n # Ignore directories\n if os.path.isdir(fullpath):\n return False\n\n # Ignore anything matching the ignore patterns\n for pattern in self.ignore:\n if fnmatch.fnmatch(os.path.basename(fullpath), pattern):\n return False\n\n if filepath not in self.data:\n self.data[filepath] = {}\n\n self.data[filepath]['fullpath'] = fullpath\n if 'hashes' not in self.data[filepath]:\n self.data[filepath]['hashes'] = {hash: None for hash in all_hashes}\n\n if copy:\n self.data[filepath]['copy'] = copy\n\n if hasattr(self, 'existing_filepaths'):\n if filepath in self.existing_filepaths:\n self.existing_filepaths.remove(filepath)\n\n return True", "response": "Add filepath & fullpath to the manifest object without hashing."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a filepath to the cache.", "response": "def add_fast(self, filepath, hashfn=None, force=False):\n \"\"\"\n Bespoke function to add filepaths but set shortcircuit to True, which\n means only the first calculable hash will be stored. 
In this way only\n one \"fast\" hashing function need be called for each filepath.\n \"\"\"\n if hashfn is None:\n hashfn = fast_hashes\n self.add(filepath, hashfn, force, shortcircuit=True)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef copy_file(self, filepath):\n copy_file = False\n try:\n copy_file = self.data[filepath]['copy']\n except KeyError:\n return False\n return copy_file", "response": "Returns flag which says to copy rather than link a file."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_filepath(self, manifest, filepath, fullpath, copy=False):\n filepath = os.path.normpath(filepath)\n if self.manifests[manifest].add_filepath(filepath, fullpath, copy):\n # Only link if filepath was added\n self.manifests[manifest].make_link(filepath)", "response": "Wrapper to add_filepath function in PayuManifest."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef commit_hash(dir='.'):\n\n cmd = ['git', 'rev-parse', 'HEAD']\n\n try:\n with open(os.devnull, 'w') as devnull:\n revision_hash = subprocess.check_output(\n cmd,\n cwd=dir,\n stderr=devnull\n )\n if sys.version_info.major > 2:\n revision_hash = revision_hash.decode('ascii')\n\n return revision_hash.strip()\n\n except subprocess.CalledProcessError:\n return None", "response": "Return the commit hash for HEAD of the specified directory."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nconstruct the list of files to be tracked by the runlog.", "response": "def create_manifest(self):\n \"\"\"Construct the list of files to be tracked by the runlog.\"\"\"\n config_path = os.path.join(self.expt.control_path,\n DEFAULT_CONFIG_FNAME)\n\n self.manifest = []\n\n if os.path.isfile(config_path):\n self.manifest.append(config_path)\n\n for model in self.expt.models:\n config_files = 
model.config_files + model.optional_config_files\n\n self.manifest.extend(os.path.join(model.control_path, f)\n for f in config_files)\n\n # Add file manifests to runlog manifest\n for mf in self.expt.manifest:\n self.manifest.append(mf.path)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef push(self):\n expt_name = self.config.get('name', self.expt.name)\n\n default_ssh_key = 'id_rsa_payu_' + expt_name\n ssh_key = self.config.get('sshid', default_ssh_key)\n ssh_key_path = os.path.join(os.path.expanduser('~'), '.ssh', 'payu',\n ssh_key)\n\n if not os.path.isfile(ssh_key_path):\n print('payu: error: Github SSH key {key} not found.'\n ''.format(key=ssh_key_path))\n print('payu: error: Run `payu ghsetup` to generate a new key.')\n sys.exit(-1)\n\n cmd = ('ssh-agent bash -c \"ssh-add {key}; git push --all payu\"'\n ''.format(key=ssh_key_path))\n subprocess.check_call(shlex.split(cmd), cwd=self.expt.control_path)", "response": "Push local runlog changes to the remote runlog repository"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef github_setup(self):\n github_auth = self.authenticate()\n github_username = github_auth[0]\n\n expt_name = self.config.get('name', self.expt.name)\n expt_description = self.expt.config.get('description')\n if not expt_description:\n expt_description = input('Briefly describe the experiment: ')\n assert isinstance(expt_description, str)\n expt_private = self.config.get('private', False)\n\n # 1. 
Create the organisation if needed\n github_api_url = 'https://api.github.com'\n org_name = self.config.get('organization')\n if org_name:\n repo_target = org_name\n\n # Check if org exists\n org_query_url = os.path.join(github_api_url, 'orgs', org_name)\n org_req = requests.get(org_query_url)\n\n if org_req.status_code == 404:\n # NOTE: Orgs cannot be created via the API\n print('payu: github organization {org} does not exist.'\n ''.format(org=org_name))\n print(' You must first create this on the website.')\n\n elif org_req.status_code == 200:\n # TODO: Confirm that the user can interact with the repo\n pass\n\n else:\n # TODO: Exit with grace\n print('payu: abort!')\n sys.exit(-1)\n\n repo_query_url = os.path.join(github_api_url, 'orgs', org_name,\n 'repos')\n repo_api_url = os.path.join(github_api_url, 'repos', org_name,\n expt_name)\n else:\n repo_target = github_username\n\n # Create repo in user account\n repo_query_url = os.path.join(github_api_url, 'user', 'repos')\n repo_api_url = os.path.join(github_api_url, 'repos',\n github_username, expt_name)\n\n # 2. Create the remote repository\n user_repos = []\n page = 1\n while True:\n repo_params = {'page': page, 'per_page': 100}\n repo_query = requests.get(repo_query_url, auth=github_auth,\n params=repo_params)\n assert repo_query.status_code == 200\n if repo_query.json():\n user_repos.extend(list(r['name'] for r in repo_query.json()))\n page += 1\n else:\n break\n\n if expt_name not in user_repos:\n repo_config = {\n 'name': expt_name,\n 'description': expt_description,\n 'private': expt_private,\n 'has_issues': True,\n 'has_downloads': True,\n 'has_wiki': False\n }\n\n repo_gen = requests.post(repo_query_url, json.dumps(repo_config),\n auth=github_auth)\n\n assert repo_gen.status_code == 201\n\n # 3. 
Check if remote is set\n git_remote_out = subprocess.check_output(shlex.split('git remote -v'),\n cwd=self.expt.control_path)\n\n git_remotes = dict([(r.split()[0], r.split()[1])\n for r in git_remote_out.split('\\n') if r])\n\n remote_name = self.config.get('remote', 'payu')\n remote_url = os.path.join('ssh://git@github.com', repo_target,\n self.expt.name + '.git')\n\n if remote_name not in git_remotes:\n cmd = ('git remote add {name} {url}'\n ''.format(name=remote_name, url=remote_url))\n subprocess.check_call(shlex.split(cmd), cwd=self.expt.control_path)\n elif git_remotes[remote_name] != remote_url:\n print('payu: error: Existing remote URL does not match '\n 'the proposed URL.')\n print('payu: error: To delete the old remote, type '\n '`git remote rm {name}`.'.format(name=remote_name))\n sys.exit(-1)\n\n # 4. Generate a payu-specific SSH key\n default_ssh_key = 'id_rsa_payu_' + expt_name\n ssh_key = self.config.get('sshid', default_ssh_key)\n ssh_dir = os.path.join(os.path.expanduser('~'), '.ssh', 'payu')\n mkdir_p(ssh_dir)\n\n ssh_keypath = os.path.join(ssh_dir, ssh_key)\n if not os.path.isfile(ssh_keypath):\n cmd = 'ssh-keygen -t rsa -f {key} -q -P \"\"'.format(key=ssh_key)\n subprocess.check_call(shlex.split(cmd), cwd=ssh_dir)\n\n # 5. 
Deploy key to repo\n with open(ssh_keypath + '.pub') as keyfile:\n pubkey = ' '.join(keyfile.read().split()[:-1])\n\n # TODO: Get this from github?\n repo_keys_url = os.path.join(repo_api_url, 'keys')\n keys_req = requests.get(repo_keys_url, auth=github_auth)\n assert keys_req.status_code == 200\n\n if not any(k['key'] == pubkey for k in keys_req.json()):\n add_key_param = {'title': 'payu', 'key': pubkey}\n add_key_req = requests.post(repo_keys_url, auth=github_auth,\n json=add_key_param)\n assert add_key_req.status_code == 201", "response": "Setup the github API and the authentication keys and API tokens."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_expiration_postfix(expiration):\n if re.match(r'^[1-9][0-9]*$', expiration):\n return expiration + \".0a1\"\n\n if re.match(r'^[1-9][0-9]*\\.0$', expiration):\n return expiration + \"a1\"\n\n return expiration", "response": "Formats the expiration version and adds a version postfix if needed."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nloads a YAML file from disk throw a ParserError on failure.", "response": "def load_yaml_file(filename):\n \"\"\" Load a YAML file from disk, throw a ParserError on failure.\"\"\"\n try:\n with open(filename, 'r') as f:\n return yaml.safe_load(f)\n except IOError as e:\n raise ParserError('Error opening ' + filename + ': ' + e.message)\n except ValueError as e:\n raise ParserError('Error parsing processes in {}: {}'\n .format(filename, e.message))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef stringIndex(self, string):\n if string in self.table:\n return self.table[string]\n else:\n result = self.current_index\n self.table[string] = result\n self.current_index += self.c_strlen(string)\n return result", "response": "Returns the index in the table of the provided string. 
Adds the string to the table if it s not there."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef writeDefinition(self, f, name):\n entries = self.table.items()\n entries.sort(key=lambda x: x[1])\n\n # Avoid null-in-string warnings with GCC and potentially\n # overlong string constants; write everything out the long way.\n def explodeToCharArray(string):\n def toCChar(s):\n if s == \"'\":\n return \"'\\\\''\"\n else:\n return \"'%s'\" % s\n return \", \".join(map(toCChar, string))\n\n f.write(\"const char %s[] = {\\n\" % name)\n for (string, offset) in entries:\n if \"*/\" in string:\n raise ValueError(\"String in string table contains unexpected sequence '*/': %s\" %\n string)\n\n e = explodeToCharArray(string)\n if e:\n f.write(\" /* %5d - \\\"%s\\\" */ %s, '\\\\0',\\n\"\n % (offset, string, explodeToCharArray(string)))\n else:\n f.write(\" /* %5d - \\\"%s\\\" */ '\\\\0',\\n\" % (offset, string))\n f.write(\"};\\n\\n\")", "response": "Writes out the string table to a file as a C const char array."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nrendering comments for obj.", "response": "def comments(context, obj):\n \"\"\" Render comments for obj. 
\"\"\"\n content_type = ContentType.objects.get_for_model(obj.__class__)\n comment_list = LogEntry.objects.filter(\n content_type=content_type,\n object_id=obj.pk,\n action_flag=COMMENT\n )\n return {\n 'obj': obj,\n 'comment_list': comment_list,\n 'is_admin': context['is_admin'],\n }"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_disk_quota(username, machine_name=None):\n\n try:\n ua = Account.objects.get(\n username=username,\n date_deleted__isnull=True)\n except Account.DoesNotExist:\n return 'Account not found'\n\n result = ua.get_disk_quota()\n if result is None:\n return False\n\n return result * 1048576", "response": "Returns disk quota for username in KB\n "} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngenerates a bar graph for a project", "response": "def _gen_project_trend_graph(project, start, end, force_overwrite=False):\n \"\"\"Generates a bar graph for a project\n\n Keyword arguments:\n project -- Project\n start -- start date\n end -- end date\n\n \"\"\"\n filename = graphs.get_project_trend_graph_filename(project, start, end)\n csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv')\n png_filename = os.path.join(GRAPH_ROOT, filename + '.png')\n\n _check_directory_exists(csv_filename)\n _check_directory_exists(png_filename)\n\n if not settings.GRAPH_DEBUG or force_overwrite:\n if os.path.exists(csv_filename):\n if os.path.exists(png_filename):\n return\n\n query = CPUJob.objects.filter(\n project=project,\n date__range=(start, end)\n )\n query = query.values('account', 'account__username', 'date')\n query = query.annotate(Sum('cpu_usage')).order_by('account', 'date')\n\n t_start = start\n t_end = end\n\n start_str = start.strftime('%Y-%m-%d')\n end_str = end.strftime('%Y-%m-%d')\n\n fig, ax = plt.subplots(figsize=(6, 4))\n ax.set_xlim(start, end + datetime.timedelta(days=1))\n ax.set_title('%s %s - %s' % (project.pid, start_str, end_str))\n 
ax.set_ylabel(\"CPU Time (hours)\")\n ax.set_xlabel(\"Date\")\n\n locator = mdates.AutoDateLocator()\n ax.xaxis.set_major_locator(locator)\n ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator))\n ax.xaxis.set_minor_locator(mdates.DayLocator())\n\n data = {}\n x_data = {}\n y_data = {}\n\n with open(csv_filename, 'w') as csv_file:\n csv_writer = csv.writer(csv_file)\n for row in query.iterator():\n csv_writer.writerow([\n row['account__username'],\n row['date'], row['cpu_usage__sum'] / 3600.00\n ])\n\n account = row['account']\n date = row['date']\n\n if account not in data:\n data[account] = {}\n x_data[account] = []\n y_data[account] = []\n\n data[account][date] = row['cpu_usage__sum']\n\n for account, dates in six.iteritems(data):\n start = t_start\n end = t_end\n while start <= end:\n total = 0\n if start in dates:\n total = dates[start]\n x_data[account].append(start)\n y_data[account].append(total / 3600.00)\n start = start + datetime.timedelta(days=1)\n\n del data\n\n totals = []\n start = t_start\n end = t_end\n while start <= end:\n totals.append(0)\n start = start + datetime.timedelta(days=1)\n\n count = 0\n for account in x_data.keys():\n ax.bar(\n x_data[account], y_data[account],\n bottom=totals,\n color=graphs.get_colour(count),\n edgecolor=graphs.get_colour(count),\n align='edge')\n count = count + 1\n\n i = 0\n start = t_start\n end = t_end\n while start <= end:\n totals[i] += y_data[account][i]\n i = i + 1\n start = start + datetime.timedelta(days=1)\n\n del x_data\n del y_data\n del totals\n\n fig.autofmt_xdate()\n plt.tight_layout()\n plt.savefig(png_filename)\n plt.close()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _gen_institute_graph(start, end, force_overwrite=False):\n filename = graphs.get_institute_graph_filename(start, end)\n csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv')\n png_filename = os.path.join(GRAPH_ROOT, filename + '.png')\n\n 
_check_directory_exists(csv_filename)\n _check_directory_exists(png_filename)\n\n if not settings.GRAPH_DEBUG or force_overwrite:\n if os.path.exists(csv_filename):\n if os.path.exists(png_filename):\n return\n\n institute_list = Institute.active.all()\n\n plt.subplots(figsize=(4, 4))\n\n data = []\n labels = []\n\n total = 0\n with open(csv_filename, 'w') as csv_file:\n csv_writer = csv.writer(csv_file)\n for institute in institute_list.iterator():\n hours, jobs = usage.get_institute_usage(institute, start, end)\n total = total + int(hours)\n if hours > 0:\n csv_writer.writerow([institute.name, hours, jobs])\n data.append(hours)\n labels.append(institute.name)\n\n mcu = usage.get_machine_category_usage(start, end)\n hours = int(mcu.available_time - total)\n csv_writer.writerow([\"unused\", hours])\n data.append(hours)\n labels.append('Unused')\n\n plt.pie(data, labels=labels, autopct='%1.1f%%', shadow=True)\n plt.tight_layout()\n plt.savefig(png_filename)\n plt.close()", "response": "Generate a pie chart comparing institutes usage."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _gen_machine_graph(start, end, force_overwrite=False):\n filename = graphs.get_machine_graph_filename(start, end)\n csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv')\n png_filename = os.path.join(GRAPH_ROOT, filename + '.png')\n\n _check_directory_exists(csv_filename)\n _check_directory_exists(png_filename)\n\n if not settings.GRAPH_DEBUG or force_overwrite:\n if os.path.exists(csv_filename):\n if os.path.exists(png_filename):\n return\n\n machine_list = Machine.objects.all()\n\n plt.subplots(figsize=(4, 4))\n\n data = []\n labels = []\n\n with open(csv_filename, 'w') as csv_file:\n csv_writer = csv.writer(csv_file)\n for machine in machine_list.iterator():\n hours, jobs = usage.get_machine_usage(machine, start, end)\n if hours > 0:\n csv_writer.writerow([machine.name, hours, jobs])\n data.append(hours)\n 
labels.append(machine.name)\n\n plt.pie(data, labels=labels, autopct='%1.1f%%', shadow=True)\n\n del data\n del labels\n\n plt.tight_layout()\n plt.savefig(png_filename)\n plt.close()", "response": "Generate a graph of the machines usage."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _gen_trend_graph(start, end, force_overwrite=False):\n filename = graphs.get_trend_graph_filename(start, end)\n csv_filename = os.path.join(GRAPH_ROOT, filename + '.csv')\n png_filename = os.path.join(GRAPH_ROOT, filename + '.png')\n\n _check_directory_exists(csv_filename)\n _check_directory_exists(png_filename)\n\n if not settings.GRAPH_DEBUG or force_overwrite:\n if os.path.exists(csv_filename):\n if os.path.exists(png_filename):\n return\n\n query = CPUJob.objects.filter(\n date__range=(start, end)\n )\n query = query.values('date').annotate(Sum('cpu_usage'))\n query = query.order_by('date')\n\n t_start = start\n t_end = end\n\n start_str = start.strftime('%Y-%m-%d')\n end_str = end.strftime('%Y-%m-%d')\n\n fig, ax = plt.subplots(figsize=(6, 4))\n ax.set_xlim(start, end)\n ax.set_title('%s - %s' % (start_str, end_str))\n ax.set_ylabel(\"CPU Time (hours)\")\n ax.set_xlabel(\"Date\")\n\n locator = mdates.AutoDateLocator()\n ax.xaxis.set_major_locator(locator)\n ax.xaxis.set_major_formatter(mdates.AutoDateFormatter(locator))\n ax.xaxis.set_minor_locator(mdates.DayLocator())\n\n data = {}\n x_data = []\n y_data = []\n\n with open(csv_filename, 'w') as csv_file:\n csv_writer = csv.writer(csv_file)\n for row in query.iterator():\n csv_writer.writerow([\n row['date'], row['cpu_usage__sum'] / 3600.00\n ])\n\n date = row['date']\n\n data[date] = row['cpu_usage__sum']\n\n start = t_start\n end = t_end\n while start <= end:\n total = 0\n if start in data:\n total = data[start]\n x_data.append(start)\n y_data.append(total / 3600.00)\n start = start + datetime.timedelta(days=1)\n\n del data\n\n ax.plot(x_data, y_data)\n\n del 
x_data\n del y_data\n\n fig.autofmt_xdate()\n plt.tight_layout()\n plt.savefig(png_filename)\n plt.close()", "response": "Generate a total trend graph for a given start and end date."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef daynum_to_date(daynum, max_days=1000000):\n if daynum is None:\n return None\n try:\n daycount = int(daynum)\n except ValueError:\n return None\n\n if daycount > max_days:\n # Using default: some time in the 48th century, clearly bogus.\n daycount = max_days\n return date(1970, 1, 1) + timedelta(daycount)", "response": "Convert a number of days to a date."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef mau(dataframe, target_day, past_days=28, future_days=10, date_format=\"%Y%m%d\"):\n target_day_date = datetime.strptime(target_day, date_format)\n\n # Compute activity over `past_days` days leading up to target_day\n min_activity_date = target_day_date - timedelta(past_days)\n min_activity = unix_time_nanos(min_activity_date)\n max_activity = unix_time_nanos(target_day_date + timedelta(1))\n act_col = dataframe.activityTimestamp\n\n min_submission = datetime.strftime(min_activity_date, date_format)\n max_submission_date = target_day_date + timedelta(future_days)\n max_submission = datetime.strftime(max_submission_date, date_format)\n sub_col = dataframe.submission_date_s3\n\n filtered = filter_date_range(dataframe, act_col, min_activity, max_activity,\n sub_col, min_submission, max_submission)\n return count_distinct_clientids(filtered)", "response": "Compute Monthly Active Users from the Executive Summary dataset."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsnaps a date to the beginning of the week.", "response": "def snap_to_beginning_of_week(day, weekday_start=\"Sunday\"):\n \"\"\" Get the first day of the current week.\n\n :param day: The input date to 
snap.\n :param weekday_start: Either \"Monday\" or \"Sunday\", indicating the first day of the week.\n :returns: A date representing the first day of the current week.\n \"\"\"\n delta_days = ((day.weekday() + 1) % 7) if weekday_start is \"Sunday\" else day.weekday()\n return day - timedelta(days=delta_days)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_last_week_range(weekday_start=\"Sunday\"):\n today = date.today()\n # Get the first day of the past complete week.\n start_of_week = snap_to_beginning_of_week(today, weekday_start) - timedelta(weeks=1)\n end_of_week = start_of_week + timedelta(days=6)\n return (start_of_week, end_of_week)", "response": "Gets the date for the first and last day of the previous complete week."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_last_month_range():\n today = date.today()\n # Get the last day for the previous month.\n end_of_last_month = snap_to_beginning_of_month(today) - timedelta(days=1)\n start_of_last_month = snap_to_beginning_of_month(end_of_last_month)\n return (start_of_last_month, end_of_last_month)", "response": "Gets the date for the first and last day of the previous complete month."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a dataframe with a sample of clients given a given sample_id.", "response": "def sampler(dataframe, modulo, column=\"client_id\", sample_id=42):\n \"\"\" Collect a sample of clients given an input column\n\n Filter dataframe based on the modulus of the CRC32 of a given string\n column matching a given sample_id. 
if dataframe has already been filtered\n by sample_id, then modulo should be a multiple of 100, column should be\n \"client_id\", and the given sample_id should match the value previously\n used, optionally plus multiples of 100.\n\n Args:\n dataframe: A Dataframe to be sampled\n modulo (int): selects a 1/modulo sampling of dataframe\n column (str): name of a string column to sample on\n sample_id (int): modulus result to select for sampling\n\n Returns:\n A DataFrame sampled on the given inputs.\n \"\"\"\n return dataframe \\\n .withColumn(\n \"sampler\",\n udf(lambda key: (crc32(key or \"\") & 0xffffffff) % modulo)(column),\n ).where(\"sampler = %s\" % sample_id).drop(\"sampler\")"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nchecks status of task.", "response": "def progress(request):\n \"\"\" Check status of task. \"\"\"\n if 'delete' in request.GET:\n models.MachineCache.objects.all().delete()\n models.InstituteCache.objects.all().delete()\n models.PersonCache.objects.all().delete()\n models.ProjectCache.objects.all().delete()\n return render(\n template_name='main.html',\n context={'content': 'Deleted'},\n request=request)\n\n if request.method == 'POST':\n if 'task_id' in request.POST:\n result = Task.AsyncResult(request.POST['task_id'])\n if result.failed():\n value = {\n 'info': {},\n 'ready': result.ready(),\n }\n else:\n value = {\n 'info': result.info,\n 'ready': result.ready(),\n }\n return HttpResponse(\n json.dumps(value), content_type=\"application/json\")\n return None"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef synchronise(func):\n def inner(request, *args):\n lock_id = '%s-%s-built-%s' % (\n datetime.date.today(), func.__name__,\n \",\".join([str(a) for a in args]))\n\n if cache.add(lock_id, 'true', LOCK_EXPIRE):\n result = func(request, *args)\n cache.set(lock_id, result.task_id)\n else:\n task_id = cache.get(lock_id)\n if not task_id:\n 
return None\n\n cache.set(lock_id, \"\")\n result = Task.AsyncResult(task_id)\n if result.ready():\n result.forget()\n return None\n return result\n return inner", "response": "Decorator that synchronises a task into the cache."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\npartitions a list of objects in even buckets", "response": "def _group_by_size_greedy(obj_list, tot_groups):\n \"\"\"Partition a list of objects in even buckets\n\n The idea is to choose the bucket for an object in a round-robin fashion.\n The list of objects is sorted to also try to keep the total size in bytes\n as balanced as possible.\n :param obj_list: a list of dict-like objects with a 'size' property\n :param tot_groups: number of partitions to split the data into.\n :return: a list of lists, one for each partition.\n \"\"\"\n sorted_list = sorted(obj_list, key=lambda x: x['size'], reverse=True)\n groups = [[] for _ in range(tot_groups)]\n for index, obj in enumerate(sorted_list):\n current_group = groups[index % len(groups)]\n current_group.append(obj)\n return groups"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\npartitioning a list of objects evenly and by file size.", "response": "def _group_by_equal_size(obj_list, tot_groups, threshold=pow(2, 32)):\n \"\"\"Partition a list of objects evenly and by file size\n\n Files are placed according to largest file in the smallest bucket. 
If the\n file is larger than the given threshold, then it is placed in a new bucket\n by itself.\n :param obj_list: a list of dict-like objects with a 'size' property\n :param tot_groups: number of partitions to split the data\n :param threshold: the maximum size of each bucket\n :return: a list of lists, one for each partition\n \"\"\"\n sorted_obj_list = sorted([(obj['size'], obj) for obj in obj_list], reverse=True)\n groups = [(random.random(), []) for _ in range(tot_groups)]\n\n if tot_groups <= 1:\n groups = _group_by_size_greedy(obj_list, tot_groups)\n return groups\n heapq.heapify(groups)\n for obj in sorted_obj_list:\n if obj[0] > threshold:\n heapq.heappush(groups, (obj[0], [obj[1]]))\n else:\n size, files = heapq.heappop(groups)\n size += obj[0]\n files.append(obj[1])\n heapq.heappush(groups, (size, files))\n groups = [group[1] for group in groups]\n return groups"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef select(self, *properties, **aliased_properties):\n if not (properties or aliased_properties):\n return self\n merged_properties = dict(zip(properties, properties))\n merged_properties.update(aliased_properties)\n\n for prop_name in (merged_properties.keys()):\n if prop_name in self.selection:\n raise Exception('The property {} has already been selected'.format(prop_name))\n\n new_selection = self.selection.copy()\n new_selection.update(merged_properties)\n\n return self._copy(selection=new_selection)", "response": "Select the properties of the dataset."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning a new Dataset refined using the given condition.", "response": "def where(self, **kwargs):\n \"\"\"Return a new Dataset refined using the given condition\n\n :param kwargs: a map of `dimension` => `condition` to filter the elements\n of the dataset. `condition` can either be an exact value or a\n callable returning a boolean value. 
If `condition` is a value, it is\n converted to a string, then sanitized. If `condition` is a callable, note that it will\n be passed sanitized values -- i.e., characters outside [a-zA-Z0-9_.] are converted\n to `_`.\n \"\"\"\n clauses = copy(self.clauses)\n for dimension, condition in kwargs.items():\n if dimension in self.clauses:\n raise Exception('There should be only one clause for {}'.format(dimension))\n if dimension not in self.schema:\n raise Exception('The dimension {} doesn\\'t exist'.format(dimension))\n if isfunction(condition) or isinstance(condition, functools.partial):\n clauses[dimension] = condition\n else:\n clauses[dimension] = functools.partial((lambda x, y: x == y), self._sanitize_dimension(str(condition)))\n return self._copy(clauses=clauses)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nretrieves the summaries of the files contained in the current dataset.", "response": "def summaries(self, sc, limit=None):\n \"\"\"Summary of the files contained in the current dataset\n\n Every item in the summary is a dict containing a key name and the corresponding size of\n the key item in bytes, e.g.::\n {'key': 'full/path/to/my/key', 'size': 200}\n\n :param limit: Max number of objects to retrieve\n :return: An iterable of summaries\n \"\"\"\n clauses = copy(self.clauses)\n schema = self.schema\n\n if self.prefix:\n schema = ['prefix'] + schema\n # Add a clause for the prefix that always returns True, in case\n # the output is not filtered at all (so that we do a scan/filter\n # on the prefix directory)\n clauses['prefix'] = lambda x: True\n\n with futures.ThreadPoolExecutor(self.max_concurrency) as executor:\n scanned = self._scan(schema, [self.prefix], clauses, executor)\n keys = sc.parallelize(scanned).flatMap(self.store.list_keys)\n return keys.take(limit) if limit else keys.collect()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef 
records(self, sc, group_by='greedy', limit=None, sample=1, seed=42, decode=None, summaries=None):\n decode = decode or message_parser.parse_heka_message\n summaries = summaries or self.summaries(sc, limit)\n\n # Calculate the sample if summaries is not empty and limit is not set\n if summaries and limit is None and sample != 1:\n if sample < 0 or sample > 1:\n raise ValueError('sample must be between 0 and 1')\n print(\n \"WARNING: THIS IS NOT A REPRESENTATIVE SAMPLE.\\n\"\n \"This 'sampling' is based on s3 files and is highly\\n\"\n \"susceptible to skew. Use only for quicker performance\\n\"\n \"while prototyping.\"\n )\n # We want this sample to be reproducible.\n # See https://bugzilla.mozilla.org/show_bug.cgi?id=1318681\n seed_state = random.getstate()\n try:\n random.seed(seed)\n summaries = random.sample(summaries,\n int(len(summaries) * sample))\n finally:\n random.setstate(seed_state)\n\n # Obtain size in MB\n total_size = functools.reduce(lambda acc, item: acc + item['size'], summaries, 0)\n total_size_mb = total_size / float(1 << 20)\n print(\"fetching %.5fMB in %s files...\" % (total_size_mb, len(summaries)))\n\n if group_by == 'equal_size':\n groups = _group_by_equal_size(summaries, 10*sc.defaultParallelism)\n elif group_by == 'greedy':\n groups = _group_by_size_greedy(summaries, 10*sc.defaultParallelism)\n else:\n raise Exception(\"group_by specification is invalid\")\n\n self._compile_selection()\n\n keys = (\n sc.parallelize(groups, len(groups))\n .flatMap(lambda x: x)\n .map(lambda x: x['key'])\n )\n file_handles = keys.map(self.store.get_key)\n\n # decode(fp: file-object) -> list[dict]\n data = file_handles.flatMap(decode)\n\n return data.map(self._apply_selection)", "response": "Retrieve the elements of a Dataset\n "} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef dataframe(self, spark, group_by='greedy', limit=None, sample=1, seed=42, decode=None, summaries=None, schema=None, 
table_name=None):\n rdd = self.records(spark.sparkContext, group_by, limit, sample, seed, decode, summaries)\n if not schema:\n df = rdd.map(lambda d: Row(**d)).toDF()\n else:\n df = spark.createDataFrame(rdd, schema=schema)\n if table_name:\n df.createOrReplaceTempView(table_name)\n return df", "response": "Convert an RDD returned from records function to a Spark DataFrame"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncreating a new Dataset configured for the given source_name", "response": "def from_source(source_name):\n \"\"\"Create a Dataset configured for the given source_name\n\n This is particularly convenient when the user doesn't know\n the list of dimensions or the bucket name, but only the source name.\n\n Usage example::\n\n records = Dataset.from_source('telemetry').where(\n docType='main',\n submissionDate='20160701',\n appUpdateChannel='nightly'\n )\n \"\"\"\n meta_bucket = 'net-mozaws-prod-us-west-2-pipeline-metadata'\n store = S3Store(meta_bucket)\n\n try:\n source = json.loads(store.get_key('sources.json').read().decode('utf-8'))[source_name]\n except KeyError:\n raise Exception('Unknown source {}'.format(source_name))\n\n schema = store.get_key('{}/schema.json'.format(source['metadata_prefix'])).read().decode('utf-8')\n dimensions = [f['field_name'] for f in json.loads(schema)['dimensions']]\n return Dataset(source['bucket'], dimensions, prefix=source['prefix'])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_next_action(self, request, application, roles):\n application.reopen()\n link, is_secret = base.get_email_link(application)\n emails.send_invite_email(application, link, is_secret)\n messages.success(\n request,\n \"Sent an invitation to %s.\"\n % application.applicant.email)\n return 'success'", "response": "Send an invitation email to the application."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function 
for\nretrieving the next state of the application.", "response": "def get_next_action(self, request, application, roles):\n \"\"\" Retrieve the next state. \"\"\"\n\n # Check for serious errors in submission.\n # Should only happen in rare circumstances.\n errors = application.check_valid()\n if len(errors) > 0:\n for error in errors:\n messages.error(request, error)\n return 'error'\n\n # mark as submitted\n application.submit()\n\n return 'success'"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_next_action(self, request, application, roles):\n # Check for serious errors in submission.\n # Should only happen in rare circumstances.\n errors = application.check_valid()\n if len(errors) > 0:\n for error in errors:\n messages.error(request, error)\n return 'error'\n\n # approve application\n approved_by = request.user\n created_person, created_account = application.approve(approved_by)\n\n # send email\n application.extend()\n link, is_secret = base.get_email_link(application)\n emails.send_approved_email(\n application, created_person, created_account, link, is_secret)\n\n if created_person or created_account:\n return 'password_needed'\n else:\n return 'password_ok'", "response": "Retrieve the next action."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsends an email to each project leader for person", "response": "def send_bounced_warning(person, leader_list):\n \"\"\"Sends an email to each project leader for person\n informing them that person's email has bounced\"\"\"\n context = CONTEXT.copy()\n context['person'] = person\n\n for lp in leader_list:\n leader = lp['leader']\n\n context['project'] = lp['project']\n context['receiver'] = leader\n\n to_email = leader.email\n subject = render_to_string(\n 'karaage/people/emails/bounced_email_subject.txt', context)\n body = render_to_string(\n 'karaage/people/emails/bounced_email_body.txt', context)\n 
send_mail(\n subject.replace('\\n', ''), body,\n settings.ACCOUNTS_EMAIL, [to_email])\n log.change(\n leader,\n 'Sent email about bounced emails from %s' % person)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nsend an email to user allowing them to set their password.", "response": "def send_reset_password_email(person):\n \"\"\"Sends an email to user allowing them to set their password.\"\"\"\n uid = urlsafe_base64_encode(force_bytes(person.pk)).decode(\"ascii\")\n token = default_token_generator.make_token(person)\n url = '%s/persons/reset/%s/%s/' % (\n settings.REGISTRATION_BASE_URL, uid, token)\n\n context = CONTEXT.copy()\n context.update({\n 'url': url,\n 'receiver': person,\n })\n\n to_email = person.email\n subject, body = render_email('reset_password', context)\n\n send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef send_confirm_password_email(person):\n url = '%s/profile/login/%s/' % (\n settings.REGISTRATION_BASE_URL, person.username)\n\n context = CONTEXT.copy()\n context.update({\n 'url': url,\n 'receiver': person,\n })\n\n to_email = person.email\n subject, body = render_email('confirm_password', context)\n\n send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])", "response": "Sends an email to user allowing them to confirm their password."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef check_can_approve(self, request, application, roles):\n try:\n authorised_persons = self.get_authorised_persons(application)\n authorised_persons.get(pk=request.user.pk)\n return True\n except Person.DoesNotExist:\n return False", "response": "Check the person s authorization."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_next_action(self, request, 
application, label, roles):\n actions = self.get_actions(request, application, roles)\n if label is None and 'is_applicant' in roles:\n assert application.content_type.model == 'person'\n if application.applicant.has_usable_password():\n form = forms.PersonVerifyPassword(\n data=request.POST or None, person=application.applicant)\n form_type = \"verify\"\n else:\n form = forms.PersonSetPassword(\n data=request.POST or None, person=application.applicant)\n form_type = \"set\"\n if request.method == 'POST':\n if 'cancel' in request.POST:\n return 'cancel'\n if form.is_valid():\n form.save()\n messages.success(\n request, 'Password updated. New accounts activated.')\n for action in actions:\n if action in request.POST:\n return action\n return HttpResponseBadRequest(\"<h1>Bad Request</h1>\")\n return render(\n template_name='kgapplications/common_password.html',\n context={\n 'application': application,\n 'form': form,\n 'actions': actions,\n 'roles': roles,\n 'type': form_type\n },\n request=request)\n return super(StatePassword, self).get_next_action(request, application, label, roles)", "response": "This method is used to get the next action for the user."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_next_action(self, request, application, label, roles):\n actions = self.get_actions(request, application, roles)\n if label is None and \\\n 'is_applicant' in roles and 'is_admin' not in roles:\n # applicant, admin, leader can reopen an application\n for action in actions:\n if action in request.POST:\n return action\n return render(\n template_name='kgapplications/common_declined.html',\n context={\n 'application': application,\n 'actions': actions,\n 'roles': roles\n },\n request=request)\n return super(StateDeclined, self).get_next_action(\n request, application, label, roles)", "response": "This method returns the next action that is needed to be used in the state declined page."} {"SOURCE": 
"codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef add_step(self, step, step_id):\n assert step_id not in self._steps\n assert step_id not in self._order\n assert isinstance(step, Step)\n\n self._steps[step_id] = step\n self._order.append(step_id)", "response": "Add a step to the list."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_next_action(self, request, application, label, roles):\n actions = self.get_actions(request, application, roles)\n\n # if the user is not the applicant, the steps don't apply.\n if 'is_applicant' not in roles:\n return super(StateWithSteps, self).get_next_action(\n request, application, label, roles)\n\n # was label supplied?\n if label is None:\n # no label given, find first step and redirect to it.\n this_id = self._order[0]\n url = base.get_url(request, application, roles, this_id)\n return HttpResponseRedirect(url)\n else:\n # label was given, get the step position and id for it\n this_id = label\n if this_id not in self._steps:\n return HttpResponseBadRequest(\"<h1>Bad Request</h1>\")\n position = self._order.index(this_id)\n\n # get the step given the label\n this_step = self._steps[this_id]\n\n # define list of allowed actions for step\n step_actions = {}\n if 'cancel' in actions:\n step_actions['cancel'] = \"state:cancel\"\n if 'submit' in actions and position == len(self._order) - 1:\n step_actions['submit'] = \"state:submit\"\n if position > 0:\n step_actions['prev'] = self._order[position - 1]\n if position < len(self._order) - 1:\n step_actions['next'] = self._order[position + 1]\n\n # process the request\n if request.method == \"GET\":\n # if GET request, state changes are not allowed\n response = this_step.view(\n request, application, this_id, roles, step_actions.keys())\n assert isinstance(response, HttpResponse)\n return response\n elif request.method == \"POST\":\n # if POST request, state changes are 
allowed\n response = this_step.view(\n request, application, this_id, roles, step_actions.keys())\n assert response is not None\n\n # If it was a HttpResponse, just return it\n if isinstance(response, HttpResponse):\n return response\n else:\n # try to lookup the response\n if response not in step_actions:\n raise RuntimeError(\n \"Invalid response '%s' from step '%s'\"\n % (response, this_step))\n action = step_actions[response]\n\n # process the required action\n if action.startswith(\"state:\"):\n return action[6:]\n else:\n url = base.get_url(request, application, roles, action)\n return HttpResponseRedirect(url)\n\n # Something went wrong.\n return HttpResponseBadRequest(\"<h1>Bad Request</h1>\")", "response": "Process the get_next_action request at the current step."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_next_action(self, request, application, label, roles):\n actions = self.get_actions(request, application, roles)\n if label is None and \\\n 'is_applicant' in roles and 'is_admin' not in roles:\n for action in actions:\n if action in request.POST:\n return action\n link, is_secret = base.get_email_link(application)\n return render(\n template_name='kgapplications/software_introduction.html',\n context={\n 'actions': actions,\n 'application': application,\n 'roles': roles,\n 'link': link,\n 'is_secret': is_secret\n },\n request=request)\n return super(StateIntroduction, self).get_next_action(\n request, application, label, roles)", "response": "This method returns the next action for the given application and label."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef set_account_username(account, old_username, new_username):\n for datastore in _get_datastores():\n datastore.set_account_username(account, old_username, new_username)", "response": "Sets the username of an account."} {"SOURCE": "codesearchnet", 
"instruction": "How would you explain what the following Python 3 function does\ndef get_account_details(account):\n result = []\n for datastore in _get_datastores():\n value = datastore.get_account_details(account)\n value['datastore'] = datastore.config['DESCRIPTION']\n result.append(value)\n return result", "response": "Get the account details."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nchanging the group name.", "response": "def set_group_name(group, old_name, new_name):\n \"\"\" Group was renamed. \"\"\"\n for datastore in _get_datastores():\n datastore.set_group_name(group, old_name, new_name)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets details for this user.", "response": "def get_project_details(project):\n \"\"\" Get details for this user. \"\"\"\n result = []\n for datastore in _get_datastores():\n value = datastore.get_project_details(project)\n value['datastore'] = datastore.config['DESCRIPTION']\n result.append(value)\n return result"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef set_project_pid(project, old_pid, new_pid):\n for datastore in _get_datastores():\n datastore.save_project(project)\n datastore.set_project_pid(project, old_pid, new_pid)", "response": "Set the project s PID to new_pid."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_institute_details(institute):\n result = []\n for datastore in _get_datastores():\n value = datastore.get_institute_details(institute)\n value['datastore'] = datastore.config['DESCRIPTION']\n result.append(value)\n return result", "response": "Get details for this user."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd accounts to group.", "response": "def add_accounts_to_group(accounts_query, group):\n \"\"\" Add accounts to group. 
\"\"\"\n\n query = accounts_query.filter(date_deleted__isnull=True)\n\n for account in query:\n add_account_to_group(account, group)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef remove_accounts_from_group(accounts_query, group):\n\n query = accounts_query.filter(date_deleted__isnull=True)\n\n for account in query:\n remove_account_from_group(account, group)", "response": "Remove accounts from group."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nadd accounts to project.", "response": "def add_accounts_to_project(accounts_query, project):\n \"\"\" Add accounts to project. \"\"\"\n\n query = accounts_query.filter(date_deleted__isnull=True)\n\n for account in query:\n add_account_to_project(account, project)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nremoves accounts from project.", "response": "def remove_accounts_from_project(accounts_query, project):\n \"\"\" Remove accounts from project. \"\"\"\n\n query = accounts_query.filter(date_deleted__isnull=True)\n\n for account in query:\n remove_account_from_project(account, project)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd accounts to institute.", "response": "def add_accounts_to_institute(accounts_query, institute):\n \"\"\" Add accounts to institute. 
\"\"\"\n\n query = accounts_query.filter(date_deleted__isnull=True)\n\n for account in query:\n add_account_to_institute(account, institute)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef remove_accounts_from_institute(accounts_query, institute):\n\n query = accounts_query.filter(date_deleted__isnull=True)\n\n for account in query:\n remove_account_from_institute(account, institute)", "response": "Remove accounts from institute."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef view(self, request, application, label, roles, actions):\n # Get the appropriate form\n status = None\n form = None\n\n if application.content_type.model != 'applicant':\n status = \"You are already registered in the system.\"\n elif application.content_type.model == 'applicant':\n if application.applicant.saml_id is not None:\n form = forms.SAMLApplicantForm(\n request.POST or None,\n instance=application.applicant)\n else:\n form = forms.UserApplicantForm(\n request.POST or None,\n instance=application.applicant)\n\n # Process the form, if there is one\n if form is not None and request.method == 'POST':\n if form.is_valid():\n form.save(commit=True)\n for action in actions:\n if action in request.POST:\n return action\n return HttpResponseBadRequest(\"<h1>Bad Request</h1>\")\n else:\n # if form didn't validate and we want to go back or cancel,\n # then just do it.\n if 'cancel' in request.POST:\n return \"cancel\"\n if 'prev' in request.POST:\n return 'prev'\n\n # If we don't have a form, we can just process the actions here\n if form is None:\n for action in actions:\n if action in request.POST:\n return action\n\n # Render the response\n return render(\n template_name='kgapplications/project_aed_applicant.html',\n context={\n 'form': form,\n 'application': application,\n 'status': status, 'actions': actions, 'roles': roles,\n },\n request=request)", 
"response": "This method is called by the Django app to view the applicant state."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nprocessing the get_next_action request at the current step.", "response": "def get_next_action(self, request, application, label, roles):\n \"\"\" Process the get_next_action request at the current step. \"\"\"\n\n # if user is logged and and not applicant, steal the\n # application\n if 'is_applicant' in roles:\n # if we got this far, then we either we are logged in as applicant,\n # or we know the secret for this application.\n\n new_person = None\n reason = None\n details = None\n\n attrs, _ = saml.parse_attributes(request)\n saml_id = attrs['persistent_id']\n if saml_id is not None:\n query = Person.objects.filter(saml_id=saml_id)\n if application.content_type.model == \"person\":\n query = query.exclude(pk=application.applicant.pk)\n if query.count() > 0:\n new_person = Person.objects.get(saml_id=saml_id)\n reason = \"SAML id is already in use by existing person.\"\n details = (\n \"It is not possible to continue this application \"\n + \"as is because the saml identity already exists \"\n + \"as a registered user.\")\n del query\n\n if request.user.is_authenticated:\n new_person = request.user\n reason = \"%s was logged in \" \\\n \"and accessed the secret URL.\" % new_person\n details = (\n \"If you want to access this application \"\n + \"as %s \" % application.applicant\n + \"without %s stealing it, \" % new_person\n + \"you will have to ensure %s is \" % new_person\n + \"logged out first.\")\n\n if new_person is not None:\n if application.applicant != new_person:\n if 'steal' in request.POST:\n old_applicant = application.applicant\n application.applicant = new_person\n application.save()\n log.change(\n application.application_ptr,\n \"Stolen application from %s\" % old_applicant)\n messages.success(\n request,\n \"Stolen application from %s\" % old_applicant)\n url = base.get_url(request, 
application, roles, label)\n return HttpResponseRedirect(url)\n else:\n return render(\n template_name='kgapplications'\n '/project_aed_steal.html',\n context={\n 'application': application,\n 'person': new_person,\n 'reason': reason,\n 'details': details,\n },\n request=request)\n\n # if the user is the leader, show him the leader specific page.\n if ('is_leader' in roles or 'is_delegate' in roles) \\\n and 'is_admin' not in roles \\\n and 'is_applicant' not in roles:\n actions = ['reopen']\n if 'reopen' in request.POST:\n return 'reopen'\n return render(\n template_name='kgapplications/project_aed_for_leader.html',\n context={'application': application,\n 'actions': actions, 'roles': roles, },\n request=request)\n\n # otherwise do the default behaviour for StateWithSteps\n return super(StateApplicantEnteringDetails, self) \\\n .get_next_action(request, application, label, roles)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _filter_string(value):\n if value is None:\n value = \"\"\n\n # replace whitespace with space\n value = value.replace(\"\\n\", \" \")\n value = value.replace(\"\\t\", \" \")\n\n # CSV seperator\n value = value.replace(\"|\", \" \")\n\n # remove leading/trailing whitespace\n value = value.strip()\n\n # hack because MAM doesn't quote sql correctly\n value = value.replace(\"\\\\\", \"\")\n\n # Used for stripping non-ascii characters\n value = ''.join(c for c in value if 31 < ord(c) < 127)\n\n return value", "response": "Filter the string so MAM doesn t have heart failure."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ntruncates a string after a given number of chars", "response": "def _truncate(value, arg):\n \"\"\"\n Truncates a string after a given number of chars\n Argument: Number of chars to _truncate after\n \"\"\"\n length = int(arg)\n if value is None:\n value = \"\"\n if len(value) > length:\n return value[:length] + \"...\"\n else:\n return 
value"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncalling a command and return the result.", "response": "def _call(self, command, ignore_errors=None):\n \"\"\" Call remote command with logging. \"\"\"\n if ignore_errors is None:\n ignore_errors = []\n\n command = self._get_command(command)\n logger.debug(\"Cmd %s\" % command)\n null = open('/dev/null', 'w')\n retcode = subprocess.call(command, stdout=null, stderr=null)\n null.close()\n\n if retcode in ignore_errors:\n logger.debug(\n \"<-- Cmd %s returned %d (ignored)\" % (command, retcode))\n return\n\n if retcode:\n logger.error(\n \"<-- Cmd %s returned: %d (error)\" % (command, retcode))\n raise subprocess.CalledProcessError(retcode, command)\n\n logger.debug(\"<-- Returned %d (good)\" % retcode)\n return"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting the user details from MAM.", "response": "def get_user(self, username):\n \"\"\" Get the user details from MAM. 
\"\"\"\n cmd = [\"glsuser\", \"-u\", username, \"--raw\"]\n results = self._read_output(cmd)\n\n if len(results) == 0:\n return None\n elif len(results) > 1:\n logger.error(\n \"Command returned multiple results for '%s'.\" % username)\n raise RuntimeError(\n \"Command returned multiple results for '%s'.\" % username)\n\n the_result = results[0]\n the_name = the_result[\"Name\"]\n if username.lower() != the_name.lower():\n logger.error(\n \"We expected username '%s' but got username '%s'.\"\n % (username, the_name))\n raise RuntimeError(\n \"We expected username '%s' but got username '%s'.\"\n % (username, the_name))\n\n return the_result"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_user_balance(self, username):\n cmd = [\"gbalance\", \"-u\", username, \"--raw\"]\n results = self._read_output(cmd)\n\n if len(results) == 0:\n return None\n\n return results", "response": "Get the user balance details from MAM."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_users_in_project(self, projectname):\n ds_project = self.get_project(projectname)\n if ds_project is None:\n logger.error(\n \"Project '%s' does not exist in MAM\" % projectname)\n raise RuntimeError(\n \"Project '%s' does not exist in MAM\" % projectname)\n\n user_list = []\n if ds_project[\"Users\"] != \"\":\n user_list = ds_project[\"Users\"].lower().split(\",\")\n return user_list", "response": "Get list of users in project from MAM."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_projects_in_user(self, username):\n ds_balance = self.get_user_balance(username)\n if ds_balance is None:\n return []\n\n project_list = []\n for bal in ds_balance:\n project_list.append(bal[\"Name\"])\n return project_list", "response": "Get list of projects in user from MAM."} {"SOURCE": "codesearchnet", "instruction": 
"Write a Python 3 script to\ncall when account is created or updated with username override.", "response": "def _save_account(self, account, username):\n \"\"\" Called when account is created/updated. With username override. \"\"\"\n\n # retrieve default project, or use null project if none\n default_project_name = self._null_project\n if account.default_project is not None:\n default_project_name = account.default_project.pid\n\n # account created\n # account updated\n\n ds_user = self.get_user(username)\n if account.date_deleted is None:\n # date_deleted is not set, user should exist\n logger.debug(\"account is active\")\n\n if ds_user is None:\n # create user if doesn't exist\n self._create_user(username, default_project_name)\n else:\n # or just set default project\n self._set_user_default_project(username, default_project_name)\n\n # update user meta information\n self._call([\n \"gchuser\",\n \"-n\", self._filter_string(account.person.get_full_name()),\n \"-u\", username])\n self._call([\n \"gchuser\",\n \"-E\", self._filter_string(account.person.email),\n \"-u\", username])\n\n # add rest of projects user belongs to\n mam_projects = self.get_projects_in_user(username)\n for project in account.person.projects.all():\n if project.pid not in mam_projects:\n self.add_account_to_project(account, project)\n else:\n # date_deleted is not set, user should not exist\n logger.debug(\"account is not active\")\n if ds_user is not None:\n # delete MAM user if account marked as deleted\n self._call([\"grmuser\", \"-u\", username], ignore_errors=[8])\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _delete_account(self, account, username):\n\n # account deleted\n\n ds_user = self.get_user(username)\n if ds_user is not None:\n self._call([\"grmuser\", \"-u\", username], ignore_errors=[8])\n\n return", "response": "Called when account is deleted. 
With username override."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset the username of an account.", "response": "def set_account_username(self, account, old_username, new_username):\n \"\"\" Account's username was changed. \"\"\"\n self._delete_account(account, old_username)\n self._save_account(account, new_username)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting the account details", "response": "def get_account_details(self, account):\n \"\"\" Get the account details \"\"\"\n result = self.get_user(account.username)\n if result is None:\n result = {}\n return result"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef save_project(self, project):\n pid = project.pid\n\n # project created\n # project updated\n\n if project.is_active:\n # project is not deleted\n logger.debug(\"project is active\")\n ds_project = self.get_project(pid)\n if ds_project is None:\n self._create_project(pid)\n\n # update project meta information\n name = self._truncate(project.name, 40)\n self._set_project(pid, name, project.institute)\n else:\n # project is deleted\n logger.debug(\"project is not active\")\n ds_project = self.get_project(pid)\n if ds_project is not None:\n self._delete_project(pid)\n\n return", "response": "Called when project is saved or updated."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef delete_project(self, project):\n pid = project.pid\n\n # project deleted\n\n ds_project = self.get_project(pid)\n if ds_project is not None:\n self._delete_project(pid)\n\n return", "response": "Called when a project is deleted."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the project details.", "response": "def get_project_details(self, project):\n \"\"\" Get the project details. 
\"\"\"\n result = self.get_project(project.pid)\n if result is None:\n result = {}\n return result"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef save_institute(self, institute):\n name = institute.name\n logger.debug(\"save_institute '%s'\" % name)\n\n # institute created\n # institute updated\n\n if institute.is_active:\n # date_deleted is not set, user should exist\n logger.debug(\"institute is active\")\n\n self._call(\n [\"goldsh\", \"Organization\", \"Create\", \"Name=%s\" % name],\n ignore_errors=[185])\n else:\n # date_deleted is not set, user should not exist\n logger.debug(\"institute is not active\")\n # delete MAM organisation if institute marked as deleted\n self._call([\"goldsh\", \"Organization\", \"Delete\", \"Name==%s\" % name])\n\n logger.debug(\"returning\")\n return", "response": "Called when institute is created or updated."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncalling when institute is deleted.", "response": "def delete_institute(self, institute):\n \"\"\" Called when institute is deleted. 
\"\"\"\n name = institute.name\n logger.debug(\"institute_deleted '%s'\" % name)\n\n # institute deleted\n self._call([\"goldsh\", \"Organization\", \"Delete\", \"Name==%s\" % name])\n\n logger.debug(\"returning\")\n return"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_account_to_project(self, account, project):\n username = account.username\n projectname = project.pid\n self._call([\n \"gchproject\",\n \"--add-user\", username,\n \"-p\", projectname],\n ignore_errors=[74])", "response": "Add account to project."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nrenders a Matplotlib figure in Zeppelin.", "response": "def show(fig, width=600):\n \"\"\" Renders a Matplotlib figure in Zeppelin.\n\n :param fig: a Matplotlib figure\n :param width: the width in pixel of the rendered figure, defaults to 600\n\n Usage example::\n\n import matplotlib.pyplot as plt\n from moztelemetry.zeppelin import show\n\n fig = plt.figure()\n plt.plot([1, 2, 3])\n show(fig)\n \"\"\"\n img = StringIO()\n fig.savefig(img, format='svg')\n img.seek(0)\n print(\"%html <div style='width:{}px'>{}</div>\".format(width, img.buf))"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns a fully - connected set of hub components using default filenames.", "response": "def default_hub(hub_name, genome, email, short_label=None, long_label=None):\n \"\"\"\n Returns a fully-connected set of hub components using default filenames.\n\n Parameters\n ----------\n\n hub_name : str\n Name of the hub\n\n genome : str\n Assembly name (hg38, dm6, etc)\n\n email : str\n Email to include with hub.\n\n short_label : str\n Short label for the hub. If None, defaults to the value of `hub_name`\n\n long_label : str\n Long label for the hub. 
If None, defaults to the value of `short_label`.\n \"\"\"\n if short_label is None:\n short_label = hub_name\n if long_label is None:\n long_label = short_label\n\n hub = Hub(\n hub=hub_name,\n short_label=short_label,\n long_label=long_label,\n email=email)\n\n genome = Genome(genome)\n genomes_file = GenomesFile()\n trackdb = TrackDb()\n hub.add_genomes_file(genomes_file)\n genomes_file.add_genome(genome)\n genome.add_trackdb(trackdb)\n return hub, genomes_file, genome, trackdb"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef parse_usage(machine, usage, date, machine_name, log_type):\n assert machine.name == machine_name\n\n year, month, day = date.split('-')\n date = datetime.date(int(year), int(month), int(day))\n\n return parse_logs(usage, date, machine_name, log_type)", "response": "Parses a usage object and returns a list of log objects."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef from_files(filenames, strict_type_checks=True):\n if strict_type_checks:\n load_whitelist()\n\n all_histograms = OrderedDict()\n for filename in filenames:\n parser = FILENAME_PARSERS[os.path.basename(filename)]\n histograms = parser(filename, strict_type_checks)\n\n # OrderedDicts are important, because then the iteration order over\n # the parsed histograms is stable, which makes the insertion into\n # all_histograms stable, which makes ordering in generated files\n # stable, which makes builds more deterministic.\n if not isinstance(histograms, OrderedDict):\n raise ParserError(\"Histogram parser did not provide an OrderedDict.\")\n\n for name, definition in iteritems(histograms):\n if name in all_histograms:\n raise ParserError('Duplicate histogram name \"%s\".' 
% name)\n all_histograms[name] = definition\n\n # We require that all USE_COUNTER2_* histograms be defined in a contiguous\n # block.\n use_counter_indices = filter(lambda x: x[1].startswith(\"USE_COUNTER2_\"),\n enumerate(iterkeys(all_histograms)))\n if use_counter_indices:\n lower_bound = use_counter_indices[0][0]\n upper_bound = use_counter_indices[-1][0]\n n_counters = upper_bound - lower_bound + 1\n if n_counters != len(use_counter_indices):\n raise ParserError(\"Use counter histograms must be defined in a contiguous block.\")\n\n # Check that histograms that were removed from Histograms.json etc.\n # are also removed from the whitelists.\n if whitelists is not None:\n all_whitelist_entries = itertools.chain.from_iterable(whitelists.itervalues())\n orphaned = set(all_whitelist_entries) - set(iterkeys(all_histograms))\n if len(orphaned) > 0:\n msg = 'The following entries are orphaned and should be removed from ' \\\n 'histogram-whitelists.json:\\n%s'\n raise ParserError(msg % (', '.join(sorted(orphaned))))\n\n for name, definition in iteritems(all_histograms):\n yield Histogram(name, definition, strict_type_checks=strict_type_checks)", "response": "Return an iterator that provides a sequence of Histograms for each file in filenames."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef ranges(self):\n bucket_fns = {\n 'boolean': linear_buckets,\n 'flag': linear_buckets,\n 'count': linear_buckets,\n 'enumerated': linear_buckets,\n 'categorical': linear_buckets,\n 'linear': linear_buckets,\n 'exponential': exponential_buckets,\n }\n\n if self._kind not in bucket_fns:\n raise ParserError('Unknown kind \"%s\" for histogram \"%s\".' 
% (self._kind, self._name))\n\n fn = bucket_fns[self._kind]\n return fn(self.low(), self.high(), self.n_buckets())", "response": "Return an array of lower bounds for each bucket in the histogram."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a new Project ID based on the number of the current institute.", "response": "def get_new_pid(institute):\n \"\"\" Return a new Project ID\n Keyword arguments:\n institute_id -- Institute id\n \"\"\"\n number = '0001'\n prefix = 'p%s' % institute.name.replace(' ', '')[:4]\n\n found = True\n while found:\n try:\n Project.objects.get(pid=prefix + number)\n number = str(int(number) + 1)\n if len(number) == 1:\n number = '000' + number\n elif len(number) == 2:\n number = '00' + number\n elif len(number) == 3:\n number = '0' + number\n except Project.DoesNotExist:\n found = False\n\n return prefix + number"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds items from extra_context dict to context.", "response": "def apply_extra_context(extra_context, context):\n \"\"\"\n Adds items from extra_context dict to context. 
If a value in extra_context\n is callable, then it is called and the result is added to context.\n \"\"\"\n for key, value in six.iteritems(extra_context):\n if callable(value):\n context[key] = value()\n else:\n context[key] = value"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_model_and_form_class(model, form_class):\n if form_class:\n return form_class._meta.model, form_class\n if model:\n # The inner Meta class fails if model = model is used for some reason.\n tmp_model = model\n # TODO: we should be able to construct a ModelForm without creating\n # and passing in a temporary inner class.\n\n class Meta:\n model = tmp_model\n\n class_name = model.__name__ + 'Form'\n form_class = ModelFormMetaclass(\n class_name, (ModelForm,), {'Meta': Meta})\n return model, form_class\n raise GenericViewError(\"Generic view must be called with either a model or\"\n \" form_class argument.\")", "response": "Returns a model and form class based on the model and form_class parameters."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a HttpResponseRedirect to the post_save_redirect parameter.", "response": "def redirect(post_save_redirect, obj):\n \"\"\"\n Returns a HttpResponseRedirect to ``post_save_redirect``.\n\n ``post_save_redirect`` should be a string, and can contain named string-\n substitution place holders of ``obj`` field names.\n\n If ``post_save_redirect`` is None, then redirect to ``obj``'s URL returned\n by ``get_absolute_url()``. 
If ``obj`` has no ``get_absolute_url`` method,\n then raise ImproperlyConfigured.\n\n This function is meant to handle the post_save_redirect parameter to the\n ``create_object`` and ``update_object`` views.\n \"\"\"\n if post_save_redirect:\n return HttpResponseRedirect(post_save_redirect % obj.__dict__)\n elif hasattr(obj, 'get_absolute_url'):\n return HttpResponseRedirect(obj.get_absolute_url())\n else:\n raise ImproperlyConfigured(\n \"No URL to redirect to. Either pass a post_save_redirect\"\n \" parameter to the generic view or define a get_absolute_url\"\n \" method on the Model.\")"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef lookup_object(model, object_id, slug, slug_field):\n lookup_kwargs = {}\n if object_id:\n lookup_kwargs['%s__exact' % model._meta.pk.name] = object_id\n elif slug and slug_field:\n lookup_kwargs['%s__exact' % slug_field] = slug\n else:\n raise GenericViewError(\n \"Generic view must be called with either an object_id or a\"\n \" slug/slug_field.\")\n try:\n return model.objects.get(**lookup_kwargs)\n except ObjectDoesNotExist:\n raise Http404(\"No %s found for %s\"\n % (model._meta.verbose_name, lookup_kwargs))", "response": "Lookup the object with the passed object_id."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef create_object(\n request, model=None, template_name=None,\n template_loader=loader, extra_context=None, post_save_redirect=None,\n login_required=False, context_processors=None, form_class=None):\n \"\"\"\n Generic object-creation function.\n\n Templates: ``<app_label>/<model_name>_form.html``\n Context:\n form\n the form for the object\n \"\"\"\n if extra_context is None:\n extra_context = {}\n if login_required and not request.user.is_authenticated:\n return redirect_to_login(request.path)\n\n model, form_class = get_model_and_form_class(model, form_class)\n if request.method == 'POST':\n form = 
form_class(request.POST, request.FILES)\n if form.is_valid():\n new_object = form.save()\n\n msg = ugettext(\"The %(verbose_name)s was created successfully.\") %\\\n {\"verbose_name\": model._meta.verbose_name}\n messages.success(request, msg, fail_silently=True)\n return redirect(post_save_redirect, new_object)\n else:\n form = form_class()\n\n # Create the template, context, response\n if not template_name:\n template_name = \"%s/%s_form.html\" % (\n model._meta.app_label, model._meta.object_name.lower())\n t = template_loader.get_template(template_name)\n c = {\n 'form': form,\n }\n apply_extra_context(extra_context, c)\n return HttpResponse(t.render(context=c, request=request))", "response": "Create an object of the specified type."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_params(self, **kw):\n for k, v in kw.items():\n if k not in self.params:\n raise ValidationError(\n '\"%s\" is not a valid parameter for %s'\n % (k, self.__class__.__name__))\n self.params[k].validate(v)\n\n self._orig_kwargs.update(kw)\n self.kwargs = self._orig_kwargs.copy()", "response": "Add parameters to the Assembly."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef remove_params(self, *args):\n for a in args:\n self._orig_kwargs.pop(a)\n self.kwargs = self._orig_kwargs.copy()", "response": "Remove parameters from the Assembly."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nhooks that executes whenever the group members are changed.", "response": "def _members_changed(\n sender, instance, action, reverse, model, pk_set, **kwargs):\n \"\"\"\n Hook that executes whenever the group members are changed.\n \"\"\"\n if action == \"post_remove\":\n if not reverse:\n group = instance\n for person in model.objects.filter(pk__in=pk_set):\n _remove_group(group, person)\n else:\n person = instance\n for group in 
model.objects.filter(pk__in=pk_set):\n _remove_group(group, person)\n\n elif action == \"pre_clear\":\n # This has to occur in pre_clear, not post_clear, as otherwise\n # we won't see what groups need to be removed.\n if not reverse:\n group = instance\n for person in group.members.all():\n _remove_group(group, person)\n else:\n person = instance\n for group in person.groups.all():\n _remove_group(group, person)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef validate_username(username):\n\n # Check username looks ok\n\n if not username.islower():\n raise UsernameInvalid(six.u('Username must be all lowercase'))\n if len(username) < 2:\n raise UsernameInvalid(six.u('Username must be at least 2 characters'))\n if not username_re.search(username):\n raise UsernameInvalid(settings.USERNAME_VALIDATION_ERROR_MSG)\n\n return username", "response": "Validate the new username. If the username is invalid, raises\n :py:exc:`UsernameInvalid`.\n\n :param username: Username to validate."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nvalidates the new username for a new person.", "response": "def validate_username_for_new_person(username):\n \"\"\" Validate the new username for a new person. If the username is invalid\n or in use, raises :py:exc:`UsernameInvalid` or :py:exc:`UsernameTaken`.\n\n :param username: Username to validate.\n \"\"\"\n\n # is the username valid?\n\n validate_username(username)\n\n # Check for existing people\n\n count = Person.objects.filter(username__exact=username).count()\n if count >= 1:\n raise UsernameTaken(six.u(\n 'The username is already taken. Please choose another. 
'\n 'If this was the name of your old account please email %s')\n % settings.ACCOUNTS_EMAIL)\n\n # Check for existing accounts\n\n count = Account.objects.filter(username__exact=username).count()\n if count >= 1:\n raise UsernameTaken(six.u(\n 'The username is already taken. Please choose another. '\n 'If this was the name of your old account please email %s')\n % settings.ACCOUNTS_EMAIL)\n\n # Check account datastore, in case username created outside Karaage.\n\n if account_exists(username):\n raise UsernameTaken(\n six.u('Username is already in external account datastore.'))\n\n return username"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef validate_username_for_new_account(person, username):\n\n # This is much the same as validate_username_for_new_person, except\n # we don't care if the username is used by the person owning the account\n\n # is the username valid?\n\n validate_username(username)\n\n # Check for existing people\n\n query = Person.objects.filter(username__exact=username)\n count = query.exclude(pk=person.pk).count()\n if count >= 1:\n raise UsernameTaken(six.u(\n 'The username is already taken. Please choose another. '\n 'If this was the name of your old account please email %s')\n % settings.ACCOUNTS_EMAIL)\n\n # Check for existing accounts not belonging to this person\n\n query = Account.objects.filter(username__exact=username)\n count = query.exclude(person__pk=person.pk).count()\n if count >= 1:\n raise UsernameTaken(six.u(\n 'The username is already taken. Please choose another. 
'\n 'If this was the name of your old account please email %s')\n % settings.ACCOUNTS_EMAIL)\n\n # Check datastore, in case username created outside Karaage.\n # Make sure we don't count the entry for person.\n\n query = Person.objects.filter(username__exact=username)\n count = query.filter(pk=person.pk).count()\n if count == 0 and account_exists(username):\n raise UsernameTaken(\n six.u('Username is already in external personal datastore.'))", "response": "Validate the new username for a new account."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef check_username_for_new_account(person, username):\n\n query = Account.objects.filter(\n username__exact=username,\n date_deleted__isnull=True)\n\n if query.count() > 0:\n raise UsernameTaken(\n six.u('Username already in use.')\n )\n\n if account_exists(username):\n raise UsernameTaken(\n six.u('Username is already in datastore.')\n )\n\n return username", "response": "Checks the new username for a new account. 
If the username is already in use raises a UsernameTaken."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef validate_username_for_rename_person(username, person):\n\n # is the username valid?\n\n validate_username(username)\n\n # Check for existing people\n\n count = Person.objects.filter(username__exact=username) \\\n .exclude(pk=person.pk).count()\n if count >= 1:\n raise UsernameTaken(\n six.u('The username is already taken by an existing person.'))\n\n # Check for existing accounts not owned by person.\n # If there is a conflicting account owned by person it doesn't matter.\n\n count = Account.objects.filter(username__exact=username) \\\n .exclude(person=person).count()\n if count >= 1:\n raise UsernameTaken(\n six.u('The username is already taken by an existing account.'))\n\n # Check datastore, in case username created outside Karaage.\n\n count = Account.objects.filter(\n username__exact=username,\n person=person,\n date_deleted__isnull=True).count()\n if count == 0 and account_exists(username):\n raise UsernameTaken(\n six.u('Username is already in external account datastore.'))", "response": "Validate the new username for rename person."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nretrieve a link that will work for the current user.", "response": "def get_url(request, application, roles, label=None):\n \"\"\" Retrieve a link that will work for the current user. 
\"\"\"\n args = []\n if label is not None:\n args.append(label)\n\n # don't use secret_token unless we have to\n if 'is_admin' in roles:\n # Administrators can access anything without secrets\n require_secret = False\n elif 'is_applicant' not in roles:\n # we never give secrets to anybody but the applicant\n require_secret = False\n elif not request.user.is_authenticated:\n # If applicant is not logged in, we redirect them to secret URL\n require_secret = True\n elif request.user != application.applicant:\n # If logged in as different person, we redirect them to secret\n # URL. This could happen if the application was open with a different\n # email address, and the applicant is logged in when accessing it.\n require_secret = True\n else:\n # otherwise redirect them to URL that requires correct login.\n require_secret = False\n\n # return required url\n if not require_secret:\n url = reverse(\n 'kg_application_detail',\n args=[application.pk, application.state] + args)\n else:\n url = reverse(\n 'kg_application_unauthenticated',\n args=[application.secret_token, application.state] + args)\n return url"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_admin_email_link(application):\n url = '%s/applications/%d/' % (settings.ADMIN_BASE_URL, application.pk)\n is_secret = False\n return url, is_secret", "response": "Retrieve a link that can be emailed to the administrator."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_registration_email_link(application):\n url = '%s/applications/%d/' % (\n settings.REGISTRATION_BASE_URL, application.pk)\n is_secret = False\n return url, is_secret", "response": "Retrieve a link that can be emailed to the logged other users."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_email_link(application):\n # don't use secret_token unless 
we have to\n if (application.content_type.model == 'person'\n and application.applicant.has_usable_password()):\n url = '%s/applications/%d/' % (\n settings.REGISTRATION_BASE_URL, application.pk)\n is_secret = False\n else:\n url = '%s/applications/%s/' % (\n settings.REGISTRATION_BASE_URL, application.secret_token)\n is_secret = True\n return url, is_secret", "response": "Retrieve a link that can be emailed to the applicant."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef start(self, request, application, extra_roles=None):\n # Get the authentication of the current user\n roles = self._get_roles_for_request(request, application)\n if extra_roles is not None:\n roles.update(extra_roles)\n\n # Ensure current user is authenticated. If user isn't applicant,\n # leader, delegate or admin, they probably shouldn't be here.\n if 'is_authorised' not in roles:\n return HttpResponseForbidden('<h1>Access Denied</h1>')\n\n # Go to first state.\n return self._next(request, application, roles, self._first_state)", "response": "Start the state machine."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef process(\n self, request, application,\n expected_state, label, extra_roles=None):\n \"\"\" Process the view request at the current state. \"\"\"\n\n # Get the authentication of the current user\n roles = self._get_roles_for_request(request, application)\n if extra_roles is not None:\n roles.update(extra_roles)\n\n # Ensure current user is authenticated. 
If user isn't applicant,\n # leader, delegate or admin, they probably shouldn't be here.\n if 'is_authorised' not in roles:\n return HttpResponseForbidden('<h1>Access Denied</h1>')\n\n # If user didn't supply state on URL, redirect to full URL.\n if expected_state is None:\n url = get_url(request, application, roles, label)\n return HttpResponseRedirect(url)\n\n # Check that the current state is valid.\n if application.state not in self._config:\n raise RuntimeError(\"Invalid current state '%s'\" % application.state)\n\n # If state user expected is different to state we are in, warn user\n # and jump to expected state.\n if expected_state != application.state:\n # post data will be lost\n if request.method == \"POST\":\n messages.warning(\n request,\n \"Discarding request and jumping to current state.\")\n # note we discard the label, it probably isn't relevant for new\n # state\n url = get_url(request, application, roles)\n return HttpResponseRedirect(url)\n\n # Get the current state for this application\n state_config = self._config[application.state]\n\n # Finally do something\n instance = load_state_instance(state_config)\n if request.method == \"GET\":\n # if method is GET, state does not ever change.\n response = instance.get_next_config(request, application, label, roles)\n assert isinstance(response, HttpResponse)\n return response\n\n elif request.method == \"POST\":\n # if method is POST, it can return a HttpResponse or a string\n response = instance.get_next_config(request, application, label, roles)\n if isinstance(response, HttpResponse):\n # If it returned a HttpResponse, state not changed, just\n # display\n return response\n else:\n # If it returned a string, lookit up in the actions for this\n # state\n next_config = response\n # Go to the next state\n return self._next(request, application, roles, next_config)\n\n else:\n # Shouldn't happen, user did something weird\n return HttpResponseBadRequest(\"<h1>Bad Request</h1>\")", "response": "Process 
the view request at the current state of the application."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _get_roles_for_request(request, application):\n roles = application.get_roles_for_person(request.user)\n\n if common.is_admin(request):\n roles.add(\"is_admin\")\n roles.add('is_authorised')\n\n return roles", "response": "Get the roles that the user is authorized to access."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncontinuing the state machine at given state.", "response": "def _next(self, request, application, roles, next_config):\n \"\"\" Continue the state machine at given state. \"\"\"\n # we only support state changes for POST requests\n if request.method == \"POST\":\n key = None\n\n # If next state is a transition, process it\n while True:\n # We do not expect to get a direct state transition here.\n assert next_config['type'] in ['goto', 'transition']\n\n while next_config['type'] == 'goto':\n key = next_config['key']\n next_config = self._config[key]\n\n instance = load_instance(next_config)\n\n if not isinstance(instance, Transition):\n break\n\n next_config = instance.get_next_config(request, application, roles)\n\n # lookup next state\n assert key is not None\n state_key = key\n\n # enter that state\n instance.enter_state(request, application)\n application.state = state_key\n application.save()\n\n # log details\n log.change(application.application_ptr, \"state: %s\" % instance.name)\n\n # redirect to this new state\n url = get_url(request, application, roles)\n return HttpResponseRedirect(url)\n else:\n return HttpResponseBadRequest(\"<h1>Bad Request</h1>\")"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nchecks that the user is authenticated and if not raise PermissionDenied", "response": "def check_auth(self, request):\n \"\"\"\n to ensure that nobody can get your data via json simply by 
knowing the\n URL. public facing forms should write a custom LookupChannel to\n implement as you wish. also you could choose to return\n HttpResponseForbidden(\"who are you?\") instead of raising\n PermissionDenied (401 response)\n \"\"\"\n if not request.user.is_authenticated:\n raise PermissionDenied\n if not is_admin(request):\n raise PermissionDenied"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_query(self, q, request):\n return Institute.objects.filter(\n Q(name__icontains=q)\n )", "response": "return a query set for the given query string"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a scalar for flag count histograms. Otherwise it returns either the raw histogram represented as a pandas Series. If autocast is disabled the underlying pandas series is always returned as is.", "response": "def get_value(self, only_median=False, autocast=True):\n \"\"\"\n Returns a scalar for flag and count histograms. 
Otherwise it returns either the\n raw histogram represented as a pandas Series or just the median if only_median\n is True.\n If autocast is disabled the underlying pandas series is always returned as is.\n \"\"\"\n\n if not autocast:\n return self.buckets\n\n if self.kind in [\"exponential\", \"linear\", \"enumerated\", \"boolean\"]:\n return float(self.percentile(50)) if only_median else self.buckets\n elif self.kind == \"categorical\" and not only_median:\n return self.buckets\n elif self.kind == \"count\":\n return int(self.buckets[0])\n elif self.kind == \"flag\":\n return self.buckets[1] == 1\n else:\n assert(False)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef percentile(self, percentile):\n assert(percentile >= 0 and percentile <= 100)\n assert(self.kind in [\"exponential\", \"linear\", \"enumerated\", \"boolean\"])\n\n fraction = percentile / 100\n to_count = fraction * self.buckets.sum()\n percentile_bucket = 0\n\n for percentile_bucket in range(len(self.buckets)):\n freq = self.buckets.values[percentile_bucket]\n if to_count - freq <= 0:\n break\n to_count -= freq\n\n percentile_lower_boundary = self.buckets.index[percentile_bucket]\n percentile_frequency = self.buckets.values[percentile_bucket]\n\n if percentile_bucket == len(self.buckets) - 1 or percentile_frequency == 0:\n return percentile_lower_boundary\n\n width = self.buckets.index[percentile_bucket + 1] - self.buckets.index[percentile_bucket]\n return percentile_lower_boundary + width * to_count / percentile_frequency", "response": "Returns the nth percentile of the histogram."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef tracktype(self, tracktype):\n self._tracktype = tracktype\n if tracktype is not None:\n if 'bed' in tracktype.lower():\n tracktype = 'bigBed'\n elif 'wig' in tracktype.lower():\n tracktype = 'bigWig'\n 
self.params.update(constants.track_typespecific_fields[tracktype])", "response": "Sets the track type for this instance"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_params(self, **kw):\n for k, v in kw.items():\n if (k not in self.params) and (k not in self.specific_params):\n raise ParameterError('\"%s\" is not a valid parameter for %s'\n % (k, self.__class__.__name__))\n try:\n self.params[k].validate(v)\n except KeyError:\n self.specific_params[k].validate(v)\n\n self._orig_kwargs.update(kw)\n self.kwargs = self._orig_kwargs.copy()", "response": "Add parameters to the UCSC object."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef add_subgroups(self, subgroups):\n if subgroups is None:\n subgroups = {}\n assert isinstance(subgroups, dict)\n self.subgroups.update(subgroups)", "response": "Adds the given subgroups to the internal subgroups list for this composite track."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding a list of SubGroupDefinition objects to this composite.", "response": "def add_subgroups(self, subgroups):\n \"\"\"\n Add a list of SubGroupDefinition objects to this composite.\n\n Note that in contrast to :meth:`BaseTrack`, which takes a single\n dictionary indicating the particular subgroups for the track, this\n method takes a list of :class:`SubGroupDefinition` objects representing\n the allowed subgroups for the composite.\n\n :param subgroups:\n List of SubGroupDefinition objects.\n \"\"\"\n if subgroups is None:\n subgroups = {}\n _subgroups = {}\n for sg in subgroups:\n assert isinstance(sg, SubGroupDefinition)\n _subgroups[sg.name] = sg\n self.subgroups = _subgroups"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a child Track.", "response": "def add_subtrack(self, subtrack):\n \"\"\"\n Add a child :class:`Track`.\n \"\"\"\n 
self.add_child(subtrack)\n self.subtracks.append(subtrack)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_view(self, view):\n\n self.add_child(view)\n self.views.append(view)", "response": "Adds a ViewTrack object to this composite."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _str_subgroups(self):\n s = []\n\n i = 0\n\n # if there are any views, there must be a subGroup1 view View tag=val\n # as the first one. So create it automatically here\n if len(self.views) > 0:\n mapping = dict((i.view, i.view) for i in self.views)\n view_subgroup = SubGroupDefinition(\n name='view',\n label='Views',\n mapping=mapping)\n i += 1\n s.append('subGroup%s %s' % (i, view_subgroup))\n\n for subgroup in self.subgroups.values():\n i += 1\n s.append('subGroup%s %s' % (i, subgroup))\n return s", "response": "Renders subgroups to a list of strings"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding one or more tracks to this view.", "response": "def add_tracks(self, subtracks):\n \"\"\"\n Add one or more tracks to this view.\n\n subtracks : Track or iterable of Tracks\n A single Track instance or an iterable of them.\n \"\"\"\n if isinstance(subtracks, Track):\n subtracks = [subtracks]\n for subtrack in subtracks:\n subtrack.subgroups['view'] = self.view\n self.add_child(subtrack)\n self.subtracks.append(subtrack)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds one or more tracks.", "response": "def add_tracks(self, subtracks):\n \"\"\"\n Add one or more tracks.\n\n subtrack : Track or iterable of Tracks\n \"\"\"\n if isinstance(subtracks, BaseTrack):\n subtracks = [subtracks]\n for subtrack in subtracks:\n self.add_child(subtrack)\n self.subtracks.append(subtrack)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nparsing the scalars from the YAML file content to a 
dictionary of ScalarType s.", "response": "def _parse_scalars(scalars):\n \"\"\"Parse the scalars from the YAML file content to a dictionary of ScalarType(s).\n :return: A dictionary { 'full.scalar.label': ScalarType }\n \"\"\"\n scalar_dict = {}\n\n # Scalars are defined in a fixed two-level hierarchy within the definition file.\n # The first level contains the category name, while the second level contains the\n # probe name (e.g. \"category.name: probe: ...\").\n for category_name in scalars:\n category = scalars[category_name]\n\n for probe_name in category:\n # We found a scalar type. Go ahead and parse it.\n scalar_definition = category[probe_name]\n # We pass |strict_type_checks=False| as we don't want to do any check\n # server side. This includes skipping the checks for the required keys.\n scalar_info = ScalarType(category_name, probe_name, scalar_definition,\n strict_type_checks=False)\n scalar_dict[scalar_info.label] = scalar_info\n\n return scalar_dict"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef load_scalars(filename, strict_type_checks=True):\n\n # Parse the scalar definitions from the YAML file.\n scalars = None\n try:\n with open(filename, 'r') as f:\n scalars = yaml.safe_load(f)\n except IOError as e:\n raise ParserError('Error opening ' + filename + ': ' + e.message)\n except ValueError as e:\n raise ParserError('Error parsing scalars in {}: {}'\n '.\\nSee: {}'.format(filename, e.message, BASE_DOC_URL))\n\n scalar_list = []\n\n # Scalars are defined in a fixed two-level hierarchy within the definition file.\n # The first level contains the category name, while the second level contains the\n # probe name (e.g. 
\"category.name: probe: ...\").\n for category_name in scalars:\n category = scalars[category_name]\n\n # Make sure that the category has at least one probe in it.\n if not category or len(category) == 0:\n raise ParserError('Category \"{}\" must have at least one probe in it' +\n '.\\nSee: {}'.format(category_name, BASE_DOC_URL))\n\n for probe_name in category:\n # We found a scalar type. Go ahead and parse it.\n scalar_info = category[probe_name]\n scalar_list.append(\n ScalarType(category_name, probe_name, scalar_info, strict_type_checks))\n\n return scalar_list", "response": "Parses a YAML file containing the scalars definition."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef validate_names(self, category_name, probe_name):\n\n # Enforce a maximum length on category and probe names.\n MAX_NAME_LENGTH = 40\n for n in [category_name, probe_name]:\n if len(n) > MAX_NAME_LENGTH:\n raise ParserError((\"Name '{}' exceeds maximum name length of {} characters.\\n\"\n \"See: {}#the-yaml-definition-file\")\n .format(n, MAX_NAME_LENGTH, BASE_DOC_URL))\n\n def check_name(name, error_msg_prefix, allowed_char_regexp):\n # Check if we only have the allowed characters.\n chars_regxp = r'^[a-zA-Z0-9' + allowed_char_regexp + r']+$'\n if not re.search(chars_regxp, name):\n raise ParserError((error_msg_prefix + \" name must be alpha-numeric. Got: '{}'.\\n\"\n \"See: {}#the-yaml-definition-file\").format(name, BASE_DOC_URL))\n\n # Don't allow leading/trailing digits, '.' or '_'.\n if re.search(r'(^[\\d\\._])|([\\d\\._])$', name):\n raise ParserError((error_msg_prefix + \" name must not have a leading/trailing \"\n \"digit, a dot or underscore. 
Got: '{}'.\\n\"\n \" See: {}#the-yaml-definition-file\").format(name, BASE_DOC_URL))\n\n check_name(category_name, 'Category', r'\\.')\n check_name(probe_name, 'Probe', r'_')", "response": "Validate the names of the class and probe names."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef validate_values(self, definition):\n\n if not self._strict_type_checks:\n return\n\n # Validate the scalar kind.\n scalar_kind = definition.get('kind')\n if scalar_kind not in SCALAR_TYPES_MAP.keys():\n raise ParserError(self._name + ' - unknown scalar kind: ' + scalar_kind +\n '.\\nSee: {}'.format(BASE_DOC_URL))\n\n # Validate the collection policy.\n collection_policy = definition.get('release_channel_collection', None)\n if collection_policy and collection_policy not in ['opt-in', 'opt-out']:\n raise ParserError(self._name + ' - unknown collection policy: ' + collection_policy +\n '.\\nSee: {}#optional-fields'.format(BASE_DOC_URL))\n\n # Validate the cpp_guard.\n cpp_guard = definition.get('cpp_guard')\n if cpp_guard and re.match(r'\\W', cpp_guard):\n raise ParserError(self._name + ' - invalid cpp_guard: ' + cpp_guard +\n '.\\nSee: {}#optional-fields'.format(BASE_DOC_URL))\n\n # Validate record_in_processes.\n record_in_processes = definition.get('record_in_processes', [])\n for proc in record_in_processes:\n if not utils.is_valid_process_name(proc):\n raise ParserError(self._name + ' - unknown value in record_in_processes: ' + proc +\n '.\\nSee: {}'.format(BASE_DOC_URL))\n\n # Validate the expiration version.\n # Historical versions of Scalars.json may contain expiration versions\n # using the deprecated format 'N.Na1'. 
Those scripts set\n # self._strict_type_checks to false.\n expires = definition.get('expires')\n if not utils.validate_expiration_version(expires) and self._strict_type_checks:\n raise ParserError('{} - invalid expires: {}.\\nSee: {}#required-fields'\n .format(self._name, expires, BASE_DOC_URL))", "response": "This function checks that the fields have the correct values."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef add_child(self, child):\n child.parent = self\n self.children.append(child)\n return child", "response": "Adds a child to the current object."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add_parent(self, parent):\n parent.add_child(self)\n self.parent = parent\n return parent", "response": "Adds self as child of parent and returns parent."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef root(self, cls=None, level=0):\n if cls is None:\n if self.parent is None:\n return self, level\n else:\n if isinstance(self, cls):\n if not isinstance(self.parent, cls):\n return self, level\n\n if self.parent is None:\n return None, None\n\n return self.parent.root(cls, level - 1)", "response": "Returns the root HubComponent of the given class."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef leaves(self, cls, level=0, intermediate=False):\n if intermediate:\n if isinstance(self, cls):\n yield self, level\n elif len(self.children) == 0:\n if isinstance(self, cls):\n yield self, level\n else:\n raise StopIteration\n\n for child in self.children:\n for leaf, _level in child.leaves(cls, level + 1, intermediate=intermediate):\n yield leaf, _level", "response": "Returns an iterator of the HubComponent leaves that are of class cls."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nrender the object to file 
returning a list of created files.", "response": "def render(self, staging=None):\n \"\"\"\n Renders the object to file, returning a list of created files.\n\n Calls validation code, and, as long as each child is also a subclass of\n :class:`HubComponent`, the rendering is recursive.\n \"\"\"\n self.validate()\n created_files = OrderedDict()\n if staging is None:\n staging = tempfile.mkdtemp()\n this = self._render(staging)\n if this:\n created_files[repr(self)] = this\n for child in self.children:\n created_files[repr(child)] = child.render(staging)\n return created_files"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nsend an email to admin asking to approve user application", "response": "def send_request_email(\n authorised_text, authorised_role, authorised_persons, application,\n link, is_secret):\n \"\"\"Sends an email to admin asking to approve user application\"\"\"\n context = CONTEXT.copy()\n context['requester'] = application.applicant\n context['link'] = link\n context['is_secret'] = is_secret\n context['application'] = application\n context['authorised_text'] = authorised_text\n _send_request_email(\n context,\n authorised_role, authorised_persons,\n \"common_request\")"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsends an email inviting someone to create an account", "response": "def send_invite_email(application, link, is_secret):\n \"\"\" Sends an email inviting someone to create an account\"\"\"\n\n if not application.applicant.email:\n return\n\n context = CONTEXT.copy()\n context['receiver'] = application.applicant\n context['application'] = application\n context['link'] = link\n context['is_secret'] = is_secret\n\n to_email = application.applicant.email\n subject, body = render_email('common_invite', context)\n\n send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 
that\nsends an email informing person application is approved", "response": "def send_approved_email(\n application, created_person, created_account, link, is_secret):\n \"\"\"Sends an email informing person application is approved\"\"\"\n if not application.applicant.email:\n return\n\n context = CONTEXT.copy()\n context['receiver'] = application.applicant\n context['application'] = application\n context['created_person'] = created_person\n context['created_account'] = created_account\n context['link'] = link\n context['is_secret'] = is_secret\n subject, body = render_email('common_approved', context)\n to_email = application.applicant.email\n\n send_mail(subject, body, settings.ACCOUNTS_EMAIL, [to_email])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _add_person_to_group(person, group):\n from karaage.datastores import add_accounts_to_group\n from karaage.datastores import add_accounts_to_project\n from karaage.datastores import add_accounts_to_institute\n\n a_list = person.account_set\n add_accounts_to_group(a_list, group)\n for project in group.project_set.all():\n add_accounts_to_project(a_list, project)\n for institute in group.institute_set.all():\n add_accounts_to_institute(a_list, institute)", "response": "Call datastores after adding a person to a group."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncalling datastores after removing a person from a group.", "response": "def _remove_person_from_group(person, group):\n \"\"\" Call datastores after removing a person from a group. 
\"\"\"\n from karaage.datastores import remove_accounts_from_group\n from karaage.datastores import remove_accounts_from_project\n from karaage.datastores import remove_accounts_from_institute\n\n a_list = person.account_set\n remove_accounts_from_group(a_list, group)\n for project in group.project_set.all():\n remove_accounts_from_project(a_list, project)\n for institute in group.institute_set.all():\n remove_accounts_from_institute(a_list, institute)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nhooking that executes whenever the group members are changed.", "response": "def _members_changed(\n sender, instance, action, reverse, model, pk_set, **kwargs):\n \"\"\"\n Hook that executes whenever the group members are changed.\n \"\"\"\n if action == \"post_add\":\n if not reverse:\n group = instance\n for person in model.objects.filter(pk__in=pk_set):\n log.change(person, \"Added person to group %s\" % group)\n log.change(group, \"Added person %s to group\" % person)\n _add_person_to_group(person, group)\n else:\n person = instance\n for group in model.objects.filter(pk__in=pk_set):\n log.change(person, \"Added person to group %s\" % group)\n log.change(group, \"Added person %s to group\" % person)\n _add_person_to_group(person, group)\n\n elif action == \"post_remove\":\n if not reverse:\n group = instance\n for person in model.objects.filter(pk__in=pk_set):\n log.change(person, \"Removed person from group %s\" % group)\n log.change(group, \"Removed person %s from group\" % person)\n _remove_person_from_group(person, group)\n else:\n person = instance\n for group in model.objects.filter(pk__in=pk_set):\n log.change(person, \"Removed person from group %s\" % group)\n log.change(group, \"Removed person %s from group\" % person)\n _remove_person_from_group(person, group)\n\n elif action == \"pre_clear\":\n # This has to occur in pre_clear, not post_clear, as otherwise\n # we won't see what groups need to be removed.\n if not 
reverse:\n group = instance\n log.change(group, \"Removed all people from group\")\n for person in group.members.all():\n log.change(group, \"Removed person %s from group\" % person)\n _remove_person_from_group(person, group)\n else:\n person = instance\n log.change(person, \"Removed person from all groups\")\n for group in person.groups.all():\n log.change(group, \"Removed person %s from group\" % person)\n _remove_person_from_group(person, group)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ngive a sorted list of subgroups return a string appropriate to provide as a composite track s dimensions arg.", "response": "def dimensions_from_subgroups(s):\n \"\"\"\n Given a sorted list of subgroups, return a string appropriate to provide as\n a composite track's `dimensions` arg.\n\n Parameters\n ----------\n s : list of SubGroup objects (or anything with a `name` attribute)\n \"\"\"\n letters = 'XYABCDEFGHIJKLMNOPQRSTUVWZ'\n return ' '.join(['dim{0}={1}'.format(dim, sg.name) for dim, sg in zip(letters, s)])"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef filter_composite_from_subgroups(s):\n dims = []\n for letter, sg in zip('ABCDEFGHIJKLMNOPQRSTUVWZ', s[2:]):\n dims.append('dim{0}'.format(letter))\n if dims:\n return ' '.join(dims)", "response": "Given a sorted list of subgroups return a string appropriate to provide as\n the a composite track s filterComposite argument"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nconverting hex colors to RGB tuples", "response": "def hex2rgb(h):\n \"\"\"\n Convert hex colors to RGB tuples\n\n Parameters\n ----------\n h : str\n String hex color value\n\n >>> hex2rgb(\"#ff0033\")\n '255,0,51'\n \"\"\"\n if not h.startswith('#') or len(h) != 7:\n raise ValueError(\"Does not look like a hex color: '{0}'\".format(h))\n return ','.join(map(str, (\n int(h[1:3], 16),\n int(h[3:5], 16),\n int(h[5:7], 16),\n 
)))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef sanitize(s, strict=True):\n allowed = ''.join(\n [\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',\n 'abcdefghijklmnopqrstuvwxyz',\n '0123456789',\n ]\n )\n\n if not strict:\n allowed += '-_.'\n\n s = str(s).replace(' ', '_')\n\n return ''.join([i for i in s if i in allowed])", "response": "Sanitize a string.\n\n Spaces are converted to underscore; if strict=True they are then removed.\n\n Parameters\n ----------\n s : str\n String to sanitize\n\n strict : bool\n If True, only alphanumeric characters are allowed. If False, a limited\n set of additional characters (-._) will be allowed."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef show_rendered_files(results_dict):\n for k, v in results_dict.items():\n if isinstance(v, string_types):\n print(\"rendered file: %s (created by: %s)\" % (v, k))\n else:\n show_rendered_files(v)\n return", "response": "Prints the rendered files in a nested dictionary."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef print_rendered_results(results_dict):\n class _HubComponentEncoder(json.JSONEncoder):\n def default(self, o):\n if isinstance(o, base.HubComponent):\n return repr(o)\n return json.JSONEncoder.default(self, o)\n formatted = json.dumps(results_dict, indent=4, cls=_HubComponentEncoder)\n # the returned string contains lines with trailing spaces, which causes\n # doctests to fail. 
So fix that here.\n for s in formatted.splitlines():\n print(s.rstrip())", "response": "Pretty - prints the rendered results dictionary."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn list of example bigBed files", "response": "def example_bigbeds():\n \"\"\"\n Returns list of example bigBed files\n \"\"\"\n hits = []\n d = data_dir()\n for fn in os.listdir(d):\n fn = os.path.join(d, fn)\n if os.path.splitext(fn)[-1] == '.bigBed':\n hits.append(os.path.abspath(fn))\n return hits"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget color number index.", "response": "def get_colour(index):\n \"\"\" get color number index. \"\"\"\n colours = [\n 'red', 'blue', 'green', 'pink',\n 'yellow', 'magenta', 'orange', 'cyan',\n ]\n default_colour = 'purple'\n if index < len(colours):\n return colours[index]\n else:\n return default_colour"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngenerating a bar graph for a project.", "response": "def get_project_trend_graph_url(project, start, end):\n \"\"\"Generates a bar graph for a project. 
\"\"\"\n\n filename = get_project_trend_graph_filename(project, start, end)\n urls = {\n 'graph_url': urlparse.urljoin(GRAPH_URL, filename + \".png\"),\n 'data_url': urlparse.urljoin(GRAPH_URL, filename + \".csv\"),\n }\n\n return urls"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_institute_graph_url(start, end):\n\n filename = get_institute_graph_filename(start, end)\n urls = {\n 'graph_url': urlparse.urljoin(GRAPH_URL, filename + \".png\"),\n 'data_url': urlparse.urljoin(GRAPH_URL, filename + \".csv\"),\n }\n\n return urls", "response": "Pie chart comparing institutes usage."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns a dictionary of urls to the trend graph for a given start and end time.", "response": "def get_trend_graph_url(start, end):\n \"\"\" Total trend graph for machine category. \"\"\"\n\n filename = get_trend_graph_filename(start, end)\n urls = {\n 'graph_url': urlparse.urljoin(GRAPH_URL, filename + \".png\"),\n 'data_url': urlparse.urljoin(GRAPH_URL, filename + \".csv\"),\n }\n\n return urls"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_institute_trend_graph_url(institute, start, end):\n\n filename = get_institute_trend_graph_filename(institute, start, end)\n urls = {\n 'graph_url': urlparse.urljoin(GRAPH_URL, filename + \".png\"),\n 'data_url': urlparse.urljoin(GRAPH_URL, filename + \".csv\"),\n }\n\n return urls", "response": "Returns the URL of the Institute trend graph for the given machine category."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_institutes_trend_graph_urls(start, end):\n\n graph_list = []\n for institute in Institute.objects.all():\n urls = get_institute_trend_graph_url(institute, start, end)\n urls['institute'] = institute\n graph_list.append(urls)\n\n return 
graph_list", "response": "Get all institute trend graphs."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef common(request):\n ctx = {\n 'SHIB_SUPPORTED': settings.SHIB_SUPPORTED,\n 'org_name': settings.ACCOUNTS_ORG_NAME,\n 'accounts_email': settings.ACCOUNTS_EMAIL,\n 'is_admin': is_admin(request),\n 'kgversion': __version__,\n 'BUILD_DATE': settings.BUILD_DATE,\n 'VCS_REF': settings.VCS_REF,\n 'SLURM_VER': settings.SLURM_VER,\n }\n\n return ctx", "response": "Set context with common variables."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef context_processor(request):\n from .models import Application\n ctx = {}\n\n if request.user.is_authenticated:\n person = request.user\n my_applications = Application.objects.get_for_applicant(person)\n requires_attention = Application.objects.requires_attention(request)\n\n ctx['pending_applications'] = (\n my_applications.count() + requires_attention.count()\n )\n\n return ctx", "response": "Set context with common variables."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates a new user in the database.", "response": "def _create_user(\n self, username, email, short_name, full_name,\n institute, password, is_admin, **extra_fields):\n \"\"\"Creates a new active person. \"\"\"\n\n # Create Person\n person = self.model(\n username=username, email=email,\n short_name=short_name, full_name=full_name,\n is_admin=is_admin,\n institute=institute,\n **extra_fields\n )\n person.set_password(password)\n person.save()\n return person"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create_user(\n self, username, email, short_name, full_name,\n institute, password=None, **extra_fields):\n \"\"\" Creates a new ordinary person. 
\"\"\"\n return self._create_user(\n username=username, email=email,\n short_name=short_name, full_name=full_name,\n institute=institute, password=password,\n is_admin=False, **extra_fields)", "response": "Creates a new user in the database."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create_superuser(\n self, username, email, short_name, full_name,\n institute, password, **extra_fields):\n \"\"\" Creates a new person with super powers. \"\"\"\n return self._create_user(\n username=username, email=email,\n institute=institute, password=password,\n short_name=short_name, full_name=full_name,\n is_admin=True, **extra_fields)", "response": "Creates a new person with super powers."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef application_state(context, application):\n new_context = {\n 'roles': context['roles'],\n 'org_name': context['org_name'],\n 'application': application,\n }\n nodelist = template.loader.get_template(\n 'kgapplications/%s_common_state.html' % application.type)\n output = nodelist.render(new_context)\n return output", "response": "Render current state of application verbose."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nrender current state of application.", "response": "def application_simple_state(context, application):\n \"\"\" Render current state of application, verbose. 
\"\"\"\n state_machine = get_state_machine(application)\n state = state_machine.get_state(application)\n return state.name"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef do_application_actions_plus(parser, token):\n nodelist = parser.parse(('end_application_actions',))\n parser.delete_first_token()\n return ApplicationActionsPlus(nodelist)", "response": "Render actions available with extra text."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nwrap around subprocess. run with unicode decoding of output.", "response": "def run(cmds, **kwargs):\n \"\"\"\n Wrapper around subprocess.run, with unicode decoding of output.\n\n Additional kwargs are passed to subprocess.run.\n \"\"\"\n proc = sp.Popen(cmds, bufsize=-1, stdout=sp.PIPE, stderr=sp.STDOUT,\n close_fds=sys.platform != 'win32')\n for line in proc.stdout:\n print(line[:-1].decode())\n retcode = proc.wait()\n if retcode:\n raise sp.CalledProcessError(retcode, cmds)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef symlink(target, linkname):\n target = os.path.abspath(target)\n linkname = os.path.abspath(linkname)\n if not os.path.exists(target):\n raise ValueError(\"target {} not found\".format(target))\n\n link_dir = os.path.dirname(linkname)\n if not os.path.exists(link_dir):\n os.makedirs(link_dir)\n\n run(['ln', '-s', '-f', target, linkname])\n return linkname", "response": "Create a symlink to target and linkname."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nuploads a file or directory via rsync.", "response": "def upload(host, user, local_dir, remote_dir, rsync_options=RSYNC_OPTIONS):\n \"\"\"\n Upload a file or directory via rsync.\n\n Parameters\n ----------\n host : str or None\n If None, omit the host part and just transfer locally\n\n user : str or None\n If None, omit the user part\n\n local_dir : str\n If 
a directory, a trailing \"/\" will be added.\n\n remote_dir : str\n If a directory, a trailing \"/\" will be added.\n \"\"\"\n if user is None:\n user = \"\"\n else:\n user = user + \"@\"\n if host is None or host == 'localhost':\n host = \"\"\n else:\n host = host + \":\"\n\n if not local_dir.endswith('/'):\n local_dir = local_dir + '/'\n\n if not remote_dir.endswith('/'):\n remote_dir = remote_dir + '/'\n\n remote_string = '{user}{host}{remote_dir}'.format(**locals())\n cmds = ['rsync']\n cmds += shlex.split(rsync_options)\n cmds += [local_dir, remote_string]\n run(cmds)\n return [remote_string]"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef local_link(local_fn, remote_fn, staging):\n linkname = os.path.join(staging, remote_fn.lstrip(os.path.sep))\n return symlink(local_fn, linkname)", "response": "Creates a symlink to a local staging area."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nstages an object to the staging directory.", "response": "def stage(x, staging):\n \"\"\"\n Stage an object to the `staging` directory.\n\n If the object is a Track and is one of the types that needs an index file\n (bam, vcfTabix), then the index file will be staged as well.\n\n Returns a list of the linknames created.\n \"\"\"\n linknames = []\n\n # Objects that don't represent a file shouldn't be staged\n non_file_objects = (\n track.ViewTrack,\n track.CompositeTrack,\n track.AggregateTrack,\n track.SuperTrack,\n genome.Genome,\n )\n if isinstance(x, non_file_objects):\n return linknames\n\n # If it's an object representing a file, then render it.\n #\n # Track objects don't represent files, but their documentation does\n linknames.append(x.render(staging))\n\n if hasattr(x, 'source') and hasattr(x, 'filename'):\n def _stg(x, ext=''):\n # A remote track hosted elsewhere does not need staging. 
This is\n # defined by a track with a url, but no source or filename.\n if (\n x.source is None\n and x.filename is None\n and getattr(x, 'url', None) is not None\n ):\n return\n\n linknames.append(\n local_link(x.source + ext, x.filename + ext, staging)\n )\n\n _stg(x)\n\n if isinstance(x, track.Track):\n if x.tracktype == 'bam':\n _stg(x, ext='.bai')\n if x.tracktype == 'vcfTabix':\n _stg(x, ext='.tbi')\n\n if isinstance(x, track.CompositeTrack):\n if x._html:\n _stg(x._html)\n\n return linknames"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nstage a hub by symlinking all its connected files to a local directory.", "response": "def stage_hub(hub, staging=None):\n \"\"\"\n Stage a hub by symlinking all its connected files to a local directory.\n \"\"\"\n linknames = []\n if staging is None:\n staging = tempfile.mkdtemp()\n for obj, level in hub.leaves(base.HubComponent, intermediate=True):\n linknames.extend(stage(obj, staging))\n\n return staging, linknames"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nupload a hub to the remote host.", "response": "def upload_hub(hub, host, remote_dir, user=None, port=22, rsync_options=RSYNC_OPTIONS, staging=None):\n \"\"\"\n Renders, stages, and uploads a hub.\n \"\"\"\n hub.render()\n if staging is None:\n staging = tempfile.mkdtemp()\n staging, linknames = stage_hub(hub, staging=staging)\n local_dir = os.path.join(staging)\n upload(host, user, local_dir=local_dir, remote_dir=remote_dir, rsync_options=rsync_options)\n return linknames"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_project_members(machine, project_id):\n try:\n project = Project.objects.get(pid=project_id)\n except Project.DoesNotExist:\n return 'Project not found'\n\n return [x.username for x in project.group.members.all()]", "response": "Returns list of usernames given a project id"} {"SOURCE": "codesearchnet", 
"instruction": "Explain what the following Python 3 code does\ndef get_projects(machine):\n\n query = Project.active.all()\n return [x.pid for x in query]", "response": "Returns list of project ids that are active in the machine"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_project(username, project, machine_name=None):\n\n try:\n account = Account.objects.get(\n username=username,\n date_deleted__isnull=True)\n except Account.DoesNotExist:\n return \"Account '%s' not found\" % username\n\n if project is None:\n project = account.default_project\n else:\n try:\n project = Project.objects.get(pid=project)\n except Project.DoesNotExist:\n project = account.default_project\n\n if project is None:\n return \"None\"\n\n if account.person not in project.group.members.all():\n project = account.default_project\n\n if project is None:\n return \"None\"\n\n if account.person not in project.group.members.all():\n return \"None\"\n\n return project.pid", "response": "Get the project for a user"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the list of projects a user is part of", "response": "def get_users_projects(user):\n \"\"\"\n List projects a user is part of\n \"\"\"\n person = user\n projects = person.projects.filter(is_active=True)\n return 0, [x.pid for x in projects]"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ndirecting to a template.", "response": "def direct_to_template(\n request, template, extra_context=None, mimetype=None, **kwargs):\n \"\"\"\n Render a given template with any extra URL parameters in the context as\n ``{{ params }}``.\n \"\"\"\n if extra_context is None:\n extra_context = {}\n dictionary = {'params': kwargs}\n for key, value in extra_context.items():\n if callable(value):\n dictionary[key] = value()\n else:\n dictionary[key] = value\n t = loader.get_template(template)\n return 
HttpResponse(\n t.render(context=dictionary, request=request),\n content_type=mimetype)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef assert_strong_password(username, password, old_password=None):\n\n # test the length\n try:\n minlength = settings.MIN_PASSWORD_LENGTH\n except AttributeError:\n minlength = 12\n if len(password) < minlength:\n raise ValueError(\n \"Password must be at least %s characters long\" % minlength)\n\n if username is not None and username in password:\n raise ValueError(\"Password contains username\")\n\n return _assert_password(password, old_password)", "response": "Raises ValueError if the password isn t strong. Returns the old_password if the old_password is None."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef save_account(self, account: Account) -> None:\n person = account.person\n if self._primary_group == 'institute':\n lgroup = self._get_group(person.institute.group.name)\n elif self._primary_group == 'default_project':\n if account.default_project is None:\n lgroup = self._get_group(self._default_primary_group)\n else:\n lgroup = self._get_group(account.default_project.group.name)\n else:\n raise RuntimeError(\"Unknown value of PRIMARY_GROUP.\")\n\n if account.default_project is None:\n default_project = \"none\"\n else:\n default_project = account.default_project.pid\n\n try:\n luser = self._get_account(account.username)\n changes = changeset(luser, {})\n new_user = False\n except ObjectDoesNotExist:\n new_user = True\n luser = self._account_class()\n changes = changeset(luser, {\n 'uid': account.username\n })\n\n changes = changes.merge({\n 'gidNumber': lgroup['gidNumber'],\n 'givenName': person.first_name,\n 'sn': person.last_name,\n 'telephoneNumber': _str_or_none(person.telephone),\n 'mail': _str_or_none(person.email),\n 'title': _str_or_none(person.title),\n 'o': person.institute.name,\n 
'cn': person.full_name,\n 'default_project': default_project,\n 'loginShell': account.shell,\n 'locked': account.is_locked()\n })\n save(changes, database=self._database)\n\n if new_user:\n # add all groups\n for group in account.person.groups.all():\n self.add_account_to_group(account, group)", "response": "Save the account to the database."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the account s password.", "response": "def set_account_password(self, account, raw_password):\n \"\"\" Account's password was changed. \"\"\"\n luser = self._get_account(account.username)\n changes = changeset(luser, {\n 'password': raw_password,\n })\n save(changes, database=self._database)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nchanges the username of an account.", "response": "def set_account_username(self, account, old_username, new_username):\n \"\"\" Account's username was changed. \"\"\"\n luser = self._get_account(old_username)\n rename(luser, database=self._database, uid=new_username)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds account to group.", "response": "def add_account_to_group(self, account, group):\n \"\"\" Add account to group. \"\"\"\n lgroup: OpenldapGroup = self._get_group(group.name)\n person: OpenldapAccount = self._get_account(account.username)\n\n changes = changeset(lgroup, {})\n changes = lgroup.add_member(changes, person)\n save(changes, database=self._database)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the account details.", "response": "def get_account_details(self, account):\n \"\"\" Get the account details. 
\"\"\"\n result = {}\n try:\n luser = self._get_account(account.username)\n luser = preload(luser, database=self._database)\n except ObjectDoesNotExist:\n return result\n\n for i, j in luser.items():\n if i != 'userPassword' and j is not None:\n result[i] = j\n\n return result"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef save_group(self, group):\n # If group already exists, take over existing group rather then error.\n try:\n lgroup = self._get_group(group.name)\n changes = changeset(lgroup, {})\n except ObjectDoesNotExist:\n lgroup = self._group_class()\n changes = changeset(lgroup, {\n 'cn': group.name,\n })\n\n changes = changes.merge({\n 'description': group.description\n })\n save(changes, database=self._database)", "response": "Save group to database."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting the group details.", "response": "def get_group_details(self, group):\n \"\"\" Get the group details. 
\"\"\"\n result = {}\n try:\n lgroup = self._get_group(group.name)\n lgroup = preload(lgroup, database=self._database)\n except ObjectDoesNotExist:\n return result\n\n for i, j in lgroup.items():\n if j is not None:\n result[i] = j\n\n return result"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _rank(sample):\n rank = 1\n ranks = {}\n\n for k in sorted(sample.keys()):\n n = sample[k]\n ranks[k] = rank + (n - 1) / 2\n rank += n\n\n return ranks", "response": "Assign numeric ranks to all values in the sample."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the tie correction value for U.", "response": "def _tie_correct(sample):\n \"\"\"\n Returns the tie correction value for U.\n\n See: https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.tiecorrect.html\n\n \"\"\"\n tc = 0\n n = sum(sample.values())\n\n if n < 2:\n return 1.0 # Avoid a ``ZeroDivisionError``.\n\n for k in sorted(sample.keys()):\n tc += math.pow(sample[k], 3) - sample[k]\n tc = 1 - tc / (math.pow(n, 3) - n)\n\n return tc"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the area under the Gaussian probability density function integrated from minus infinity to x.", "response": "def ndtr(a):\n \"\"\"\n Returns the area under the Gaussian probability density function,\n integrated from minus infinity to x.\n\n See: https://docs.scipy.org/doc/scipy/reference/generated/scipy.special.ndtr.html#scipy.special.ndtr\n\n \"\"\"\n sqrth = math.sqrt(2) / 2\n x = float(a) * sqrth\n z = abs(x)\n if z < sqrth:\n y = 0.5 + 0.5 * math.erf(x)\n else:\n y = 0.5 * math.erfc(z)\n if x > 0:\n y = 1 - y\n return y"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef mann_whitney_u(sample1, sample2, use_continuity=True):\n # Merge dictionaries, adding values if keys match.\n sample = 
sample1.copy()\n for k, v in sample2.items():\n sample[k] = sample.get(k, 0) + v\n\n # Create a ranking dictionary using same keys for lookups.\n ranks = _rank(sample)\n\n sum_of_ranks = sum([sample1[k] * ranks[k] for k, v in sample1.items()])\n n1 = sum(sample1.values())\n n2 = sum(sample2.values())\n\n # Calculate Mann-Whitney U for both samples.\n u1 = sum_of_ranks - (n1 * (n1 + 1)) / 2\n u2 = n1 * n2 - u1\n\n tie_correction = _tie_correct(sample)\n if tie_correction == 0:\n raise ValueError('All provided sample values are identical.')\n\n sd_u = math.sqrt(tie_correction * n1 * n2 * (n1 + n2 + 1) / 12.0)\n mean_rank = n1 * n2 / 2.0 + 0.5 * use_continuity\n z = abs((max(u1, u2) - mean_rank) / sd_u)\n\n return mwu_result(min(u1, u2), ndtr(-z))", "response": "Compute the Mann - Whitney U test on both samples."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef deprecated(func):\n def newFunc(*args, **kwargs):\n print(\"Call to deprecated function %s.\" % func.__name__)\n return func(*args, **kwargs)\n newFunc.__name__ = func.__name__\n newFunc.__doc__ = func.__doc__\n newFunc.__dict__.update(func.__dict__)\n return newFunc", "response": "This is a decorator which can be used to mark functions\n as deprecated. 
It will result in a warning being emitted\n when the function is used."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning an RDD of Telemetry submissions for a given filtering criteria.", "response": "def get_pings(sc, app=None, build_id=None, channel=None, doc_type='saved_session',\n fraction=1.0, schema=None, source_name='telemetry', source_version='4',\n submission_date=None, version=None):\n \"\"\" Returns a RDD of Telemetry submissions for a given filtering criteria.\n\n :param sc: an instance of SparkContext\n :param app: an application name, e.g.: \"Firefox\"\n :param channel: a channel name, e.g.: \"nightly\"\n :param version: the application version, e.g.: \"40.0a1\"\n :param build_id: a build_id or a range of build_ids, e.g.:\n \"20150601000000\" or (\"20150601000000\", \"20150610999999\")\n :param submission_date: a submission date or a range of submission dates, e.g:\n \"20150601\" or (\"20150601\", \"20150610\")\n :param source_name: source name, set to \"telemetry\" by default\n :param source_version: source version, set to \"4\" by default\n :param doc_type: ping type, set to \"saved_session\" by default\n :param schema: (deprecated) version of the schema to use\n :param fraction: the fraction of pings to return, set to 1.0 by default\n \"\"\"\n if schema:\n print(\"The 'schema' parameter is deprecated. 
\"\n \"Version 4 is now the only schema supported.\")\n if schema != \"v4\":\n raise ValueError(\"Invalid schema version\")\n\n dataset = Dataset.from_source('telemetry')\n\n filters = (\n ('docType', doc_type),\n ('sourceName', source_name),\n ('sourceVersion', source_version),\n ('appName', app),\n ('appUpdateChannel', channel),\n ('appVersion', version),\n )\n\n for key, condition in filters:\n if condition and condition != '*':\n dataset = dataset.where(**{key: condition})\n\n # build_id and submission_date can be either strings or tuples or lists,\n # so they deserve a special treatment\n special_cases = dict(appBuildId=build_id, submissionDate=submission_date)\n\n def range_compare(min_val, max_val, val):\n return min_val <= val <= max_val\n\n for key, value in iteritems(special_cases):\n if value is not None and value != '*':\n if isinstance(value, string_types):\n condition = value\n elif isinstance(value, (list, tuple)) and len(value) == 2:\n start, end = value\n condition = partial(range_compare, start, end)\n else:\n raise ValueError(('{} must be either a string or a 2 elements '\n 'list/tuple'. format(key)))\n dataset = dataset.where(**{key: condition})\n\n return dataset.records(sc, sample=fraction)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets properties of a list of pings.", "response": "def get_pings_properties(pings, paths, only_median=False, with_processes=False,\n histograms_url=None, additional_histograms=None):\n \"\"\"\n Returns a RDD of a subset of properties of pings. 
Child histograms are\n automatically merged with the parent histogram.\n\n If one of the paths points to a keyedHistogram name without supplying the\n actual key, returns a dict of all available subhistograms for that property.\n\n :param with_processes: should separate parent and child histograms be\n included as well?\n :param paths: paths to properties in the payload, with levels separated by \"/\".\n These can be supplied either as a list, eg.\n [\"application/channel\", \"payload/info/subsessionStartDate\"],\n or as the values of a dict keyed by custom identifiers, eg.\n {\"channel\": \"application/channel\", \"ssd\": \"payload/info/subsessionStartDate\"}.\n :param histograms_url: see histogram.Histogram constructor\n :param additional_histograms: see histogram.Histogram constructor\n\n The returned RDD contains a dict for each ping with the required properties as values,\n keyed by the original paths (if 'paths' is a list) or the custom identifier keys\n (if 'paths' is a dict).\n \"\"\"\n if isinstance(pings.first(), binary_type):\n pings = pings.map(lambda p: json.loads(p.decode('utf-8')))\n\n if isinstance(paths, str):\n paths = [paths]\n\n # Use '/' as dots can appear in keyed histograms\n if isinstance(paths, dict):\n paths = [(prop_name, path.split(\"/\")) for prop_name, path in iteritems(paths)]\n else:\n paths = [(path, path.split(\"/\")) for path in paths]\n\n return pings.map(lambda p: _get_ping_properties(p, paths, only_median,\n with_processes,\n histograms_url,\n additional_histograms)) \\\n .filter(lambda p: p)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_one_ping_per_client(pings):\n if isinstance(pings.first(), binary_type):\n pings = pings.map(lambda p: json.loads(p.decode('utf-8')))\n\n filtered = pings.filter(lambda p: \"clientID\" in p or \"clientId\" in p)\n\n if not filtered:\n raise ValueError(\"Missing clientID/clientId attribute.\")\n\n if \"clientID\" in 
filtered.first():\n client_id = \"clientID\" # v2\n else:\n client_id = \"clientId\" # v4\n\n return filtered.map(lambda p: (p[client_id], p)) \\\n .reduceByKey(lambda p1, p2: p1) \\\n .map(lambda p: p[1])", "response": "Returns a single ping for each client in the RDD."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef admin_required(function=None):\n def check_perms(user):\n # if user not logged in, show login form\n if not user.is_authenticated:\n return False\n # if this site doesn't allow admin access, fail\n if settings.ADMIN_IGNORED:\n raise PermissionDenied\n # check if the user has admin rights\n if not user.is_admin:\n raise PermissionDenied\n return True\n\n actual_decorator = user_passes_test(check_perms, login_url=_login_url)\n if function:\n return actual_decorator(function)\n return actual_decorator", "response": "Decorator for views that checks that the user is an administrator."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef xmlrpc_machine_required(function=None):\n def actual_decorator(func):\n def wrapper(machine_name, password, *args):\n from django_xmlrpc.decorators import AuthenticationFailedException\n from karaage.machines.models import Machine\n machine = Machine.objects.authenticate(machine_name, password)\n if machine is None:\n raise AuthenticationFailedException\n return func(machine, *args)\n\n if hasattr(func, '_xmlrpc_signature'):\n sig = func._xmlrpc_signature\n sig['args'] = (['string'] * 2) + sig['args']\n wrapper._xmlrpc_signature = sig\n\n if func.__doc__:\n wrapper.__doc__ = func.__doc__ + \\\n \"\\nNote: Machine authentication is required.\"\n return wrapper\n\n if function:\n return actual_decorator(function)\n return actual_decorator", "response": "Decorator for views that checks that the user is logged in redirecting\n to the log - in page if necessary."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a 
brief explanation for the following Python 3 code\ndef _call(self, command, ignore_errors=None):\n if ignore_errors is None:\n ignore_errors = []\n cmd = []\n cmd.extend(self._prefix)\n cmd.extend([self._path, \"-iP\"])\n cmd.extend(command)\n command = cmd\n\n logger.debug(\"Cmd %s\" % command)\n null = open('/dev/null', 'w')\n retcode = subprocess.call(command, stdout=null, stderr=null)\n null.close()\n\n if retcode in ignore_errors:\n logger.debug(\n \"<-- Cmd %s returned %d (ignored)\" % (command, retcode))\n return\n\n if retcode:\n logger.error(\n \"<-- Cmd %s returned: %d (error)\" % (command, retcode))\n raise subprocess.CalledProcessError(retcode, command)\n\n logger.debug(\"<-- Returned %d (good)\" % retcode)\n return", "response": "Call remote command with logging."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nread CSV delimited input from Slurm.", "response": "def _read_output(self, command):\n \"\"\" Read CSV delimited input from Slurm. \"\"\"\n cmd = []\n cmd.extend(self._prefix)\n cmd.extend([self._path, \"-iP\"])\n cmd.extend(command)\n command = cmd\n\n logger.debug(\"Cmd %s\" % command)\n null = open('/dev/null', 'w')\n process = subprocess.Popen(\n command, stdout=subprocess.PIPE, stderr=null)\n null.close()\n\n results = []\n reader = csv.reader(_input_csv(process.stdout), delimiter=str(\"|\"))\n\n try:\n headers = next(reader)\n logger.debug(\"<-- headers %s\" % headers)\n except StopIteration:\n logger.debug(\"Cmd %s headers not found\" % command)\n headers = []\n\n for row in reader:\n _output_csv(row)\n\n logger.debug(\"<-- row %s\" % row)\n this_row = {}\n\n i = 0\n for i in range(0, len(headers)):\n key = headers[i]\n value = row[i]\n this_row[key] = value\n\n results.append(this_row)\n\n process.stdout.close()\n retcode = process.wait()\n if retcode != 0:\n logger.error(\"<-- Cmd %s returned %d (error)\" % (command, retcode))\n raise subprocess.CalledProcessError(retcode, command)\n\n if len(headers) == 
0:\n logger.error(\"Cmd %s didn't return any headers.\" % command)\n raise RuntimeError(\"Cmd %s didn't return any headers.\" % command)\n\n logger.debug(\"<-- Returned: %d (good)\" % retcode)\n return results"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_project(self, projectname):\n cmd = [\"list\", \"accounts\", \"where\", \"name=%s\" % projectname]\n results = self._read_output(cmd)\n\n if len(results) == 0:\n return None\n elif len(results) > 1:\n logger.error(\n \"Command returned multiple results for '%s'.\" % projectname)\n raise RuntimeError(\n \"Command returned multiple results for '%s'.\" % projectname)\n\n the_result = results[0]\n the_project = the_result[\"Account\"]\n if projectname.lower() != the_project.lower():\n logger.error(\n \"We expected projectname '%s' \"\n \"but got projectname '%s'.\" % (projectname, the_project))\n raise RuntimeError(\n \"We expected projectname '%s' \"\n \"but got projectname '%s'.\" % (projectname, the_project))\n\n return the_result", "response": "Get the project details from Slurm."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_users_in_project(self, projectname):\n cmd = [\"list\", \"assoc\", \"where\", \"account=%s\" % projectname]\n results = self._read_output(cmd)\n\n user_list = []\n for result in results:\n if result[\"User\"] != \"\":\n user_list.append(result[\"User\"])\n return user_list", "response": "Get list of users in project from Slurm."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting list of projects in user from Slurm.", "response": "def get_projects_in_user(self, username):\n \"\"\" Get list of projects in user from Slurm. 
\"\"\"\n cmd = [\"list\", \"assoc\", \"where\", \"user=%s\" % username]\n results = self._read_output(cmd)\n\n project_list = []\n for result in results:\n project_list.append(result[\"Account\"])\n return project_list"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _save_account(self, account, username):\n\n # retrieve default project, or use null project if none\n default_project_name = self._null_project\n if account.default_project is not None:\n default_project_name = account.default_project.pid\n\n # account created\n # account updated\n\n ds_user = self.get_user(username)\n if account.date_deleted is None:\n # date_deleted is not set, user should exist\n logger.debug(\"account is active\")\n\n if ds_user is None:\n # create user if doesn't exist\n self._call([\n \"add\", \"user\",\n \"accounts=%s\" % default_project_name,\n \"defaultaccount=%s\" % default_project_name,\n \"name=%s\" % username])\n else:\n # or just set default project\n self._call([\n \"modify\", \"user\",\n \"set\", \"defaultaccount=%s\" % default_project_name,\n \"where\", \"name=%s\" % username])\n\n # update user meta information\n\n # add rest of projects user belongs to\n slurm_projects = self.get_projects_in_user(username)\n slurm_projects = [project.lower() for project in slurm_projects]\n slurm_projects = set(slurm_projects)\n for project in account.person.projects.all():\n if project.pid.lower() not in slurm_projects:\n self._call([\n \"add\", \"user\",\n \"name=%s\" % username,\n \"accounts=%s\" % project.pid])\n else:\n # date_deleted is not set, user should not exist\n logger.debug(\"account is not active\")\n self._delete_account(username)\n\n return", "response": "Called when account is created or updated with username override."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncalling when username is deleted. 
With username override.", "response": "def _delete_account(self, username):\n \"\"\" Called when account is deleted. With username override. \"\"\"\n\n # account deleted\n\n ds_user = self.get_user(username)\n if ds_user is not None:\n self._call([\"delete\", \"user\", \"name=%s\" % username])\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef save_project(self, project):\n pid = project.pid\n\n # project created\n # project updated\n\n if project.is_active:\n # project is not deleted\n logger.debug(\"project is active\")\n ds_project = self.get_project(pid)\n if ds_project is None:\n self._call([\"add\", \"account\", \"name=%s\" % pid] + self._add_account_extra)\n\n # update project meta information\n name = self._truncate(project.name, 40)\n self._call([\n \"modify\", \"account\",\n \"set\", \"Description=%s\" % self._filter_string(name),\n \"where\", \"name=%s\" % pid])\n self._call([\n \"modify\", \"account\",\n \"set\", \"Organization=%s\"\n % self._filter_string(project.institute.name),\n \"where\", \"name=%s\" % pid])\n else:\n # project is deleted\n logger.debug(\"project is not active\")\n ds_project = self.get_project(pid)\n if ds_project is not None:\n self._call([\"delete\", \"account\", \"name=%s\" % pid])\n\n return", "response": "Save the project meta information to the database."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncalling when project is deleted.", "response": "def delete_project(self, project):\n \"\"\" Called when project is deleted. 
\"\"\"\n pid = project.pid\n\n # project deleted\n\n ds_project = self.get_project(pid)\n if ds_project is not None:\n self._call([\"delete\", \"account\", \"name=%s\" % pid])\n\n return"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_query(self, q, request):\n return Project.objects.filter(\n Q(pid__icontains=q)\n | Q(name__icontains=q)\n )", "response": "return a query set for the given query string"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds a track or iterable of tracks.", "response": "def add_tracks(self, track):\n \"\"\"\n Add a track or iterable of tracks.\n\n Parameters\n ----------\n\n track : iterable or Track\n Iterable of :class:`Track` objects, or a single :class:`Track`\n object.\n \"\"\"\n from trackhub import BaseTrack\n if isinstance(track, BaseTrack):\n self.add_child(track)\n self._tracks.append(track)\n else:\n for t in track:\n self.add_child(t)\n self._tracks.append(t)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef nicepass(alpha=8, numeric=4):\n import string\n import random\n vowels = ['a', 'e', 'i', 'o', 'u']\n consonants = [a for a in string.ascii_lowercase if a not in vowels]\n digits = string.digits\n\n # utility functions\n def a_part(slen):\n ret = ''\n for i in range(slen):\n if i % 2 == 0:\n randid = random.randint(0, 20) # number of consonants\n ret += consonants[randid]\n else:\n randid = random.randint(0, 4) # number of vowels\n ret += vowels[randid]\n return ret\n\n def n_part(slen):\n ret = ''\n for i in range(slen):\n randid = random.randint(0, 9) # number of digits\n ret += digits[randid]\n return ret\n\n ####\n fpl = alpha / 2\n if alpha % 2:\n fpl = int(alpha / 2) + 1\n lpl = alpha - fpl\n\n start = a_part(fpl)\n mid = n_part(numeric)\n end = a_part(lpl)\n\n return \"%s%s%s\" % (start, mid, end)", "response": "returns a human - readble password"} {"SOURCE": 
"codesearchnet", "instruction": "Write a Python 3 function for\nreturning a query set for the query string q", "response": "def get_query(self, q, request):\n \"\"\" return a query set searching for the query string q\n either implement this method yourself or set the search_field\n in the LookupChannel class definition\n \"\"\"\n return Person.objects.filter(\n Q(username__icontains=q)\n | Q(short_name__icontains=q)\n | Q(full_name__icontains=q)\n )"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a query set for the given query string", "response": "def get_query(self, q, request):\n \"\"\" return a query set searching for the query string q\n either implement this method yourself or set the search_field\n in the LookupChannel class definition\n \"\"\"\n return Group.objects.filter(\n Q(name__icontains=q)\n | Q(description__icontains=q)\n )"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nformatting the match item for display in the dropdown", "response": "def format_match(self, obj):\n \"\"\"\n (HTML) formatted item for display in the dropdown\n \"\"\"\n result = [escape(obj.name)]\n\n if obj.description:\n result.append(escape(obj.description))\n\n return \" \".join(result)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn True if the object is a minimum Logger API", "response": "def loggable(obj):\n \"\"\"Return \"True\" if the obj implements the minimum Logger API\n required by the 'trace' decorator.\n \"\"\"\n if isinstance(obj, logging.Logger):\n return True\n else:\n return (inspect.isclass(obj)\n and inspect.ismethod(getattr(obj, 'debug', None))\n and inspect.ismethod(getattr(obj, 'isEnabledFor', None))\n and inspect.ismethod(getattr(obj, 'setLevel', None)))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _formatter_self(name, value):\n __mname = value.__module__\n if 
__mname != '__main__':\n return '%s = <%s.%s object at 0x%x>' \\\n % (name, __mname, value.__class__.__name__, id(value))\n else:\n return '%s = <%s object at 0x%x>' \\\n % (name, value.__class__.__name__, id(value))", "response": "Format the self variable and value on instance methods."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _formatter_class(name, value):\n __mname = value.__module__\n if __mname != '__main__':\n return \"%s = <type '%s.%s'>\" % (name, __mname, value.__name__)\n else:\n return \"%s = <type '%s'>\" % (name, value.__name__)", "response": "Format the klass variable and value on class methods."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_formatter(name):\n if name in ('self', 'instance', 'this'):\n return af_self\n elif name == 'class':\n return af_class\n elif name in ('named', 'param', 'parameter'):\n return af_named\n elif name in ('default', 'optional'):\n return af_default\n# elif name in ('anonymous', 'arbitrary', 'unnamed'):\n# return af_anonymous\n elif name in ('keyword', 'pair', 'pairs'):\n return af_keyword\n else:\n raise ValueError('unknown trace formatter %r' % name)", "response": "Return the named formatter function. See the function\n set_formatter for details."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreplaces the formatter function used by the trace decorator to handle formatting a specific kind of argument.", "response": "def set_formatter(name, func):\n \"\"\"Replace the formatter function used by the trace decorator to\n handle formatting a specific kind of argument. 
There are several\n kinds of arguments that trace discriminates between:\n\n * instance argument - the object bound to an instance method.\n * class argument - the class object bound to a class method.\n * positional arguments (named) - values bound to distinct names.\n * positional arguments (default) - named positional arguments with\n default values specified in the function declaration.\n * positional arguments (anonymous) - an arbitrary number of values\n that are all bound to the '*' variable.\n * keyword arguments - zero or more name-value pairs that are\n placed in a dictionary and bound to the double-star variable.\n\n \\var{name} - specifies the name of the formatter to be modified.\n\n * instance argument - \"self\", \"instance\" or \"this\"\n * class argument - \"class\"\n * named argument - \"named\", \"param\" or \"parameter\"\n * default argument - \"default\", \"optional\"\n * anonymous argument - \"anonymous\", \"arbitrary\" or \"unnamed\"\n * keyword argument - \"keyword\", \"pair\" or \"pairs\"\n\n \\var{func} - a function to format an argument.\n * For all but anonymous formatters this function must accept two\n arguments: the variable name and the value to which it is bound.\n * The anonymous formatter function is passed only one argument\n corresponding to an anonymous value.\n * if \\var{func} is \"None\" then the default formatter will be used.\n \"\"\"\n if name in ('self', 'instance', 'this'):\n global af_self\n af_self = _formatter_self if func is None else func\n elif name == 'class':\n global af_class\n af_class = _formatter_class if func is None else func\n elif name in ('named', 'param', 'parameter'):\n global af_named\n af_named = _formatter_named if func is None else func\n elif name in ('default', 'optional'):\n global af_default\n af_default = _formatter_defaults if func is None else func\n elif name in ('anonymous', 'arbitrary', 'unnamed'):\n global af_anonymous\n af_anonymous = chop if func is None else func\n elif name in 
('keyword', 'pair', 'pairs'):\n global af_keyword\n af_keyword = _formatter_named if func is None else func\n else:\n raise ValueError('unknown trace formatter %r' % name)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef __lookup_builtin(name):\n global __builtin_functions\n if __builtin_functions is None:\n builtins = dict()\n for proto in __builtins:\n pos = proto.find('(')\n name, params, defaults = proto[:pos], list(), dict()\n for param in proto[pos + 1:-1].split(','):\n pos = param.find('=')\n if not pos < 0:\n param, value = param[:pos], param[pos + 1:]\n try:\n defaults[param] = __builtin_defaults[value]\n except KeyError:\n raise ValueError(\n 'builtin function %s: parameter %s: '\n 'unknown default %r' % (name, param, value))\n params.append(param)\n builtins[name] = (params, defaults)\n __builtin_functions = builtins\n\n try:\n params, defaults = __builtin_functions[name]\n except KeyError:\n params, defaults = tuple(), dict()\n __builtin_functions[name] = (params, defaults)\n print(\n \"Warning: builtin function %r is missing prototype\" % name,\n file=sys.stderr)\n return len(params), params, defaults", "response": "Lookup the parameter name and default parameter values for builtin functions."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef trace(_name):\n def decorator(_func):\n \"\"\"This is the actual decorator function that wraps the\n \\var{_func} function for detailed logging.\n \"\"\"\n\n def positional(name, value):\n \"\"\"Format one named positional argument.\n \"\"\"\n if name is __self:\n return af_self(name, value)\n elif name is __klass:\n return af_class(name, value)\n else:\n return af_named(name, value)\n\n def wrapper(*__argv, **__kwds):\n if not logger.isEnabledFor(logging.DEBUG) or _.value:\n return _func(*__argv, **__kwds)\n\n try:\n _.value = True\n params = dict(co_defaults)\n params.update(__kwds)\n 
params.update(zip(co_varnames, __argv))\n if 'raw_password' in params:\n params['raw_password'] = '<censored>'\n\n position = [\n positional(n, params.pop(n))\n for n in co_varnames[:len(__argv)]\n ]\n defaults = [\n af_default(n, params.pop(n))\n for n in co_varnames[len(__argv):]\n ]\n nameless = (\n af_unnamed(v) for v in __argv[co_argcount:]\n )\n keywords = (\n af_keyword(n, params[n]) for n in sorted(params.keys())\n )\n\n params = ', '.join(\n filter(None, chain(\n position,\n defaults,\n nameless,\n keywords)))\n # params = params.replace(', [', '[, ').replace('][, ', ', ')\n\n enter = [pre_enter]\n if params:\n enter.append(' ')\n enter.append(params)\n enter.append(' ')\n enter.append(')')\n\n leave = [pre_leave]\n\n try:\n logger.debug(''.join(enter))\n try:\n try:\n _.value = False\n result = _func(*__argv, **__kwds)\n finally:\n _.value = True\n except Exception:\n ex_type, value, traceback = sys.exc_info()\n leave.append(' => exception thrown\\n\\traise ')\n __mname = ex_type.__module__\n if __mname != '__main__':\n leave.append(__mname)\n leave.append('.')\n leave.append(ex_type.__name__)\n if value.args:\n leave.append('(')\n leave.append(\n ', '.join(chop(v) for v in value.args))\n leave.append(')')\n else:\n leave.append('()')\n raise\n else:\n if result is not None:\n leave.append(' => ')\n leave.append(chop(result))\n finally:\n logger.debug(''.join(leave))\n finally:\n _.value = False\n\n return result\n\n ####\n # decorator\n ####\n\n __self = False\n __klass = False\n\n def nop(x):\n return x\n\n __rewrap = nop\n if type(_func) in FunctionTypes:\n # functions do not belong to a class.\n __cname = None\n elif type(_func) in MethodTypes:\n # im_self is None for unbound instance methods.\n # Assumption: trace is only called on unbound methods.\n if _func.im_self is not None:\n __rewrap = classmethod\n __cname = _func.im_self.__name__\n __klass = True\n else:\n __cname = _func.im_class.__name__\n __self = True\n _func = _func.im_func\n else:\n # 
other callables are not supported yet.\n return _func\n __module = _func.__module__\n __fname = _func.__name__\n\n # Do not wrap initialization and conversion methods.\n if __fname in ('__init__', '__new__', '__repr__', '__str__'):\n return __rewrap(_func)\n\n # Generate the Fully Qualified Function Name.\n\n __fqfn = list()\n if __module != '__main__':\n __fqfn.append(__module)\n __fqfn.append('.')\n if __cname is not None:\n __fqfn.append(__cname)\n __fqfn.append('.')\n __fqfn.append(__fname)\n __fqfn = ''.join(__fqfn)\n\n if type(_name) in CallableTypes:\n logger = get_logger_factory().get_logger(__fqfn)\n elif loggable(_name):\n logger = _name\n elif isinstance(_name, six.string_types):\n logger = get_logger_factory().get_logger(_name)\n else:\n raise ValueError(\n 'invalid object %r: must be a function, a method, '\n 'a string or an object that implements the Logger API' % _name)\n\n pre_enter = ['>>> ', __fqfn, '(']\n pre_enter = ''.join(pre_enter)\n\n pre_leave = ['<<< ', __fqfn]\n pre_leave = ''.join(pre_leave)\n\n ####\n # Here we are really mucking around in function internals.\n # func_code is the low level 'code' instance that describes\n # the function arguments, variable and other stuff.\n #\n # func.func_code.co_argcount - number of function arguments.\n # func.func_code.co_varnames - function variables names, the\n # first co_argcount values are the argument names.\n # func.func_defaults - contains default arguments\n\n try:\n code = _func.__code__\n except AttributeError:\n co_argcount, co_varnames, co_defaults = \\\n __lookup_builtin(_func.__name__)\n else:\n co_argcount = code.co_argcount\n co_varnames = code.co_varnames[:co_argcount]\n if _func.__defaults__:\n co_defaults = dict(\n zip(co_varnames[-len(_func.__defaults__):],\n _func.__defaults__))\n else:\n co_defaults = dict()\n if __klass:\n __klass = co_varnames[0]\n if __self:\n __self = co_varnames[0]\n return __rewrap(wraps(_func)(wrapper))\n\n ####\n # trace\n ####\n\n# 
logging.basicConfig(level = logging.DEBUG)\n if type(_name) in CallableTypes:\n return decorator(_name)\n else:\n return decorator", "response": "Function decorator that logs function entry and exit details."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _leaders_changed(\n sender, instance, action, reverse, model, pk_set, **kwargs):\n \"\"\"\n Hook that executes whenever the group members are changed.\n \"\"\"\n # print(\"'%s','%s','%s','%s','%s'\"\n # %(instance, action, reverse, model, pk_set))\n\n if action == \"post_add\":\n if not reverse:\n project = instance\n for person in model.objects.filter(pk__in=pk_set):\n log.change(\n person,\n \"Added person to project %s leaders\" % project)\n log.change(\n project,\n \"Added person %s to project leaders\" % person)\n else:\n person = instance\n for project in model.objects.filter(pk__in=pk_set):\n log.change(\n person,\n \"Added person to project %s leaders\" % project)\n log.change(\n project,\n \"Added person %s to project leaders\" % person)\n\n elif action == \"post_remove\":\n if not reverse:\n project = instance\n for person in model.objects.filter(pk__in=pk_set):\n log.change(\n person,\n \"Removed person from project %s leaders\" % project)\n log.change(\n project,\n \"Removed person %s from project leaders\" % person)\n else:\n person = instance\n for project in model.objects.filter(pk__in=pk_set):\n log.change(\n person,\n \"Removed person from project %s leaders\" % project)\n log.change(\n project,\n \"Removed person %s from project leaders\" % person)\n\n elif action == \"pre_clear\":\n # This has to occur in pre_clear, not post_clear, as otherwise\n # we won't see what project leaders need to be removed.\n if not reverse:\n project = instance\n log.change(\n project,\n \"Removed all project leaders\")\n for person in project.leaders.all():\n log.change(\n project,\n \"Removed person %s from project leaders\" % person)\n else:\n 
person = instance\n log.change(\n person,\n \"Removed person from all project leaders\")\n for project in person.leads.all():\n log.change(\n project,\n \"Removed person %s from project leaders\" % person)", "response": "Hook that executes whenever the group members are changed."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nparsing log file lines in log_type format.", "response": "def parse_logs(log_list, date, machine_name, log_type):\n \"\"\"\n Parse log file lines in log_type format.\n \"\"\"\n output = []\n count = fail = skip = updated = 0\n\n # Check things are setup correctly\n try:\n machine = Machine.objects.get(name=machine_name)\n except Machine.DoesNotExist:\n return \"ERROR: Couldn't find machine named: %s\" % machine_name\n\n if log_type == \"alogger\":\n parser = AloggerParser()\n else:\n parser = get_parser(log_type)\n\n # Process each line\n for line_no, line in enumerate(log_list):\n try:\n data = parser.line_to_dict(line)\n except ValueError:\n output.append(\"%d: Error reading line\" % line_no)\n continue\n\n # if parser returns None, nothing to do, continue to next line\n if data is None:\n skip += 1\n continue\n\n # check for required fields\n required_fields = [\n 'user', 'project', 'jobid', 'jobname',\n 'cpu_usage', 'cores',\n 'act_wall_time', 'est_wall_time',\n 'mem', 'vmem', 'list_pmem', 'list_mem', 'list_pvmem',\n 'ctime', 'qtime', 'etime', 'start',\n ]\n\n for field in required_fields:\n if field not in data:\n output.append(\n \"line %d: %s field not given.\" % (line_no, field))\n fail = fail + 1\n continue\n\n # Process user --> account\n try:\n account = Account.objects.get(\n username=data['user'],\n date_deleted__isnull=True)\n\n except Account.DoesNotExist:\n # Couldn't find user account - Assign to user None\n output.append(\n \"line %d: Couldn't find user account for username=%s.\"\n % (line_no, data['user']))\n fail += 1\n continue\n\n except Account.MultipleObjectsReturned:\n output.append(\n \"line 
%d: Username %s has multiple active accounts.\"\n % (line_no, data['user']))\n fail += 1\n continue\n\n # Process project\n if data['project'] is None:\n output.append(\n \"line %d: Project was not supplied.\" % (line_no))\n fail += 1\n continue\n\n try:\n project = Project.objects.get(pid=data['project'])\n\n except Project.DoesNotExist:\n output.append(\n \"line %d: Couldn't find specified project %s\"\n % (line_no, data['project']))\n fail += 1\n continue\n\n # memory calculations\n if machine.mem_per_core:\n avail_mem_per_core = machine.mem_per_core * 1024\n avail_mem_for_job = avail_mem_per_core * data['cores']\n\n if data['list_pmem'] * data['cores'] > data['list_mem']:\n memory_used_per_core = data['list_pmem']\n memory_used_for_job = data['list_pmem'] * data['cores']\n else:\n memory_used_per_core = data['list_mem'] / data['cores']\n memory_used_for_job = data['list_mem']\n\n if memory_used_for_job > avail_mem_for_job:\n data['cpu_usage'] = ceil(\n memory_used_per_core / avail_mem_per_core\n * data['act_wall_time']\n * data['cores'])\n\n # apply scaling factor to cpu_usage\n data['cpu_usage'] = data['cpu_usage'] * machine.scaling_factor\n\n # Everything is good so add entry\n queue, created = Queue.objects.get_or_create(name=data['queue'])\n\n try:\n cpujob, created = CPUJob.objects.get_or_create(jobid=data['jobid'])\n cpujob.account = account\n cpujob.username = data['user']\n cpujob.project = project\n cpujob.machine = machine\n cpujob.date = date\n cpujob.queue = queue\n cpujob.cpu_usage = data['cpu_usage']\n cpujob.est_wall_time = data['est_wall_time']\n cpujob.act_wall_time = data['act_wall_time']\n cpujob.mem = data['mem']\n cpujob.vmem = data['vmem']\n cpujob.ctime = data['ctime']\n cpujob.qtime = data['qtime']\n cpujob.etime = data['etime']\n cpujob.start = data['start']\n cpujob.cores = data['cores']\n cpujob.exit_status = data['exit_status']\n cpujob.jobname = data['jobname']\n cpujob.list_mem = data['list_mem']\n cpujob.list_vmem = 
data['list_vmem']\n cpujob.list_pmem = data['list_pmem']\n cpujob.list_pvmem = data['list_pvmem']\n cpujob.save()\n\n except Exception as e:\n output.append(\n \"line %d: Failed to insert a line - %s\" % (line_no, e))\n fail += 1\n continue\n\n if created:\n count += 1\n else:\n updated += 1\n\n summary = (\n 'Inserted : %i\\nUpdated : %i\\nFailed : %i\\nSkiped : %i'\n % (count, updated, fail, skip)\n )\n\n logger.debug('Inserted : %i' % count)\n logger.debug('Updated : %i' % updated)\n logger.debug('Failed : %i' % fail)\n logger.debug('Skiped : %i' % skip)\n\n return summary, output"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef profile_application_list(request):\n config = tables.RequestConfig(request, paginate={\"per_page\": 5})\n\n person = request.user\n my_applications = Application.objects.get_for_applicant(person)\n my_applications = ApplicationTable(my_applications, prefix=\"mine-\")\n config.configure(my_applications)\n\n requires_attention = Application.objects.requires_attention(request)\n requires_attention = ApplicationTable(requires_attention, prefix=\"attn-\")\n config.configure(requires_attention)\n\n return render(\n template_name='kgapplications/profile_applications.html',\n context={\n 'person': request.user,\n 'my_applications': my_applications,\n 'requires_attention': requires_attention,\n },\n request=request)", "response": "a logged in user wants to see all his pending applications."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef application_unauthenticated(request, token, state=None, label=None):\n application = base.get_application(secret_token=token)\n if application.expires < datetime.datetime.now():\n return render(\n template_name='kgapplications/common_expired.html',\n context={'application': application},\n request=request)\n\n roles = {'is_applicant', 'is_authorised'}\n\n # redirect user to real url if possible.\n if 
request.user.is_authenticated:\n if request.user == application.applicant:\n url = base.get_url(\n request, application, roles, label)\n return HttpResponseRedirect(url)\n\n state_machine = base.get_state_machine(application)\n return state_machine.process(\n request, application, state, label, roles)", "response": "This function is called when the user is not authorized to access an application."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning cpu hours and number of jobs for an institute for a given period", "response": "def get_institute_usage(institute, start, end):\n \"\"\"Return a tuple of cpu hours and number of jobs for an institute\n for a given period\n\n Keyword arguments:\n institute --\n start -- start date\n end -- end date\n \"\"\"\n try:\n cache = InstituteCache.objects.get(\n institute=institute, date=datetime.date.today(),\n start=start, end=end)\n return cache.cpu_time, cache.no_jobs\n except InstituteCache.DoesNotExist:\n return 0, 0"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_project_usage(project, start, end):\n try:\n cache = ProjectCache.objects.get(\n project=project, date=datetime.date.today(),\n start=start, end=end)\n return cache.cpu_time, cache.no_jobs\n except ProjectCache.DoesNotExist:\n return 0, 0", "response": "Return a tuple of cpu hours and number of jobs for a given project"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning cpu hours and number of jobs for a person in a specific project", "response": "def get_person_usage(person, project, start, end):\n \"\"\"Return a tuple of cpu hours and number of jobs for a person in a\n specific project\n\n Keyword arguments:\n person --\n project -- The project the usage is from\n start -- start date\n end -- end date\n \"\"\"\n try:\n cache = PersonCache.objects.get(\n person=person, project=project, date=datetime.date.today(),\n 
start=start, end=end)\n return cache.cpu_time, cache.no_jobs\n except PersonCache.DoesNotExist:\n return 0, 0"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_machine_usage(machine, start, end):\n\n try:\n cache = MachineCache.objects.get(\n machine=machine, date=datetime.date.today(),\n start=start, end=end)\n return cache.cpu_time, cache.no_jobs\n except MachineCache.DoesNotExist:\n return 0, 0", "response": "Return a tuple of cpu hours and number of jobs for a given period"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_machine_category_usage(start, end):\n\n cache = MachineCategoryCache.objects.get(\n date=datetime.date.today(),\n start=start, end=end)\n return cache", "response": "Return a tuple of cpu hours and number of jobs\n for a given period"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets applicant from email address.", "response": "def get_applicant_from_email(email):\n \"\"\"\n Get applicant from email address.\n\n If the person exists, return (person, True)\n\n If multiple matches, return (None, True)\n\n Otherwise create applicant and return (applicant, False)\n \"\"\"\n try:\n applicant = Person.active.get(email=email)\n existing_person = True\n except Person.DoesNotExist:\n applicant = Applicant.objects.create(email=email)\n existing_person = False\n except Person.MultipleObjectsReturned:\n applicant = None\n existing_person = False\n return applicant, existing_person"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _send_invitation(request, project):\n form = forms.InviteUserApplicationForm(request.POST or None)\n if request.method == 'POST':\n if form.is_valid():\n\n email = form.cleaned_data['email']\n applicant, existing_person = get_applicant_from_email(email)\n\n # If applicant is None then there were multiple 
persons found.\n if applicant is None:\n return render(\n template_name='kgapplications/'\n 'project_common_invite_multiple.html',\n context={'form': form, 'email': email},\n request=request)\n\n if existing_person and 'existing' not in request.POST:\n return render(\n template_name='kgapplications/'\n 'project_common_invite_existing.html',\n context={'form': form, 'person': applicant},\n request=request)\n\n application = form.save(commit=False)\n application.applicant = applicant\n application.project = project\n application.save()\n state_machine = get_application_state_machine()\n response = state_machine.start(request, application)\n return response\n\n return render(\n template_name='kgapplications/project_common_invite_other.html',\n context={'form': form, 'project': project, },\n request=request)", "response": "Sends an invitation to the user who is logged in."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef send_invitation(request, project_id=None):\n\n project = None\n if project_id is not None:\n project = get_object_or_404(Project, id=project_id)\n\n if project is None:\n\n if not is_admin(request):\n return HttpResponseForbidden('<h1>Access Denied</h1>')\n\n else:\n\n if not project.can_edit(request):\n return HttpResponseForbidden('<h1>Access Denied</h1>')\n\n return _send_invitation(request, project)", "response": "Send an invitation to the current user."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef new_application(request):\n # Note default kgapplications/index.html will display error if user logged\n # in.\n if not settings.ALLOW_REGISTRATIONS:\n return render(\n template_name='kgapplications/project_common_disabled.html',\n context={},\n request=request)\n\n roles = {'is_applicant', 'is_authorised'}\n\n if not request.user.is_authenticated:\n attrs, _ = saml.parse_attributes(request)\n defaults = {'email': attrs['email']}\n form = 
forms.UnauthenticatedInviteUserApplicationForm(\n request.POST or None, initial=defaults)\n if request.method == 'POST':\n if form.is_valid():\n email = form.cleaned_data['email']\n applicant, existing_person = get_applicant_from_email(email)\n\n # If applicant is None then there were multiple persons found.\n # This should never happen as the\n # UnauthenticatedInviteUserApplicationForm form disallows\n # existing users applying unauthenticated.\n assert applicant is not None\n # Similarly existing_person should always be False here.\n assert not existing_person\n\n application = ProjectApplication()\n application.applicant = applicant\n application.save()\n\n state_machine = get_application_state_machine()\n state_machine.start(request, application, roles)\n # we do not show unauthenticated users the application at this\n # stage.\n url = reverse('index')\n return HttpResponseRedirect(url)\n return render(\n template_name='kgapplications/'\n 'project_common_invite_unauthenticated.html',\n context={'form': form, },\n request=request)\n else:\n if request.method == 'POST':\n person = request.user\n\n application = ProjectApplication()\n application.applicant = person\n application.save()\n\n state_machine = get_application_state_machine()\n response = state_machine.start(request, application, roles)\n return response\n return render(\n template_name='kgapplications/'\n 'project_common_invite_authenticated.html',\n context={},\n request=request)", "response": "A new application by a user to start a new project."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndetermine the correct link flags for the current system.", "response": "def _get_ldflags():\n \"\"\"Determine the correct link flags. 
This attempts dummy compiles similar\n to how autotools does feature detection.\n \"\"\"\n # windows gcc does not support linking with unresolved symbols\n if sys.platform == 'win32': # pragma: no cover (windows)\n prefix = getattr(sys, 'real_prefix', sys.prefix)\n libs = os.path.join(prefix, str('libs'))\n return str('-L{} -lpython{}{}').format(libs, *sys.version_info[:2])\n\n cc = subprocess.check_output(('go', 'env', 'CC')).decode('UTF-8').strip()\n\n with _tmpdir() as tmpdir:\n testf = os.path.join(tmpdir, 'test.c')\n with io.open(testf, 'w') as f:\n f.write('int f(int); int main(void) { return f(0); }\\n')\n\n for lflag in LFLAGS: # pragma: no cover (platform specific)\n try:\n subprocess.check_call((cc, testf, lflag), cwd=tmpdir)\n return lflag\n except subprocess.CalledProcessError:\n pass\n else: # pragma: no cover (platform specific)\n # wellp, none of them worked, fall back to gcc and they'll get a\n # hopefully reasonable error message\n return LFLAG_GCC"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a penalty model with the given specification.", "response": "def get_penalty_model(specification):\n \"\"\"Factory function for penaltymodel_maxgap.\n\n Args:\n specification (penaltymodel.Specification): The specification\n for the desired penalty model.\n\n Returns:\n :class:`penaltymodel.PenaltyModel`: Penalty model with the given specification.\n\n Raises:\n :class:`penaltymodel.ImpossiblePenaltyModel`: If the penalty cannot be built.\n\n Parameters:\n priority (int): -100\n\n \"\"\"\n\n # check that the feasible_configurations are spin\n feasible_configurations = specification.feasible_configurations\n if specification.vartype is dimod.BINARY:\n feasible_configurations = {tuple(2 * v - 1 for v in config): en\n for config, en in feasible_configurations.items()}\n\n # convert ising_quadratic_ranges to the form we expect\n ising_quadratic_ranges = specification.ising_quadratic_ranges\n quadratic_ranges = {(u, v): 
ising_quadratic_ranges[u][v] for u, v in specification.graph.edges}\n\n bqm, gap = generate(specification.graph,\n feasible_configurations,\n specification.decision_variables,\n specification.ising_linear_ranges,\n quadratic_ranges,\n specification.min_classical_gap,\n None) # unspecified smt solver\n\n try:\n ground = max(feasible_configurations.values())\n except ValueError:\n ground = 0.0 # if empty\n\n return pm.PenaltyModel.from_specification(specification, bqm, gap, ground)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ninsert a group of feasible configuration into the cache.", "response": "def insert_feasible_configurations(cur, feasible_configurations, encoded_data=None):\n \"\"\"Insert a group of feasible configurations into the cache.\n\n Args:\n cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function\n is meant to be run within a :obj:`with` statement.\n feasible_configurations (dict[tuple[int]): The set of feasible\n configurations. Each key should be a tuple of variable assignments.\n The values are the relative energies.\n encoded_data (dict, optional): If a dictionary is provided, it\n will be populated with the serialized data. This is useful for\n preventing encoding the same information many times.\n\n Examples:\n >>> feasible_configurations = {(-1, -1): 0.0, (+1, +1): 0.0}\n >>> with pmc.cache_connect(':memory:') as cur:\n ... 
pmc.insert_feasible_configurations(cur, feasible_configurations)\n\n \"\"\"\n if encoded_data is None:\n encoded_data = {}\n\n if 'num_variables' not in encoded_data:\n encoded_data['num_variables'] = len(next(iter(feasible_configurations)))\n if 'num_feasible_configurations' not in encoded_data:\n encoded_data['num_feasible_configurations'] = len(feasible_configurations)\n if 'feasible_configurations' not in encoded_data or 'energies' not in encoded_data:\n encoded = {_serialize_config(config): en for config, en in feasible_configurations.items()}\n\n configs, energies = zip(*sorted(encoded.items()))\n encoded_data['feasible_configurations'] = json.dumps(configs, separators=(',', ':'))\n encoded_data['energies'] = json.dumps(energies, separators=(',', ':'))\n\n insert = \"\"\"\n INSERT OR IGNORE INTO feasible_configurations(\n num_variables,\n num_feasible_configurations,\n feasible_configurations,\n energies)\n VALUES (\n :num_variables,\n :num_feasible_configurations,\n :feasible_configurations,\n :energies);\n \"\"\"\n\n cur.execute(insert, encoded_data)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef iter_feasible_configurations(cur):\n select = \\\n \"\"\"\n SELECT num_variables, feasible_configurations, energies\n FROM feasible_configurations\n \"\"\"\n for num_variables, feasible_configurations, energies in cur.execute(select):\n configs = json.loads(feasible_configurations)\n energies = json.loads(energies)\n\n yield {_decode_config(config, num_variables): energy\n for config, energy in zip(configs, energies)}", "response": "Iterate over all feasible configurations in the cache."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninversing of _serialize_config always converts to spin.", "response": "def _decode_config(c, num_variables):\n \"\"\"inverse of _serialize_config, always converts to spin.\"\"\"\n def bits(c):\n n = 1 << (num_variables - 1)\n for __ in 
range(num_variables):\n yield 1 if c & n else -1\n n >>= 1\n return tuple(bits(c))"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ninserts an Ising model into the cache.", "response": "def insert_ising_model(cur, nodelist, edgelist, linear, quadratic, offset, encoded_data=None):\n \"\"\"Insert an Ising model into the cache.\n\n Args:\n cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function\n is meant to be run within a :obj:`with` statement.\n nodelist (list): The nodes in the graph.\n edgelist (list): The edges in the graph.\n linear (dict): The linear bias associated with each node in nodelist.\n quadratic (dict): The quadratic bias associated with teach edge in edgelist.\n offset (float): The constant offset applied to the ising problem.\n encoded_data (dict, optional): If a dictionary is provided, it\n will be populated with the serialized data. This is useful for\n preventing encoding the same information many times.\n\n \"\"\"\n if encoded_data is None:\n encoded_data = {}\n\n # insert graph and partially populate encoded_data with graph info\n insert_graph(cur, nodelist, edgelist, encoded_data=encoded_data)\n\n # need to encode the biases\n if 'linear_biases' not in encoded_data:\n encoded_data['linear_biases'] = _serialize_linear_biases(linear, nodelist)\n if 'quadratic_biases' not in encoded_data:\n encoded_data['quadratic_biases'] = _serialize_quadratic_biases(quadratic, edgelist)\n if 'offset' not in encoded_data:\n encoded_data['offset'] = offset\n if 'max_quadratic_bias' not in encoded_data:\n encoded_data['max_quadratic_bias'] = max(itervalues(quadratic))\n if 'min_quadratic_bias' not in encoded_data:\n encoded_data['min_quadratic_bias'] = min(itervalues(quadratic))\n if 'max_linear_bias' not in encoded_data:\n encoded_data['max_linear_bias'] = max(itervalues(linear))\n if 'min_linear_bias' not in encoded_data:\n encoded_data['min_linear_bias'] = min(itervalues(linear))\n\n insert = \\\n \"\"\"\n 
INSERT OR IGNORE INTO ising_model(\n linear_biases,\n quadratic_biases,\n offset,\n max_quadratic_bias,\n min_quadratic_bias,\n max_linear_bias,\n min_linear_bias,\n graph_id)\n SELECT\n :linear_biases,\n :quadratic_biases,\n :offset,\n :max_quadratic_bias,\n :min_quadratic_bias,\n :max_linear_bias,\n :min_linear_bias,\n graph.id\n FROM graph WHERE\n num_nodes = :num_nodes AND\n num_edges = :num_edges AND\n edges = :edges;\n \"\"\"\n\n cur.execute(insert, encoded_data)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nserializing the linear biases.", "response": "def _serialize_linear_biases(linear, nodelist):\n \"\"\"Serializes the linear biases.\n\n Args:\n linear: a interable object where linear[v] is the bias\n associated with v.\n nodelist (list): an ordered iterable containing the nodes.\n\n Returns:\n str: base 64 encoded string of little endian 8 byte floats,\n one for each of the biases in linear. Ordered according\n to nodelist.\n\n Examples:\n >>> _serialize_linear_biases({1: -1, 2: 1, 3: 0}, [1, 2, 3])\n 'AAAAAAAA8L8AAAAAAADwPwAAAAAAAAAA'\n >>> _serialize_linear_biases({1: -1, 2: 1, 3: 0}, [3, 2, 1])\n 'AAAAAAAAAAAAAAAAAADwPwAAAAAAAPC/'\n\n \"\"\"\n linear_bytes = struct.pack('<' + 'd' * len(linear), *[linear[i] for i in nodelist])\n return base64.b64encode(linear_bytes).decode('utf-8')"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _serialize_quadratic_biases(quadratic, edgelist):\n # assumes quadratic is upper-triangular or reflected in edgelist\n quadratic_list = [quadratic[(u, v)] if (u, v) in quadratic else quadratic[(v, u)]\n for u, v in edgelist]\n quadratic_bytes = struct.pack('<' + 'd' * len(quadratic), *quadratic_list)\n return base64.b64encode(quadratic_bytes).decode('utf-8')", "response": "Serializes the quadratic biases."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef 
iter_ising_model(cur):\n select = \\\n \"\"\"\n SELECT linear_biases, quadratic_biases, num_nodes, edges, offset\n FROM ising_model, graph\n WHERE graph.id = ising_model.graph_id;\n \"\"\"\n\n for linear_biases, quadratic_biases, num_nodes, edges, offset in cur.execute(select):\n nodelist = list(range(num_nodes))\n edgelist = json.loads(edges)\n yield (nodelist, edgelist,\n _decode_linear_biases(linear_biases, nodelist),\n _decode_quadratic_biases(quadratic_biases, edgelist),\n offset)", "response": "Iterate over all of the Ising models in the cache."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ninversing of _serialize_linear_biases. Args: linear_string (str): base 64 encoded string of little endian 8 byte floats, one for each of the nodes in nodelist. nodelist (list): list of the form [node1, node2, ...]. Returns: dict: linear biases in a dict. Examples: >>> _decode_linear_biases('AAAAAAAA8L8AAAAAAADwPwAAAAAAAAAA', [1, 2, 3]) {1: -1.0, 2: 1.0, 3: 0.0} >>> _decode_linear_biases('AAAAAAAA8L8AAAAAAADwPwAAAAAAAAAA', [3, 2, 1]) {1: 0.0, 2: 1.0, 3: -1.0}", "response": "def _decode_linear_biases(linear_string, nodelist):\n \"\"\"Inverse of _serialize_linear_biases.\n\n Args:\n linear_string (str): base 64 encoded string of little endian\n 8 byte floats, one for each of the nodes in nodelist.\n nodelist (list): list of the form [node1, node2, ...].\n\n Returns:\n dict: linear biases in a dict.\n\n Examples:\n >>> _decode_linear_biases('AAAAAAAA8L8AAAAAAADwPwAAAAAAAAAA', [1, 2, 3])\n {1: -1.0, 2: 1.0, 3: 0.0}\n >>> _decode_linear_biases('AAAAAAAA8L8AAAAAAADwPwAAAAAAAAAA', [3, 2, 1])\n {1: 0.0, 2: 1.0, 3: -1.0}\n\n \"\"\"\n linear_bytes = base64.b64decode(linear_string)\n return dict(zip(nodelist, struct.unpack('<' + 'd' * (len(linear_bytes) // 8), linear_bytes)))"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ninverses of _serialize_quadratic_biases Args: quadratic_string (str) : base 64 encoded string 
of little endian 8 byte floats, one for each of the edges. edgelist (list): a list of edges of the form [(node1, node2), ...]. Returns: dict: J. A dict of the form {edge1: bias1, ...} where each edge is of the form (node1, node2). Example: >>> _decode_quadratic_biases('AAAAAAAA8L8AAAAAAADwP5qZmZmZmdk/', ... [(0, 1), (1, 2), (0, 2)]) {(0, 1): -1.0, (0, 2): 0.4, (1, 2): 1.0}", "response": "def _decode_quadratic_biases(quadratic_string, edgelist):\n \"\"\"Inverse of _serialize_quadratic_biases\n\n Args:\n quadratic_string (str) : base 64 encoded string of little\n endian 8 byte floats, one for each of the edges.\n edgelist (list): a list of edges of the form [(node1, node2), ...].\n\n Returns:\n dict: J. A dict of the form {edge1: bias1, ...} where each\n edge is of the form (node1, node2).\n\n Example:\n >>> _decode_quadratic_biases('AAAAAAAA8L8AAAAAAADwP5qZmZmZmdk/',\n ... [(0, 1), (1, 2), (0, 2)])\n {(0, 1): -1.0, (0, 2): 0.4, (1, 2): 1.0}\n\n \"\"\"\n quadratic_bytes = base64.b64decode(quadratic_string)\n return {tuple(edge): bias for edge, bias in zip(edgelist,\n struct.unpack('<' + 'd' * (len(quadratic_bytes) // 8), quadratic_bytes))}"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef insert_penalty_model(cur, penalty_model):\n encoded_data = {}\n\n linear, quadratic, offset = penalty_model.model.to_ising()\n nodelist = sorted(linear)\n edgelist = sorted(sorted(edge) for edge in penalty_model.graph.edges)\n\n insert_graph(cur, nodelist, edgelist, encoded_data)\n insert_feasible_configurations(cur, penalty_model.feasible_configurations, encoded_data)\n insert_ising_model(cur, nodelist, edgelist, linear, quadratic, offset, encoded_data)\n\n encoded_data['decision_variables'] = json.dumps(penalty_model.decision_variables, separators=(',', ':'))\n encoded_data['classical_gap'] = penalty_model.classical_gap\n encoded_data['ground_energy'] = penalty_model.ground_energy\n\n insert = \\\n \"\"\"\n INSERT OR 
IGNORE INTO penalty_model(\n decision_variables,\n classical_gap,\n ground_energy,\n feasible_configurations_id,\n ising_model_id)\n SELECT\n :decision_variables,\n :classical_gap,\n :ground_energy,\n feasible_configurations.id,\n ising_model.id\n FROM feasible_configurations, ising_model, graph\n WHERE\n graph.edges = :edges AND\n graph.num_nodes = :num_nodes AND\n ising_model.graph_id = graph.id AND\n ising_model.linear_biases = :linear_biases AND\n ising_model.quadratic_biases = :quadratic_biases AND\n ising_model.offset = :offset AND\n feasible_configurations.num_variables = :num_variables AND\n feasible_configurations.num_feasible_configurations = :num_feasible_configurations AND\n feasible_configurations.feasible_configurations = :feasible_configurations AND\n feasible_configurations.energies = :energies;\n \"\"\"\n\n cur.execute(insert, encoded_data)", "response": "Insert a penalty model into the database."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef iter_penalty_model_from_specification(cur, specification):\n encoded_data = {}\n\n nodelist = sorted(specification.graph)\n edgelist = sorted(sorted(edge) for edge in specification.graph.edges)\n encoded_data['num_nodes'] = len(nodelist)\n encoded_data['num_edges'] = len(edgelist)\n encoded_data['edges'] = json.dumps(edgelist, separators=(',', ':'))\n encoded_data['num_variables'] = len(next(iter(specification.feasible_configurations)))\n encoded_data['num_feasible_configurations'] = len(specification.feasible_configurations)\n\n encoded = {_serialize_config(config): en for config, en in specification.feasible_configurations.items()}\n configs, energies = zip(*sorted(encoded.items()))\n encoded_data['feasible_configurations'] = json.dumps(configs, separators=(',', ':'))\n encoded_data['energies'] = json.dumps(energies, separators=(',', ':'))\n\n encoded_data['decision_variables'] = json.dumps(specification.decision_variables, separators=(',', 
':'))\n encoded_data['classical_gap'] = json.dumps(specification.min_classical_gap, separators=(',', ':'))\n\n select = \\\n \"\"\"\n SELECT\n linear_biases,\n quadratic_biases,\n offset,\n decision_variables,\n classical_gap,\n ground_energy\n FROM penalty_model_view\n WHERE\n -- graph:\n num_nodes = :num_nodes AND\n num_edges = :num_edges AND\n edges = :edges AND\n -- feasible_configurations:\n num_variables = :num_variables AND\n num_feasible_configurations = :num_feasible_configurations AND\n feasible_configurations = :feasible_configurations AND\n energies = :energies AND\n -- decision variables:\n decision_variables = :decision_variables AND\n -- we could apply filters based on the energy ranges but in practice this seems slower\n classical_gap >= :classical_gap\n ORDER BY classical_gap DESC;\n \"\"\"\n\n for row in cur.execute(select, encoded_data):\n # we need to build the model\n linear = _decode_linear_biases(row['linear_biases'], nodelist)\n quadratic = _decode_quadratic_biases(row['quadratic_biases'], edgelist)\n\n model = dimod.BinaryQuadraticModel(linear, quadratic, row['offset'], dimod.SPIN) # always spin\n\n yield pm.PenaltyModel.from_specification(specification, model, row['classical_gap'], row['ground_energy'])", "response": "Iterate through all penalty models in the cache matching the given specification."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nconstructing a PenaltyModel from a Specification.", "response": "def from_specification(cls, specification, model, classical_gap, ground_energy):\n \"\"\"Construct a PenaltyModel from a Specification.\n\n Args:\n specification (:class:`.Specification`): A specification that was used\n to generate the model.\n model (:class:`dimod.BinaryQuadraticModel`): A binary quadratic model\n that has ground states that match the feasible_configurations.\n classical_gap (numeric): The difference in classical energy between the ground\n state and the first excited state. 
Must be positive.\n ground_energy (numeric): The minimum energy of all possible configurations.\n\n Returns:\n :class:`.PenaltyModel`\n\n \"\"\"\n\n # Author note: there might be a way that avoids rechecking all of the values without\n # side-effects or lots of repeated code, but this seems simpler and more explicit\n return cls(specification.graph,\n specification.decision_variables,\n specification.feasible_configurations,\n specification.vartype,\n model,\n classical_gap,\n ground_energy,\n ising_linear_ranges=specification.ising_linear_ranges,\n ising_quadratic_ranges=specification.ising_quadratic_ranges)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef relabel_variables(self, mapping, inplace=True):\n # just use the relabeling of each component\n if inplace:\n Specification.relabel_variables(self, mapping, inplace=True)\n self.model.relabel_variables(mapping, inplace=True)\n return self\n else:\n spec = Specification.relabel_variables(self, mapping, inplace=False)\n model = self.model.relabel_variables(mapping, inplace=False)\n return PenaltyModel.from_specification(spec, model, self.classical_gap, self.ground_energy)", "response": "Relabel the variables and nodes according to the given mapping."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn a penalty model with the given specification.", "response": "def get_penalty_model(specification, database=None):\n \"\"\"Factory function for penaltymodel_cache.\n\n Args:\n specification (penaltymodel.Specification): The specification\n for the desired penalty model.\n database (str, optional): The path to the desired sqlite database\n file. 
If None, will use the default.\n\n Returns:\n :class:`penaltymodel.PenaltyModel`: Penalty model with the given specification.\n\n Raises:\n :class:`penaltymodel.MissingPenaltyModel`: If the penalty model is not in the\n cache.\n\n Parameters:\n priority (int): 100\n\n \"\"\"\n # only handles index-labelled nodes\n if not _is_index_labelled(specification.graph):\n relabel_applied = True\n mapping, inverse_mapping = _graph_canonicalization(specification.graph)\n specification = specification.relabel_variables(mapping, inplace=False)\n else:\n relabel_applied = False\n\n # connect to the database. Note that once the connection is made it cannot be\n # broken up between several processes.\n if database is None:\n conn = cache_connect()\n else:\n conn = cache_connect(database)\n\n # get the penalty_model\n with conn as cur:\n try:\n widget = next(iter_penalty_model_from_specification(cur, specification))\n except StopIteration:\n widget = None\n\n # close the connection\n conn.close()\n\n if widget is None:\n raise pm.MissingPenaltyModel(\"no penalty model with the given specification found in cache\")\n\n if relabel_applied:\n # relabel the widget in-place\n widget.relabel_variables(inverse_mapping, inplace=True)\n\n return widget"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ncaching function for penaltymodel_cache.", "response": "def cache_penalty_model(penalty_model, database=None):\n \"\"\"Caching function for penaltymodel_cache.\n\n Args:\n penalty_model (:class:`penaltymodel.PenaltyModel`): Penalty model to\n be cached.\n database (str, optional): The path to the desired sqlite database\n file. If None, will use the default.\n\n \"\"\"\n\n # only handles index-labelled nodes\n if not _is_index_labelled(penalty_model.graph):\n mapping, __ = _graph_canonicalization(penalty_model.graph)\n penalty_model = penalty_model.relabel_variables(mapping, inplace=False)\n\n # connect to the database. 
Note that once the connection is made it cannot be\n # broken up between several processes.\n if database is None:\n conn = cache_connect()\n else:\n conn = cache_connect(database)\n\n # load into the database\n with conn as cur:\n insert_penalty_model(cur, penalty_model)\n\n # close the connection\n conn.close()"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nretrieving a PenaltyModel from one of the available PenaltyModel factories.", "response": "def get_penalty_model(specification):\n \"\"\"Retrieve a PenaltyModel from one of the available factories.\n\n Args:\n specification (:class:`.Specification`): The specification\n for the desired PenaltyModel.\n\n Returns:\n :class:`.PenaltyModel`/None: A PenaltyModel as returned by\n the highest priority factory, or None if no factory could\n produce it.\n\n Raises:\n :exc:`ImpossiblePenaltyModel`: If the specification\n describes a penalty model that cannot be built by any\n factory.\n\n \"\"\"\n\n # Iterate through the available factories until one gives a penalty model\n for factory in iter_factories():\n try:\n pm = factory(specification)\n except ImpossiblePenaltyModel as e:\n # information about impossible models should be propagated\n raise e\n except FactoryException:\n # any other type of factory exception, continue through the list\n continue\n\n # if penalty model was found, broadcast to all of the caches. This could be done\n # asynchronously\n for cache in iter_caches():\n cache(pm)\n\n return pm\n\n return None"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef iter_factories():\n # retrieve all of the factories with\n factories = (entry.load() for entry in iter_entry_points(FACTORY_ENTRYPOINT))\n\n # sort the factories from highest priority to lowest. 
Any factory with unknown priority\n # gets assigned priority -1000.\n for factory in sorted(factories, key=lambda f: getattr(f, 'priority', -1000), reverse=True):\n yield factory", "response": "Iterate through all of the factories in the entry point."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef generate_bqm(graph, table, decision,\n linear_energy_ranges=None, quadratic_energy_ranges=None, min_classical_gap=2,\n precision=7, max_decision=8, max_variables=10,\n return_auxiliary=False):\n \"\"\"Get a binary quadratic model with specific ground states.\n\n Args:\n graph (:obj:`~networkx.Graph`):\n Defines the structure of the generated binary quadratic model.\n\n table (iterable):\n Iterable of valid configurations (of spin-values). Each configuration is a tuple of\n variable assignments ordered by `decision`.\n\n decision (list/tuple):\n The variables in the binary quadratic model which have specified configurations.\n\n linear_energy_ranges (dict, optional):\n Dict of the form {v: (min, max, ...} where min and max are the range of values allowed\n to v. The default range is [-2, 2].\n\n quadratic_energy_ranges (dict, optional):\n Dict of the form {(u, v): (min, max), ...} where min and max are the range of values\n allowed to (u, v). The default range is [-1, 1].\n\n min_classical_gap (float):\n The minimum energy gap between the highest feasible state and the lowest infeasible\n state.\n\n precision (int, optional, default=7):\n Values returned by the optimization solver are rounded to `precision` digits of\n precision.\n\n max_decision (int, optional, default=4):\n Maximum number of decision variables allowed. The algorithm is valid for arbitrary\n sizes of problem but can be extremely slow.\n\n max_variables (int, optional, default=4):\n Maximum number of variables allowed. 
The algorithm is valid for arbitrary\n sizes of problem but can be extremely slow.\n\n return_auxiliary (bool, optional, False):\n If True, the auxiliary configurations are returned for each configuration in table.\n\n Returns:\n If return_auxiliary is False:\n\n :obj:`dimod.BinaryQuadraticModel`: The binary quadratic model.\n\n float: The classical gap.\n\n If return_auxiliary is True:\n\n :obj:`dimod.BinaryQuadraticModel`: The binary quadratic model.\n\n float: The classical gap.\n\n dict: The auxiliary configurations, keyed on the configurations in table.\n\n Raises:\n ImpossiblePenaltyModel: If the penalty model cannot be built. Normally due\n to a non-zero infeasible gap.\n\n \"\"\"\n\n # Developer note: This function is input checking and output formatting. The logic is\n # in _generate_ising\n\n if not isinstance(graph, nx.Graph):\n raise TypeError(\"expected input graph to be a NetworkX Graph.\")\n\n if not set().union(*table).issubset({-1, 1}):\n raise ValueError(\"expected table to be spin-valued\")\n\n if not isinstance(decision, list):\n decision = list(decision) # handle iterables\n if not all(v in graph for v in decision):\n raise ValueError(\"given graph does not match the variable labels in decision variables\")\n\n num_var = len(decision)\n if any(len(config) != num_var for config in table):\n raise ValueError(\"number of decision variables does not match all of the entires in the table\")\n\n if len(decision) > max_decision:\n raise ValueError((\"The table is too large. Note that larger models can be built by setting \"\n \"max_decision to a higher number, but generation could be extremely slow.\"))\n\n if len(graph) > max_variables:\n raise ValueError((\"The graph is too large. 
Note that larger models can be built by setting \"\n \"max_variables to a higher number, but generation could be extremely slow.\"))\n\n if linear_energy_ranges is None:\n linear_energy_ranges = defaultdict(lambda: (-2, 2))\n if quadratic_energy_ranges is None:\n quadratic_energy_ranges = defaultdict(lambda: (-1, 1))\n\n if not isinstance(table, Mapping):\n table = {config: 0. for config in table}\n\n h, J, offset, gap, aux = _generate_ising(graph, table, decision, min_classical_gap,\n linear_energy_ranges, quadratic_energy_ranges)\n\n bqm = dimod.BinaryQuadraticModel.empty(dimod.SPIN)\n bqm.add_variables_from((v, round(bias, precision)) for v, bias in h.items())\n bqm.add_interactions_from((u, v, round(bias, precision)) for (u, v), bias in J.items())\n bqm.add_offset(round(offset, precision))\n\n if return_auxiliary:\n return bqm, round(gap, precision), aux\n else:\n return bqm, round(gap, precision)", "response": "Generates a binary quadratic model with specific ground states."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_penalty_model(specification):\n # check that the feasible_configurations are spin\n feasible_configurations = specification.feasible_configurations\n if specification.vartype is dimod.BINARY:\n feasible_configurations = {tuple(2 * v - 1 for v in config): en\n for config, en in iteritems(feasible_configurations)}\n\n # convert ising_quadratic_ranges to the form we expect\n ising_quadratic_ranges = specification.ising_quadratic_ranges\n quadratic_ranges = {(u, v): ising_quadratic_ranges[u][v] for u, v in specification.graph.edges}\n\n try:\n bqm, gap = generate_bqm(specification.graph, feasible_configurations,\n specification.decision_variables,\n linear_energy_ranges=specification.ising_linear_ranges,\n quadratic_energy_ranges=quadratic_ranges,\n min_classical_gap=specification.min_classical_gap)\n except ValueError:\n raise 
pm.exceptions.FactoryException(\"Specification is for too large of a model\")\n\n return pm.PenaltyModel.from_specification(specification, bqm, gap, 0.0)", "response": "Returns a penalty model with the given specification."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchecking correctness of ising_linear_ranges.", "response": "def _check_ising_linear_ranges(linear_ranges, graph):\n \"\"\"check correctness/populate defaults for ising_linear_ranges.\"\"\"\n if linear_ranges is None:\n linear_ranges = {}\n\n for v in graph:\n if v in linear_ranges:\n # check\n linear_ranges[v] = Specification._check_range(linear_ranges[v])\n else:\n # set default\n linear_ranges[v] = [-2, 2]\n\n return linear_ranges"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck correctness of ising_quadratic_ranges.", "response": "def _check_ising_quadratic_ranges(quad_ranges, graph):\n \"\"\"check correctness/populate defaults for ising_quadratic_ranges.\"\"\"\n if quad_ranges is None:\n quad_ranges = {}\n\n # first just populate the top level so we can rely on the structure\n for u in graph:\n if u not in quad_ranges:\n quad_ranges[u] = {}\n\n # next let's propgate and check what is already present\n for u, neighbors in iteritems(quad_ranges):\n for v, rang in iteritems(neighbors):\n # check the range\n rang = Specification._check_range(rang)\n\n if u in quad_ranges[v]:\n # it's symmetric\n if quad_ranges[u][v] != quad_ranges[v][u]:\n raise ValueError(\"mismatched ranges for ising_quadratic_ranges\")\n quad_ranges[v][u] = quad_ranges[u][v] = rang\n\n # finally fill in the missing stuff\n for u, v in graph.edges:\n if u not in quad_ranges[v]:\n quad_ranges[u][v] = quad_ranges[v][u] = [-1, 1]\n\n return quad_ranges"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nchecks that a range is in the format we expect min max and return", "response": "def _check_range(range_):\n \"\"\"Check 
that a range is in the format we expect [min, max] and return\"\"\"\n try:\n if not isinstance(range_, list):\n range_ = list(range_)\n min_, max_ = range_\n except (ValueError, TypeError):\n raise TypeError(\"each range in ising_linear_ranges should be a list of length 2.\")\n if not isinstance(min_, Number) or not isinstance(max_, Number) or min_ > max_:\n raise ValueError((\"each range in ising_linear_ranges should be a 2-tuple \"\n \"(min, max) where min <= max\"))\n return range_"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nrelabel the variables and nodes according to the given mapping.", "response": "def relabel_variables(self, mapping, inplace=True):\n \"\"\"Relabel the variables and nodes according to the given mapping.\n\n Args:\n mapping (dict): a dict mapping the current variable/node labels\n to new ones.\n inplace (bool, optional, default=True):\n If True, the specification is updated in-place; otherwise, a new specification\n is returned.\n\n Returns:\n :class:`.Specification`: A Specification with the variables\n relabeled according to mapping. 
If copy=False returns itself,\n if copy=True returns a new Specification.\n\n \"\"\"\n graph = self.graph\n ising_linear_ranges = self.ising_linear_ranges\n ising_quadratic_ranges = self.ising_quadratic_ranges\n\n try:\n old_labels = set(iterkeys(mapping))\n new_labels = set(itervalues(mapping))\n except TypeError:\n raise ValueError(\"mapping targets must be hashable objects\")\n\n for v in new_labels:\n if v in graph and v not in old_labels:\n raise ValueError(('A variable cannot be relabeled \"{}\" without also relabeling '\n \"the existing variable of the same name\").format(v))\n\n if not inplace:\n return Specification(nx.relabel_nodes(graph, mapping, copy=True), # also checks the mapping\n tuple(mapping.get(v, v) for v in self.decision_variables),\n self.feasible_configurations, # does not change\n vartype=self.vartype, # does not change\n ising_linear_ranges={mapping.get(v, v): ising_linear_ranges[v] for v in graph},\n ising_quadratic_ranges={mapping.get(v, v): {mapping.get(u, u): r\n for u, r in iteritems(neighbors)}\n for v, neighbors in iteritems(ising_quadratic_ranges)})\n else:\n # now we need the ising_linear_ranges and ising_quadratic_ranges\n shared = old_labels & new_labels\n\n if shared:\n # in this case we need to transform to an intermediate state\n # counter will be used to generate the intermediate labels, as an easy optimization\n # we start the counter with a high number because often variables are labeled by\n # integers starting from 0\n counter = itertools.count(2 * len(self))\n\n old_to_intermediate = {}\n intermediate_to_new = {}\n\n for old, new in iteritems(mapping):\n if old == new:\n # we can remove self-labels\n continue\n\n if old in new_labels or new in old_labels:\n\n # try to get a new unique label\n lbl = next(counter)\n while lbl in new_labels or lbl in old_labels:\n lbl = next(counter)\n\n # add it to the mapping\n old_to_intermediate[old] = lbl\n intermediate_to_new[lbl] = new\n\n else:\n old_to_intermediate[old] = new\n # 
don't need to add it to intermediate_to_new because it is a self-label\n\n Specification.relabel_variables(self, old_to_intermediate, inplace=True)\n Specification.relabel_variables(self, intermediate_to_new, inplace=True)\n return self\n\n # modifies graph in place\n nx.relabel_nodes(self.graph, mapping, copy=False)\n\n # this is always a new object\n self.decision_variables = tuple(mapping.get(v, v) for v in self.decision_variables)\n\n # we can just relabel in-place without worrying about conflict\n for v in old_labels:\n if v in mapping:\n ising_linear_ranges[mapping[v]] = ising_linear_ranges[v]\n del ising_linear_ranges[v]\n\n # need to do the deeper level first\n for neighbors in itervalues(ising_quadratic_ranges):\n for v in list(neighbors):\n if v in mapping:\n neighbors[mapping[v]] = neighbors[v]\n del neighbors[v]\n\n # now the top level\n for v in old_labels:\n if v in mapping:\n ising_quadratic_ranges[mapping[v]] = ising_quadratic_ranges[v]\n del ising_quadratic_ranges[v]\n\n return self"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngrabs values from a dictionary using an unordered tuple as a key.", "response": "def get_item(dictionary, tuple_key, default_value):\n \"\"\"Grab values from a dictionary using an unordered tuple as a key.\n\n Dictionary should not contain None, 0, or False as dictionary values.\n\n Args:\n dictionary: Dictionary that uses two-element tuple as keys\n tuple_key: Unordered tuple of two elements\n default_value: Value that is returned when the tuple_key is not found in the dictionary\n \"\"\"\n u, v = tuple_key\n\n # Grab tuple-values from dictionary\n tuple1 = dictionary.get((u, v), None)\n tuple2 = dictionary.get((v, u), None)\n\n # Return the first value that is not {None, 0, False}\n return tuple1 or tuple2 or default_value"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a linear programming matrix based on the spin states 
graph and scalars provided.", "response": "def _get_lp_matrix(spin_states, nodes, edges, offset_weight, gap_weight):\n \"\"\"Creates an linear programming matrix based on the spin states, graph, and scalars provided.\n LP matrix:\n [spin_states, corresponding states of edges, offset_weight, gap_weight]\n\n Args:\n spin_states: Numpy array of spin states\n nodes: Iterable\n edges: Iterable of tuples\n offset_weight: Numpy 1-D array or number\n gap_weight: Numpy 1-D array or a number\n \"\"\"\n if len(spin_states) == 0:\n return None\n\n # Set up an empty matrix\n n_states = len(spin_states)\n m_linear = len(nodes)\n m_quadratic = len(edges)\n matrix = np.empty((n_states, m_linear + m_quadratic + 2)) # +2 columns for offset and gap\n\n # Populate linear terms (i.e. spin states)\n if spin_states.ndim == 1:\n spin_states = np.expand_dims(spin_states, 1)\n matrix[:, :m_linear] = spin_states\n\n # Populate quadratic terms\n node_indices = dict(zip(nodes, range(m_linear)))\n for j, (u, v) in enumerate(edges):\n u_ind = node_indices[u]\n v_ind = node_indices[v]\n matrix[:, j + m_linear] = np.multiply(matrix[:, u_ind], matrix[:, v_ind])\n\n # Populate offset and gap columns, respectively\n matrix[:, -2] = offset_weight\n matrix[:, -1] = gap_weight\n return matrix"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngenerates a binary quadratic model for the given set of variables in the given graph.", "response": "def generate_bqm(graph, table, decision_variables,\n linear_energy_ranges=None, quadratic_energy_ranges=None, min_classical_gap=2):\n \"\"\"\n Args:\n graph: A networkx.Graph\n table: An iterable of valid spin configurations. Each configuration is a tuple of\n variable assignments ordered by `decision`.\n decision_variables: An ordered iterable of the variables in the binary quadratic model.\n linear_energy_ranges: Dictionary of the form {v: (min, max), ...} where min and\n max are the range of values allowed to v. 
The default range is [-2, 2].\n quadratic_energy_ranges: Dict of the form {(u, v): (min, max), ...} where min and max are\n the range of values allowed to (u, v). The default range is [-1, 1].\n min_classical_gap: A float. The minimum energy gap between the highest feasible state and\n the lowest infeasible state.\n \"\"\"\n # Check for auxiliary variables in the graph\n if len(graph) != len(decision_variables):\n raise ValueError('Penaltymodel-lp does not handle problems with auxiliary variables')\n\n if not linear_energy_ranges:\n linear_energy_ranges = {}\n\n if not quadratic_energy_ranges:\n quadratic_energy_ranges = {}\n\n # Simplify graph naming\n # Note: nodes' and edges' order determine the column order of the LP\n nodes = decision_variables\n edges = graph.edges\n\n # Set variable names for lengths\n m_linear = len(nodes) # Number of linear biases\n m_quadratic = len(edges) # Number of quadratic biases\n n_noted = len(table) # Number of spin combinations specified in the table\n n_unnoted = 2**m_linear - n_noted # Number of spin combinations that were not specified\n\n # Linear programming matrix for spin states specified by 'table'\n noted_states = table.keys() if isinstance(table, dict) else table\n noted_states = list(noted_states)\n noted_matrix = _get_lp_matrix(np.asarray(noted_states), nodes, edges, 1, 0)\n\n # Linear programming matrix for spins states that were not specified by 'table'\n spin_states = product([-1, 1], repeat=m_linear) if m_linear > 1 else [-1, 1]\n unnoted_states = [state for state in spin_states if state not in noted_states]\n unnoted_matrix = _get_lp_matrix(np.asarray(unnoted_states), nodes, edges, 1, -1)\n if unnoted_matrix is not None:\n unnoted_matrix *= -1 # Taking negative in order to flip the inequality\n\n # Constraints\n if isinstance(table, dict):\n noted_bound = np.asarray([table[state] for state in noted_states])\n unnoted_bound = np.full((n_unnoted, 1), -1 * max(table.values())) # -1 for flipped inequality\n else:\n 
noted_bound = np.zeros((n_noted, 1))\n unnoted_bound = np.zeros((n_unnoted, 1))\n\n # Bounds\n linear_range = (MIN_LINEAR_BIAS, MAX_LINEAR_BIAS)\n quadratic_range = (MIN_QUADRATIC_BIAS, MAX_QUADRATIC_BIAS)\n\n bounds = [linear_energy_ranges.get(node, linear_range) for node in nodes]\n bounds += [get_item(quadratic_energy_ranges, edge, quadratic_range) for edge in edges]\n\n # Note: Since ising has {-1, 1}, the largest possible gap is [-largest_bias, largest_bias],\n # hence that 2 * sum(largest_biases)\n max_gap = 2 * sum(max(abs(lbound), abs(ubound)) for lbound, ubound in bounds)\n bounds.append((None, None)) # Bound for offset\n bounds.append((min_classical_gap, max_gap)) # Bound for gap.\n\n # Cost function\n cost_weights = np.zeros((1, m_linear + m_quadratic + 2))\n cost_weights[0, -1] = -1 # Only interested in maximizing the gap\n\n # Returns a Scipy OptimizeResult\n result = linprog(cost_weights.flatten(), A_eq=noted_matrix, b_eq=noted_bound,\n A_ub=unnoted_matrix, b_ub=unnoted_bound, bounds=bounds)\n\n #TODO: propagate scipy.optimize.linprog's error message?\n if not result.success:\n raise ValueError('Penaltymodel-lp is unable to find a solution.')\n\n # Split result\n x = result.x\n h = x[:m_linear]\n j = x[m_linear:-2]\n offset = x[-2]\n gap = x[-1]\n\n if gap <= 0:\n raise ValueError('Penaltymodel-lp is unable to find a solution.')\n\n # Create BQM\n bqm = dimod.BinaryQuadraticModel.empty(dimod.SPIN)\n bqm.add_variables_from((v, bias) for v, bias in zip(nodes, h))\n bqm.add_interactions_from((u, v, bias) for (u, v), bias in zip(edges, j))\n bqm.add_offset(offset)\n\n return bqm, gap"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef generate(graph, feasible_configurations, decision_variables,\n linear_energy_ranges, quadratic_energy_ranges, min_classical_gap,\n smt_solver_name=None):\n \"\"\"Generates the Ising model that induces the given feasible configurations. 
The code is based\n on the papers [#do]_ and [#mc]_.\n\n Args:\n graph (nx.Graph): The target graph on which the Ising model is to be built.\n feasible_configurations (dict): The set of feasible configurations\n of the decision variables. The key is a feasible configuration\n as a tuple of spins, the values are the associated energy.\n decision_variables (list/tuple): Which variables in the graph are\n assigned as decision variables.\n linear_energy_ranges (dict, optional): A dict of the form\n {v: (min, max), ...} where min and max are the range\n of values allowed to v.\n quadratic_energy_ranges (dict): A dict of the form\n {(u, v): (min, max), ...} where min and max are the range\n of values allowed to (u, v).\n min_classical_gap (float): The minimum energy gap between the highest feasible state and the\n lowest infeasible state.\n smt_solver_name (str/None): The name of the smt solver. Must\n be a solver available to pysmt. If None, uses the pysmt default.\n\n Returns:\n tuple: A 4-tuple containing:\n\n dict: The linear biases of the Ising problem.\n\n dict: The quadratic biases of the Ising problem.\n\n :obj:`dimod.BinaryQuadraticModel`\n\n float: The classical energy gap between ground and the first\n excited state.\n\n Raises:\n ImpossiblePenaltyModel: If the penalty model cannot be built. Normally due\n to a non-zero infeasible gap.\n\n .. [#do] Bian et al., \"Discrete optimization using quantum annealing on sparse Ising models\",\n https://www.frontiersin.org/articles/10.3389/fphy.2014.00056/full\n\n .. [#mc] Z. Bian, F. Chudak, R. Israel, B. Lackey, W. G. Macready, and A. Roy\n \"Mapping constrained optimization problems to quantum annealing with application to fault diagnosis\"\n https://arxiv.org/pdf/1603.03111.pdf\n\n \"\"\"\n if len(graph) == 0:\n return dimod.BinaryQuadraticModel.empty(dimod.SPIN), float('inf')\n\n # we need to build a Table. 
The table encodes all of the information used by the smt solver\n table = Table(graph, decision_variables, linear_energy_ranges, quadratic_energy_ranges)\n\n # iterate over every possible configuration of the decision variables.\n for config in itertools.product((-1, 1), repeat=len(decision_variables)):\n\n # determine the spin associated with each variable in decision variables.\n spins = dict(zip(decision_variables, config))\n\n if config in feasible_configurations:\n # if the configuration is feasible, we require that the minimum energy over all\n # possible aux variable settings be exactly its target energy (given by the value)\n table.set_energy(spins, feasible_configurations[config])\n else:\n # if the configuration is infeasible, we simply want its minimum energy over all\n # possible aux variable settings to be an upper bound on the classical gap.\n if isinstance(feasible_configurations, dict) and feasible_configurations:\n highest_feasible_energy = max(feasible_configurations.values())\n else:\n highest_feasible_energy = 0\n\n table.set_energy_upperbound(spins, highest_feasible_energy)\n\n # now we just need to get a solver\n with Solver(smt_solver_name) as solver:\n\n # add all of the assertions from the table to the solver\n for assertion in table.assertions:\n solver.add_assertion(assertion)\n\n # add min classical gap assertion\n gap_assertion = table.gap_bound_assertion(min_classical_gap)\n solver.add_assertion(gap_assertion)\n\n # check if the model is feasible at all.\n if solver.solve():\n # since we know the current model is feasible, grab the initial model.\n model = solver.get_model()\n\n # we want to increase the gap until we have found the max classical gap\n # note: gmax is the maximum possible gap for a particular set of variables. To find it,\n # we take the sum of the largest coefficients possible and double it. We double it\n # because in Ising, the largest gap possible from the largest coefficient is the\n # negative of said coefficient. 
Example: consider a graph with one node A, with a\n # energy range of [-2, 1]. The largest energy gap between spins +1 and -1 is 4;\n # namely, the largest absolute coefficient -2 with the ising spins results to\n # gap = (-2)(-1) - (-2)(1) = 4.\n gmin = min_classical_gap\n gmax = sum(max(abs(r) for r in linear_energy_ranges[v]) for v in graph)\n gmax += sum(max(abs(r) for r in quadratic_energy_ranges[(u, v)])\n for (u, v) in graph.edges)\n gmax *= 2\n\n # 2 is a good target gap\n g = max(2., gmin)\n\n while abs(gmax - gmin) >= MAX_GAP_DELTA:\n solver.push()\n\n gap_assertion = table.gap_bound_assertion(g)\n solver.add_assertion(gap_assertion)\n\n if solver.solve():\n model = solver.get_model()\n gmin = float(model.get_py_value(table.gap))\n\n else:\n solver.pop()\n gmax = g\n\n g = min(gmin + .1, (gmax + gmin) / 2)\n\n else:\n raise ImpossiblePenaltyModel(\"Model cannot be built\")\n\n # finally we need to convert our values back into python floats.\n\n classical_gap = float(model.get_py_value(table.gap))\n\n # if the problem is fully specified (or empty) it has infinite gap\n if (len(decision_variables) == len(graph) and\n decision_variables and # at least one variable\n len(feasible_configurations) == 2**len(decision_variables)):\n classical_gap = float('inf')\n\n return table.theta.to_bqm(model), classical_gap", "response": "Generates the Ising model that induces the given feasible configurations."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates an pysmt Real constant from a number x and the denominator limited.", "response": "def limitReal(x, max_denominator=1000000):\n \"\"\"Creates an pysmt Real constant from x.\n\n Args:\n x (number): A number to be cast to a pysmt constant.\n max_denominator (int, optional): The maximum size of the denominator.\n Default 1000000.\n\n Returns:\n A Real constant with the given value and the denominator limited.\n\n \"\"\"\n f = 
Fraction(x).limit_denominator(max_denominator)\n return Real((f.numerator, f.denominator))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating Theta from a networkx. Graph object and a set of energy ranges.", "response": "def from_graph(cls, graph, linear_energy_ranges, quadratic_energy_ranges):\n \"\"\"Create Theta from a graph and energy ranges.\n\n Args:\n graph (:obj:`networkx.Graph`):\n Provides the structure for Theta.\n\n linear_energy_ranges (dict):\n A dict of the form {v: (min, max), ...} where min and max are the\n range of values allowed to v.\n quadratic_energy_ranges (dict):\n A dict of the form {(u, v): (min, max), ...} where min and max\n are the range of values allowed to (u, v).\n\n Returns:\n :obj:`.Theta`\n\n \"\"\"\n get_env().enable_infix_notation = True # not sure why we need this here\n\n theta = cls.empty(dimod.SPIN)\n\n theta.add_offset(Symbol('offset', REAL))\n\n def Linear(v):\n \"\"\"Create a Symbol for the linear bias including the energy range\n constraints.\"\"\"\n bias = Symbol('h_{}'.format(v), REAL)\n\n min_, max_ = linear_energy_ranges[v]\n\n theta.assertions.add(LE(bias, limitReal(max_)))\n theta.assertions.add(GE(bias, limitReal(min_)))\n\n return bias\n\n def Quadratic(u, v):\n \"\"\"Create a Symbol for the quadratic bias including the energy range\n constraints.\"\"\"\n bias = Symbol('J_{},{}'.format(u, v), REAL)\n\n if (v, u) in quadratic_energy_ranges:\n min_, max_ = quadratic_energy_ranges[(v, u)]\n else:\n min_, max_ = quadratic_energy_ranges[(u, v)]\n\n theta.assertions.add(LE(bias, limitReal(max_)))\n theta.assertions.add(GE(bias, limitReal(min_)))\n\n return bias\n\n for v in graph.nodes:\n theta.add_variable(v, Linear(v))\n\n for u, v in graph.edges:\n theta.add_interaction(u, v, Quadratic(u, v))\n\n return theta"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef to_bqm(self, model):\n linear = ((v, 
float(model.get_py_value(bias)))\n for v, bias in self.linear.items())\n quadratic = ((u, v, float(model.get_py_value(bias)))\n for (u, v), bias in self.quadratic.items())\n offset = float(model.get_py_value(self.offset))\n\n return dimod.BinaryQuadraticModel(linear, quadratic, offset, dimod.SPIN)", "response": "Given a pysmt model return a bqm."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef SpinTimes(spin, bias):\n if not isinstance(spin, int):\n raise TypeError('spin must be an int')\n if spin == -1:\n return Times(Real((-1, 1)), bias) # -1 / 1\n elif spin == 1:\n # identity\n return bias\n else:\n raise ValueError('expected spins to be -1., or 1.')", "response": "Define our own multiplication for bias times spins."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef energy(self, spins, break_aux_symmetry=True):\n subtheta = self.theta.copy()\n subtheta.fix_variables(spins)\n\n # we need aux variables\n av = next(self._auxvar_counter)\n auxvars = {v: Symbol('aux{}_{}'.format(av, v), BOOL) for v in subtheta.linear}\n if break_aux_symmetry and av == 0:\n # without loss of generality, we can assume that the aux variables are all\n # spin-up for one configuration\n self.assertions.update(set(auxvars.values()))\n\n trees = self._trees\n\n if not trees:\n # if there are no variables to eliminate, then the offset of\n # subtheta is the exact value and we can just return it\n assert not subtheta.linear and not subtheta.quadratic\n return subtheta.offset\n\n energy = Plus(self.message(trees, {}, subtheta, auxvars), subtheta.offset)\n\n return energy", "response": "A formula for the exact energy of Theta with spins fixed."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ndetermining the energy of the elimination tree. 
Args: tree (dict): The current elimination tree spins (dict): The current fixed spins subtheta (dict): Theta with spins fixed. auxvars (dict): The auxiliary variables for the given spins. Returns: The formula for the energy of the tree.", "response": "def message(self, tree, spins, subtheta, auxvars):\n \"\"\"Determine the energy of the elimination tree.\n\n Args:\n tree (dict): The current elimination tree\n spins (dict): The current fixed spins\n subtheta (dict): Theta with spins fixed.\n auxvars (dict): The auxiliary variables for the given spins.\n\n Returns:\n The formula for the energy of the tree.\n\n \"\"\"\n energy_sources = set()\n for v, children in tree.items():\n aux = auxvars[v]\n\n assert all(u in spins for u in self._ancestors[v])\n\n # build an iterable over all of the energies contributions\n # that we can exactly determine given v and our known spins\n # in these contributions we assume that v is positive\n def energy_contributions():\n yield subtheta.linear[v]\n\n for u, bias in subtheta.adj[v].items():\n if u in spins:\n yield SpinTimes(spins[u], bias)\n\n plus_energy = Plus(energy_contributions())\n minus_energy = SpinTimes(-1, plus_energy)\n\n # if the variable has children, we need to recursively determine their energies\n if children:\n # set v to be positive\n spins[v] = 1\n plus_energy = Plus(plus_energy, self.message(children, spins, subtheta, auxvars))\n spins[v] = -1\n minus_energy = Plus(minus_energy, self.message(children, spins, subtheta, auxvars))\n del spins[v]\n\n # we now need a real-valued smt variable to be our message\n m = FreshSymbol(REAL)\n\n ancestor_aux = {auxvars[u] if spins[u] > 0 else Not(auxvars[u])\n for u in self._ancestors[v]}\n plus_aux = And({aux}.union(ancestor_aux))\n minus_aux = And({Not(aux)}.union(ancestor_aux))\n\n self.assertions.update({LE(m, plus_energy),\n LE(m, minus_energy),\n Implies(plus_aux, GE(m, plus_energy)),\n Implies(minus_aux, GE(m, minus_energy))\n })\n\n energy_sources.add(m)\n\n return 
Plus(energy_sources)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef message_upperbound(self, tree, spins, subtheta):\n energy_sources = set()\n for v, subtree in tree.items():\n\n assert all(u in spins for u in self._ancestors[v])\n\n # build an iterable over all of the energies contributions\n # that we can exactly determine given v and our known spins\n # in these contributions we assume that v is positive\n def energy_contributions():\n yield subtheta.linear[v]\n\n for u, bias in subtheta.adj[v].items():\n if u in spins:\n yield Times(limitReal(spins[u]), bias)\n\n energy = Plus(energy_contributions())\n\n # if there are no more variables in the order, we can stop\n # otherwise we need the next message variable\n if subtree:\n spins[v] = 1.\n plus = self.message_upperbound(subtree, spins, subtheta)\n spins[v] = -1.\n minus = self.message_upperbound(subtree, spins, subtheta)\n del spins[v]\n else:\n plus = minus = limitReal(0.0)\n\n # we now need a real-valued smt variable to be our message\n m = FreshSymbol(REAL)\n\n self.assertions.update({LE(m, Plus(energy, plus)),\n LE(m, Plus(Times(energy, limitReal(-1.)), minus))})\n\n energy_sources.add(m)\n\n return Plus(energy_sources)", "response": "Determine an upper bound on the energy of the current elimination tree and the current fixed spins and subtheta."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the energy of Theta with spins fixed to target_energy.", "response": "def set_energy(self, spins, target_energy):\n \"\"\"Set the energy of Theta with spins fixed to target_energy.\n\n Args:\n spins (dict): Spin values for a subset of the variables in Theta.\n target_energy (float): The desired energy for Theta with spins fixed.\n\n Notes:\n Add equality constraint to assertions.\n\n \"\"\"\n spin_energy = self.energy(spins)\n self.assertions.add(Equals(spin_energy, limitReal(target_energy)))"} {"SOURCE": 
"codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsets the energy of the Theta with spins fixed to be greater than gap + offset.", "response": "def set_energy_upperbound(self, spins, offset=0):\n \"\"\"Upper bound the energy of Theta with spins fixed to be greater than (gap + offset).\n\n Args:\n spins (dict): Spin values for a subset of the variables in Theta.\n offset (float): A value that is added to the upper bound. Default value is 0.\n\n Notes:\n Add equality constraint to assertions.\n\n \"\"\"\n spin_energy = self.energy_upperbound(spins)\n self.assertions.add(GE(spin_energy, self.gap + offset))"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nlooping until a change is received from the server", "response": "def _poll_loop(self):\n \"\"\"At self.poll_period poll for changes\"\"\"\n next_poll = time.time()\n while True:\n next_poll += self._poll_period\n timeout = next_poll - time.time()\n if timeout < 0:\n timeout = 0\n try:\n return self._stop_queue.get(timeout=timeout)\n except TimeoutError:\n # No stop, no problem\n pass\n try:\n self.handle_changes(self.client.get_changes())\n except Exception:\n # TODO: should fault here?\n self.log.exception(\"Error while getting changes\")"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ntake a camelCaseFieldName and returns an Title Case Field Name", "response": "def camel_to_title(name):\n \"\"\"Takes a camelCaseFieldName and returns an Title Case Field Name\n\n Args:\n name (str): E.g. camelCaseFieldName\n\n Returns:\n str: Title Case converted name. E.g. 
Camel Case Field Name\n \"\"\"\n split = re.findall(r\"[A-Z]?[a-z0-9]+|[A-Z]+(?=[A-Z]|$)\", name)\n ret = \" \".join(split)\n ret = ret[0].upper() + ret[1:]\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ntakes a snake_field_name and returns a camelCaseFieldName", "response": "def snake_to_camel(name):\n \"\"\"Takes a snake_field_name and returns a camelCaseFieldName\n\n Args:\n name (str): E.g. snake_field_name or SNAKE_FIELD_NAME\n\n Returns:\n str: camelCase converted name. E.g. capsFieldName\n \"\"\"\n ret = \"\".join(x.title() for x in name.split(\"_\"))\n ret = ret[0].lower() + ret[1:]\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates a dictionary representation of object attributes", "response": "def to_dict(self):\n # type: () -> OrderedDict\n \"\"\"Create a dictionary representation of object attributes\n\n Returns:\n OrderedDict serialised version of self\n \"\"\"\n\n d = OrderedDict()\n if self.typeid:\n d[\"typeid\"] = self.typeid\n\n for k in self.call_types:\n # check_camel_case(k)\n d[k] = serialize_object(getattr(self, k))\n\n return d"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates an instance of cls from a serialized version of cls. 
d is a dict of endpoints of cls to set", "response": "def from_dict(cls, d, ignore=()):\n \"\"\"Create an instance from a serialized version of cls\n\n Args:\n d(dict): Endpoints of cls to set\n ignore(tuple): Keys to ignore\n\n Returns:\n Instance of this class\n \"\"\"\n filtered = {}\n for k, v in d.items():\n if k == \"typeid\":\n assert v == cls.typeid, \\\n \"Dict has typeid %s but %s has typeid %s\" % \\\n (v, cls, cls.typeid)\n elif k not in ignore:\n filtered[k] = v\n try:\n inst = cls(**filtered)\n except TypeError as e:\n raise TypeError(\"%s raised error: %s\" % (cls.typeid, str(e)))\n return inst"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef register_subclass(cls, typeid):\n def decorator(subclass):\n cls._subcls_lookup[typeid] = subclass\n subclass.typeid = typeid\n return subclass\n return decorator", "response": "Register a subclass so from_dict works"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nlooking up a class based on a serialized dictionary containing a typeid", "response": "def lookup_subclass(cls, d):\n \"\"\"Look up a class based on a serialized dictionary containing a typeid\n\n Args:\n d (dict): Dictionary with key \"typeid\"\n\n Returns:\n Serializable subclass\n \"\"\"\n try:\n typeid = d[\"typeid\"]\n except KeyError:\n raise FieldError(\"typeid not present in keys %s\" % list(d))\n\n subclass = cls._subcls_lookup.get(typeid, None)\n if not subclass:\n raise FieldError(\"'%s' not a valid typeid\" % typeid)\n else:\n return subclass"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nstart the process going", "response": "def start(self, timeout=None):\n \"\"\"Start the process going\n\n Args:\n timeout (float): Maximum amount of time to wait for each spawned\n process. 
None means forever\n \"\"\"\n assert self.state == STOPPED, \"Process already started\"\n self.state = STARTING\n should_publish = self._start_controllers(\n self._controllers.values(), timeout)\n if should_publish:\n self._publish_controllers(timeout)\n self.state = STARTED"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nstop the process and wait for it to finish", "response": "def stop(self, timeout=None):\n \"\"\"Stop the process and wait for it to finish\n\n Args:\n timeout (float): Maximum amount of time to wait for each spawned\n object. None means forever\n \"\"\"\n assert self.state == STARTED, \"Process not started\"\n self.state = STOPPING\n # Allow every controller a chance to clean up\n self._run_hook(ProcessStopHook, timeout=timeout)\n for s in self._spawned:\n if not s.ready():\n self.log.debug(\n \"Waiting for %s *%s **%s\", s._function, s._args, s._kwargs)\n s.wait(timeout=timeout)\n self._spawned = []\n self._controllers = OrderedDict()\n self._unpublished = set()\n self.state = STOPPED\n self.log.debug(\"Done process.stop()\")"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nrun the function in a worker thread returning a Spawned object.", "response": "def spawn(self, function, *args, **kwargs):\n # type: (Callable[..., Any], *Any, **Any) -> Spawned\n \"\"\"Runs the function in a worker thread, returning a Result object\n\n Args:\n function: Function to run\n args: Positional arguments to run the function with\n kwargs: Keyword arguments to run the function with\n\n Returns:\n Spawned: Something you can call wait(timeout) on to see when it's\n finished executing\n \"\"\"\n assert self.state != STOPPED, \"Can't spawn when process stopped\"\n spawned = Spawned(function, args, kwargs)\n self._spawned.append(spawned)\n self._spawn_count += 1\n # Filter out things that are ready to avoid memory leaks\n if self._spawn_count > SPAWN_CLEAR_COUNT:\n self._clear_spawn_list()\n return 
spawned"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nadding a controller to be hosted by this process.", "response": "def add_controller(self, controller, timeout=None):\n # type: (Controller, float) -> None\n \"\"\"Add a controller to be hosted by this process\n\n Args:\n controller (Controller): Its controller\n timeout (float): Maximum amount of time to wait for each spawned\n object. None means forever\n \"\"\"\n assert controller.mri not in self._controllers, \\\n \"Controller already exists for %s\" % controller.mri\n self._controllers[controller.mri] = controller\n controller.setup(self)\n if self.state:\n should_publish = self._start_controllers([controller], timeout)\n if self.state == STARTED and should_publish:\n self._publish_controllers(timeout)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef block_view(self, mri):\n # type: (str) -> Block\n \"\"\"Get a Block view from a Controller with given mri\"\"\"\n controller = self.get_controller(mri)\n block = controller.block_view()\n return block", "response": "Get a Block view from a Controller with given mri"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate the label of the Block Meta object with the title of the given TitleInfo.", "response": "def update_title(self, _, info):\n # type: (object, TitleInfo) -> None\n \"\"\"Set the label of the Block Meta object\"\"\"\n with self._lock:\n self._block.meta.set_label(info.title)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the health attribute. Called from part.", "response": "def update_health(self, reporter, info):\n # type: (object, HealthInfo) -> None\n \"\"\"Set the health attribute. 
Called from part\"\"\"\n with self.changes_squashed:\n alarm = info.alarm\n if alarm.is_ok():\n self._faults.pop(reporter, None)\n else:\n self._faults[reporter] = alarm\n if self._faults:\n # Sort them by severity\n faults = sorted(self._faults.values(),\n key=lambda a: a.severity.value)\n alarm = faults[-1]\n text = faults[-1].message\n else:\n alarm = None\n text = \"OK\"\n self.health.set_value(text, alarm=alarm)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates the relevant parts for the current object.", "response": "def make_parts_for(self, field_name, field_data):\n \"\"\"Create the relevant parts for this field\n\n Args:\n field_name (str): Short field name, e.g. VAL\n field_data (FieldData): Field data object\n \"\"\"\n typ = field_data.field_type\n subtyp = field_data.field_subtype\n\n if typ in (\"read\", \"xadc\"):\n writeable = False\n else:\n writeable = True\n\n if typ == \"time\" or typ in (\"param\", \"read\") and subtyp == \"time\":\n self._make_time_parts(field_name, field_data, writeable)\n elif typ == \"write\" and subtyp == \"action\":\n self._make_action_part(field_name, field_data)\n elif typ in (\"param\", \"read\", \"write\", \"xadc\"):\n self._make_param_part(field_name, field_data, writeable)\n elif typ == \"bit_out\":\n self._make_out(field_name, field_data, \"bit\")\n elif typ == \"pos_out\":\n self._make_out(field_name, field_data, \"pos\")\n self._make_scale_offset(field_name)\n self._make_out_capture(field_name, field_data)\n elif typ == \"ext_out\":\n self._make_out_capture(field_name, field_data)\n elif typ == \"bit_mux\":\n self._make_mux(field_name, field_data, \"bit\")\n self._make_mux_delay(field_name)\n elif typ == \"pos_mux\":\n self._make_mux(field_name, field_data, \"pos\")\n elif typ == \"table\":\n self._make_table(field_name, field_data)\n else:\n raise ValueError(\"Unknown type %r subtype %r\" % (typ, subtyp))"} {"SOURCE": "codesearchnet", "instruction": "Create a 
Python 3 function to\nget a view of a block in the controller with the given mri", "response": "def block_view(self, mri):\n # type: (str) -> Block\n \"\"\"Get a view of a block\n\n Args:\n mri: The mri of the controller hosting the block\n\n Returns:\n Block: The block we control\n \"\"\"\n controller = self.get_controller(mri)\n block = controller.block_view(weakref.proxy(self))\n return block"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting function to call just before requests are dispatched", "response": "def set_notify_dispatch_request(self, notify_dispatch_request, *args):\n \"\"\"Set function to call just before requests are dispatched\n\n Args:\n notify_dispatch_request (callable): function will be called\n with request as single arg just before request is dispatched\n \"\"\"\n self._notify_dispatch_request = notify_dispatch_request\n self._notify_args = args"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef ignore_stops_before_now(self):\n self._sentinel_stop = object()\n self._q.put(self._sentinel_stop)", "response": "Ignore any stops received before this point"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef put(self, path, value, timeout=None, event_timeout=None):\n future = self.put_async(path, value)\n self.wait_all_futures(\n future, timeout=timeout, event_timeout=event_timeout)\n return future.result()", "response": "Puts a value to a path and returns when it completes\n "} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef put_async(self, path, value):\n request = Put(self._get_next_id(), path, value)\n request.set_callback(self._q.put)\n future = self._dispatch_request(request)\n return future", "response": "Puts a value to a path and returns immediately\n "} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it 
does\ndef post_async(self, path, params=None):\n request = Post(self._get_next_id(), path, params)\n request.set_callback(self._q.put)\n future = self._dispatch_request(request)\n return future", "response": "Asynchronously calls a function on a child block\n and returns a Future that resolves to the result\n ."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef subscribe(self, path, callback, *args):\n request = Subscribe(self._get_next_id(), path, delta=False)\n request.set_callback(self._q.put)\n # If self is in args, then make weak version of it\n saved_args = []\n for arg in args:\n if arg is self:\n saved_args.append(weakref.proxy(self))\n else:\n saved_args.append(arg)\n self._subscriptions[request.id] = (callback, saved_args)\n future = self._dispatch_request(request)\n return future", "response": "Subscribe to changes in a given attribute and call callback when it changes\n "} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef unsubscribe(self, future):\n assert future not in self._pending_unsubscribes, \\\n \"%r has already been unsubscribed from\" % \\\n self._pending_unsubscribes[future]\n subscribe = self._requests[future]\n self._pending_unsubscribes[future] = subscribe\n # Clear out the subscription\n self._subscriptions.pop(subscribe.id)\n request = Unsubscribe(subscribe.id)\n request.set_callback(self._q.put)\n try:\n controller = self.get_controller(subscribe.path[0])\n except ValueError:\n # Controller has already gone, probably during tearDown\n pass\n else:\n self.handle_request(controller, request)", "response": "Terminates the subscription given by a future."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsends an unsubscribe for all active subscriptions", "response": "def unsubscribe_all(self, callback=False):\n \"\"\"Send an unsubscribe for all active subscriptions\"\"\"\n 
futures = ((f, r) for f, r in self._requests.items()\n if isinstance(r, Subscribe)\n and f not in self._pending_unsubscribes)\n if futures:\n for future, request in futures:\n if callback:\n log.warn(\"Unsubscribing from %s\", request.path)\n cothread.Callback(self.unsubscribe, future)\n else:\n self.unsubscribe(future)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef when_matches(self, path, good_value, bad_values=None, timeout=None,\n event_timeout=None):\n \"\"\"Resolve when an path value equals value\n\n Args:\n path (list): The path to wait to\n good_value (object): the value to wait for\n bad_values (list): values to raise an error on\n timeout (float): time in seconds to wait for responses, wait\n forever if None\n event_timeout: maximum time in seconds to wait between each response\n event, wait forever if None\n \"\"\"\n future = self.when_matches_async(path, good_value, bad_values)\n self.wait_all_futures(\n future, timeout=timeout, event_timeout=event_timeout)", "response": "Resolve when an path value equals value\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef when_matches_async(self, path, good_value, bad_values=None):\n when = When(good_value, bad_values)\n future = self.subscribe(path, when)\n when.set_future_context(future, weakref.proxy(self))\n return future", "response": "Wait for an attribute to become a given value"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef wait_all_futures(self, futures, timeout=None, event_timeout=None):\n # type: (Union[List[Future], Future, None], float, float) -> None\n \"\"\"Services all futures until the list 'futures' are all done\n then returns. 
Calls relevant subscription callbacks as they\n come off the queue and raises an exception on abort\n\n Args:\n futures: a `Future` or list of all futures that the caller\n wants to wait for\n timeout: maximum total time in seconds to wait for responses, wait\n forever if None\n event_timeout: maximum time in seconds to wait between each response\n event, wait forever if None\n \"\"\"\n if timeout is None:\n end = None\n else:\n end = time.time() + timeout\n\n if not isinstance(futures, list):\n if futures:\n futures = [futures]\n else:\n futures = []\n\n filtered_futures = []\n\n for f in futures:\n if f.done():\n if f.exception() is not None:\n raise f.exception()\n else:\n filtered_futures.append(f)\n\n while filtered_futures:\n if event_timeout is not None:\n until = time.time() + event_timeout\n if end is not None:\n until = min(until, end)\n else:\n until = end\n self._service_futures(filtered_futures, until)", "response": "Wait for all futures to complete and return the result of the service."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef sleep(self, seconds):\n until = time.time() + seconds\n try:\n while True:\n self._service_futures([], until)\n except TimeoutError:\n return", "response": "Services all futures until seconds have passed"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _service_futures(self, futures, until=None):\n if until is None:\n timeout = None\n else:\n timeout = until - time.time()\n if timeout < 0:\n timeout = 0\n try:\n response = self._q.get(timeout)\n except TimeoutError:\n raise TimeoutError(\n \"Timeout waiting for %s\" % self._describe_futures(futures))\n if response is self._sentinel_stop:\n self._sentinel_stop = None\n elif response is self.STOP:\n if self._sentinel_stop is None:\n # This is a stop we should listen to...\n raise AbortedError(\n \"Aborted waiting for %s\" % 
self._describe_futures(futures))\n elif isinstance(response, Update):\n # This is an update for a subscription\n if response.id in self._subscriptions:\n func, args = self._subscriptions[response.id]\n func(response.value, *args)\n elif isinstance(response, Return):\n future = self._futures.pop(response.id)\n del self._requests[future]\n self._pending_unsubscribes.pop(future, None)\n result = response.value\n future.set_result(result)\n try:\n futures.remove(future)\n except ValueError:\n pass\n elif isinstance(response, Error):\n future = self._futures.pop(response.id)\n del self._requests[future]\n future.set_exception(response.message)\n try:\n futures.remove(future)\n except ValueError:\n pass\n else:\n raise response.message", "response": "This method is used to service the futures in the cluster."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the result or raise the error the function has produced", "response": "def get(self, timeout=None):\n # type: (float) -> T\n \"\"\"Return the result or raise the error the function has produced\"\"\"\n self.wait(timeout)\n if isinstance(self._result, Exception):\n raise self._result\n return self._result"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nextracts the version number from the version. 
py file.", "response": "def get_version():\n \"\"\"Extracts the version number from the version.py file.\n \"\"\"\n VERSION_FILE = os.path.join(module_name, 'version.py')\n txt = open(VERSION_FILE).read()\n mo = re.search(r'^__version__ = [\\'\"]([^\\'\"]*)[\\'\"]', txt, re.M)\n if mo:\n version = mo.group(1)\n bs_version = os.environ.get('MODULEVER', '0.0')\n assert bs_version == \"0.0\" or bs_version == version, \\\n \"Version {} specified by the build system doesn't match {} in \" \\\n \"version.py\".format(bs_version, version)\n return version\n else:\n raise RuntimeError('Unable to find version string in {0}.'\n .format(VERSION_FILE))"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nupdates the dict of parameters that are needed to configure the controller.", "response": "def update_configure_params(self, part=None, info=None):\n # type: (Part, ConfigureParamsInfo) -> None\n \"\"\"Tell controller part needs different things passed to Configure\"\"\"\n with self.changes_squashed:\n # Update the dict\n if part:\n self.part_configure_params[part] = info\n\n # No process yet, so don't do this yet\n if self.process is None:\n return\n\n # Get the model of our configure method as the starting point\n configure_model = MethodModel.from_callable(self.configure)\n\n # These will not be inserted as the already exist\n ignored = tuple(ConfigureHook.call_types)\n\n # Re-calculate the following\n required = []\n takes_elements = OrderedDict()\n defaults = OrderedDict()\n\n # First do the required arguments\n for k in configure_model.takes.required:\n required.append(k)\n takes_elements[k] = configure_model.takes.elements[k]\n for part in self.parts.values():\n try:\n info = self.part_configure_params[part]\n except KeyError:\n continue\n for k in info.required:\n if k not in required and k not in ignored:\n required.append(k)\n takes_elements[k] = info.metas[k]\n\n # Now the default and optional\n for k in 
configure_model.takes.elements:\n if k not in required:\n takes_elements[k] = configure_model.takes.elements[k]\n for part in self.parts.values():\n try:\n info = self.part_configure_params[part]\n except KeyError:\n continue\n for k in info.metas:\n if k not in required and k not in ignored:\n takes_elements[k] = info.metas[k]\n if k in info.defaults:\n defaults[k] = info.defaults[k]\n\n # Set the values\n configure_model.takes.set_elements(takes_elements)\n configure_model.takes.set_required(required)\n configure_model.set_defaults(defaults)\n\n # Update methods from the new metas\n self._block.configure.set_takes(configure_model.takes)\n self._block.configure.set_defaults(configure_model.defaults)\n\n # Now make a validate model with returns\n validate_model = MethodModel.from_dict(configure_model.to_dict())\n returns = MapMeta.from_dict(validate_model.takes.to_dict())\n for v in returns.elements.values():\n v.set_writeable(False)\n self._block.validate.set_takes(validate_model.takes)\n self._block.validate.set_defaults(validate_model.defaults)\n self._block.validate.set_returns(returns)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef validate(self, generator, axesToMove=None, **kwargs):\n # type: (AGenerator, AAxesToMove, **Any) -> AConfigureParams\n \"\"\"Validate configuration parameters and return validated parameters.\n\n Doesn't take device state into account so can be run in any state\n \"\"\"\n iterations = 10\n # We will return this, so make sure we fill in defaults\n for k, default in self._block.configure.defaults.items():\n if k not in kwargs:\n kwargs[k] = default\n # The validated parameters we will eventually return\n params = ConfigureParams(generator, axesToMove, **kwargs)\n # Make some tasks just for validate\n part_contexts = self.create_part_contexts()\n # Get any status from all parts\n status_part_info = self.run_hooks(\n ReportStatusHook(p, c) for p, c in 
part_contexts.items())\n while iterations > 0:\n # Try up to 10 times to get a valid set of parameters\n iterations -= 1\n # Validate the params with all the parts\n validate_part_info = self.run_hooks(\n ValidateHook(p, c, status_part_info, **kwargs)\n for p, c, kwargs in self._part_params(part_contexts, params))\n tweaks = ParameterTweakInfo.filter_values(validate_part_info)\n if tweaks:\n for tweak in tweaks:\n deserialized = self._block.configure.takes.elements[\n tweak.parameter].validate(tweak.value)\n setattr(params, tweak.parameter, deserialized)\n self.log.debug(\n \"Tweaking %s to %s\", tweak.parameter, deserialized)\n else:\n # Consistent set, just return the params\n return params\n raise ValueError(\"Could not get a consistent set of parameters\")", "response": "Validate the configuration parameters and return validated parameters."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef configure(self, generator, axesToMove=None, **kwargs):\n # type: (AGenerator, AAxesToMove, **Any) -> None\n \"\"\"Validate the params then configure the device ready for run().\n\n Try to prepare the device as much as possible so that run() is quick to\n start, this may involve potentially long running activities like moving\n motors.\n\n Normally it will return in Armed state. If the user aborts then it will\n return in Aborted state. If something goes wrong it will return in Fault\n state. 
If the user disables then it will return in Disabled state.\n \"\"\"\n params = self.validate(generator, axesToMove, **kwargs)\n try:\n self.transition(ss.CONFIGURING)\n self.do_configure(params)\n self.abortable_transition(ss.ARMED)\n except AbortedError:\n self.abort_queue.put(None)\n raise\n except Exception as e:\n self.go_to_error_state(e)\n raise", "response": "Configure the current state of the current state of the current state."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef run(self):\n # type: () -> None\n \"\"\"Run a device where configure() has already be called\n\n Normally it will return in Ready state. If setup for multiple-runs with\n a single configure() then it will return in Armed state. If the user\n aborts then it will return in Aborted state. If something goes wrong it\n will return in Fault state. If the user disables then it will return in\n Disabled state.\n \"\"\"\n if self.configured_steps.value < self.total_steps.value:\n next_state = ss.ARMED\n else:\n next_state = ss.READY\n try:\n self.transition(ss.RUNNING)\n hook = RunHook\n going = True\n while going:\n try:\n self.do_run(hook)\n except AbortedError:\n self.abort_queue.put(None)\n # Wait for a response on the resume_queue\n should_resume = self.resume_queue.get()\n if should_resume:\n # we need to resume\n hook = ResumeHook\n self.log.debug(\"Resuming run\")\n else:\n # we don't need to resume, just drop out\n raise\n else:\n going = False\n self.abortable_transition(next_state)\n except AbortedError:\n raise\n except Exception as e:\n self.go_to_error_state(e)\n raise", "response": "Run a device where configure has already been called with\n ."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef abort(self):\n # type: () -> None\n \"\"\"Abort the current operation and block until aborted\n\n Normally it will return in Aborted state. 
If something goes wrong it\n will return in Fault state. If the user disables then it will return in\n Disabled state.\n \"\"\"\n # Tell _call_do_run not to resume\n if self.resume_queue:\n self.resume_queue.put(False)\n self.try_aborting_function(ss.ABORTING, ss.ABORTED, self.do_abort)", "response": "Abort the current operation and block until aborted\n is set to True."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef pause(self, lastGoodStep=0):\n # type: (ALastGoodStep) -> None\n \"\"\"Pause a run() so that resume() can be called later, or seek within\n an Armed or Paused state.\n\n The original call to run() will not be interrupted by pause(), it will\n wait until the scan completes or is aborted.\n\n Normally it will return in Paused state. If the user aborts then it will\n return in Aborted state. If something goes wrong it will return in Fault\n state. If the user disables then it will return in Disabled state.\n \"\"\"\n current_state = self.state.value\n if lastGoodStep <= 0:\n last_good_step = self.completed_steps.value\n else:\n last_good_step = lastGoodStep\n if current_state == ss.RUNNING:\n next_state = ss.PAUSED\n else:\n next_state = current_state\n assert last_good_step < self.total_steps.value, \\\n \"Cannot seek to after the end of the scan\"\n self.try_aborting_function(\n ss.SEEKING, next_state, self.do_pause, last_good_step)", "response": "Pause a run so that the user can seek within the scan."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef resume(self):\n # type: () -> None\n \"\"\"Resume a paused scan.\n\n Normally it will return in Running state. 
If something goes wrong it\n will return in Fault state.\n \"\"\"\n self.transition(ss.RUNNING)\n self.resume_queue.put(True)", "response": "Resume a paused scan."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef create_info(cls, configure_func):\n # type: (Callable) -> ConfigureParamsInfo\n \"\"\"Create a `ConfigureParamsInfo` describing the extra parameters\n that should be passed at configure\"\"\"\n call_types = getattr(configure_func, \"call_types\",\n {}) # type: Dict[str, Anno]\n metas = OrderedDict()\n required = []\n defaults = OrderedDict()\n for k, anno in call_types.items():\n if k not in cls.call_types:\n scls = VMeta.lookup_annotype_converter(anno)\n metas[k] = scls.from_annotype(anno, writeable=True)\n if anno.default is NO_DEFAULT:\n required.append(k)\n elif anno.default is not None:\n defaults[k] = anno.default\n return ConfigureParamsInfo(metas, required, defaults)", "response": "Create a ConfigureParamsInfo describing the extra parameters\n that should be passed at configure"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef register_hooked(self,\n hooks, # type: Union[Type[Hook], Sequence[Type[Hook]]]\n func, # type: Hooked\n args_gen=None # type: Optional[ArgsGen]\n ):\n # type: (Type[Hook], Callable, Optional[Callable]) -> None\n \"\"\"Register func to be run when any of the hooks are run by parent\n\n Args:\n hooks: A Hook class or list of Hook classes of interest\n func: The callable that should be run on that Hook\n args_gen: Optionally specify the argument names that should be\n passed to func. 
If not given then use func.call_types.keys\n \"\"\"\n if self.hooked is None:\n self.hooked = {}\n if args_gen is None:\n args_gen = getattr(func, \"call_types\", {}).keys\n if not isinstance(hooks, Sequence):\n hooks = [hooks]\n for hook_cls in hooks:\n self.hooked[hook_cls] = (func, args_gen)", "response": "Register func to be run when any of the hooks are run by parent\n "} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ntaking a hook and optionally calls hook. run on a function", "response": "def on_hook(self, hook):\n # type: (Hook) -> None\n \"\"\"Takes a hook, and optionally calls hook.run on a function\"\"\"\n try:\n func, args_gen = self.hooked[type(hook)]\n except (KeyError, TypeError):\n return\n else:\n hook(func, args_gen())"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef transition_allowed(self, initial_state, target_state):\n # type: (str, str) -> bool\n \"\"\"Check if a transition between two states is allowed\"\"\"\n assert initial_state in self._allowed, \\\n \"%s is not in %s\" % (initial_state, list(self._allowed))\n return target_state in self._allowed[initial_state]", "response": "Check if a transition between two states is allowed"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef set_allowed(self, initial_state, *allowed_states):\n # type: (str, *str) -> None\n \"\"\"Add an allowed transition from initial_state to allowed_states\"\"\"\n allowed_states = list(allowed_states)\n self._allowed.setdefault(initial_state, set()).update(allowed_states)\n for state in allowed_states + [initial_state]:\n if state not in self.possible_states:\n self.possible_states.append(state)", "response": "Add an allowed transition from initial_state to allowed_states"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ndefining a string parameter coming from a shell command to be used within this YAML 
file.", "response": "def cmd_string(name, cmd):\n # type: (AName, ACmd) -> ADefine\n \"\"\"Define a string parameter coming from a shell command to be used within\n this YAML file. Trailing newlines will be stripped.\"\"\"\n value = subprocess.check_output(cmd, shell=True).rstrip(\"\\n\")\n return Define(name, value)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef export_env_string(name, value):\n # type: (AEnvName, AEnvValue) -> ADefine\n \"\"\"Exports an environment variable with the given value\"\"\"\n os.environ[name] = value\n return Define(name, value)", "response": "Exports an environment variable with the given value"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef module_path(name, path):\n # type: (AModuleName, AModulePath) -> ADefine\n \"\"\"Load an external malcolm module (e.g. ADCore/etc/malcolm)\"\"\"\n define = Define(name, path)\n assert os.path.isdir(path), \"%r doesn't exist\" % path\n name = \"malcolm.modules.%s\" % name\n import_package_from_path(name, path)\n return define", "response": "Load an external malcolm module."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\npass response from server to process receive queue", "response": "def on_message(self, message):\n \"\"\"Pass response from server to process receive queue\n\n Args:\n message(str): Received message\n \"\"\"\n # Called in tornado loop\n try:\n self.log.debug(\"Got message %s\", message)\n d = json_decode(message)\n response = deserialize_object(d, Response)\n if isinstance(response, (Return, Error)):\n request = self._request_lookup.pop(response.id)\n if isinstance(response, Error):\n # Make the message an exception so it can be raised\n response.message = ResponseError(response.message)\n else:\n request = self._request_lookup[response.id]\n # Transfer the work of the callback to cothread\n cothread.Callback(request.callback, response)\n 
except Exception:\n # If we don't catch the exception here, tornado will spew odd\n # error messages about 'HTTPRequest' object has no attribute 'path'\n self.log.exception(\"on_message(%r) failed\", message)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndispatches a Put request to the server to send the response to the server", "response": "def send_put(self, mri, attribute_name, value):\n \"\"\"Abstract method to dispatch a Put to the server\n\n Args:\n mri (str): The mri of the Block\n attribute_name (str): The name of the Attribute within the Block\n value: The value to put\n \"\"\"\n q = Queue()\n request = Put(\n path=[mri, attribute_name, \"value\"],\n value=value)\n request.set_callback(q.put)\n IOLoopHelper.call(self._send_request, request)\n response = q.get()\n if isinstance(response, Error):\n raise response.message\n else:\n return response.value"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef send_post(self, mri, method_name, **params):\n q = Queue()\n request = Post(\n path=[mri, method_name],\n parameters=params)\n request.set_callback(q.put)\n IOLoopHelper.call(self._send_request, request)\n response = q.get()\n if isinstance(response, Error):\n raise response.message\n else:\n return response.value", "response": "Dispatch a Post to the server"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef filter_parts(cls, part_info):\n # type: (Type[T], PartInfo) -> Dict[str, List[T]]\n \"\"\"Filter the part_info dict looking for instances of our class\n\n Args:\n part_info (dict): {part_name: [Info] or None} as returned from\n Controller.run_hook()\n\n Returns:\n dict: {part_name: [info]} where info is a subclass of cls\n \"\"\"\n filtered = OrderedDict()\n for part_name, info_list in part_info.items():\n if info_list is None or isinstance(info_list, Exception):\n continue\n info_list = [i for i in info_list 
if isinstance(i, cls)]\n if info_list:\n filtered[part_name] = info_list\n return filtered", "response": "Filter the part_info dict looking for instances of cls in the class\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef filter_values(cls, part_info):\n # type: (Type[T], PartInfo) -> List[T]\n \"\"\"Filter the part_info dict list looking for instances of our class\n\n Args:\n part_info (dict): {part_name: [Info] or None} as returned from\n Controller.run_hook()\n\n Returns:\n list: [info] where info is a subclass of cls\n \"\"\"\n filtered = []\n for info_list in cls.filter_parts(part_info).values():\n filtered += info_list\n return filtered", "response": "Filter the part_info dict list looking for instances of our class\n "} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nfilters the part_info dict list looking for a single value of our class.", "response": "def filter_single_value(cls, part_info, error_msg=None):\n # type: (Type[T], PartInfo, str) -> T\n \"\"\"Filter the part_info dict list looking for a single instance of our\n class\n\n Args:\n part_info (dict): {part_name: [Info] or None} as returned from\n Controller.run_hook()\n error_msg (str, optional): Specific error message to show if\n there isn't a single value\n\n Returns:\n info subclass of cls\n \"\"\"\n filtered = cls.filter_values(part_info)\n if len(filtered) != 1:\n if error_msg is None:\n error_msg = \"Expected a single %s, got %s of them\" % \\\n (cls.__name__, len(filtered))\n raise BadValueError(error_msg)\n return filtered[0]"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ndisconnecting anyone listening to any of the given mris.", "response": "def disconnect_pv_clients(self, mris):\n # type: (List[str]) -> None\n \"\"\"Disconnect anyone listening to any of the given mris\"\"\"\n for mri in mris:\n for pv in self._pvs.pop(mri, 
{}).values():\n # Close pv with force destroy on, this will call\n # onLastDisconnect\n pv.close(destroy=True, sync=True, timeout=1.0)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nadd a Source Port tag to the tags list removing any other Source Ports", "response": "def with_source_port_tag(self, tags, connected_value):\n \"\"\"Add a Source Port tag to the tags list, removing any other Source\n Ports\"\"\"\n new_tags = [t for t in tags if not t.startswith(\"sourcePort:\")]\n new_tags.append(self.source_port_tag(connected_value))\n return new_tags"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nsearches tags for port info returning it if found", "response": "def port_tag_details(cls, tags):\n # type: (Sequence[str]) -> Union[Tuple[bool, Port, str], None]\n \"\"\"Search tags for port info, returning it\n\n Args:\n tags: A list of tags to check\n\n Returns:\n None or (is_source, port, connected_value|disconnected_value)\n where port is one of the Enum entries of Port\n \"\"\"\n for tag in tags:\n match = port_tag_re.match(tag)\n if match:\n source_sink, port, extra = match.groups()\n return source_sink == \"source\", cls(port), extra"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ntransitioning the state of the current object to a new state.", "response": "def transition(self, state, message=\"\"):\n \"\"\"Change to a new state if the transition is allowed\n\n Args:\n state (str): State to transition to\n message (str): Message if the transition is to a fault state\n \"\"\"\n with self.changes_squashed:\n initial_state = self.state.value\n if self.state_set.transition_allowed(\n initial_state=initial_state, target_state=state):\n self.log.debug(\n \"%s: Transitioning from %s to %s\",\n self.mri, initial_state, state)\n if state == ss.DISABLED:\n alarm = Alarm.invalid(\"Disabled\")\n elif state == ss.FAULT:\n alarm = Alarm.major(message)\n else:\n alarm = 
Alarm()\n self.update_health(self, HealthInfo(alarm))\n self.state.set_value(state)\n self.state.set_alarm(alarm)\n for child, writeable in self._children_writeable[state].items():\n if isinstance(child, AttributeModel):\n child.meta.set_writeable(writeable)\n elif isinstance(child, MethodModel):\n child.set_writeable(writeable)\n else:\n raise TypeError(\"Cannot transition from %s to %s\" %\n (initial_state, state))"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef send_put(self, mri, attribute_name, value):\n path = attribute_name + \".value\"\n typ, value = convert_to_type_tuple_value(serialize_object(value))\n if isinstance(typ, tuple):\n # Structure, make into a Value\n _, typeid, fields = typ\n value = Value(Type(fields, typeid), value)\n try:\n self._ctxt.put(mri, {path: value}, path)\n except RemoteError:\n if attribute_name == \"exports\":\n # TODO: use a tag instead of a name\n # This will change the structure of the block\n # Wait for reconnect\n self._queues[mri].get(timeout=DEFAULT_TIMEOUT)\n else:\n # Not expected, raise\n raise", "response": "Dispatch a Put to the server"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef send_post(self, mri, method_name, **params):\n typ, parameters = convert_to_type_tuple_value(serialize_object(params))\n uri = NTURI(typ[2])\n\n uri = uri.wrap(\n path=\"%s.%s\" % (mri, method_name),\n kws=parameters,\n scheme=\"pva\"\n )\n value = self._ctxt.rpc(mri, uri, timeout=None)\n return convert_value_to_dict(value)", "response": "Abstract method to dispatch a Post to the server"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef make_view(self, context, data, child_name):\n # type: (Context, Model, str) -> Any\n \"\"\"Make a child View of data[child_name]\"\"\"\n with self._lock:\n child = data[child_name]\n child_view = 
make_view(self, context, child)\n return child_view", "response": "Make a child View of the child_name."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _handle_get(self, request):\n # type: (Get) -> CallbackResponses\n \"\"\"Called with the lock taken\"\"\"\n data = self._block\n\n for i, endpoint in enumerate(request.path[1:]):\n try:\n data = data[endpoint]\n except KeyError:\n if hasattr(data, \"typeid\"):\n typ = data.typeid\n else:\n typ = type(data)\n raise UnexpectedError(\n \"Object %s of type %r has no attribute %r\" % (\n request.path[:i+1], typ, endpoint))\n # Important to serialize now with the lock so we get a consistent set\n serialized = serialize_object(data)\n ret = [request.return_response(serialized)]\n return ret", "response": "Handles a GET request."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nhandle a Put request.", "response": "def _handle_put(self, request):\n # type: (Put) -> CallbackResponses\n \"\"\"Called with the lock taken\"\"\"\n attribute_name = request.path[1]\n\n attribute = self._block[attribute_name]\n assert isinstance(attribute, AttributeModel), \\\n \"Cannot Put to %s which is a %s\" % (attribute.path, type(attribute))\n self.check_field_writeable(attribute)\n\n put_function = self.get_put_function(attribute_name)\n value = attribute.meta.validate(request.value)\n\n with self.lock_released:\n result = put_function(value)\n\n if request.get and result is None:\n # We asked for a Get, and didn't get given a return, so do return\n # the current value. 
Don't serialize here as value is immutable\n # (as long as we don't try too hard to break the rules)\n result = self._block[attribute_name].value\n elif not request.get:\n # We didn't ask for a Get, so throw result away\n result = None\n ret = [request.return_response(result)]\n return ret"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncalls with the lock taken", "response": "def _handle_post(self, request):\n # type: (Post) -> CallbackResponses\n \"\"\"Called with the lock taken\"\"\"\n method_name = request.path[1]\n\n method = self._block[method_name]\n assert isinstance(method, MethodModel), \\\n \"Cannot Post to %s which is a %s\" % (method.path, type(method))\n self.check_field_writeable(method)\n\n post_function = self.get_post_function(method_name)\n args = method.validate(request.parameters)\n\n with self.lock_released:\n result = post_function(**args)\n\n # Don't need to serialize as the result should be immutable\n ret = [request.return_response(result)]\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nwait until a StatefulController has initialized the child block", "response": "def wait_for_stateful_block_init(context, mri, timeout=DEFAULT_TIMEOUT):\n \"\"\"Wait until a Block backed by a StatefulController has initialized\n\n Args:\n context (Context): The context to use to make the child block\n mri (str): The mri of the child block\n timeout (float): The maximum time to wait\n \"\"\"\n context.when_matches(\n [mri, \"state\", \"value\"], StatefulStates.READY,\n bad_values=[StatefulStates.FAULT, StatefulStates.DISABLED],\n timeout=timeout)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn the result of the call that the future represents.", "response": "def result(self, timeout=None):\n \"\"\"Return the result of the call that the future represents.\n\n Args:\n timeout: The number of seconds to wait for the result if the 
future\n isn't done. If None, then there is no limit on the wait time.\n\n Returns:\n The result of the call that the future represents.\n\n Raises:\n TimeoutError: If the future didn't finish executing before the given\n timeout.\n exceptions.Exception: If the call raised then that exception will be\n raised.\n \"\"\"\n if self._state == self.RUNNING:\n self._context.wait_all_futures([self], timeout)\n return self.__get_result()"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef exception(self, timeout=None):\n if self._state == self.RUNNING:\n self._context.wait_all_futures([self], timeout)\n return self._exception", "response": "Returns the exception raised by the call that the future represents."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsetting the result of the work associated with the future.", "response": "def set_result(self, result):\n \"\"\"Sets the return value of work associated with the future.\n\n Should only be used by Task and unit tests.\n \"\"\"\n self._result = result\n self._state = self.FINISHED"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef set_exception(self, exception):\n assert isinstance(exception, Exception), \\\n \"%r should be an Exception\" % exception\n self._exception = exception\n self._state = self.FINISHED", "response": "Sets the result of the future as being the given exception."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nchanges the name of the logger that log. should call", "response": "def set_logger(self, **fields):\n \"\"\"Change the name of the logger that log.* should call\n\n Args:\n **fields: Extra fields to be logged. 
Logger name will be:\n \".\".join([<module_name>, <cls_name>] + fields_sorted_on_key)\n \"\"\"\n names = [self.__module__, self.__class__.__name__]\n for field, value in sorted(fields.items()):\n names.append(value)\n # names should be something like this for one field:\n # [\"malcolm.modules.scanning.controllers.runnablecontroller\",\n # \"RunnableController\", \"BL45P-ML-SCAN-01\"]\n self.log = logging.getLogger(\".\".join(names))\n if fields:\n self.log.addFilter(FieldFilter(fields))\n return self.log"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate a Return Response object to signal a return value", "response": "def return_response(self, value=None):\n # type: (Any) -> Tuple[Callback, Return]\n \"\"\"Create a Return Response object to signal a return value\"\"\"\n response = Return(id=self.id, value=value)\n return self.callback, response"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef error_response(self, exception):\n # type: (Exception) -> Tuple[Callback, Error]\n \"\"\"Create an Error Response object to signal an error\"\"\"\n response = Error(id=self.id, message=exception)\n log.exception(\"Exception raised for request %s\", self)\n return self.callback, response", "response": "Create an Error Response object to signal an error"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nmaking a View subclass containing properties specific for given data", "response": "def make_view(controller, context, data):\n # type: (Controller, Context, Any) -> Any\n \"\"\"Make a View subclass containing properties specific for given data\n\n Args:\n controller (Controller): The child controller that hosts the data\n context (Context): The context the parent has made that the View should\n use for manipulating the data\n data (Model): The actual data that context will be manipulating\n\n Returns:\n View: A View subclass instance that 
provides a user-focused API to\n the given data\n \"\"\"\n if isinstance(data, BlockModel):\n # Make an Block View\n view = _make_view_subclass(Block, controller, context, data)\n elif isinstance(data, AttributeModel):\n # Make an Attribute View\n view = Attribute(controller, context, data)\n elif isinstance(data, MethodModel):\n # Make a Method View\n view = Method(controller, context, data)\n elif isinstance(data, Model):\n # Make a generic View\n view = _make_view_subclass(View, controller, context, data)\n elif isinstance(data, dict):\n # Make a dict of Views\n d = OrderedDict()\n for k, v in data.items():\n d[k] = make_view(controller, context, v)\n view = d\n elif isinstance(data, list):\n # Need to recurse down\n view = [make_view(controller, context, x) for x in data]\n else:\n # Just return the data unwrapped as it should be immutable\n view = data\n return view"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nput a value to the Attribute and wait for completion", "response": "def put_value(self, value, timeout=None):\n \"\"\"Put a value to the Attribute and wait for completion\"\"\"\n self._context.put(self._data.path + [\"value\"], value, timeout=timeout)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef calculate_exposure(self, duration):\n # type: (float) -> float\n \"\"\"Calculate the exposure to set the detector to given the duration of\n the frame and the readout_time and frequency_accuracy\"\"\"\n exposure = duration - self.frequency_accuracy * duration / 1000000.0 - \\\n self.readout_time\n assert exposure > 0.0, \\\n \"Exposure time %s too small when deadtime taken into account\" % (\n exposure,)\n return exposure", "response": "Calculate the exposure to set the detector to given the duration of\n the frame and the readout_time and frequency_accuracy."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting 
the notifier and the path from the path from the block root", "response": "def set_notifier_path(self, notifier, path):\n \"\"\"Sets the notifier, and the path from the path from block root\n\n Args:\n notifier (Notifier): The Notifier to tell when endpoint data changes\n path (list): The absolute path to get to this object\n \"\"\"\n # type: (Union[Notifier, DummyNotifier], List[str]) -> None\n # This function should either change from the DummyNotifier or to\n # the DummyNotifier, never between two valid notifiers\n assert self.notifier is Model.notifier or notifier is Model.notifier, \\\n \"Already have a notifier %s path %s\" % (self.notifier, self.path)\n self.notifier = notifier\n self.path = path\n # Tell all our children too\n for name, ct in self.call_types.items():\n if ct.is_mapping:\n child = getattr(self, name)\n if child and issubclass(ct.typ[1], Model):\n for k, v in child.items():\n v.set_notifier_path(notifier, self.path + [name, k])\n elif issubclass(ct.typ, Model):\n assert not ct.is_array, \\\n \"Can't deal with Arrays of Models %s\" % ct\n child = getattr(self, name)\n child.set_notifier_path(notifier, self.path + [name])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef apply_change(self, path, *args):\n # type: (List[str], Any) -> None\n \"\"\"Take a single change from a Delta and apply it to this model\"\"\"\n if len(path) > 1:\n # This is for a child\n self[path[0]].apply_change(path[1:], *args)\n else:\n # This is for us\n assert len(path) == 1 and len(args) == 1, \\\n \"Cannot process change %s\" % ([self.path + path] + list(args))\n getattr(self, \"set_%s\" % path[0])(args[0])", "response": "Take a single change from a Delta and apply it to this model"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates an AttributeModel instance of the correct type for this Meta", "response": "def create_attribute_model(self, 
initial_value=None):\n # type: (Any) -> AttributeModel\n \"\"\"Make an AttributeModel instance of the correct type for this Meta\n\n Args:\n initial_value: The initial value the Attribute should take\n\n Returns:\n AttributeModel: The created attribute model instance\n \"\"\"\n attr = self.attribute_class(meta=self, value=initial_value)\n return attr"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn an instance of this class from an Anno.", "response": "def from_annotype(cls, anno, writeable, **kwargs):\n # type: (Anno, bool, **Any) -> VMeta\n \"\"\"Return an instance of this class from an Anno\"\"\"\n ret = cls(description=anno.description, writeable=writeable, **kwargs)\n widget = ret.default_widget()\n if widget != Widget.NONE:\n ret.set_tags([widget.tag()])\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef register_annotype_converter(cls, types, is_array=False,\n is_mapping=False):\n # type: (Union[Sequence[type], type], bool, bool) -> Any\n \"\"\"Register this class as a converter for Anno instances\"\"\"\n if not isinstance(types, Sequence):\n types = [types]\n\n def decorator(subclass):\n for typ in types:\n cls._annotype_lookup[(typ, is_array, is_mapping)] = subclass\n return subclass\n\n return decorator", "response": "Register this class as a converter for Anno instances"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nlook up a vmeta based on an Anno", "response": "def lookup_annotype_converter(cls, anno):\n # type: (Anno) -> Type[VMeta]\n \"\"\"Look up a vmeta based on an Anno\"\"\"\n if hasattr(anno.typ, \"__bases__\"):\n # This is a proper type\n bases = inspect.getmro(anno.typ)\n else:\n # This is a numpy dtype\n bases = [anno.typ]\n for typ in bases:\n key = (typ, bool(anno.is_array), bool(anno.is_mapping))\n try:\n return cls._annotype_lookup[key]\n except KeyError:\n pass\n raise KeyError(anno)"} 
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef set_value(self, value, set_alarm_ts=True, alarm=None, ts=None):\n # type: (Any, bool, Alarm, TimeStamp) -> Any\n \"\"\"Set value, calculating alarm and ts if requested\"\"\"\n value = self.meta.validate(value)\n if set_alarm_ts:\n if alarm is None:\n alarm = Alarm.ok\n else:\n alarm = deserialize_object(alarm, Alarm)\n if ts is None:\n ts = TimeStamp()\n else:\n ts = deserialize_object(ts, TimeStamp)\n self.set_value_alarm_ts(value, alarm, ts)\n else:\n self.set_endpoint_data(\"value\", value)\n return self.value", "response": "Set value for the current instance of the class."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nset value with pre - validated alarm and timeStamp.", "response": "def set_value_alarm_ts(self, value, alarm, ts):\n \"\"\"Set value with pre-validated alarm and timeStamp\"\"\"\n # type: (Any, Alarm, TimeStamp) -> None\n with self.notifier.changes_squashed:\n # Assume they are of the right format\n self.value = value\n self.notifier.add_squashed_change(self.path + [\"value\"], value)\n if alarm is not self.alarm:\n self.alarm = alarm\n self.notifier.add_squashed_change(self.path + [\"alarm\"], alarm)\n self.timeStamp = ts\n self.notifier.add_squashed_change(self.path + [\"timeStamp\"], ts)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a greeting to name.", "response": "def greet(self, name, sleep=0):\n # type: (AName, ASleep) -> AGreeting\n \"\"\"Optionally sleep <sleep> seconds, then return a greeting to <name>\"\"\"\n print(\"Manufacturing greeting...\")\n sleep_for(sleep)\n greeting = \"Hello %s\" % name\n return greeting"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nextract the version number from the version. 
py file.", "response": "def get_version():\n \"\"\"Extracts the version number from the version.py file.\"\"\"\n VERSION_FILE = '../malcolm/version.py'\n mo = re.search(r'^__version__ = [\\'\"]([^\\'\"]*)[\\'\"]',\n open(VERSION_FILE, 'rt').read(), re.M)\n if mo:\n return mo.group(1)\n else:\n raise RuntimeError(\n 'Unable to find version string in {0}.'.format(VERSION_FILE))"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef add_nexus_nodes(generator, vds_file_path):\n\n # create the axes dimensions attribute, a comma separated list giving size\n # of the axis dimensions padded with . for the detector dimensions and\n # multidimensional dimensions\n pad_dims = []\n for d in generator.dimensions:\n if len(d.axes) == 1:\n pad_dims.append(\"%s_set\" % d.axes[0])\n else:\n pad_dims.append(\".\")\n\n pad_dims += [\".\"] * 2 # assume a 2 dimensional detector\n\n with h5py.File(vds_file_path, 'r+', libver=\"latest\") as vds:\n for data, node in zip(set_data, set_bases):\n # create a group for this entry\n vds.require_group(node)\n # points to the axis demand data sets\n vds[node].attrs[\"axes\"] = pad_dims\n vds[node].attrs[\"NX_class\"] = ['NXdata']\n\n # points to the detector dataset for this entry\n vds[node].attrs[\"signal\"] = data.split('/')[-1]\n # a hard link from this entry 'signal' to the actual data\n vds[node + data] = vds[data]\n\n axis_sets = {}\n # iterate the axes in each dimension of the generator to create the\n # axis information nodes\n for i, d in enumerate(generator.dimensions):\n for axis in d.axes:\n # add signal data dimension for axis\n axis_indices = '{}_set_indices'.format(axis)\n vds[node].attrs[axis_indices] = i\n\n # demand positions for axis\n axis_set = '{}_set'.format(axis)\n if axis_sets.get(axis_set):\n # link to the first entry's demand list\n vds[node + axis_set] = axis_sets[axis_set]\n else:\n # create the demand list for the first entry only\n axis_demands = d.get_positions(axis)\n 
vds.create_dataset(\n node + axis_set, data=axis_demands)\n vds[node + axis_set].attrs[\"units\"] = \\\n generator.units[axis]\n axis_sets[axis_set] = vds[node + axis_set]\n\n vds['entry'].attrs[\"NX_class\"] = ['NXentry']", "response": "Add in the additional information to make this into a standard nexus tree."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef send_recv(self, message, timeout=10.0):\n response_queue = self.send(message)\n response = self.recv(response_queue, timeout)\n return response", "response": "Send a message to a PandABox and wait for the response"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _send_loop(self):\n while True:\n message, response_queue = self._send_queue.get()\n if message is self.STOP:\n break\n try:\n self._response_queues.put(response_queue)\n self._socket.send(message)\n except Exception: # pylint:disable=broad-except\n log.exception(\"Exception sending message %s\", message)", "response": "Send a message to the server."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nresponds to the person waiting", "response": "def _respond(self, resp):\n \"\"\"Respond to the person waiting\"\"\"\n response_queue = self._response_queues.get(timeout=0.1)\n response_queue.put(resp)\n self._completed_response_lines = []\n self._is_multiline = None"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _recv_loop(self):\n self._completed_response_lines = []\n self._is_multiline = None\n lines_iterator = self._get_lines()\n while True:\n try:\n line = next(lines_iterator)\n if self._is_multiline is None:\n self._is_multiline = line.startswith(\"!\") or line == \".\"\n if line.startswith(\"ERR\"):\n self._respond(ValueError(line))\n elif self._is_multiline:\n if line == \".\":\n self._respond(self._completed_response_lines)\n 
else:\n assert line[0] == \"!\", \\\n \"Multiline response {} doesn't start with !\" \\\n .format(repr(line))\n self._completed_response_lines.append(line[1:])\n else:\n self._respond(line)\n except StopIteration:\n return\n except Exception:\n log.exception(\"Exception receiving message\")\n raise", "response": "This method is called by the service socket to receive messages from the server."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef parameterized_send(self, request, parameter_list):\n response_queues = OrderedDict()\n for parameter in parameter_list:\n response_queues[parameter] = self.send(request % parameter)\n return response_queues", "response": "Send a request to the server with a list of parameters"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncalculates the readout time of the detector from the EPICS driver", "response": "def get_readout_time(self, child, duration):\n \"\"\"Calculate the readout time of the detector from the EPICS driver:\n - Set exposure and acquire period to same value\n - Acquire period will be set to lowest acceptable value\n - Difference will be readout time (this value is affected by\n detector settings)\n \"\"\"\n child.exposure.put_value(duration)\n child.acquirePeriod.put_value(duration)\n readout_time = child.acquirePeriod.value - child.exposure.value\n # It seems that the difference between acquirePeriod and exposure\n # doesn't tell the whole story, we seem to need an additional bit\n # of readout (or something) time on top\n fudge_factor = duration * 0.004 + 0.001\n return readout_time + fudge_factor"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef notify_dispatch_request(self, request):\n # type: (Request) -> None\n \"\"\"Will be called when a context passed to a hooked function is about\n to dispatch a request\"\"\"\n if isinstance(request, Put) 
and request.path[0] == self.mri:\n # This means the context we were passed has just made a Put request\n # so mark the field as \"we_modified\" so it doesn't screw up the\n # modified led\n attribute_name = request.path[-2]\n if attribute_name not in self.no_save:\n self.log.warning(\n \"Part %s tried to set '%s' that is not in self.no_save. \"\n \"This will stop the 'modified' attribute from working.\",\n self, attribute_name)", "response": "Called by the hooked function when a request is about to dispatch a new object."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef sever_sink_ports(self, context, ports, connected_to=None):\n # type: (AContext, APortMap, str) -> None\n \"\"\"Conditionally sever Sink Ports of the child. If connected_to\n is then None then sever all, otherwise restrict to connected_to's\n Source Ports\n\n Args:\n context (Context): The context to use\n ports (dict): {part_name: [PortInfo]}\n connected_to (str): Restrict severing to this part\n \"\"\"\n # Find the Source Ports to connect to\n if connected_to:\n # Calculate a lookup of the Source Port \"name\" to type\n source_port_lookup = self._source_port_lookup(\n ports.get(connected_to, []))\n else:\n source_port_lookup = True\n\n # Find our Sink Ports\n sink_ports = self._get_flowgraph_ports(ports, SinkPortInfo)\n\n # If we have Sunk Ports that need to be disconnected then do so\n if sink_ports and source_port_lookup:\n child = context.block_view(self.mri)\n attribute_values = {}\n for name, port_info in sink_ports.items():\n if source_port_lookup is True or source_port_lookup.get(\n child[name].value, None) == port_info.port:\n attribute_values[name] = port_info.disconnected_value\n child.put_attribute_values(attribute_values)", "response": "Conditionally sever Sink Ports of the child."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncalculates what is connected to what", 
"response": "def calculate_part_visibility(self, ports):\n # type: (APortMap) -> None\n \"\"\"Calculate what is connected to what\n\n Args:\n ports: {part_name: [PortInfo]} from other ports\n \"\"\"\n # Calculate a lookup of Source Port connected_value to part_name\n source_port_lookup = {}\n for part_name, port_infos in SourcePortInfo.filter_parts(ports).items():\n for port_info in port_infos:\n source_port_lookup[port_info.connected_value] = (\n part_name, port_info.port)\n # Look through all the Sink Ports, and set both ends of the\n # connection to visible if they aren't specified\n for part_name, port_infos in SinkPortInfo.filter_parts(\n ports).items():\n for port_info in port_infos:\n if port_info.value != port_info.disconnected_value:\n conn_part, port = source_port_lookup.get(\n port_info.value, (None, None))\n if conn_part and port == port_info.port:\n if conn_part not in self.part_visibility:\n self.part_visibility[conn_part] = True\n if part_name not in self.part_visibility:\n self.part_visibility[part_name] = True"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef handle_subscribe(self, request):\n # type: (Subscribe) -> CallbackResponses\n \"\"\"Handle a Subscribe request from outside. Called with lock taken\"\"\"\n ret = self._tree.handle_subscribe(request, request.path[1:])\n self._subscription_keys[request.generate_key()] = request\n return ret", "response": "Handle a Subscribe request from outside. Called with lock taken."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nhandle a Unsubscribe request from outside. Called with lock taken.", "response": "def handle_unsubscribe(self, request):\n # type: (Unsubscribe) -> CallbackResponses\n \"\"\"Handle a Unsubscribe request from outside. 
Called with lock taken\"\"\"\n subscribe = self._subscription_keys.pop(request.generate_key())\n ret = self._tree.handle_unsubscribe(subscribe, subscribe.path[1:])\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nregisters a squashed change to a particular path", "response": "def add_squashed_change(self, path, data):\n # type: (List[str], Any) -> None\n \"\"\"Register a squashed change to a particular path\n\n Args:\n path (list): The path of what has changed, relative from Block\n data (object): The new data\n \"\"\"\n assert self._squashed_count, \"Called while not squashing changes\"\n self._squashed_changes.append([path[1:], data])"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets our data and notify anyone listening to the changes", "response": "def notify_changes(self, changes):\n # type: (List[List]) -> CallbackResponses\n \"\"\"Set our data and notify anyone listening\n\n Args:\n changes (list): [[path, optional data]] where path is the path to\n what has changed, and data is the unserialized object that has\n changed\n\n Returns:\n list: [(callback, Response)] that need to be called\n \"\"\"\n ret = []\n child_changes = {}\n for change in changes:\n # Add any changes that our children need to know about\n self._add_child_change(change, child_changes)\n\n # If we have update subscribers, serialize at this level\n if self.update_requests:\n serialized = serialize_object(self.data)\n for request in self.update_requests:\n ret.append(request.update_response(serialized))\n\n # If we have delta subscribers, serialize the changes\n if self.delta_requests:\n for change in changes:\n change[-1] = serialize_object(change[-1])\n for request in self.delta_requests:\n ret.append(request.delta_response(changes))\n\n # Now notify our children\n for name, child_changes in child_changes.items():\n ret += self.children[name].notify_changes(child_changes)\n return ret"} 
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nupdating our data and notify any subscribers of children what has changed", "response": "def _update_data(self, data):\n # type: (Any) -> Dict[str, List]\n \"\"\"Set our data and notify any subscribers of children what has changed\n\n Args:\n data (object): The new data\n\n Returns:\n dict: {child_name: [path_list, optional child_data]} of the change\n that needs to be passed to a child as a result of this\n \"\"\"\n self.data = data\n child_change_dict = {}\n # Reflect change of data to children\n for name in self.children:\n child_data = getattr(data, name, None)\n if child_data is None:\n # Deletion\n child_change_dict[name] = [[]]\n else:\n # Change\n child_change_dict[name] = [[], child_data]\n return child_change_dict"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nadding to the list of request to notify and notify the initial value of", "response": "def handle_subscribe(self, request, path):\n # type: (Subscribe, List[str]) -> CallbackResponses\n \"\"\"Add to the list of request to notify, and notify the initial value of\n the data held\n\n Args:\n request (Subscribe): The subscribe request\n path (list): The relative path from ourself\n\n Returns:\n list: [(callback, Response)] that need to be called\n \"\"\"\n ret = []\n if path:\n # Recurse down\n name = path[0]\n if name not in self.children:\n self.children[name] = NotifierNode(\n getattr(self.data, name, None), self)\n ret += self.children[name].handle_subscribe(request, path[1:])\n else:\n # This is for us\n serialized = serialize_object(self.data)\n if request.delta:\n self.delta_requests.append(request)\n ret.append(request.delta_response([[[], serialized]]))\n else:\n self.update_requests.append(request)\n ret.append(request.update_response(serialized))\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef handle_unsubscribe(self, request, 
path):\n # type: (Subscribe, List[str]) -> CallbackResponses\n \"\"\"Remove from the notifier list and send a return\n\n Args:\n request (Subscribe): The original subscribe request\n path (list): The relative path from ourself\n\n Returns:\n list: [(callback, Response)] that need to be called\n \"\"\"\n ret = []\n if path:\n # Recurse down\n name = path[0]\n child = self.children[name]\n ret += child.handle_unsubscribe(request, path[1:])\n if not child.children and not child.update_requests \\\n and not child.delta_requests:\n del self.children[name]\n else:\n # This is for us\n if request in self.update_requests:\n self.update_requests.remove(request)\n else:\n self.delta_requests.remove(request)\n ret.append(request.return_response())\n return ret", "response": "This function is called by the user when a subscription is removed from the notifier list and the return_response is sent to the user when the user is done with it."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nadd a string parameter to be passed when instantiating this YAML file", "response": "def string(name, description, default=None):\n # type: (AName, ADescription, AStringDefault) -> AAnno\n \"\"\"Add a string parameter to be passed when instantiating this YAML file\"\"\"\n args = common_args(name, default)\n return Anno(description, typ=str, **args)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a float64 parameter to be passed when instantiating this YAML file", "response": "def float64(name, description, default=None):\n # type: (AName, ADescription, AFloat64Default) -> AAnno\n \"\"\"Add a float64 parameter to be passed when instantiating this YAML file\"\"\"\n args = common_args(name, default)\n return Anno(description, typ=float, **args)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef int32(name, description, default=None):\n # type: (AName, ADescription, 
AInt32Default) -> AAnno\n \"\"\"Add an int32 parameter to be passed when instantiating this YAML file\"\"\"\n args = common_args(name, default)\n return Anno(description, typ=int, **args)", "response": "Add an int32 parameter to be passed when instantiating this YAML file"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef make_block_creator(yaml_path, filename=None):\n # type: (str, str) -> Callable[..., List[Controller]]\n \"\"\"Make a collection function that will create a list of blocks\n\n Args:\n yaml_path (str): File path to YAML file, or a file in the same dir\n filename (str): If give, use this filename as the last element in\n the yaml_path (so yaml_path can be __file__)\n\n Returns:\n function: A collection function decorated with @takes. This can be\n used in other blocks or instantiated by the process. If the\n YAML text specified controllers or parts then a block instance\n with the given name will be instantiated. If there are any\n blocks listed then they will be called. 
All created blocks\n by this or any sub collection will be returned\n \"\"\"\n sections, yamlname, docstring = Section.from_yaml(yaml_path, filename)\n yamldir = os.path.dirname(yaml_path)\n\n # Check we have only one controller\n controller_sections = [s for s in sections if s.section == \"controllers\"]\n assert len(controller_sections) == 1, \\\n \"Expected exactly 1 controller, got %s\" % (controller_sections,)\n controller_section = controller_sections[0]\n\n def block_creator(kwargs):\n # Create the param dict of the static defined arguments\n defines = _create_defines(sections, yamlname, yamldir, kwargs)\n controllers, parts = _create_blocks_and_parts(sections, defines)\n # Make the controller\n controller = controller_section.instantiate(defines)\n for part in parts:\n controller.add_part(part)\n controllers.append(controller)\n return controllers\n\n creator = creator_with_nice_signature(\n block_creator, sections, yamlname, yaml_path, docstring)\n return creator", "response": "Create a function that creates a list of blocks with the given name."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nkeeping recursing down from base using dotted name then call it with self. params and args", "response": "def instantiate(self, substitutions):\n \"\"\"Keep recursing down from base using dotted name, then call it with\n self.params and args\n\n Args:\n substitutions (dict): Substitutions to make to self.param_dict\n\n Returns:\n The found object called with (*args, map_from_d)\n\n E.g. 
if ob is malcolm.parts, and name is \"ca.CADoublePart\", then the\n object will be malcolm.parts.ca.CADoublePart\n \"\"\"\n param_dict = self.substitute_params(substitutions)\n pkg, ident = self.name.rsplit(\".\", 1)\n pkg = \"malcolm.modules.%s\" % pkg\n try:\n ob = importlib.import_module(pkg)\n except ImportError as e:\n raise_with_traceback(\n ImportError(\"\\n%s:%d:\\n%s\" % (\n self.filename, self.lineno, e)))\n try:\n ob = getattr(ob, ident)\n except AttributeError:\n raise_with_traceback(\n ImportError(\"\\n%s:%d:\\nPackage %r has no ident %r\" % (\n self.filename, self.lineno, pkg, ident)))\n try:\n model = MethodModel.from_callable(ob, returns=False)\n args = model.validate(param_dict)\n ret = ob(**args)\n except Exception as e:\n sourcefile = inspect.getsourcefile(ob)\n lineno = inspect.getsourcelines(ob)[1]\n raise_with_traceback(\n YamlError(\"\\n%s:%d:\\n%s:%d:\\n%s\" % (\n self.filename, self.lineno, sourcefile, lineno, e)))\n else:\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nparse a YAML file and return a tuple of the sections yamlname and docstring", "response": "def from_yaml(cls, yaml_path, filename=None):\n \"\"\"Split a dictionary into parameters controllers parts blocks defines\n\n Args:\n yaml_path (str): File path to YAML file, or a file in the same dir\n filename (str): If give, use this filename as the last element in\n the yaml_path (so yaml_path can be __file__)\n\n Returns:\n tuple: (sections, yamlname, docstring) where sections is a\n list of created sections\n \"\"\"\n if filename:\n # different filename to support passing __file__\n yaml_path = os.path.join(os.path.dirname(yaml_path), filename)\n assert yaml_path.endswith(\".yaml\"), \\\n \"Expected a/path/to/<yamlname>.yaml, got %r\" % yaml_path\n yamlname = os.path.basename(yaml_path)[:-5]\n log.debug(\"Parsing %s\", yaml_path)\n with open(yaml_path) as f:\n text = f.read()\n # First separate them into their relevant sections\n ds 
= yaml.load(text, Loader=yaml.RoundTripLoader)\n docstring = None\n sections = []\n for d in ds:\n assert len(d) == 1, \\\n \"Expected section length 1, got %d\" % len(d)\n lineno = d._yaml_line_col.line + 1\n name = list(d)[0]\n sections.append(cls(\n yaml_path, lineno, name, d[name]))\n if name == \"builtin.defines.docstring\":\n docstring = d[name][\"value\"]\n\n return sections, yamlname, docstring"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nsubstitute param values in our param_dict from params", "response": "def substitute_params(self, substitutions):\n \"\"\"Substitute param values in our param_dict from params\n\n Args:\n substitutions (Map or dict): Values to substitute. E.g. Map of\n {\"name\": \"me\"}\n\n E.g. if self.param_dict is:\n {\"name\": \"$(name):pos\", \"exposure\": 1.0}\n And substitutions is:\n {\"name\": \"me\"}\n After the call self.param_dict will be:\n {\"name\": \"me:pos\", \"exposure\": 1.0}\n \"\"\"\n param_dict = {}\n # TODO: this should be yaml.add_implicit_resolver()\n for k, v in self.param_dict.items():\n param_dict[k] = replace_substitutions(v, substitutions)\n return param_dict"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _make_padded_ramp(self, v1, v2, pad_velocity, total_time):\n # The time taken to ramp from v1 to pad_velocity\n t1 = self.acceleration_time(v1, pad_velocity)\n # Then on to v2\n t2 = self.acceleration_time(pad_velocity, v2)\n # The distance during the pad\n tp = total_time - t1 - t2\n # Yield the points\n yield t1, pad_velocity\n yield tp, pad_velocity\n yield t2, v2", "response": "Makes a ramp that looks like this : v1 \\ __ pad_velocity \\ __ v2 \\ __ pad_velocity \\ __ total_time \\ __"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nmaking a hat that looks like this : _ vm v1 v2 | \\ d1 \\ dm \\ d2 \\ d3 \\ min_time \\ _ vm \\ min_time \\ _ vm \\ min_time \\ _ vm \\ min_time 
\\ _ vm \\ min_time \\ _ vm \\ min_time \\ _ vm \\ min_time \\ _ vm \\ min_time \\ _ vm \\ min_time \\ _ vm \\ min_time \\ min_time \\ min_time", "response": "def _make_hat(self, v1, v2, acceleration, distance, min_time):\n \"\"\"Make a hat that looks like this:\n\n ______ vm\n v1 /| | \\\n d1| dm|d2\\ v2\n | |\n t1 tm t2\n\n Such that the area under the graph (d1+d2+d3) is distance and\n t1+t2+t3 >= min_time\n \"\"\"\n if min_time > 0:\n # We are trying to meet time constraints\n # Solve quadratic to give vm\n b = v1 + v2 + min_time * acceleration\n c = distance * acceleration + (v1*v1 + v2*v2) / 2\n op = b*b - 4 * c\n if np.isclose(op, 0):\n # Might have a negative number as rounding error...\n op = 0\n elif op < 0:\n # Can't do this, set something massive to fail vm check...\n op = 10000000000\n\n def get_times(vm):\n t1 = (vm - v1) / acceleration\n t2 = (vm - v2) / acceleration\n tm = min_time - t1 - t2\n assert -self.max_velocity <= vm <= self.max_velocity\n assert t1 >= 0 and t2 >= 0 and tm >= 0\n return t1, tm, t2\n\n try:\n # Try negative root\n vm = (b - np.sqrt(op)) / 2\n t1, tm, t2 = get_times(vm)\n except AssertionError:\n try:\n # Try positive root\n vm = (b + np.sqrt(op)) / 2\n t1, tm, t2 = get_times(vm)\n except AssertionError:\n # If vm is out of range or any segment takes negative time,\n # we can't do it in min_time, so act as if unconstrained\n t1, tm, t2, vm = self._calculate_hat_params(\n v1, v2, acceleration, distance)\n else:\n t1, tm, t2, vm = self._calculate_hat_params(\n v1, v2, acceleration, distance)\n\n # If middle segment needs to be negative time then we need to cap\n # vm and spend no time at vm\n if tm < 0:\n # Solve the quadratic to work out how long to spend accelerating\n vm = np.sqrt(\n (2 * acceleration * distance + v1 * v1 + v2 * v2) / 2)\n if acceleration < 0:\n vm = -vm\n t1 = self.acceleration_time(v1, vm)\n t2 = self.acceleration_time(vm, v2)\n tm = 0\n\n # Yield the result\n yield t1, vm\n yield tm, vm\n yield t2, v2"} 
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncalculating PVT points that will perform the move within motor params Args: v1 (float): Starting velocity in EGUs/s v2 (float): Ending velocity in EGUs/s distance (float): Relative distance to travel in EGUs min_time (float): The minimum time the move should take Returns: tuple: (time_list, position_list) where time_list is a list of relative time points in seconds, and position_list is the position in EGUs that the motor should be", "response": "def make_velocity_profile(self, v1, v2, distance, min_time):\n \"\"\"Calculate PVT points that will perform the move within motor params\n\n Args:\n v1 (float): Starting velocity in EGUs/s\n v2 (float): Ending velocity in EGUs/s\n distance (float): Relative distance to travel in EGUs\n min_time (float): The minimum time the move should take\n\n Returns:\n tuple: (time_list, position_list) where time_list is a list of\n relative time points in seconds, and position_list is the\n position in EGUs that the motor should be\n \"\"\"\n # Take off the settle time and distance\n if min_time > 0:\n min_time -= self.velocity_settle\n distance -= self.velocity_settle * v2\n # The ramp time and distance of a continuous ramp from v1 to v2\n ramp_time = self.acceleration_time(v1, v2)\n ramp_distance = self.ramp_distance(v1, v2, ramp_time)\n remaining_distance = distance - ramp_distance\n # Check if we need to stretch in time\n if min_time > ramp_time:\n # Check how fast we would need to be going so that the total move\n # completes in min_time\n pad_velocity = remaining_distance / (min_time - ramp_time)\n if pad_velocity > max(v1, v2):\n # Can't just pad the ramp, make a hat pointing up\n it = self._make_hat(\n v1, v2, self.acceleration, distance, min_time)\n elif pad_velocity < min(v1, v2):\n # Can't just pad the ramp, make a hat pointing down\n it = self._make_hat(\n v1, v2, -self.acceleration, distance, min_time)\n else:\n # Make a padded ramp\n it = 
self._make_padded_ramp(v1, v2, pad_velocity, min_time)\n elif remaining_distance < 0:\n # Make a hat pointing down\n it = self._make_hat(v1, v2, -self.acceleration, distance, min_time)\n else:\n # Make a hat pointing up\n it = self._make_hat(v1, v2, self.acceleration, distance, min_time)\n # Create the time and velocity arrays\n time_array = [0.0]\n velocity_array = [v1]\n for t, v in it:\n assert t >= 0, \"Got negative t %s\" % t\n if t == 0:\n assert v == velocity_array[-1], \\\n \"Can't move velocity in zero time\"\n continue\n if v * velocity_array[-1] < 0:\n # Crossed zero, put in an explicit zero velocity\n fraction = velocity_array[-1] / (velocity_array[-1] - v)\n time_array.append(time_array[-1] + fraction * t)\n velocity_array.append(0)\n t -= fraction * t\n time_array.append(time_array[-1] + t)\n velocity_array.append(v)\n # Add on the settle time\n if self.velocity_settle > 0:\n time_array.append(time_array[-1] + self.velocity_settle)\n velocity_array.append(v2)\n return time_array, velocity_array"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngives the parts of a CS and a set of axes to move return the cs_port and the mapping of cs_axis to MotorInfo.", "response": "def cs_axis_mapping(cls,\n part_info, # type: Dict[str, Optional[Sequence]]\n axes_to_move # type: Sequence[str]\n ):\n # type: (...) 
-> Tuple[str, Dict[str, MotorInfo]]\n \"\"\"Given the motor infos for the parts, filter those with scannable\n names in axes_to_move, check they are all in the same CS, and return\n the cs_port and mapping of cs_axis to MotorInfo\"\"\"\n cs_ports = set() # type: Set[str]\n axis_mapping = {} # type: Dict[str, MotorInfo]\n for motor_info in cls.filter_values(part_info):\n if motor_info.scannable in axes_to_move:\n assert motor_info.cs_axis in cs_axis_names, \\\n \"Can only scan 1-1 mappings, %r is %r\" % \\\n (motor_info.scannable, motor_info.cs_axis)\n cs_ports.add(motor_info.cs_port)\n axis_mapping[motor_info.scannable] = motor_info\n missing = list(set(axes_to_move) - set(axis_mapping))\n assert not missing, \\\n \"Some scannables %s are not in the CS mapping %s\" % (\n missing, axis_mapping)\n assert len(cs_ports) == 1, \\\n \"Requested axes %s are in multiple CS numbers %s\" % (\n axes_to_move, list(cs_ports))\n cs_axis_counts = Counter([x.cs_axis for x in axis_mapping.values()])\n # Any cs_axis defs that are used for more that one raw motor\n overlap = [k for k, v in cs_axis_counts.items() if v > 1]\n assert not overlap, \\\n \"CS axis defs %s have more that one raw motor attached\" % overlap\n return cs_ports.pop(), axis_mapping"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nset the layout table value. Called on attribute put.", "response": "def set_layout(self, value):\n \"\"\"Set the layout table value. Called on attribute put\"\"\"\n # Can't do this with changes_squashed as it will call update_modified\n # from another thread and deadlock. 
Need RLock.is_owned() from update_*\n part_info = self.run_hooks(\n LayoutHook(p, c, self.port_info, value)\n for p, c in self.create_part_contexts(only_visible=False).items())\n with self.changes_squashed:\n layout_parts = LayoutInfo.filter_parts(part_info)\n name, mri, x, y, visible = [], [], [], [], []\n for part_name, layout_infos in layout_parts.items():\n for layout_info in layout_infos:\n name.append(part_name)\n mri.append(layout_info.mri)\n x.append(layout_info.x)\n y.append(layout_info.y)\n visible.append(layout_info.visible)\n layout_table = LayoutTable(name, mri, x, y, visible)\n try:\n # Compare the Array seq to get at the numpy array\n np.testing.assert_equal(\n layout_table.visible.seq, self.layout.value.visible.seq)\n except AssertionError:\n visibility_changed = True\n else:\n visibility_changed = False\n self.layout.set_value(layout_table)\n if self.saved_visibility is None:\n # First write of table, set layout and exports saves\n self.saved_visibility = layout_table.visible\n self.saved_exports = self.exports.value.to_dict()\n # Force visibility changed so we update_block_endpoints\n # even if there weren't any visible\n visibility_changed = True\n if visibility_changed:\n self.update_modified()\n self.update_exportable()\n # Part visibility changed, might have attributes or methods\n # that we need to hide or show\n self.update_block_endpoints()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nsaves the current design to file", "response": "def save(self, designName=\"\"):\n # type: (ASaveDesign) -> None\n \"\"\"Save the current design to file\"\"\"\n self.try_stateful_function(\n ss.SAVING, ss.READY, self.do_save, designName)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nmake config dir and return full file path and extension", "response": "def _validated_config_filename(self, name):\n \"\"\"Make config dir and return full file path and extension\n\n 
Args:\n name (str): Filename without dir or extension\n\n Returns:\n str: Full path including extension\n \"\"\"\n dir_name = self._make_config_dir()\n filename = os.path.join(dir_name, name.split(\".json\")[0] + \".json\")\n return filename"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef do_load(self, design, init=False):\n # type: (str, bool) -> None\n \"\"\"Load a design name, running the child LoadHooks.\n\n Args:\n design: Name of the design json file, without extension\n init: Passed to the LoadHook to tell the children if this is being\n run at Init or not\n \"\"\"\n if design:\n filename = self._validated_config_filename(design)\n with open(filename, \"r\") as f:\n text = f.read()\n structure = json_decode(text)\n else:\n structure = {}\n # Attributes and Children used to be merged, support this\n attributes = structure.get(\"attributes\", structure)\n children = structure.get(\"children\", structure)\n # Set the layout table\n name, mri, x, y, visible = [], [], [], [], []\n for part_name, d in attributes.get(\"layout\", {}).items():\n name.append(part_name)\n mri.append(\"\")\n x.append(d[\"x\"])\n y.append(d[\"y\"])\n visible.append(d[\"visible\"])\n self.set_layout(LayoutTable(name, mri, x, y, visible))\n # Set the exports table\n source, export = [], []\n for source_name, export_name in attributes.get(\"exports\", {}).items():\n source.append(source_name)\n export.append(export_name)\n self.exports.set_value(ExportTable(source, export))\n # Set other attributes\n our_values = {k: v for k, v in attributes.items()\n if k in self.our_config_attributes}\n block = self.block_view()\n block.put_attribute_values(our_values)\n # Run the load hook to get parts to load their own structure\n self.run_hooks(\n LoadHook(p, c, children.get(p.name, {}), init)\n for p, c in self.create_part_contexts(only_visible=False).items())\n self._mark_clean(design, init)", "response": "Load a design name and run the 
child LoadHooks."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nregistering a function to be added to the block", "response": "def add_method_model(self,\n func, # type: Callable\n name=None, # type: Optional[str]\n description=None, # type: Optional[str]\n owner=None, # type: object\n ):\n # type: (...) -> MethodModel\n \"\"\"Register a function to be added to the block\"\"\"\n if name is None:\n name = func.__name__\n method = MethodModel.from_callable(func, description)\n self._add_field(owner, name, method, func)\n return method"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nregister a function to be added to the Block as a MethodModel.", "response": "def add_method_model(self,\n func, # type: Callable\n name=None, # type: Optional[str]\n description=None, # type: Optional[str]\n ):\n # type: (...) -> MethodModel\n \"\"\"Register a function to be added to the Block as a MethodModel\"\"\"\n return self._field_registry.add_method_model(\n func, name, description, self._part)"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nregistering a pre - existing AttributeModel to be added to the Block.", "response": "def add_attribute_model(self,\n name, # type: str\n attr, # type: AttributeModel\n writeable_func=None, # type: Optional[Callable]\n ):\n # type: (...) 
-> AttributeModel\n \"\"\"Register a pre-existing AttributeModel to be added to the Block\"\"\"\n return self._field_registry.add_attribute_model(\n name, attr, writeable_func, self._part)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds variables to the internal list of variables.", "response": "def add_variables(\n self, variables: Union[List[\"Variable\"], \"Variable\"], index: int = None\n ) -> None:\n \"\"\"\n Parameters\n ----------\n variable : list of variables or single variable\n Add variables as problem variables\n\n index : int\n Location to add variables, if None add to the end\n\n \"\"\"\n if isinstance(variables, Variable):\n addvars = copy.deepcopy([variables])\n else:\n addvars = copy.deepcopy(variables)\n\n if index is None:\n self.variables.extend(addvars)\n else:\n self.variables[index:index] = addvars"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _centroids(n_clusters: int, points: List[List[float]]) -> List[List[float]]:\n\n k_means = KMeans(n_clusters=n_clusters)\n k_means.fit(points)\n\n closest, _ = pairwise_distances_argmin_min(k_means.cluster_centers_, points)\n\n return list(map(list, np.array(points)[closest.tolist()]))", "response": "Return n_clusters centroids of points\n "} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef random_weights(nobj: int, nweight: int) -> List[List[float]]:\n\n # Initial wector space as per\n # Miettinen, K. 
Nonlinear Multiobjective Optimization\n # Kluwer Academic Publishers, 1999\n wspace = 50 * nobj\n while wspace < nweight:\n wspace *= 2\n\n weights = np.random.rand(wspace, nobj)\n return _centroids(nobj, weights)", "response": "Generate random weights for nof objectives"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef new_points(\n factory: IterationPointFactory, solution, weights: List[List[float]] = None\n) -> List[Tuple[np.ndarray, List[float]]]:\n \"\"\"Generate approximate set of points\n\n Generate set of Pareto optimal solutions projecting from the Pareto optimal solution\n using weights to determine the direction.\n\n\n Parameters\n ----------\n\n factory:\n IterationPointFactory with suitable optimization problem\n\n solution:\n Current solution from which new solutions are projected\n\n weights:\n Direction of the projection, if not given generate with\n :func:random_weights\n\n \"\"\"\n from desdeo.preference.direct import DirectSpecification\n\n points = []\n nof = factory.optimization_method.optimization_problem.problem.nof_objectives()\n if not weights:\n weights = random_weights(nof, 50 * nof)\n for pref in map(\n lambda w: DirectSpecification(factory.optimization_method, np.array(w)), weights\n ):\n points.append(factory.result(pref, solution))\n return points", "response": "Generate approximate set of points for a new projecting from the Pareto optimal solution."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef as_minimized(values: List[float], maximized: List[bool]) -> List[float]:\n return [v * -1. 
if m else v for v, m in zip(values, maximized)]", "response": "Return vector values as minimized\n "} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nwrap for prompt and prompt_wrapper.", "response": "def _prompt_wrapper(message, default=None, validator=None):\n \"\"\" Handle references piped from file\n \"\"\"\n\n class MockDocument:\n\n def __init__(self, text):\n self.text = text\n\n if HAS_INPUT:\n ret = prompt(message, default=default, validator=validator)\n else:\n ret = sys.stdin.readline().strip()\n print(message, ret)\n if validator:\n validator.validate(MockDocument(ret))\n if \"q\" in ret:\n if not HAS_OUTPUT:\n print(\"User exit\")\n sys.exit(\"User exit\")\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef init_nautilus(method):\n\n print(\"Preference elicitation options:\")\n print(\"\\t1 - Percentages\")\n print(\"\\t2 - Relative ranks\")\n print(\"\\t3 - Direct\")\n\n PREFCLASSES = [PercentageSpecifictation, RelativeRanking, DirectSpecification]\n\n pref_sel = int(\n _prompt_wrapper(\n \"Reference elicitation \",\n default=u\"%s\" % (1),\n validator=NumberValidator([1, 3]),\n )\n )\n\n preference_class = PREFCLASSES[pref_sel - 1]\n\n print(\"Nadir: %s\" % method.problem.nadir)\n print(\"Ideal: %s\" % method.problem.ideal)\n\n if method.current_iter - method.user_iters:\n finished_iter = method.user_iters - method.current_iter\n else:\n finished_iter = 0\n new_iters = int(\n _prompt_wrapper(\n u\"Ni: \", default=u\"%s\" % (method.current_iter), validator=NumberValidator()\n )\n )\n method.current_iter = new_iters\n method.user_iters = finished_iter + new_iters\n\n return preference_class", "response": "Initialize nautilus method\n\n Parameters\n ----------\n\n method\n Interactive method used for the process\n\n Returns\n -------\n\n PreferenceInformation subclass to be initialized"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function 
that can\niterate NAUTILUS method interactively or using given preferences if given.", "response": "def iter_nautilus(method):\n \"\"\" Iterate NAUTILUS method either interactively, or using given preferences if given\n\n Parameters\n ----------\n\n method : instance of NAUTILUS subclass\n Fully initialized NAUTILUS method instance\n\n \"\"\"\n solution = None\n while method.current_iter:\n preference_class = init_nautilus(method)\n pref = preference_class(method, None)\n default = \",\".join(map(str, pref.default_input()))\n while method.current_iter:\n method.print_current_iteration()\n\n pref_input = _prompt_wrapper(\n u\"Preferences: \",\n default=default,\n validator=VectorValidator(method, pref),\n )\n\n cmd = _check_cmd(pref_input)\n if cmd:\n solution = method.zh\n break\n pref = preference_class(\n method, np.fromstring(pref_input, dtype=np.float, sep=\",\")\n )\n default = \",\".join(map(str, pref.pref_input))\n solution, _ = method.next_iteration(pref)\n if cmd and list(cmd)[0] == \"c\":\n break\n return solution"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nchecking that value is in values", "response": "def isin(value, values):\n \"\"\" Check that value is in values \"\"\"\n for i, v in enumerate(value):\n if v not in np.array(values)[:, i]:\n return False\n return True"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngenerating n solutions which attempt to trade - off objs1 and objs2.", "response": "def between(self, objs1: List[float], objs2: List[float], n=1):\n \"\"\"\n Generate `n` solutions which attempt to trade-off `objs1` and `objs2`.\n\n Parameters\n ----------\n objs1\n First boundary point for desired objective function values\n\n objs2\n Second boundary point for desired objective function values\n\n n\n Number of solutions to generate\n \"\"\"\n from desdeo.preference.base import ReferencePoint\n\n objs1_arr = np.array(objs1)\n objs2_arr = np.array(objs2)\n segments = n + 1\n diff = 
objs2_arr - objs1_arr\n solutions = []\n for x in range(1, segments):\n btwn_obj = objs1_arr + float(x) / segments * diff\n solutions.append(\n self._get_ach().result(ReferencePoint(self, btwn_obj), None)\n )\n return ResultSet(solutions)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nfinds the first focusable window within the node tree", "response": "def find_focusable(node):\n \"\"\"\n Search for the first focusable window within the node tree\n \"\"\"\n\n if not node.children:\n return node\n\n if node.focus:\n return find_focusable(node.children_dict[node.focus[0]])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef find_parent_split(node, orientation):\n\n if (node and node.orientation == orientation\n and len(node.children) > 1):\n return node\n\n if not node or node.type == \"workspace\":\n return None\n\n return find_parent_split(node.parent, orientation)", "response": "Find the first parent split relative to the given node according to the desired orientation."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncycle through windows of the current workspace.", "response": "def cycle_windows(tree, direction):\n \"\"\"\n Cycle through windows of the current workspace\n \"\"\"\n wanted = {\n \"orientation\": (\"vertical\" if direction in (\"up\", \"down\")\n else \"horizontal\"),\n \"direction\": (1 if direction in (\"down\", \"right\")\n else -1),\n }\n split = find_parent_split(tree.focused.parent, wanted[\"orientation\"])\n\n if split:\n # Get the next child given the direction\n child_ids = [child.id for child in split.children]\n focus_idx = child_ids.index(split.focused_child.id)\n next_idx = (focus_idx + wanted['direction']) % len(child_ids)\n next_node = split.children[next_idx]\n return find_focusable(next_node)\n return None"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of 
the following Python 3 code\ndef cycle_outputs(tree, direction):\n direction = 1 if direction == \"next\" else -1\n outputs = [output for output in tree.root.children\n if output.name != \"__i3\"]\n focus_idx = outputs.index(tree.root.focused_child)\n next_idx = (focus_idx + direction) % len(outputs)\n next_output = outputs[next_idx]\n return find_focusable(next_output)", "response": "Cycle through the outputs of a tree."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreturns functions with the class", "response": "def with_class(self, cls):\n \"\"\" Return functions with the class\n \"\"\"\n rcls = []\n for key, value in self._classification.items():\n if value[0] == cls:\n rcls.append(key)\n return rcls"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _as_reference_point(self) -> np.ndarray:\n ref_val = []\n for fn, f in self._classification.items():\n if f[0] == \"<\":\n ref_val.append(self._method.problem.ideal[fn])\n elif f[0] == \"<>\":\n ref_val.append(self._method.problem.nadir[fn])\n else:\n ref_val.append(f[1])\n\n return np.array(ref_val)", "response": "Return classification information as reference point"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsolving River Pollution problem with NAUTILUS V1 and E - NAUTILUS Methods", "response": "def main(logfile=False):\n \"\"\" Solve River Pollution problem with NAUTILUS V1 and E-NAUTILUS Methods\n \"\"\"\n # Duplicate output to log file\n\n class NAUTILUSOptionValidator(Validator):\n\n def validate(self, document):\n if document.text not in \"ao\":\n raise ValidationError(\n message=\"Please select a for apriori or o for optimization option\",\n cursor_position=0,\n )\n\n if logfile:\n Tee(logfile)\n first = True\n current_iter = 0\n while first or current_iter:\n # SciPy breaks box constraints\n\n nautilus_v1 = NAUTILUSv1(RiverPollution(), SciPyDE)\n if not first:\n 
nautilus_v1.current_iter = current_iter\n first = False\n nadir = nautilus_v1.problem.nadir\n ideal = nautilus_v1.problem.ideal\n\n solution = tui.iter_nautilus(nautilus_v1)\n\n current_iter = nautilus_v1.current_iter\n\n # TODO: Move to tui module\n method_e = None\n if current_iter > 0:\n option = _prompt_wrapper(\n \"select a for apriori or o for optimization option: \",\n default=\"o\",\n validator=NAUTILUSOptionValidator(),\n )\n if option.lower() == \"a\":\n wi = _prompt_wrapper(\n \"Number of PO solutions (10 or 20): \",\n default=\"20\",\n validator=tui.NumberValidator(),\n )\n weights = WEIGHTS[wi]\n\n factory = IterationPointFactory(\n SciPyDE(NautilusAchievementProblem(RiverPollution()))\n )\n points = misc.new_points(factory, solution, weights=weights)\n\n method_e = ENAUTILUS(PreGeneratedProblem(points=points), PointSearch)\n method_e.zh_prev = solution\n\n else:\n method_e = ENAUTILUS(RiverPollution(), SciPyDE)\n\n # method_e.zh = solution\n method_e.current_iter = nautilus_v1.current_iter\n method_e.user_iters = nautilus_v1.user_iters\n\n print(\n \"E-NAUTILUS\\nselected iteration point: %s:\"\n % \",\".join(map(str, solution))\n )\n\n while method_e and method_e.current_iter > 0:\n if solution is None:\n solution = method_e.problem.nadir\n\n method_e.problem.nadir = nadir\n method_e.problem.ideal = ideal\n cmd = tui.iter_enautilus(\n method_e, initial_iterpoint=solution, initial_bound=method_e.fh_lo\n )\n if cmd:\n print(method_e.current_iter)\n current_iter = method_e.current_iter\n break\n if tui.HAS_INPUT:\n input(\"Press ENTER to exit\")"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds a link to the sitemap", "response": "def add_html_link(app, pagename, templatename, context, doctree):\n \"\"\"As each page is built, collect page names for the sitemap\"\"\"\n app.sitemap_links.append(pagename + \".html\")"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngenerating the 
sitemap. xml from the collected HTML page links and the language of the collected HTML page links.", "response": "def create_sitemap(app, exception):\n \"\"\"Generates the sitemap.xml from the collected HTML page links\"\"\"\n site_url = app.builder.config.site_url or app.builder.config.html_baseurl\n if not site_url:\n print(\"sphinx-sitemap error: neither html_baseurl nor site_url \"\n \"are set in conf.py. Sitemap not built.\")\n return\n if (not app.sitemap_links):\n print(\"sphinx-sitemap warning: No pages generated for sitemap.xml\")\n return\n\n ET.register_namespace('xhtml', \"http://www.w3.org/1999/xhtml\")\n\n root = ET.Element(\"urlset\")\n root.set(\"xmlns\", \"http://www.sitemaps.org/schemas/sitemap/0.9\")\n\n get_locales(app, exception)\n\n if app.builder.config.version:\n version = app.builder.config.version + '/'\n else:\n version = app.builder.config.version\n\n for link in app.sitemap_links:\n url = ET.SubElement(root, \"url\")\n if app.builder.config.language is not None:\n ET.SubElement(url, \"loc\").text = site_url + \\\n app.builder.config.language + '/' + version + link\n if len(app.locales) > 0:\n for lang in app.locales:\n linktag = ET.SubElement(\n url,\n \"{http://www.w3.org/1999/xhtml}link\"\n )\n linktag.set(\"rel\", \"alternate\")\n linktag.set(\"hreflang\", lang)\n linktag.set(\"href\", site_url + lang + '/' + version + link)\n elif app.builder.config.version:\n ET.SubElement(url, \"loc\").text = site_url + version + link\n else:\n ET.SubElement(url, \"loc\").text = site_url + link\n\n filename = app.outdir + \"/sitemap.xml\"\n ET.ElementTree(root).write(filename,\n xml_declaration=True,\n encoding='utf-8',\n method=\"xml\")\n print(\"sitemap.xml was generated for URL %s in %s\" % (site_url, filename))"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef estimate_payoff_table(\n opt_meth_cls: Type[OptimizationMethod], mo_prob: MOProblem\n) -> Tuple[List[float], List[float]]:\n 
\"\"\"\n Estimates the ideal and nadir by using a payoff table. This should give a\n good estimate for the ideal, but can be very inaccurate for the nadir. For an explanation of why, see [DEB2010]_.\n\n References\n ----------\n\n .. [DEB2010] Deb, K., Miettinen, K., & Chaudhuri, S. (2010).\n Toward an estimation of nadir objective vector using a hybrid of evolutionary and local search approaches.\n IEEE Transactions on Evolutionary Computation, 14(6), 821-841.\n \"\"\"\n ideal = [float(\"inf\")] * mo_prob.nobj\n nadir = [float(\"-inf\")] * mo_prob.nobj\n for i in range(mo_prob.nobj):\n meth = opt_meth_cls(SelectedOptimizationProblem(mo_prob, i))\n _decis, obj = meth.search()\n for j, obj_val in enumerate(obj):\n if obj_val < ideal[j]:\n ideal[j] = obj_val\n if obj_val > nadir[j]:\n nadir[j] = obj_val\n return ideal, nadir", "response": "Estimate the ideal and nadir by using a payoff table."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\npadding an ideal and nadir estimate.", "response": "def pad(idnad: Tuple[List[float], List[float]], pad_nadir=0.05, pad_ideal=0.0):\n \"\"\"\n Pad an ideal/nadir estimate. This is mainly useful for padding the nadir\n estimated by a payoff table for safety purposes.\n \"\"\"\n ideal, nadir = idnad\n ideal_arr = np.array(ideal)\n nadir_arr = np.array(nadir)\n idnad_range = nadir_arr - ideal_arr\n nadir_arr += pad_nadir * idnad_range\n ideal_arr -= pad_ideal * idnad_range\n return list(ideal_arr), list(nadir_arr)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef round_off(idnad: Tuple[List[float], List[float]], dp: int = 2):\n ideal, nadir = idnad\n mult = np.power(10, dp)\n ideal_arr = np.floor(np.array(ideal) * mult) / mult\n nadir_arr = np.ceil(np.array(nadir) * mult) / mult\n return list(ideal_arr), list(nadir_arr)", "response": "Round off an ideal and nadir estimate e. g. 
so that it s looks nicer when it s plotted."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef default_estimate(\n opt_meth: Type[OptimizationMethod], mo_prob: MOProblem, dp: int = 2\n) -> Tuple[List[float], List[float]]:\n \"\"\"\n The recommended nadir/ideal estimator - use a payoff table and then round\n off the result.\n \"\"\"\n return round_off(estimate_payoff_table(opt_meth, mo_prob), dp)", "response": "Default estimate for a nadir orideal node."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nsearch for the optimal solution of the logarithmic logarithm.", "response": "def search(self, max=False, **params) -> Tuple[np.ndarray, List[float]]:\n \"\"\"\n Search for the optimal solution\n\n This sets up the search for the optimization and calls the _search method\n\n Parameters\n ----------\n max : bool (default False)\n If true find mximum of the objective function instead of minimum\n\n **params : dict [optional]\n Parameters for single objective optimization method\n \"\"\"\n\n self._max = max\n if max:\n self._coeff = -1.0\n else:\n self._coeff = 1.0\n\n return self._search(**params)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef next_iteration(self, preference=None):\n if preference:\n self.preference = preference\n print((\"Given preference: %s\" % self.preference.pref_input))\n self._update_fh()\n\n # tmpzh = list(self.zh)\n self._update_zh(self.zh, self.fh)\n # self.zh = list(np.array(self.zh) / 2. 
+ np.array(self.zh_prev) / 2.)\n # self.zh_prev = tmpzh\n if self.current_iter != 1:\n self.fh_lo = list(self.lower_bounds_factory.result(self.zh_prev))\n\n self.current_iter -= 1\n\n return self.fh_lo, self.zh", "response": "Return next iteration bounds"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef next_iteration(self, ref_point, bounds=None):\n if bounds:\n self.problem.points = reachable_points(\n self.problem.points, self.problem.ideal, bounds\n )\n if not utils.isin(self.fh, self.problem.points) or ref_point != self.ref_point:\n self.ref_point = list(ref_point)\n self._update_fh()\n\n self._update_zh(self.zh, self.fh)\n\n self.fh_lo = list(self.lower_bounds_factory.result(self.zh))\n self.fh_up = list(self.upper_bounds_factory.result(self.zh))\n\n logging.debug(f\"Updated upper boundary: {self.fh_up}\")\n logging.debug(f\"Uppadet lower boundary: {self.fh_lo}\")\n\n if not np.all(np.array(self.fh_up) > np.array(self.fh_lo)):\n warn(self.NegativeIntervalWarning())\n\n assert utils.isin(self.fh_up, self.problem.points)\n assert utils.isin(self.fh_lo, self.problem.points)\n\n dist = self.distance(self.zh, self.fh)\n\n # Reachable points\n self.update_points()\n\n lP = len(self.problem.points)\n self.current_iter -= 1\n\n return dist, self.fh, self.zh, self.fh_lo, self.fh_up, lP", "response": "Calculate the next iteration point to be shown to the DM."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef output_default(paragraphs, fp=sys.stdout, no_boilerplate=True):\n for paragraph in paragraphs:\n if paragraph.class_type == 'good':\n if paragraph.heading:\n tag = 'h'\n else:\n tag = 'p'\n elif no_boilerplate:\n continue\n else:\n tag = 'b'\n\n print('<%s> %s' % (tag, cgi.escape(paragraph.text)), file=fp)", "response": "Prints the paragraphs as a default list of paragraphs."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following 
Python 3 code\ndef output_detailed(paragraphs, fp=sys.stdout):\n for paragraph in paragraphs:\n output = '<p class=\"%s\" cfclass=\"%s\" heading=\"%i\" xpath=\"%s\"> %s' % (\n paragraph.class_type,\n paragraph.cf_class,\n int(paragraph.heading),\n paragraph.xpath,\n cgi.escape(paragraph.text)\n )\n print(output, file=fp)", "response": "Output detailed information about the given list of paragraphs."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef output_krdwrd(paragraphs, fp=sys.stdout):\n for paragraph in paragraphs:\n if paragraph.class_type in ('good', 'neargood'):\n if paragraph.heading:\n cls = 2\n else:\n cls = 3\n else:\n cls = 1\n\n for text_node in paragraph.text_nodes:\n print('%i\\t%s' % (cls, text_node.strip()), file=fp)", "response": "Outputs the given list of paragraphs in a KrdWrd compatible format."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef html_to_dom(html, default_encoding=DEFAULT_ENCODING, encoding=None, errors=DEFAULT_ENC_ERRORS):\n if isinstance(html, unicode):\n decoded_html = html\n # encode HTML for case it's XML with encoding declaration\n forced_encoding = encoding if encoding else default_encoding\n html = html.encode(forced_encoding, errors)\n else:\n decoded_html = decode_html(html, default_encoding, encoding, errors)\n\n try:\n dom = lxml.html.fromstring(decoded_html, parser=lxml.html.HTMLParser())\n except ValueError:\n # Unicode strings with encoding declaration are not supported.\n # for XHTML files with encoding declaration, use the declared encoding\n dom = lxml.html.fromstring(html, parser=lxml.html.HTMLParser())\n\n return dom", "response": "Converts HTML to DOM."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef decode_html(html, default_encoding=DEFAULT_ENCODING, encoding=None, errors=DEFAULT_ENC_ERRORS):\n if isinstance(html, 
unicode):\n return html\n\n if encoding:\n return html.decode(encoding, errors)\n\n match = CHARSET_META_TAG_PATTERN.search(html)\n if match:\n declared_encoding = match.group(1).decode(\"ASCII\")\n # proceed unknown encoding as if it wasn't found at all\n with ignored(LookupError):\n return html.decode(declared_encoding, errors)\n\n # unknown encoding\n try:\n # try UTF-8 first\n return html.decode(\"utf8\")\n except UnicodeDecodeError:\n # try lucky with default encoding\n try:\n return html.decode(default_encoding, errors)\n except UnicodeDecodeError as e:\n raise JustextError(\"Unable to decode the HTML to Unicode: \" + unicode(e))", "response": "Converts a HTML page into Unicode."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nremove unwanted parts of DOM.", "response": "def preprocessor(dom):\n \"Removes unwanted parts of DOM.\"\n options = {\n \"processing_instructions\": False,\n \"remove_unknown_tags\": False,\n \"safe_attrs_only\": False,\n \"page_structure\": False,\n \"annoying_tags\": False,\n \"frames\": False,\n \"meta\": False,\n \"links\": False,\n \"javascript\": False,\n \"scripts\": True,\n \"comments\": True,\n \"style\": True,\n \"embedded\": True,\n \"forms\": True,\n \"kill_tags\": (\"head\",),\n }\n cleaner = Cleaner(**options)\n\n return cleaner.clean_html(dom)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nget the next neighbour of the given paragraphs.", "response": "def get_next_neighbour(i, paragraphs, ignore_neargood):\n \"\"\"\n Return the class of the paragraph at the bottom end of the short/neargood\n paragraphs block. 
If ignore_neargood is True, than only 'bad' or 'good'\n can be returned, otherwise 'neargood' can be returned, too.\n \"\"\"\n return _get_neighbour(i, paragraphs, ignore_neargood, 1, len(paragraphs))"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef revise_paragraph_classification(paragraphs, max_heading_distance=MAX_HEADING_DISTANCE_DEFAULT):\n # copy classes\n for paragraph in paragraphs:\n paragraph.class_type = paragraph.cf_class\n\n # good headings\n for i, paragraph in enumerate(paragraphs):\n if not (paragraph.heading and paragraph.class_type == 'short'):\n continue\n j = i + 1\n distance = 0\n while j < len(paragraphs) and distance <= max_heading_distance:\n if paragraphs[j].class_type == 'good':\n paragraph.class_type = 'neargood'\n break\n distance += len(paragraphs[j].text)\n j += 1\n\n # classify short\n new_classes = {}\n for i, paragraph in enumerate(paragraphs):\n if paragraph.class_type != 'short':\n continue\n prev_neighbour = get_prev_neighbour(i, paragraphs, ignore_neargood=True)\n next_neighbour = get_next_neighbour(i, paragraphs, ignore_neargood=True)\n neighbours = set((prev_neighbour, next_neighbour))\n if neighbours == set(['good']):\n new_classes[i] = 'good'\n elif neighbours == set(['bad']):\n new_classes[i] = 'bad'\n # it must be set(['good', 'bad'])\n elif (prev_neighbour == 'bad' and get_prev_neighbour(i, paragraphs, ignore_neargood=False) == 'neargood') or \\\n (next_neighbour == 'bad' and get_next_neighbour(i, paragraphs, ignore_neargood=False) == 'neargood'):\n new_classes[i] = 'good'\n else:\n new_classes[i] = 'bad'\n\n for i, c in new_classes.items():\n paragraphs[i].class_type = c\n\n # revise neargood\n for i, paragraph in enumerate(paragraphs):\n if paragraph.class_type != 'neargood':\n continue\n prev_neighbour = get_prev_neighbour(i, paragraphs, ignore_neargood=True)\n next_neighbour = get_next_neighbour(i, paragraphs, ignore_neargood=True)\n if (prev_neighbour, 
next_neighbour) == ('bad', 'bad'):\n paragraph.class_type = 'bad'\n else:\n paragraph.class_type = 'good'\n\n # more good headings\n for i, paragraph in enumerate(paragraphs):\n if not (paragraph.heading and paragraph.class_type == 'bad' and paragraph.cf_class != 'bad'):\n continue\n j = i + 1\n distance = 0\n while j < len(paragraphs) and distance <= max_heading_distance:\n if paragraphs[j].class_type == 'good':\n paragraph.class_type = 'good'\n break\n distance += len(paragraphs[j].text)\n j += 1", "response": "This function revise the context - sensitive paragraph classification."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nconverting an HTML page into a list of classified paragraphs.", "response": "def justext(html_text, stoplist, length_low=LENGTH_LOW_DEFAULT,\n length_high=LENGTH_HIGH_DEFAULT, stopwords_low=STOPWORDS_LOW_DEFAULT,\n stopwords_high=STOPWORDS_HIGH_DEFAULT, max_link_density=MAX_LINK_DENSITY_DEFAULT,\n max_heading_distance=MAX_HEADING_DISTANCE_DEFAULT, no_headings=NO_HEADINGS_DEFAULT,\n encoding=None, default_encoding=DEFAULT_ENCODING,\n enc_errors=DEFAULT_ENC_ERRORS, preprocessor=preprocessor):\n \"\"\"\n Converts an HTML page into a list of classified paragraphs. 
Each paragraph\n is represented as instance of class \u02d9\u02d9justext.paragraph.Paragraph\u02d9\u02d9.\n \"\"\"\n dom = html_to_dom(html_text, default_encoding, encoding, enc_errors)\n dom = preprocessor(dom)\n\n paragraphs = ParagraphMaker.make_paragraphs(dom)\n\n classify_paragraphs(paragraphs, stoplist, length_low, length_high,\n stopwords_low, stopwords_high, max_link_density, no_headings)\n revise_paragraph_classification(paragraphs, max_heading_distance)\n\n return paragraphs"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nconverts DOM into paragraphs.", "response": "def make_paragraphs(cls, root):\n \"\"\"Converts DOM into paragraphs.\"\"\"\n handler = cls()\n lxml.sax.saxify(root, handler)\n return handler.paragraphs"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a collection of built - in stop - lists.", "response": "def get_stoplists():\n \"\"\"Returns a collection of built-in stop-lists.\"\"\"\n path_to_stoplists = os.path.dirname(sys.modules[\"justext\"].__file__)\n path_to_stoplists = os.path.join(path_to_stoplists, \"stoplists\")\n\n stoplist_names = []\n for filename in os.listdir(path_to_stoplists):\n name, extension = os.path.splitext(filename)\n if extension == \".txt\":\n stoplist_names.append(name)\n\n return frozenset(stoplist_names)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_stoplist(language):\n file_path = os.path.join(\"stoplists\", \"%s.txt\" % language)\n try:\n stopwords = pkgutil.get_data(\"justext\", file_path)\n except IOError:\n raise ValueError(\n \"Stoplist for language '%s' is missing. 
\"\n \"Please use function 'get_stoplists' for complete list of stoplists \"\n \"and feel free to contribute by your own stoplist.\" % language\n )\n\n return frozenset(w.decode(\"utf8\").lower() for w in stopwords.splitlines())", "response": "Returns an empty stop - list for the language as a set of words."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_boto_client(\n client,\n region=None,\n aws_access_key_id=None,\n aws_secret_access_key=None,\n aws_session_token=None,\n endpoint_url=None\n ):\n \"\"\"Get a boto3 client connection.\"\"\"\n cache_key = '{0}:{1}:{2}:{3}'.format(\n client,\n region,\n aws_access_key_id,\n endpoint_url or ''\n )\n if not aws_session_token:\n if cache_key in CLIENT_CACHE:\n return CLIENT_CACHE[cache_key]\n session = get_boto_session(\n region,\n aws_access_key_id,\n aws_secret_access_key,\n aws_session_token\n )\n if not session:\n logging.error(\"Failed to get {0} client.\".format(client))\n return None\n\n CLIENT_CACHE[cache_key] = session.client(\n client,\n endpoint_url=endpoint_url\n )\n return CLIENT_CACHE[cache_key]", "response": "Get a boto3 client connection."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ngetting a boto resource connection.", "response": "def get_boto_resource(\n resource,\n region=None,\n aws_access_key_id=None,\n aws_secret_access_key=None,\n aws_session_token=None,\n endpoint_url=None\n ):\n \"\"\"Get a boto resource connection.\"\"\"\n cache_key = '{0}:{1}:{2}:{3}'.format(\n resource,\n region,\n aws_access_key_id,\n endpoint_url or ''\n )\n if not aws_session_token:\n if cache_key in RESOURCE_CACHE:\n return RESOURCE_CACHE[cache_key]\n session = get_boto_session(\n region,\n aws_access_key_id,\n aws_secret_access_key,\n aws_session_token\n )\n if not session:\n logging.error(\"Failed to get {0} resource.\".format(resource))\n return None\n\n RESOURCE_CACHE[cache_key] = session.resource(\n resource,\n 
endpoint_url=endpoint_url\n )\n return RESOURCE_CACHE[cache_key]"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_boto_session(\n region,\n aws_access_key_id=None,\n aws_secret_access_key=None,\n aws_session_token=None\n ):\n \"\"\"Get a boto3 session.\"\"\"\n return boto3.session.Session(\n region_name=region,\n aws_secret_access_key=aws_secret_access_key,\n aws_access_key_id=aws_access_key_id,\n aws_session_token=aws_session_token\n )", "response": "Get a boto3 session."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nensure an input is a string decoding if it is bytes.", "response": "def ensure_text(str_or_bytes, encoding='utf-8'):\n \"\"\"Ensures an input is a string, decoding if it is bytes.\n \"\"\"\n if not isinstance(str_or_bytes, six.text_type):\n return str_or_bytes.decode(encoding)\n return str_or_bytes"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef ensure_bytes(str_or_bytes, encoding='utf-8', errors='strict'):\n if isinstance(str_or_bytes, six.text_type):\n return str_or_bytes.encode(encoding, errors)\n return str_or_bytes", "response": "Ensures an input is bytes encoding if it is a string."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _get_key_alias_from_cache(self, key_arn):\n '''\n Find a key's alias by looking up its key_arn in the KEY_METADATA\n cache. 
This function will only work after a key has been lookedup by\n its alias and is meant as a convenience function for turning an ARN\n that's already been looked up back into its alias.\n '''\n for alias in self.KEY_METADATA:\n if self.KEY_METADATA[alias]['KeyMetadata']['Arn'] == key_arn:\n return alias\n return None", "response": "Find a key s alias by looking up its key_arn in the KEY_METADATA cache."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting a username formatted for a specific token version.", "response": "def get_username(self):\n \"\"\"Get a username formatted for a specific token version.\"\"\"\n _from = self.auth_context['from']\n if self.token_version == 1:\n return '{0}'.format(_from)\n elif self.token_version == 2:\n _user_type = self.auth_context['user_type']\n return '{0}/{1}/{2}'.format(\n self.token_version,\n _user_type,\n _from\n )"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget an authentication token.", "response": "def get_token(self):\n \"\"\"Get an authentication token.\"\"\"\n # Generate string formatted timestamps for not_before and not_after,\n # for the lifetime specified in minutes.\n now = datetime.datetime.utcnow()\n # Start the not_before time x minutes in the past, to avoid clock skew\n # issues.\n _not_before = now - datetime.timedelta(minutes=TOKEN_SKEW)\n not_before = _not_before.strftime(TIME_FORMAT)\n # Set the not_after time in the future, by the lifetime, but ensure the\n # skew we applied to not_before is taken into account.\n _not_after = now + datetime.timedelta(\n minutes=self.token_lifetime - TOKEN_SKEW\n )\n not_after = _not_after.strftime(TIME_FORMAT)\n # Generate a json string for the encryption payload contents.\n payload = json.dumps({\n 'not_before': not_before,\n 'not_after': not_after\n })\n token = self._get_cached_token()\n if token:\n return token\n # Generate a base64 encoded KMS encrypted token to use for\n # authentication. 
We encrypt the token lifetime information as the\n # payload for verification in Confidant.\n try:\n token = self.kms_client.encrypt(\n KeyId=self.auth_key,\n Plaintext=payload,\n EncryptionContext=self.auth_context\n )['CiphertextBlob']\n token = base64.b64encode(ensure_bytes(token))\n except (ConnectionError, EndpointConnectionError) as e:\n logging.exception('Failure connecting to AWS: {}'.format(str(e)))\n raise ServiceConnectionError()\n except Exception:\n logging.exception('Failed to create auth token.')\n raise TokenGenerationError()\n self._cache_token(token, not_after)\n return token"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nparse a key = value string as a tuple", "response": "def dictstr(arg):\n \"\"\"\n Parse a key=value string as a tuple (key, value) that can be provided as an argument to dict()\n \"\"\"\n key, value = arg.split(\"=\")\n\n if value.lower() == \"true\" or value.lower() == \"false\":\n value = bool(value)\n elif INT_RE.match(value):\n value = int(value)\n elif FLOAT_RE.match(value):\n value = float(value)\n return (key, value)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef regex_matches_to_indexed_words(matches):\n words = [dict(v, **dict([('sentence', i)]))\n for i, s in enumerate(matches['sentences'])\n for k, v in s.items() if k != 'length']\n return words", "response": "Transforms tokensregex and semgrex matches to indexed words."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsending a request to CoreNLP server.", "response": "def _request(self, buf, properties, date=None):\n \"\"\"Send a request to the CoreNLP server.\n\n :param (str | unicode) text: raw text for the CoreNLPServer to parse\n :param (dict) properties: properties that the server expects\n :param (str) date: reference date of document, used by server to set docDate - expects YYYY-MM-DD\n :return: request result\n \"\"\"\n 
self.ensure_alive()\n\n try:\n input_format = properties.get(\"inputFormat\", \"text\")\n if input_format == \"text\":\n ctype = \"text/plain; charset=utf-8\"\n elif input_format == \"serialized\":\n ctype = \"application/x-protobuf\"\n else:\n raise ValueError(\"Unrecognized inputFormat \" + input_format)\n\n if date:\n params = {'properties': str(properties),'date': str(date)}\n else:\n params = {'properties': str(properties)}\n\n r = requests.post(self.endpoint,\n params=params,\n data=buf, headers={'content-type': ctype},\n timeout=(self.timeout*2)/1000)\n r.raise_for_status()\n return r\n except requests.HTTPError as e:\n if r.text == \"CoreNLP request timed out. Your document may be too long.\":\n raise TimeoutException(r.text)\n else:\n raise AnnotationException(r.text)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef annotate(self, text, annotators=None, output_format=None, properties=None, date=None):\n # set properties for server call\n if properties is None:\n properties = self.default_properties\n properties.update({\n 'annotators': ','.join(annotators or self.default_annotators),\n 'inputFormat': 'text',\n 'outputFormat': self.default_output_format,\n 'serializer': 'edu.stanford.nlp.pipeline.ProtobufAnnotationSerializer'\n })\n elif \"annotators\" not in properties:\n properties.update({'annotators': ','.join(annotators or self.default_annotators)})\n # if an output_format is specified, use that to override\n if output_format is not None:\n properties[\"outputFormat\"] = output_format\n # make the request\n r = self._request(text.encode('utf-8'), properties, date)\n # customize what is returned based outputFormat\n if properties[\"outputFormat\"] == \"serialized\":\n doc = Document()\n parseFromDelimitedString(doc, r.content)\n return doc\n elif properties[\"outputFormat\"] == \"json\":\n return r.json()\n elif properties[\"outputFormat\"] in [\"text\", \"conllu\", \"conll\", \"xml\"]:\n 
return r.text\n else:\n return r", "response": "Send a request to the CoreNLP server to parse the raw text."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef __regex(self, path, text, pattern, filter, annotators=None, properties=None):\n self.ensure_alive()\n if properties is None:\n properties = self.default_properties\n properties.update({\n 'annotators': ','.join(annotators or self.default_annotators),\n 'inputFormat': 'text',\n 'outputFormat': self.default_output_format,\n 'serializer': 'edu.stanford.nlp.pipeline.ProtobufAnnotationSerializer'\n })\n elif \"annotators\" not in properties:\n properties.update({'annotators': ','.join(annotators or self.default_annotators)})\n\n # HACK: For some stupid reason, CoreNLPServer will timeout if we\n # need to annotate something from scratch. So, we need to call\n # this to ensure that the _regex call doesn't timeout.\n self.annotate(text, properties=properties)\n\n try:\n # Error occurs unless put properties in params\n input_format = properties.get(\"inputFormat\", \"text\")\n if input_format == \"text\":\n ctype = \"text/plain; charset=utf-8\"\n elif input_format == \"serialized\":\n ctype = \"application/x-protobuf\"\n else:\n raise ValueError(\"Unrecognized inputFormat \" + input_format)\n # change request method from `get` to `post` as required by CoreNLP\n r = requests.post(\n self.endpoint + path, params={\n 'pattern': pattern,\n 'filter': filter,\n 'properties': str(properties)\n }, data=text,\n headers={'content-type': ctype},\n timeout=(self.timeout*2)/1000,\n )\n r.raise_for_status()\n return json.loads(r.text)\n except requests.HTTPError as e:\n if r.text.startswith(\"Timeout\"):\n raise TimeoutException(r.text)\n else:\n raise AnnotationException(r.text)\n except json.JSONDecodeError:\n raise AnnotationException(r.text)", "response": "Send a regex - related request to CoreNLP server."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a 
function in Python 3 that\nreturns a dictionary of properties that define this anntoator.", "response": "def properties(self):\n \"\"\"\n Defines a Java property to define this anntoator to CoreNLP.\n \"\"\"\n return {\n \"customAnnotatorClass.{}\".format(self.name): \"edu.stanford.nlp.pipeline.GenericWebServiceAnnotator\",\n \"generic.endpoint\": \"http://{}:{}\".format(self.host, self.port),\n \"generic.requires\": \",\".join(self.requires),\n \"generic.provides\": \",\".join(self.provides),\n }"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nruns the server using Python s simple HTTPServer.", "response": "def run(self):\n \"\"\"\n Runs the server using Python's simple HTTPServer.\n TODO: make this multithreaded.\n \"\"\"\n httpd = HTTPServer((self.host, self.port), self._Handler)\n sa = httpd.socket.getsockname()\n serve_message = \"Serving HTTP on {host} port {port} (http://{host}:{port}/) ...\"\n print(serve_message.format(host=sa[0], port=sa[1]))\n try:\n httpd.serve_forever()\n except KeyboardInterrupt:\n print(\"\\nKeyboard interrupt received, exiting.\")\n httpd.shutdown()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncompiling or format the wildcard inclusion or exclusion pattern.", "response": "def _compile_wildcard(self, pattern, pathname=False):\n \"\"\"Compile or format the wildcard inclusion/exclusion pattern.\"\"\"\n\n patterns = None\n flags = self.flags\n if pathname:\n flags |= _wcparse.PATHNAME\n if pattern:\n patterns = _wcparse.WcSplit(pattern, flags=flags).split()\n\n return _wcparse.compile(patterns, flags) if patterns else patterns"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn whether a file can be searched.", "response": "def _valid_file(self, base, name):\n \"\"\"Return whether a file can be searched.\"\"\"\n\n valid = False\n fullpath = os.path.join(base, name)\n if self.file_check is not None and 
self.compare_file(fullpath[self._base_len:] if self.file_pathname else name):\n valid = True\n if valid and (not self.show_hidden and util.is_hidden(fullpath)):\n valid = False\n return self.on_validate_file(base, name) if valid else valid"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _valid_folder(self, base, name):\n\n valid = True\n fullpath = os.path.join(base, name)\n if (\n not self.recursive or\n (\n self.folder_exclude_check is not None and\n not self.compare_directory(fullpath[self._base_len:] if self.dir_pathname else name)\n )\n ):\n valid = False\n if valid and (not self.show_hidden and util.is_hidden(fullpath)):\n valid = False\n return self.on_validate_directory(base, name) if valid else valid", "response": "Return whether a folder can be searched."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nstart the walking function.", "response": "def _walk(self):\n \"\"\"Start search for valid files.\"\"\"\n\n self._base_len = len(self.base)\n\n for base, dirs, files in os.walk(self.base, followlinks=self.follow_links):\n # Remove child folders based on exclude rules\n for name in dirs[:]:\n try:\n if not self._valid_folder(base, name):\n dirs.remove(name)\n except Exception:\n dirs.remove(name)\n value = self.on_error(base, name)\n if value is not None: # pragma: no cover\n yield value\n\n if self._abort:\n break\n\n # Search files if they were found\n if len(files):\n # Only search files that are in the include rules\n for name in files:\n try:\n valid = self._valid_file(base, name)\n except Exception:\n valid = False\n value = self.on_error(base, name)\n if value is not None:\n yield value\n\n if valid:\n yield self.on_match(base, name)\n else:\n self._skipped += 1\n value = self.on_skip(base, name)\n if value is not None:\n yield value\n\n if self._abort:\n break\n\n if self._abort:\n break"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in 
Python 3, explain what it does\ndef is_negative(pattern, flags):\n\n if flags & MINUSNEGATE:\n return flags & NEGATE and pattern[0:1] in MINUS_NEGATIVE_SYM\n else:\n return flags & NEGATE and pattern[0:1] in NEGATIVE_SYM", "response": "Check if negative pattern."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_case(flags):\n\n if not bool(flags & CASE_FLAGS):\n case_sensitive = util.is_case_sensitive()\n elif flags & FORCECASE:\n case_sensitive = True\n else:\n case_sensitive = False\n return case_sensitive", "response": "Parse flags for case sensitivity settings."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nchecks if we should use Unix style.", "response": "def is_unix_style(flags):\n \"\"\"Check if we should use Unix style.\"\"\"\n\n return (util.platform() != \"windows\" or (not bool(flags & REALPATH) and get_case(flags))) and not flags & _FORCEWIN"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncompiles the pattern to regex.", "response": "def _compile(pattern, flags):\n \"\"\"Compile the pattern to regex.\"\"\"\n\n return re.compile(WcParse(pattern, flags & FLAG_MASK).parse())"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nmatch a pattern against a file.", "response": "def _fs_match(pattern, filename, sep, follow, symlinks):\n \"\"\"\n Match path against the pattern.\n\n Since `globstar` doesn't match symlinks (unless `FOLLOW` is enabled), we must look for symlinks.\n If we identify a symlink in a `globstar` match, we know this result should not actually match.\n \"\"\"\n\n matched = False\n\n base = None\n m = pattern.fullmatch(filename)\n if m:\n matched = True\n # Lets look at the captured `globstar` groups and see if that part of the path\n # contains symlinks.\n if not follow:\n groups = m.groups()\n last = len(groups)\n for i, star in enumerate(m.groups(), 1):\n if 
star:\n parts = star.strip(sep).split(sep)\n if base is None:\n base = filename[:m.start(i)]\n for part in parts:\n base = os.path.join(base, part)\n is_link = symlinks.get(base, None)\n if is_link is not None:\n matched = not is_link\n elif i != last or os.path.isdir(base):\n is_link = os.path.islink(base)\n symlinks[base] = is_link\n matched = not is_link\n if not matched:\n break\n if matched:\n break\n return matched"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nmatches real filename includes and excludes.", "response": "def _match_real(filename, include, exclude, follow, symlinks):\n \"\"\"Match real filename includes and excludes.\"\"\"\n\n sep = '\\\\' if util.platform() == \"windows\" else '/'\n if isinstance(filename, bytes):\n sep = os.fsencode(sep)\n if not filename.endswith(sep) and os.path.isdir(filename):\n filename += sep\n\n matched = False\n for pattern in include:\n if _fs_match(pattern, filename, sep, follow, symlinks):\n matched = True\n break\n\n if matched:\n matched = True\n if exclude:\n for pattern in exclude:\n if _fs_match(pattern, filename, sep, follow, symlinks):\n matched = False\n break\n return matched"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _match_pattern(filename, include, exclude, real, path, follow):\n\n if real:\n symlinks = {}\n if isinstance(filename, bytes):\n curdir = os.fsencode(os.curdir)\n mount = RE_BWIN_MOUNT if util.platform() == \"windows\" else RE_BMOUNT\n else:\n curdir = os.curdir\n mount = RE_WIN_MOUNT if util.platform() == \"windows\" else RE_MOUNT\n\n if not mount.match(filename):\n exists = os.path.lexists(os.path.join(curdir, filename))\n else:\n exists = os.path.lexists(filename)\n\n if not exists:\n return False\n if path:\n return _match_real(filename, include, exclude, follow, symlinks)\n\n matched = False\n for pattern in include:\n if pattern.fullmatch(filename):\n matched = True\n 
break\n\n if not include and exclude:\n matched = True\n\n if matched:\n matched = True\n if exclude:\n for pattern in exclude:\n if not pattern.fullmatch(filename):\n matched = False\n break\n return matched", "response": "Match includes and excludes."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nparse extended pattern lists.", "response": "def parse_extend(self, c, i):\n \"\"\"Parse extended pattern lists.\"\"\"\n\n # Start list parsing\n success = True\n index = i.index\n list_type = c\n try:\n c = next(i)\n if c != '(':\n raise StopIteration\n while c != ')':\n c = next(i)\n\n if self.extend and c in EXT_TYPES and self.parse_extend(c, i):\n continue\n\n if c == '\\\\':\n try:\n self._references(i)\n except StopIteration:\n pass\n elif c == '[':\n index = i.index\n try:\n self._sequence(i)\n except StopIteration:\n i.rewind(i.index - index)\n\n except StopIteration:\n success = False\n c = list_type\n i.rewind(i.index - index)\n\n return success"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef store(self, value, l, dir_only):\n\n if l and value in (b'', ''):\n return\n\n globstar = value in (b'**', '**') and self.globstar\n magic = self.is_magic(value)\n if magic:\n value = compile(value, self.flags)\n l.append(WcGlob(value, magic, globstar, dir_only, False))", "response": "Group patterns by literals and potential magic patterns."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef split(self):\n\n split_index = []\n parts = []\n start = -1\n\n pattern = self.pattern.decode('latin-1') if self.is_bytes else self.pattern\n\n i = util.StringIter(pattern)\n iter(i)\n\n # Detect and store away windows drive as a literal\n if self.win_drive_detect:\n m = RE_WIN_PATH.match(pattern)\n if m:\n drive = m.group(0).replace('\\\\\\\\', '\\\\')\n if self.is_bytes:\n drive = drive.encode('latin-1')\n parts.append(WcGlob(drive, False, False, True, True))\n 
start = m.end(0) - 1\n i.advance(start + 1)\n elif pattern.startswith('\\\\\\\\'):\n parts.append(WcGlob(b'\\\\' if self.is_bytes else '\\\\', False, False, True, True))\n start = 1\n i.advance(2)\n elif not self.win_drive_detect and pattern.startswith('/'):\n parts.append(WcGlob(b'/' if self.is_bytes else '/', False, False, True, True))\n start = 0\n i.advance(1)\n\n for c in i:\n if self.extend and c in EXT_TYPES and self.parse_extend(c, i):\n continue\n\n if c == '\\\\':\n index = i.index\n value = ''\n try:\n value = self._references(i)\n if self.bslash_abort and value == '\\\\':\n split_index.append((i.index - 2, 1))\n except StopIteration:\n i.rewind(i.index - index)\n if self.bslash_abort:\n split_index.append((i.index - 1, 0))\n elif c == '/':\n split_index.append((i.index - 1, 0))\n elif c == '[':\n index = i.index\n try:\n self._sequence(i)\n except StopIteration:\n i.rewind(i.index - index)\n\n for split, offset in split_index:\n if self.is_bytes:\n value = pattern[start + 1:split].encode('latin-1')\n else:\n value = pattern[start + 1:split]\n self.store(value, parts, True)\n start = split + offset\n\n if start < len(pattern):\n if self.is_bytes:\n value = pattern[start + 1:].encode('latin-1')\n else:\n value = pattern[start + 1:]\n if value:\n self.store(value, parts, False)\n if len(pattern) == 0:\n parts.append(WcGlob(pattern.encode('latin-1') if self.is_bytes else pattern, False, False, False, False))\n\n return parts", "response": "Start parsing the pattern and return a list of the parts that are part of the pattern."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nstarts parsing the pattern.", "response": "def split(self):\n \"\"\"Start parsing the pattern.\"\"\"\n\n split_index = []\n parts = []\n\n pattern = self.pattern.decode('latin-1') if self.is_bytes else self.pattern\n\n i = util.StringIter(pattern)\n iter(i)\n for c in i:\n if self.extend and c in EXT_TYPES and self.parse_extend(c, i):\n 
continue\n\n if c == '|':\n split_index.append(i.index - 1)\n elif c == '\\\\':\n index = i.index\n try:\n self._references(i)\n except StopIteration:\n i.rewind(i.index - index)\n elif c == '[':\n index = i.index\n try:\n self._sequence(i)\n except StopIteration:\n i.rewind(i.index - index)\n\n start = -1\n for split in split_index:\n p = pattern[start + 1:split]\n parts.append(p.encode('latin-1') if self.is_bytes else p)\n start = split\n\n if start < len(pattern):\n p = pattern[start + 1:]\n parts.append(p.encode('latin-1') if self.is_bytes else p)\n\n return tuple(parts)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef update_dir_state(self):\n\n if self.dir_start and not self.after_start:\n self.set_after_start()\n elif not self.dir_start and self.after_start:\n self.reset_dir_track()", "response": "Update the directory state."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchecking if the sequence is in the range of the last entry in the result list.", "response": "def _sequence_range_check(self, result, last):\n \"\"\"\n If range backwards, remove it.\n\n A bad range will cause the regular expression to fail,\n so we need to remove it, but return that we removed it\n so the caller can know the sequence wasn't empty.\n Caller will have to craft a sequence that makes sense\n if empty at the end with either an impossible sequence\n for inclusive sequences or a sequence that matches\n everything for an exclusive sequence.\n \"\"\"\n\n removed = False\n first = result[-2]\n v1 = ord(first[1:2] if len(first) > 1 else first)\n v2 = ord(last[1:2] if len(last) > 1 else last)\n if v2 < v1:\n result.pop()\n result.pop()\n removed = True\n else:\n result.append(last)\n return removed"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef clean_up_inverse(self, current):\n\n if not self.inv_ext:\n return\n\n index = 
len(current) - 1\n while index >= 0:\n if isinstance(current[index], InvPlaceholder):\n content = current[index + 1:]\n content.append(_EOP if not self.pathname else self.path_eop)\n current[index] = (''.join(content)) + (_EXCLA_GROUP_CLOSE % str(current[index]))\n index -= 1\n self.inv_ext = 0", "response": "Clean up the inverse of the current."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef parse_extend(self, c, i, current, reset_dot=False):\n\n # Save state\n temp_dir_start = self.dir_start\n temp_after_start = self.after_start\n temp_in_list = self.in_list\n temp_inv_ext = self.inv_ext\n self.in_list = True\n if reset_dot:\n self.allow_special_dir = False\n\n # Start list parsing\n success = True\n index = i.index\n list_type = c\n extended = []\n try:\n c = next(i)\n if c != '(':\n raise StopIteration\n while c != ')':\n c = next(i)\n\n if self.extend and c in EXT_TYPES and self.parse_extend(c, i, extended):\n # Nothing more to do\n pass\n elif c == '*':\n self._handle_star(i, extended)\n elif c == '.' 
and self.after_start:\n extended.append(re.escape(c))\n self.allow_special_dir = True\n self.reset_dir_track()\n elif c == '?':\n extended.append(self._restrict_sequence() + _QMARK)\n elif c == '/':\n if self.pathname:\n extended.append(self._restrict_extended_slash())\n extended.append(re.escape(c))\n elif c == \"|\":\n self.clean_up_inverse(extended)\n extended.append(c)\n if temp_after_start:\n self.set_start_dir()\n elif c == '\\\\':\n try:\n extended.append(self._references(i))\n except StopIteration:\n # We've reached the end.\n # Do nothing because this is going to abort the `extmatch` anyways.\n pass\n elif c == '[':\n subindex = i.index\n try:\n extended.append(self._sequence(i))\n except StopIteration:\n i.rewind(i.index - subindex)\n extended.append(r'\\[')\n elif c != ')':\n extended.append(re.escape(c))\n\n self.update_dir_state()\n\n self.clean_up_inverse(extended)\n if list_type == '?':\n current.append(_QMARK_GROUP % ''.join(extended))\n elif list_type == '*':\n current.append(_STAR_GROUP % ''.join(extended))\n elif list_type == '+':\n current.append(_PLUS_GROUP % ''.join(extended))\n elif list_type == '@':\n current.append(_GROUP % ''.join(extended))\n elif list_type == '!':\n self.inv_ext += 1\n # If pattern is at the end, anchor the match to the end.\n current.append(_EXCLA_GROUP % ''.join(extended))\n if self.pathname:\n if not temp_after_start or self.allow_special_dir:\n star = self.path_star\n elif temp_after_start and not self.dot:\n star = self.path_star_dot2\n else:\n star = self.path_star_dot1\n else:\n if not temp_after_start or self.dot:\n star = _STAR\n else:\n star = _NO_DOT + _STAR\n\n if temp_after_start:\n star = _NEED_CHAR + star\n # Place holder for closing, but store the proper star\n # so we know which one to use\n current.append(InvPlaceholder(star))\n\n except StopIteration:\n success = False\n self.inv_ext = temp_inv_ext\n i.rewind(i.index - index)\n\n # Either restore if extend parsing failed, or reset if it worked\n if not 
temp_in_list:\n self.in_list = False\n if success:\n self.reset_dir_track()\n else:\n self.dir_start = temp_dir_start\n self.after_start = temp_after_start\n\n return success", "response": "Parse extended pattern lists."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef consume_path_sep(self, i):\n\n try:\n if self.bslash_abort:\n count = -1\n c = '\\\\'\n while c == '\\\\':\n count += 1\n c = next(i)\n i.rewind(1)\n # Rewind one more if we have an odd number (escape): \\\\\\*\n if count > 0 and count % 2:\n i.rewind(1)\n else:\n c = '/'\n while c == '/':\n c = next(i)\n i.rewind(1)\n except StopIteration:\n pass", "response": "Consume any consecutive path separators are they count as one."} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nstarts parsing the pattern.", "response": "def root(self, pattern, current):\n \"\"\"Start parsing the pattern.\"\"\"\n\n self.set_after_start()\n i = util.StringIter(pattern)\n iter(i)\n root_specified = False\n if self.win_drive_detect:\n m = RE_WIN_PATH.match(pattern)\n if m:\n drive = m.group(0).replace('\\\\\\\\', '\\\\')\n if drive.endswith('\\\\'):\n slash = True\n drive = drive[:-1]\n current.append(re.escape(drive))\n if slash:\n current.append(self.get_path_sep() + _ONE_OR_MORE)\n i.advance(m.end(0))\n self.consume_path_sep(i)\n root_specified = True\n elif pattern.startswith('\\\\\\\\'):\n root_specified = True\n elif not self.win_drive_detect and self.pathname and pattern.startswith('/'):\n root_specified = True\n\n if not root_specified and self.realpath:\n current.append(_NO_WIN_ROOT if self.win_drive_detect else _NO_ROOT)\n current.append('')\n\n for c in i:\n\n index = i.index\n if self.extend and c in EXT_TYPES and self.parse_extend(c, i, current, True):\n # Nothing to do\n pass\n elif c == '*':\n self._handle_star(i, current)\n elif c == '?':\n current.append(self._restrict_sequence() + 
_QMARK)\n elif c == '/':\n if self.pathname:\n self.set_start_dir()\n self.clean_up_inverse(current)\n current.append(self.get_path_sep() + _ONE_OR_MORE)\n self.consume_path_sep(i)\n else:\n current.append(re.escape(c))\n elif c == '\\\\':\n index = i.index\n try:\n value = self._references(i)\n if self.dir_start:\n self.clean_up_inverse(current)\n self.consume_path_sep(i)\n current.append(value)\n except StopIteration:\n i.rewind(i.index - index)\n current.append(re.escape(c))\n elif c == '[':\n index = i.index\n try:\n current.append(self._sequence(i))\n except StopIteration:\n i.rewind(i.index - index)\n current.append(re.escape(c))\n else:\n current.append(re.escape(c))\n\n self.update_dir_state()\n\n self.clean_up_inverse(current)\n if self.pathname:\n current.append(_PATH_TRAIL % self.get_path_sep())"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nparsing the pattern list.", "response": "def parse(self):\n \"\"\"Parse pattern list.\"\"\"\n\n result = ['']\n negative = False\n\n p = util.norm_pattern(self.pattern, not self.unix, self.raw_chars)\n\n p = p.decode('latin-1') if self.is_bytes else p\n if is_negative(p, self.flags):\n negative = True\n p = p[1:]\n\n self.root(p, result)\n\n case_flag = 'i' if not self.case_sensitive else ''\n if util.PY36:\n pattern = (\n r'^(?!(?s%s:%s)$).*?$' if negative and not self.globstar_capture else r'^(?s%s:%s)$'\n ) % (case_flag, ''.join(result))\n else:\n pattern = (\n r'(?s%s)^(?!(?:%s)$).*?$' if negative and not self.globstar_capture else r'(?s%s)^(?:%s)$'\n ) % (case_flag, ''.join(result))\n\n if self.is_bytes:\n pattern = pattern.encode('latin-1')\n\n return pattern"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ntransforms flags to glob defaults.", "response": "def _flag_transform(flags):\n \"\"\"Transform flags to glob defaults.\"\"\"\n\n # Here we force `PATHNAME`.\n flags = (flags & FLAG_MASK) | _wcparse.PATHNAME\n if flags & 
_wcparse.REALPATH and util.platform() == \"windows\":\n flags |= _wcparse._FORCEWIN\n if flags & _wcparse.FORCECASE:\n flags ^= _wcparse.FORCECASE\n return flags"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ncheck if filename matches patterns.", "response": "def globmatch(filename, patterns, *, flags=0):\n \"\"\"\n Check if filename matches pattern.\n\n By default case sensitivity is determined by the file system,\n but if `case_sensitive` is set, respect that instead.\n \"\"\"\n\n flags = _flag_transform(flags)\n if not _wcparse.is_unix_style(flags):\n filename = util.norm_slash(filename)\n return _wcparse.compile(_wcparse.split(patterns, flags), flags).match(filename)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nfilter filenames using pattern.", "response": "def globfilter(filenames, patterns, *, flags=0):\n \"\"\"Filter names using pattern.\"\"\"\n\n matches = []\n\n flags = _flag_transform(flags)\n unix = _wcparse.is_unix_style(flags)\n obj = _wcparse.compile(_wcparse.split(patterns, flags), flags)\n\n for filename in filenames:\n if not unix:\n filename = util.norm_slash(filename)\n if obj.match(filename):\n matches.append(filename)\n return matches"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\napplying raw character transform before applying escape.", "response": "def raw_escape(pattern, unix=False):\n \"\"\"Apply raw character transform before applying escape.\"\"\"\n\n pattern = util.norm_pattern(pattern, False, True)\n return escape(pattern, unix)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _match_excluded(self, filename, patterns):\n\n return _wcparse._match_real(\n filename, patterns._include, patterns._exclude, patterns._follow, self.symlinks\n )", "response": "Call match real directly to skip unnecessary exists check."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 
3 function, write the documentation\ndef _is_excluded(self, path, dir_only):\n\n return self.npatterns and self._match_excluded(path, self.npatterns)", "response": "Check if file is excluded."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _glob_dir(self, curdir, matcher, dir_only=False, deep=False):\n\n scandir = self.current if not curdir else curdir\n\n # Python will never return . or .., so fake it.\n if os.path.isdir(scandir) and matcher is not None:\n for special in self.specials:\n if matcher(special):\n yield os.path.join(curdir, special)\n\n try:\n if NO_SCANDIR_WORKAROUND:\n # Our current directory can be empty if the path starts with magic,\n # But we don't want to return paths with '.', so just use it to list\n # files, but use '' when constructing the path.\n with os.scandir(scandir) as scan:\n for f in scan:\n try:\n # Quicker to just test this way than to run through `fnmatch`.\n if deep and self._is_hidden(f.name):\n continue\n path = os.path.join(curdir, f.name)\n is_dir = f.is_dir()\n if is_dir:\n is_link = f.is_symlink()\n self.symlinks[path] = is_link\n else:\n # We don't care if a file is a link\n is_link = False\n if deep and not self.follow_links and is_link:\n continue\n if (not dir_only or is_dir) and (matcher is None or matcher(f.name)):\n yield path\n if deep and is_dir:\n yield from self._glob_dir(path, matcher, dir_only, deep)\n except OSError: # pragma: no cover\n pass\n else:\n for f in os.listdir(scandir):\n # Quicker to just test this way than to run through `fnmatch`.\n if deep and self._is_hidden(f):\n continue\n path = os.path.join(curdir, f)\n is_dir = os.path.isdir(path)\n if is_dir:\n is_link = os.path.islink(path)\n self.symlinks[path] = is_link\n else:\n is_link = False\n if deep and not self.follow_links and is_link:\n continue\n if (not dir_only or is_dir) and (matcher is None or matcher(f)):\n yield path\n if deep and is_dir:\n yield from self._glob_dir(path, matcher, 
dir_only, deep)\n except OSError: # pragma: no cover\n pass", "response": "Non recursive directory glob."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nhandle glob flow. There are really only a couple of cases: - File name. - File name pattern (magic). - Directory. - Directory name pattern (magic). - Extra slashes `////`. - `globstar` `**`.", "response": "def _glob(self, curdir, this, rest):\n \"\"\"\n Handle glob flow.\n\n There are really only a couple of cases:\n\n - File name.\n - File name pattern (magic).\n - Directory.\n - Directory name pattern (magic).\n - Extra slashes `////`.\n - `globstar` `**`.\n \"\"\"\n\n is_magic = this.is_magic\n dir_only = this.dir_only\n target = this.pattern\n is_globstar = this.is_globstar\n\n if is_magic and is_globstar:\n # Glob star directory `**`.\n\n # Throw away multiple consecutive `globstars`\n # and acquire the pattern after the `globstars` if available.\n this = rest.pop(0) if rest else None\n globstar_end = this is None\n while this and not globstar_end:\n if this:\n dir_only = this.dir_only\n target = this.pattern\n if this and this.is_globstar:\n this = rest.pop(0) if rest else None\n if this is None:\n globstar_end = True\n else:\n break\n\n if globstar_end:\n target = None\n\n # We match `**/next` during a deep glob, so what ever comes back,\n # we will send back through `_glob` with pattern after `next` (`**/next/after`).\n # So grab `after` if available.\n this = rest.pop(0) if rest else None\n\n # Deep searching is the unique case where we\n # might feed in a `None` for the next pattern to match.\n # Deep glob will account for this.\n matcher = self._get_matcher(target)\n\n # If our pattern ends with `curdir/**`, but does not start with `**` it matches zero or more,\n # so it should return `curdir/`, signifying `curdir` + no match.\n # If a pattern follows `**/something`, we always get the appropriate\n # return already, so this isn't needed in that case.\n # There is one quirk 
though with Bash, if `curdir` had magic before `**`, Bash\n # omits the trailing `/`. We don't worry about that.\n if globstar_end and curdir:\n yield os.path.join(curdir, self.empty)\n\n # Search\n for path in self._glob_dir(curdir, matcher, dir_only, deep=True):\n if this:\n yield from self._glob(path, this, rest[:])\n else:\n yield path\n\n elif not dir_only:\n # Files: no need to recursively search at this point as we are done.\n matcher = self._get_matcher(target)\n yield from self._glob_dir(curdir, matcher)\n\n else:\n # Directory: search current directory against pattern\n # and feed the results back through with the next pattern.\n this = rest.pop(0) if rest else None\n matcher = self._get_matcher(target)\n for path in self._glob_dir(curdir, matcher, True):\n if this:\n yield from self._glob(path, this, rest[:])\n else:\n yield path"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the starting paths of the log files.", "response": "def _get_starting_paths(self, curdir):\n \"\"\"\n Get the starting location.\n\n For case sensitive paths, we have to \"glob\" for\n it first as Python doesn't like for its users to\n think about case. 
By scanning for it, we can get\n the actual casing and then compare.\n \"\"\"\n\n results = [curdir]\n\n if not self._is_parent(curdir) and not self._is_this(curdir):\n fullpath = os.path.abspath(curdir)\n basename = os.path.basename(fullpath)\n dirname = os.path.dirname(fullpath)\n if basename:\n matcher = self._get_matcher(basename)\n results = [os.path.basename(name) for name in self._glob_dir(dirname, matcher, self)]\n\n return results"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nyields the file names and directories that match the pattern.", "response": "def glob(self):\n \"\"\"Starts off the glob iterator.\"\"\"\n\n # Cached symlinks\n self.symlinks = {}\n\n if self.is_bytes:\n curdir = os.fsencode(os.curdir)\n else:\n curdir = os.curdir\n\n for pattern in self.pattern:\n # If the pattern ends with `/` we return the files ending with `/`.\n dir_only = pattern[-1].dir_only if pattern else False\n\n if pattern:\n if not pattern[0].is_magic:\n # Path starts with normal plain text\n # Lets verify the case of the starting directory (if possible)\n this = pattern[0]\n\n curdir = this[0]\n\n if not os.path.lexists(curdir):\n return\n\n # Make sure case matches, but running case insensitive\n # on a case sensitive file system may return more than\n # one starting location.\n results = [curdir] if this.is_drive else self._get_starting_paths(curdir)\n if not results:\n if not dir_only:\n # There is no directory with this name,\n # but we have a file and no directory restriction\n yield curdir\n return\n\n if this.dir_only:\n # Glob these directories if they exists\n for start in results:\n if os.path.isdir(start):\n rest = pattern[1:]\n if rest:\n this = rest.pop(0)\n for match in self._glob(curdir, this, rest):\n if not self._is_excluded(match, dir_only):\n yield os.path.join(match, self.empty) if dir_only else match\n elif not self._is_excluded(curdir, dir_only):\n yield os.path.join(curdir, self.empty) if dir_only else 
curdir\n else:\n # Return the file(s) and finish.\n for start in results:\n if os.path.lexists(start) and not self._is_excluded(start, dir_only):\n yield os.path.join(start, self.empty) if dir_only else start\n else:\n # Path starts with a magic pattern, let's get globbing\n rest = pattern[:]\n this = rest.pop(0)\n for match in self._glob(curdir if not curdir == self.current else self.empty, this, rest):\n if not self._is_excluded(match, dir_only):\n yield os.path.join(match, self.empty) if dir_only else match"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef norm_pattern(pattern, normalize, is_raw_chars):\n\n is_bytes = isinstance(pattern, bytes)\n\n if not normalize and not is_raw_chars:\n return pattern\n\n def norm_char(token):\n \"\"\"Normalize slash.\"\"\"\n\n if normalize and token in ('/', b'/'):\n token = br'\\\\' if is_bytes else r'\\\\'\n return token\n\n def norm(m):\n \"\"\"Normalize the pattern.\"\"\"\n\n if m.group(1):\n char = m.group(1)\n if normalize:\n char = br'\\\\\\\\' if is_bytes else r'\\\\\\\\' if len(char) > 1 else norm_char(char)\n elif m.group(2):\n char = norm_char(BACK_SLASH_TRANSLATION[m.group(2)] if is_raw_chars else m.group(2))\n elif is_raw_chars and m.group(4):\n char = norm_char(bytes([int(m.group(4), 8) & 0xFF]) if is_bytes else chr(int(m.group(4), 8)))\n elif is_raw_chars and m.group(3):\n char = norm_char(bytes([int(m.group(3)[2:], 16)]) if is_bytes else chr(int(m.group(3)[2:], 16)))\n elif is_raw_chars and not is_bytes and m.group(5):\n char = norm_char(unicodedata.lookup(m.group(5)[3:-1]))\n elif not is_raw_chars:\n char = m.group(0)\n else:\n value = m.group(5) if is_bytes else m.group(6)\n pos = m.start(5) if is_bytes else m.start(6)\n raise SyntaxError(\"Could not convert character value %s at position %d\" % (value, pos))\n return char\n\n return (RE_BNORM if is_bytes else RE_NORM).sub(norm, pattern)", "response": "r Normalize a pattern."} {"SOURCE": 
"codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck if file is hidden.", "response": "def is_hidden(path):\n \"\"\"Check if file is hidden.\"\"\"\n\n hidden = False\n f = os.path.basename(path)\n if f[:1] in ('.', b'.'):\n # Count dot file as hidden on all systems\n hidden = True\n elif _PLATFORM == 'windows':\n # On Windows, look for `FILE_ATTRIBUTE_HIDDEN`\n FILE_ATTRIBUTE_HIDDEN = 0x2\n if PY35:\n results = os.lstat(path)\n hidden = bool(results.st_file_attributes & FILE_ATTRIBUTE_HIDDEN)\n else:\n if isinstance(path, bytes):\n attrs = ctypes.windll.kernel32.GetFileAttributesA(path)\n else:\n attrs = ctypes.windll.kernel32.GetFileAttributesW(path)\n hidden = attrs != -1 and attrs & FILE_ATTRIBUTE_HIDDEN\n elif _PLATFORM == \"osx\": # pragma: no cover\n # On macOS, look for `UF_HIDDEN`\n results = os.lstat(path)\n hidden = bool(results.st_flags & stat.UF_HIDDEN)\n return hidden"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef match(self, pattern):\n\n m = pattern.match(self._string, self._index)\n if m:\n self._index = m.end()\n return m", "response": "Perform regex match at index."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\niterating through characters of the string.", "response": "def iternext(self):\n \"\"\"Iterate through characters of the string.\"\"\"\n\n try:\n char = self._string[self._index]\n self._index += 1\n except IndexError: # pragma: no cover\n raise StopIteration\n\n return char"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef generate_slug(text, tail_number=0):\n from wagtail.core.models import Page\n \"\"\"\n Returns a new unique slug. Object must provide a SlugField called slug.\n URL friendly slugs are generated using django.template.defaultfilters'\n slugify. 
Numbers are added to the end of slugs for uniqueness.\n\n based on implementation in jmbo.utils\n https://github.com/praekelt/jmbo/blob/develop/jmbo/utils/__init__.py\n \"\"\"\n\n # Empty slugs are ugly (eg. '-1' may be generated) so force non-empty\n if not text:\n text = 'no-title'\n\n # use django slugify filter to slugify\n slug = cautious_slugify(text)[:255]\n\n values_list = Page.objects.filter(\n slug__startswith=slug\n ).values_list('id', 'slug')\n\n # Find highest suffix\n max = -1\n for tu in values_list:\n if tu[1] == slug:\n if max == -1:\n # Set max to indicate a collision\n max = 0\n\n # Update max if suffix is greater\n match = RE_NUMERICAL_SUFFIX.match(tu[1])\n if match is not None:\n i = int(match.group(1))\n if i > max:\n max = i\n\n if max >= 0:\n # There were collisions\n return \"%s-%s\" % (slug, max + 1)\n else:\n # No collisions\n return slug", "response": "Generate a unique slug for a page."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef update_media_file(upload_file):\n '''\n Update the Current Media Folder.\n\n Returns list of files copied across or\n raises an exception.\n '''\n temp_directory = tempfile.mkdtemp()\n temp_file = tempfile.TemporaryFile()\n # assumes the zip file contains a directory called media\n temp_media_file = os.path.join(temp_directory, 'media')\n try:\n for chunk in upload_file.chunks():\n temp_file.write(chunk)\n\n with zipfile.ZipFile(temp_file, 'r') as z:\n z.extractall(temp_directory)\n\n if os.path.exists(temp_media_file):\n return distutils.dir_util.copy_tree(\n temp_media_file,\n settings.MEDIA_ROOT)\n else:\n raise Exception(\"Error: There is no directory called \"\n \"'media' in the root of the zipped file\")\n except Exception as e:\n raise e\n finally:\n temp_file.close()\n if os.path.exists(temp_directory):\n shutil.rmtree(temp_directory)", "response": "Update the Current Media Folder."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in 
Python 3 to\nreturn an MD5 hash of the image file", "response": "def get_image_hash(image):\n '''\n Returns an MD5 hash of the image file\n\n Handles images stored locally and on AWS\n\n I know this code is ugly.\n Please don't ask.\n The rabbit hole is deep.\n '''\n md5 = hashlib.md5()\n try:\n for chunk in image.file.chunks():\n md5.update(chunk)\n return md5.hexdigest()\n # this should only occur in tests\n except ValueError:\n # see link below for why we try not to use .open()\n # https://docs.djangoproject.com/en/1.9/ref/files/uploads/#django.core.files.uploadedfile.UploadedFile.chunks # noqa\n image.file.open()\n\n for chunk in image.file.chunks():\n md5.update(chunk)\n return md5.hexdigest()\n finally:\n image.file.close()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef separate_fields(fields):\n flat_fields = {}\n nested_fields = {}\n\n # exclude \"id\" and \"meta\" elements\n for k, v in fields.items():\n # TODO: remove dependence on KEYS_TO_INCLUDE\n if k not in KEYS_TO_EXCLUDE:\n if type(v) not in [type({}), type([])]:\n flat_fields.update({k: v})\n else:\n nested_fields.update({k: v})\n\n return flat_fields, nested_fields", "response": "Separate foreign key fields into a single field."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef attach_image(field, nested_fields, page, record_keeper=None):\n '''\n Returns a function that attaches an image to page if it exists\n\n Currenlty assumes that images have already been imported and info\n has been stored in record_keeper\n '''\n if (field in nested_fields) and nested_fields[field]:\n foreign_image_id = nested_fields[field][\"id\"]\n # Handle the following\n # record keeper may not exist\n # record keeper may not have image ref\n if record_keeper:\n try:\n local_image_id = record_keeper.get_local_image(\n foreign_image_id)\n local_image = Image.objects.get(id=local_image_id)\n 
setattr(page, field, local_image)\n except ObjectDoesNotExist:\n raise ObjectDoesNotExist(\n (\"executing attach_image: local image referenced\"\n \"in record_keeper does not actually exist.\"),\n None)\n except Exception:\n raise\n else:\n raise Exception(\n (\"Attempted to attach image without record_keeper. \"\n \"This functionality is not yet implemented\"))", "response": "Returns a function that attaches an image to a page if it exists and if it does not exist it will raise an exception if the image is already attached."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef rotate_content(day=None):\n # getting the content rotation settings from site settings\n\n for main in Main.objects.all():\n site = main.sites_rooted_here.all().first()\n main_lang = Languages.for_site(site).languages.filter(\n is_main_language=True).first()\n index = SectionIndexPage.objects.live().child_of(main).first()\n site_settings = SiteSettings.for_site(site)\n if day is None:\n day = timezone.now().weekday()\n\n # calls the two rotate methods with the necessary params\n if main and index:\n rotate_latest(main_lang, index, main, site_settings, day)\n rotate_featured_in_homepage(main_lang, day, main)", "response": "This method rotates all content in the current site to the given day."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nload all descendant articles for a given section.", "response": "def load_descendant_articles_for_section(\n context, section, featured_in_homepage=None, featured_in_section=None,\n featured_in_latest=None, count=5):\n \"\"\"\n Returns all descendant articles (filtered using the parameters)\n If the `locale_code` in the context is not the main language, it will\n return the translations of the live articles.\n \"\"\"\n request = context.get('request')\n locale = context.get('locale_code')\n page = section.get_main_language_page()\n settings = 
SiteSettings.for_site(request.site) \\\n if request else None\n\n qs = ArticlePage.objects.descendant_of(page).filter(\n language__is_main_language=True)\n\n article_ordering = settings \\\n and settings.article_ordering_within_section\n\n cms_ordering = article_ordering \\\n and settings.article_ordering_within_section !=\\\n ArticleOrderingChoices.CMS_DEFAULT_SORTING\n\n if article_ordering and cms_ordering:\n order_by = ArticleOrderingChoices.\\\n get(settings.article_ordering_within_section).name.lower()\n\n order_by = order_by if order_by.find('_desc') == -1 \\\n else '-{}'.format(order_by.replace('_desc', ''))\n qs = qs.order_by(order_by)\n\n if featured_in_homepage is not None:\n qs = qs.filter(featured_in_homepage=featured_in_homepage)\\\n .order_by('-featured_in_homepage_start_date')\n\n if featured_in_latest is not None:\n qs = qs.filter(featured_in_latest=featured_in_latest)\n\n if featured_in_section is not None:\n qs = qs.filter(featured_in_section=featured_in_section)\\\n .order_by('-featured_in_section_start_date')\n\n if not locale:\n return qs.live()[:count]\n\n return get_pages(context, qs, locale)[:count]"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef load_child_articles_for_section(\n context, section, featured_in_section=None, count=5):\n \"\"\"\n Returns all child articles\n If the `locale_code` in the context is not the main language, it will\n return the translations of the live articles.\n \"\"\"\n request = context.get('request')\n locale = context.get('locale_code')\n main_language_page = section.get_main_language_page()\n settings = SiteSettings.for_site(request.site) \\\n if request else None\n\n # TODO: Consider caching the pks of these articles using a timestamp on\n # section as the key so tha twe don't always do these joins\n article_ordering = settings and settings.article_ordering_within_section\n order_by = ArticleOrderingChoices.\\\n 
get(settings.article_ordering_within_section).name.lower() \\\n if article_ordering \\\n and settings.article_ordering_within_section !=\\\n ArticleOrderingChoices.CMS_DEFAULT_SORTING\\\n else '-first_published_at'\n\n order_by = order_by if order_by.find('_desc') == -1 \\\n else '-{}'.format(order_by.replace('_desc', ''))\n\n child_articles = ArticlePage.objects.child_of(\n main_language_page).filter(\n language__is_main_language=True).order_by(order_by)\n\n if featured_in_section is not None:\n child_articles = child_articles.filter(\n featured_in_section=featured_in_section)\\\n .order_by('-featured_in_section_start_date')\n\n related_articles = ArticlePage.objects.filter(\n related_sections__section__slug=main_language_page.slug)\n qs = list(chain(\n get_pages(context, child_articles, locale),\n get_pages(context, related_articles, locale)))\n\n # Pagination\n if count:\n p = context.get('p', 1)\n paginator = Paginator(qs, count)\n\n try:\n articles = paginator.page(p)\n except PageNotAnInteger:\n articles = paginator.page(1)\n except EmptyPage:\n articles = paginator.page(paginator.num_pages)\n else:\n articles = qs\n if not locale:\n return articles\n\n context.update({'articles_paginated': articles})\n return articles", "response": "Load all child articles for a given section."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn all child sections for the given section.", "response": "def load_child_sections_for_section(context, section, count=None):\n '''\n Returns all child sections\n If the `locale_code` in the context is not the main language, it will\n return the translations of the live articles.\n '''\n page = section.get_main_language_page()\n locale = context.get('locale_code')\n\n qs = SectionPage.objects.child_of(page).filter(\n language__is_main_language=True)\n\n if not locale:\n return qs[:count]\n return get_pages(context, qs, locale)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following 
Python 3 function doing\ndef load_sibling_sections(context, section, count=None):\n '''\n Returns all sibling sections\n If the `locale_code` in the context is not the main language, it will\n return the translations of the live articles.\n '''\n page = section.get_main_language_page()\n locale = context.get('locale_code')\n\n qs = SectionPage.objects.sibling_of(page).filter(\n language__is_main_language=True)\n\n if not locale:\n return qs[:count]\n return get_pages(context, qs, locale)", "response": "Load sibling sections of a given section."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate a new page from the content and returns the page and the context dict in a tuple.", "response": "def create_page(self, content, class_, record_keeper=None, logger=None):\n '''\n Robust as possible\n\n Attempts to create the page\n If any of the functions used to attach content to the page\n fail, keep going, keep a record of those errors in a context dict\n return the page and the context dict in a tuple\n '''\n fields, nested_fields = separate_fields(content)\n\n foreign_id = content.pop('id')\n\n # remove unwanted fields\n if 'latest_revision_created_at' in content:\n content.pop('latest_revision_created_at')\n\n page = class_(**fields)\n\n # create functions to attach attributes\n function_args_mapping = (\n # add_section_time\n (add_json_dump, (\"time\", nested_fields, page)),\n # add_tags\n (add_list_of_things, (\"tags\", nested_fields, page)),\n # add_metadata_tags\n (add_list_of_things, (\"metadata_tags\", nested_fields, page)),\n\n # attach_image\n (attach_image, (\"image\", nested_fields, page, record_keeper)),\n # attach_social_media_image\n (attach_image, (\"social_media_image\", nested_fields,\n page, record_keeper)),\n # attach_banner_image\n (attach_image, (\"banner\", nested_fields, page, record_keeper)),\n )\n\n for mapping in function_args_mapping:\n function = mapping[0]\n _args = mapping[1]\n try:\n function(*_args)\n except 
Exception as e:\n if logger:\n logger.log(\n ERROR,\n \"Failed to create page content\",\n {\n \"foreign_page_id\": foreign_id,\n \"exception\": e,\n \"function\": function.__name__,\n })\n\n # Handle content in nested_fields\n body = add_stream_fields(nested_fields, page)\n # body has not been added as it contains reference to pages\n if body:\n record_keeper.article_bodies[foreign_id] = body\n\n # Handle relationships in nested_fields\n if record_keeper:\n record_relation_functions = [\n record_keeper.record_nav_tags,\n record_keeper.record_recommended_articles,\n record_keeper.record_reaction_questions,\n record_keeper.record_related_sections,\n record_keeper.record_section_tags,\n record_keeper.record_banner_page_link,\n ]\n\n for function in record_relation_functions:\n try:\n function(nested_fields, foreign_id)\n except Exception as e:\n if logger:\n logger.log(\n ERROR,\n \"Failed to record content\",\n {\n \"foreign_page_id\": foreign_id,\n \"exception\": e,\n \"function\": function.__name__,\n })\n\n return page"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef queryset(self, request, queryset):\n if self.value():\n try:\n section = SectionPage.objects.get(id=self.value())\n return queryset.child_of(section).all()\n except (ObjectDoesNotExist, MultipleObjectsReturned):\n return None", "response": "Returns the queryset based on the value\n provided in the query string and retrievable via the self. 
value."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef upload_file(request):\n '''Upload a Zip File Containing a single file containing media.'''\n if request.method == 'POST':\n form = MediaForm(request.POST, request.FILES)\n if form.is_valid():\n context_dict = {}\n try:\n context_dict['copied_files'] = update_media_file(\n request.FILES['zip_file'])\n except Exception as e:\n context_dict['error_message'] = e.message\n return render(request,\n 'django_admin/transfer_media_message.html',\n context_dict)\n else:\n form = MediaForm()\n return render(request, 'django_admin/upload_media.html', {'form': form})", "response": "Upload a Zip File Containing a single file containing media."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ncreate and download a zip file containing the media file.", "response": "def download_file(request):\n '''Create and download a zip file containing the media file.'''\n if request.method == \"GET\":\n if path.exists(settings.MEDIA_ROOT):\n zipfile_name = 'media_%s.zip' % settings.SITE_NAME\n in_memory_file = BytesIO()\n\n media_zipfile = zipfile.ZipFile(in_memory_file, 'w',\n zipfile.ZIP_DEFLATED)\n\n directory_name = path.split(settings.MEDIA_ROOT)[-1]\n for root, dirs, files in walk(directory_name):\n for file in files:\n media_zipfile.write(path.join(root, file))\n\n media_zipfile.close()\n\n resp = HttpResponse(in_memory_file.getvalue(),\n content_type=\"application/x-zip-compressed\")\n resp['Content-Disposition'] = (\n 'attachment; filename=%s' % zipfile_name)\n\n else:\n resp = render(request,\n 'django_admin/transfer_media_message.html',\n {'error_message':\n 'media file does not exist'})\n else:\n resp = HttpResponseNotAllowed(permitted_methods=['GET'])\n\n return resp"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update_permissions_for_group(apps, 
schema_editor):\n '''\n Update permissions for some users.\n\n Give bulk-delete permissions to moderators.\n Give edit permission to moderators and editors in order\n to display 'Main' page in the explorer.\n '''\n db_alias = schema_editor.connection.alias\n try:\n # Django 1.9\n emit_post_migrate_signal(2, False, db_alias)\n except TypeError:\n # Django < 1.9\n try:\n # Django 1.8\n emit_post_migrate_signal(2, False, 'default', db_alias)\n except TypeError: # Django < 1.8\n emit_post_migrate_signal([], 2, False, 'default', db_alias)\n\n Group = apps.get_model('auth.Group')\n Permission = apps.get_model('auth.Permission')\n GroupPagePermission = apps.get_model('wagtailcore.GroupPagePermission')\n SectionIndexPage = apps.get_model('core.SectionIndexPage')\n MainPage = apps.get_model('core.Main')\n\n moderator_group = Group.objects.filter(name='Moderators').first()\n editor_group = Group.objects.filter(name='Editors').first()\n\n if moderator_group:\n sections = SectionIndexPage.objects.first()\n GroupPagePermission.objects.get_or_create(\n group_id=moderator_group.id,\n page_id=sections.id,\n permission_type='bulk_delete'\n )\n\n main = MainPage.objects.first()\n GroupPagePermission.objects.get_or_create(\n group_id=moderator_group.id,\n page_id=main.id,\n permission_type='edit'\n )\n\n if editor_group:\n main = MainPage.objects.first()\n GroupPagePermission.objects.get_or_create(\n group_id=editor_group.id,\n page_id=main.id,\n permission_type='edit'\n )", "response": "Update permissions for some users in order to display Moderators and editors in order to display Moderators and editors in order to display Moderators and editors in order to display Main page in explorer."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef filter_queryset(self, request, queryset, view):\n if 'is_main_language' in request.GET:\n # TODO investigate possible error cases where page\n # does not have language\n return 
queryset.filter(languages__language__is_main_language=True)\n else:\n return queryset", "response": "Filter the queryset to only pages in the main language"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_recomended_articles(main_article, article_list):\n '''\n Creates recommended article objects from article_list\n and _prepends_ to existing recommended articles.\n '''\n\n # store existing recommended articles\n existing_recommended_articles = [\n ra.recommended_article.specific\n for ra in main_article.recommended_articles.all()]\n # delete existing recommended articles\n ArticlePageRecommendedSections.objects.filter(page=main_article).delete()\n\n for hyperlinked_article in article_list:\n ArticlePageRecommendedSections(\n page=main_article,\n recommended_article=hyperlinked_article).save()\n\n # re-create existing recommended articles\n for article in existing_recommended_articles:\n if article not in article_list:\n ArticlePageRecommendedSections(\n page=main_article,\n recommended_article=article).save()", "response": "Creates recommended articles from article_list and _prepends_ to existing recommended articles."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef seperate_end_page_links(stream_data):\n '''\n Seperate out page blocks at the end of a StreamField.\n\n Accepts: List of streamfield blocks\n Returns: Tuple of 2 lists of blocks - (remaining body, final article)\n '''\n stream_data_copy = list(stream_data)\n end_page_links = []\n\n for block in stream_data_copy[::-1]:\n if block['type'] == 'page':\n end_page_links.insert(0, block)\n stream_data_copy.pop()\n else:\n break\n\n return (stream_data_copy, end_page_links)", "response": "Seperate out page blocks at the end of a StreamField."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a list of specific page objects from 
a list of page ids", "response": "def get_pages_from_id_list(id_list):\n '''\n Accepts: list of page ids\n Returns: list of specific page objects\n '''\n page_list = []\n for id_ in id_list:\n try:\n page_list.append(\n Page.objects.get(id=id_).specific)\n except ObjectDoesNotExist:\n logging.error(\n \"Attempted to fetch non-existent\"\n \" page with id of {}\".format(id_))\n return page_list"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef move_page_links_to_recommended_articles():\n '''\n Derived from https://github.com/wagtail/wagtail/issues/2110\n '''\n articles = ArticlePage.objects.all()\n\n for article in articles:\n (remaining_blocks, linked_article_blocks) = seperate_end_page_links(\n article.body.stream_data)\n\n if linked_article_blocks:\n linked_article_ids = get_page_ids_from_page_blocks(\n linked_article_blocks)\n linked_articles = get_pages_from_id_list(linked_article_ids)\n create_recomended_articles(article, linked_articles)\n parent = article.get_parent().specific\n parent.enable_recommended_section = True\n parent.save()\n\n stream_block = article.body.stream_block\n article.body = StreamValue(stream_block,\n remaining_blocks,\n is_lazy=True)\n article.save()", "response": "Move all page links to recommended articles."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef cookiecutter(template, checkout=None, no_input=False, extra_context=None):\n\n # Get user config from ~/.cookiecutterrc or equivalent\n # If no config file, sensible defaults from config.DEFAULT_CONFIG are used\n config_dict = get_user_config()\n\n template = expand_abbreviations(template, config_dict)\n\n # TODO: find a better way to tell if it's a repo URL\n if 'git@' in template or 'https://' in template:\n repo_dir = clone(\n repo_url=template,\n checkout=checkout,\n clone_to_dir=config_dict['cookiecutters_dir'],\n no_input=no_input\n )\n 
else:\n # If it's a local repo, no need to clone or copy to your\n # cookiecutters_dir\n repo_dir = template\n\n context_file = os.path.join(repo_dir, 'cookiecutter.json')\n logging.debug('context_file is {0}'.format(context_file))\n\n context = generate_context(\n context_file=context_file,\n default_context=config_dict['default_context'],\n extra_context=extra_context,\n )\n\n # Create project from local context and project template.\n generate_files(\n repo_dir=repo_dir,\n context=context\n )", "response": "This function replaces cookiecutter s own cookiecutter function with the contents of the given template."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_queryset(self):\n '''\n This is overwritten in order to not exclude drafts\n and pages submitted for moderation\n '''\n request = self.request\n\n # Allow pages to be filtered to a specific type\n if 'type' not in request.GET:\n model = Page\n else:\n model_name = request.GET['type']\n try:\n model = resolve_model_string(model_name)\n except LookupError:\n raise BadRequestError(\"type doesn't exist\")\n if not issubclass(model, Page):\n raise BadRequestError(\"type doesn't exist\")\n\n # This is the overwritten line\n queryset = model.objects.public() # exclude .live()\n\n # Filter by site\n queryset = queryset.descendant_of(\n request.site.root_page, inclusive=True)\n\n return queryset", "response": "Returns a queryset of all the pages that are in the moderation tree."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef list_of_objects_from_api(url):\n '''\n API only serves 20 pages by default\n This fetches info on all of items and return them as a list\n\n Assumption: limit of API is not less than 20\n '''\n response = requests.get(url)\n\n content = json.loads(response.content)\n count = content[\"meta\"][\"total_count\"]\n\n if count <= 20:\n return content[\"items\"]\n else:\n 
items = [] + content[\"items\"]\n num_requests = int(math.ceil(count // 20))\n\n for i in range(1, num_requests + 1):\n paginated_url = \"{}?limit=20&offset={}\".format(\n url, str(i * 20))\n paginated_response = requests.get(paginated_url)\n items = items + json.loads(paginated_response.content)[\"items\"]\n return items", "response": "This function fetches all of items and returns them as a list of objects"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_content_from_url(self, base_url):\n # assemble url\n base_url = base_url.rstrip(\"/\")\n url = base_url + API_PAGES_ENDPOINT + \"?type=\" + self._content_type + \\\n \"&fields=\" + \",\".join(self._fields) + \\\n \"&order=latest_revision_created_at\"\n\n # make request\n try:\n response = requests.get(url)\n self._base_url = base_url\n self._content = response.json()\n self._content = self._content[\"items\"]\n return self._content\n except requests.exceptions.ConnectionError:\n return \"No content could be found from {}. \" \\\n \"Are you sure this is the correct URL?\".format(base_url)\n except requests.exceptions.RequestException:\n return \"Content could not be imported at this time. \" \\\n \"Please try again later.\"", "response": "Get the content from the given base URL."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef save(self, indexes, parent_id):\n if self.content():\n parent = Page.objects.get(id=parent_id)\n\n # Save the selected section page\n response = requests.get(\n self._base_url + API_PAGES_ENDPOINT + str(indexes[0]) + \"/\"\n )\n\n section_page = response.json()\n self.process_child_section(section_page[\"id\"], parent)", "response": "Save the selected section. 
This will save the selected section and all its direct child pages obtained through the?child_of\n query parameter."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef fetch_and_create_image(self, url, image_title):\n '''\n fetches, creates image object\n\n returns tuple with Image object and context dictionary containing\n request URL\n '''\n\n context = {\n \"file_url\": url,\n \"foreign_title\": image_title,\n }\n try:\n image_file = requests.get(url)\n local_image = Image(\n title=image_title,\n file=ImageFile(\n BytesIO(image_file.content),\n name=image_title\n )\n )\n local_image.save()\n return (local_image, context)\n except Exception as e:\n context.update({\n \"exception\": e,\n })\n raise ImageCreationFailed(context, None)", "response": "Fetches and creates an image object and returns tuple with Image object and context dictionary containing request URL\n "} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nimporting and returns tuple with image and context dict.", "response": "def import_image(self, image_id):\n '''\n Imports and returns tuple with image and context dict\n\n Input: foreign image ID\n\n Output: (Image: imported image, Dict: info about import)\n\n Side effects: If Importer object has a record_keeper, it\n will update the record of foreign to local images.\n\n Attempts to avoid duplicates by matching image dimensions\n and hashes. If a match is found it refers to local instance\n instead. 
If it is not, the image is fetched, created and\n referenced.\n '''\n image_detail_url = \"{}{}/\".format(self.image_url, image_id)\n\n try:\n img_response = requests.get(image_detail_url)\n img_info = json.loads(img_response.content)\n except Exception as e:\n error_context = {\n \"image detail url\": image_detail_url,\n \"exception\": e,\n }\n raise ImageInfoFetchFailed(error_context)\n\n if img_info[\"image_hash\"] is None:\n raise ValueError('image hash should not be none')\n\n # check if a replica exists\n local_image = self.get_replica_image(\n img_info[\"image_hash\"])\n\n file_url = img_info['image_url']\n\n # handle when image_url is relative\n # assumes that image import means local storage\n if img_info['image_url'][0] == '/':\n file_url = \"{}{}\".format(\n self.base_url, img_info['image_url'])\n\n if local_image:\n context = {\n \"local_version_existed\": True,\n \"file_url\": file_url,\n \"image_detail_url\": image_detail_url,\n \"foreign_title\": img_info[\"title\"],\n }\n # update record keeper\n if self.record_keeper:\n self.record_keeper.record_image_relation(\n image_id,\n local_image.id)\n return (local_image, context)\n else:\n new_image, context = self.fetch_and_create_image(\n file_url,\n img_info[\"title\"])\n # update record keeper\n if self.record_keeper:\n self.record_keeper.record_image_relation(\n image_id,\n new_image.id)\n context.update({\n \"local_version_existed\": False,\n })\n return (new_image, context)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nimporting all images from site and creates them", "response": "def import_images(self):\n '''\n Fetches all images from site\n\n Handles Errors in creation process\n Updates record_keeper\n Logs the result of each attempt to create an image\n '''\n self.log(ACTION, \"Importing Images\")\n try:\n images = list_of_objects_from_api(self.image_url)\n except Exception as e:\n raise ImageInfoFetchFailed(\n \"Something went wrong fetching list of images\")\n\n 
if not images:\n return None\n\n # iterate through foreign images\n for image_summary in images:\n self.log(ACTION, \"Importing Image\", depth=1)\n try:\n (image, context) = self.import_image(image_summary[\"id\"])\n # log success\n self.log(SUCCESS, \"Importing Image\",\n context=context,\n depth=1)\n except ImageInfoFetchFailed as e:\n self.log(ERROR, \"Importing Images\", e, depth=1)\n except ImageCreationFailed as e:\n self.log(ERROR, \"Importing Images\", e.message, depth=1)\n except Exception as e:\n context = {\n \"exception\": e,\n \"foreign_image_id\": image_summary[\"id\"],\n }\n self.log(ERROR, \"Importing Images\", context, depth=1)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_language_ids(self):\n '''\n Return list of foreign language IDs from API language endpoint\n\n TODO: add in validation before creating languages\n '''\n languages = list_of_objects_from_api(self.language_url)\n\n language_ids = []\n for language in languages:\n language_ids.append(language[\"id\"])\n return language_ids", "response": "Return list of foreign language IDs from API language endpoint"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef recreate_relationships(self, class_, attribute_name, key):\n '''\n Recreates one-to-many relationship\n '''\n iterable = self.record_keeper.foreign_to_many_foreign_map[key]\n for foreign_page_id, foreign_page_id_list in iteritems(iterable):\n\n # Assumption: local page has been indexed and exists\n # TODO: handle case where it doesn't exist\n local_page_id = self.record_keeper.get_local_page(foreign_page_id)\n local_page = Page.objects.get(id=local_page_id).specific\n\n for _foreign_page_id in foreign_page_id_list:\n try:\n local_version_page_id = (self.record_keeper\n .get_local_page(_foreign_page_id))\n foreign_page = Page.objects.get(\n id=local_version_page_id).specific\n realtionship_object = class_(page=local_page)\n 
setattr(realtionship_object, attribute_name, foreign_page)\n realtionship_object.save()\n except Exception as e:\n context = {\n \"exception\": e,\n \"function_schema\": (\"recreate_relationships\"\n \"(class, attribute_name, key)\"),\n \"attribute_name\": str(attribute_name),\n \"key\": str(key),\n \"class\": str(class_),\n \"foreign_page_id\": str(foreign_page_id),\n }\n self.log(ERROR, \"recreating relationships\",\n context=context)", "response": "Recreates one - to - many relationship."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef recreate_relationship(self, attribute_name, key):\n '''\n Recreates one-to-one relationship\n '''\n iterable = self.record_keeper.foreign_to_foreign_map[\"banner_link_page\"] # noqa\n for foreign_page_id, linked_page_foreign_id in iteritems(iterable):\n # get local banner page\n local_page_id = self.record_keeper.get_local_page(foreign_page_id)\n local_page = Page.objects.get(id=local_page_id).specific\n\n # get local linked page\n local_id = self.record_keeper.get_local_page(\n linked_page_foreign_id)\n linked_page = Page.objects.get(id=local_id).specific\n\n # link the two together\n setattr(local_page, attribute_name, linked_page)\n # TODO: review publishing and saving revisions\n local_page.save_revision().publish()", "response": "Recreates one - to - one relationship."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef recreate_article_body(self):\n '''\n Handles case where article body contained page or image.\n\n Assumes all articles and images have been created.\n '''\n for foreign_id, body in iteritems(self.record_keeper.article_bodies):\n try:\n local_page_id = self.record_keeper.get_local_page(foreign_id)\n page = Page.objects.get(id=local_page_id).specific\n\n # iterate through the body\n new_body = []\n for item in body:\n if not item['value']:\n continue\n if item['type'] == 'page':\n 
new_page_id = self.record_keeper.get_local_page(\n item['value'])\n item['value'] = new_page_id\n elif item['type'] == 'image':\n new_image_id = self.record_keeper.get_local_image(\n item['value'])\n item['value'] = new_image_id\n\n new_body.append(item)\n\n setattr(page, 'body', json.dumps(new_body))\n page.save_revision().publish()\n\n except Exception as e:\n self.log(ERROR, \"recreating article body\",\n {\n \"exception\": e,\n \"foreign_id\": foreign_id,\n \"body\": body,\n },\n depth=1)", "response": "Handles case where article body contained page or image."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef attach_translated_content(self, local_main_lang_page,\n content, locale):\n '''\n Wrapper for attach_page\n\n Creates the content\n Then attaches a language relation from the main language page to the\n newly created Page\n Note: we get the parent from the main language page\n '''\n try:\n page = self.attach_page(\n local_main_lang_page.get_parent(),\n content)\n except:\n # TODO: log this\n return None\n\n try:\n # create the translation object for page\n language = SiteLanguageRelation.objects.get(\n language_setting=self.language_setting,\n locale=locale)\n page.language = language\n page.translated_pages.add(local_main_lang_page)\n local_main_lang_page.translated_pages.add(page)\n page.save()\n local_main_lang_page.save()\n except:\n # TODO: log that creating translation failed\n # TODO: log that page is now being deleted\n page.delete()\n return page", "response": "Wrapper for attach_page attach_page_with_language_with_language_with_page attach_page attach_page_with_language_with_page attach_page attach"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef copy_page_and_children(self, foreign_id, parent_id, depth=0):\n '''\n Recusively copies over pages, their translations, and child pages\n '''\n url = \"{}/api/v2/pages/{}/\".format(self.base_url, 
foreign_id)\n\n self.log(ACTION, \"Requesting Data\", {\"url\": url}, depth)\n try:\n # TODO: create a robust wrapper around this functionality\n response = requests.get(url)\n content = json.loads(response.content)\n except Exception as e:\n self.log(ERROR, \"Requesting Data - abandoning copy\",\n {\"url\": url, \"exception\": e}, depth)\n return None\n\n parent = Page.objects.get(id=parent_id).specific\n page = None\n try:\n self.log(ACTION, \"Create Page\", {\"url\": url}, depth)\n page = self.attach_page(parent, content)\n if page:\n self.log(SUCCESS, \"Create Page\",\n {\"url\": url,\n \"page title\": page.title.encode('utf-8')},\n depth)\n except PageNotImportable as e:\n message = e.message.pop(\"message\")\n self.log(WARNING, message, e.message.pop(\"message\"), depth)\n return None\n\n if page:\n # create translations\n if content[\"meta\"][\"translations\"]:\n for translation_obj in content[\"meta\"][\"translations\"]:\n _url = \"{}/api/v2/pages/{}/\".format(self.base_url,\n translation_obj[\"id\"])\n # TODO: create a robust wrapper around this functionality\n _response = requests.get(_url)\n self.log(\n ACTION,\n \"Getting translated content\",\n {\"url\": _url}, depth)\n if _response.content:\n _content = json.loads(_response.content)\n\n if (\"locale\" in translation_obj and\n translation_obj[\"locale\"]):\n self.attach_translated_content(\n page, _content, translation_obj[\"locale\"])\n else:\n self.log(\n ERROR,\n \"locale is null\",\n {\"url\": _url, }, depth)\n else:\n self.log(\n ERROR,\n \"Getting translated content\",\n {\"url\": _url}, depth)\n\n main_language_child_ids = content[\"meta\"][\"main_language_children\"]\n\n # recursively iterate through child nodes\n if main_language_child_ids:\n for main_language_child_id in main_language_child_ids:\n try:\n self.copy_page_and_children(\n foreign_id=main_language_child_id,\n parent_id=page.id, depth=depth + 1)\n except Exception as e:\n self.log(ERROR, \"Copying Children\",\n {\"url\": url, 
\"exception\": e})", "response": "Recusively copies over pages their translations and child pages"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ninitiating copying of tree with existing_node acting as root", "response": "def copy_children(self, foreign_id, existing_node):\n '''\n Initiates copying of tree, with existing_node acting as root\n '''\n url = \"{}/api/v2/pages/{}/\".format(self.base_url, foreign_id)\n self.log(\n ACTION,\n \"Copying Children\",\n {\"existing node type\": str(type(existing_node))})\n\n # TODO: create a robust wrapper around this functionality\n try:\n self.log(ACTION, \"Requesting Data\", {\"url\": url})\n response = requests.get(url)\n content = json.loads(response.content)\n self.log(SUCCESS, \"Data Fetched Successfully\", {\"url\": url})\n\n main_language_child_ids = content[\"meta\"][\"main_language_children\"]\n if main_language_child_ids:\n for main_language_child_id in main_language_child_ids:\n self.copy_page_and_children(\n foreign_id=main_language_child_id,\n parent_id=existing_node.id, depth=1)\n else:\n self.log(SUCCESS, \"No children to copy\")\n except Exception as e:\n self.log(ERROR, \"Copying Children\", {\"url\": url, \"exception\": e})"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns a string representation of the email logs.", "response": "def get_email_logs(self):\n '''\n Returns a string representation of logs.\n\n Only displays errors and warnings in the email logs\n to avoid being verbose\n '''\n message = \"\"\n for log in self.record:\n if log[\"log_type\"] in [ERROR, WARNING]:\n message += self.format_message(**log)\n\n return message"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef chain_to_bitcoin_network(chain):\n if chain == Chain.bitcoin_mainnet:\n return SYS_NETWORK_BITCOIN_MAINNET\n elif chain == Chain.bitcoin_testnet:\n return 
SYS_NETWORK_BITCOIN_TESTNET\n elif chain == Chain.bitcoin_regtest:\n return SYS_NETWORK_BITCOIN_REGTEST\n else:\n message = 'This chain cannot be converted to a bitcoin netcode; chain='\n if chain:\n message += chain.name\n else:\n message += '<NULL>'\n raise UnknownChainError(message)", "response": "Convert a chain to a bitcoin netcode."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nmake sure the directories exist for filename.", "response": "def ensure_dirs(filename):\n \"\"\"Make sure the directories exist for `filename`.\"\"\"\n dirname, _ = os.path.split(filename)\n if dirname and not os.path.exists(dirname):\n os.makedirs(dirname)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_certificate(self, certificate_uid):\n certificate_json = self.get_certificate_json(certificate_uid)\n return model.to_certificate_model(certificate_json)", "response": "Returns a certificate object. Propagates KeyError if key isn t found\n "} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_certificate_json(self, certificate_uid):\n\n if certificate_uid.startswith(URN_UUID_PREFIX):\n uid = certificate_uid[len(URN_UUID_PREFIX):]\n elif certificate_uid.startswith('http'):\n last_slash = certificate_uid.rindex('/')\n uid = certificate_uid[last_slash + 1:]\n else:\n uid = certificate_uid\n logging.debug('Retrieving certificate for uid=%s', uid)\n certificate_bytes = self._get_certificate_raw(uid)\n logging.debug('Found certificate for uid=%s', uid)\n certificate_json = helpers.certificate_bytes_to_json(certificate_bytes)\n return certificate_json", "response": "Get certificate as json. Propagates KeyError if key isn t found\n "} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets the certificate as raw bytes. 
Per kvstore contract raises KeyError.", "response": "def _get_certificate_raw(self, certificate_uid):\n \"\"\"\n Returns certificate as raw bytes. Per kvstore contract, raises an KeyError if key isn't found.\n :param certificate_uid:\n :return:\n \"\"\"\n cert_file_bytes = self.kv_store.get(certificate_uid_to_filename(certificate_uid))\n return cert_file_bytes"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn the certificate as byte array.", "response": "def get_certificate(self, certificate_uid):\n \"\"\"\n Returns certificate as byte array. We need this for v1 certs, which compute a binary hash. Raises\n KeyError if not found\n :param certificate_uid:\n :return:\n \"\"\"\n logging.debug('Retrieving certificate for uid=%s', certificate_uid)\n\n is_v1_uid = model.is_v1_uid(certificate_uid)\n if not is_v1_uid:\n return super(V1AwareCertificateStore, self).get_certificate(certificate_uid)\n\n # else it's V1.1 (if not valid, it will throw)\n certificate = self._find_certificate_metadata(uid=certificate_uid)\n if certificate:\n certificate_bytes = self._get_certificate_raw(certificate_uid)\n certificate_json = helpers.certificate_bytes_to_json(certificate_bytes)\n return model.to_certificate_model(certificate_json, certificate['txid'], certificate_bytes)\n\n message = 'Certificate metadata not found for certificate uid=%s' % certificate_uid\n logging.error(message)\n raise KeyError(message)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _find_certificate_metadata(self, uid=None):\n certificate = self.db.certificates.find_one({'uid': uid})\n return certificate", "response": "Find certificate by uid"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nexecuting git command in bash", "response": "def run(self, cmd):\n \"\"\"Execute git command in bash\"\"\"\n cmd = ['git', '--git-dir=%s' % self.path] + cmd\n print(\"cmd list\", cmd)\n print(\"cmd\", ' 
'.join(cmd))\n res = None\n try:\n res = subprocess.check_output(cmd)\n except BaseException:\n pass\n if res:\n try:\n res = res.decode()\n except UnicodeDecodeError:\n res = res.decode('utf-8')\n return res"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef update(self):\n if not os.path.isdir(os.path.join(self.path)):\n os.makedirs(self.path)\n if not os.path.isdir(os.path.join(self.path, 'refs')):\n subprocess.check_output([\n 'git', 'clone', '--bare', self.repo_git, self.path\n ])\n self.run(['gc', '--auto', '--prune=all'])\n self.run(['fetch', '-p', 'origin', '+refs/heads/*:refs/heads/*'])\n # github support\n self.run(['fetch', 'origin', '+refs/pull/*/head:refs/pull/*'])\n # gitlab support\n self.run([\n 'fetch', 'origin', '+refs/merge-requests/*/head:refs/pull/*'])", "response": "Get a repository git or update it"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef oauth_session(request, state=None, token=None):\n if settings.DISCORD_REDIRECT_URI is not None:\n redirect_uri = settings.DISCORD_REDIRECT_URI\n else:\n redirect_uri = request.build_absolute_uri(\n reverse('discord_bind_callback'))\n scope = (['email', 'guilds.join'] if settings.DISCORD_EMAIL_SCOPE\n else ['identity', 'guilds.join'])\n return OAuth2Session(settings.DISCORD_CLIENT_ID,\n redirect_uri=redirect_uri,\n scope=scope,\n token=token,\n state=state)", "response": "Constructs the OAuth2 session object."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef copy_path(self, path):\n src = os.path.expandvars(os.path.expanduser(path))\n basename = os.path.basename(src)\n dest_path = os.path.expandvars(os.path.expanduser(\n os.path.join(self.curr_work_path, basename)))\n if os.path.isdir(dest_path):\n shutil.rmtree(dest_path)\n if os.path.isdir(src):\n try:\n shutil.copytree(src, dest_path)\n except shutil.Error:\n pass # There are 
permissions errors to copy\n elif os.path.isfile(src):\n shutil.copy(src, dest_path)\n else:\n raise UserWarning(\n \"Just directory or file is supported to copy [%s]\" % src)\n return os.path.relpath(dest_path, self.curr_work_path)", "response": "Copy the source file and return the path to the new file."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngenerates transcripts from sjson to SubRip.", "response": "def generate_sjson_from_srt(srt_subs):\n \"\"\"\n Generate transcripts from sjson to SubRip (*.srt).\n\n Arguments:\n srt_subs(SubRip): \"SRT\" subs object\n\n Returns:\n Subs converted to \"SJSON\" format.\n \"\"\"\n sub_starts = []\n sub_ends = []\n sub_texts = []\n for sub in srt_subs:\n sub_starts.append(sub.start.ordinal)\n sub_ends.append(sub.end.ordinal)\n sub_texts.append(sub.text.replace('\\n', ' '))\n\n sjson_subs = {\n 'start': sub_starts,\n 'end': sub_ends,\n 'text': sub_texts\n }\n return sjson_subs"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef generate_srt_from_sjson(sjson_subs):\n\n output = ''\n\n equal_len = len(sjson_subs['start']) == len(sjson_subs['end']) == len(sjson_subs['text'])\n if not equal_len:\n return output\n\n for i in range(len(sjson_subs['start'])):\n item = SubRipItem(\n index=i,\n start=SubRipTime(milliseconds=sjson_subs['start'][i]),\n end=SubRipTime(milliseconds=sjson_subs['end'][i]),\n text=sjson_subs['text'][i]\n )\n output += (six.text_type(item))\n output += '\\n'\n return output", "response": "Generate transcripts from sjson to SubRip."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef convert(cls, content, input_format, output_format):\n assert input_format in ('srt', 'sjson')\n assert output_format in ('srt', 'sjson')\n\n # Decode the content with utf-8-sig which will also\n # skip byte order mark(BOM) character if found.\n content = 
content.decode('utf-8-sig')\n\n if input_format == output_format:\n return content\n\n if input_format == 'srt':\n\n if output_format == 'sjson':\n try:\n # With error handling (set to 'ERROR_RAISE'), we will be getting\n # the exception if something went wrong in parsing the transcript.\n srt_subs = SubRipFile.from_string(content, error_handling=SubRipFile.ERROR_RAISE)\n except Error as ex: # Base exception from pysrt\n raise TranscriptsGenerationException(text_type(ex))\n\n return json.dumps(cls.generate_sjson_from_srt(srt_subs))\n\n if input_format == 'sjson':\n\n if output_format == 'srt':\n return cls.generate_srt_from_sjson(json.loads(content))", "response": "Convert a string from input_format to output_format."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nvalidates the transcript data.", "response": "def validate(self, data):\n \"\"\"\n Validates the transcript data.\n \"\"\"\n video_id = self.context.get('video_id')\n video = Video.get_or_none(edx_video_id=video_id)\n if not video:\n raise serializers.ValidationError('Video \"{video_id}\" is not valid.'.format(video_id=video_id))\n\n data.update(video=video)\n return data"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nconvert data into CourseVideo instance and image filename tuple.", "response": "def to_internal_value(self, data):\n \"\"\"\n Convert data into CourseVideo instance and image filename tuple.\n \"\"\"\n course_id = data\n course_video = image = ''\n if data:\n if isinstance(data, dict):\n (course_id, image), = list(data.items())\n\n course_video = CourseVideo(course_id=course_id)\n course_video.full_clean(exclude=['video'])\n\n return course_video, image"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef validate(self, data):\n if data is not None and not isinstance(data, dict):\n raise serializers.ValidationError(\"Invalid data\")\n\n try:\n profiles = 
[ev[\"profile\"] for ev in data.get(\"encoded_videos\", [])]\n if len(profiles) != len(set(profiles)):\n raise serializers.ValidationError(\"Invalid data: duplicate profiles\")\n except KeyError:\n raise serializers.ValidationError(\"profile required for deserializing\")\n except TypeError:\n raise serializers.ValidationError(\"profile field needs to be a profile_name (str)\")\n\n # Clean course_video list from any invalid data.\n course_videos = [(course_video, image) for course_video, image in data.get('courses', []) if course_video]\n data['courses'] = course_videos\n\n return data", "response": "Validate the video data."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates the video and its nested resources.", "response": "def create(self, validated_data):\n \"\"\"\n Create the video and its nested resources.\n \"\"\"\n courses = validated_data.pop(\"courses\", [])\n encoded_videos = validated_data.pop(\"encoded_videos\", [])\n\n video = Video.objects.create(**validated_data)\n\n EncodedVideo.objects.bulk_create(\n EncodedVideo(video=video, **video_data)\n for video_data in encoded_videos\n )\n\n # The CourseSerializer will already have converted the course data\n # to CourseVideo models, so we can just set the video and save.\n # Also create VideoImage objects if an image filename is present\n for course_video, image_name in courses:\n course_video.video = video\n course_video.save()\n if image_name:\n VideoImage.create_or_update(course_video, image_name)\n\n return video"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef update(self, instance, validated_data):\n instance.status = validated_data[\"status\"]\n instance.client_video_id = validated_data[\"client_video_id\"]\n instance.duration = validated_data[\"duration\"]\n instance.save()\n\n # Set encoded videos\n instance.encoded_videos.all().delete()\n EncodedVideo.objects.bulk_create(\n EncodedVideo(video=instance, 
**video_data)\n for video_data in validated_data.get(\"encoded_videos\", [])\n )\n\n # Set courses\n # NOTE: for backwards compatibility with the DRF v2 behavior,\n # we do NOT delete existing course videos during the update.\n # Also update VideoImage objects if an image filename is present\n for course_video, image_name in validated_data.get(\"courses\", []):\n course_video.video = instance\n course_video.save()\n if image_name:\n VideoImage.create_or_update(course_video, image_name)\n\n return instance", "response": "Update an existing video resource."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_video_image_storage():\n if hasattr(settings, 'VIDEO_IMAGE_SETTINGS'):\n return get_storage_class(\n settings.VIDEO_IMAGE_SETTINGS.get('STORAGE_CLASS'),\n )(**settings.VIDEO_IMAGE_SETTINGS.get('STORAGE_KWARGS', {}))\n else:\n # during edx-platform loading this method gets called but settings are not ready yet\n # so in that case we will return default(FileSystemStorage) storage class instance\n return get_storage_class()()", "response": "Returns the configured django storage backend."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_video_transcript_storage():\n if hasattr(settings, 'VIDEO_TRANSCRIPTS_SETTINGS'):\n return get_storage_class(\n settings.VIDEO_TRANSCRIPTS_SETTINGS.get('STORAGE_CLASS'),\n )(**settings.VIDEO_TRANSCRIPTS_SETTINGS.get('STORAGE_KWARGS', {}))\n else:\n # during edx-platform loading this method gets called but settings are not ready yet\n # so in that case we will return default(FileSystemStorage) storage class instance\n return get_storage_class()()", "response": "Returns the configured django storage backend for video transcripts."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates a new file in specific file system.", "response": "def create_file_in_fs(file_data, 
file_name, file_system, static_dir):\n \"\"\"\n Writes file in specific file system.\n\n Arguments:\n file_data (str): Data to store into the file.\n file_name (str): File name of the file to be created.\n file_system (OSFS): Import file system.\n static_dir (str): The Directory to retrieve transcript file.\n \"\"\"\n with file_system.open(combine(static_dir, file_name), 'wb') as f:\n f.write(file_data.encode('utf-8'))"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning the format of the given transcript.", "response": "def get_transcript_format(transcript_content):\n \"\"\"\n Returns transcript format.\n\n Arguments:\n transcript_content (str): Transcript file content.\n \"\"\"\n try:\n sjson_obj = json.loads(transcript_content)\n except ValueError:\n # With error handling (set to 'ERROR_RAISE'), we will be getting\n # the exception if something went wrong in parsing the transcript.\n srt_subs = SubRipFile.from_string(transcript_content, error_handling=SubRipFile.ERROR_RAISE)\n if len(srt_subs) > 0:\n return TranscriptFormat.SRT\n return TranscriptFormat.SJSON"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns an object instance that should be used for detail views.", "response": "def get_object(self):\n \"\"\"\n Returns an object instance that should be used for detail views.\n \"\"\"\n queryset = self.get_queryset() # Get the base queryset\n queryset = self.filter_queryset(queryset) # Apply any filter backends\n filter = {} # pylint: disable=W0622\n for field in self.lookup_fields:\n filter[field] = self.kwargs[field]\n return get_object_or_404(queryset, **filter)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef post(self, request):\n attrs = ('video_id', 'name', 'language_code', 'provider', 'file_format')\n missing = [attr for attr in attrs if attr not in request.data]\n if missing:\n LOGGER.warn(\n '[VAL] 
Required transcript params are missing. %s', ' and '.join(missing)\n )\n return Response(\n status=status.HTTP_400_BAD_REQUEST,\n data=dict(message=u'{missing} must be specified.'.format(missing=' and '.join(missing)))\n )\n\n video_id = request.data['video_id']\n language_code = request.data['language_code']\n transcript_name = request.data['name']\n provider = request.data['provider']\n file_format = request.data['file_format']\n\n supported_formats = sorted(dict(TranscriptFormat.CHOICES).keys())\n if file_format not in supported_formats:\n message = (\n u'\"{format}\" transcript file type is not supported. Supported formats are \"{supported_formats}\"'\n ).format(format=file_format, supported_formats=supported_formats)\n return Response(status=status.HTTP_400_BAD_REQUEST, data={'message': message})\n\n supported_providers = sorted(dict(TranscriptProviderType.CHOICES).keys())\n if provider not in supported_providers:\n message = (\n u'\"{provider}\" provider is not supported. Supported transcription providers are \"{supported_providers}\"'\n ).format(provider=provider, supported_providers=supported_providers)\n return Response(status=status.HTTP_400_BAD_REQUEST, data={'message': message})\n\n transcript = VideoTranscript.get_or_none(video_id, language_code)\n if transcript is None:\n create_or_update_video_transcript(video_id, language_code, metadata={\n 'provider': provider,\n 'file_name': transcript_name,\n 'file_format': file_format\n })\n response = Response(status=status.HTTP_200_OK)\n else:\n message = (\n u'Can not override existing transcript for video \"{video_id}\" and language code \"{language}\".'\n ).format(video_id=video_id, language=language_code)\n response = Response(status=status.HTTP_400_BAD_REQUEST, data={'message': message})\n\n return response", "response": "Creates a new video transcript instance with the given information."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 
code\ndef patch(self, request):\n attrs = ('edx_video_id', 'status')\n missing = [attr for attr in attrs if attr not in request.data]\n if missing:\n return Response(\n status=status.HTTP_400_BAD_REQUEST,\n data={'message': u'\"{missing}\" params must be specified.'.format(missing=' and '.join(missing))}\n )\n\n edx_video_id = request.data['edx_video_id']\n video_status = request.data['status']\n if video_status not in VALID_VIDEO_STATUSES:\n return Response(\n status=status.HTTP_400_BAD_REQUEST,\n data={'message': u'\"{status}\" is not a valid Video status.'.format(status=video_status)}\n )\n\n try:\n video = Video.objects.get(edx_video_id=edx_video_id)\n video.status = video_status\n video.save()\n response_status = status.HTTP_200_OK\n response_payload = {}\n except Video.DoesNotExist:\n response_status = status.HTTP_400_BAD_REQUEST\n response_payload = {\n 'message': u'Video is not found for specified edx_video_id: {edx_video_id}'.format(\n edx_video_id=edx_video_id\n )\n }\n\n return Response(status=response_status, data=response_payload)", "response": "Update the status of a video."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef post(self, request):\n attrs = ('course_id', 'edx_video_id', 'generated_images')\n missing = [attr for attr in attrs if attr not in request.data]\n if missing:\n return Response(\n status=status.HTTP_400_BAD_REQUEST,\n data={\n 'message': u'{missing} must be specified to update a video image.'.format(\n missing=' and '.join(missing)\n )\n }\n )\n\n course_id = request.data['course_id']\n edx_video_id = request.data['edx_video_id']\n generated_images = request.data['generated_images']\n\n try:\n course_video = CourseVideo.objects.select_related('video_image').get(\n course_id=six.text_type(course_id), video__edx_video_id=edx_video_id\n )\n except CourseVideo.DoesNotExist:\n return Response(\n status=status.HTTP_400_BAD_REQUEST,\n data={'message': u'CourseVideo not 
found for course_id: {course_id}'.format(course_id=course_id)}\n )\n\n try:\n VideoImage.create_or_update(course_video, generated_images=generated_images)\n except ValidationError as ex:\n return Response(\n status=status.HTTP_400_BAD_REQUEST,\n data={'message': str(ex)}\n )\n\n return Response()", "response": "Update a course video image instance with auto generated image names."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef post(self, request):\n courses = request.data.get('courses')\n batch_size = request.data.get('batch_size', 50)\n offset = request.data.get('offset', 0)\n if courses:\n videos = (CourseVideo.objects.select_related('video')\n .prefetch_related('video__encoded_videos', 'video__encoded_videos__profile')\n .filter(course_id__in=courses, video__status='file_complete')\n .exclude(video__encoded_videos__profile__profile_name='hls')\n .values_list('video__edx_video_id', flat=True)\n .distinct())\n\n response = Response({'videos': videos}, status=status.HTTP_200_OK)\n else:\n videos = (Video.objects.prefetch_related('encoded_videos', 'encoded_videos__profile')\n .filter(status='file_complete')\n .exclude(encoded_videos__profile__profile_name='hls')\n .order_by('id')\n .values_list('edx_video_id', flat=True)\n .distinct())\n\n response = Response(\n {\n 'videos': videos[offset: offset+batch_size],\n 'total': videos.count(),\n 'offset': offset,\n 'batch_size': batch_size,\n },\n status=status.HTTP_200_OK\n )\n\n return response", "response": "This endpoint handles POST requests."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates a single profile for a given video.", "response": "def put(self, request):\n \"\"\"\n Update a single profile for a given video.\n\n Example request data:\n {\n 'edx_video_id': '1234'\n 'profile': 'hls',\n 'encode_data': {\n 'url': 'foo.com/qwe.m3u8'\n 'file_size': 34\n 'bitrate': 12\n }\n }\n \"\"\"\n edx_video_id 
= request.data['edx_video_id']\n profile = request.data['profile']\n encode_data = request.data['encode_data']\n\n video = Video.objects.get(edx_video_id=edx_video_id)\n profile = Profile.objects.get(profile_name=profile)\n\n # Delete existing similar profile if its present and\n # create new one with updated data.\n EncodedVideo.objects.filter(video=video, profile=profile).delete()\n EncodedVideo.objects.create(video=video, profile=profile, **encode_data)\n\n return Response(status=status.HTTP_200_OK)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nlogging video status for an existing video instance", "response": "def video_status_update_callback(sender, **kwargs): # pylint: disable=unused-argument\n \"\"\"\n Log video status for an existing video instance\n \"\"\"\n video = kwargs['instance']\n\n if kwargs['created']:\n logger.info('VAL: Video created with id [%s] and status [%s]', video.edx_video_id, video.status)\n else:\n logger.info('VAL: Status changed to [%s] for video [%s]', video.status, video.edx_video_id)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create_with_validation(cls, *args, **kwargs):\n ret_val = cls(*args, **kwargs)\n ret_val.full_clean()\n ret_val.save()\n return ret_val", "response": "Create a new object with validation."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_or_create_with_validation(cls, *args, **kwargs):\n try:\n return cls.objects.get(*args, **kwargs), False\n except cls.DoesNotExist:\n return cls.create_with_validation(*args, **kwargs), True", "response": "Get or create a new object from the database."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_or_none(cls, **filter_kwargs):\n try:\n video = cls.objects.get(**filter_kwargs)\n except cls.DoesNotExist:\n video = None\n\n return 
video", "response": "Returns a video or None if no video exists."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef by_youtube_id(cls, youtube_id):\n qset = cls.objects.filter(\n encoded_videos__profile__profile_name='youtube',\n encoded_videos__url=youtube_id\n ).prefetch_related('encoded_videos', 'courses')\n return qset", "response": "Look up video by youtube id"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nconverts a list to its json representation to store in database as text.", "response": "def get_prep_value(self, value):\n \"\"\"\n Converts a list to its json representation to store in database as text.\n \"\"\"\n if value and not isinstance(value, list):\n raise ValidationError(u'ListField value {} is not a list.'.format(value))\n return json.dumps(self.validate_list(value) or [])"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef to_python(self, value):\n if not value:\n value = []\n\n # If a list is set then validated its items\n if isinstance(value, list):\n py_list = self.validate_list(value)\n else: # try to de-serialize value and expect list and then validate\n try:\n py_list = json.loads(value)\n\n if not isinstance(py_list, list):\n raise TypeError\n\n self.validate_list(py_list)\n except (ValueError, TypeError):\n raise ValidationError(u'Must be a valid list of strings.')\n\n return py_list", "response": "Converts the value into a list."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef validate_list(self, value):\n if len(value) > self.max_items:\n raise ValidationError(\n u'list must not contain more than {max_items} items.'.format(max_items=self.max_items)\n )\n\n if all(isinstance(item, six.string_types) for item in value) is False:\n raise ValidationError(u'list must only contain strings.')\n\n return value", "response": "Validate data 
before saving to database."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate a VideoImage object for a CourseVideo.", "response": "def create_or_update(cls, course_video, file_name=None, image_data=None, generated_images=None):\n \"\"\"\n Create a VideoImage object for a CourseVideo.\n\n NOTE: If `image_data` is None then `file_name` value will be used as it is, otherwise\n a new file name is constructed based on uuid and extension from `file_name` value.\n `image_data` will be None in case of course re-run and export. `generated_images` list\n contains names of images auto generated by VEDA. If an image is not already set then first\n image name from `generated_images` list will be used.\n\n Arguments:\n course_video (CourseVideo): CourseVideo instance\n file_name (str): File name of the image\n image_data (InMemoryUploadedFile): Image data to be saved.\n generated_images (list): auto generated image names\n\n Returns:\n Returns a tuple of (video_image, created).\n \"\"\"\n video_image, created = cls.objects.get_or_create(course_video=course_video)\n if image_data:\n # Delete the existing image only if this image is not used by anyone else. This is necessary because\n # after a course re-run, a video in original course and the new course points to same image, So when\n # we update an image in new course and delete the existing image. 
This will delete the image from\n # original course as well, thus leaving video with having no image.\n if not created and VideoImage.objects.filter(image=video_image.image).count() == 1:\n video_image.image.delete()\n\n with closing(image_data) as image_file:\n file_name = '{uuid}{ext}'.format(uuid=uuid4().hex, ext=os.path.splitext(file_name)[1])\n try:\n video_image.image.save(file_name, image_file)\n except Exception: # pylint: disable=broad-except\n logger.exception(\n 'VAL: Video Image save failed to storage for course_id [%s] and video_id [%s]',\n course_video.course_id,\n course_video.video.edx_video_id\n )\n raise\n else:\n if generated_images:\n video_image.generated_images = generated_images\n if not video_image.image.name:\n file_name = generated_images[0]\n\n video_image.image.name = file_name\n\n video_image.save()\n return video_image, created"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef filename(self):\n client_id, __ = os.path.splitext(self.video.client_video_id)\n file_name = u'{name}-{language}.{format}'.format(\n name=client_id,\n language=self.language_code,\n format=self.file_format\n ).replace('\\n', ' ')\n \n return file_name", "response": "Returns readable filename for a transcript\n "} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_or_none(cls, video_id, language_code):\n try:\n transcript = cls.objects.get(video__edx_video_id=video_id, language_code=language_code)\n except cls.DoesNotExist:\n transcript = None\n\n return transcript", "response": "Returns a data model object if found or None otherwise."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating a Video Transcript object.", "response": "def create(cls, video, language_code, file_format, content, provider):\n \"\"\"\n Create a Video Transcript.\n\n Arguments:\n video(Video): Video data model object\n language_code(unicode): A language 
code.\n file_format(unicode): Transcript file format.\n content(InMemoryUploadedFile): Transcript content.\n provider(unicode): Transcript provider.\n \"\"\"\n video_transcript = cls(video=video, language_code=language_code, file_format=file_format, provider=provider)\n with closing(content) as transcript_content:\n try:\n file_name = '{uuid}.{ext}'.format(uuid=uuid4().hex, ext=video_transcript.file_format)\n video_transcript.transcript.save(file_name, transcript_content)\n video_transcript.save()\n except Exception:\n logger.exception(\n '[VAL] Transcript save failed to storage for video_id \"%s\" language code \"%s\"',\n video.edx_video_id,\n language_code\n )\n raise\n\n return video_transcript"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ncreating or updates a Transcript object for the given video and language code.", "response": "def create_or_update(cls, video, language_code, metadata, file_data=None):\n \"\"\"\n Create or update Transcript object.\n\n Arguments:\n video (Video): Video for which transcript is going to be saved.\n language_code (str): language code for (to be created/updated) transcript\n metadata (dict): A dict containing (to be overwritten) properties\n file_data (InMemoryUploadedFile): File data to be saved\n\n Returns:\n Returns a tuple of (video_transcript, created).\n \"\"\"\n try:\n video_transcript = cls.objects.get(video=video, language_code=language_code)\n retrieved = True\n except cls.DoesNotExist:\n video_transcript = cls(video=video, language_code=language_code)\n retrieved = False\n\n for prop, value in six.iteritems(metadata):\n if prop in ['language_code', 'file_format', 'provider']:\n setattr(video_transcript, prop, value)\n\n transcript_name = metadata.get('file_name')\n\n try:\n if transcript_name:\n video_transcript.transcript.name = transcript_name\n elif file_data:\n with closing(file_data) as transcript_file_data:\n file_name = '{uuid}.{ext}'.format(uuid=uuid4().hex, 
ext=video_transcript.file_format)\n video_transcript.transcript.save(file_name, transcript_file_data)\n\n video_transcript.save()\n except Exception:\n logger.exception(\n '[VAL] Transcript save failed to storage for video_id \"%s\" language code \"%s\"',\n video.edx_video_id,\n language_code\n )\n raise\n\n return video_transcript, not retrieved"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nupdate or create a new user s credentials state.", "response": "def update_or_create(cls, org, provider, exists):\n \"\"\"\n Update or create credentials state.\n \"\"\"\n instance, created = cls.objects.update_or_create(\n org=org,\n provider=provider,\n defaults={'exists': exists},\n )\n\n return instance, created"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef create_video(video_data):\n\n serializer = VideoSerializer(data=video_data)\n if serializer.is_valid():\n serializer.save()\n return video_data.get(\"edx_video_id\")\n else:\n raise ValCannotCreateError(serializer.errors)", "response": "Create a new EncodedVideo object in the database."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates the Video object with the given video_data.", "response": "def update_video(video_data):\n \"\"\"\n Called on to update Video objects in the database\n\n update_video is used to update Video objects by the given edx_video_id in the video_data.\n\n Args:\n video_data (dict):\n {\n url: api url to the video\n edx_video_id: ID of the video\n duration: Length of video in seconds\n client_video_id: client ID of video\n encoded_video: a list of EncodedVideo dicts\n url: url of the video\n file_size: size of the video in bytes\n profile: ID of the profile\n courses: Courses associated with this video\n }\n\n Raises:\n Raises ValVideoNotFoundError if the video cannot be retrieved.\n Raises ValCannotUpdateError if the video cannot be updated.\n\n Returns 
the successfully updated Video object\n \"\"\"\n\n try:\n video = _get_video(video_data.get(\"edx_video_id\"))\n except Video.DoesNotExist:\n error_message = u\"Video not found when trying to update video with edx_video_id: {0}\".format(video_data.get(\"edx_video_id\"))\n raise ValVideoNotFoundError(error_message)\n\n serializer = VideoSerializer(video, data=video_data)\n if serializer.is_valid():\n serializer.save()\n return video_data.get(\"edx_video_id\")\n else:\n raise ValCannotUpdateError(serializer.errors)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nupdate the status of an existing video.", "response": "def update_video_status(edx_video_id, status):\n \"\"\"\n Update status for an existing video.\n\n Args:\n edx_video_id: ID of the video\n status: video status\n\n Raises:\n Raises ValVideoNotFoundError if the video cannot be retrieved.\n \"\"\"\n\n try:\n video = _get_video(edx_video_id)\n except Video.DoesNotExist:\n error_message = u\"Video not found when trying to update video status with edx_video_id: {0}\".format(\n edx_video_id\n )\n raise ValVideoNotFoundError(error_message)\n\n video.status = status\n video.save()"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns the state of the ThirdPartyTranscriptCredentialsState for an org", "response": "def get_transcript_credentials_state_for_org(org, provider=None):\n \"\"\"\n Returns transcript credentials state for an org\n\n Arguments:\n org (unicode): course organization\n provider (unicode): transcript provider\n\n Returns:\n dict: provider name and their credential existance map\n\n {\n u'Cielo24': True\n }\n {\n u'3PlayMedia': False,\n u'Cielo24': True\n }\n \"\"\"\n query_filter = {'org': org}\n if provider:\n query_filter['provider'] = provider\n\n return {\n credential.provider: credential.exists\n for credential in ThirdPartyTranscriptCredentialsState.objects.filter(**query_filter)\n }"} {"SOURCE": 
"codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef is_transcript_available(video_id, language_code=None):\n filter_attrs = {'video__edx_video_id': video_id}\n if language_code:\n filter_attrs['language_code'] = language_code\n\n transcript_set = VideoTranscript.objects.filter(**filter_attrs)\n return transcript_set.exists()", "response": "Returns whether the transcripts are available for a video."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_video_transcript(video_id, language_code):\n transcript = VideoTranscript.get_or_none(video_id=video_id, language_code=language_code)\n return TranscriptSerializer(transcript).data if transcript else None", "response": "Get video transcript info from the database."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_video_transcript_data(video_id, language_code):\n video_transcript = VideoTranscript.get_or_none(video_id, language_code)\n if video_transcript:\n try:\n return dict(file_name=video_transcript.filename, content=video_transcript.transcript.file.read())\n except Exception:\n logger.exception(\n '[edx-val] Error while retrieving transcript for video=%s -- language_code=%s',\n video_id,\n language_code\n )\n raise", "response": "Get video transcript data"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_available_transcript_languages(video_id):\n available_languages = VideoTranscript.objects.filter(\n video__edx_video_id=video_id\n ).values_list(\n 'language_code', flat=True\n )\n return list(available_languages)", "response": "Returns a list of available transcript languages for the given video."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the url of the video transcript", "response": "def get_video_transcript_url(video_id, 
language_code):\n \"\"\"\n Returns course video transcript url or None if no transcript\n\n Arguments:\n video_id: it can be an edx_video_id or an external_id extracted from external sources in a video component.\n language_code: language code of a video transcript\n \"\"\"\n video_transcript = VideoTranscript.get_or_none(video_id, language_code)\n if video_transcript:\n return video_transcript.url()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_video_transcript(video_id, language_code, file_format, content, provider=TranscriptProviderType.CUSTOM):\n transcript_serializer = TranscriptSerializer(\n data=dict(provider=provider, language_code=language_code, file_format=file_format),\n context=dict(video_id=video_id),\n )\n if transcript_serializer.is_valid():\n transcript_serializer.save(content=content)\n return transcript_serializer.data\n else:\n raise ValCannotCreateError(transcript_serializer.errors)", "response": "Create a video transcript."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates or update a video transcript for an existing video.", "response": "def create_or_update_video_transcript(video_id, language_code, metadata, file_data=None):\n \"\"\"\n Create or Update video transcript for an existing video.\n\n Arguments:\n video_id: it can be an edx_video_id or an external_id extracted from external sources in a video component.\n language_code: language code of a video transcript\n metadata (dict): A dict containing (to be overwritten) properties\n file_data (InMemoryUploadedFile): Transcript data to be saved for a course video.\n\n Returns:\n video transcript url\n \"\"\"\n # Filter wanted properties\n metadata = {\n prop: value\n for prop, value in six.iteritems(metadata)\n if prop in ['provider', 'language_code', 'file_name', 'file_format'] and value\n }\n\n file_format = metadata.get('file_format')\n if file_format and 
file_format not in list(dict(TranscriptFormat.CHOICES).keys()):\n raise InvalidTranscriptFormat('{} transcript format is not supported'.format(file_format))\n\n provider = metadata.get('provider')\n if provider and provider not in list(dict(TranscriptProviderType.CHOICES).keys()):\n raise InvalidTranscriptProvider('{} transcript provider is not supported'.format(provider))\n\n try:\n # Video should be present in edxval in order to attach transcripts to it.\n video = Video.objects.get(edx_video_id=video_id)\n video_transcript, __ = VideoTranscript.create_or_update(video, language_code, metadata, file_data)\n except Video.DoesNotExist:\n return None\n\n return video_transcript.url()"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ndeletes a video transcript", "response": "def delete_video_transcript(video_id, language_code):\n \"\"\"\n Delete transcript for an existing video.\n\n Arguments:\n video_id: id identifying the video to which the transcript is associated.\n language_code: language code of a video transcript.\n \"\"\"\n video_transcript = VideoTranscript.get_or_none(video_id, language_code)\n if video_transcript:\n # delete the transcript content from storage.\n video_transcript.transcript.delete()\n # delete the transcript metadata from db.\n video_transcript.delete()\n logger.info('Transcript is removed for video \"%s\" and language code \"%s\"', video_id, language_code)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_transcript_preferences(course_id):\n try:\n transcript_preference = TranscriptPreference.objects.get(course_id=course_id)\n except TranscriptPreference.DoesNotExist:\n return\n\n return TranscriptPreferenceSerializer(transcript_preference).data", "response": "Returns the course wide transcript preferences"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncreate or updates the transcript preferences for the given course - wide 
transcript.", "response": "def create_or_update_transcript_preferences(course_id, **preferences):\n \"\"\"\n Creates or updates course-wide transcript preferences\n\n Arguments:\n course_id(str): course id\n\n Keyword Arguments:\n preferences(dict): keyword arguments\n \"\"\"\n transcript_preference, __ = TranscriptPreference.objects.update_or_create(\n course_id=course_id, defaults=preferences\n )\n return TranscriptPreferenceSerializer(transcript_preference).data"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nremove course - wide transcript preferences.", "response": "def remove_transcript_preferences(course_id):\n \"\"\"\n Deletes course-wide transcript preferences.\n\n Arguments:\n course_id(str): course id\n \"\"\"\n try:\n transcript_preference = TranscriptPreference.objects.get(course_id=course_id)\n transcript_preference.delete()\n except TranscriptPreference.DoesNotExist:\n pass"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_course_video_image_url(course_id, edx_video_id):\n try:\n video_image = CourseVideo.objects.select_related('video_image').get(\n course_id=course_id, video__edx_video_id=edx_video_id\n ).video_image\n return video_image.image_url()\n except ObjectDoesNotExist:\n return None", "response": "Returns course video image url or None if no image found"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef update_video_image(edx_video_id, course_id, image_data, file_name):\n try:\n course_video = CourseVideo.objects.select_related('video').get(\n course_id=course_id, video__edx_video_id=edx_video_id\n )\n except ObjectDoesNotExist:\n error_message = u'VAL: CourseVideo not found for edx_video_id: {0} and course_id: {1}'.format(\n edx_video_id,\n course_id\n )\n raise ValVideoNotFoundError(error_message)\n\n video_image, _ = VideoImage.create_or_update(course_video, file_name, image_data)\n 
return video_image.image_url()", "response": "Update a video image for an existing video."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncreating a profile in the database.", "response": "def create_profile(profile_name):\n \"\"\"\n Used to create Profile objects in the database\n\n A profile needs to exists before an EncodedVideo object can be created.\n\n Args:\n profile_name (str): ID of the profile\n\n Raises:\n ValCannotCreateError: Raised if the profile name is invalid or exists\n \"\"\"\n try:\n profile = Profile(profile_name=profile_name)\n profile.full_clean()\n profile.save()\n except ValidationError as err:\n raise ValCannotCreateError(err.message_dict)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget a Video instance prefetching encoded video and course information.", "response": "def _get_video(edx_video_id):\n \"\"\"\n Get a Video instance, prefetching encoded video and course information.\n\n Raises ValVideoNotFoundError if the video cannot be retrieved.\n \"\"\"\n try:\n return Video.objects.prefetch_related(\"encoded_videos\", \"courses\").get(edx_video_id=edx_video_id)\n except Video.DoesNotExist:\n error_message = u\"Video not found for edx_video_id: {0}\".format(edx_video_id)\n raise ValVideoNotFoundError(error_message)\n except Exception:\n error_message = u\"Could not get edx_video_id: {0}\".format(edx_video_id)\n logger.exception(error_message)\n raise ValInternalError(error_message)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a dict mapping profiles to urls.", "response": "def get_urls_for_profiles(edx_video_id, profiles):\n \"\"\"\n Returns a dict mapping profiles to URLs.\n\n If the profiles or video is not found, urls will be blank.\n\n Args:\n edx_video_id (str): id of the video\n profiles (list): list of profiles we want to search for\n\n Returns:\n (dict): A dict containing the profile to url pair\n \"\"\"\n 
profiles_to_urls = {profile: None for profile in profiles}\n try:\n video_info = get_video_info(edx_video_id)\n except ValVideoNotFoundError:\n return profiles_to_urls\n\n for encoded_video in video_info[\"encoded_videos\"]:\n if encoded_video[\"profile\"] in profiles:\n profiles_to_urls[encoded_video[\"profile\"]] = encoded_video[\"url\"]\n\n return profiles_to_urls"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _get_videos_for_filter(video_filter, sort_field=None, sort_dir=SortDirection.asc, pagination_conf=None):\n videos = Video.objects.filter(**video_filter)\n paginator_context = {}\n\n if sort_field:\n # Refining by edx_video_id ensures a total order\n videos = videos.order_by(sort_field.value, \"edx_video_id\")\n if sort_dir == SortDirection.desc:\n videos = videos.reverse()\n\n if pagination_conf:\n videos_per_page = pagination_conf.get('videos_per_page')\n paginator = Paginator(videos, videos_per_page)\n videos = paginator.page(pagination_conf.get('page_number'))\n paginator_context = {\n 'current_page': videos.number,\n 'total_pages': videos.paginator.num_pages,\n 'items_on_one_page':videos_per_page\n }\n\n return (VideoSerializer(video).data for video in videos), paginator_context", "response": "Returns a generator expression that contains the videos found for the given video filter."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_course_video_ids_with_youtube_profile(course_ids=None, offset=None, limit=None):\n course_videos = (CourseVideo.objects.select_related('video')\n .prefetch_related('video__encoded_videos', 'video__encoded_videos__profile')\n .filter(video__encoded_videos__profile__profile_name='youtube')\n .order_by('id')\n .distinct())\n\n if course_ids:\n course_videos = course_videos.filter(course_id__in=course_ids)\n\n course_videos = course_videos.values_list('course_id', 'video__edx_video_id')\n if limit is not None and offset is not 
None:\n course_videos = course_videos[offset: offset+limit]\n\n course_videos_with_yt_profile = []\n for course_id, edx_video_id in course_videos:\n yt_profile = EncodedVideo.objects.filter(\n video__edx_video_id=edx_video_id,\n profile__profile_name='youtube'\n ).first()\n\n if yt_profile:\n course_videos_with_yt_profile.append((\n course_id, edx_video_id, yt_profile.url\n ))\n\n return course_videos_with_yt_profile", "response": "Returns a list of all the course ids and video ids with the youtube profile"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_videos_for_course(course_id, sort_field=None, sort_dir=SortDirection.asc, pagination_conf=None):\n return _get_videos_for_filter(\n {'courses__course_id': six.text_type(course_id), 'courses__is_hidden': False},\n sort_field,\n sort_dir,\n pagination_conf,\n )", "response": "Returns an iterator of videos for the given course id."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_videos_for_ids(\n edx_video_ids,\n sort_field=None,\n sort_dir=SortDirection.asc\n):\n \"\"\"\n Returns an iterator of videos that match the given list of ids.\n\n Args:\n edx_video_ids (list)\n sort_field (VideoSortField)\n sort_dir (SortDirection)\n\n Returns:\n A generator expression that contains the videos found, sorted by the\n given field and direction, with ties broken by edx_video_id to ensure a\n total order\n \"\"\"\n videos, __ = _get_videos_for_filter(\n {\"edx_video_id__in\":edx_video_ids},\n sort_field,\n sort_dir,\n )\n return videos", "response": "Returns an iterator of videos that match the given list of ids."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nreturn a dict of edx_video_ids with a dict of requested profiles.", "response": "def get_video_info_for_course_and_profiles(course_id, profiles):\n \"\"\"\n Returns a dict of edx_video_ids with a dict of requested 
profiles.\n\n Args:\n course_id (str): id of the course\n profiles (list): list of profile_names\n Returns:\n (dict): Returns all the profiles attached to a specific\n edx_video_id\n {\n edx_video_id: {\n 'duration': length of the video in seconds,\n 'profiles': {\n profile_name: {\n 'url': url of the encoding\n 'file_size': size of the file in bytes\n },\n }\n },\n }\n Example:\n Given two videos with two profiles each in course_id 'test_course':\n {\n u'edx_video_id_1': {\n u'duration: 1111,\n u'profiles': {\n u'mobile': {\n 'url': u'http: //www.example.com/meow',\n 'file_size': 2222\n },\n u'desktop': {\n 'url': u'http: //www.example.com/woof',\n 'file_size': 4444\n }\n }\n },\n u'edx_video_id_2': {\n u'duration: 2222,\n u'profiles': {\n u'mobile': {\n 'url': u'http: //www.example.com/roar',\n 'file_size': 6666\n },\n u'desktop': {\n 'url': u'http: //www.example.com/bzzz',\n 'file_size': 8888\n }\n }\n }\n }\n \"\"\"\n # In case someone passes in a key (VAL doesn't really understand opaque keys)\n course_id = six.text_type(course_id)\n try:\n encoded_videos = EncodedVideo.objects.filter(\n profile__profile_name__in=profiles,\n video__courses__course_id=course_id\n ).select_related()\n except Exception:\n error_message = u\"Could not get encoded videos for course: {0}\".format(course_id)\n logger.exception(error_message)\n raise ValInternalError(error_message)\n\n # DRF serializers were causing extra queries for some reason...\n return_dict = {}\n for enc_vid in encoded_videos:\n # Add duration to edx_video_id\n return_dict.setdefault(enc_vid.video.edx_video_id, {}).update(\n {\n \"duration\": enc_vid.video.duration,\n }\n )\n # Add profile information to edx_video_id's profiles\n return_dict[enc_vid.video.edx_video_id].setdefault(\"profiles\", {}).update(\n {enc_vid.profile.profile_name: {\n \"url\": enc_vid.url,\n \"file_size\": enc_vid.file_size,\n }}\n )\n return return_dict"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for 
the following Python 3 function\ndef copy_course_videos(source_course_id, destination_course_id):\n if source_course_id == destination_course_id:\n return\n\n course_videos = CourseVideo.objects.select_related('video', 'video_image').filter(\n course_id=six.text_type(source_course_id)\n )\n\n for course_video in course_videos:\n destination_course_video, __ = CourseVideo.objects.get_or_create(\n video=course_video.video,\n course_id=destination_course_id\n )\n if hasattr(course_video, 'video_image'):\n VideoImage.create_or_update(\n course_video=destination_course_video,\n file_name=course_video.video_image.image.name\n )", "response": "Copy the videos from the source_course_id to the destination_course_id."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef export_to_xml(video_id, resource_fs, static_dir, course_id=None):\n video_image_name = ''\n video = _get_video(video_id)\n\n try:\n course_video = CourseVideo.objects.select_related('video_image').get(course_id=course_id, video=video)\n video_image_name = course_video.video_image.image.name\n except ObjectDoesNotExist:\n pass\n\n video_el = Element(\n 'video_asset',\n attrib={\n 'client_video_id': video.client_video_id,\n 'duration': six.text_type(video.duration),\n 'image': video_image_name\n }\n )\n for encoded_video in video.encoded_videos.all():\n SubElement(\n video_el,\n 'encoded_video',\n {\n name: six.text_type(getattr(encoded_video, name))\n for name in ['profile', 'url', 'file_size', 'bitrate']\n }\n )\n\n return create_transcripts_xml(video_id, video_el, resource_fs, static_dir)", "response": "Exports data for a video into an xml object."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating a transcript file in the file system.", "response": "def create_transcript_file(video_id, language_code, file_format, resource_fs, static_dir):\n \"\"\"\n Writes transcript file to file system.\n\n Arguments:\n video_id 
(str): Video id of the video transcript file is attached.\n language_code (str): Language code of the transcript.\n file_format (str): File format of the transcript file.\n static_dir (str): The Directory to store transcript file.\n resource_fs (SubFS): The file system to store transcripts.\n \"\"\"\n transcript_filename = '{video_id}-{language_code}.srt'.format(\n video_id=video_id,\n language_code=language_code\n )\n transcript_data = get_video_transcript_data(video_id, language_code)\n if transcript_data:\n transcript_content = Transcript.convert(\n transcript_data['content'],\n input_format=file_format,\n output_format=Transcript.SRT\n )\n create_file_in_fs(transcript_content, transcript_filename, resource_fs, static_dir)\n\n return transcript_filename"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_transcripts_xml(video_id, video_el, resource_fs, static_dir):\n video_transcripts = VideoTranscript.objects.filter(video__edx_video_id=video_id).order_by('language_code')\n # create transcripts node only when we have transcripts for a video\n if video_transcripts.exists():\n transcripts_el = SubElement(video_el, 'transcripts')\n\n transcript_files_map = {}\n for video_transcript in video_transcripts:\n language_code = video_transcript.language_code\n file_format = video_transcript.file_format\n\n try:\n transcript_filename = create_transcript_file(\n video_id=video_id,\n language_code=language_code,\n file_format=file_format,\n resource_fs=resource_fs.delegate_fs(),\n static_dir=combine(u'course', static_dir) # File system should not start from /draft directory.\n )\n transcript_files_map[language_code] = transcript_filename\n except TranscriptsGenerationException:\n # we don't want to halt export in this case, just log and move to the next transcript.\n logger.exception('[VAL] Error while generating \"%s\" transcript for video[\"%s\"].', language_code, video_id)\n continue\n\n 
SubElement(\n transcripts_el,\n 'transcript',\n {\n 'language_code': language_code,\n 'file_format': Transcript.SRT,\n 'provider': video_transcript.provider,\n }\n )\n\n return dict(xml=video_el, transcripts=transcript_files_map)", "response": "Creates xml for transcripts for a video."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef import_from_xml(xml, edx_video_id, resource_fs, static_dir, external_transcripts=dict(), course_id=None):\n if xml.tag != 'video_asset':\n raise ValCannotCreateError('Invalid XML')\n\n # If video with edx_video_id already exists, associate it with the given course_id.\n try:\n if not edx_video_id:\n raise Video.DoesNotExist\n\n video = Video.objects.get(edx_video_id=edx_video_id)\n logger.info(\n \"edx_video_id '%s' present in course '%s' not imported because it exists in VAL.\",\n edx_video_id,\n course_id,\n )\n\n # We don't want to link an existing video to course if its an external video.\n # External videos do not have any playback profiles associated, these are just to track video\n # transcripts for those video components who do not use edx hosted videos for playback.\n if course_id and video.status != EXTERNAL_VIDEO_STATUS:\n course_video, __ = CourseVideo.get_or_create_with_validation(video=video, course_id=course_id)\n\n image_file_name = xml.get('image', '').strip()\n if image_file_name:\n VideoImage.create_or_update(course_video, image_file_name)\n\n return edx_video_id\n except ValidationError as err:\n logger.exception(err.message)\n raise ValCannotCreateError(err.message_dict)\n except Video.DoesNotExist:\n pass\n\n if edx_video_id:\n # Video with edx_video_id did not exist, so create one from xml data.\n data = {\n 'edx_video_id': edx_video_id,\n 'client_video_id': xml.get('client_video_id'),\n 'duration': xml.get('duration'),\n 'status': 'imported',\n 'encoded_videos': [],\n 'courses': [{course_id: xml.get('image')}] if course_id else [],\n }\n for 
encoded_video_el in xml.iterfind('encoded_video'):\n profile_name = encoded_video_el.get('profile')\n try:\n Profile.objects.get(profile_name=profile_name)\n except Profile.DoesNotExist:\n logger.info(\n \"Imported edx_video_id '%s' contains unknown profile '%s'.\",\n edx_video_id,\n profile_name\n )\n continue\n data['encoded_videos'].append({\n 'profile': profile_name,\n 'url': encoded_video_el.get('url'),\n 'file_size': encoded_video_el.get('file_size'),\n 'bitrate': encoded_video_el.get('bitrate'),\n })\n\n if not data['encoded_videos']:\n # Video's status does not get included in video xml at the time of export. So, at this point,\n # we cannot tell from xml that whether a video had an external status. But if encoded videos\n # are not set, the chances are, the video was an external one, in which case, we will not link\n # it to the course(s). Even if the video wasn't an external one and it is having 0 encodes in\n # xml, it does not have a side effect if not linked to a course, since the video was already\n # non-playable.\n data['status'] = EXTERNAL_VIDEO_STATUS\n data['courses'] = []\n\n # Create external video if no edx_video_id.\n edx_video_id = create_video(data)\n else:\n edx_video_id = create_external_video('External Video')\n\n create_transcript_objects(xml, edx_video_id, resource_fs, static_dir, external_transcripts)\n return edx_video_id", "response": "Imports data from an XML video_asset element and returns a new video_asset element."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef import_transcript_from_fs(edx_video_id, language_code, file_name, provider, resource_fs, static_dir):\n file_format = None\n transcript_data = get_video_transcript_data(edx_video_id, language_code)\n\n # First check if transcript record does not exist.\n if not transcript_data:\n # Read file from import file system and attach it to transcript record in DS.\n try:\n with resource_fs.open(combine(static_dir, 
file_name), 'rb') as f:\n file_content = f.read()\n file_content = file_content.decode('utf-8-sig')\n except ResourceNotFound as exc:\n # Don't raise exception in case transcript file is not found in course OLX.\n logger.warn(\n '[edx-val] \"%s\" transcript \"%s\" for video \"%s\" is not found.',\n language_code,\n file_name,\n edx_video_id\n )\n return\n except UnicodeDecodeError:\n # Don't raise exception in case transcript contains non-utf8 content.\n logger.warn(\n '[edx-val] \"%s\" transcript \"%s\" for video \"%s\" contains a non-utf8 file content.',\n language_code,\n file_name,\n edx_video_id\n )\n return\n\n # Get file format from transcript content.\n try:\n file_format = get_transcript_format(file_content)\n except Error as ex:\n # Don't raise exception, just don't create transcript record.\n logger.warn(\n '[edx-val] Error while getting transcript format for video=%s -- language_code=%s --file_name=%s',\n edx_video_id,\n language_code,\n file_name\n )\n return\n\n # Create transcript record.\n create_video_transcript(\n video_id=edx_video_id,\n language_code=language_code,\n file_format=file_format,\n content=ContentFile(file_content),\n provider=provider\n )", "response": "Imports transcript file from file system and creates transcript record in DS."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate VideoTranscript objects. Arguments: xml (Element): lxml Element object. edx_video_id (str): Video id of the video. resource_fs (OSFS): Import file system. static_dir (str): The Directory to retrieve transcript file. external_transcripts (dict): A dict containing the list of names of the external transcripts. 
Example: { 'en': ['The_Flash.srt', 'Harry_Potter.srt'], 'es': ['Green_Arrow.srt'] }", "response": "def create_transcript_objects(xml, edx_video_id, resource_fs, static_dir, external_transcripts):\n \"\"\"\n Create VideoTranscript objects.\n\n Arguments:\n xml (Element): lxml Element object.\n edx_video_id (str): Video id of the video.\n resource_fs (OSFS): Import file system.\n static_dir (str): The Directory to retrieve transcript file.\n external_transcripts (dict): A dict containing the list of names of the external transcripts.\n Example:\n {\n 'en': ['The_Flash.srt', 'Harry_Potter.srt'],\n 'es': ['Green_Arrow.srt']\n }\n \"\"\"\n # File system should not start from /draft directory.\n with open_fs(resource_fs.root_path.split('/drafts')[0]) as file_system:\n # First import VAL transcripts.\n for transcript in xml.findall('.//transcripts/transcript'):\n try:\n file_format = transcript.attrib['file_format']\n language_code = transcript.attrib['language_code']\n transcript_file_name = u'{edx_video_id}-{language_code}.{file_format}'.format(\n edx_video_id=edx_video_id,\n language_code=language_code,\n file_format=file_format\n )\n\n import_transcript_from_fs(\n edx_video_id=edx_video_id,\n language_code=transcript.attrib['language_code'],\n file_name=transcript_file_name,\n provider=transcript.attrib['provider'],\n resource_fs=file_system,\n static_dir=static_dir\n )\n except KeyError:\n logger.warn(\"VAL: Required attributes are missing from xml, xml=[%s]\", etree.tostring(transcript).strip())\n\n # This won't overwrite transcript for a language which is already present for the video.\n for language_code, transcript_file_names in six.iteritems(external_transcripts):\n for transcript_file_name in transcript_file_names:\n import_transcript_from_fs(\n edx_video_id=edx_video_id,\n language_code=language_code,\n file_name=transcript_file_name,\n provider=TranscriptProviderType.CUSTOM,\n resource_fs=file_system,\n static_dir=static_dir\n )"} {"SOURCE": "codesearchnet", 
"instruction": "Explain what the following Python 3 code does\ndef get_cache_stats():\n cache_stats = []\n for name, _ in six.iteritems(settings.CACHES):\n cache_backend = caches[name]\n\n try:\n cache_backend_stats = cache_backend._cache.get_stats()\n except AttributeError: # this backend doesn't provide stats\n logger.info(\n 'The memcached backend \"{0}\" does not support or '\n 'provide stats.'.format(name)\n )\n continue\n\n for address, stats in cache_backend_stats:\n cache_stats.append(\n {'name': name, 'address': address, 'stats': stats}\n )\n return cache_stats", "response": "Returns a list of dictionaries of all memcached servers and their stats if they provide stats."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef register_git_injector(username, password):\n fd, tmp_path = mkstemp()\n atexit.register(lambda: os.remove(tmp_path))\n\n with os.fdopen(fd, 'w') as f:\n f.write(ASKPASS.format(username=username, password=password or ''))\n\n chmod(tmp_path, 0o700)\n os.environ['GIT_ASKPASS'] = tmp_path", "response": "Generate a script that writes the password to the git command line tool"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nconvert a string to snake case", "response": "def to_snake(camel):\n \"\"\"TimeSkill -> time_skill\"\"\"\n if not camel:\n return camel\n return ''.join('_' + x if 'A' <= x <= 'Z' else x for x in camel).lower()[camel[0].isupper():]"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef serialized(func):\n\n @wraps(func)\n def wrapper(*args, **kwargs):\n return '\\n'.join(\n ' '.join(parts) if isinstance(parts, tuple) else parts\n for parts in func(*args, **kwargs)\n )\n\n return wrapper", "response": "Write a serializer by yielding each line of output"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 
function\ndef create_pr_message(self, skill_git: Git, skill_repo: Repository) -> tuple:\n title = 'Upgrade ' + self.skill.name\n body = body_template.format(\n skill_name=self.skill.name,\n commits='\\n'.join(\n ' - [{}]({})'.format(\n skill_git.show('-s', sha, format='%s'),\n skill_repo.get_commit(sha).html_url\n )\n for sha in skill_git.rev_list(\n '--ancestry-path', '{}..{}'.format(self.skill.entry.sha, 'HEAD')\n ).split('\\n')\n )\n )\n return title, body", "response": "Reads git commits from skill repo to create a PR content"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef ship_move(ship, x, y, speed):\n click.echo('Moving ship %s to %s,%s with speed %s' % (ship, x, y, speed))", "response": "Moves SHIP to the new location X Y with speed"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _check_status(sdp_state):\n try:\n errval = \"error\"\n errdict = dict(state=\"unknown\", reason=\"unknown\")\n if sdp_state.current_state == \"unknown\":\n errdict['reason'] = 'database not initialised.'\n LOG.debug('Current state is unknown;')\n LOG.debug('Target state is %s;', sdp_state.target_state)\n LOG.debug('Current state timestamp is %s;',\n sdp_state.current_timestamp)\n elif sdp_state.current_state is None:\n errdict['reason'] = 'Master Controller Services may have died.'\n LOG.debug('Current state is NONE;')\n LOG.debug('Target state is %s;', sdp_state.target_state)\n LOG.debug('Current state timestamp is %s;',\n sdp_state.current_timestamp)\n elif sdp_state.target_state is None:\n errdict['reason'] = 'Master Controller Services may have died.'\n LOG.debug('Current state is %s;',\n sdp_state.current_state)\n LOG.debug('Target state is NONE;')\n LOG.debug('Current state timestamp is %s;',\n sdp_state.current_timestamp)\n LOG.debug('Target state timestamp is %s;',\n sdp_state.target_timestamp)\n elif sdp_state.current_timestamp is None:\n errdict['reason'] = 'Master 
Controller Services may have died.'\n LOG.debug('Current state is %s;',\n sdp_state.current_state)\n LOG.debug('Target state is %s;', sdp_state.target_state)\n LOG.debug('Current state timestamp is NONE')\n LOG.debug('Target state timestamp is %s;',\n sdp_state.target_timestamp)\n elif sdp_state.target_timestamp is None:\n errdict['reason'] = 'Master Controller Services may have died.'\n LOG.debug('Current state is %s;',\n sdp_state.current_state)\n LOG.debug('Target state is %s;', sdp_state.target_state)\n LOG.debug('Current state timestamp is %s;',\n sdp_state.current_timestamp)\n LOG.debug('Target state timestamp is NONE')\n elif sdp_state.current_timestamp < sdp_state.target_timestamp:\n errdict['reason'] = \\\n 'Timestamp for Master Controller Services is stale.'\n LOG.debug('Current state is %s;',\n sdp_state.current_state)\n LOG.debug('Target state is %s;', sdp_state.target_state)\n LOG.debug('Current state timestamp is %s;',\n sdp_state.current_timestamp)\n LOG.debug('Target state timestamp is %s;',\n sdp_state.target_timestamp)\n else:\n errval = \"okay\"\n except ConnectionError as err:\n errdict['reason'] = err\n LOG.debug('Connection Error %s', err)\n return errval, errdict", "response": "Check the status of the current and target states."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef root():\n return {\n \"message\": \"Welcome to the SIP Master Controller (flask variant)\",\n \"_links\": {\n \"items\": [\n {\n \"Link\": \"Health\",\n \"href\": \"{}health\".format(request.url)\n },\n {\n \"Link\": \"Version\",\n \"href\": \"{}version\".format(request.url)\n },\n {\n \"Link\": \"Allowed target states\",\n \"href\": \"{}allowed_target_sdp_states\".format(request.url)\n },\n {\n \"Link\": \"SDP state\",\n \"href\": \"{}state\".format(request.url)\n },\n {\n \"Link\": \"SDP target state\",\n \"href\": \"{}state/target\".format(request.url)\n },\n {\n \"Link\": \"SDP target state\",\n 
\"href\": \"{}target_state\".format(request.url)\n },\n {\n \"Link\": \"SDP current state\",\n \"href\": \"{}state/current\".format(request.url)\n },\n {\n \"Link\": \"Scheduling Block Instances\",\n \"href\": \"{}scheduling_block_instances\".format(request.url)\n },\n {\n \"Link\": \"Processing Blocks\",\n \"href\": \"{}processing_blocks\".format(request.url)\n },\n {\n \"Link\": \"Resource Availability\",\n \"href\": \"{}resource_availability\".format(request.url)\n },\n {\n \"Link\": \"Configure SBI\",\n \"href\": \"{}configure_sbi\".format(request.url)\n }\n ]\n }\n }", "response": "Return the root page of the SIP Master Controller."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nchecking the health of this service.", "response": "def health():\n \"\"\"Check the health of this service.\"\"\"\n up_time = time.time() - START_TIME\n response = dict(service=__service_id__,\n uptime='{:.2f}s'.format(up_time))\n return response, HTTPStatus.OK"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef allowed_transitions():\n try:\n sdp_state = SDPState()\n return sdp_state.allowed_target_states[sdp_state.current_state]\n except KeyError:\n LOG.error(\"Key Error\")\n return dict(state=\"KeyError\", reason=\"KeyError\")", "response": "Get target states allowed for the current state."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_state():\n sdp_state = SDPState()\n errval, errdict = _check_status(sdp_state)\n if errval == \"error\":\n LOG.debug(errdict['reason'])\n return dict(\n current_state=\"unknown\",\n target_state=\"unknown\",\n last_updated=\"unknown\",\n reason=errdict['reason']\n )\n _last_updated = sdp_state.current_timestamp\n if sdp_state.target_timestamp > _last_updated:\n _last_updated = sdp_state.target_timestamp\n\n return dict(\n current_state=sdp_state.current_state,\n target_state=sdp_state.target_state,\n 
allowed_target_states=sdp_state.allowed_target_states[\n sdp_state.current_state],\n last_updated=_last_updated.isoformat()\n ), HTTPStatus.OK", "response": "Return current state target state and allowed target states."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_target_state():\n sdp_state = SDPState()\n errval, errdict = _check_status(sdp_state)\n if errval == \"error\":\n LOG.debug(errdict['reason'])\n return dict(\n current_target_state=\"unknown\",\n last_updated=\"unknown\",\n reason=errdict['reason']\n )\n LOG.debug('Getting target state')\n target_state = sdp_state.target_state\n LOG.debug('Target state = %s', target_state)\n return dict(\n current_target_state=target_state,\n allowed_target_states=sdp_state.allowed_target_states[\n sdp_state.current_state],\n last_updated=sdp_state.target_timestamp.isoformat())", "response": "Get the target state of the current SDP media."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the SDP State and the timestamp for when it was updated.", "response": "def get_current_state():\n \"\"\"Return the SDP State and the timestamp for when it was updated.\"\"\"\n sdp_state = SDPState()\n errval, errdict = _check_status(sdp_state)\n if errval == \"error\":\n LOG.debug(errdict['reason'])\n return dict(\n current_state=\"unknown\",\n last_updated=\"unknown\",\n reason=errdict['reason']\n )\n LOG.debug('Current State: %s', sdp_state.current_state)\n LOG.debug('Current State last updated: %s',\n sdp_state.current_timestamp.isoformat())\n return dict(\n current_state=sdp_state.current_state,\n last_updated=sdp_state.current_timestamp.isoformat()\n ), HTTPStatus.OK"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nupdate the target state of the current element in the SDP tree.", "response": "def put_target_state():\n \"\"\"SDP target State.\n\n Sets the target state\n \"\"\"\n sdp_state = SDPState()\n 
errval, errdict = _check_status(sdp_state)\n if errval == \"error\":\n LOG.debug(errdict['reason'])\n rdict = dict(\n current_state=\"unknown\",\n last_updated=\"unknown\",\n reason=errdict['reason']\n )\n else:\n try:\n LOG.debug('request is of type %s', type(request))\n request_data = request.data\n LOG.debug('request data is of type %s', type(request_data))\n LOG.debug('request is %s', request_data)\n request_data = request.data\n target_state = request_data['value'].lower()\n sdp_state.update_target_state(target_state)\n rdict = dict(message='Target state successfully updated to {}'\n .format(target_state))\n except ValueError as error:\n rdict = dict(error='Failed to set target state',\n reason=str(error))\n except RuntimeError as error:\n rdict = dict(error='RunTime error',\n reason=str(error))\n return rdict"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the list of processing blocks known to SDP.", "response": "def processing_block_list():\n \"\"\"Return the list of processing blocks known to SDP.\"\"\"\n pb_list = ProcessingBlockList()\n return dict(active=pb_list.active,\n completed=pb_list.completed,\n aborted=pb_list.aborted)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns list of Scheduling Block instances known to SDP.", "response": "def scheduling_blocks():\n \"\"\"Return list of Scheduling Block instances known to SDP.\"\"\"\n sbi_list = SchedulingBlockInstanceList()\n return dict(active=sbi_list.active,\n completed=sbi_list.completed,\n aborted=sbi_list.aborted)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nconfigure an SBI using POSTed configuration.", "response": "def configure_sbi():\n \"\"\"Configure an SBI using POSTed configuration.\"\"\"\n # Need an ID for the subarray - guessing I just get\n # the list of inactive subarrays and use the first\n inactive_list = SubarrayList().inactive\n request_data = 
request.data\n LOG.debug('request is of type %s', type(request_data))\n try:\n sbi = Subarray(inactive_list[0])\n sbi.activate()\n sbi.configure_sbi(request_data)\n except jsonschema.exceptions.ValidationError as error:\n LOG.error('Error configuring SBI: %s', error)\n return dict(path=error.absolute_path.__str__(),\n schema_path=error.schema_path.__str__(),\n message=error.message)\n return dict(status=\"Accepted SBI: {}\".format(sbi.id))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncatch all path - return a JSON 404", "response": "def catch_all(path):\n \"\"\"Catch all path - return a JSON 404 \"\"\"\n return (dict(error='Invalid URL: /{}'.format(path),\n links=dict(root='{}{}'.format(request.url_root, PREFIX[1:]))),\n HTTPStatus.NOT_FOUND)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_active() -> List[str]:\n active = []\n for i in range(__num_subarrays__):\n key = Subarray.get_key(i)\n if DB.get_hash_value(key, 'active').upper() == 'TRUE':\n active.append(Subarray.get_id(i))\n return active", "response": "Return the list of active subarrays."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the list of inactive subarrays.", "response": "def get_inactive() -> List[str]:\n \"\"\"Return the list of inactive subarrays.\"\"\"\n inactive = []\n for i in range(__num_subarrays__):\n key = Subarray.get_key(i)\n if DB.get_hash_value(key, 'active').upper() == 'FALSE':\n inactive.append(Subarray.get_id(i))\n return inactive"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nmake an image from gridded visibilities and saves it to a FITS file.", "response": "def save_image(imager, grid_data, grid_norm, output_file):\n \"\"\"Makes an image from gridded visibilities and saves it to a FITS file.\n\n Args:\n imager (oskar.Imager): Handle to configured imager.\n grid_data (numpy.ndarray): Final 
visibility grid.\n grid_norm (float): Grid normalisation to apply.\n output_file (str): Name of output FITS file to write.\n \"\"\"\n # Make the image (take the FFT, normalise, and apply grid correction).\n imager.finalise_plane(grid_data, grid_norm)\n grid_data = numpy.real(grid_data)\n\n # Trim the image if required.\n border = (imager.plane_size - imager.image_size) // 2\n if border > 0:\n end = border + imager.image_size\n grid_data = grid_data[border:end, border:end]\n\n # Write the FITS file.\n hdr = fits.header.Header()\n fits.writeto(output_file, grid_data, hdr, clobber=True)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nperform an element - wise addition of sequences into a new list.", "response": "def reduce_sequences(object_a, object_b):\n \"\"\"Performs an element-wise addition of sequences into a new list.\n\n Both sequences must have the same length, and the addition operator must be\n defined for each element of the sequence.\n \"\"\"\n def is_seq(obj):\n \"\"\"Returns true if the object passed is a sequence.\"\"\"\n return hasattr(obj, \"__getitem__\") or hasattr(obj, \"__iter__\")\n if object_a is None or object_b is None:\n return None\n elif is_seq(object_a) and is_seq(object_b):\n reduced = []\n for element_a, element_b in zip(object_a, object_b):\n if element_a is not None and element_b is not None:\n reduced.append(element_a + element_b)\n else:\n reduced.append(None)\n return reduced\n else:\n return object_a + object_b"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef main():\n # Check command line arguments.\n if len(sys.argv) < 3:\n raise RuntimeError(\n 'Usage: spark-submit spark_imager_test.py <settings_file> <dir> '\n '[partitions]')\n\n # Create log object.\n log = logging.getLogger('pyspark')\n log.setLevel(logging.INFO)\n log.addHandler(logging.StreamHandler(sys.stdout))\n\n # Load pipeline settings.\n with open(sys.argv[1]) as f:\n 
settings = json.load(f)\n\n # Get a list of input Measurement Sets to process.\n data_dir = str(sys.argv[2])\n inputs = glob(join(data_dir, '*.ms')) + glob(join(data_dir, '*.MS'))\n inputs = filter(None, inputs)\n log.info('Found input Measurement Sets: %s', ', '.join(inputs))\n\n # Get a Spark context.\n context = pyspark.SparkContext(appName=\"spark_imager_test\")\n\n # Create the Spark RDD containing the input filenames,\n # suitably parallelized.\n partitions = int(sys.argv[3]) if len(sys.argv) > 3 else 2\n rdd = context.parallelize(inputs, partitions)\n\n # Define Spark broadcast variables.\n bc_settings = context.broadcast(settings)\n bc_grid_weights = None\n\n # Process coordinates first if required.\n if (settings['combine'] and (\n settings['imager']['weighting'] == 'Uniform' or\n settings['imager']['algorithm'] == 'W-projection')):\n # Create RDD to generate weights grids.\n rdd_coords = rdd.map(\n partial(node_run, coords_only=True, bc_settings=bc_settings,\n bc_grid_weights=None))\n\n # Mark this RDD as persistent so it isn't computed more than once.\n rdd_coords.persist()\n\n # Get the maximum number of W-planes required, and update settings.\n num_w_planes = rdd_coords.map(lambda x: x[1]).max()\n settings['imager']['num_w_planes'] = num_w_planes\n\n # Get the combined grid of weights and broadcast it to nodes.\n output = rdd_coords.reduce(reduce_sequences)\n bc_grid_weights = context.broadcast(output[0])\n\n # Delete this RDD.\n rdd_coords.unpersist()\n\n # Re-broadcast updated settings.\n bc_settings = context.broadcast(settings)\n\n # Run parallel pipeline on worker nodes and combine visibility grids.\n output = rdd.map(\n partial(node_run, coords_only=False, bc_settings=bc_settings,\n bc_grid_weights=bc_grid_weights)).reduce(reduce_sequences)\n\n # Finalise combined visibility grids if required.\n if settings['combine']:\n # Create an imager to finalise (FFT) the gridded data.\n imager = oskar.Imager(settings['precision'])\n for key, value in 
settings['imager'].items():\n setattr(imager, key, value)\n\n # Finalise grid and save image.\n save_image(imager, output[0], output[1], settings['output_file'])\n log.info('Finished. Output file is %s', settings['output_file'])\n\n context.stop()", "response": "Runs test imaging pipeline using Spark."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef load_schema(path):\n with open(path) as json_data:\n schema = json.load(json_data)\n return schema", "response": "Loads a JSON schema file."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef clear_db():\n cursor = '0'\n while cursor != 0:\n cursor, keys = DB.scan(cursor, match='*', count=5000)\n if keys:\n DB.delete(*keys)", "response": "Clear the entire db."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_scheduling_block_ids():\n ids = [key.split('/')[-1]\n for key in DB.keys(pattern='scheduling_block/*')]\n return sorted(ids)", "response": "Return list of scheduling block IDs"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding a Scheduling Block to the Configuration Database.", "response": "def add_scheduling_block(config, schema_path=None):\n \"\"\"Add a Scheduling Block to the Configuration Database.\n\n The configuration dictionary must match the schema defined in\n in the schema_path variable at the top of the function.\n\n Args:\n config (dict): Scheduling Block instance request configuration.\n schema_path (str): Path to schema file used to validate the\n Scheduling Block Instance request\n \"\"\"\n\n if schema_path is None:\n schema_path = os.path.join(os.path.dirname(__file__),\n 'sbi_post.json')\n schema = load_schema(schema_path)\n jsonschema.validate(config, schema)\n\n # Add the scheduling block to the database\n # (This is done as a single k/v pair here but would probably be\n 
# expanded to a set of keys in the actual implementation)\n DB.set('scheduling_block/{}'.format(config['id']), json.dumps(config))\n\n # Add a event to the scheduling block event list to notify\n # of a new scheduling block being added to the db.\n DB.rpush('scheduling_block_events',\n json.dumps(dict(type=\"created\", id=config[\"id\"])))"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ndeletes the Scheduling Block with the specified ID", "response": "def delete_scheduling_block(block_id):\n \"\"\"Delete Scheduling Block with the specified ID\"\"\"\n DB.delete('scheduling_block/{}'.format(block_id))\n\n # Add a event to the scheduling block event list to notify\n # of a deleting a scheduling block from the db\n DB.rpush('scheduling_block_events',\n json.dumps(dict(type=\"deleted\", id=block_id)))"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning the latest Scheduling Block event", "response": "def get_scheduling_block_event():\n \"\"\"Return the latest Scheduling Block event\"\"\"\n event = DB.rpoplpush('scheduling_block_events',\n 'scheduling_block_event_history')\n if event:\n event = json.loads(event.decode('utf-8'))\n return event"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_sub_array_ids():\n ids = set()\n for key in sorted(DB.keys(pattern='scheduling_block/*')):\n config = json.loads(DB.get(key))\n ids.add(config['sub_array_id'])\n return sorted(list(ids))", "response": "Return list of sub - array Id s currently known to SDP"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_subarray_sbi_ids(sub_array_id):\n ids = []\n for key in sorted(DB.keys(pattern='scheduling_block/*')):\n config = json.loads(DB.get(key))\n if config['sub_array_id'] == sub_array_id:\n ids.append(config['id'])\n return ids", "response": "Return list of scheduling block Id 
s associated with the given sub_array_id\n "} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_processing_block_ids():\n ids = []\n for key in sorted(DB.keys(pattern='scheduling_block/*')):\n config = json.loads(DB.get(key))\n for processing_block in config['processing_blocks']:\n ids.append(processing_block['id'])\n return ids", "response": "Return an array of Processing Block ids"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_processing_block(block_id):\n identifiers = block_id.split(':')\n scheduling_block_id = identifiers[0]\n scheduling_block_config = get_scheduling_block(scheduling_block_id)\n for processing_block in scheduling_block_config['processing_blocks']:\n if processing_block['id'] == block_id:\n return processing_block\n raise KeyError('Unknown Processing Block id: {} ({})'\n .format(identifiers[-1], block_id))", "response": "Return the Processing Block Configuration for the specified ID"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ndeleting a Processing Block with the specified ID", "response": "def delete_processing_block(processing_block_id):\n \"\"\"Delete Processing Block with the specified ID\"\"\"\n scheduling_block_id = processing_block_id.split(':')[0]\n config = get_scheduling_block(scheduling_block_id)\n processing_blocks = config.get('processing_blocks')\n processing_block = list(filter(\n lambda x: x.get('id') == processing_block_id, processing_blocks))[0]\n config['processing_blocks'].remove(processing_block)\n DB.set('scheduling_block/{}'.format(config['id']), json.dumps(config))\n\n # Add a event to the scheduling block event list to notify\n # of a new scheduling block being added to the db.\n DB.rpush('processing_block_events',\n json.dumps(dict(type=\"deleted\", id=processing_block_id)))"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 
function for\nreturning the latest Processing Block event", "response": "def get_processing_block_event():\n \"\"\"Return the latest Processing Block event\"\"\"\n event = DB.rpoplpush('processing_block_events',\n 'processing_block_event_history')\n if event:\n event = json.loads(event.decode('utf-8'))\n return event"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsubscribing to the specified object type.", "response": "def subscribe(object_type: str, subscriber: str,\n callback_handler: Callable = None) -> EventQueue:\n \"\"\"Subscribe to the specified object type.\n\n Returns an EventQueue object which can be used to query events\n associated with the object type for this subscriber.\n\n Args:\n object_type (str): Object type\n subscriber (str): Subscriber name\n callback_handler (function, optional): Callback handler function.\n\n Returns:\n EventQueue, event queue object.\n\n \"\"\"\n key = _keys.subscribers(object_type)\n DB.remove_from_list(key, subscriber)\n DB.append_to_list(key, subscriber)\n return EventQueue(object_type, subscriber, callback_handler)"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nget the list of subscribers to events of the object type.", "response": "def get_subscribers(object_type: str) -> List[str]:\n \"\"\"Get the list of subscribers to events of the object type.\n\n Args:\n object_type (str): Type of object.\n\n Returns:\n List[str], list of subscriber names.\n\n \"\"\"\n return DB.get_list(_keys.subscribers(object_type))"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\npublishes an event to all subscribers and stores the event with the object.", "response": "def publish(event_type: str,\n event_data: dict = None,\n object_type: str = None,\n object_id: str = None,\n object_key: str = None,\n origin: str = None):\n \"\"\"Publish an event.\n\n Published the event to all subscribers and stores the event with 
the\n object.\n\n Args:\n event_type (str): The event type\n event_data (dict, optional): Optional event data\n object_type (str): Type of object.\n object_id (str): Object ID\n object_key (str, optional): Key used to stored the object. If None,\n the default assume the key is of the form <object type>:<object id>\n origin (str): Origin or publisher of the event.\n\n \"\"\"\n event = Event(event_id=_get_event_id(object_type),\n event_type=event_type,\n event_data=event_data,\n event_origin=origin,\n object_type=object_type,\n object_id=object_id,\n object_key=object_key)\n\n # Publish the event to subscribers\n _publish_to_subscribers(event)\n\n # Update the object event list and data.\n if object_key is None:\n object_key = '{}:{}'.format(object_type, object_id)\n _update_object(object_key, event)\n\n # Execute the set of db transactions as an atomic transaction.\n DB.execute()"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting list of event ids for the object with the specified key.", "response": "def _get_events_list(object_key: str) -> List[str]:\n \"\"\"Get list of event ids for the object with the specified key.\n\n Args:\n object_key (str): Key of an object in the database.\n\n \"\"\"\n return DB.get_list(_keys.events_list(object_key))"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets the list of event data for the object with the specified key.", "response": "def _get_events_data(object_key: str) -> List[dict]:\n \"\"\"Get the list of event data for the object with the specified key.\n\n Args:\n object_key (str): Key of an object in the database.\n\n \"\"\"\n events_data = []\n key = _keys.events_data(object_key)\n for event_id in _get_events_list(object_key):\n event_dict = literal_eval(DB.get_hash_value(key, event_id))\n events_data.append(event_dict)\n return events_data"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef 
get_events(object_key: str) -> List[Event]:\n events_data = _get_events_data(object_key)\n return [Event.from_config(event_dict) for event_dict in events_data]", "response": "Get list of events for the object with the specified key."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\npublish and event to all subscribers.", "response": "def _publish_to_subscribers(event: Event):\n \"\"\"Publish and event to all subscribers.\n\n - Adds the event id to the published event list for all subscribers.\n - Adds the event data to the published event data for all subscribers.\n - Publishes the event id notification to all subscribers.\n\n Args:\n event (Event): Event object to publish.\n\n \"\"\"\n subscribers = get_subscribers(event.object_type)\n\n # Add the event to each subscribers published list\n for sub in subscribers:\n DB.prepend_to_list(_keys.published(event.object_type, sub),\n event.id, pipeline=True)\n event_dict = deepcopy(event.config)\n event_dict.pop('id')\n DB.set_hash_value(_keys.data(event.object_type, sub), event.id,\n str(event_dict), pipeline=True)\n DB.publish(event.object_type, event.id, pipeline=True)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _update_object(object_key: str, event: Event):\n events_list_key = _keys.events_list(object_key)\n events_data_key = _keys.events_data(object_key)\n event_dict = deepcopy(event.config)\n event_dict.pop('id')\n DB.append_to_list(events_list_key, event.id, pipeline=True)\n DB.set_hash_value(events_data_key, event.id, json.dumps(event_dict),\n pipeline=True)", "response": "Update the events list and events data for the object."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _get_event_id(object_type: str) -> str:\n key = _keys.event_counter(object_type)\n DB.watch(key, pipeline=True)\n count = DB.get_value(key)\n DB.increment(key)\n DB.execute()\n 
if count is None:\n count = 0\n return '{}_event_{:08d}'.format(object_type, int(count))", "response": "Return an event id for the object type."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ninitializing the PB device.", "response": "def init_device(self):\n \"\"\"Device constructor.\"\"\"\n start_time = time.time()\n Device.init_device(self)\n self._pb_id = ''\n LOG.debug('init PB device %s, time taken %.6f s (total: %.2f s)',\n self.get_name(), (time.time() - start_time),\n (time.time() - self._start_time))\n self.set_state(DevState.STANDBY)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsets the PB Id for this device.", "response": "def pb_id(self, pb_id: str):\n \"\"\"Set the PB Id for this device.\"\"\"\n # FIXME(BMo) instead of creating the object to check if the PB exists\n # use a method on PB List?\n # ProcessingBlock(pb_id)\n self.set_state(DevState.ON)\n self._pb_id = pb_id"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef pb_config(self):\n pb = ProcessingBlock(self._pb_id)\n return json.dumps(pb.config)", "response": "Return the PB configuration."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget the current state timestamp.", "response": "def current_timestamp(self) -> datetime:\n \"\"\"Get the current state timestamp.\"\"\"\n timestamp = DB.get_hash_value(self._key, 'current_timestamp')\n return datetime_from_isoformat(timestamp)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget the target state timestamp.", "response": "def target_timestamp(self) -> datetime:\n \"\"\"Get the target state timestamp.\"\"\"\n timestamp = DB.get_hash_value(self._key, 'target_timestamp')\n return datetime_from_isoformat(timestamp)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef update_target_state(self, value: 
str, force: bool = True) -> datetime:\n value = value.lower()\n if not force:\n current_state = self.current_state\n if current_state == 'unknown':\n raise RuntimeError(\"Unable to set target state when current \"\n \"state is 'unknown'\")\n\n allowed_target_states = self._allowed_target_states[current_state]\n\n LOG.debug('Updating target state of %s to %s', self._id, value)\n\n if value not in allowed_target_states:\n raise ValueError(\"Invalid target state: '{}'. {} can be \"\n \"commanded to states: {}\".\n format(value, current_state,\n allowed_target_states))\n\n return self._update_state('target', value)", "response": "Set the target state of the object."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates the current state of the resource.", "response": "def update_current_state(self, value: str,\n force: bool = False) -> datetime:\n \"\"\"Update the current state.\n\n Args:\n value (str): New value for sdp state\n force (bool): If true, ignore allowed transitions\n\n Returns:\n datetime, update timestamp\n\n Raises:\n ValueError: If the specified current state is not allowed.\n\n \"\"\"\n value = value.lower()\n if not force:\n current_state = self.current_state\n # IF the current state is unknown, it can be set to any of the\n # allowed states, otherwise only allow certain transitions.\n if current_state == 'unknown':\n allowed_transitions = self._allowed_states\n else:\n allowed_transitions = self._allowed_transitions[current_state]\n allowed_transitions.append(current_state)\n\n LOG.debug('Updating current state of %s to %s', self._id, value)\n\n if value not in allowed_transitions:\n raise ValueError(\"Invalid current state update: '{}'. 
'{}' \"\n \"can be transitioned to states: {}\"\n .format(value, current_state,\n allowed_transitions))\n\n return self._update_state('current', value)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _initialise(self, initial_state: str = 'unknown') -> dict:\n initial_state = initial_state.lower()\n if initial_state != 'unknown' and \\\n initial_state not in self._allowed_states:\n raise ValueError('Invalid initial state: {}'.format(initial_state))\n _initial_state = dict(\n current_state=initial_state,\n target_state=initial_state,\n current_timestamp=datetime.utcnow().isoformat(),\n target_timestamp=datetime.utcnow().isoformat())\n return _initial_state", "response": "Return a dictionary used to initialise a state object."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _update_state(self, state_type: str, value: str) -> datetime:\n timestamp = datetime.utcnow()\n field = '{}_state'.format(state_type)\n old_state = DB.get_hash_value(self._key, field)\n DB.set_hash_value(self._key, field, value, pipeline=True)\n DB.set_hash_value(self._key, '{}_timestamp'.format(state_type),\n timestamp.isoformat(), pipeline=True)\n DB.execute()\n\n # Publish an event to notify subscribers of the change in state\n self.publish('{}_state_updated'.format(state_type),\n event_data=dict(state=value, old_state=old_state))\n\n return timestamp", "response": "Update the state of the current or target."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nconverting allowed state transitions / target states to lowercase.", "response": "def _dict_lower(dictionary: dict):\n \"\"\"Convert allowed state transitions / target states to lowercase.\"\"\"\n return {key.lower(): [value.lower() for value in value]\n for key, value in dictionary.items()}"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef 
add_sched_block_instance(self, config_dict):\n # Get schema for validation\n schema = self._get_schema()\n LOG.debug('Adding SBI with config: %s', config_dict)\n\n # Validates the schema\n validate(config_dict, schema)\n\n # Add status field and value to the data\n updated_block = self._add_status(config_dict)\n\n # Splitting into different names and fields before\n # adding to the database\n scheduling_block_data, processing_block_data = \\\n self._split_sched_block_instance(updated_block)\n\n # Adding Scheduling block instance with id\n name = \"scheduling_block:\" + updated_block[\"id\"]\n self._db.set_specified_values(name, scheduling_block_data)\n\n # Add a event to the scheduling block event list to notify\n # of a new scheduling block being added to the db.\n self._db.push_event(self.scheduling_event_name,\n updated_block[\"status\"],\n updated_block[\"id\"])\n\n # Adding Processing block with id\n for value in processing_block_data:\n name = (\"scheduling_block:\" + updated_block[\"id\"] +\n \":processing_block:\" + value['id'])\n self._db.set_specified_values(name, value)\n\n # Add a event to the processing block event list to notify\n # of a new processing block being added to the db.\n self._db.push_event(self.processing_event_name,\n value[\"status\"],\n value[\"id\"])", "response": "Adds a Scheduling Block instance to the database."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget unordered list of scheduling block ids", "response": "def get_sched_block_instance_ids(self):\n \"\"\"Get unordered list of scheduling block ids\"\"\"\n\n # Initialise empty list\n scheduling_block_ids = []\n\n # Pattern used to search scheduling block ids\n pattern = 'scheduling_block:*'\n block_ids = self._db.get_ids(pattern)\n\n for block_id in block_ids:\n if 'processing_block' not in block_id:\n id_split = block_id.split(':')[-1]\n scheduling_block_ids.append(id_split)\n return sorted(scheduling_block_ids)"} {"SOURCE": "codesearchnet", 
"instruction": "Write a Python 3 function that can\nget list of processing block ids using the processing block id", "response": "def get_processing_block_ids(self):\n \"\"\"Get list of processing block ids using the processing block id\"\"\"\n\n # Initialise empty list\n _processing_block_ids = []\n\n # Pattern used to search processing block ids\n pattern = '*:processing_block:*'\n block_ids = self._db.get_ids(pattern)\n\n for block_id in block_ids:\n id_split = block_id.split(':')[-1]\n _processing_block_ids.append(id_split)\n return sorted(_processing_block_ids)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets list of sub array ids", "response": "def get_sub_array_ids(self):\n \"\"\"Get list of sub array ids\"\"\"\n\n # Initialise empty list\n _scheduling_block_ids = []\n _sub_array_ids = []\n\n for blocks_id in self.get_sched_block_instance_ids():\n _scheduling_block_ids.append(blocks_id)\n block_details = self.get_block_details(_scheduling_block_ids)\n for details in block_details:\n _sub_array_ids.append(details['sub_array_id'])\n _sub_array_ids = sorted(list(set(_sub_array_ids)))\n return _sub_array_ids"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget Scheduling Block Instance ID associated with sub array id", "response": "def get_sub_array_sbi_ids(self, sub_array_id):\n \"\"\"Get Scheduling Block Instance ID associated with sub array id\"\"\"\n _ids = []\n sbi_ids = self.get_sched_block_instance_ids()\n for details in self.get_block_details(sbi_ids):\n if details['sub_array_id'] == sub_array_id:\n _ids.append(details['id'])\n return sorted(_ids)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets details of scheduling or processing block.", "response": "def get_block_details(self, block_ids):\n \"\"\"Get details of scheduling or processing block\n\n Args:\n block_ids (list): List of block IDs\n \"\"\"\n # Convert input 
to list if needed\n if not hasattr(block_ids, \"__iter__\"):\n block_ids = [block_ids]\n\n for _id in block_ids:\n block_key = self._db.get_block(_id)[0]\n block_data = self._db.get_all_field_value(block_key)\n # NOTE(BM) unfortunately this doesn't quite work for keys where \\\n # the value is a python type (list, dict etc) \\\n # The following hack works for now but is probably not infallible\n for key in block_data:\n for char in ['[', '{']:\n if char in block_data[key]:\n block_data[key] = ast.literal_eval(\n str(block_data[key]))\n yield block_data"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nupdate the value of the given block id and field", "response": "def update_value(self, block_id, field, value):\n \"\"\"\"Update the value of the given block id and field\"\"\"\n block_name = self._db.get_block(block_id)\n for name in block_name:\n self._db.set_value(name, field, value)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ndeleting the specified Scheduling Block Instance.", "response": "def delete_sched_block_instance(self, block_id):\n \"\"\"Delete the specified Scheduling Block Instance.\n\n Removes the Scheduling Block Instance, and all Processing Blocks\n that belong to it from the database\"\"\"\n LOG.debug('Deleting SBI %s', block_id)\n\n scheduling_blocks = self._db.get_all_blocks(block_id)\n if not scheduling_blocks:\n raise RuntimeError('Scheduling block not found: {}'.\n format(block_id))\n\n if scheduling_blocks:\n for blocks in scheduling_blocks:\n if \"processing_block\" not in blocks:\n self._db.delete_block(blocks)\n else:\n split_key = blocks.split(':')\n self._db.delete_block(blocks)\n\n # Add a event to the processing block event list to notify\n # about deleting from the db\n self._db.push_event(self.processing_event_name, \"deleted\",\n split_key[3])\n\n # Add a event to the scheduling block event list to notify\n # of a deleting a scheduling block from the db\n 
self._db.push_event(self.scheduling_event_name, \"deleted\",\n block_id)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ndeleting a processing block from the database.", "response": "def delete_processing_block(self, processing_block_id):\n \"\"\"Delete Processing Block(s).\n\n Uses Processing Block IDs\n \"\"\"\n LOG.debug(\"Deleting Processing Block %s ...\", processing_block_id)\n processing_block = self._db.get_block(processing_block_id)\n if not processing_block:\n raise RuntimeError('Invalid Processing Block ID: {}'\n .format(processing_block_id))\n\n for block in processing_block:\n if 'processing_block' in block:\n self._db.delete_block(block)\n\n # Remove processing block id from scheduling block id\n scheduling_block_id = block.split(':')\n scheduling_block_details = self.get_block_details(\n [scheduling_block_id[1]])\n for block_details in scheduling_block_details:\n block_list = block_details['processing_block_ids']\n if processing_block_id in block_list:\n block_list.remove(processing_block_id)\n self.update_value(scheduling_block_id[1],\n 'processing_block_ids', block_list)\n\n # Add a event to the processing block event list to notify\n # about deleting from the db\n self._db.push_event(self.processing_event_name, \"deleted\",\n id)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _get_schema():\n schema_path = os.path.join(os.path.dirname(__file__),\n 'schema', 'scheduling_block_schema.json')\n with open(schema_path, 'r') as file:\n schema_data = file.read()\n schema = json.loads(schema_data)\n return schema", "response": "Get the schema for validation"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nsplit the scheduling block data into multiple names before adding to the configuration database", "response": "def _split_sched_block_instance(self, scheduling_block):\n \"\"\"Split the scheduling block data into multiple names\n 
before adding to the configuration database\"\"\"\n\n # Initialise empty list\n _scheduling_block_data = {}\n _processing_block_data = {}\n _processing_block_id = []\n\n for block in scheduling_block:\n values = scheduling_block[block]\n\n if block != 'processing_blocks':\n _scheduling_block_data[block] = values\n else:\n # Check if there is a processing block that already exits in\n # the database\n processing_block_id = self.get_processing_block_ids()\n\n for value in values:\n if value['id'] not in processing_block_id:\n _processing_block_data = values\n else:\n raise Exception(\"Processing block already exits\",\n value['id'])\n\n # Adding processing block id to the scheduling block list\n for block_id in _processing_block_data:\n _processing_block_id.append(block_id['id'])\n _scheduling_block_data['processing_block_ids'] = _processing_block_id\n\n return _scheduling_block_data, _processing_block_data"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef init_logger(logger_name='sip', log_level=None, p3_mode: bool = True,\n show_thread: bool = False, propagate: bool = False,\n show_log_origin=False):\n \"\"\"Initialise the SIP logger.\n\n Attaches a stdout stream handler to the 'sip' logger. This will\n apply to all logger objects with a name prefixed by 'sip.'\n\n This function respects the 'SIP_LOG_LEVEL' environment variable to\n set the logging level.\n\n Args:\n logger_name (str, optional): Name of the logger object.\n log_level (str or int, optional): Logging level for the SIP logger.\n p3_mode (bool, optional): Print logging statements in a format that\n P3 can support.\n show_thread (bool, optional): Display the thread in the log message.\n propagate (bool, optional): Propagate settings to parent loggers.\n show_log_origin (boo, optional): If true show the origin\n (file, line no.) 
of log messages.\n\n \"\"\"\n log = logging.getLogger(logger_name)\n log.propagate = propagate\n\n # Remove existing handlers (avoids duplicate messages)\n for handler in log.handlers:\n log.removeHandler(handler)\n\n _debug = '%(filename)s:%(lineno)d | ' if show_log_origin else ''\n\n # P3 mode is intended to work with the fluentd configuration on P3.\n # This has ms timestamp precision and uses '-' as a delimiter\n # between statements in the log file.\n if p3_mode:\n _prefix = '%(asctime)s - %(name)s - %(levelname)s'\n if show_thread:\n _format = '{} - %(threadName)s - {}%(message)s'\\\n .format(_prefix, _debug)\n else:\n _format = '{} - {}%(message)s'.format(_prefix, _debug)\n formatter = logging.Formatter(_format)\n formatter.converter = time.gmtime\n # If not in P3 mode, the timestamp will be us precision and use '|'\n # as a separator.\n else:\n _prefix = '%(asctime)s | %(name)s | %(levelname)s'\n if show_thread:\n _format = '{} | %(threadName)s | {}%(message)s'\\\n .format(_prefix, _debug)\n else:\n _format = '{} | {}%(message)s'.format(_prefix, _debug)\n formatter = SIPFormatter(_format, datefmt='%Y-%m-%dT%H:%M:%S.%fZ')\n\n handler = logging.StreamHandler(stream=sys.stdout)\n handler.setFormatter(formatter)\n log.addHandler(handler)\n\n # Set the logging level.\n if log_level:\n log.setLevel(log_level)\n else:\n log.setLevel(os.getenv('SIP_LOG_LEVEL', 'DEBUG'))", "response": "Initialises the SIP logger."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndisable output for the specified logger.", "response": "def disable_logger(logger_name: str, propagate: bool = False):\n \"\"\"Disable output for the logger of the specified name.\"\"\"\n log = logging.getLogger(logger_name)\n log.propagate = propagate\n for handler in log.handlers:\n log.removeHandler(handler)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_log_level(logger_name: str, log_level: str, 
propagate: bool = False):\n log = logging.getLogger(logger_name)\n log.propagate = propagate\n log.setLevel(log_level)", "response": "Set the log level of the specified logger."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef formatTime(self, record, datefmt=None):\n _seconds_fraction = record.created - int(record.created)\n _datetime_utc = time.mktime(time.gmtime(record.created))\n _datetime_utc += _seconds_fraction\n _created = self.converter(_datetime_utc)\n\n if datefmt:\n time_string = _created.strftime(datefmt)\n else:\n time_string = _created.strftime('%Y-%m-%dT%H:%M:%S.%fZ')\n time_string = \"%s,%03d\" % (time_string, record.msecs)\n return time_string", "response": "Format the log timestamp."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngenerating a SBI config JSON string.", "response": "def generate_sbi(index: int = None):\n \"\"\"Generate a SBI config JSON string.\"\"\"\n date = datetime.datetime.utcnow().strftime('%Y%m%d')\n if index is None:\n index = randint(0, 999)\n sbi_id = 'SBI-{}-sip-demo-{:03d}'.format(date, index)\n sb_id = 'SBI-{}-sip-demo-{:03d}'.format(date, index)\n pb_id = 'PB-{}-sip-demo-{:03d}'.format(date, index)\n print('* Generating SBI: %s, PB: %s' % (sb_id, pb_id))\n sbi = dict(\n id=sbi_id,\n version='1.0.0',\n scheduling_block=dict(\n id=sb_id,\n project='sip',\n programme_block='sip_demos'\n ),\n processing_blocks=[\n dict(\n id=pb_id,\n version='1.0.0',\n type='offline',\n priority=1,\n dependencies=[],\n resources_required=[],\n workflow=dict(\n id='mock_workflow',\n version='1.0.0',\n parameters=dict(\n stage1=dict(duration=30),\n stage2=dict(duration=30),\n stage3=dict(duration=30)\n )\n )\n )\n ]\n )\n return sbi"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef send(self, config, log, obs_id, beam_id):\n log.info('Starting Pulsar Data Transfer...')\n socket = 
self._ftp.transfercmd('STOR {0}_{1}'.format(obs_id, beam_id))\n socket.send(json.dumps(config).encode())\n socket.send(bytearray(1000 * 1000))\n\n # Overwrites the metadata name in the config dict\n # and re-sends the data to the receiver.\n config['metadata']['name'] = 'candidate_two'\n socket.send(json.dumps(config).encode())\n socket.send(bytearray(1000 * 1000))\n socket.close()\n log.info('Pulsar Data Transfer Completed...')", "response": "Send the pulsar data to the server"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get(block_id):\n _url = get_root_url()\n try:\n block = DB.get_block_details([block_id]).__next__()\n response = block\n\n response['links'] = {\n 'self': '{}'.format(request.url),\n 'list': '{}/processing-blocks'.format(_url),\n 'home': '{}'.format(_url)\n }\n return block\n except IndexError as error:\n response = dict(message='Unable to GET Processing Block',\n id='{}'.format(block_id),\n error=error.__str__())\n response['links'] = {\n 'list': '{}/processing-blocks'.format(_url),\n 'home': '{}'.format(_url)\n }\n return response, HTTPStatus.NOT_FOUND", "response": "Get a single Processing Block detail resource."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ndelete a Processing Block", "response": "def delete(block_id):\n \"\"\"Processing block detail resource.\"\"\"\n _url = get_root_url()\n try:\n DB.delete_processing_block(block_id)\n response = dict(message='Deleted block',\n id='{}'.format(block_id),\n links=dict(list='{}/processing-blocks'.format(_url),\n home='{}'.format(_url)))\n return response, HTTPStatus.OK\n except RuntimeError as error:\n response = dict(error='Failed to delete Processing Block: {}'.\n format(block_id),\n reason=str(error),\n links=dict(list='{}/processing-blocks'.format(_url),\n home='{}'.format(_url)))\n return response, HTTPStatus.OK"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nconfigure an SBI 
for this subarray.", "response": "def configure(self, sbi_config: str):\n \"\"\"Configure an SBI for this subarray.\n\n Args:\n sbi_config (str): SBI configuration JSON\n\n Returns:\n str,\n\n \"\"\"\n # print(sbi_config)\n config_dict = json.loads(sbi_config)\n self.debug_stream('SBI configuration:\\n%s',\n json.dumps(config_dict, indent=2))\n try:\n sbi = Subarray(self.get_name()).configure_sbi(config_dict)\n except jsonschema.exceptions.ValidationError as error:\n return json.dumps(dict(path=error.absolute_path.__str__(),\n schema_path=error.schema_path.__str__(),\n message=error.message), indent=2)\n except RuntimeError as error:\n return json.dumps(dict(error=str(error)), indent=2)\n return 'Accepted SBI: {}'.format(sbi.id)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef processing_blocks(self):\n sbi_ids = Subarray(self.get_name()).sbi_ids\n pbs = []\n for sbi_id in sbi_ids:\n sbi = SchedulingBlockInstance(sbi_id)\n pbs.append(sbi.processing_block_ids)\n return 'PB', pbs", "response": "Return list of PBs associated with the subarray."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a datetime object from an ISO - 8601 string.", "response": "def datetime_from_isoformat(value: str):\n \"\"\"Return a datetime object from an isoformat string.\n\n Args:\n value (str): Datetime string in isoformat.\n\n \"\"\"\n if sys.version_info >= (3, 7):\n return datetime.fromisoformat(value)\n\n return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ninitializes the device proxy.", "response": "def init_device(self):\n \"\"\"Device constructor.\"\"\"\n Device.init_device(self)\n\n # Add anything here that has to be done before the device is set to\n # its ON state.\n\n self._set_master_state('on')\n self._devProxy = DeviceProxy(self.get_name())"} {"SOURCE": "codesearchnet", "instruction": 
"Create a Python 3 function to\nrun for each command.", "response": "def always_executed_hook(self):\n \"\"\"Run for each command.\"\"\"\n _logT = self._devProxy.get_logging_target()\n if 'device::sip_sdp_logger' not in _logT:\n try:\n self._devProxy.add_logging_target('device::sip_sdp/elt/logger')\n self.info_stream(\"Test of Tango logging from \"\n \"'tc_tango_master'\")\n\n except Exception as e:\n LOG.debug('Failed to setup Tango logging %s', e )"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef configure(self, value):\n\n # Only accept new SBIs if the SDP is on.\n if self._sdp_state.current_state != 'on':\n raise RuntimeWarning('Unable to configure SBIs unless SDP is '\n '\\'on\\'.')\n\n # Check that the new SBI is not already registered.\n sbi_config_dict = json.loads(value)\n sbi_list = SchedulingBlockInstanceList()\n LOG.info('SBIs before: %s', sbi_list.active)\n if sbi_config_dict.get('id') in sbi_list.active:\n raise RuntimeWarning('Unable to add SBI with ID {}, an SBI with '\n 'this ID is already registered with SDP!'\n .format(sbi_config_dict.get('id')))\n\n # Add the SBI to the dictionary.\n LOG.info('Scheduling offline SBI! 
Config:\\n%s', value)\n sbi = SchedulingBlockInstance.from_config(sbi_config_dict)\n LOG.info('SBIs after: %s', sbi_list.active)\n\n sbi_pb_ids = sbi.processing_block_ids\n LOG.info('SBI \"%s\" contains PBs: %s', sbi.id, sbi_pb_ids)\n # pb_list = ProcessingBlockList()\n # LOG.info('Active PBs: %s', pb_list.active)\n\n # Get the list and number of Tango PB devices\n tango_db = Database()\n pb_device_class = \"ProcessingBlockDevice\"\n pb_device_server_instance = \"processing_block_ds/1\"\n pb_devices = tango_db.get_device_name(pb_device_server_instance,\n pb_device_class)\n LOG.info('Number of PB devices in the pool = %d', len(pb_devices))\n\n # Get a PB device which has not been assigned.\n for pb_id in sbi_pb_ids:\n for pb_device_name in pb_devices:\n device = DeviceProxy(pb_device_name)\n\n if not device.pb_id:\n LOG.info('Assigning PB device = %s to PB id = %s',\n pb_device_name, pb_id)\n # Set the device attribute 'pb_id' to the processing block\n # id it is tracking.\n device.pb_id = pb_id\n break", "response": "Configure an offline only SBI with SDP."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting the Service state object for the specified id.", "response": "def _get_service_state(service_id: str):\n \"\"\"Get the Service state object for the specified id.\"\"\"\n LOG.debug('Getting state of service %s', service_id)\n services = get_service_id_list()\n service_ids = [s for s in services if service_id in s]\n if len(service_ids) != 1:\n return 'Service not found! 
services = {}'.format(str(services))\n subsystem, name, version = service_ids[0].split(':')\n return ServiceState(subsystem, name, version)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget the state of a SDP service.", "response": "def get_current_service_state(self, service_id: str):\n \"\"\"Get the state of a SDP service.\"\"\"\n state = self._get_service_state(service_id)\n return state.current_state"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get_target_service_state(self, service_id: str):\n state = self._get_service_state(service_id)\n return state.target_state", "response": "Get the state of a SDP service."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a list of allowed target states for the current state.", "response": "def allowed_target_sdp_states(self):\n \"\"\"Return a list of allowed target states for the current state.\"\"\"\n _current_state = self._sdp_state.current_state\n _allowed_target_states = self._sdp_state.allowed_target_states[\n _current_state]\n return json.dumps(dict(allowed_target_sdp_states=\n _allowed_target_states))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef target_sdp_state(self, state):\n LOG.info('Setting SDP target state to %s', state)\n if self._sdp_state.current_state == state:\n LOG.info('Target state ignored, SDP is already \"%s\"!', state)\n if state == 'on':\n self.set_state(DevState.ON)\n if state == 'off':\n self.set_state(DevState.OFF)\n if state == 'standby':\n self.set_state(DevState.STANDBY)\n if state == 'disable':\n self.set_state(DevState.DISABLE)\n self._sdp_state.update_target_state(state)", "response": "Update the target state of the SDP."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a JSON dict encoding the SBIs known to SDP.", "response": "def 
scheduling_block_instances(self):\n \"\"\"Return the a JSON dict encoding the SBIs known to SDP.\"\"\"\n # TODO(BMo) change this to a pipe?\n sbi_list = SchedulingBlockInstanceList()\n return json.dumps(dict(active=sbi_list.active,\n completed=sbi_list.completed,\n aborted=sbi_list.aborted))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef processing_blocks(self):\n pb_list = ProcessingBlockList()\n # TODO(BMo) realtime, offline etc.\n return json.dumps(dict(active=pb_list.active,\n completed=pb_list.completed,\n aborted=pb_list.aborted))", "response": "Return a JSON dict encoding the PBs known to SDP."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget list of processing block devices.", "response": "def processing_block_devices(self):\n \"\"\"Get list of processing block devices.\"\"\"\n # Get the list and number of Tango PB devices\n tango_db = Database()\n pb_device_class = \"ProcessingBlockDevice\"\n pb_device_server_instance = \"processing_block_ds/1\"\n pb_devices = tango_db.get_device_name(pb_device_server_instance,\n pb_device_class)\n LOG.info('Number of PB devices in the pool = %d', len(pb_devices))\n\n pb_device_map = []\n for pb_device_name in pb_devices:\n device = DeviceProxy(pb_device_name)\n if device.pb_id:\n LOG.info('%s %s', pb_device_name, device.pb_id)\n pb_device_map.append((pb_device_name, device.pb_id))\n\n return str(pb_device_map)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _set_master_state(self, state):\n if state == 'init':\n self._service_state.update_current_state('init', force=True)\n self.set_state(DevState.INIT)\n\n elif state == 'on':\n self.set_state(DevState.ON)\n self._service_state.update_current_state('on')", "response": "Set the state of the SDPMaster."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds any 
missing SBI workflow definitions as placeholders.", "response": "def add_workflow_definitions(sbi_config: dict):\n \"\"\"Add any missing SBI workflow definitions as placeholders.\n\n This is a utility function used in testing and adds mock / test workflow\n definitions to the database for workflows defined in the specified\n SBI config.\n\n Args:\n sbi_config (dict): SBI configuration dictionary.\n\n \"\"\"\n registered_workflows = []\n for i in range(len(sbi_config['processing_blocks'])):\n workflow_config = sbi_config['processing_blocks'][i]['workflow']\n workflow_name = '{}:{}'.format(workflow_config['id'],\n workflow_config['version'])\n if workflow_name in registered_workflows:\n continue\n workflow_definition = dict(\n id=workflow_config['id'],\n version=workflow_config['version'],\n stages=[]\n )\n key = \"workflow_definitions:{}:{}\".format(workflow_config['id'],\n workflow_config['version'])\n DB.save_dict(key, workflow_definition, hierarchical=False)\n registered_workflows.append(workflow_name)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef generate_version(max_major: int = 1, max_minor: int = 7,\n max_patch: int = 15) -> str:\n \"\"\"Select a random version.\n\n Args:\n max_major (int, optional) maximum major version\n max_minor (int, optional) maximum minor version\n max_patch (int, optional) maximum patch version\n\n Returns:\n str, Version String\n\n \"\"\"\n major = randint(0, max_major)\n minor = randint(0, max_minor)\n patch = randint(0, max_patch)\n return '{:d}.{:d}.{:d}'.format(major, minor, patch)", "response": "Select a random version."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ngenerate a SBI Scheduling Block data object.", "response": "def generate_sb(date: datetime.datetime, project: str,\n programme_block: str) -> dict:\n \"\"\"Generate a Scheduling Block data object.\n\n Args:\n date (datetime.datetime): UTC date of the SBI\n project (str): Project 
Name\n programme_block (str): Programme\n\n Returns:\n str, Scheduling Block Instance (SBI) ID.\n\n \"\"\"\n date = date.strftime('%Y%m%d')\n instance_id = randint(0, 9999)\n sb_id = 'SB-{}-{}-{:04d}'.format(date, project, instance_id)\n return dict(id=sb_id, project=project, programme_block=programme_block)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef generate_pb_config(pb_id: str,\n pb_config: dict = None,\n workflow_config: dict = None) -> dict:\n \"\"\"Generate a PB configuration dictionary.\n\n Args:\n pb_id (str): Processing Block Id\n pb_config (dict, optional) PB configuration.\n workflow_config (dict, optional): Workflow configuration\n\n Returns:\n dict, PB configuration dictionary.\n\n \"\"\"\n if workflow_config is None:\n workflow_config = dict()\n if pb_config is None:\n pb_config = dict()\n pb_type = pb_config.get('type', choice(PB_TYPES))\n workflow_id = workflow_config.get('id')\n if workflow_id is None:\n if pb_type == 'offline':\n workflow_id = choice(OFFLINE_WORKFLOWS)\n else:\n workflow_id = choice(REALTIME_WORKFLOWS)\n workflow_version = workflow_config.get('version', generate_version())\n workflow_parameters = workflow_config.get('parameters', dict())\n pb_data = dict(\n id=pb_id,\n version=__pb_version__,\n type=pb_type,\n priority=pb_config.get('priority', randint(0, 10)),\n dependencies=pb_config.get('dependencies', []),\n resources_required=pb_config.get('resources_required', []),\n workflow=dict(\n id=workflow_id,\n version=workflow_version,\n parameters=workflow_parameters\n )\n )\n return pb_data", "response": "Generate a PB configuration dictionary."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef generate_sbi_config(num_pbs: int = 3, project: str = 'sip',\n programme_block: str = 'sip_demos',\n pb_config: Union[dict, List[dict]] = None,\n workflow_config:\n Union[dict, List[dict]] = None,\n 
register_workflows=False) -> dict:\n \"\"\"Generate a SBI configuration dictionary.\n\n Args:\n num_pbs (int, optional): Number of Processing Blocks (default = 3)\n project (str, optional): Project to associate the SBI with.\n programme_block (str, optional): SBI programme block\n pb_config (dict, List[dict], optional): PB configuration\n workflow_config (dict, List[dict], optional): Workflow configuration\n register_workflows (bool, optional): If true also register workflows.\n\n Returns:\n dict, SBI configuration dictionary\n\n \"\"\"\n if isinstance(workflow_config, dict):\n workflow_config = [workflow_config]\n if isinstance(pb_config, dict):\n pb_config = [pb_config]\n utc_now = datetime.datetime.utcnow()\n pb_list = []\n for i in range(num_pbs):\n pb_id = ProcessingBlock.get_id(utc_now)\n if workflow_config is not None:\n _workflow_config = workflow_config[i]\n else:\n _workflow_config = None\n if pb_config is not None:\n _pb_config = pb_config[i]\n else:\n _pb_config = None\n pb_dict = generate_pb_config(pb_id, _pb_config, _workflow_config)\n pb_list.append(pb_dict)\n sbi_config = dict(\n id=SchedulingBlockInstance.get_id(utc_now, project),\n version=__sbi_version__,\n scheduling_block=generate_sb(utc_now, project, programme_block),\n processing_blocks=pb_list\n )\n if register_workflows:\n add_workflow_definitions(sbi_config)\n return sbi_config", "response": "Generate a SBI configuration dictionary."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef generate_sbi_json(num_pbs: int = 3, project: str = 'sip',\n programme_block: str = 'sip_demos',\n pb_config: Union[dict, List[dict]] = None,\n workflow_config:\n Union[dict, List[dict]] = None,\n register_workflows=True) -> str:\n \"\"\"Return a JSON string used to configure an SBI.\"\"\"\n return json.dumps(generate_sbi_config(num_pbs, project, programme_block,\n pb_config,\n workflow_config, register_workflows))", "response": "Generates a 
JSON string used to configure an SBI."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef log(self, argin):\n #\n # Tango Manual Appendix 9 gives the format\n # argin[0] = millisecond Unix timestamp\n # argin[1] = log level\n # argin[2] = the source log device name\n # argin[3] = the log message\n # argin[4] = Not used - reserved\n # argin[5] = thread identifier of originating message\n\n tm = datetime.datetime.fromtimestamp(float(argin[0])/1000.)\n fmt = \"%Y-%m-%d %H:%M:%S\"\n message = \"TANGO Log message - {} - {} {} {}\".format(\n tm.strftime(fmt), argin[1], argin[2], argin[3])\n LOG.info(message)", "response": "Log a command for the SDP STango ubsystem devices."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget the Scheduling block detail resource.", "response": "def get(block_id):\n \"\"\"Scheduling block detail resource.\"\"\"\n try:\n sbi_details = DB.get_block_details([block_id]).__next__()\n sbi_details['processing_blocks'] = []\n pb_ids = sbi_details['processing_block_ids']\n for pb_details in DB.get_block_details(pb_ids):\n sbi_details['processing_blocks'].append(pb_details)\n del sbi_details['processing_block_ids']\n response = sbi_details\n _url = get_root_url()\n response['links'] = {\n 'scheduling-blocks': '{}/scheduling-blocks'.format(_url),\n 'home': '{}'.format(_url)\n }\n return sbi_details, HTTPStatus.OK\n\n except IndexError:\n return {'error': 'specified block id not found {}'.format(block_id)}, \\\n HTTPStatus.NOT_FOUND"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ndeletes the Scheduling block instance with the given ID.", "response": "def delete(block_id):\n \"\"\"Scheduling block detail resource.\"\"\"\n _url = get_root_url()\n LOG.debug('Requested delete of SBI %s', block_id)\n try:\n DB.delete_sched_block_instance(block_id)\n response = dict(message='Deleted block: _id = {}'.format(block_id))\n 
response['_links'] = {\n 'list': '{}/scheduling-blocks'.format(_url)\n }\n return response, HTTPStatus.OK\n except RuntimeError as error:\n return dict(error=str(error)), HTTPStatus.BAD_REQUEST"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngenerates Scheduling Block instance IDs", "response": "def _scheduling_block_ids(num_blocks, start_id, project):\n \"\"\"Generate Scheduling Block instance ID\"\"\"\n for i in range(num_blocks):\n _root = '{}-{}'.format(strftime(\"%Y%m%d\", gmtime()), project)\n yield '{}-sb{:03d}'.format(_root, i + start_id), \\\n '{}-sbi{:03d}'.format(_root, i + start_id)"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngenerating a number of Processing Blocks", "response": "def _generate_processing_blocks(start_id, min_blocks=0, max_blocks=4):\n \"\"\"Generate a number of Processing Blocks\"\"\"\n processing_blocks = []\n num_blocks = random.randint(min_blocks, max_blocks)\n for i in range(start_id, start_id + num_blocks):\n _id = 'sip-pb{:03d}'.format(i)\n block = dict(id=_id, resources_requirement={}, workflow={})\n processing_blocks.append(block)\n return processing_blocks"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _scheduling_block_config(num_blocks=5, start_sbi_id=0, start_pb_id=0,\n project='sip'):\n \"\"\"Return a Scheduling Block Configuration dictionary\"\"\"\n pb_id = start_pb_id\n for sb_id, sbi_id in _scheduling_block_ids(num_blocks, start_sbi_id,\n project):\n sub_array_id = 'subarray-{:02d}'.format(random.choice(range(5)))\n config = dict(id=sbi_id,\n sched_block_id=sb_id,\n sub_array_id=sub_array_id,\n processing_blocks=_generate_processing_blocks(pb_id))\n pb_id += len(config['processing_blocks'])\n yield config", "response": "Generate a Scheduling Block Configuration dictionary"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it 
does\ndef add_scheduling_blocks(num_blocks, clear=True):\n db_client = ConfigDb()\n if clear:\n LOG.info('Resetting database ...')\n db_client.clear()\n start_sbi_id = 0\n start_pb_id = 0\n else:\n start_sbi_id = len(db_client.get_sched_block_instance_ids())\n start_pb_id = len(db_client.get_processing_block_ids())\n\n LOG.info(\"Adding %i SBIs to the db\", num_blocks)\n for config in _scheduling_block_config(num_blocks, start_sbi_id,\n start_pb_id):\n LOG.info('Creating SBI %s with %i PBs.', config['id'],\n len(config['processing_blocks']))\n db_client.add_sched_block_instance(config)", "response": "Add a number of scheduling blocks to the db."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef main(args=None, **kwargs):\n LOG.info('Starting SDP PB devices.')\n return run([ProcessingBlockDevice], verbose=True, msg_stream=sys.stdout,\n args=args, **kwargs)", "response": "Start the Processing Block device server."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngenerates a Processing Block Instance ID.", "response": "def get_id(date: datetime.datetime) -> str:\n \"\"\"Generate a Processing Block (PB) Instance ID.\n\n Args:\n date (datetime.datetime): UTC date of the PB\n\n Returns:\n str, Processing Block ID\n\n \"\"\"\n date = date.strftime('%Y%m%d')\n return 'PB-{}-{}-{:03d}'.format(date, 'sip', randint(0, 100))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef dependencies(self) -> List[Dependency]:\n dependencies_str = DB.get_hash_value(self.key, 'dependencies')\n dependencies = []\n for dependency in ast.literal_eval(dependencies_str):\n dependencies.append(Dependency(dependency))\n return dependencies", "response": "Return the PB dependencies."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn list of resources assigned to the PB.", "response": "def 
resources_assigned(self) -> List[Resource]:\n \"\"\"Return list of resources assigned to the PB.\"\"\"\n resources_str = DB.get_hash_value(self.key, 'resources_assigned')\n resources_assigned = []\n for resource in ast.literal_eval(resources_str):\n resources_assigned.append(Resource(resource))\n return resources_assigned"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef workflow_stages(self) -> List[WorkflowStage]:\n workflow_stages = []\n stages = DB.get_hash_value(self.key, 'workflow_stages')\n for index in range(len(ast.literal_eval(stages))):\n workflow_stages.append(WorkflowStage(self.id, index))\n return workflow_stages", "response": "Return list of workflow stages."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef add_assigned_resource(self, resource_type: str,\n value: Union[str, int, float, bool],\n parameters: dict = None):\n \"\"\"Add assigned resource to the processing block.\n\n Args:\n resource_type (str): Resource type\n value: Resource value\n parameters (dict, optional): Parameters specific to the resource\n\n \"\"\"\n if parameters is None:\n parameters = dict()\n resources = DB.get_hash_value(self.key, 'resources_assigned')\n resources = ast.literal_eval(resources)\n resources.append(dict(type=resource_type, value=value,\n parameters=parameters))\n DB.set_hash_value(self.key, 'resources_assigned', resources)", "response": "Adds assigned resource to the processing block."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nremove assigned resources from the processing block.", "response": "def remove_assigned_resource(self, resource_type: str,\n value: Union[str, int, float, bool] = None,\n parameters: dict = None):\n \"\"\"Remove assigned resources from the processing block.\n\n All matching resources will be removed. 
If only type is specified\n all resources of the specified type will be removed.\n If value and/or parameters are specified they will be used\n for matching the resource to remove.\n\n Args:\n resource_type (str): Resource type\n value: Resource value\n parameters (dict, optional): Parameters specific to the resource\n\n \"\"\"\n resources = DB.get_hash_value(self.key, 'resources_assigned')\n resources = ast.literal_eval(resources)\n new_resources = []\n for resource in resources:\n if resource['type'] != resource_type:\n new_resources.append(resource)\n elif value is not None and resource['value'] != value:\n new_resources.append(resource)\n elif parameters is not None and \\\n resource['parameters'] != parameters:\n new_resources.append(resource)\n DB.set_hash_value(self.key, 'resources_assigned', new_resources)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the updated timestamp.", "response": "def _mark_updated(self):\n \"\"\"Update the updated timestamp.\"\"\"\n timestamp = datetime.datetime.utcnow().isoformat()\n DB.set_hash_value(self.key, 'updated', timestamp)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef get():\n _url = get_root_url()\n LOG.debug('GET Sub array list')\n\n sub_array_ids = sorted(DB.get_sub_array_ids())\n\n response = dict(sub_arrays=[])\n for array_id in sub_array_ids:\n array_summary = dict(sub_arrary_id=array_id)\n block_ids = DB.get_sub_array_sbi_ids(array_id)\n LOG.debug('Subarray IDs: %s', array_id)\n LOG.debug('SBI IDs: %s', block_ids)\n array_summary['num_scheduling_blocks'] = len(block_ids)\n array_summary['links'] = {\n 'detail': '{}/sub-array/{}'.format(_url, array_id)\n }\n response['sub_arrays'].append(array_summary)\n response['links'] = dict(self=request.url, home=_url)\n return response, status.HTTP_200_OK", "response": "Subarray list. 
This method will list all sub - arrays known to SDP."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting the subarray parameters.", "response": "def set_parameters(self, parameters_dict):\n \"\"\"Set the subarray parameters.\n\n Args:\n parameters_dict (dict): Dictionary of Subarray parameters\n \"\"\"\n DB.set_hash_value(self._key, 'parameters', parameters_dict)\n self.publish(\"parameters_updated\")"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef sbi_ids(self) -> List[str]:\n return ast.literal_eval(DB.get_hash_value(self._key, 'sbi_ids'))", "response": "Get the list of SBI Ids associated with this subarray."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nadding a new SBI to the database associated with this subarray.", "response": "def configure_sbi(self, sbi_config: dict, schema_path: str = None):\n \"\"\"Add a new SBI to the database associated with this subarray.\n\n Args:\n sbi_config (dict): SBI configuration.\n schema_path (str, optional): Path to the SBI config schema.\n\n \"\"\"\n if not self.active:\n raise RuntimeError(\"Unable to add SBIs to inactive subarray!\")\n sbi_config['subarray_id'] = self._id\n sbi = SchedulingBlockInstance.from_config(sbi_config, schema_path)\n self._add_sbi_id(sbi_config['id'])\n return sbi"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef abort(self):\n for sbi_id in self.sbi_ids:\n sbi = SchedulingBlockInstance(sbi_id)\n sbi.abort()\n self.set_state('ABORTED')", "response": "Abort all SBIs associated with the subarray."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nremoving an SBI Identifier from the SBI cache.", "response": "def remove_sbi_id(self, sbi_id):\n \"\"\"Remove an SBI Identifier.\"\"\"\n sbi_ids = self.sbi_ids\n sbi_ids.remove(sbi_id)\n DB.set_hash_value(self._key, 'sbi_ids', 
sbi_ids)"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds a SBI Identifier.", "response": "def _add_sbi_id(self, sbi_id):\n \"\"\"Add a SBI Identifier.\"\"\"\n sbi_ids = self.sbi_ids\n sbi_ids.append(sbi_id)\n DB.set_hash_value(self._key, 'sbi_ids', sbi_ids)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get(self) -> Union[Event, None]:\n message = self._queue.get_message()\n if message and message['type'] == 'message':\n event_id = DB.get_event(self._pub_key, self._processed_key)\n event_data_str = DB.get_hash_value(self._data_key, event_id)\n event_dict = ast.literal_eval(event_data_str)\n event_dict['id'] = event_id\n event_dict['subscriber'] = self._subscriber\n return Event.from_config(event_dict)\n return None", "response": "Get the latest event from the queue."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_published_events(self, process=True) -> List[Event]:\n LOG.debug('Getting published events (%s)', self._pub_key)\n if process:\n LOG.debug('Marking returned published events as processed.')\n DB.watch(self._pub_key, pipeline=True)\n event_ids = DB.get_list(self._pub_key, pipeline=True)\n if event_ids:\n DB.delete(self._pub_key, pipeline=True)\n DB.append_to_list(self._processed_key, *event_ids,\n pipeline=True)\n DB.execute()\n else:\n event_ids = DB.get_list(self._pub_key)\n\n events = []\n for event_id in event_ids[::-1]:\n event_str = DB.get_hash_value(self._data_key, event_id)\n event_dict = ast.literal_eval(event_str)\n event_dict['id'] = event_id\n event = Event.from_config(event_dict)\n LOG.debug('Loaded event: %s (%s)', event.id, event.type)\n events.append(event)\n return events", "response": "Get a list of published events."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget all processed events.", "response": "def 
get_processed_events(self) -> List[Event]:\n \"\"\"Get all processed events.\n\n This method is intended to be used to recover events stuck in the\n processed state which could happen if an event handling processing\n an processed event goes down before completing the event processing.\n\n Returns:\n list[Events], list of event objects.\n\n \"\"\"\n event_ids = DB.get_list(self._processed_key)\n events = []\n for event_id in event_ids:\n event_str = DB.get_hash_value(self._data_key, event_id)\n event_dict = ast.literal_eval(event_str)\n event_dict['id'] = event_id\n event_dict['subscriber'] = self._subscriber\n events.append(Event.from_config(event_dict))\n return events"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef complete_event(self, event_id: str):\n event_ids = DB.get_list(self._processed_key)\n if event_id not in event_ids:\n raise KeyError('Unable to complete event. Event {} has not been '\n 'processed (ie. 
it is not in the processed '\n 'list).'.format(event_id))\n DB.remove_from_list(self._processed_key, event_id, pipeline=True)\n key = _keys.completed_events(self._object_type, self._subscriber)\n DB.append_to_list(key, event_id, pipeline=True)\n DB.execute()", "response": "Complete the specified event."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nadds a workflow definition to the Configuration Database.", "response": "def add(workflow_definition: dict, templates_root: str):\n \"\"\"Add a workflow definition to the Configuration Database.\n\n Templates are expected to be found in a directory tree with the following\n structure:\n\n - workflow_id:\n |- workflow_version\n |- stage_id\n |- stage_version\n |- <templates>\n\n Args:\n workflow_definition (dict): Workflow definition.\n templates_root (str): Workflow templates root path\n\n \"\"\"\n schema_path = join(dirname(__file__), 'schema', 'workflow_definition.json')\n with open(schema_path, 'r') as file:\n schema = json.loads(file.read())\n\n jsonschema.validate(workflow_definition, schema)\n\n _id = workflow_definition['id']\n _version = workflow_definition['version']\n _load_templates(workflow_definition, templates_root)\n\n workflow_id = workflow_definition['id']\n version = workflow_definition['version']\n name = \"workflow_definitions:{}:{}\".format(workflow_id, version)\n\n if DB.get_keys(name):\n raise KeyError('Workflow definition already exists: {}'.format(name))\n\n # DB.set_hash_values(name, workflow_definition)\n DB.save_dict(name, workflow_definition, hierarchical=False)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nregisters an empty workflow definition in the database.", "response": "def register(workflow_id, workflow_version):\n \"\"\"Register an (empty) workflow definition in the database.\"\"\"\n name = \"workflow_definitions:{}:{}\".format(workflow_id, workflow_version)\n workflow_definition = 
dict(id=workflow_id, version=workflow_version,\n stages=[])\n # DB.set_hash_values(name, workflow_definition)\n DB.save_dict(name, workflow_definition, hierarchical=False)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ndeletes all the workflow_id and version from the database.", "response": "def delete(workflow_id: str = None, workflow_version: str = None):\n \"\"\"Delete workflow definitions.\n\n Args:\n workflow_id (str, optional): Optional workflow identifier\n workflow_version (str, optional): Optional workflow identifier version\n\n If workflow_id and workflow_version are None, delete all workflow\n definitions.\n\n \"\"\"\n if workflow_id is None and workflow_version is None:\n keys = DB.get_keys(\"workflow_definitions:*\")\n DB.delete(*keys)\n elif workflow_id is not None and workflow_version is None:\n keys = DB.get_keys(\"workflow_definitions:{}:*\".format(workflow_id))\n DB.delete(*keys)\n elif workflow_id is None and workflow_version is not None:\n keys = DB.get_keys(\"workflow_definitions:*:{}\"\n .format(workflow_version))\n DB.delete(*keys)\n else:\n name = \"workflow_definitions:{}:{}\".format(workflow_id,\n workflow_version)\n DB.delete(name)"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets a workflow definition from the Configuration Database.", "response": "def get_workflow(workflow_id: str, workflow_version: str) -> dict:\n \"\"\"Get a workflow definition from the Configuration Database.\n\n Args:\n workflow_id (str): Workflow identifier\n workflow_version (str): Workflow version\n\n Returns:\n dict, Workflow definition dictionary\n\n \"\"\"\n name = \"workflow_definitions:{}:{}\".format(workflow_id, workflow_version)\n workflow = DB.get_hash_dict(name)\n workflow['stages'] = ast.literal_eval(workflow['stages'])\n return workflow"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting dict of ALL known workflow 
definitions. Returns list of dictionaries", "response": "def get_workflows() -> dict:\n \"\"\"Get dict of ALL known workflow definitions.\n\n Returns\n list[dict]\n\n \"\"\"\n keys = DB.get_keys(\"workflow_definitions:*\")\n known_workflows = dict()\n for key in keys:\n values = key.split(':')\n if values[1] not in known_workflows:\n known_workflows[values[1]] = list()\n known_workflows[values[1]].append(values[2])\n\n return known_workflows"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndeletes PBs devices from the Tango database.", "response": "def delete_pb_devices():\n \"\"\"Delete PBs devices from the Tango database.\"\"\"\n parser = argparse.ArgumentParser(description='Register PB devices.')\n parser.add_argument('num_pb', type=int,\n help='Number of PBs devices to register.')\n args = parser.parse_args()\n\n log = logging.getLogger('sip.tango_control.subarray')\n tango_db = Database()\n log.info(\"Deleting PB devices:\")\n for index in range(args.num_pb):\n name = 'sip_sdp/pb/{:05d}'.format(index)\n log.info(\"\\t%s\", name)\n tango_db.delete_device(name)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nblocks function to populate data in the heap.", "response": "def fill_buffer(heap_data, i_chan):\n \"\"\"Blocking function to populate data in the heap.\n This is run in an executor.\n \"\"\"\n # Calculate the time count and fraction.\n now = datetime.datetime.utcnow()\n time_full = now.timestamp()\n time_count = int(time_full)\n time_fraction = int((time_full - time_count) * (2**32 - 1))\n diff = now - (now.replace(hour=0, minute=0, second=0, microsecond=0))\n time_data = diff.seconds + 1e-6 * diff.microseconds\n\n # Write the data into the buffer.\n heap_data['visibility_timestamp_count'] = time_count\n heap_data['visibility_timestamp_fraction'] = time_fraction\n heap_data['correlator_output_data']['VIS'][:][:] = \\\n time_data + i_chan * 1j"} {"SOURCE": 
"codesearchnet", "instruction": "Make a summary of the following Python 3 code\nasync def _run_loop(self, executor):\n # SPEAD heap descriptor.\n # One channel per stream and one time index per heap.\n max_duration_sec = self._config['max_duration_sec']\n num_stations = self._config['heap']['num_stations']\n num_pols = self._config['heap']['num_pols']\n num_baselines = num_stations * (num_stations + 1) // 2\n reporting_interval_sec = self._config['reporting_interval_sec']\n start_chan = self._config['start_channel']\n dtype = [('TCI', 'i1'), ('FD', 'u1'), ('VIS', '<c8', num_pols)]\n descriptor = {\n 'visibility_timestamp_count': {\n 'id': 0x6000,\n 'dtype': '<u4',\n },\n 'visibility_timestamp_fraction': {\n 'id': 0x6001,\n 'dtype': '<u4',\n },\n 'visibility_channel_id': {\n 'id': 0x6002,\n 'dtype': '<u4'\n },\n 'visibility_channel_count': {\n 'id': 0x6003,\n 'dtype': '<u4'\n },\n 'visibility_baseline_polarisation_id': {\n 'id': 0x6004,\n 'dtype': '<u4'\n },\n 'visibility_baseline_count': {\n 'id': 0x6005,\n 'dtype': '<u4'\n },\n 'phase_bin_id': {\n 'id': 0x6006,\n 'dtype': '<u2'\n },\n 'phase_bin_count': {\n 'id': 0x6007,\n 'dtype': '<u2'\n },\n 'scan_id': {\n 'id': 0x6008,\n 'dtype': '<u8'\n },\n 'visibility_hardware_id': {\n 'id': 0x6009,\n 'dtype': '<u4'\n },\n 'correlator_output_data': {\n 'id': 0x600A,\n 'dtype': dtype,\n 'shape': (num_baselines,)\n }\n }\n\n # Create shadow heap buffers for each stream/channel.\n for i_buffer in range(2):\n self._buffer.append([])\n for _ in range(len(self._streams)):\n heap_data = {\n 'visibility_timestamp_count': 0,\n 'visibility_timestamp_fraction': 0,\n 'correlator_output_data': numpy.zeros(\n (num_baselines,), dtype=dtype)\n }\n self._buffer[i_buffer].append(heap_data)\n\n # Add items to each stream's item group, and\n # send the start of stream message to each stream.\n tasks = []\n for i_stream, (stream, item_group) in enumerate(self._streams):\n for key, item in descriptor.items():\n item_shape = item['shape'] if 
'shape' in item else tuple()\n item_group.add_item(\n id=item['id'], name=key, description='',\n shape=item_shape, dtype=item['dtype'])\n\n # Write the header information.\n item_group['visibility_channel_id'].value = i_stream + start_chan\n item_group['visibility_channel_count'].value = 1\n item_group['visibility_baseline_count'].value = num_baselines\n tasks.append(stream.async_send_heap(item_group.get_start()))\n await asyncio.gather(*tasks)\n\n # Calculate the total data size being sent.\n num_streams = self._config['num_streams']\n data_size = (num_baselines * (num_pols * 8 + 2)) / 1e6\n self._log.info('Heap size is %.3f MB', data_size)\n\n # Enter the main loop over time samples.\n done = False\n loop = asyncio.get_event_loop()\n heaps_sent = 0\n i_time = 0\n start_time = time.time()\n timer1 = time.time()\n while not done:\n # Schedule sends for the previous buffer, if available.\n tasks = []\n if i_time > 0:\n i_buffer = (i_time - 1) % 2\n for i_stream, (stream, item_group) in enumerate(self._streams):\n for key, value in self._buffer[i_buffer][i_stream].items():\n item_group[key].value = value # Update values in heap.\n # Always send the descriptors AND the static data!\n tasks.append(stream.async_send_heap(\n item_group.get_heap(descriptors='all', data='all')))\n heaps_sent += 1\n\n # Fill a buffer by distributing it among worker threads.\n i_buffer = i_time % 2\n for i_stream in range(num_streams):\n tasks.append(loop.run_in_executor(\n executor, self.fill_buffer,\n self._buffer[i_buffer][i_stream], i_stream))\n\n # Ensure processing tasks and previous asynchronous sends are done.\n await asyncio.gather(*tasks)\n\n # Increment time index.\n i_time += 1\n\n # Periodically report the sender data rate.\n now = time.time()\n if (now - timer1) >= reporting_interval_sec:\n self._log.info('Sent %i heaps in %.3f sec (%.1f MB/s)',\n heaps_sent, (now - timer1),\n (data_size * heaps_sent) / (now - timer1))\n heaps_sent = 0\n timer1 = now\n\n # Check for 
completion.\n if now - start_time > max_duration_sec > 0:\n done = True\n\n # End the streams.\n tasks = []\n for stream, item_group in self._streams:\n tasks.append(stream.async_send_heap(item_group.get_end()))\n await asyncio.gather(*tasks)", "response": "Main loop for the SPEAD heap."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nmark the specified object as completed.", "response": "def set_complete(self, object_id: str):\n \"\"\"Mark the specified object as completed.\"\"\"\n if object_id in self.active:\n DB.remove_from_list('{}:active'.format(self.type), object_id)\n DB.append_to_list('{}:completed'.format(self.type), object_id)"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef publish(self, object_id: str, event_type: str,\n event_data: dict = None):\n \"\"\"Publish a scheduling object event.\n\n Args:\n object_id (str): ID of the scheduling object\n event_type (str): Type of event.\n event_data (dict, optional): Event data.\n\n \"\"\"\n object_key = SchedulingObject.get_key(self.type, object_id)\n publish(event_type=event_type,\n event_data=event_data,\n object_type=self.type,\n object_id=object_id,\n object_key=object_key,\n origin=None)", "response": "Publish a scheduling object event."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nstarting a workflow stage by starting a number of docker services.", "response": "def _start_workflow_stages(pb: ProcessingBlock, pb_id: str,\n workflow_stage_dict: dict,\n workflow_stage: WorkflowStage,\n docker: DockerSwarmClient):\n \"\"\"Start a workflow stage by starting a number of docker services.\n\n This function first assesses if the specified workflow stage can be\n started based on its dependencies. 
If this is found to be the case,\n the workflow stage is stared by first resolving and template arguments\n in the workflow stage configuration, and then using the Docker Swarm Client\n API to start workflow stage services. As part of this, the\n workflow_stage_dict data structure is updated accordingly.\n\n TODO(BMo) This function will need refactoring at some point as part\n of an update to the way workflow state metadata is stored in the\n configuration database. Currently the stage_data dictionary\n is a bit of a hack for a badly specified Configuration Database\n backed WorkflowStage object.\n\n This function is used by `execute_processing_block`.\n\n Args:\n pb (ProcessingBlock): Configuration database Processing Block data\n object\n pb_id (str): Processing Block identifier\n workflow_stage_dict (dict): Workflow stage metadata structure\n workflow_stage (WorkflowStage): Workflow state configuration database\n data object.\n docker (DockerClient): Docker Swarm Client object.\n\n \"\"\"\n # FIXME(BMo) replace pb_id argument, get this from the pb instead!\n stage_data = workflow_stage_dict[workflow_stage.id]\n stage_data['start'] = False\n\n # Determine if the stage can be started.\n if stage_data['status'] == 'none':\n if not workflow_stage.dependencies:\n stage_data['start'] = True\n else:\n dependency_status = []\n for dependency in workflow_stage.dependencies:\n dependency_status.append(\n workflow_stage_dict[dependency['value']][\n 'status'] == 'complete')\n # ii += 1\n stage_data['start'] = all(dependency_status)\n\n # Start the workflow stage.\n if stage_data['start']:\n # Configure EE (set up templates)\n LOG.info('-- Starting workflow stage: %s --', workflow_stage.id)\n LOG.info('Configuring EE templates.')\n args_template = jinja2.Template(workflow_stage.args_template)\n stage_params = pb.workflow_parameters[workflow_stage.id]\n template_params = {**workflow_stage.config, **stage_params}\n args = args_template.render(stage=template_params)\n 
LOG.info('Resolving workflow script arguments.')\n\n args = json.dumps(json.loads(args))\n compose_template = jinja2.Template(\n workflow_stage.compose_template)\n compose_str = compose_template.render(stage=dict(args=args))\n\n # Prefix service names with the PB id\n compose_dict = yaml.load(compose_str)\n service_names = compose_dict['services'].keys()\n new_service_names = [\n '{}_{}_{}'.format(pb_id, pb.workflow_id, name)\n for name in service_names]\n for new, old in zip(new_service_names, service_names):\n compose_dict['services'][new] = \\\n compose_dict['services'].pop(old)\n compose_str = yaml.dump(compose_dict)\n\n # Run the compose file\n service_ids = docker.create_services(compose_str)\n LOG.info('Staring workflow containers:')\n for service_id in service_ids:\n service_name = docker.get_service_name(service_id)\n LOG.info(\" %s, %s \", service_name, service_id)\n stage_data['services'][service_id] = {}\n LOG.info('Created Services: %s', service_ids)\n stage_data['services'][service_id] = dict(\n name=docker.get_service_name(service_id),\n status='running',\n complete=False\n )\n stage_data[\"status\"] = 'running'"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncheck and update the status of a workflow stage. This function checks and updates the status of a workflow stage specified by the parameters in the specified stage_data dictionary. If the workflow stage is not marked as complete, this function will check with the Docker Swarm API on the status of Docker services defined for the stage. If **all** services are found to be complete (based on their service state being reported as 'shutdown', the workflow stage is marked complete. This function is used by `execute_processing_block`. TODO(BMo) This function will need refactoring at some point as part of an update to the way workflow state metadata is stored in the configuration database. 
Currently the stage_data dictionary is a bit of a hack for a badly specified Configuration Database backed WorkflowStage object. Args: stage_data (dict): Dictionary holding workflow stage metadata. workflow_stage (WorkflowStage): Workflow stage data object. docker (DockerClient): Docker Swarm Client object.", "response": "def _update_workflow_stages(stage_data: dict, workflow_stage: WorkflowStage,\n docker: DockerSwarmClient):\n \"\"\"Check and update the status of a workflow stage.\n\n This function checks and updates the status of a workflow stage\n specified by the parameters in the specified stage_data dictionary.\n\n If the workflow stage is not marked as complete, this function will\n check with the Docker Swarm API on the status of Docker services\n defined for the stage. If **all** services are found to be complete\n (based on their service state being reported as 'shutdown',\n the workflow stage is marked complete.\n\n This function is used by `execute_processing_block`.\n\n TODO(BMo) This function will need refactoring at some point as part\n of an update to the way workflow state metadata is stored in the\n configuration database. 
Currently the stage_data dictionary\n is a bit of a hack for a badly specified Configuration Database\n backed WorkflowStage object.\n\n Args:\n stage_data (dict): Dictionary holding workflow stage metadata.\n workflow_stage (WorkflowStage): Workflow stage data object.\n docker (DockerClient): Docker Swarm Client object.\n\n \"\"\"\n service_status_complete = []\n\n # FIXME(BMo) is not \"complete\" -> is \"running\"\n if stage_data[\"status\"] != \"complete\":\n for service_id, service_dict in stage_data['services'].items():\n service_state = docker.get_service_state(service_id)\n if service_state == 'shutdown':\n docker.delete_service(service_id)\n service_dict['status'] = service_state\n service_dict['complete'] = (service_state == 'shutdown')\n service_status_complete.append(service_dict['complete'])\n if all(service_status_complete):\n LOG.info('Workflow stage service %s complete!',\n workflow_stage.id)\n stage_data['status'] = \"complete\""} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _abort_workflow(pb: ProcessingBlock, workflow_stage_dict: dict,\n docker: DockerSwarmClient):\n \"\"\"Abort the workflow.\n\n TODO(BMo): This function currently does nothing as the abort flag\n is hardcoded to False!\n\n This function is used by `execute_processing_block`.\n\n Args:\n pb (ProcessingBlock): Configuration database Processing block object.\n workflow_stage_dict (dict): Workflow stage metadata dictionary.\n docker (DockerClient): Docker Swarm Client object.\n\n Returns:\n bool, True if the stage is aborted, otherwise False.\n\n \"\"\"\n # TODO(BMo) Ask the database if the abort flag on the PB is set.\n _abort_flag = False\n if _abort_flag:\n for workflow_stage in pb.workflow_stages:\n for service_id, _ in \\\n workflow_stage_dict[workflow_stage.id]['services'].items():\n docker.delete_service(service_id)\n LOG.info(\"Deleted Service Id %s\", service_id)\n return True\n return False", "response": "Abort the 
workflow.\n\n TODO(BMo): This function currently does nothing as the abort flag\n is hardcoded to False!\n\n This function is used by `execute_processing_block`.\n\n Args:\n pb (ProcessingBlock): Configuration database Processing block object.\n workflow_stage_dict (dict): Workflow stage metadata dictionary.\n docker (DockerClient): Docker Swarm Client object.\n\n Returns:\n bool, True if the stage is aborted, otherwise False."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _workflow_complete(workflow_stage_dict: dict):\n # Check if all stages are complete, if so end the PBC by breaking\n # out of the while loop\n complete_stages = []\n for _, stage_config in workflow_stage_dict.items():\n complete_stages.append((stage_config['status'] == 'complete'))\n if all(complete_stages):\n LOG.info('PB workflow complete!')\n return True\n return False", "response": "Check if the entire workflow is complete."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nexecute a processing block.", "response": "def execute_processing_block(pb_id: str, log_level='DEBUG'):\n \"\"\"Execute a processing block.\n\n Celery tasks that executes a workflow defined in a Configuration database\n Processing Block data object.\n\n Args:\n pb_id (str): The PB id for the PBC\n log_level (str): Python logging level.\n\n \"\"\"\n init_logger('sip', show_log_origin=True, propagate=False,\n log_level=log_level)\n LOG.info('+' * 40)\n LOG.info('+ Executing Processing block: %s!', pb_id)\n LOG.info('+' * 40)\n LOG.info('Processing Block Controller version: %s', __version__)\n LOG.info('Docker Swarm API version: %s', sip_swarm_api_version)\n LOG.info('Configuration database API version: %s', config_db_version)\n\n pb = ProcessingBlock(pb_id)\n\n LOG.info('Starting workflow %s %s', pb.workflow_id, pb.workflow_version)\n\n pb.set_status('running')\n docker = DockerSwarmClient()\n\n # Coping workflow stages to a dict\n 
workflow_stage_dict = {}\n for stage in pb.workflow_stages:\n workflow_stage_dict[stage.id] = deepcopy(stage.config)\n workflow_stage_dict[stage.id]['services'] = dict()\n\n # Loop until workflow stages are complete.\n while True:\n time.sleep(0.1)\n\n for workflow_stage in pb.workflow_stages:\n\n _start_workflow_stages(pb, pb_id, workflow_stage_dict,\n workflow_stage, docker)\n\n _update_workflow_stages(workflow_stage_dict[workflow_stage.id],\n workflow_stage, docker)\n\n if _abort_workflow(pb, workflow_stage_dict, docker):\n break\n\n if _workflow_complete(workflow_stage_dict):\n break\n\n pb_list = ProcessingBlockList()\n pb_list.set_complete(pb_id)\n pb.set_status('completed')\n LOG.info('-' * 40)\n LOG.info('- Destroying PBC for %s', pb_id)\n LOG.info('-' * 40)\n return pb.status"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef put(self, block_id, priority, pb_type='offline'):\n if pb_type not in ('offline', 'realtime'):\n raise ValueError('Invalid PB type.')\n\n with self._mutex:\n added_time = datetime.datetime.utcnow().isoformat()\n entry = (priority, sys.maxsize-self._index, block_id, pb_type,\n added_time)\n self._index += 1\n if self._block_map.get(block_id) is not None:\n raise KeyError('ERROR: Block id \"{}\" already exists in '\n 'PC PB queue!'.\n format(block_id))\n self._block_map[block_id] = entry\n LOG.debug(\"Adding PB %s to queue\", block_id)\n self._queue.append(entry)\n self._queue.sort() # Sort by priority followed by insertion order.\n self._queue.reverse()", "response": "Adds a new entry to the processing block queue."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting the highest priority Processing Block from the queue.", "response": "def get(self):\n \"\"\"Get the highest priority Processing Block from the queue.\"\"\"\n with self._mutex:\n entry = self._queue.pop()\n del self._block_map[entry[2]]\n return entry[2]"} {"SOURCE": "codesearchnet", 
"instruction": "Implement a Python 3 function for\nremoving a Processing Block from the queue.", "response": "def remove(self, block_id):\n \"\"\"Remove a Processing Block from the queue.\n\n Args:\n block_id (str):\n \"\"\"\n with self._mutex:\n entry = self._block_map[block_id]\n self._queue.remove(entry)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a list of ServiceState objects known to SDP.", "response": "def get_service_state_list() -> List[ServiceState]:\n \"\"\"Return a list of ServiceState objects known to SDP.\"\"\"\n keys = DB.get_keys('states*')\n LOG.debug('Loading list of known services.')\n services = []\n for key in keys:\n values = key.split(':')\n if len(values) == 4:\n services.append(ServiceState(*values[1:4]))\n return services"} {"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns list of Services.", "response": "def get_service_id_list() -> List[tuple]:\n \"\"\"Return list of Services.\"\"\"\n keys = DB.get_keys('states*')\n services = []\n for key in keys:\n values = key.split(':')\n if len(values) == 4:\n services.append(':'.join(values[1:]))\n return services"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreturning the list of Processing Blocks known to SDP.", "response": "def get():\n \"\"\"Return the list of Processing Blocks known to SDP.\"\"\"\n\n LOG.debug('GET Processing Block list')\n _url = get_root_url()\n\n # Get list of Processing block Ids\n block_ids = sorted(DB.get_processing_block_ids())\n LOG.debug('Processing Block IDs: %s', block_ids)\n\n # Construct response object\n response = dict(num_processing_blocks=len(block_ids),\n processing_blocks=list())\n\n # Loop over blocks and add block summary to response.\n for block in DB.get_block_details(block_ids):\n block_id = block['id']\n LOG.debug('Creating PB summary for %s', block_id)\n block['links'] = dict(\n 
detail='{}/processing-block/{}'.format(_url, block_id),\n scheduling_block='{}/scheduling-block/{}'\n .format(_url, block_id.split(':')[0])\n )\n response['processing_blocks'].append(block)\n response['links'] = {\n 'self': '{}'.format(request.url),\n 'home': '{}'.format(_url)\n }\n return response, HTTPStatus.OK"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef config(self) -> dict:\n # Check that the key exists\n self._check_object_exists()\n config_dict = DB.get_hash_dict(self.key)\n for _, value in config_dict.items():\n for char in ['[', '{']:\n if char in value:\n value = ast.literal_eval(value)\n return config_dict", "response": "Get the scheduling object config."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef get_property(self, property_key: str) -> str:\n self._check_object_exists()\n return DB.get_hash_value(self.key, property_key)", "response": "Get a scheduling object property."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nsetting the status of the scheduling object.", "response": "def set_status(self, value):\n \"\"\"Set the status of the scheduling object.\"\"\"\n self._check_object_exists()\n DB.set_hash_value(self.key, 'status', value)\n self.publish('status_changed', event_data=dict(status=value))"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef publish(self, event_type: str, event_data: dict = None):\n import inspect\n import os.path\n _stack = inspect.stack()\n _origin = os.path.basename(_stack[3][1]) + '::' + \\\n _stack[3][3]+'::L{}'.format(_stack[3][2])\n\n publish(event_type=event_type,\n event_data=event_data,\n object_type=self._type,\n object_id=self._id,\n object_key=self._key,\n origin=_origin)", "response": "Publish an event associated with the object."} {"SOURCE": "codesearchnet", "instruction": "Explain what the 
following Python 3 code does\ndef get_events(self) -> List[Event]:\n LOG.debug('Getting events for %s', self.key)\n return get_events(self.key)", "response": "Get the list of events associated with the scheduling object."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _check_object_exists(self):\n if not DB.get_keys(self.key):\n raise KeyError(\"Object with key '{}' not exist\".format(self.key))", "response": "Raise a KeyError if the object does not exist in the database."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_service_state_object_id(subsystem: str, name: str,\n version: str) -> str:\n \"\"\"Return service state data object key.\n\n Args:\n subsystem (str): Subsystem the service belongs to\n name (str): Name of the Service\n version (str): Version of the Service\n\n Returns:\n str, Key used to store the service state data object\n\n \"\"\"\n return '{}:{}:{}'.format(subsystem, name, version)", "response": "Return the service state data object key."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef create_services(self, compose_str: str) -> list:\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Services can only be run on '\n 'swarm manager nodes')\n\n # Initialise empty list\n services_ids = []\n\n try:\n service_config = yaml.load(compose_str)\n\n # Deepcopy the service config\n service_list = copy.deepcopy(service_config)\n\n # Removing version and service from the dict\n service_config.pop('version')\n service_config.pop('services')\n\n for service_name in service_list['services']:\n service_exist = self._client.services.list(\n filters={'name': service_name})\n if not service_exist:\n service_config['name'] = service_name\n service_spec = self._parse_services(\n service_config, service_name, 
service_list)\n created_service = self._client.services.create(\n **service_spec)\n service_id = created_service.short_id\n LOG.debug('Service created: %s', service_id)\n services_ids.append(service_id)\n else:\n LOG.debug('Services already exists')\n\n except yaml.YAMLError as exc:\n print(exc)\n\n # Returning list of services created\n return services_ids", "response": "Create new docker services.\n\n Args:\n compose_str (string): Docker compose 'file' string\n\n Return:\n service_names, list"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates a new docker volume.", "response": "def create_volume(self, volume_name: str, driver_spec: str = None):\n \"\"\"Create new docker volumes.\n\n Only the manager nodes can create a volume\n\n Args:\n volume_name (string): Name for the new docker volume\n driver_spec (string): Driver for the docker volume\n \"\"\"\n # Default values\n if driver_spec:\n driver = driver_spec\n else:\n driver = 'local'\n\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Services can only be deleted '\n 'on swarm manager nodes')\n\n self._client.volumes.create(name=volume_name, driver=driver)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef delete_service(self, service: str):\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Services can only be deleted '\n 'on swarm manager nodes')\n\n # Remove service\n self._api_client.remove_service(service)", "response": "Removes a docker service."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nremoves all services in the cluster.", "response": "def delete_all_services(self):\n \"\"\"Removes/stops a service.\n\n Only the manager nodes can delete a service\n \"\"\"\n # Raise an exception if we are not a manager\n if not self._manager:\n raise 
RuntimeError('Services can only be deleted '\n 'on swarm manager nodes')\n\n service_list = self.get_service_list()\n for services in service_list:\n # Remove all the services\n self._api_client.remove_service(services)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef delete_volume(self, volume_name: str):\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Volumes can only be deleted '\n 'on swarm manager nodes')\n\n # Remove volume\n self._api_client.remove_volume(volume_name)", "response": "Removes a docker volume."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nremove all the volumes.", "response": "def delete_all_volumes(self):\n \"\"\"Remove all the volumes.\n\n Only the manager nodes can delete a volume\n \"\"\"\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Volumes can only be deleted '\n 'on swarm manager nodes')\n\n volume_list = self.get_volume_list()\n for volumes in volume_list:\n # Remove all the services\n self._api_client.remove_volume(volumes, force=True)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_service_list(self) -> list:\n # Initialising empty list\n services = []\n\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node can retrieve'\n ' all the services.')\n\n service_list = self._client.services.list()\n for s_list in service_list:\n services.append(s_list.short_id)\n return services", "response": "Get a list of docker services."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_service_name(self, service_id: str) -> str:\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node can retrieve all'\n 
' the services details.')\n\n service = self._client.services.get(service_id)\n return service.name", "response": "Get the name of the docker service."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_service_details(self, service_id: str) -> dict:\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node can retrieve all'\n ' the services details.')\n\n service = self._client.services.get(service_id)\n return service.attrs", "response": "Get details of a service."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nget the state of the service.", "response": "def get_service_state(self, service_id: str) -> str:\n \"\"\"Get the state of the service.\n\n Only the manager nodes can retrieve service state\n\n Args:\n service_id (str): Service id\n\n Returns:\n str, state of the service\n\n \"\"\"\n # Get service\n service = self._client.services.get(service_id)\n\n # Get the state of the service\n for service_task in service.tasks():\n service_state = service_task['DesiredState']\n return service_state"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_node_list(self) -> list:\n # Initialising empty list\n nodes = []\n\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node '\n 'can retrieve all the nodes.')\n\n node_list = self._client.nodes.list()\n for n_list in node_list:\n nodes.append(n_list.id)\n return nodes", "response": "Get a list of nodes."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets details of a node.", "response": "def get_node_details(self, node_id: list) -> dict:\n \"\"\"Get details of a node.\n\n Only the manager nodes can retrieve details of a node\n\n Args:\n node_id (list): List of node ID\n\n 
Returns:\n dict, details of the node\n\n \"\"\"\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node can '\n 'retrieve node details.')\n\n node = self._client.nodes.get(node_id)\n return node.attrs"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_container_list(self) -> list:\n # Initialising empty list\n containers = []\n\n containers_list = self._client.containers.list()\n for c_list in containers_list:\n containers.append(c_list.short_id)\n return containers", "response": "Get list of containers."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef get_container_details(self, container_id_or_name: str) -> dict:\n container = self._client.containers.get(container_id_or_name)\n return container.attrs", "response": "Get details of a container."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget a list of docker volumes.", "response": "def get_volume_list(self) -> list:\n \"\"\"Get a list of docker volumes.\n\n Only the manager nodes can retrieve all the volumes\n\n Returns:\n list, all the names of the volumes in swarm\n\n \"\"\"\n # Initialising empty list\n volumes = []\n\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node can retrieve'\n ' all the services.')\n\n volume_list = self._client.volumes.list()\n for v_list in volume_list:\n volumes.append(v_list.name)\n return volumes"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget details of the volume.", "response": "def get_volume_details(self, volume_name: str) -> dict:\n \"\"\"Get details of the volume.\n\n Args:\n volume_name (str): Name of the volume\n\n Returns:\n dict, details of the volume\n\n \"\"\"\n if volume_name not in self.volumes:\n raise RuntimeError('No such volume 
found: ', volume_name)\n\n volume = self._client.volumes.get(volume_name)\n return volume.attrs"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef get_actual_replica(self, service_id: str) -> str:\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node can retrieve '\n 'replication level of the service')\n\n service_details = self.get_service_details(service_id)\n actual_replica = service_details[\"Spec\"][\"Mode\"][\n \"Replicated\"][\"Replicas\"]\n return actual_replica", "response": "Get the actual replica level of a service."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_replicas(self, service_id: str) -> str:\n # Initialising empty list\n replicas = []\n\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node can retrieve '\n 'replication level of the service')\n\n service_tasks = self._client.services.get(service_id).tasks()\n for task in service_tasks:\n if task['Status']['State'] == \"running\":\n replicas.append(task)\n return len(replicas)", "response": "Get the replication level of a service."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef update_labels(self, node_name: str, labels: dict):\n # Raise an exception if we are not a manager\n if not self._manager:\n raise RuntimeError('Only the Swarm manager node can update '\n 'node details.')\n\n # Node specification\n node_spec = {'Availability': 'active',\n 'Name': node_name,\n 'Role': 'manager',\n 'Labels': labels}\n node = self._client.nodes.get(node_name)\n node.update(node_spec)", "response": "Update the labels of a node."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _parse_services(self, service_config: dict, service_name: str,\n service_list: 
dict) -> dict:\n \"\"\"Parse the docker compose file.\n\n Args:\n service_config (dict): Service configurations from the compose file\n service_name (string): Name of the services\n service_list (dict): Service configuration list\n\n Returns:\n dict, service specifications extracted from the compose file\n\n \"\"\"\n for key, value in service_list['services'][service_name].items():\n service_config[key] = value\n if 'command' in key:\n key = \"args\"\n service_config['args'] = value\n service_config.pop('command')\n if 'ports' in key:\n endpoint_spec = self._parse_ports(value)\n service_config['endpoint_spec'] = endpoint_spec\n service_config.pop('ports')\n if 'volumes' in key:\n volume_spec = self._parse_volumes(value)\n service_config['mounts'] = volume_spec\n service_config.pop('volumes')\n if 'deploy' in key:\n self._parse_deploy(value, service_config)\n service_config.pop('deploy')\n if 'networks' in key:\n network_spec = self._parse_networks(service_list)\n service_config['networks'] = network_spec\n if 'logging' in key:\n self._parse_logging(value, service_config)\n service_config.pop('logging')\n if 'environment' in key:\n service_config['env'] = value\n service_config.pop('environment')\n\n # LOG.info('Service Config: %s', service_config)\n return service_config", "response": "Parse the docker compose file."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _parse_deploy(self, deploy_values: dict, service_config: dict):\n # Initialising empty dictionary\n mode = {}\n\n for d_value in deploy_values:\n if 'restart_policy' in d_value:\n restart_spec = docker.types.RestartPolicy(\n **deploy_values[d_value])\n service_config['restart_policy'] = restart_spec\n if 'placement' in d_value:\n for constraints_key, constraints_value in \\\n deploy_values[d_value].items():\n service_config[constraints_key] = constraints_value\n if 'mode' in d_value:\n mode[d_value] = deploy_values[d_value]\n if 'replicas' in d_value:\n 
mode[d_value] = deploy_values[d_value]\n if 'resources' in d_value:\n resource_spec = self._parse_resources(\n deploy_values, d_value)\n service_config['resources'] = resource_spec\n\n # Setting the types\n mode_spec = docker.types.ServiceMode(**mode)\n service_config['mode'] = mode_spec", "response": "Parse the deploy key."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nparsing the ports key. and return a Ports specification which contains exposed ports", "response": "def _parse_ports(port_values: dict) -> dict:\n \"\"\"Parse ports key.\n\n Args:\n port_values (dict): ports configuration values\n\n Returns:\n dict, Ports specification which contains exposed ports\n\n \"\"\"\n # Initialising empty dictionary\n endpoints = {}\n\n for port_element in port_values:\n target_port = port_element.split(':')\n for port in target_port:\n endpoints[int(port)] = int(port)\n\n # Setting the types\n endpoint_spec = docker.types.EndpointSpec(ports=endpoints)\n return endpoint_spec"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _parse_volumes(volume_values: dict) -> str:\n for v_values in volume_values:\n for v_key, v_value in v_values.items():\n if v_key == 'source':\n if v_value == '.':\n source = os.path.dirname(\n os.path.abspath(__file__))\n else:\n source = v_value\n if v_key == 'target':\n target = v_value\n volume_spec = [source + ':' + target]\n return volume_spec", "response": "Parse volumes key.\n\n Args:\n volume_values (dict): volume configuration values\n\n Returns:\n string, volume specification with mount source and container path"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _parse_resources(resource_values: dict, resource_name: str) -> dict:\n # Initialising empty dictionary\n resources = {}\n\n for r_values in resource_values[resource_name]:\n if 'limits' in r_values:\n for r_key, r_value in 
\\\n resource_values[resource_name][r_values].items():\n if 'cpu' in r_key:\n cpu_value = float(r_value) * 10 ** 9\n cpu_key = r_key[:3] + '_limit'\n resources[cpu_key] = int(cpu_value)\n if 'mem' in r_key:\n mem_value = re.sub('M', '', r_value)\n mem_key = r_key[:3] + '_limit'\n resources[mem_key] = int(mem_value) * 1048576\n resources_spec = docker.types.Resources(**resources)\n\n return resources_spec", "response": "Parse the resource key."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nparse network key.", "response": "def _parse_networks(service_list: dict) -> list:\n \"\"\"Parse network key.\n\n Args:\n service_list (dict): Service configurations\n\n Returns:\n list, List of networks\n\n \"\"\"\n # Initialising empty list\n networks = []\n\n for n_values in service_list['networks'].values():\n for n_key, n_value in n_values.items():\n if 'name' in n_key:\n networks.append(n_value)\n return networks"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nparsing log key. 
Args : log_values - logging configuration values", "response": "def _parse_logging(log_values: dict, service_config: dict):\n \"\"\"Parse log key.\n\n Args:\n log_values (dict): logging configuration values\n service_config (dict): Service specification\n \"\"\"\n for log_key, log_value in log_values.items():\n if 'driver' in log_key:\n service_config['log_driver'] = log_value\n if 'options' in log_key:\n service_config['log_driver_options'] = log_value"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _init_queue():\n LOG.info('Initialising Processing Block queue.')\n queue = ProcessingBlockQueue()\n active_pb_ids = ProcessingBlockList().active\n LOG.info('Initialising PC PB queue: %s', active_pb_ids)\n for pb_id in active_pb_ids:\n pb = ProcessingBlock(pb_id)\n queue.put(pb.id, pb.priority, pb.type)\n return queue", "response": "Initialise the Processing Block queue from the database."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _monitor_events(self):\n LOG.info(\"Starting to monitor PB events\")\n check_counter = 0\n while True:\n if check_counter == 50:\n check_counter = 0\n LOG.debug('Checking for PB events...')\n\n published_events = self._pb_events.get_published_events()\n for event in published_events:\n if event.type == 'status_changed':\n LOG.info('PB status changed event: %s',\n event.data['status'])\n\n if event.data['status'] == 'created':\n LOG.info('Acknowledged PB created event (%s) for %s, '\n '[timestamp: %s]', event.id,\n event.object_id, event.timestamp)\n pb = ProcessingBlock(event.object_id)\n self._queue.put(event.object_id, pb.priority, pb.type)\n\n if event.data['status'] == 'completed':\n LOG.info('Acknowledged PB completed event (%s) for %s,'\n ' [timestamp: %s]', event.id,\n event.object_id, event.timestamp)\n self._num_pbcs -= 1\n if self._num_pbcs < 0:\n self._num_pbcs = 0\n\n time.sleep(0.1)\n check_counter 
+= 1", "response": "Watch for Processing Block events."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _processing_controller_status(self):\n LOG.info('Starting Processing Block queue reporter.')\n while True:\n LOG.info('PB queue length = %d', len(self._queue))\n time.sleep(self._report_interval)\n if active_count() != 5:\n LOG.critical('Processing Controller not running '\n 'correctly! (%d/%d threads active)',\n active_count(), 5)", "response": "Report on the status of the Processing Block queue."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _schedule_processing_blocks(self):\n LOG.info('Starting to Schedule Processing Blocks.')\n while True:\n time.sleep(0.5)\n if not self._queue:\n continue\n if self._num_pbcs >= self._max_pbcs:\n LOG.warning('Resource limit reached!')\n continue\n _inspect = Inspect(app=APP)\n if self._queue and _inspect.active() is not None:\n next_pb = self._queue[-1]\n LOG.info('Considering %s for execution...', next_pb[2])\n utc_now = datetime.datetime.utcnow()\n time_in_queue = (utc_now -\n datetime_from_isoformat(next_pb[4]))\n if time_in_queue.total_seconds() >= 10:\n item = self._queue.get()\n LOG.info('------------------------------------')\n LOG.info('>>> Executing %s! 
<<<', item)\n LOG.info('------------------------------------')\n execute_processing_block.delay(item)\n self._num_pbcs += 1\n else:\n LOG.info('Waiting for resources for %s', next_pb[2])", "response": "Schedule processing blocks for execution."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _monitor_pbc_status(self):\n LOG.info('Starting to Monitor PBC status.')\n inspect = celery.current_app.control.inspect()\n workers = inspect.ping()\n start_time = time.time()\n while workers is None:\n time.sleep(0.1)\n elapsed = time.time() - start_time\n if elapsed > 20.0:\n LOG.warning('PBC not found!')\n break\n if workers is not None:\n for worker in workers:\n _tasks = inspect.registered_tasks()[worker]\n LOG.info('Worker: %s tasks:', worker)\n for task_index, task_name in enumerate(_tasks):\n LOG.info(' %02d : %s', task_index, task_name)\n\n while True:\n LOG.info('Checking PBC status (%d/%d)', self._num_pbcs,\n self._max_pbcs)\n celery_app = celery.current_app\n inspect = celery_app.control.inspect()\n workers = inspect.ping()\n if workers is None:\n LOG.warning('PBC service not found!')\n else:\n LOG.info('PBC state: %s', celery_app.events.State())\n _active = inspect.active()\n _scheduled = inspect.scheduled()\n for worker in workers:\n LOG.info(' Worker %s: scheduled: %s, active: %s',\n worker, _active[worker], _scheduled[worker])\n time.sleep(self._report_interval)", "response": "Monitor the PBC status."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef start(self):\n # TODO(BMo) having this check is probably a good idea but I've \\\n # disabled it for now while the PBC is in flux.\n # assert sip_pbc.release.__version__ == '1.2.3'\n\n scheduler_threads = [\n Thread(target=self._monitor_events, daemon=True),\n Thread(target=self._processing_controller_status, daemon=True),\n Thread(target=self._schedule_processing_blocks, daemon=True),\n 
Thread(target=self._monitor_pbc_status, daemon=True)\n ]\n\n for thread in scheduler_threads:\n thread.start()\n\n try:\n for thread in scheduler_threads:\n thread.join()\n except KeyboardInterrupt:\n LOG.info('Keyboard interrupt!')\n sys.exit(0)\n finally:\n LOG.info('Finally!')", "response": "Start the scheduler threads."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef alarm():\n if request.method == 'POST':\n response = {'message': 'POST Accepted'}\n logging.info('alarm POSTED!')\n data = request.data\n logging.info(data)\n string = json.dumps(data)\n producer.send('SIP-alarms', string.encode())\n return response\n return \"\"", "response": "A alarm is a POST request."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nupdating the current state of a service.", "response": "def _update_service_current_state(service: ServiceState):\n \"\"\"Update the current state of a service.\n\n Updates the current state of services after their target state has changed.\n\n Args:\n service (ServiceState): Service state object to update\n\n \"\"\"\n LOG.debug(\"Setting current state from target state for %s\", service.id)\n service.update_current_state(service.target_state)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _update_services_instant_gratification(sdp_target_state: str):\n service_states = get_service_state_list()\n\n # Set the target state of services\n for service in service_states:\n if service.current_state != sdp_target_state:\n LOG.debug('Setting the current state of %s to be %s', service.id,\n sdp_target_state)\n service.update_current_state(sdp_target_state)", "response": "For demonstration purposes only."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the target states of services based on the SDP target state.", "response": "def 
_update_services_target_state(sdp_target_state: str):\n \"\"\"Update the target states of services based on SDP target state.\n\n When we get a new target state this function is called to ensure\n components receive the target state(s) and/or act on them.\n\n Args:\n sdp_target_state (str): Target state of SDP\n\n \"\"\"\n service_states = get_service_state_list()\n\n # Set the target state of services\n for service in service_states:\n if service.current_state != sdp_target_state:\n LOG.debug('Setting the target state of %s to be %s', service.id,\n sdp_target_state)\n service.update_target_state(sdp_target_state)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _handle_sdp_target_state_updated(sdp_state: SDPState):\n LOG.info('Handling SDP target state updated event...')\n LOG.info('SDP target state: %s', sdp_state.target_state)\n\n # Map between the SDP target state and the service target state?\n if sdp_state.target_state == 'off':\n _update_services_target_state('off')\n\n # TODO: Work out if the state of SDP has reached the target state.\n\n # If yes, update the current state.\n sdp_state.update_current_state(sdp_state.target_state)", "response": "Handle an SDP target state change event."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _init(sdp_state: SDPState):\n # Parse command line arguments.\n LOG.info(\"Initialising: %s\", __service_id__)\n\n # FIXME(BMo) There is a bug when SDP or services 'start' in the 'off'\n # state. 
At the moment it is impossible to transition out of this.\n\n # FIXME(BMo) **Hack** Register all services or if already registered do\n # nothing (this is handled by the ServiceState object).\n _services = [\n \"ExecutionControl:AlarmReceiver:1.0.0\",\n \"ExecutionControl:AlertManager:1.0.0\",\n \"ExecutionControl:ConfigurationDatabase:5.0.1\",\n \"ExecutionControl:MasterController:1.3.0\",\n \"ExecutionControl:ProcessingController:1.2.6\",\n \"ExecutionControl:ProcessingBlockController:1.3.0\",\n \"TangoControl:Database:1.0.4\",\n \"TangoControl:MySQL:1.0.3\",\n \"TangoControl:SDPMaster:1.2.1\",\n \"TangoControl:Subarrays:1.2.0\",\n \"TangoControl:ProcessingBlocks:1.2.0\",\n \"Platform:Kafka:2.1.1\",\n \"Platform:Prometheus:1.0.0\",\n \"Platform:PrometheusPushGateway:0.7.0\",\n \"Platform:RedisCommander:210.0.0\",\n \"Platform:Zookeeper:3.4.13\"\n ]\n for service_id in _services:\n subsystem, name, version = service_id.split(':')\n ServiceState(subsystem, name, version)\n\n # If the SDP state is 'unknown', mark the SDP state as init.\n # FIXME(BMo) This is not right as we want to allow for recovery from\n # failure without just reinitialising...!? ie. respect the old sate\n # NOTE: If the state is 'off' we will want to reset the database\n # with 'skasip_config_db_init --clear'\n if sdp_state.current_state in ['unknown', 'off']:\n try:\n LOG.info(\"Setting the SDPState to 'init'\")\n sdp_state.update_current_state('init', force=True)\n except ValueError as error:\n LOG.critical('Unable to set the State of SDP to init! 
%s',\n str(error))\n\n LOG.info(\"Updating Service States\")\n service_state_list = get_service_state_list()\n\n # FIXME(BMo) **Hack** Mark all Services in the 'unknown' state as\n # initialising.\n for service_state in service_state_list:\n if service_state.current_state in ['unknown', 'off']:\n service_state.update_current_state('init', force=True)\n\n # FIXME(BMo) **Hack** After 'checking' that the services are 'on' set\n # their state on 'on' after a short delay.\n # FIXME(BMo) This check should not be serialised!!! (should be part of the\n # event loop)\n for service_state in service_state_list:\n if service_state.current_state == 'init':\n time.sleep(random.uniform(0, 0.2))\n service_state.update_current_state('on')\n\n # FIXME(BMo): **Hack** Now the all services are on, set the sate of SDP to\n # 'standby'\n # FIXME(BMo) This should also be part of the event loop.\n services_on = [service.current_state == 'on'\n for service in service_state_list]\n if all(services_on):\n LOG.info('All Services are online!.')\n sdp_state.update_current_state('standby')\n else:\n LOG.critical('Master Controller failed to initialise.')\n\n return service_state_list", "response": "Initialize the master controller service."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nprocess an event from the SDP state machine.", "response": "def _process_event(event: Event, sdp_state: SDPState,\n service_states: List[ServiceState]):\n \"\"\"Process a SDP state change event.\"\"\"\n LOG.debug('Event detected! 
(id : \"%s\", type: \"%s\", data: \"%s\")',\n event.object_id, event.type, event.data)\n\n if event.object_id == 'SDP' and event.type == 'current_state_updated':\n LOG.info('SDP current state updated, no action required!')\n\n if event.object_id == 'SDP' and event.type == 'target_state_updated':\n LOG.info(\"SDP target state changed to '%s'\",\n sdp_state.target_state)\n\n # If the sdp is already in the target state do nothing\n if sdp_state.target_state == sdp_state.current_state:\n LOG.warning('SDP already in %s state',\n sdp_state.current_state)\n return\n\n # Check that a transition to the target state is allowed in the\n # current state.\n if not sdp_state.is_target_state_allowed(sdp_state.target_state):\n LOG.error('Transition to %s is not allowed when in state %s',\n sdp_state.target_state, sdp_state.current_state)\n sdp_state.target_state = sdp_state.current_state\n return\n\n _update_services_target_state(sdp_state.target_state)\n\n # If asking SDP to turn off, also turn off services.\n if sdp_state.target_state == 'off':\n LOG.info('Turning off services!')\n for service_state in service_states:\n service_state.update_target_state('off')\n service_state.update_current_state('off')\n\n LOG.info('Processing target state change request ...')\n time.sleep(0.1)\n LOG.info('Done processing target state change request!')\n\n # Assuming that the SDP has responding to the target\n # target state command by now, set the current state\n # to the target state.\n sdp_state.update_current_state(sdp_state.target_state)\n\n if sdp_state.current_state == 'alarm':\n LOG.debug('raising SDP state alarm')\n SIP_STATE_ALARM.set(1)\n else:\n SIP_STATE_ALARM.set(0)\n try:\n # FIXME(BMo) the pushgateway host should not be hardcoded!\n push_to_gateway('platform_pushgateway:9091', job='SIP',\n registry=COLLECTOR_REGISTRY)\n except urllib.error.URLError:\n LOG.warning(\"Unable to connect to the Alarms service!\")"} {"SOURCE": "codesearchnet", "instruction": "Explain what the 
following Python 3 code does\ndef _process_state_change_events():\n sdp_state = SDPState()\n service_states = get_service_state_list()\n state_events = sdp_state.get_event_queue(subscriber=__service_name__)\n state_is_off = sdp_state.current_state == 'off'\n\n counter = 0\n while True:\n time.sleep(0.1)\n if not state_is_off:\n\n # *Hack* to avoid problems with historical events not being\n # correctly handled by EventQueue.get(), replay old events every\n # 10s\n # - see issue #54\n if counter % 1000 == 0:\n LOG.debug('Checking published events ... %d', counter / 1000)\n _published_events = state_events.get_published_events(\n process=True)\n for _state_event in _published_events:\n _process_event(_state_event, sdp_state, service_states)\n else:\n _state_event = state_events.get()\n if _state_event:\n _process_event(_state_event, sdp_state, service_states)\n state_is_off = sdp_state.current_state == 'off'\n counter += 1", "response": "This function processes events relating to the overall state of SDP and responds to SDP state change events."} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef main():\n # Parse command line args.\n _parse_args()\n LOG.info(\"Starting: %s\", __service_id__)\n\n # Subscribe to state change events.\n # FIXME(BMo) This API is unfortunate as it looks like we are only\n # subscribing to sdp_state events.\n LOG.info('Subscribing to state change events (subscriber = %s)',\n __service_name__)\n sdp_state = SDPState()\n _ = sdp_state.subscribe(subscriber=__service_name__)\n\n # Initialise the service.\n _ = _init(sdp_state)\n LOG.info('Finished initialising!')\n\n # Enter a pseudo event-loop (using Sched) to monitor for state change\n # events\n # (Also random set services into a fault or alarm state if enabled)\n LOG.info('Responding to state change events ...')\n try:\n _process_state_change_events()\n except ValueError as error:\n LOG.critical('Value error: %s', 
str(error))\n except KeyboardInterrupt as err:\n LOG.debug('Keyboard Interrupt %s', err)\n LOG.info('Exiting!')", "response": "Merge temp_main and main."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef main():\n stream_config = spead2.send.StreamConfig(\n max_packet_size=16356, rate=1000e6, burst_size=10, max_heaps=1)\n item_group = spead2.send.ItemGroup(flavour=spead2.Flavour(4, 64, 48, 0))\n\n # Add item descriptors to the heap.\n num_baselines = (512 * 513) // 2\n dtype = [('TCI', 'i1'), ('FD', 'u1'), ('VIS', '<c8', 4)]\n item_group.add_item(\n id=0x6000, name='visibility_timestamp_count', description='',\n shape=tuple(), format=None, dtype='<u4')\n item_group.add_item(\n id=0x6001, name='visibility_timestamp_fraction', description='',\n shape=tuple(), format=None, dtype='<u4')\n item_group.add_item(\n id=0x6005, name='visibility_baseline_count', description='',\n shape=tuple(), format=None, dtype='<u4')\n item_group.add_item(\n id=0x6008, name='scan_id', description='',\n shape=tuple(), format=None, dtype='<u8')\n item_group.add_item(\n id=0x600A, name='correlator_output_data', description='',\n shape=(num_baselines,), dtype=dtype)\n\n # Create streams and send start-of-stream message.\n streams = []\n num_streams = 2\n for i in range(num_streams):\n stream = spead2.send.UdpStream(\n thread_pool=spead2.ThreadPool(threads=1),\n hostname='127.0.0.1', port=41000 + i, config=stream_config)\n stream.send_heap(item_group.get_start())\n streams.append(stream)\n\n vis = numpy.zeros(shape=(num_baselines,), dtype=dtype)\n num_heaps = 200\n start_time = time.time()\n for stream in streams:\n # Update values in the heap.\n item_group['visibility_timestamp_count'].value = 1\n item_group['visibility_timestamp_fraction'].value = 0\n item_group['visibility_baseline_count'].value = num_baselines\n item_group['scan_id'].value = 100000000\n item_group['correlator_output_data'].value = vis\n # Iterate heaps.\n 
for i in range(num_heaps):\n # Send heap.\n stream.send_heap(item_group.get_heap(descriptors='all', data='all'))\n\n # Print time taken.\n duration = time.time() - start_time\n data_size = num_streams * num_heaps * (vis.nbytes / 1e6)\n print(\"Sent %.3f MB in %.3f sec (%.3f MB/sec)\" % (\n data_size, duration, (data_size/duration)))\n\n # Send end-of-stream message.\n for stream in streams:\n stream.send_heap(item_group.get_end())", "response": "Runs the test sender."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ncreates an event object from an event dictionary object.", "response": "def from_config(cls, config: dict):\n \"\"\"Create an event object from an event dictionary object.\n\n Args:\n config (dict): Event Configuration dictionary.\n\n \"\"\"\n timestamp = config.get('timestamp', None)\n return cls(config.get('id'),\n config.get('type'),\n config.get('data', dict()),\n config.get('origin', None),\n timestamp,\n config.get('object_type', None),\n config.get('object_id', None),\n config.get('object_key', None))"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreads visibility data from a Measurement Set and updates the grid normalisation.", "response": "def process_input_data(filename, imager, grid_data, grid_norm, grid_weights):\n \"\"\"Reads visibility data from a Measurement Set.\n\n The visibility grid or weights grid is updated accordingly.\n\n Visibility data are read from disk in blocks of size num_baselines.\n\n Args:\n filename (str): Name of Measurement Set to open.\n imager (oskar.Imager): Handle to configured imager.\n grid_data (numpy.ndarray or None): Visibility grid to populate.\n grid_norm (float) Current grid normalisation.\n grid_weights (numpy.ndarray): Weights grid to populate or read.\n\n Returns:\n grid_norm (float): Updated grid normalisation.\n \"\"\"\n # Get data from the input Measurement Set.\n ms = oskar.MeasurementSet.open(filename)\n block_start = 0\n 
num_rows = ms.num_rows\n num_baselines = ms.num_stations * (ms.num_stations - 1) // 2\n\n # Loop over data blocks of size num_baselines.\n while block_start < num_rows:\n block_size = num_rows - block_start\n if block_size > num_baselines:\n block_size = num_baselines\n\n # Get the baseline coordinates. (Replace this with a query to LTS.)\n uvw = ms.read_column('UVW', block_start, block_size)\n\n # Read the Stokes-I visibility weights.\n vis_weights = ms.read_column('WEIGHT', block_start, block_size)\n if ms.num_pols == 4:\n vis_weights = 0.5 * (vis_weights[:, 0] + vis_weights[:, 3])\n\n # Loop over frequency channels.\n # (We expect there to be only one channel here, but loop just in case.)\n for j in range(ms.num_channels):\n # Get coordinates in wavelengths.\n coords = uvw * (ms.freq_start_hz + j * ms.freq_inc_hz) / 299792458.\n\n # Get the Stokes-I visibilities for this channel.\n vis_data = None\n if not imager.coords_only:\n vis_data = ms.read_vis(block_start, j, 1, block_size)\n if ms.num_pols == 4:\n vis_data = 0.5 * (vis_data[0, :, 0] + vis_data[0, :, 3])\n\n # Update the grid plane with this visibility block.\n grid_norm = imager.update_plane(\n coords[:, 0], coords[:, 1], coords[:, 2], vis_data,\n vis_weights, grid_data, grid_norm, grid_weights)\n\n # Increment start row by block size.\n block_start += block_size\n\n # Return updated grid normalisation.\n return grid_norm"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef main():\n # Check command line arguments.\n if len(sys.argv) < 2:\n raise RuntimeError(\n 'Usage: mpiexec -n <np> '\n 'python mpi_imager_test.py <settings_file> <dir>')\n\n # Get the MPI communicator and initialise broadcast variables.\n comm = MPI.COMM_WORLD\n settings = None\n inputs = None\n grid_weights = None\n\n # Create log.\n log = logging.getLogger()\n log.setLevel(logging.DEBUG)\n if len(log.handlers) == 0:\n log.addHandler(logging.StreamHandler(sys.stdout))\n\n if 
comm.Get_rank() == 0:\n # Load pipeline settings.\n with open(sys.argv[1]) as f:\n settings = json.load(f)\n\n # Get a list of input Measurement Sets to process.\n data_dir = str(sys.argv[2])\n inputs = glob(join(data_dir, '*.ms')) + glob(join(data_dir, '*.MS'))\n inputs = filter(None, inputs)\n log.info('Found input Measurement Sets: %s', ', '.join(inputs))\n\n # Distribute the list of Measurement Sets among processors.\n inputs = chunks(inputs, comm.Get_size())\n\n # Broadcast settings and scatter list of input files.\n comm.barrier()\n settings = comm.bcast(settings)\n inputs = comm.scatter(inputs)\n\n # Record which file(s) this node is working on.\n log.debug('Rank %d, processing [%s]', comm.Get_rank(), ', '.join(inputs))\n\n # Create an imager and configure it.\n precision = settings['precision']\n imager = oskar.Imager(precision)\n for key, value in settings['imager'].items():\n setattr(imager, key, value)\n\n # Allocate a local visibility grid.\n grid_norm = 0.\n grid_dim = [imager.plane_size, imager.plane_size]\n grid_data = numpy.zeros(grid_dim,\n dtype='c8' if precision == 'single' else 'c16')\n\n # Process data according to mode.\n if settings['combine']:\n if imager.weighting == 'Uniform' or imager.algorithm == 'W-projection':\n # If necessary, generate a local weights grid.\n local_weights = None\n if imager.weighting == 'Uniform':\n grid_weights = numpy.zeros(grid_dim, dtype=precision)\n local_weights = numpy.zeros(grid_dim, dtype=precision)\n\n # Do a first pass for uniform weighting or W-projection.\n imager.coords_only = True\n for f in inputs:\n log.info('Reading coordinates from %s', f)\n process_input_data(f, imager, None, 0.0, local_weights)\n imager.coords_only = False\n\n # Get maximum number of W-projection planes required.\n num_w_planes = imager.num_w_planes\n num_w_planes = comm.allreduce(num_w_planes, op=MPI.MAX)\n imager.num_w_planes = num_w_planes\n\n # Combine (reduce) weights grids, and broadcast the result.\n if local_weights is 
not None:\n comm.Allreduce(local_weights, grid_weights, op=MPI.SUM)\n\n # Populate the local visibility grid.\n for f in inputs:\n log.info('Reading visibilities from %s', f)\n grid_norm = process_input_data(f, imager, grid_data, grid_norm,\n grid_weights)\n\n # Combine (reduce) visibility grids.\n grid = numpy.zeros_like(grid_data)\n comm.Reduce(grid_data, grid, op=MPI.SUM)\n grid_norm = comm.reduce(grid_norm, op=MPI.SUM)\n\n # Finalise grid and save image.\n if comm.Get_rank() == 0:\n save_image(imager, grid, grid_norm, settings['output_file'])\n log.info('Finished. Output file is %s', settings['output_file'])\n else:\n for f in inputs:\n # Clear the grid.\n grid_norm = 0.\n grid_data.fill(0)\n if imager.weighting == 'Uniform':\n grid_weights = numpy.zeros(grid_dim, dtype=precision)\n\n # Do a first pass for uniform weighting or W-projection.\n if imager.weighting == 'Uniform' or \\\n imager.algorithm == 'W-projection':\n imager.coords_only = True\n log.info('Reading coordinates from %s', f)\n process_input_data(f, imager, None, 0.0, grid_weights)\n imager.coords_only = False\n\n # Populate the local visibility grid.\n log.info('Reading visibilities from %s', f)\n grid_norm = process_input_data(f, imager, grid_data, grid_norm,\n grid_weights)\n\n # Save image by finalising grid.\n output_file = splitext(f)[0] + '.fits'\n save_image(imager, grid_data, grid_norm, output_file)\n log.info('Finished. 
Output file is %s', output_file)", "response": "Runs test imaging pipeline using MPI."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nget list of Scheduling Blocks Instances known to SDP.", "response": "def get():\n \"\"\"Return list of Scheduling Blocks Instances known to SDP .\"\"\"\n LOG.debug('GET list of SBIs.')\n\n # Construct response object.\n _url = get_root_url()\n response = dict(scheduling_blocks=[],\n links=dict(home='{}'.format(_url)))\n\n # Get ordered list of SBI ID's.\n block_ids = DB.get_sched_block_instance_ids()\n\n # Loop over SBIs and add summary of each to the list of SBIs in the\n # response.\n for block in DB.get_block_details(block_ids):\n block_id = block['id']\n LOG.debug('Adding SBI %s to list', block_id)\n LOG.debug(block)\n\n block['num_processing_blocks'] = len(block['processing_block_ids'])\n\n temp = ['OK'] * 10 + ['WAITING'] * 4 + ['FAILED'] * 2\n block['status'] = choice(temp)\n try:\n del block['processing_block_ids']\n except KeyError:\n pass\n block['links'] = {\n 'detail': '{}/scheduling-block/{}' .format(_url, block_id)\n }\n response['scheduling_blocks'].append(block)\n return response, HTTPStatus.OK"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nproviding table of scheduling block instance metadata for use with AJAX tables", "response": "def get_table():\n \"\"\"Provides table of scheduling block instance metadata for use with AJAX\n tables\"\"\"\n response = dict(blocks=[])\n block_ids = DB.get_sched_block_instance_ids()\n for index, block_id in enumerate(block_ids):\n block = DB.get_block_details([block_id]).__next__()\n info = [\n index,\n block['id'],\n block['sub_array_id'],\n len(block['processing_blocks'])\n ]\n response['blocks'].append(info)\n return response, HTTPStatus.OK"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef set_value(self, key, field, value):\n self._db.hset(key, field, 
value)", "response": "Add the state of the key and field to the state of the key and field"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\npushes the specified event to the tail of the list stored at the key event_name with the specified event_type and block_id.", "response": "def push_event(self, event_name, event_type, block_id):\n \"\"\"Push inserts all the specified values at the tail of the list\n stored at the key\"\"\"\n self._db.rpush(event_name, dict(type=event_type, id=block_id))"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef save_dict(self, key: str, my_dict: dict, hierarchical: bool = False):\n for _key, _value in my_dict.items():\n if isinstance(_value, dict):\n if not hierarchical:\n self._db.hmset(key, {_key: json.dumps(_value)})\n else:\n self.save_dict(key + ':' + _key, _value, hierarchical)\n elif isinstance(_value, list):\n if not hierarchical:\n self._db.hmset(key, {_key: str(_value)})\n else:\n print('saving list at ', key + ':' + _key)\n self._db.lpush(key + ':' + _key, *_value[::-1])\n elif isinstance(_value, bool):\n self._db.hmset(key, {_key: str(_value)})\n else:\n self._db.hmset(key, {_key: _value})", "response": "Store the specified dictionary at the specified key."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _build_dict(my_dict, keys, values):\n temp = my_dict\n for depth, key in enumerate(keys):\n if depth < len(keys) - 1:\n if key not in temp:\n temp[key] = dict()\n temp = temp[key]\n else:\n if key not in temp:\n temp[key] = values\n else:\n temp[key] = {**temp[key], **values}\n return my_dict", "response": "Build a dictionary from a set of redis hashes."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _load_values(self, db_key: str) -> dict:\n if self._db.type(db_key) == 'list':\n db_values = self._db.lrange(db_key, 0, 
-1)\n for i, value in enumerate(db_values):\n try:\n db_values[i] = ast.literal_eval(value)\n except SyntaxError:\n pass\n except ValueError:\n pass\n else: # self._db.type == 'hash'\n db_values = self._db.hgetall(db_key)\n for _key, _value in db_values.items():\n try:\n db_values[_key] = ast.literal_eval(_value)\n except SyntaxError:\n pass\n except ValueError:\n pass\n return db_values", "response": "Load values from the db at the specified key."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _load_dict_hierarchical(self, db_key: str) -> dict:\n db_keys = self._db.keys(pattern=db_key + '*')\n my_dict = {}\n for _db_key in db_keys:\n if self._db.type(_db_key) == 'list':\n db_values = self._db.lrange(_db_key, 0, -1)\n for i, value in enumerate(db_values):\n try:\n db_values[i] = ast.literal_eval(value)\n except SyntaxError:\n pass\n except ValueError:\n pass\n else: # self._db.type == 'hash'\n db_values = self._db.hgetall(_db_key)\n for _key, _value in db_values.items():\n try:\n db_values[_key] = ast.literal_eval(_value)\n except SyntaxError:\n pass\n except ValueError:\n pass\n my_dict = self._build_dict(my_dict, _db_key.split(':'),\n db_values)\n return my_dict[db_key]", "response": "Load a dictionary stored hierarchically at db_key."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef load_dict(self, db_key: str, hierarchical: bool = False) -> dict:\n if not hierarchical:\n db_values = self._db.hgetall(db_key)\n for _key, _value in db_values.items():\n if isinstance(_value, str):\n db_values[_key] = ast.literal_eval(_value)\n my_dict = db_values\n else:\n my_dict = self._load_dict_hierarchical(db_key)\n return my_dict", "response": "Load the dictionary at the specified key."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nloading values from a dictionary with the specified dict_keys.", "response": "def 
load_dict_values(self, db_key: str, dict_keys: List[str],\n hierarchical: bool = False) -> List:\n \"\"\"Load values from a dictionary with the specified dict_keys.\n\n Args:\n db_key (str): Key where the dictionary is stored\n dict_keys (List[str]): Keys within the dictionary to load.\n hierarchical (bool): If True, expect the dictionary to have been\n stored hierarchically. If False, expect the dictionary to have\n been stored flat.\n\n\n Returns:\n object: The value stored at dict_key in the dictionary stored at\n key\n\n \"\"\"\n result = []\n if not hierarchical:\n _values = self._db.hmget(db_key, *dict_keys)\n result = [ast.literal_eval(_value) for _value in _values]\n else:\n # Get all keys in the set of keys for this dict 'db_key'\n db_keys = self._db.keys(pattern=db_key + '*')\n for _db_key in db_keys:\n # Check if one of the dict_keys is an entire sub-dict entry\n for name in _db_key.split(':')[1:]:\n if name in dict_keys:\n _values = self._load_values(_db_key)\n result.append(_values)\n\n # Look in the sub-dict for any of the dict_keys\n _values = self._db.hmget(_db_key, *dict_keys)\n for i, value in enumerate(_values):\n try:\n _values[i] = ast.literal_eval(value)\n except SyntaxError:\n pass\n except ValueError:\n pass\n result += [value for value in _values if value is not None]\n return result"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset the value of field in a hash stored at key.", "response": "def set_hash_value(self, key, field, value, pipeline=False):\n \"\"\"Set the value of field in a hash stored at key.\n\n Args:\n key (str): key (name) of the hash\n field (str): Field within the hash to set\n value: Value to set\n pipeline (bool): True, start a transaction block. 
Default false.\n\n \"\"\"\n # FIXME(BMo): new name for this function -> save_dict_value ?\n if pipeline:\n self._pipeline.hset(key, field, str(value))\n else:\n self._db.hset(key, field, str(value))"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef prepend_to_list(self, key, *value, pipeline=False):\n if pipeline:\n self._pipeline.lpush(key, *value)\n else:\n self._db.lpush(key, *value)", "response": "Prepends a new element to the beginning of the list stored at key."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nadding a new element to the end of the list stored at key.", "response": "def append_to_list(self, key, *value, pipeline=False):\n \"\"\"Add new element to the end of the list stored at key.\n\n Args:\n key (str): Key where the list is stored\n value: Value to add to the list\n pipeline (bool): True, start a transaction block. Default false.\n\n \"\"\"\n if pipeline:\n self._pipeline.rpush(key, *value)\n else:\n self._db.rpush(key, *value)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_list(self, key, pipeline=False):\n if pipeline:\n return self._pipeline.lrange(key, 0, -1)\n\n return self._db.lrange(key, 0, -1)", "response": "Get all the values in the list stored at key."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef delete(self, *names: str, pipeline=False):\n if pipeline:\n self._pipeline.delete(*names)\n else:\n self._db.delete(*names)", "response": "Delete one or more keys specified by names."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef get_event(self, event_name, event_history=None):\n if event_history is None:\n event_history = event_name + '_history'\n return self._db.rpoplpush(event_name, event_history)", "response": "Get an event from the 
database."} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nremoves specified value from the list stored at key.", "response": "def remove_from_list(self, key: str, value, count: int = 0,\n pipeline: bool = False):\n \"\"\"Remove specified value(s) from the list stored at key.\n\n Args:\n key (str): Key where the list is stored.\n value: value to remove\n count (int): Number of entries to remove, default 0 == all\n pipeline(bool): If True, start a transaction block. Default False.\n\n \"\"\"\n if pipeline:\n if redis.__version__ == '2.10.6':\n self._pipeline.lrem(name=key, value=value, num=count)\n else:\n self._pipeline.lrem(key, count, value)\n else:\n if self._db.exists(key):\n if redis.__version__ == '2.10.6':\n self._db.lrem(name=key, value=value, num=count)\n else:\n self._db.lrem(key, count, value)"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef watch(self, key, pipeline=False):\n if pipeline:\n self._pipeline.watch(key)\n else:\n self._db.watch(key)", "response": "Watch the given key for conditional execution of a transaction."} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\npost a message to a given channel.", "response": "def publish(self, channel, message, pipeline=False):\n \"\"\"Post a message to a given channel.\n\n Args:\n channel (str): Channel where the message will be published\n message (str): Message to publish\n pipeline (bool): True, start a transaction block. 
Default false.\n\n \"\"\"\n if pipeline:\n self._pipeline.publish(channel, message)\n else:\n self._db.publish(channel, message)"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget the health of all services.", "response": "def get_services_health(self) -> dict:\n \"\"\"Get the health of all services.\n\n Returns:\n dict, services id and health status\n\n \"\"\"\n # Initialise\n services_health = {}\n\n # Get Service IDs\n services_ids = self._get_services()\n\n for service_id in services_ids:\n service_name = DC.get_service_name(service_id)\n\n # Check if the current and actual replica levels are the same\n if DC.get_replicas(service_id) != \\\n DC.get_actual_replica(service_id):\n services_health[service_name] = \"Unhealthy\"\n else:\n services_health[service_name] = \"Healthy\"\n\n return services_health"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef get_overall_services_health(self) -> str:\n services_health_status = self.get_services_health()\n\n # Evaluate overall health\n health_status = all(status == \"Healthy\" for status in\n services_health_status.values())\n\n # Converting from bool to str\n if health_status:\n overall_status = \"Healthy\"\n else:\n overall_status = \"Unhealthy\"\n\n return overall_status", "response": "Get the overall health of all the services."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget the health of a service using service_id.", "response": "def get_service_health(service_id: str) -> str:\n \"\"\"Get the health of a service using service_id.\n\n Args:\n service_id\n\n Returns:\n str, health status\n\n \"\"\"\n # Check if the current and actual replica levels are the same\n if DC.get_replicas(service_id) != DC.get_actual_replica(service_id):\n health_status = \"Unhealthy\"\n else:\n health_status = \"Healthy\"\n\n return health_status"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 
code does\ndef get():\n uptime = time.time() - START_TIME\n response = dict(uptime=f'{uptime:.2f}s',\n links=dict(root='{}'.format(get_root_url())))\n\n # TODO(BM) check if we can connect to the config database ...\n # try:\n # DB.get_sub_array_ids()\n # except ConnectionError as error:\n # response['state'] = 'ERROR'\n # response['message'] = str(error)\n return response, HTTPStatus.OK", "response": "Check the health of this service"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nprocesses the received heaps and store the data in the internal buffer.", "response": "def process_buffer(self, i_block, receive_buffer):\n \"\"\"Blocking function to process the received heaps.\n This is run in an executor.\n \"\"\"\n self._log.info(\"Worker thread processing block %i\", i_block)\n time_overall0 = time.time()\n time_unpack = 0.0\n time_write = 0.0\n for i_heap, heap in enumerate(receive_buffer.result()):\n # Skip and log any incomplete heaps.\n if isinstance(heap, spead2.recv.IncompleteHeap):\n self._log.info(\"Dropped incomplete heap %i\", heap.cnt + 1)\n continue\n\n # Update the item group from this heap.\n items = self._item_group.update(heap)\n\n # Get the time and channel indices from the heap index.\n i_chan = i_heap // self._num_buffer_times\n i_time = i_heap % self._num_buffer_times\n\n if 'correlator_output_data' in items:\n vis_data = items['correlator_output_data'].value['VIS']\n if self._block is None:\n num_baselines = vis_data.shape[0]\n num_pols = vis_data[0].shape[0]\n self._block = numpy.zeros((self._num_buffer_times,\n self._num_streams,\n num_baselines),\n dtype=('c8', num_pols))\n self._block[:, :, :] = 0 # To make the copies faster.\n\n # Unpack data from the heap into the block to be processed.\n time_unpack0 = time.time()\n self._block[i_time, i_chan, :] = vis_data\n time_unpack += time.time() - time_unpack0\n\n # Check the data for debugging!\n val = self._block[i_time, i_chan, -1][-1].real\n 
self._log.debug(\"Data: %.3f\", val)\n\n if self._block is not None:\n # Process the buffered data here.\n if self._config['process_data']:\n pass\n\n # Write the buffered data to storage.\n if self._config['write_data']:\n time_write0 = time.time()\n with open(self._config['filename'], 'ab') as f:\n # Don't use pickle, it's really slow (even protocol 4)!\n numpy.save(f, self._block, allow_pickle=False)\n time_write += time.time() - time_write0\n\n # Report time taken.\n time_overall = time.time() - time_overall0\n self._log.info(\"Total processing time: %.1f ms\", 1000 * time_overall)\n self._log.info(\"Unpack was %.1f %%\", 100 * time_unpack / time_overall)\n self._log.info(\"Write was %.1f %%\", 100 * time_write / time_overall)\n if time_unpack != 0.0:\n self._log.info(\"Memory speed %.1f MB/s\",\n (self._block.nbytes * 1e-6) / time_unpack)\n if time_write != 0.0:\n self._log.info(\"Write speed %.1f MB/s\",\n (self._block.nbytes * 1e-6) / time_write)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\nasync def _run_loop(self, executor):\n loop = asyncio.get_event_loop()\n\n # Get first heap in each stream (should be empty).\n self._log.info(\"Waiting for %d streams to start...\", self._num_streams)\n for stream in self._streams:\n await stream.get(loop=loop)\n\n i_block = 0\n receive_buffer = [None, None]\n while True:\n # Process the previous buffer, if available.\n processing_tasks = None\n if i_block > 0:\n i_buffer_proc = (i_block - 1) % 2\n processing_tasks = loop.run_in_executor(\n executor, self.process_buffer, i_block - 1,\n receive_buffer[i_buffer_proc])\n\n # Set up asynchronous receive on all streams.\n i_buffer_recv = i_block % 2\n receive_tasks = []\n for stream in self._streams:\n for _ in range(self._num_buffer_times):\n # NOTE: loop.create_task() is needed to schedule these\n # in the right order!\n receive_tasks.append(\n loop.create_task(stream.get(loop=loop)))\n 
receive_buffer[i_buffer_recv] = asyncio.gather(*receive_tasks)\n\n # Ensure asynchronous receives and previous processing tasks are\n # done.\n self._log.info(\"Receiving block %i\", i_block)\n try:\n await receive_buffer[i_buffer_recv]\n self._log.info(\"Received block %i\", i_block)\n except spead2.Stopped:\n self._log.info(\"Stream Stopped\")\n break\n if processing_tasks:\n await processing_tasks\n\n i_block += 1", "response": "Main loop for the main loop."} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nclosing the buffer and return the buffer.", "response": "def close(self):\n \"\"\"Add docstring!\"\"\"\n\n # Seek to the start of the buffer\n self.seek(0)\n while True:\n\n # Copy bytes from the buffer until we reach the end of the JSON\n brace_count = 0\n quoted = False\n json_string = ''\n while True:\n\n # Read a character\n b = self.read(1)\n if b == b'':\n return\n c = b.decode()\n\n # If it is a \\ then copy the next character\n if c == '\\\\':\n json_string += c\n json_string += self.read(1).decode()\n continue\n\n # If we are inside quotes we just need to detect a\n # closing quote\n if c == '\"':\n if quoted:\n quoted = False\n else:\n quoted = True\n\n # Otherwise we count the braces\n if c == '{':\n brace_count += 1\n elif c == '}':\n brace_count -= 1\n\n # Copy the character into the JSON string\n json_string += c\n\n # If the brace count is zero we are done\n if brace_count == 0:\n break\n\n # Parse the JSON so that we can get the size of the data array\n meta = json.loads(json_string)\n n_channels = meta['data_cube']['n_channels']\n n_sub_integrations = meta['data_cube']['n_sub_integrations']\n\n # Save the JSON to a file\n f = open('{0}_{1}_{2}.json'.format(\n meta['metadata']['observation_id'],\n meta['metadata']['beam_id'],\n meta['metadata']['name']), 'w')\n\n f.write(json.dumps(meta))\n f.close()\n\n # Read the data\n data = self.read(n_channels * n_sub_integrations)\n\n # Write it to a file\n f = 
open('{0}_{1}_{2}.data'.format(\n meta['metadata']['observation_id'],\n meta['metadata']['beam_id'],\n meta['metadata']['name']), 'wb')\n f.write(data)\n f.close()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef run(self):\n self._log.info('Starting Pulsar Search Interface')\n # Instantiate a dummy authorizer for managing 'virtual' users\n authorizer = DummyAuthorizer()\n\n # Define a new user having full r/w permissions and a read-only\n # anonymous user\n authorizer.add_user(self._config['login']['user'],\n self._config['login']['psswd'], '.',\n perm=self._config['login']['perm'])\n authorizer.add_anonymous(os.getcwd())\n\n # Instantiate FTP handler class\n handler = FTPHandler\n handler.authorizer = authorizer\n handler.abstracted_fs = PulsarFileSystem\n\n # Define a customized banner (string returned when client connects)\n handler.banner = \"SKA SDP pulsar search interface.\"\n\n # Instantiate FTP server class and listen on 0.0.0.0:7878\n address = (self._config['address']['listen'],\n self._config['address']['port'])\n server = FTPServer(address, handler)\n\n # set a limit for connections\n server.max_cons = 256\n server.max_cons_per_ip = 5\n\n # start ftp server\n server.serve_forever()", "response": "Start the FTP server for pulsar search."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nreturns the workflow stage status.", "response": "def status(self) -> str:\n \"\"\"Return the workflow stage status.\"\"\"\n # As status is a modifiable property, have to reload from the db.\n self._config = self._load_config()\n return self._config.get('status')"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nsetting the workflow stage status.", "response": "def status(self, value):\n \"\"\"Set the workflow stage status.\"\"\"\n # FIXME(BM) This is currently a hack because workflow stages\n # don't each have their own db entry.\n 
pb_key = SchedulingObject.get_key(PB_KEY, self._pb_id)\n stages = DB.get_hash_value(pb_key, 'workflow_stages')\n stages = ast.literal_eval(stages)\n stages[self._index]['status'] = value\n DB.set_hash_value(pb_key, 'workflow_stages', stages)"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _load_config(self):\n pb_key = SchedulingObject.get_key(PB_KEY, self._pb_id)\n stages = DB.get_hash_value(pb_key, 'workflow_stages')\n stages = ast.literal_eval(stages)\n return stages[self._index]", "response": "Load the workflow stage config from the database."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef register_pb_devices(num_pbs: int = 100):\n tango_db = Database()\n LOG.info(\"Registering PB devices:\")\n dev_info = DbDevInfo()\n # pylint: disable=protected-access\n dev_info._class = 'ProcessingBlockDevice'\n dev_info.server = 'processing_block_ds/1'\n\n for index in range(num_pbs):\n dev_info.name = 'sip_sdp/pb/{:05d}'.format(index)\n LOG.info(\"\\t%s\", dev_info.name)\n tango_db.add_device(dev_info)", "response": "Register PBs devices.\n\n Note(BMo): Ideally we do not want to register any devices here. There\n does not seem to be a way to create a device server with no registered\n devices in Tango. 
This is (probably) because Tango devices must have been\n registered before the server starts ..."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ncreating an SBI object from the specified configuration dictionary.", "response": "def from_config(cls, config_dict: dict, schema_path: str = None):\n \"\"\"Create an SBI object from the specified configuration dict.\n\n NOTE(BM) This should really be done as a single atomic db transaction.\n\n Args:\n config_dict(dict): SBI configuration dictionary\n schema_path(str, optional): Path to the SBI config schema.\n\n \"\"\"\n # Validate the SBI config schema\n if schema_path is None:\n schema_path = join(dirname(__file__), 'schema',\n 'configure_sbi.json')\n with open(schema_path, 'r') as file:\n schema = json.loads(file.read())\n validate(config_dict, schema)\n\n # Add SBI status field\n config_dict['status'] = 'created'\n\n # Set the subarray field to None if not defined.\n if 'subarray_id' not in config_dict:\n config_dict['subarray_id'] = 'None'\n\n # Add created, and updated timestamps.\n timestamp = datetime.datetime.utcnow().isoformat()\n config_dict['created'] = timestamp\n config_dict['updated'] = timestamp\n\n # Split out the processing block data array\n pb_list = copy.deepcopy(config_dict['processing_blocks'])\n\n # Remove processing blocks from the SBI configuration.\n config_dict.pop('processing_blocks', None)\n\n # Add list of PB ids to the SBI configuration\n config_dict['processing_block_ids'] = []\n for pb in pb_list:\n config_dict['processing_block_ids'].append(pb['id'])\n\n # Add the SBI data object to the database.\n key = SchedulingObject.get_key(SBI_KEY, config_dict['id'])\n DB.save_dict(key, config_dict, hierarchical=False)\n # DB.set_hash_values(key, config_dict)\n\n # Add the SBI id to the list of active SBIs\n key = '{}:active'.format(SBI_KEY)\n DB.append_to_list(key, config_dict['id'])\n\n # Publish notification to subscribers\n sbi = SchedulingObject(SBI_KEY, 
config_dict['id'])\n sbi.set_status('created')\n\n for pb in pb_list:\n pb['sbi_id'] = config_dict['id']\n cls._add_pb(pb)\n\n return cls(config_dict['id'])"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\naborting the SBI and associated PBs.", "response": "def abort(self):\n \"\"\"Abort the SBI (and associated PBs).\"\"\"\n self.set_status('aborted')\n DB.remove_from_list('{}:active'.format(self._type), self._id)\n DB.append_to_list('{}:aborted'.format(self._type), self._id)\n sbi_pb_ids = ast.literal_eval(\n DB.get_hash_value(self._key, 'processing_block_ids'))\n\n for pb_id in sbi_pb_ids:\n pb = ProcessingBlock(pb_id)\n pb.abort()"} {"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the list of PB ids associated with the SBI.", "response": "def get_pb_ids(self) -> List[str]:\n \"\"\"Return the list of PB ids associated with the SBI.\n\n Returns:\n list, Processing block ids\n\n \"\"\"\n values = DB.get_hash_value(self._key, 'processing_block_ids')\n return ast.literal_eval(values)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef get_id(date=None, project: str = 'sip',\n instance_id: int = None) -> str:\n \"\"\"Get a SBI Identifier.\n\n Args:\n date (str or datetime.datetime, optional): UTC date of the SBI\n project (str, optional ): Project Name\n instance_id (int, optional): SBI instance identifier\n\n Returns:\n str, Scheduling Block Instance (SBI) ID.\n\n \"\"\"\n if date is None:\n date = datetime.datetime.utcnow()\n\n if isinstance(date, datetime.datetime):\n date = date.strftime('%Y%m%d')\n\n if instance_id is None:\n instance_id = randint(0, 9999)\n\n return 'SBI-{}-{}-{:04d}'.format(date, project, instance_id)", "response": "Get a SBI Identifier."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef _add_pb(pb_config: dict):\n # Add status field to the PB\n 
pb_config['status'] = 'created'\n\n # Add created and updated timestamps to the PB\n timestamp = datetime.datetime.utcnow().isoformat()\n pb_config['created'] = timestamp\n pb_config['updated'] = timestamp\n\n # set default priority, if not defined\n if 'priority' not in pb_config:\n pb_config['priority'] = 0\n\n # Retrieve the workflow definition\n SchedulingBlockInstance._update_workflow_definition(pb_config)\n\n # If needed, add resources and dependencies fields\n keys = ['resources_required', 'resources_assigned', 'dependencies']\n for key in keys:\n if key not in pb_config:\n pb_config[key] = []\n for stage in pb_config['workflow_stages']:\n if key not in stage:\n stage[key] = []\n\n # Add PB to the the database\n key = SchedulingObject.get_key(PB_KEY, pb_config['id'])\n # DB.set_hash_values(key, pb_config)\n DB.save_dict(key, pb_config, hierarchical=False)\n\n # Add to list of PB ids\n key = '{}:active'.format(PB_KEY)\n DB.append_to_list(key, pb_config['id'])\n key = '{}:active:{}'.format(PB_KEY, pb_config['type'])\n DB.append_to_list(key, pb_config['id'])\n\n # Publish an event to to notify subscribers of the new PB\n pb = SchedulingObject(PB_KEY, pb_config['id'])\n pb.set_status('created')", "response": "Add a PB to the database."} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nupdates the PB configuration workflow definition.", "response": "def _update_workflow_definition(pb_config: dict):\n \"\"\"Update the PB configuration workflow definition.\n\n Args:\n pb_config (dict): PB configuration dictionary\n\n Raises:\n RunTimeError, if the workflow definition (id, version)\n specified in the sbi_config is not known.\n\n \"\"\"\n known_workflows = get_workflows()\n workflow_id = pb_config['workflow']['id']\n workflow_version = pb_config['workflow']['version']\n if workflow_id not in known_workflows or \\\n workflow_version not in known_workflows[workflow_id]:\n raise RuntimeError(\"Unknown workflow definition: {}:{}\"\n 
.format(workflow_id, workflow_version))\n workflow = get_workflow(workflow_id, workflow_version)\n for stage in workflow['stages']:\n stage['status'] = 'none'\n pb_config['workflow_parameters'] = pb_config['workflow']['parameters']\n pb_config['workflow_id'] = pb_config['workflow']['id']\n pb_config['workflow_version'] = pb_config['workflow']['version']\n pb_config['workflow_stages'] = workflow['stages']\n pb_config.pop('workflow', None)"} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef root():\n response = {\n \"links\": {\n \"message\": \"Welcome to the SIP Processing Controller Interface\",\n \"items\": [\n {\"href\": \"{}health\".format(request.url)},\n {\"href\": \"{}subarrays\".format(request.url)},\n {\"href\": \"{}scheduling_blocks\".format(request.url)},\n {\"href\": \"{}processing_blocks\".format(request.url)}\n ]\n }\n }\n return response, HTTPStatus.OK", "response": "Placeholder root url for the PCI."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngenerates a scheduling_block id", "response": "def generate_scheduling_block_id(num_blocks, project='test'):\n \"\"\"Generate a scheduling_block id\"\"\"\n _date = strftime(\"%Y%m%d\", gmtime())\n _project = project\n for i in range(num_blocks):\n yield '{}-{}-sbi{:03d}'.format(_date, _project, i)"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nadds a scheduling block to the database returning a response object", "response": "def add_scheduling_block(config):\n \"\"\"Adds a scheduling block to the database, returning a response object\"\"\"\n try:\n DB.add_sbi(config)\n except jsonschema.ValidationError as error:\n error_dict = error.__dict__\n for key in error_dict:\n error_dict[key] = error_dict[key].__str__()\n error_response = dict(message=\"Failed to add scheduling block\",\n reason=\"JSON validation error\",\n details=error_dict)\n return error_response, 
HTTPStatus.BAD_REQUEST\n response = dict(config=config,\n message='Successfully registered scheduling block '\n 'instance with ID: {}'.format(config['id']))\n response['links'] = {\n 'self': '{}scheduling-block/{}'.format(request.url_root,\n config['id']),\n 'list': '{}'.format(request.url),\n 'home': '{}'.format(request.url_root)\n }\n return response, HTTPStatus.ACCEPTED"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef missing_db_response(func):\n @wraps(func)\n def with_exception_handling(*args, **kwargs):\n \"\"\"Wrapper to check for connection failures\"\"\"\n try:\n return func(*args, **kwargs)\n except ConnectionError as error:\n return (dict(error='Unable to connect to Configuration Db.',\n error_message=str(error),\n links=dict(root='{}'.format(get_root_url()))),\n HTTPStatus.NOT_FOUND)\n return with_exception_handling", "response": "Decorator to wrap a function to check for connection failures and return a response dict."} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nrun the workflow task.", "response": "def main():\n \"\"\"Run the workflow task.\"\"\"\n log = logging.getLogger('sip.mock_workflow_stage')\n\n if len(sys.argv) != 2:\n log.critical('Expecting JSON string as first argument!')\n return\n\n config = json.loads(sys.argv[1])\n log.info('Running mock_workflow_stage (version: %s).', __version__)\n log.info('Received configuration: %s', json.dumps(config))\n log.info('Starting task')\n\n i = 0\n start_time = time.time()\n duration = config.get('duration', 20)\n while time.time() - start_time <= duration:\n time.sleep(duration / 20)\n elapsed = time.time() - start_time\n log.info(\" %s %2i / 20 (elapsed %.2f s)\",\n config.get('message', 'Progress '),\n i + 1, elapsed)\n i += 1\n\n log.info('Task complete!')"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef 
main(sleep_length=0.1):\n log = logging.getLogger('sip.examples.log_spammer')\n\n log.info('Starting to spam log messages every %fs', sleep_length)\n counter = 0\n try:\n while True:\n log.info('Hello %06i (log_spammer: %s, sip logging: %s)',\n counter, _version.__version__, __version__)\n counter += 1\n time.sleep(sleep_length)\n except KeyboardInterrupt:\n log.info('Exiting...')", "response": "Main function for spam log messages"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef main():\n init_logging()\n\n LOG.info(\"Starting imaging-pipeline\")\n\n # Read parameters\n PARFILE = 'parameters.json'\n if len(sys.argv) > 1:\n PARFILE = sys.argv[1]\n LOG.info(\"JSON parameter file = %s\", PARFILE)\n try: \t\n with open(PARFILE, \"r\") as par_file:\n jspar = json.load(par_file) \n except AssertionError as error:\n LOG.critical('ERROR %s', error)\n return\n\n # We will use dask\n arlexecute.set_client(get_dask_Client())\n arlexecute.run(init_logging)\n\n # Import visibility list from HDF5 file\n vis_list = import_blockvisibility_from_hdf5(\n '%s/%s' % (RESULTS_DIR, jspar[\"files\"][\"vis_list\"]))\n\n # Now read the BlockVisibilities constructed using a model drawn from GLEAM\n predicted_vislist = import_blockvisibility_from_hdf5(\n '%s/%s' % (RESULTS_DIR, jspar[\"files\"][\"predicted_vis_list\"]))\n corrupted_vislist = import_blockvisibility_from_hdf5(\n '%s/%s' % (RESULTS_DIR, jspar[\"files\"][\"corrupted_vis_list\"]))\n\n# Reproduce parameters from the visibility data\n ntimes = vis_list[0].nvis\n\n phasecentre = vis_list[0].phasecentre\n print(phasecentre)\n polframe = vis_list[0].polarisation_frame.type\n LOG.info(\"Polarisation Frame of vis_list: %s\", polframe)\n\n wprojection_planes = jspar[\"advice\"][\"wprojection_planes\"]\n guard_band_image = jspar[\"advice\"][\"guard_band_image\"]\n delA = jspar[\"advice\"][\"delA\"]\n advice_low = advise_wide_field(vis_list[0], guard_band_image=guard_band_image,\n delA=delA,\n 
wprojection_planes=wprojection_planes)\n advice_high = advise_wide_field(vis_list[-1], guard_band_image=guard_band_image,\n delA=delA,\n wprojection_planes=wprojection_planes)\n\n vis_slices = advice_low['vis_slices']\n npixel = advice_high['npixels2']\n cellsize = min(advice_low['cellsize'], advice_high['cellsize'])\n \n# Recovering frequencies\n fstart = vis_list[0].frequency\n fend = vis_list[-1].frequency\n num_freq_win = len(vis_list)\n frequency = numpy.linspace(fstart, fend, num_freq_win)\n\n# Recovering bandwidths\n channel_bandwidth = numpy.array(num_freq_win * [vis_list[1].frequency - vis_list[0].frequency])\n \n # Get the LSM. This is currently blank.\n model_list = [\n arlexecute.execute(create_image_from_visibility)(\n vis_list[f],\n npixel=npixel,\n frequency=[frequency[f]],\n channel_bandwidth=[channel_bandwidth[f]],\n cellsize=cellsize,\n phasecentre=phasecentre,\n polarisation_frame=PolarisationFrame(polframe))\n for f, freq in enumerate(frequency)\n ]\n # future_predicted_vislist = arlexecute.scatter(predicted_vislist)\n\n # Create and execute graphs to make the dirty image and PSF\n # LOG.info('About to run invert to get dirty image')\n # dirty_list = invert_component(future_predicted_vislist, model_list,\n # context='wstack',\n # vis_slices=vis_slices, dopsf=False)\n # dirty_list = arlexecute.compute(dirty_list, sync=True)\n\n # LOG.info('About to run invert to get PSF')\n # psf_list = invert_component(future_predicted_vislist, model_list,\n # context='wstack',\n # vis_slices=vis_slices, dopsf=True)\n # psf_list = arlexecute.compute(psf_list, sync=True)\n\n # Now deconvolve using msclean\n # LOG.info('About to run deconvolve')\n # deconvolve_list, _ = deconvolve_component(\n # dirty_list, psf_list,\n # model_imagelist=model_list,\n # deconvolve_facets=8,\n # deconvolve_overlap=16,\n # deconvolve_taper='tukey',\n # scales=[0, 3, 10],\n # algorithm='msclean',\n # niter=1000,\n # fractional_threshold=0.1,\n # threshold=0.1,\n # gain=0.1,\n # 
psf_support=64)\n # deconvolved = arlexecute.compute(deconvolve_list, sync=True)\n\n LOG.info('About to run continuum imaging')\n continuum_imaging_list = continuum_imaging_arlexecute(\n predicted_vislist,\n model_imagelist\t\t= model_list,\n context\t\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"context\"] , #'wstack',\n vis_slices\t\t= vis_slices,\n scales\t\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"scales\"], #[0, 3, 10],\n algorithm\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"algorithm\"], #'mmclean',\n nmoment\t\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"nmoment\"], #3,\n niter\t\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"niter\"],\t\t #1000,\n fractional_threshold\t= jspar[\"processing\"][\"continuum_imaging\"][\"fractional_threshold\"], #0.1,\n threshold\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"threshold\"], \t #0.1,\n nmajor\t\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"nmajor\"], \t\t #5,\n gain\t\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"gain\"], #0.25,\n deconvolve_facets\t= jspar[\"processing\"][\"continuum_imaging\"][\"deconvolve_facets\"], #8,\n deconvolve_overlap\t= jspar[\"processing\"][\"continuum_imaging\"][\"deconvolve_overlap\"], #16,\n deconvolve_taper\t= jspar[\"processing\"][\"continuum_imaging\"][\"deconvolve_taper\"], #'tukey',\n psf_support\t\t= jspar[\"processing\"][\"continuum_imaging\"][\"psf_support\"] ) #64)\n result = arlexecute.compute(continuum_imaging_list, sync=True)\n deconvolved = result[0][0]\n residual = result[1][0]\n restored = result[2][0]\n\n print(qa_image(deconvolved, context='Clean image - no selfcal'))\n print(qa_image(restored, context='Restored clean image - no selfcal'))\n export_image_to_fits(restored,\n '%s/%s' % (RESULTS_DIR, jspar[\"files\"][\"continuum_imaging_restored\"]))\n\n print(qa_image(residual[0], context='Residual clean image - no selfcal'))\n export_image_to_fits(residual[0],\n '%s/%s' % (RESULTS_DIR, 
jspar[\"files\"][\"continuum_imaging_residual\"]))\n\n controls = create_calibration_controls()\n\n controls['T']['first_selfcal'] = jspar[\"processing\"][\"controls\"][\"T\"][\"first_selfcal\"]\n controls['G']['first_selfcal'] = jspar[\"processing\"][\"controls\"][\"G\"][\"first_selfcal\"]\n controls['B']['first_selfcal'] = jspar[\"processing\"][\"controls\"][\"B\"][\"first_selfcal\"]\n\n controls['T']['timescale'] = jspar[\"processing\"][\"controls\"][\"T\"][\"timescale\"]\n controls['G']['timescale'] = jspar[\"processing\"][\"controls\"][\"G\"][\"timescale\"]\n controls['B']['timescale'] = jspar[\"processing\"][\"controls\"][\"B\"][\"timescale\"]\n\n PP.pprint(controls)\n\n future_corrupted_vislist = arlexecute.scatter(corrupted_vislist)\n ical_list = ical_arlexecute(future_corrupted_vislist,\n model_imagelist\t\t= model_list,\n context\t\t\t= jspar[\"processing\"][\"ical\"][\"context\"] , \t\t#'wstack',\n calibration_context\t= jspar[\"processing\"][\"ical\"][\"calibration_context\"] , \t#'TG',\n controls\t\t= controls,\n vis_slices\t\t= ntimes,\n scales\t\t\t= jspar[\"processing\"][\"ical\"][\"scales\"], \t#[0, 3, 10],\n timeslice\t\t= jspar[\"processing\"][\"ical\"][\"timeslice\"], \t#'auto',\n algorithm\t\t= jspar[\"processing\"][\"ical\"][\"algorithm\"], \t#'mmclean',\n nmoment\t\t\t= jspar[\"processing\"][\"ical\"][\"nmoment\"], \t#3,\n niter\t\t\t= jspar[\"processing\"][\"ical\"][\"niter\"],\t\t \t#1000,\n fractional_threshold\t= jspar[\"processing\"][\"ical\"][\"fractional_threshold\"], \t#0.1,\n threshold\t\t= jspar[\"processing\"][\"ical\"][\"threshold\"], \t \t#0.1,\n nmajor\t\t\t= jspar[\"processing\"][\"ical\"][\"nmajor\"], \t\t#5,\n gain\t\t\t= jspar[\"processing\"][\"ical\"][\"gain\"], \t#0.25,\n deconvolve_facets\t= jspar[\"processing\"][\"ical\"][\"deconvolve_facets\"], \t#8,\n deconvolve_overlap\t= jspar[\"processing\"][\"ical\"][\"deconvolve_overlap\"], \t#16,\n deconvolve_taper\t= jspar[\"processing\"][\"ical\"][\"deconvolve_taper\"], 
\t#'tukey',\n global_solution\t\t= jspar[\"processing\"][\"ical\"][\"global_solution\"], \t#False,\n do_selfcal\t\t= jspar[\"processing\"][\"ical\"][\"do_selfcal\"], \t#True,\n psf_support\t\t= jspar[\"processing\"][\"ical\"][\"psf_support\"]) \t#64\n\n\n LOG.info('About to run ical')\n result = arlexecute.compute(ical_list, sync=True)\n deconvolved = result[0][0]\n residual = result[1][0]\n restored = result[2][0]\n\n print(qa_image(deconvolved, context='Clean image'))\n print(qa_image(restored, context='Restored clean image'))\n export_image_to_fits(restored, '%s/%s' % (RESULTS_DIR, jspar[\"files\"][\"ical_restored\"]))\n\n print(qa_image(residual[0], context='Residual clean image'))\n export_image_to_fits(residual[0], '%s/%s' % (RESULTS_DIR, jspar[\"files\"][\"ical_residual\"]))\n\n arlexecute.close()", "response": "Main function for the imaging - pipeline."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsubbing array detail resource. This method will list scheduling blocks and processing blocks in the specified sub-array.", "response": "def get(sub_array_id):\n \"\"\"Sub array detail resource.\n\n This method will list scheduling blocks and processing blocks\n in the specified sub-array.\n \"\"\"\n if not re.match(r'^subarray-0[0-9]|subarray-1[0-5]$', sub_array_id):\n response = dict(error='Invalid sub-array ID specified \"{}\" does not '\n 'match sub-array ID naming convention '\n '(ie. subarray-[00-15]).'.\n format(sub_array_id))\n return response, HTTPStatus.BAD_REQUEST\n if sub_array_id not in DB.get_sub_array_ids():\n response = dict(error='Sub-array \"{}\" does not currently exist. 
'\n 'Known sub-arrays = {}'\n .format(sub_array_id, DB.get_sub_array_ids()))\n return response, HTTPStatus.NOT_FOUND\n\n block_ids = DB.get_sub_array_sbi_ids(sub_array_id)\n _blocks = [b for b in DB.get_block_details(block_ids)]\n response = dict(scheduling_blocks=[])\n _url = get_root_url()\n for block in _blocks:\n block['links'] = {\n 'self': '{}/scheduling-block/{}'.format(_url, block['id'])\n }\n response['scheduling_blocks'].append(block)\n response['links'] = {\n 'self': '{}'.format(request.url),\n 'list': '{}/sub-arrays'.format(_url),\n 'home': '{}'.format(_url),\n }\n return response, HTTPStatus.OK"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(sub_array_id):\n config = request.data\n config['sub_array_id'] = 'subarray-{:02d}'.format(sub_array_id)\n return add_scheduling_block(config)", "response": "Create a SDP Scheduling Block instance with SDP."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn the list of scheduling blocks instances associated with the sub_array_id", "response": "def get_scheduling_block(sub_array_id, block_id):\n \"\"\"Return the list of scheduling blocks instances associated with the sub\n array\"\"\"\n block_ids = DB.get_sub_array_sbi_ids(sub_array_id)\n if block_id in block_ids:\n block = DB.get_block_details([block_id]).__next__()\n return block, HTTPStatus.OK\n\n return dict(error=\"unknown id\"), HTTPStatus.NOT_FOUND"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ngetting list of data files to add to the package.", "response": "def package_files(directory):\n \"\"\"Get list of data files to add to the package.\"\"\"\n paths = []\n for (path, _, file_names) in walk(directory):\n for filename in file_names:\n paths.append(join('..', path, filename))\n return paths"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef register_master():\n tango_db = 
Database()\n device = \"sip_sdp/elt/master\"\n device_info = DbDevInfo()\n device_info._class = \"SDPMasterDevice\"\n device_info.server = \"sdp_master_ds/1\"\n device_info.name = device\n devices = tango_db.get_device_name(device_info.server, device_info._class)\n if device not in devices:\n LOG.info('Registering device \"%s\" with device server \"%s\"',\n device_info.name, device_info.server)\n tango_db.add_device(device_info)", "response": "Register the SDP Master device."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef main(args=None, **kwargs):\n LOG.info('Starting %s', __service_id__)\n return run([SDPMasterDevice], verbose=True, msg_stream=sys.stdout,\n args=args, **kwargs)", "response": "Run the Tango SDP Master device server."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef parse_command_line():\n parser = argparse.ArgumentParser(\n prog='csp_pulsar_sender',\n description='Send fake pulsar data using ftp protocol.')\n parser.add_argument('config_file', type=argparse.FileType('r'),\n help='JSON configuration file.')\n parser.add_argument('-v', '--verbose', help='Enable verbose messages.',\n action='store_true')\n parser.add_argument('-p', '--print_settings', help='Print settings file.',\n action='store_true')\n return parser.parse_args()", "response": "Parse command line arguments."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef _init_log(level=logging.DEBUG):\n log = logging.getLogger(__file__)\n log.setLevel(level)\n handler = logging.StreamHandler(sys.stdout)\n handler.setLevel(level)\n formatter = logging.Formatter('%(asctime)s: %(message)s',\n '%Y/%m/%d-%H:%M:%S')\n handler.setFormatter(formatter)\n log.addHandler(handler)\n return log", "response": "Initialise the logging object."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the 
following Python 3 function does\ndef wrap_subscribe(transport_layer, channel, callback, *args, **kwargs):\n\n return _wrap_subscription(\n transport_layer, transport_layer.subscribe, channel, callback, *args, **kwargs\n )", "response": "Wrap a subscribe call in the transport layer."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef wrap_subscribe_broadcast(transport_layer, channel, callback, *args, **kwargs):\n\n return _wrap_subscription(\n transport_layer,\n transport_layer.subscribe_broadcast,\n channel,\n callback,\n *args,\n **kwargs\n )", "response": "Wrap subscribe_broadcast in a new ID\n ."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nstarting the transport layer.", "response": "def start_transport(self):\n \"\"\"If a transport object has been defined then connect it now.\"\"\"\n if self.transport:\n if self.transport.connect():\n self.log.debug(\"Service successfully connected to transport layer\")\n else:\n raise RuntimeError(\"Service could not connect to transport layer\")\n # direct all transport callbacks into the main queue\n self._transport_interceptor_counter = itertools.count()\n self.transport.subscription_callback_set_intercept(\n self._transport_interceptor\n )\n else:\n self.log.debug(\"No transport layer defined for service. 
Skipping.\")"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ntake a callback function and returns a function that takes headers and messages and places them on the main service queue.", "response": "def _transport_interceptor(self, callback):\n \"\"\"Takes a callback function and returns a function that takes headers and\n messages and places them on the main service queue.\"\"\"\n\n def add_item_to_queue(header, message):\n queue_item = (\n Priority.TRANSPORT,\n next(\n self._transport_interceptor_counter\n ), # insertion sequence to keep messages in order\n (callback, header, message),\n )\n self.__queue.put(\n queue_item\n ) # Block incoming transport until insertion completes\n\n return add_item_to_queue"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\ninjecting pipes connecting the service to the frontend. Two arguments are supported.", "response": "def connect(self, frontend=None, commands=None):\n \"\"\"Inject pipes connecting the service to the frontend. Two arguments are\n supported: frontend= for messages from the service to the frontend,\n and commands= for messages from the frontend to the service.\n The injection should happen before the service is started, otherwise the\n underlying file descriptor references may not be handled correctly.\"\"\"\n if frontend:\n self.__pipe_frontend = frontend\n self.__send_service_status_to_frontend()\n if commands:\n self.__pipe_commands = commands"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef extend_log(self, field, value):\n self.__log_extensions.append((field, value))\n try:\n yield\n except Exception as e:\n setattr(e, \"workflows_log_\" + field, value)\n raise\n finally:\n self.__log_extensions.remove((field, value))", "response": "A context wherein a specified extra field in log messages is populated\n with a fixed value. 
This affects all log messages within the context."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nfunction to continuously retrieve data from the frontend. Commands are sent to the central priority queue. If the pipe from the frontend is closed the service shutdown is initiated. Check every second if service has shut down, then terminate. This function is run by a separate daemon thread, which is started by the __start_command_queue_listener function.", "response": "def __command_queue_listener(self):\n \"\"\"Function to continuously retrieve data from the frontend. Commands are\n sent to the central priority queue. If the pipe from the frontend is\n closed the service shutdown is initiated. Check every second if service\n has shut down, then terminate.\n This function is run by a separate daemon thread, which is started by\n the __start_command_queue_listener function.\n \"\"\"\n self.log.debug(\"Queue listener thread started\")\n counter = itertools.count() # insertion sequence to keep messages in order\n while not self.__shutdown:\n if self.__pipe_commands.poll(1):\n try:\n message = self.__pipe_commands.recv()\n except EOFError:\n # Pipe was closed by frontend. Shut down service.\n self.__shutdown = True\n self.log.error(\n \"Pipe closed by frontend, shutting down service\", exc_info=True\n )\n break\n queue_item = (Priority.COMMAND, next(counter), message)\n try:\n self.__queue.put(queue_item, True, 60)\n except queue.Full:\n # If the message can't be stored within 60 seconds then the service is\n # operating outside normal parameters. Try to shut it down.\n self.__shutdown = True\n self.log.error(\n \"Write to service priority queue failed, shutting down service\",\n exc_info=True,\n )\n break\n self.log.debug(\"Queue listener thread terminating\")"} {"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nstarts the function __command_queue_listener in a separate thread. 
This function is continuously listens to the frontend connected to the frontend.", "response": "def __start_command_queue_listener(self):\n \"\"\"Start the function __command_queue_listener in a separate thread. This\n function continuously listens to the pipe connected to the frontend.\n \"\"\"\n thread_function = self.__command_queue_listener\n\n class QueueListenerThread(threading.Thread):\n def run(qltself):\n thread_function()\n\n assert not hasattr(self, \"__queue_listener_thread\")\n self.log.debug(\"Starting queue listener thread\")\n self.__queue_listener_thread = QueueListenerThread()\n self.__queue_listener_thread.daemon = True\n self.__queue_listener_thread.name = \"Command Queue Listener\"\n self.__queue_listener_thread.start()"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _log_send(self, logrecord):\n for field, value in self.__log_extensions:\n setattr(logrecord, field, value)\n self.__send_to_frontend({\"band\": \"log\", \"payload\": logrecord})", "response": "Forward log records to the frontend."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nregister a callback function that is run when idling for a given time span.", "response": "def _register_idle(self, idle_time, callback):\n \"\"\"Register a callback function that is run when idling for a given\n time span (in seconds).\"\"\"\n self._idle_callback = callback\n self._idle_time = idle_time"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nupdate the internal status of the service object and notify frontend.", "response": "def __update_service_status(self, statuscode):\n \"\"\"Set the internal status of the service object, and notify frontend.\"\"\"\n if self.__service_status != statuscode:\n self.__service_status = statuscode\n self.__send_service_status_to_frontend()"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nsetting a new name for this service 
and notify the frontend accordingly.", "response": "def _set_name(self, name):\n \"\"\"Set a new name for this service, and notify the frontend accordingly.\"\"\"\n self._service_name = name\n self.__send_to_frontend({\"band\": \"set_name\", \"name\": self._service_name})"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ninitializing the logging for the service process.", "response": "def initialize_logging(self):\n \"\"\"Reset the logging for the service process. All logged messages are\n forwarded to the frontend. If any filtering is desired, then this must\n take place on the service side.\"\"\"\n # Reset logging to pass logrecords into the queue to the frontend only.\n # Existing handlers may be broken as they were copied into a new process,\n # so should be discarded.\n for loggername in [None] + list(logging.Logger.manager.loggerDict.keys()):\n logger = logging.getLogger(loggername)\n while logger.handlers:\n logger.removeHandler(logger.handlers[0])\n\n # Re-enable logging to console\n root_logger = logging.getLogger()\n\n # By default pass all warning (and higher) level messages to the frontend\n root_logger.setLevel(logging.WARN)\n root_logger.addHandler(workflows.logging.CallbackHandler(self._log_send))\n\n # Set up the service logger and pass all info (and higher) level messages\n # (or other level if set differently)\n self.log = logging.getLogger(self._logger_name)\n\n if self.start_kwargs.get(\"verbose_log\"):\n self.log_verbosity = logging.DEBUG\n self.log.setLevel(self.log_verbosity)\n\n # Additionally, write all critical messages directly to console\n console = logging.StreamHandler()\n console.setLevel(logging.CRITICAL)\n root_logger.addHandler(console)"} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nstarts listening to command queue process commands in main loop", "response": "def start(self, **kwargs):\n \"\"\"Start listening to command queue, process commands in main 
loop,\n set status, etc...\n This function is most likely called by the frontend in a separate\n process.\"\"\"\n\n # Keep a copy of keyword arguments for use in subclasses\n self.start_kwargs.update(kwargs)\n try:\n self.initialize_logging()\n\n self.__update_service_status(self.SERVICE_STATUS_STARTING)\n\n self.start_transport()\n\n self.initializing()\n self._register(\"command\", self.__process_command)\n\n if self.__pipe_commands is None:\n # can only listen to commands if command queue is defined\n self.__shutdown = True\n else:\n # start listening to command queue in separate thread\n self.__start_command_queue_listener()\n\n while not self.__shutdown: # main loop\n self.__update_service_status(self.SERVICE_STATUS_IDLE)\n\n if self._idle_time is None:\n task = self.__queue.get()\n else:\n try:\n task = self.__queue.get(True, self._idle_time)\n except queue.Empty:\n self.__update_service_status(self.SERVICE_STATUS_TIMER)\n if self._idle_callback:\n self._idle_callback()\n continue\n\n self.__update_service_status(self.SERVICE_STATUS_PROCESSING)\n\n if task[0] == Priority.COMMAND:\n message = task[2]\n if message and \"band\" in message:\n processor = self.__callback_register.get(message[\"band\"])\n if processor is None:\n self.log.warning(\n \"received message on unregistered band\\n%s\", message\n )\n else:\n processor(message.get(\"payload\"))\n else:\n self.log.warning(\n \"received message without band information\\n%s\", message\n )\n elif task[0] == Priority.TRANSPORT:\n callback, header, message = task[2]\n callback(header, message)\n else:\n self.log.warning(\"Unknown item on main service queue\\n%r\", task)\n\n except KeyboardInterrupt:\n self.log.warning(\"Ctrl+C detected. 
Shutting down.\")\n\n except Exception as e:\n self.process_uncaught_exception(e)\n self.__update_service_status(self.SERVICE_STATUS_ERROR)\n self.in_shutdown()\n return\n\n try:\n self.__update_service_status(self.SERVICE_STATUS_SHUTDOWN)\n self.in_shutdown()\n self.__update_service_status(self.SERVICE_STATUS_END)\n except Exception as e:\n self.process_uncaught_exception(e)\n self.__update_service_status(self.SERVICE_STATUS_ERROR)"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef process_uncaught_exception(self, e):\n # Add information about the actual exception to the log message\n # This includes the file, line and piece of code causing the exception.\n # exc_info=True adds the full stack trace to the log message.\n exc_file_fullpath, exc_file, exc_lineno, exc_func, exc_line = (\n workflows.logging.get_exception_source()\n )\n added_information = {\n \"workflows_exc_lineno\": exc_lineno,\n \"workflows_exc_funcName\": exc_func,\n \"workflows_exc_line\": exc_line,\n \"workflows_exc_pathname\": exc_file_fullpath,\n \"workflows_exc_filename\": exc_file,\n }\n for field in filter(lambda x: x.startswith(\"workflows_log_\"), dir(e)):\n added_information[field[14:]] = getattr(e, field, None)\n self.log.critical(\n \"Unhandled service exception: %s\", e, exc_info=True, extra=added_information\n )", "response": "This is called when an uncaught exception occurs in the service. 
It is called to handle uncaught exceptions from the service."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef connect(self, *args, **kwargs):\n if self.use_ssl:\n factory = irc.connection.Factory(wrapper=ssl.wrap_socket)\n else:\n factory = irc.connection.Factory()\n self.connection.connect(server=self.server,\n port=self.port,\n nickname=self.nickname,\n connect_factory=factory,\n password=self.password,\n username=self.username,\n ircname=self.ircname)", "response": "Connect to a server."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef on_pubmsg(self, c, e):\n text = e.arguments[0]\n metadata = self.set_metadata(e)\n metadata['is_private_message'] = False\n message = Message(text=text, metadata=metadata).__dict__\n self.baseplate.tell(message)", "response": "This function runs when the bot receives a public message."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef on_privmsg(self, c, e):\n text = e.arguments[0]\n logger.debug('{0!s}'.format(e.source))\n metadata = self.set_metadata(e)\n metadata['is_private_message'] = True\n message = Message(text=text, metadata=metadata).__dict__\n self.baseplate.tell(message)", "response": "This function runs when the bot receives a private message."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef on_welcome(self, c, e):\n self.backoff = 1 # Assume we had a good connection. 
Reset backoff.\n if self.nickserv:\n if Utilities.isNotEmpty(self.nickserv_pass):\n self.identify(c, e, self.nickserv_pass)\n time.sleep(3) # Make sure Nickserv really sees us\n else:\n logger.error('If nickserv is enabled, you must supply'\n ' a password')\n\n if self.nickserv is False and self.nickserv_pass is not None:\n logger.warn('It appears you provided a nickserv password but '\n 'did not enable nickserv authentication')\n\n for channel in self.my_channels:\n logger.debug('Attempting to join {0!s}'.format(channel))\n c.join(channel)", "response": "This function runs when the bot connects to the IRC server."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nstarting the bot in a thread.", "response": "def run(self):\n \"\"\"\n Run the bot in a thread.\n\n Implementing the IRC listener as a thread allows it to\n listen without blocking IRCLego's ability to listen\n as a pykka actor.\n\n :return: None\n \"\"\"\n self._connect()\n super(irc.bot.SingleServerIRCBot, self).start()"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef handle(self, message):\n '''\n Attempts to send a message to the specified destination in IRC\n Extends Legobot.Lego.handle()\n\n Args:\n message (Legobot.Message): message w/ metadata to send.\n '''\n\n logger.debug(message)\n if Utilities.isNotEmpty(message['metadata']['opts']):\n target = message['metadata']['opts']['target']\n\n for split_line in Utilities.tokenize(message['text']):\n for truncated_line in Utilities.truncate(split_line):\n self.botThread.connection.privmsg(target, truncated_line)\n # Delay to prevent floods\n time.sleep(0.25)", "response": "Handles a message from the specified destination in IRC."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nchecking config data consistency and set self. 
sparselib to umfpack if necessary", "response": "def check(self):\n \"\"\"\n Check config data consistency\n\n Returns\n -------\n\n \"\"\"\n if self.sparselib not in self.sparselib_alt:\n logger.warning(\"Invalid sparse library <{}>\".format(self.sparselib))\n self.sparselib = 'umfpack'\n\n if self.sparselib == 'klu' and not KLU:\n logger.info(\"Optional package \\\"cvxoptklu\\\" available for speed up\")\n self.sparselib = 'umfpack'\n\n return True"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nchecking time and apply faults", "response": "def apply(self, actual_time):\n \"\"\"Check time and apply faults\"\"\"\n if self.time != actual_time:\n self.time = actual_time\n else:\n return\n\n for i in range(self.n):\n if self.tf[i] == self.time:\n logger.info(\n ' <Fault> Applying fault on Bus <{}> at t={}.'.format(\n self.bus[i], self.tf[i]))\n self.u[i] = 1\n self.active += 1\n self.angle0 = self.system.dae.y[self.system.Bus.a]\n self.volt0 = self.system.dae.y[self.system.Bus.n:]\n self.system.dae.factorize = True\n\n elif self.tc[i] == self.time:\n logger.info(\n ' <Fault> Clearing fault on Bus <{}> at t={}.'.format(\n self.bus[i], self.tc[i]))\n self.u[i] = 0\n self.active -= 1\n self.system.dae.y[self.system.Bus.n:] = self.volt0\n # self.system.dae.y[self.a] = self.anglepre\n self.system.dae.factorize = True"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef consume_message(self, header, message):\n logmessage = {\n \"time\": (time.time() % 1000) * 1000,\n \"header\": \"\",\n \"message\": message,\n }\n if header:\n logmessage[\"header\"] = (\n json.dumps(header, indent=2) + \"\\n\" + \"----------------\" + \"\\n\"\n )\n if isinstance(message, dict):\n logmessage[\"message\"] = (\n json.dumps(message, indent=2) + \"\\n\" + \"----------------\" + \"\\n\"\n )\n\n print(\"=== Consume ====\\n{header}{message}\".format(**logmessage))\n self.log.info(\"Received message 
@{time}\".format(**logmessage))\n self.log.debug(\n \"Received message @{time}\\n{header}{message}\".format(**logmessage)\n )\n time.sleep(0.1)", "response": "Consume a message from the cache"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef on_receive(self, message):\n if self.log_file is not None and message['should_log']:\n message_copy = Message(message['text'],\n Metadata(None).__dict__,\n message['should_log']).__dict__\n with open(self.log_file, mode='w') as f:\n f.write(json.dumps(message_copy))\n logger.info(message['metadata']['source'])\n if self.listening_for(message):\n self_thread = self.HandlerThread(self.handle, message)\n self_thread.start()\n self.cleanup()\n for child in self.children:\n child.tell(message)", "response": "This function is called when a message is received from the Lego."} {"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncleans up finished children.", "response": "def cleanup(self):\n \"\"\"\n Clean up finished children.\n\n :return: None\n \"\"\"\n self.lock.acquire()\n logger.debug('Acquired lock in cleanup for ' + str(self))\n self.children = [child for child in self.children if child.is_alive()]\n self.lock.release()"} {"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ninitializes and keep track of a child.", "response": "def add_child(self, child_type, *args, **kwargs):\n \"\"\"\n Initialize and keep track of a child.\n\n :param child_type: a class inheriting from Lego to initialize \\\n an instance of\n :param args: arguments for initializing the child\n :param kwargs: keyword arguments for initializing the child\n :return:\n \"\"\"\n try:\n baseplate = kwargs['baseplate']\n except:\n if self.baseplate is None:\n baseplate = self.actor_ref\n else:\n baseplate = self.baseplate\n try:\n lock = kwargs['lock']\n except:\n lock = self.lock\n child = child_type.start(baseplate, lock, *args, 
**kwargs)\n self.children.append(child)"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef reply(self, message, text, opts=None):\n metadata = Metadata(source=self.actor_urn,\n dest=message['metadata']['source']).__dict__\n metadata['opts'] = opts\n message = Message(text=text, metadata=metadata,\n should_log=message['should_log']).__dict__\n dest_actor = ActorRegistry.get_by_urn(message['metadata']['dest'])\n if dest_actor is not None:\n dest_actor.tell(message)\n else:\n raise(\"Tried to send message to nonexistent actor\")", "response": "Reply to the sender of the provided message with a message containing the provided text."} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef build_reply_opts(self, message):\n try:\n source = message['metadata']['source_channel']\n thread = message['metadata'].get('thread_ts')\n opts = {'target': source, 'thread': thread}\n except LookupError:\n source = None\n opts = None\n logger.error(\"Could not identify source from message:{}\\n\"\n .format(str(message)))\n return opts", "response": "Builds the reply options for a specific aaterange."} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef alter(data, system):\n device = data[0]\n action = data[1]\n if data[2] == '*':\n data[2] = '.*'\n regex = re.compile(data[2])\n prop = data[3]\n value = float(data[4])\n if action == 'MUL':\n for item in range(system.__dict__[device].n):\n if regex.search(system.__dict__[device].name[item]):\n system.__dict__[device].__dict__[prop][item] *= value\n elif action == 'REP':\n for item in range(system.__dict__[device].n):\n if regex.search(system.__dict__[device].name[item]):\n system.__dict__[device].__dict__[prop][item] = value\n elif action == 'DIV':\n if not value:\n return\n for item in range(system.__dict__[device].n):\n if 
regex.search(system.__dict__[device].name[item]):\n system.__dict__[device].__dict__[prop][item] /= value\n elif action == 'SUM':\n for item in range(system.__dict__[device].n):\n if regex.search(system.__dict__[device].name[item]):\n system.__dict__[device].__dict__[prop][item] += value\n elif action == 'SUB':\n for item in range(system.__dict__[device].n):\n if regex.search(system.__dict__[device].name[item]):\n system.__dict__[device].__dict__[prop][item] -= value\n elif action == 'POW':\n for item in range(system.__dict__[device].n):\n if regex.search(system.__dict__[device].name[item]):\n system.__dict__[device].__dict__[prop][item] **= value\n else:\n print('ALTER action <%s> is not defined', action)", "response": "Alter data in dm format devices"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef read(file, system, header=True):\n retval = True\n fid = open(file, 'r')\n sep = re.compile(r'\\s*,\\s*')\n comment = re.compile(r'^#\\s*')\n equal = re.compile(r'\\s*=\\s*')\n math = re.compile(r'[*/+-]')\n double = re.compile(r'[+-]? 
*(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[eE][+-]?\\d+)?')\n\n # parse data\n while True:\n line = fid.readline()\n if not line:\n break\n line = line.replace('\\n', '')\n line = line.strip()\n if not line:\n continue\n if comment.search(line):\n continue\n # span multiple line\n while line.endswith(',') or line.endswith(';'):\n newline = fid.readline()\n line = line.replace('\\n', '')\n newline = newline.strip()\n if not newline:\n continue\n if comment.search(newline):\n continue\n line += ' ' + newline\n data = sep.split(line)\n device = data.pop(0)\n device = device.strip()\n if device == 'ALTER':\n alter(data, system)\n continue\n if device == 'INCLUDE':\n logger.debug('Parsing include file <{}>'.format(data[0]))\n newpath = data[0]\n if not os.path.isfile(newpath):\n newpath = os.path.join(system.files.path, data[0])\n if not os.path.isfile(newpath):\n raise FileNotFoundError(\n 'Unable to locate file in {}'.format(newpath))\n read(newpath, system, header=False) # recursive call\n logger.debug('Parsing of include file <{}> completed.'.format(\n data[0]))\n continue\n kwargs = {}\n for item in data:\n pair = equal.split(item)\n key = pair[0].strip()\n value = pair[1].strip()\n if value.startswith('\"'):\n value = value[1:-1]\n elif value.startswith('['):\n array = value[1:-1].split(';')\n if math.search(value): # execute simple operations\n value = list(map(lambda x: eval(x), array))\n else:\n value = list(map(lambda x: float(x), array))\n elif double.search(value):\n if math.search(value): # execute simple operations\n value = eval(value)\n else:\n value = float(value)\n elif value == 'True':\n value = True\n elif value == 'False':\n value = False\n else:\n value = int(value)\n kwargs[key] = value\n index = kwargs.pop('idx', None)\n namex = kwargs.pop('name', None)\n try:\n system.__dict__[device].elem_add(idx=index, name=namex, **kwargs)\n except KeyError:\n logger.error(\n 'Error adding device {:s} to powersystem object.'.format(\n device))\n logger.debug(\n 'Make 
sure you have added the jit models in __init__.py'\n )\n\n fid.close()\n return retval", "response": "Read a dm format file and elem_add to system"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef write(file, system):\n\n # TODO: Check for bugs!!!\n\n out = list()\n out.append('# DOME format version 1.0')\n ppl = 7 # parameter per line\n retval = True\n dev_list = sorted(system.devman.devices)\n for dev in dev_list:\n model = system.__dict__[dev]\n if not model.n:\n continue\n\n out.append('')\n header = dev + ', '\n space = ' ' * (len(dev) + 2)\n keys = list(model._data.keys())\n keys.extend(['name', 'idx'])\n keys = sorted(keys)\n\n # remove non-existent keys\n for key in keys:\n if key not in model.__dict__.keys():\n keys.pop(key)\n\n nline = int(ceil(len(keys) / ppl))\n nelement = model.n\n vals = [''] * len(keys)\n\n # for each element, read values\n for elem in range(nelement):\n for idx, key in enumerate(keys):\n if model._flags['sysbase'] and key in model._store.keys():\n val = model._store[key][elem]\n else:\n val = model.__dict__[key][elem]\n\n if isinstance(val, float):\n val = round(val, 5)\n elif isinstance(val, str):\n val = '\"{}\"'.format(val)\n elif isinstance(val, list):\n val = list(val)\n val = '; '.join(str(i) for i in val)\n val = '[{}]'.format(val)\n elif val is None:\n val = 0\n vals[idx] = val\n\n pair = []\n for key, val in zip(keys, vals):\n pair.append('{} = {}'.format(key, val))\n\n for line in range(nline):\n string = ', '.join(pair[ppl * line:ppl * (line + 1)])\n if line == 0: # append header or space\n string = header + string\n else:\n string = space + string\n if not line == nline - 1: # add comma except for last line\n string += ','\n\n out.append(string)\n\n fid = open(file, 'w')\n for line in out:\n fid.write(line + '\\n')\n\n fid.close()\n return retval", "response": "Write data in system to a dm file"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following 
Python 3 code\ndef add_command_line_options(cls, parser):\n if \"add_argument\" in dir(parser):\n return cls.add_command_line_options_argparse(parser)\n else:\n return cls.add_command_line_options_optparse(parser)", "response": "function to inject command line parameters"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef add_command_line_options_argparse(cls, argparser):\n import argparse\n\n class SetParameter(argparse.Action):\n \"\"\"callback object for ArgumentParser\"\"\"\n\n def __call__(self, parser, namespace, value, option_string=None):\n cls.config[option_string] = value\n if option_string == \"--stomp-conf\":\n cls.load_configuration_file(value)\n\n argparser.add_argument(\n \"--stomp-host\",\n metavar=\"HOST\",\n default=cls.defaults.get(\"--stomp-host\"),\n help=\"Stomp broker address, default '%(default)s'\",\n type=str,\n action=SetParameter,\n )\n argparser.add_argument(\n \"--stomp-port\",\n metavar=\"PORT\",\n default=cls.defaults.get(\"--stomp-port\"),\n help=\"Stomp broker port, default '%(default)s'\",\n type=int,\n action=SetParameter,\n )\n argparser.add_argument(\n \"--stomp-user\",\n metavar=\"USER\",\n default=cls.defaults.get(\"--stomp-user\"),\n help=\"Stomp user, default '%(default)s'\",\n type=str,\n action=SetParameter,\n )\n argparser.add_argument(\n \"--stomp-pass\",\n metavar=\"PASS\",\n default=cls.defaults.get(\"--stomp-pass\"),\n help=\"Stomp password\",\n type=str,\n action=SetParameter,\n )\n argparser.add_argument(\n \"--stomp-prfx\",\n metavar=\"PRE\",\n default=cls.defaults.get(\"--stomp-prfx\"),\n help=\"Stomp namespace prefix, default '%(default)s'\",\n type=str,\n action=SetParameter,\n )\n argparser.add_argument(\n \"--stomp-conf\",\n metavar=\"CNF\",\n default=cls.defaults.get(\"--stomp-conf\"),\n help=\"Stomp configuration file containing connection information, disables default values\",\n type=str,\n action=SetParameter,\n )", "response": "function to 
inject command line parameters into a Python ArgumentParser."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nfunctioning to inject command line parameters into a Python OptionParser.", "response": "def add_command_line_options_optparse(cls, optparser):\n \"\"\"function to inject command line parameters into\n a Python OptionParser.\"\"\"\n\n def set_parameter(option, opt, value, parser):\n \"\"\"callback function for OptionParser\"\"\"\n cls.config[opt] = value\n if opt == \"--stomp-conf\":\n cls.load_configuration_file(value)\n\n optparser.add_option(\n \"--stomp-host\",\n metavar=\"HOST\",\n default=cls.defaults.get(\"--stomp-host\"),\n help=\"Stomp broker address, default '%default'\",\n type=\"string\",\n nargs=1,\n action=\"callback\",\n callback=set_parameter,\n )\n optparser.add_option(\n \"--stomp-port\",\n metavar=\"PORT\",\n default=cls.defaults.get(\"--stomp-port\"),\n help=\"Stomp broker port, default '%default'\",\n type=\"int\",\n nargs=1,\n action=\"callback\",\n callback=set_parameter,\n )\n optparser.add_option(\n \"--stomp-user\",\n metavar=\"USER\",\n default=cls.defaults.get(\"--stomp-user\"),\n help=\"Stomp user, default '%default'\",\n type=\"string\",\n nargs=1,\n action=\"callback\",\n callback=set_parameter,\n )\n optparser.add_option(\n \"--stomp-pass\",\n metavar=\"PASS\",\n default=cls.defaults.get(\"--stomp-pass\"),\n help=\"Stomp password\",\n type=\"string\",\n nargs=1,\n action=\"callback\",\n callback=set_parameter,\n )\n optparser.add_option(\n \"--stomp-prfx\",\n metavar=\"PRE\",\n default=cls.defaults.get(\"--stomp-prfx\"),\n help=\"Stomp namespace prefix, default '%default'\",\n type=\"string\",\n nargs=1,\n action=\"callback\",\n callback=set_parameter,\n )\n optparser.add_option(\n \"--stomp-conf\",\n metavar=\"CNF\",\n default=cls.defaults.get(\"--stomp-conf\"),\n help=\"Stomp configuration file containing connection information, disables default values\",\n type=\"string\",\n nargs=1,\n 
action=\"callback\",\n callback=set_parameter,\n )"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef broadcast_status(self, status):\n self._broadcast(\n \"transient.status\",\n json.dumps(status),\n headers={\"expires\": str(int((15 + time.time()) * 1000))},\n )", "response": "Broadcast transient status information to all listeners"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef _subscribe(self, sub_id, channel, callback, **kwargs):\n headers = {}\n if kwargs.get(\"exclusive\"):\n headers[\"activemq.exclusive\"] = \"true\"\n if kwargs.get(\"ignore_namespace\"):\n destination = \"/queue/\" + channel\n else:\n destination = \"/queue/\" + self._namespace + channel\n if kwargs.get(\"priority\"):\n headers[\"activemq.priority\"] = kwargs[\"priority\"]\n if kwargs.get(\"retroactive\"):\n headers[\"activemq.retroactive\"] = \"true\"\n if kwargs.get(\"selector\"):\n headers[\"selector\"] = kwargs[\"selector\"]\n if kwargs.get(\"transformation\"):\n if kwargs[\"transformation\"] == True:\n headers[\"transformation\"] = \"jms-object-json\"\n else:\n headers[\"transformation\"] = kwargs[\"transformation\"]\n if kwargs.get(\"acknowledgement\"):\n ack = \"client-individual\"\n else:\n ack = \"auto\"\n\n self._conn.subscribe(destination, sub_id, headers=headers, ack=ack)", "response": "Subscribe to a queue."} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef _subscribe_broadcast(self, sub_id, channel, callback, **kwargs):\n headers = {}\n if kwargs.get(\"ignore_namespace\"):\n destination = \"/topic/\" + channel\n else:\n destination = \"/topic/\" + self._namespace + channel\n if kwargs.get(\"retroactive\"):\n headers[\"activemq.retroactive\"] = \"true\"\n if kwargs.get(\"transformation\"):\n if kwargs[\"transformation\"] == True:\n headers[\"transformation\"] = \"jms-object-json\"\n else:\n headers[\"transformation\"] = 
kwargs[\"transformation\"]\n self._conn.subscribe(destination, sub_id, headers=headers)", "response": "Subscribe to a broadcast topic."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nsending a message to a queue.", "response": "def _send(\n self, destination, message, headers=None, delay=None, expiration=None, **kwargs\n ):\n \"\"\"Send a message to a queue.\n :param destination: Queue name to send to\n :param message: A string to be sent\n :param **kwargs: Further parameters for the transport layer. For example\n delay: Delay transport of message by this many seconds\n expiration: Optional expiration time, relative to sending time\n headers: Optional dictionary of header entries\n ignore_namespace: Do not apply namespace to the destination name\n persistent: Whether to mark messages as persistent, to be kept\n between broker restarts. Default is 'true'.\n transaction: Transaction ID if message should be part of a\n transaction\n \"\"\"\n if not headers:\n headers = {}\n if \"persistent\" not in headers:\n headers[\"persistent\"] = \"true\"\n if delay:\n # The 'delay' mechanism is only supported when\n # schedulerSupport is enabled on the broker.\n headers[\"AMQ_SCHEDULED_DELAY\"] = int(1000 * delay)\n if expiration:\n headers[\"expires\"] = int((time.time() + expiration) * 1000)\n if kwargs.get(\"ignore_namespace\"):\n destination = \"/queue/\" + destination\n else:\n destination = \"/queue/\" + self._namespace + destination\n try:\n self._conn.send(destination, message, headers=headers, **kwargs)\n except stomp.exception.NotConnectedException:\n self._connected = False\n raise workflows.Disconnected(\"No connection to stomp host\")"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef _ack(self, message_id, subscription_id, **kwargs):\n self._conn.ack(message_id, subscription_id, **kwargs)", "response": "Acknowledge receipt of a message."} {"SOURCE": "codesearchnet", 
"instruction": "Can you generate a brief explanation for the following Python 3 code\ndef _nack(self, message_id, subscription_id, **kwargs):\n self._conn.nack(message_id, subscription_id, **kwargs)", "response": "Reject receipt of a message."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nregistering a device to the device list", "response": "def register_device(self, dev_name):\n \"\"\"register a device to the device list\"\"\"\n if dev_name not in self.devices:\n self.devices.append(dev_name)\n group_name = self.system.__dict__[dev_name]._group\n if group_name not in self.group.keys():\n self.group[group_name] = {}"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef register_element(self, dev_name, idx=None):\n if dev_name not in self.devices:\n logger.error(\n 'Device {} missing. call add_device before adding elements'.\n format(dev_name))\n return\n group_name = self.system.__dict__[dev_name]._group\n if idx is None: # \"if not idx\" will fail for idx==0.0\n idx = dev_name + '_' + str(len(self.group[group_name].keys()))\n self.group[group_name][idx] = dev_name\n return idx", "response": "Register a device element to the group list"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef sort_device(self):\n\n self.devices.sort()\n # idx: the indices of order-sensitive models\n # names: an ordered list of order-sensitive models\n idx = []\n names = []\n for dev in order:\n # if ``dev`` in ``order`` is a model file name:\n # initialize the models in alphabet order\n if dev in all_models:\n all_dev = list(sorted(all_models[dev].keys()))\n for item in all_dev:\n if item in self.devices:\n idx.append(self.devices.index(item))\n names.append(item)\n\n # if ``dev`` presents as a model name\n elif dev in self.devices:\n idx.append(self.devices.index(dev))\n names.append(dev)\n\n idx = sorted(idx)\n for id, name in 
zip(idx, names):\n self.devices[id] = name", "response": "Sort the list of devices in alphabetical order."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ninitializing the internal state of the object.", "response": "def init1(self, dae):\n \"\"\"New initialization function\"\"\"\n self.servcall(dae)\n retval = True\n\n mva = self.system.mva\n self.p0 = mul(self.p0, self.gammap)\n self.q0 = mul(self.q0, self.gammaq)\n\n dae.y[self.vsd] = mul(dae.y[self.v], -sin(dae.y[self.a]))\n dae.y[self.vsq] = mul(dae.y[self.v], cos(dae.y[self.a]))\n\n rs = matrix(self.rs)\n rr = matrix(self.rr)\n xmu = matrix(self.xmu)\n x1 = matrix(self.xs) + xmu\n x2 = matrix(self.xr) + xmu\n Pg = matrix(self.p0)\n Qg = matrix(self.q0)\n Vc = dae.y[self.v]\n vsq = dae.y[self.vsq]\n vsd = dae.y[self.vsd]\n\n toSn = div(mva, self.Sn) # to machine base\n toSb = self.Sn / mva # to system base\n\n # rotor speed\n omega = 1 * (ageb(mva * Pg, self.Sn)) + \\\n mul(0.5 + 0.5 * mul(Pg, toSn),\n aandb(agtb(Pg, 0), altb(mva * Pg, self.Sn))) + \\\n 0.5 * (aleb(mva * Pg, 0))\n\n slip = 1 - omega\n theta = mul(self.Kp, mround(1000 * (omega - 1)) / 1000)\n theta = mmax(theta, 0)\n\n # prepare for the iterations\n\n irq = mul(-x1, toSb, (2 * omega - 1), div(1, Vc), div(1, xmu),\n div(1, omega))\n isd = zeros(*irq.size)\n isq = zeros(*irq.size)\n\n # obtain ird isd isq\n for i in range(self.n):\n A = sparse([[-rs[i], vsq[i]], [x1[i], -vsd[i]]])\n B = matrix([vsd[i] - xmu[i] * irq[i], Qg[i]])\n linsolve(A, B)\n isd[i] = B[0]\n isq[i] = B[1]\n ird = -div(vsq + mul(rs, isq) + mul(x1, isd), xmu)\n vrd = -mul(rr, ird) + mul(\n slip,\n mul(x2, irq) + mul(xmu, isq)) # todo: check x1 or x2\n vrq = -mul(rr, irq) - mul(slip, mul(x2, ird) + mul(xmu, isd))\n\n # main iterations\n for i in range(self.n):\n mis = ones(6, 1)\n rows = [0, 0, 0, 1, 1, 1, 2, 2, 3, 3, 4, 4, 5]\n cols = [0, 1, 3, 0, 1, 2, 2, 4, 3, 5, 0, 1, 2]\n\n x = matrix([isd[i], isq[i], ird[i], irq[i], vrd[i], vrq[i]])\n # vals = 
[-rs, x1, xmu, -x1, -rs, -xmu, -rr,\n # -1, -rr, -1, vsd, vsq, -xmu * Vc / x1]\n\n vals = [\n -rs[i], x1[i], xmu[i], -x1[i], -rs[i], -xmu[i], -rr[i], -1,\n -rr[i], -1, vsd[i], vsq[i], -xmu[i] * Vc[i] / x1[i]\n ]\n jac0 = spmatrix(vals, rows, cols, (6, 6), 'd')\n iter = 0\n\n while max(abs(mis)) > self.system.tds.config.tol:\n if iter > 20:\n logger.error(\n 'Initialization of DFIG <{}> failed.'.format(\n self.name[i]))\n retval = False\n break\n\n mis[0] = -rs[i] * x[0] + x1[i] * x[1] + xmu[i] * x[3] - vsd[i]\n mis[1] = -rs[i] * x[1] - x1[i] * x[0] - xmu[i] * x[2] - vsq[i]\n mis[2] = -rr[i] * x[2] + slip[i] * (\n x2[i] * x[3] + xmu[i] * x[1]) - x[4]\n mis[3] = -rr[i] * x[3] - slip[i] * (\n x2[i] * x[2] + xmu[i] * x[0]) - x[5]\n mis[4] = vsd[i] * x[0] + vsq[i] * x[1] + x[4] * x[2] + \\\n x[5] * x[3] - Pg[i]\n mis[5] = -xmu[i] * Vc[i] * x[2] / x1[i] - \\\n Vc[i] * Vc[i] / x1[i] - Qg[i]\n\n rows = [2, 2, 3, 3, 4, 4, 4, 4]\n cols = [1, 3, 0, 2, 2, 3, 4, 5]\n vals = [\n slip[i] * xmu[i], slip[i] * x2[i], -slip[i] * xmu[i],\n -slip[i] * x2[i], x[4], x[5], x[2], x[3]\n ]\n\n jac = jac0 + spmatrix(vals, rows, cols, (6, 6), 'd')\n\n linsolve(jac, mis)\n\n x -= mis\n iter += 1\n\n isd[i] = x[0]\n isq[i] = x[1]\n ird[i] = x[2]\n irq[i] = x[3]\n vrd[i] = x[4]\n vrq[i] = x[5]\n\n dae.x[self.ird] = mul(self.u0, ird)\n dae.x[self.irq] = mul(self.u0, irq)\n dae.y[self.isd] = isd\n dae.y[self.isq] = isq\n dae.y[self.vrd] = vrd\n dae.y[self.vrq] = vrq\n\n dae.x[self.omega_m] = mul(self.u0, omega)\n dae.x[self.theta_p] = mul(self.u0, theta)\n dae.y[self.pwa] = mmax(mmin(2 * dae.x[self.omega_m] - 1, 1), 0)\n\n self.vref0 = mul(\n aneb(self.KV, 0), Vc - div(ird + div(Vc, xmu), self.KV))\n dae.y[self.vref] = self.vref0\n k = mul(div(x1, Vc, xmu, omega), toSb)\n\n self.irq_off = -mul(k, mmax(mmin(2 * omega - 1, 1), 0)) - irq\n\n # electrical torque in pu\n te = mul(\n xmu,\n mul(dae.x[self.irq], dae.y[self.isd]) - mul(\n dae.x[self.ird], dae.y[self.isq]))\n\n for i in range(self.n):\n if 
te[i] < 0:\n logger.error(\n 'Pe < 0 on bus <{}>. Wind speed initialize failed.'.\n format(self.bus[i]))\n retval = False\n\n # wind power in pu\n pw = mul(te, omega)\n dae.y[self.pw] = pw\n\n # wind speed initialization loop\n\n R = 4 * pi * self.system.freq * mul(self.R, self.ngb,\n div(1, self.npole))\n AA = pi * self.R**2\n vw = 0.9 * self.Vwn\n\n for i in range(self.n):\n mis = 1\n iter = 0\n while abs(mis) > self.system.tds.config.tol:\n if iter > 50:\n logger.error(\n 'Wind <{}> init failed. '\n 'Try increasing the nominal wind speed.'.\n format(self.wind[i]))\n retval = False\n break\n\n pw_iter, jac = self.windpower(self.ngen[i], self.rho[i], vw[i],\n AA[i], R[i], omega[i], theta[i])\n\n mis = pw_iter - pw[i]\n inc = -mis / jac[1]\n vw[i] += inc\n iter += 1\n\n # set wind speed\n dae.x[self.vw] = div(vw, self.Vwn)\n\n lamb = div(omega, vw, div(1, R))\n ilamb = div(1,\n (div(1, lamb + 0.08 * theta) - div(0.035, theta**3 + 1)))\n cp = 0.22 * mul(\n div(116, ilamb) - 0.4 * theta - 5, exp(div(-12.5, ilamb)))\n\n dae.y[self.lamb] = lamb\n dae.y[self.ilamb] = ilamb\n dae.y[self.cp] = cp\n\n self.system.rmgen(self.gen)\n\n if not retval:\n logger.error('DFIG initialization failed')\n\n return retval"} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncompute the first time step and save to self. 
h", "response": "def _calc_time_step_first(self):\n \"\"\"\n Compute the first time step and save to ``self.h``\n\n Returns\n -------\n None\n \"\"\"\n system = self.system\n config = self.config\n\n if not system.dae.n:\n freq = 1.0\n elif system.dae.n == 1:\n B = matrix(system.dae.Gx)\n self.solver.linsolve(system.dae.Gy, B)\n As = system.dae.Fx - system.dae.Fy * B\n freq = abs(As[0, 0])\n else:\n freq = 20.0\n\n if freq > system.freq:\n freq = float(system.freq)\n\n tspan = abs(config.tf - config.t0)\n tcycle = 1 / freq\n config.deltatmax = min(5 * tcycle, tspan / 100.0)\n config.deltat = min(tcycle, tspan / 100.0)\n config.deltatmin = min(tcycle / 64, config.deltatmax / 20)\n\n if config.fixt:\n if config.tstep <= 0:\n logger.warning('Fixed time step is negative or zero')\n logger.warning('Switching to automatic time step')\n config.fixt = False\n else:\n config.deltat = config.tstep\n if config.tstep < config.deltatmin:\n logger.warning(\n 'Fixed time step is below the estimated minimum')\n\n self.h = config.deltat"} {"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ncalculating the time step during time domain simulations.", "response": "def calc_time_step(self):\n \"\"\"\n Set the time step during time domain simulations\n\n Parameters\n ----------\n convergence: bool\n truth value of the convergence of the last step\n niter: int\n current iteration count\n t: float\n current simulation time\n\n Returns\n -------\n float\n computed time step size\n \"\"\"\n system = self.system\n config = self.config\n convergence = self.convergence\n niter = self.niter\n t = self.t\n\n if t == 0:\n self._calc_time_step_first()\n return\n\n if convergence:\n if niter >= 15:\n config.deltat = max(config.deltat * 0.5, config.deltatmin)\n elif niter <= 6:\n config.deltat = min(config.deltat * 1.1, config.deltatmax)\n else:\n config.deltat = max(config.deltat * 0.95, config.deltatmin)\n\n # adjust fixed time step if niter is high\n if config.fixt:\n 
config.deltat = min(config.tstep, config.deltat)\n else:\n config.deltat *= 0.9\n if config.deltat < config.deltatmin:\n config.deltat = 0\n\n if system.Fault.is_time(t) or system.Breaker.is_time(t):\n config.deltat = min(config.deltat, 0.002778)\n elif system.check_event(t):\n config.deltat = min(config.deltat, 0.002778)\n\n if config.method == 'fwdeuler':\n config.deltat = min(config.deltat, config.tstep)\n\n # last step size\n if self.t + config.deltat > config.tf:\n config.deltat = config.tf - self.t\n\n # reduce time step for fixed_times events\n for fixed_t in self.fixed_times:\n if (fixed_t > self.t) and (fixed_t <= self.t + config.deltat):\n config.deltat = fixed_t - self.t\n self.switch = True\n break\n\n self.h = config.deltat"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninitializing the time domain simulation and initialize the dynamic model.", "response": "def init(self):\n \"\"\"\n Initialize time domain simulation\n\n Returns\n -------\n None\n \"\"\"\n system = self.system\n config = self.config\n dae = self.system.dae\n if system.pflow.solved is False:\n return\n\n t, s = elapsed()\n\n # Assign indices for post-powerflow device variables\n system.xy_addr1()\n\n # Assign variable names for bus injections and line flows if enabled\n system.varname.resize_for_flows()\n system.varname.bus_line_names()\n\n # Reshape dae to retain power flow solutions\n system.dae.init1()\n\n # Initialize post-powerflow device variables\n for device, init1 in zip(system.devman.devices, system.call.init1):\n if init1:\n system.__dict__[device].init1(system.dae)\n\n # compute line and area flow\n if config.compute_flows:\n dae.init_fg()\n self.compute_flows() # TODO: move to PowerSystem\n\n t, s = elapsed(t)\n\n if system.dae.n:\n logger.debug('Dynamic models initialized in {:s}.'.format(s))\n else:\n logger.debug('No dynamic model loaded.')\n\n # system.dae flags initialize\n system.dae.factorize = True\n system.dae.mu = 1.0\n system.dae.kg = 
0.0"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef run(self):\n ret = False\n system = self.system\n config = self.config\n dae = self.system.dae\n\n # maxit = config.maxit\n # tol = config.tol\n\n if system.pflow.solved is False:\n logger.warning('Power flow not solved. Simulation cannot continue.')\n return ret\n t0, _ = elapsed()\n t1 = t0\n\n self.streaming_init()\n\n logger.info('')\n logger.info('-> Time Domain Simulation: {} method, t={} s'\n .format(self.config.method, self.config.tf))\n\n self.load_pert()\n\n self.run_step0()\n\n config.qrtstart = time()\n\n while self.t < config.tf:\n self.check_fixed_times()\n self.calc_time_step()\n\n if self.callpert is not None:\n self.callpert(self.t, self.system)\n\n if self.h == 0:\n break\n # progress time and set time in dae\n self.t += self.h\n dae.t = self.t\n\n # backup actual variables\n self.x0 = matrix(dae.x)\n self.y0 = matrix(dae.y)\n self.f0 = matrix(dae.f)\n\n # apply fixed_time interventions and perturbations\n self.event_actions()\n\n # reset flags used in each step\n self.err = 1\n self.niter = 0\n self.convergence = False\n\n self.implicit_step()\n\n if self.convergence is False:\n try:\n self.restore_values()\n continue\n except ValueError:\n self.t = config.tf\n ret = False\n break\n\n self.step += 1\n self.compute_flows()\n system.varout.store(self.t, self.step)\n self.streaming_step()\n\n # plot variables and display iteration status\n perc = max(min((self.t - config.t0) / (config.tf - config.t0) * 100, 100), 0)\n\n # show iteration info every 30 seconds or every 20%\n\n t2, _ = elapsed(t1)\n if t2 - t1 >= 30:\n t1 = t2\n logger.info(' ({:.0f}%) time = {:.4f}s, step = {}, niter = {}'\n .format(100 * self.t / config.tf, self.t, self.step,\n self.niter))\n\n if perc > self.next_pc or self.t == config.tf:\n self.next_pc += 20\n logger.info(' ({:.0f}%) time = {:.4f}s, step = {}, niter = {}'\n .format(100 * self.t / config.tf, 
self.t, self.step, self.niter))\n\n # compute max rotor angle difference\n # diff_max = anglediff()\n\n # quasi-real-time check and wait\n rt_end = config.qrtstart + (self.t - config.t0) * config.kqrt\n\n if config.qrt:\n # the ending time has passed\n if time() - rt_end > 0:\n # simulation is too slow\n if time() - rt_end > config.kqrt:\n logger.debug('Simulation over-run at t={:4.4g} s.'.format(self.t))\n # wait to finish\n else:\n self.headroom += (rt_end - time())\n while time() - rt_end < 0:\n sleep(1e-5)\n\n if config.qrt:\n logger.debug('RT headroom time: {} s.'.format(str(self.headroom)))\n\n if self.t != config.tf:\n logger.error('Reached minimum time step. Convergence is not likely.')\n ret = False\n else:\n ret = True\n\n if system.config.dime_enable:\n system.streaming.finalize()\n\n _, s = elapsed(t0)\n\n if ret is True:\n logger.info(' Time domain simulation finished in {:s}.'.format(s))\n else:\n logger.info(' Time domain simulation failed in {:s}.'.format(s))\n\n self.success = ret\n self.dump_results(success=self.success)\n\n return ret", "response": "Run time domain simulation\n\n Returns\n -------\n bool\n Success flag"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef restore_values(self):\n if self.convergence is True:\n return\n dae = self.system.dae\n system = self.system\n\n inc_g = self.inc[dae.n:dae.m + dae.n]\n max_g_err_sign = 1 if abs(max(inc_g)) > abs(min(inc_g)) else -1\n if max_g_err_sign == 1:\n max_g_err_idx = list(inc_g).index(max(inc_g))\n else:\n max_g_err_idx = list(inc_g).index(min(inc_g))\n logger.debug(\n 'Maximum mismatch = {:.4g} at equation <{}>'.format(\n max(abs(inc_g)), system.varname.unamey[max_g_err_idx]))\n logger.debug(\n 'Reducing time step h={:.4g}s for t={:.4g}'.format(self.h, self.t))\n\n # restore initial variable data\n dae.x = matrix(self.x0)\n dae.y = matrix(self.y0)\n dae.f = matrix(self.f0)", "response": "Restore x y and f values if 
converged\n is True Returns None"} {"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nintegrate one step using trapezoidal method. Sets convergence and niter flags.", "response": "def implicit_step(self):\n \"\"\"\n Integrate one step using trapezoidal method. Sets convergence and niter flags.\n\n Returns\n -------\n None\n \"\"\"\n config = self.config\n system = self.system\n dae = self.system.dae\n\n # constant short names\n In = spdiag([1] * dae.n)\n h = self.h\n\n while self.err > config.tol and self.niter < config.maxit:\n if self.t - self.t_jac >= 5:\n dae.rebuild = True\n self.t_jac = self.t\n elif self.niter > 4:\n dae.rebuild = True\n elif dae.factorize:\n dae.rebuild = True\n\n # rebuild Jacobian\n if dae.rebuild:\n exec(system.call.int)\n dae.rebuild = False\n else:\n exec(system.call.int_fg)\n\n # complete Jacobian matrix dae.Ac\n if config.method == 'euler':\n dae.Ac = sparse(\n [[In - h * dae.Fx, dae.Gx], [-h * dae.Fy, dae.Gy]],\n 'd')\n dae.q = dae.x - self.x0 - h * dae.f\n\n elif config.method == 'trapezoidal':\n dae.Ac = sparse([[In - h * 0.5 * dae.Fx, dae.Gx],\n [-h * 0.5 * dae.Fy, dae.Gy]], 'd')\n dae.q = dae.x - self.x0 - h * 0.5 * (dae.f + self.f0)\n\n # windup limiters\n dae.reset_Ac()\n\n if dae.factorize:\n self.F = self.solver.symbolic(dae.Ac)\n dae.factorize = False\n self.inc = -matrix([dae.q, dae.g])\n\n try:\n N = self.solver.numeric(dae.Ac, self.F)\n self.solver.solve(dae.Ac, self.F, N, self.inc)\n except ArithmeticError:\n logger.error('Singular matrix')\n dae.check_diag(dae.Gy, 'unamey')\n dae.check_diag(dae.Fx, 'unamex')\n # force quit\n self.niter = config.maxit + 1\n break\n except ValueError:\n logger.warning('Unexpected symbolic factorization')\n dae.factorize = True\n continue\n else:\n inc_x = self.inc[:dae.n]\n inc_y = self.inc[dae.n:dae.m + dae.n]\n dae.x += inc_x\n dae.y += inc_y\n\n self.err = max(abs(self.inc))\n if np.isnan(self.inc).any():\n logger.error('Iteration error: NaN 
detected.')\n self.niter = config.maxit + 1\n break\n\n self.niter += 1\n\n if self.niter <= config.maxit:\n self.convergence = True"} {"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef event_actions(self):\n system = self.system\n dae = system.dae\n if self.switch:\n system.Breaker.apply(self.t)\n for item in system.check_event(self.t):\n system.__dict__[item].apply(self.t)\n\n dae.rebuild = True\n self.switch = False", "response": "Take actions for timed events\n Returns ------- None"} {"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef load_pert(self):\n system = self.system\n\n if system.files.pert:\n try:\n sys.path.append(system.files.path)\n module = importlib.import_module(system.files.pert[:-3])\n self.callpert = getattr(module, 'pert')\n except ImportError:\n logger.warning('Pert file is discarded due to import errors.')\n self.callpert = None", "response": "Load perturbation files to self. 
callpert."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nstore the data and stream the data for the 0th step", "response": "def run_step0(self):\n \"\"\"\n For the 0th step, store the data and stream data\n\n Returns\n -------\n None\n \"\"\"\n dae = self.system.dae\n system = self.system\n\n self.inc = zeros(dae.m + dae.n, 1)\n system.varout.store(self.t, self.step)\n\n self.streaming_step()"} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef streaming_step(self):\n system = self.system\n if system.config.dime_enable:\n system.streaming.sync_and_handle()\n system.streaming.vars_to_modules()\n system.streaming.vars_to_pmu()", "response": "Sync and handle and streaming for each integration step Returns None"} {"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef streaming_init(self):\n system = self.system\n config = self.config\n if system.config.dime_enable:\n config.compute_flows = True\n system.streaming.send_init(recepient='all')\n logger.info('Waiting for modules to send init info...')\n sleep(0.5)\n system.streaming.sync_and_handle()", "response": "Send out initialization variables and process init from modules\n "} {"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef compute_flows(self):\n system = self.system\n config = self.config\n dae = system.dae\n\n if config.compute_flows:\n # compute and append series injections on buses\n\n exec(system.call.bus_injection)\n bus_inj = dae.g[:2 * system.Bus.n]\n\n exec(system.call.seriesflow)\n system.Area.seriesflow(system.dae)\n system.Area.interchange_varout()\n dae.y = matrix([\n dae.y, bus_inj, system.Line._line_flows, system.Area.inter_varout\n ])", "response": "Compute the line flows after each step."} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 
code\ndef dump_results(self, success):\n system = self.system\n\n t, _ = elapsed()\n\n if success and (not system.files.no_output):\n\n # system.varout.dump()\n system.varout.dump_np_vars()\n _, s = elapsed(t)\n logger.info('Simulation data dumped in {:s}.'.format(s))", "response": "Dump simulation results to dat and lst files."} {"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nparsing an ANDES card file into internal variables", "response": "def read(file, system):\n \"\"\"Parse an ANDES card file into internal variables\"\"\"\n try:\n fid = open(file, 'r')\n raw_file = fid.readlines()\n except IOError:\n print('* IOError while reading input card file.')\n return\n\n ret_dict = dict()\n ret_dict['outfile'] = file.split('.')[0].lower() + '.py'\n key, val = None, None\n for idx, line in enumerate(raw_file):\n line = line.strip()\n if not line:\n continue\n if line.startswith('#'):\n continue\n elif '#' in line:\n line = line.split('#')[0]\n if '=' in line: # defining a field\n key, val = line.split('=')\n key, val = key.strip(), val.strip()\n val = [] if val == '' else val\n ret_dict.update({key: val})\n if val:\n val = val.split(';')\n else:\n val.extend(line.split(';'))\n if val:\n val = de_blank(val)\n ret_dict[key] = val\n\n ret_dict_ord = dict(ret_dict)\n\n for key, val in ret_dict.items():\n if not val:\n continue\n if type(val) == list:\n if ':' in val[0]:\n new_val = {} # return in a dictionary\n new_val_ord = [\n ] # return in an ordered list with the dict keys at 0\n for item in val:\n try:\n m, n = item.split(':')\n except ValueError:\n print('* Error: check line <{}>'.format(item))\n return\n m, n = m.strip(), n.strip()\n if ',' in n:\n n = n.split(',')\n n = de_blank(n)\n n = [to_number(i) for i in n]\n else:\n n = to_number(n)\n new_val.update({m.strip(): n})\n new_val_ord.append([m.strip(), n])\n ret_dict[key] = new_val\n ret_dict_ord[key] = new_val_ord\n\n ret_dict['name'] = ret_dict['name'][0]\n ret_dict['doc_string'] = 
ret_dict['doc_string'][0]\n ret_dict['group'] = ret_dict['group'][0]\n ret_dict['service_keys'] = list(ret_dict['service_eq'].keys())\n ret_dict['consts'] = list(ret_dict['data'].keys()) + list(\n ret_dict['service_eq'].keys())\n ret_dict['init1_eq'] = ret_dict_ord['init1_eq']\n ret_dict['service_eq'] = ret_dict_ord['service_eq']\n ret_dict['ctrl'] = ret_dict_ord['ctrl']\n\n copy_algebs = []\n copy_states = []\n for item in ret_dict['ctrl']:\n key, val = item\n if val[3] == 'y':\n copy_algebs.append(key)\n elif val[3] == 'x':\n copy_states.append(key)\n elif val[3] == 'c':\n ret_dict['consts'].append(key)\n ret_dict['copy_algebs'] = copy_algebs\n ret_dict['copy_states'] = copy_states\n\n return run(system, **ret_dict)"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nremove blank elements in val and return ret", "response": "def de_blank(val):\n \"\"\"Remove blank elements in `val` and return `ret`\"\"\"\n ret = list(val)\n if type(val) == list:\n for idx, item in enumerate(val):\n if item.strip() == '':\n ret.remove(item)\n else:\n ret[idx] = item.strip()\n return ret"} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef run(system,\n outfile='',\n name='',\n doc_string='',\n group='',\n data={},\n descr={},\n units={},\n params=[],\n fnamex=[],\n fnamey=[],\n mandatory=[],\n zeros=[],\n powers=[],\n currents=[],\n voltages=[],\n z=[],\n y=[],\n dccurrents=[],\n dcvoltages=[],\n r=[],\n g=[],\n times=[],\n ac={},\n dc={},\n ctrl={},\n consts=[],\n algebs=[],\n interfaces=[],\n states=[],\n init1_eq={},\n service_eq={},\n algeb_eq=[],\n windup={},\n hard_limit={},\n diff_eq=[],\n anti_windup={},\n copy_algebs=[],\n copy_states=[],\n service_keys=[],\n **kwargs):\n retval = True\n space4 = ' '\n space8 = space4 * 2\n \"\"\"Input data consistency check\"\"\"\n to_check = {\n 'param': params,\n 'mandatory': mandatory,\n 'zero': zeros,\n 'power': powers,\n 'voltage': 
voltages,\n 'currents': currents,\n 'z': z,\n 'y': y,\n 'dccurrent': dccurrents,\n 'dcvoltage': dcvoltages,\n 'r': r,\n 'g': g,\n 'times': times,\n }\n\n if not data:\n print('* Error: <data> dictionary is not defined.')\n return\n\n for key, val in to_check.items():\n if not val:\n continue\n for item in val:\n if item not in data.keys():\n print('* Warning: {} <{}> is not in data.'.format(key, item))\n\n for item in interfaces:\n if item not in algebs + copy_algebs:\n print('* Warning: interface <{}> is not defined'.format(item))\n\n for key, val in hard_limit.items():\n if key not in algebs:\n print('* Warning: variable <{}> in hard_limit not defined.'.format(\n key))\n for item in val:\n if type(item) in (int, float):\n pass\n elif item not in consts:\n print(\n '* Warning: const <{}> in hard_limit not defined.'.format(\n item))\n\n for key, val in windup.items():\n if key not in algebs:\n print(\n '* Warning: variable <{}> in windup not defined.'.format(key))\n for item in val:\n if type(item) in (int, float):\n continue\n elif item not in consts:\n print('* Warning: const <{}> in windup not defined.'.format(\n item))\n\n for key, val in anti_windup.items():\n if key not in states:\n print(\n '* Warning: variable <{}> in anti_windup not defined.'.format(\n key))\n for item in val:\n if type(item) in (int, float):\n continue\n elif item not in consts:\n print(\n '* Warning: const <{}> in anti_windup not defined.'.format(\n item))\n \"\"\"Equation and variable number check\"\"\"\n nalgebs, nalgeb_eq, nstates, ndiff_eq, ninterfaces = len(algebs), len(\n algeb_eq), len(states), len(diff_eq), len(interfaces)\n\n if nalgebs + ninterfaces != nalgeb_eq:\n print('* Warning: there are {} algebs and {} algeb equations.'.format(\n nalgebs, nalgeb_eq))\n\n if nstates != ndiff_eq:\n print('* Warning: there are {} states and {} differential equations.'.\n format(nstates, ndiff_eq))\n\n # check for duplicate names\n var_names = consts + algebs + states + copy_algebs + 
copy_states\n if len(set(var_names)) != len(var_names):\n raise NameError('Duplicated names are declared!')\n # Set up sympy symbols for variables, constants and equations\n sym_consts, sym_algebs, sym_states, sym_interfaces = [], [], [], []\n sym_algebs_ext, sym_states_ext = [], []\n sym_f, sym_g, sym_serv, sym_init1 = [], [], [], []\n sym_hard_limit, sym_windup, sym_anti_windup = [], [], []\n\n states_anti_windup = list(anti_windup.keys())\n # algebs_windup = list(windup.keys())\n # algebs_hard_limit = list(hard_limit.keys())\n\n # remove interface variables in copy_algebs\n for item in interfaces:\n if item in copy_algebs:\n copy_algebs.remove(item)\n\n # algebs_ext = algebs + interfaces + copy_algebs\n # states_ext = states + copy_states\n\n for idx, var in enumerate(states):\n if var in states_anti_windup:\n tpl = '({} - {}) / {}'\n diff_eq[idx] = tpl.format(diff_eq[idx], var, anti_windup[var][0])\n\n # convert consts and variables into sympy.Symbol\n sym_maping = {\n 'consts': sym_consts,\n 'algebs': sym_algebs,\n 'states': sym_states,\n 'algebs_ext': sym_algebs_ext,\n 'states_ext': sym_states_ext,\n 'interfaces': sym_interfaces,\n 'states_anti_windup': sym_anti_windup,\n 'algebs_windup': sym_windup,\n 'algebs_hard_limit': sym_hard_limit,\n }\n for key, val in sym_maping.items():\n for item in eval(key):\n call = '{} = Symbol(item)'.format(item)\n exec(call)\n val.append(eval(item))\n\n # convert service_eq.keys() into sympy.Symbols and values into equations\n # for var, eq in service_eq.items():\n if service_eq:\n for item in service_eq:\n var = item[0]\n eq = item[1]\n if var not in consts:\n print('* Warning: declaring undefined service variable <{}>'.\n format(var))\n call = '{} = Symbol(var)'.format(var)\n exec(call)\n expr = eval('{}'.format(eq))\n sym_serv.append([eval(var), expr])\n\n if init1_eq:\n for item in init1_eq:\n var = item[0]\n eq = item[1]\n if var not in states + algebs:\n print('* Warning: initializing undefined variable <{}>'.format(\n 
var))\n call = '{} = Symbol(var)'.format(var)\n exec(call)\n expr = eval('{}'.format(eq))\n sym_init1.append([eval(var), expr])\n\n # convert equations into symbolic expression\n for item in algeb_eq:\n expr = eval('{}'.format(item))\n sym_g.append(expr)\n\n for item in diff_eq:\n expr = eval('{}'.format(item))\n sym_f.append(expr)\n \"\"\"Derive the jacobians of equation f and g.\n Save to Fx, Fy and Gx Gy in a list of three elements: [equation_idx, var_idx, expression]\"\"\"\n Fx, Fy, Gx, Gy = list(), list(), list(), list()\n for eq_idx, expr in enumerate(sym_g):\n try:\n free_syms = expr.free_symbols\n except AttributeError:\n free_syms = []\n\n for sym in free_syms:\n if sym in sym_algebs_ext:\n # take the derivative and go to Gy\n sym_idx = sym_algebs_ext.index(sym)\n Gy.append([eq_idx, sym_idx, expr.diff(sym)])\n elif sym in sym_states_ext:\n # take the derivative and go to Gx\n sym_idx = sym_states_ext.index(sym)\n Gx.append([eq_idx, sym_idx, expr.diff(sym)])\n else:\n pass # skip constants\n\n for eq_idx, expr in enumerate(sym_f):\n try:\n free_syms = expr.free_symbols\n except AttributeError:\n free_syms = []\n\n for sym in free_syms:\n if sym in sym_algebs_ext:\n sym_idx = sym_algebs_ext.index(sym)\n Fy.append([eq_idx, sym_idx, expr.diff(sym)])\n elif sym in sym_states_ext:\n sym_idx = sym_states_ext.index(sym)\n Fx.append([eq_idx, sym_idx, expr.diff(sym)])\n # Save equations into callable CVXOPT functions\n fcall, gcall = [], []\n gycall, fxcall, jac0 = [], [], []\n jac0_line = 'dae.add_jac({}0, {}, self.{}, self.{})'\n call_line = 'dae.add_jac({}, {}, self.{}, self.{})'\n\n # format f and g equations\n fcall_anti_windup = 'dae.anti_windup(self.{0}, {1}, {2})'\n\n for sym, eq in zip(sym_states, sym_f):\n string_eq = stringfy(eq, sym_consts, sym_states_ext, sym_algebs_ext)\n # handling anti_windup\n template = 'dae.f[self.{0}] = {1}'\n fcall.append(template.format(sym, string_eq))\n if sym in sym_anti_windup:\n val = 
eval('anti_windup[\\'{}\\']'.format(sym))\n if type(val[1]) not in (int, float):\n val[1] = 'self.' + val[1]\n if type(val[2]) not in (int, float):\n val[2] = 'self.' + val[2]\n fcall.append(fcall_anti_windup.format(sym, val[1], val[2]))\n\n gcall_windup = 'dae.windup(self.{0}, self.{1}, self.{2})'\n gcall_hard_limit = 'dae.hard_limit(self.{0}, {1}, {2})'\n\n for sym, eq in zip(sym_algebs + sym_interfaces, sym_g):\n string_eq = stringfy(eq, sym_consts, sym_states_ext, sym_algebs_ext)\n if sym in sym_interfaces:\n template = 'dae.g += spmatrix({1}, self.{0}, [0]*self.n, (dae.m, 1), \\'d\\')'\n else:\n template = 'dae.g[self.{0}] = {1}'\n gcall.append(template.format(sym, string_eq))\n\n if sym in sym_windup:\n val = eval('windup[\\'{}\\']'.format(sym))\n gcall.append(gcall_windup.format(sym, val[0], val[1]))\n elif sym in sym_hard_limit:\n val = eval('hard_limit[\\'{}\\']'.format(sym))\n val_formatted = list(val)\n for idx, item in enumerate(val):\n if type(item) in (int, float):\n pass\n else:\n val_formatted[idx] = 'self.{}'.format(item)\n gcall.append(\n gcall_hard_limit.format(sym, val_formatted[0],\n val_formatted[1]))\n\n # format Jacobians\n jacobians = ['Gy', 'Gx', 'Fx', 'Fy']\n mapping = dict(\n F=sym_states_ext, G=sym_algebs_ext, y=sym_algebs_ext, x=sym_states_ext)\n\n for jac in jacobians:\n for item in eval(jac):\n eqname = mapping[jac[0]][item[0]]\n varname = mapping[jac[1]][item[1]]\n equation = item[2]\n try:\n free_syms = equation.free_symbols\n except AttributeError:\n free_syms = []\n\n string_eq = stringfy(equation, sym_consts, sym_states_ext,\n sym_algebs_ext)\n\n isjac0 = 1\n for sym in free_syms:\n if sym in sym_consts:\n continue\n elif sym in sym_algebs_ext + sym_states_ext:\n isjac0 = 0\n break\n else:\n raise KeyError\n\n if isjac0:\n jac0.append(jac0_line.format(jac, string_eq, eqname, varname))\n else:\n if jac == 'Gy':\n gycall.append(\n call_line.format(jac, string_eq, eqname, varname))\n else:\n fxcall.append(\n call_line.format(jac, 
string_eq, eqname, varname))\n \"\"\"Format initialization and service calls\"\"\"\n init1call = []\n for item in sym_init1:\n rhs = stringfy(item[1], sym_consts, sym_states_ext, sym_algebs_ext)\n if item[0] in sym_algebs_ext:\n out = 'dae.y[self.{}] = {}'.format(item[0], rhs)\n elif item[0] in sym_states_ext:\n out = 'dae.x[self.{}] = {}'.format(item[0], rhs)\n else:\n raise KeyError\n init1call.append(out)\n\n servcall = []\n for item in ctrl:\n key, val = item\n out = 'self.copy_data_ext(\\'{}\\', \\'{}\\', \\'dest={}\\', idx=self.{})'.format(\n val[0], val[1], key, val[2])\n servcall.append(out)\n for item in sym_serv:\n rhs = stringfy(item[1], sym_consts, sym_states_ext, sym_algebs_ext)\n out = 'self.{} = {}'.format(item[0], rhs)\n servcall.append(out)\n\n calls = {\n 'gcall': not not gcall,\n 'fcall': not not fcall,\n 'gycall': not not gycall,\n 'fxcall': not not fxcall,\n 'jac0': (not not jac0) or (not not sym_algebs),\n 'init1': (not not init1call) or (not not servcall),\n }\n \"\"\"Build function call strings\"\"\"\n out_calls = []\n if servcall:\n out_calls.append(space4 + 'def servcall(self, dae):')\n for item in servcall:\n out_calls.append(space8 + item)\n out_calls.append('')\n\n if init1call or servcall:\n out_calls.append(space4 + 'def init1(self, dae):')\n out_calls.append(space8 + 'self.servcall(dae)')\n for item in init1call:\n out_calls.append(space8 + item)\n out_calls.append('')\n\n if gcall:\n out_calls.append(space4 + 'def gcall(self, dae):')\n for item in gcall:\n out_calls.append(space8 + item)\n out_calls.append('')\n\n if fcall:\n out_calls.append(space4 + 'def fcall(self, dae):')\n for item in fcall:\n out_calls.append(space8 + item)\n out_calls.append('')\n\n if gycall:\n out_calls.append(space4 + 'def gycall(self, dae):')\n for item in gycall:\n out_calls.append(space8 + item)\n out_calls.append('')\n if fxcall:\n out_calls.append(space4 + 'def fxcall(self, dae):')\n for item in fxcall:\n out_calls.append(space8 + item)\n 
out_calls.append('')\n\n tinyGy = 'dae.add_jac(Gy0, 1e-6, self.{0}, self.{0})'\n if jac0 or sym_states:\n out_calls.append(space4 + 'def jac0(self, dae):')\n for item in jac0:\n out_calls.append(space8 + item)\n for item in sym_algebs:\n out_calls.append(space8 + tinyGy.format(item))\n \"\"\"Class definitions in out_init\"\"\"\n # bulk update or extend of dict and list\n param_assign = space8 + 'self._{} = {}'\n list_extend = space8 + 'self._{}.extend({})'\n dict_update = space8 + 'self._{}.update(\\n{})'\n\n out_init = list() # def __init__ call strings\n out_init.append('from cvxopt import matrix, spmatrix')\n out_init.append('from cvxopt import mul, div, sin, cos, exp')\n out_init.append('from ..consts import *')\n out_init.append('from .base import ModelBase\\n\\n')\n out_init.append('class {}(ModelBase):'.format(name))\n if doc_string:\n out_init.append(space4 + \"\\\"\\\"\\\"{}\\\"\\\"\\\"\".format(doc_string))\n out_init.append(space4 + 'def __init__(self, system, name):')\n out_init.append(space8 + 'super().__init__(system, name)')\n if not group:\n print('*Error: Group name is not defined!')\n else:\n out_init.append(param_assign.format('group', add_quotes(group)))\n if name:\n out_init.append(param_assign.format('name', add_quotes(name)))\n\n meta_dict_upd = {\n 'data': data,\n 'units': units,\n 'config_descr': descr,\n 'ac': ac,\n 'dc': dc,\n # 'ctrl': ctrl,\n }\n meta_list_ext = {\n 'params': params,\n 'algebs': algebs,\n 'states': states,\n 'fnamex': fnamex,\n 'fnamey': fnamey,\n 'service': service_keys,\n 'mandatory': mandatory,\n 'zeros': zeros,\n 'powers': powers,\n 'voltages': voltages,\n 'currents': currents,\n 'z': z,\n 'y': y,\n 'dccurrents': dccurrents,\n 'dcvoltages': dcvoltages,\n 'r': r,\n 'g': g,\n 'times': times,\n }\n\n for key in sorted(meta_list_ext.keys()):\n val = meta_list_ext[key]\n if val:\n out_init.append(list_extend.format(key, val))\n for key in sorted(meta_dict_upd.keys()):\n val = meta_dict_upd[key]\n if val:\n 
out_init.append(\n dict_update.format(key, pprint.pformat(val, indent=12)))\n\n out_init.append(space8 + 'self.calls.update({})'.format(calls))\n\n out_init.append(space8 + 'self._init()')\n out_init.append('')\n\n # write to file\n try:\n fid = open(outfile, 'w')\n for line in out_init:\n fid.write(line + '\\n')\n for line in out_calls:\n fid.write(line + '\\n')\n fid.close()\n except IOError:\n logger.error('IOError while writing card output.')\n retval = False\n\n if retval:\n logger.info(\n 'Card file successfully saved to <{}> with'.format(outfile))\n logger.info(\n '* constants: {}, algebs: {}, interfaces: {}, states: {}'.format(\n len(sym_consts), len(sym_algebs), len(interfaces),\n len(sym_states)))\n logger.info('* diff equations: {}, algeb equations: {}'.format(\n len(fcall), len(gcall)))\n logger.info('* fxcall: {}, gycall: {}, jac0: {}'.format(\n len(fxcall), len(gycall), len(jac0)))\n\n sys.exit(0)", "response": "Run a consistency check on a single file."} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef stringfy(expr, sym_const=None, sym_states=None, sym_algebs=None):\n if not sym_const:\n sym_const = []\n if not sym_states:\n sym_states = []\n if not sym_algebs:\n sym_algebs = []\n expr_str = []\n if type(expr) in (int, float):\n return expr\n if expr.is_Atom:\n if expr in sym_const:\n expr_str = 'self.{}'.format(expr)\n elif expr in sym_states:\n expr_str = 'dae.x[self.{}]'.format(expr)\n elif expr in sym_algebs:\n expr_str = 'dae.y[self.{}]'.format(expr)\n elif expr.is_Number:\n if expr.is_Integer:\n expr_str = str(int(expr))\n else:\n expr_str = str(float(expr))\n # if expr.is_negative:\n # expr_str = '{}'.format(expr)\n # else:\n # expr_str = str(expr)\n else:\n raise AttributeError('Unknown free symbol <{}>'.format(expr))\n else:\n nargs = len(expr.args)\n arg_str = []\n for arg in expr.args:\n arg_str.append(stringfy(arg, sym_const, sym_states, sym_algebs))\n\n if expr.is_Add:\n 
expr_str = ''\n for idx, item in enumerate(arg_str):\n if idx == 0:\n if len(item) > 1 and item[1] == ' ':\n item = item[0] + item[2:]\n if idx > 0:\n if item[0] == '-':\n item = ' ' + item\n else:\n item = ' + ' + item\n expr_str += item\n\n elif expr.is_Mul:\n if nargs == 2 and expr.args[0].is_Integer: # number * matrix\n if expr.args[0].is_positive:\n expr_str = '{}*{}'.format(*arg_str)\n elif expr.args[0] == Integer('-1'):\n expr_str = '- {}'.format(arg_str[1])\n else: # negative but not -1\n expr_str = '{}*{}'.format(*arg_str)\n else: # matrix dot multiplication\n if expr.args[0] == Integer('-1'):\n # bring '-' out of mul()\n expr_str = ', '.join(arg_str[1:])\n expr_str = '- mul(' + expr_str + ')'\n else:\n expr_str = ', '.join(arg_str)\n expr_str = 'mul(' + expr_str + ')'\n elif expr.is_Function:\n expr_str = ', '.join(arg_str)\n expr_str = str(expr.func) + '(' + expr_str + ')'\n elif expr.is_Pow:\n if arg_str[1] == '-1':\n expr_str = 'div(1, {})'.format(arg_str[0])\n else:\n expr_str = '({})**{}'.format(*arg_str)\n elif expr.is_Div:\n expr_str = ', '.join(arg_str)\n expr_str = 'div(' + expr_str + ')'\n else:\n raise NotImplementedError\n return expr_str", "response": "Convert the right - hand - side of an equation into CVXOPT matrix operations"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nreading PSS / E RAW file v32 format", "response": "def read(file, system):\n \"\"\"read PSS/E RAW file v32 format\"\"\"\n\n blocks = [\n 'bus', 'load', 'fshunt', 'gen', 'branch', 'transf', 'area',\n 'twotermdc', 'vscdc', 'impedcorr', 'mtdc', 'msline', 'zone',\n 'interarea', 'owner', 'facts', 'swshunt', 'gne', 'Q'\n ]\n nol = [1, 1, 1, 1, 1, 4, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0]\n rawd = re.compile(r'rawd\\d\\d')\n\n retval = True\n version = 0\n b = 0 # current block index\n raw = {}\n for item in blocks:\n raw[item] = []\n\n data = []\n mdata = [] # multi-line data\n mline = 0 # line counter for multi-line models\n\n # parse file into raw 
with to_number conversions\n fid = open(file, 'r')\n for num, line in enumerate(fid.readlines()):\n line = line.strip()\n if num == 0: # get basemva and frequency\n data = line.split('/')[0]\n data = data.split(',')\n\n mva = float(data[1])\n system.config.mva = mva\n system.config.freq = float(data[5])\n\n # get raw file version\n version = int(data[2])\n if not version:\n version = int(rawd.search(line).group(0).strip('rawd'))\n logger.debug('PSSE raw version {} detected'.format(version))\n\n if version < 32 or version > 33:\n logger.warning(\n 'RAW file version is not 32 or 33. Error may occur.')\n continue\n\n elif num == 1: # store the case info line\n logger.info(line)\n continue\n elif num == 2:\n continue\n elif num >= 3:\n if line[0:2] == '0 ' or line[0:3] == ' 0 ': # end of block\n b += 1\n continue\n elif line[0] == 'Q': # end of file\n break\n data = line.split(',')\n\n data = [to_number(item) for item in data]\n mdata.append(data)\n mline += 1\n nol = get_nol(b, mdata)\n\n if mline >= nol:\n if nol == 1:\n mdata = mdata[0]\n raw[blocks[b]].append(mdata)\n mdata = []\n mline = 0\n fid.close()\n\n # add device elements to system\n sw = {} # idx:a0\n max_bus = []\n for data in raw['bus']:\n \"\"\"version 32:\n 0, 1, 2, 3, 4, 5, 6, 7, 8\n ID, NAME, BasekV, Type, Area Zone Owner Vm, Va\n \"\"\"\n idx = data[0]\n max_bus.append(idx)\n ty = data[3]\n a0 = data[8] * deg2rad\n\n if ty == 3:\n sw[idx] = a0\n param = {\n 'idx': idx,\n 'name': data[1],\n 'Vn': data[2],\n 'voltage': data[7],\n 'angle': a0,\n 'area': data[4],\n 'zone': data[5],\n 'owner': data[6],\n }\n system.Bus.elem_add(**param)\n max_bus = max(max_bus)\n for data in raw['load']:\n \"\"\"version 32:\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11\n Bus, Id, Status, Area, Zone, PL(MW), QL (MW), IP, IQ, YP, YQ, OWNER\n \"\"\"\n bus = data[0]\n vn = system.Bus.get_field('Vn', bus)\n voltage = system.Bus.get_field('voltage', bus)\n param = {\n 'bus': bus,\n 'Vn': vn,\n 'Sn': mva,\n 'p': (data[5] + data[7] * 
voltage + data[9] * voltage**2) / mva,\n 'q': (data[6] + data[8] * voltage - data[10] * voltage**2) / mva,\n 'owner': data[11],\n }\n system.PQ.elem_add(**param)\n\n for data in raw['fshunt']:\n \"\"\"\n 0, 1, 2, 3, 4\n Bus, name, Status, g (MW), b (Mvar)\n \"\"\"\n bus = data[0]\n vn = system.Bus.get_field('Vn', bus)\n param = {\n 'bus': bus,\n 'Vn': vn,\n 'u': data[2],\n 'Sn': mva,\n 'g': data[3] / mva,\n 'b': data[4] / mva,\n }\n system.Shunt.elem_add(**param)\n\n gen_idx = 0\n for data in raw['gen']:\n \"\"\"\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11, 12, 13, 14, 15, 16,17,18,19\n I,ID,PG,QG,QT,QB,VS,IREG,MBASE,ZR,ZX,RT,XT,GTAP,STAT,RMPCT,PT,PB,O1,F1\n \"\"\"\n bus = data[0]\n vn = system.Bus.get_field('Vn', bus)\n gen_mva = data[8] # unused yet\n gen_idx += 1\n status = data[14]\n param = {\n 'Sn': gen_mva,\n 'Vn': vn,\n 'u': status,\n 'idx': gen_idx,\n 'bus': bus,\n 'pg': status * data[2] / mva,\n 'qg': status * data[3] / mva,\n 'qmax': data[4] / mva,\n 'qmin': data[5] / mva,\n 'v0': data[6],\n 'ra': data[9], # ra armature resistance\n 'xs': data[10], # xs synchronous reactance\n 'pmax': data[16] / mva,\n 'pmin': data[17] / mva,\n }\n if data[0] in sw.keys():\n param.update({\n 'a0': sw[data[0]],\n })\n system.SW.elem_add(**param)\n else:\n system.PV.elem_add(**param)\n\n for data in raw['branch']:\n \"\"\"\n I,J,CKT,R,X,B,RATEA,RATEB,RATEC,GI,BI,GJ,BJ,ST,LEN,O1,F1,...,O4,F4\n \"\"\"\n param = {\n 'bus1': data[0],\n 'bus2': data[1],\n 'r': data[3],\n 'x': data[4],\n 'b': data[5],\n 'rate_a': data[6],\n 'Vn': system.Bus.get_field('Vn', data[0]),\n 'Vn2': system.Bus.get_field('Vn', data[1]),\n }\n system.Line.elem_add(**param)\n\n xf_3_count = 1\n for data in raw['transf']:\n if len(data) == 4:\n \"\"\"\n I,J,K,CKT,CW,CZ,CM,MAG1,MAG2,NMETR,'NAME',STAT,O1,F1,...,O4,F4\n R1-2,X1-2,SBASE1-2\n WINDV1,NOMV1,ANG1,RATA1,RATB1,RATC1,COD1,CONT1,RMA1,RMI1,VMA1,VMI1,NTP1,TAB1,CR1,CX1\n WINDV2,NOMV2\n \"\"\"\n\n tap = data[2][0]\n phi = data[2][2]\n\n if tap == 1 and phi == 
0:\n trasf = False\n else:\n trasf = True\n param = {\n 'trasf': trasf,\n 'bus1': data[0][0],\n 'bus2': data[0][1],\n 'u': data[0][11],\n 'b': data[0][8],\n 'r': data[1][0],\n 'x': data[1][1],\n 'tap': tap,\n 'phi': phi,\n 'rate_a': data[2][3],\n 'Vn': system.Bus.get_field('Vn', data[0][0]),\n 'Vn2': system.Bus.get_field('Vn', data[0][1]),\n }\n system.Line.elem_add(**param)\n else:\n \"\"\"\n I, J, K, CKT, CW, CZ, CM, MAG1, MAG2, NMETR, 'NAME', STAT, Ol, Fl,...,o4, F4\n R1\u20142, X1\u20142, SBASE1\u20142, R2\u20143, X2\u20143, SBASE2\u20143, R3\u20141, X3\u20141, SBASE3\u20141, VMSTAR, ANSTAR\n WINDV1, NOMV1, ANG1, RATA1, BATB1, RATC1, COD1, CONT1, RMA1, RMI1, VMA1, VMI1, NTP1, TAB1, CR1, CX1\n WINDV2, NOMV2, ANG2, RATA2, BATB2, RATC2, COD2, CONT2, RMA2, RMI2, VMA2, VMI2, NTP2, TAB2, CR2, CX2\n WINDV3, NOMV3, ANG3, RATA3, BATB3, RATC3, COD3, CONT3, RMA3, RMI3, VMA3, VMI3, NTP3, TAB3, CR3, CX3\n \"\"\"\n param = {\n 'idx': max_bus+xf_3_count,\n 'name': '_'.join(str(data[0][:3])),\n 'Vn': system.Bus.get_field('Vn', data[0][0]),\n 'voltage': data[1][-2],\n 'angle': data[1][-1]/180*3.14\n }\n system.Bus.elem_add(**param)\n\n r = []\n x = []\n r.append((data[1][0] + data[1][6] - data[1][3])/2)\n r.append((data[1][3] + data[1][0] - data[1][6])/2)\n r.append((data[1][6] + data[1][3] - data[1][0])/2)\n x.append((data[1][1] + data[1][7] - data[1][4])/2)\n x.append((data[1][4] + data[1][1] - data[1][7])/2)\n x.append((data[1][7] + data[1][4] - data[1][1])/2)\n for i in range(0, 3):\n param = {\n 'trasf': True,\n 'bus1': data[0][i],\n 'bus2': max_bus+xf_3_count,\n 'u': data[0][11],\n 'b': data[0][8],\n 'r': r[i],\n 'x': x[i],\n 'tap': data[2+i][0],\n 'phi': data[2+i][2],\n 'rate_a': data[2+i][3],\n 'Vn': system.Bus.get_field('Vn', data[0][i]),\n 'Vn2': system.Bus.get_field('Vn', data[0][0]),\n }\n system.Line.elem_add(**param)\n xf_3_count += 1\n\n for data in raw['swshunt']:\n # I, MODSW, ADJM, STAT, VSWHI, VSWLO, SWREM, RMPCT, \u2019RMIDNT\u2019,\n # BINIT, N1, B1, N2, 
B2, ... N8, B8\n bus = data[0]\n vn = system.Bus.get_field('Vn', bus)\n param = {\n 'bus': bus,\n 'Vn': vn,\n 'Sn': mva,\n 'u': data[3],\n 'b': data[9] / mva,\n }\n system.Shunt.elem_add(**param)\n\n for data in raw['area']:\n \"\"\"ID, ISW, PDES, PTOL, ARNAME\"\"\"\n param = {\n 'idx': data[0],\n 'isw': data[1],\n 'pdes': data[2],\n 'ptol': data[3],\n 'name': data[4],\n }\n system.Area.elem_add(**param)\n\n for data in raw['zone']:\n \"\"\"ID, NAME\"\"\"\n param = {\n 'idx': data[0],\n 'name': data[1],\n }\n system.Zone.elem_add(**param)\n\n return retval"} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nread DYR file and add all device elements to system", "response": "def readadd(file, system):\n \"\"\"read DYR file\"\"\"\n dyr = {}\n data = []\n end = 0\n retval = True\n sep = ','\n\n fid = open(file, 'r')\n for line in fid.readlines():\n if line.find('/') >= 0:\n line = line.split('/')[0]\n end = 1\n if line.find(',') >= 0: # mixed comma and space splitter not allowed\n line = [to_number(item.strip()) for item in line.split(sep)]\n else:\n line = [to_number(item.strip()) for item in line.split()]\n if not line:\n end = 0\n continue\n data.extend(line)\n if end == 1:\n field = data[1]\n if field not in dyr.keys():\n dyr[field] = []\n dyr[field].append(data)\n end = 0\n data = []\n fid.close()\n\n # elem_add device elements to system\n supported = [\n 'GENROU',\n 'GENCLS',\n 'ESST3A',\n 'ESDC2A',\n 'SEXS',\n 'EXST1',\n 'ST2CUT',\n 'IEEEST',\n 'TGOV1',\n ]\n used = list(supported)\n for model in supported:\n if model not in dyr.keys():\n used.remove(model)\n continue\n for data in dyr[model]:\n add_dyn(system, model, data)\n\n needed = list(dyr.keys())\n for i in supported:\n if i in needed:\n needed.remove(i)\n\n logger.warning('Models currently unsupported: {}'.format(\n ', '.join(needed)))\n\n return retval"} {"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef _sanitize(recipe):\n recipe = 
recipe.copy()\n for k in list(recipe):\n if k not in (\"start\", \"error\") and int(k) and k != int(k):\n recipe[int(k)] = recipe[k]\n del recipe[k]\n for k in list(recipe):\n if \"output\" in recipe[k] and not isinstance(\n recipe[k][\"output\"], (list, dict)\n ):\n recipe[k][\"output\"] = [recipe[k][\"output\"]]\n # dicts should be normalized, too\n if \"start\" in recipe:\n recipe[\"start\"] = [tuple(x) for x in recipe[\"start\"]]\n return recipe", "response": "Clean up a recipe that may have been stored as serialized json string."} {"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef validate(self):\n if not self.recipe:\n raise workflows.Error(\"Invalid recipe: No recipe defined\")\n\n # Without a 'start' node nothing would happen\n if \"start\" not in self.recipe:\n raise workflows.Error('Invalid recipe: \"start\" node missing')\n if not self.recipe[\"start\"]:\n raise workflows.Error('Invalid recipe: \"start\" node empty')\n if not all(\n isinstance(x, (list, tuple)) and len(x) == 2 for x in self.recipe[\"start\"]\n ):\n raise workflows.Error('Invalid recipe: \"start\" node invalid')\n if any(x[0] == \"start\" for x in self.recipe[\"start\"]):\n raise workflows.Error('Invalid recipe: \"start\" node points to itself')\n\n # Check that 'error' node points to regular nodes only\n if \"error\" in self.recipe and isinstance(\n self.recipe[\"error\"], (list, tuple, basestring)\n ):\n if \"start\" in self.recipe[\"error\"]:\n raise workflows.Error(\n 'Invalid recipe: \"error\" node points to \"start\" node'\n )\n if \"error\" in self.recipe[\"error\"]:\n raise workflows.Error('Invalid recipe: \"error\" node points to itself')\n\n # All other nodes must be numeric\n nodes = list(\n filter(\n lambda x: not isinstance(x, int) and x not in (\"start\", \"error\"),\n self.recipe,\n )\n )\n if nodes:\n raise workflows.Error('Invalid recipe: Node \"%s\" is not numeric' % nodes[0])\n\n # Detect cycles\n 
touched_nodes = set([\"start\", \"error\"])\n\n def flatten_links(struct):\n \"\"\"Take an output/error link object, list or dictionary and return flat list of linked nodes.\"\"\"\n if struct is None:\n return []\n if isinstance(struct, int):\n return [struct]\n if isinstance(struct, list):\n if not all(isinstance(x, int) for x in struct):\n raise workflows.Error(\n \"Invalid recipe: Invalid link in recipe (%s)\" % str(struct)\n )\n return struct\n if isinstance(struct, dict):\n joined_list = []\n for sub_list in struct.values():\n joined_list += flatten_links(sub_list)\n return joined_list\n raise workflows.Error(\n \"Invalid recipe: Invalid link in recipe (%s)\" % str(struct)\n )\n\n def find_cycles(path):\n \"\"\"Depth-First-Search helper function to identify cycles.\"\"\"\n if path[-1] not in self.recipe:\n raise workflows.Error(\n 'Invalid recipe: Node \"%s\" is referenced via \"%s\" but missing'\n % (str(path[-1]), str(path[:-1]))\n )\n touched_nodes.add(path[-1])\n node = self.recipe[path[-1]]\n for outgoing in (\"output\", \"error\"):\n if outgoing in node:\n references = flatten_links(node[outgoing])\n for n in references:\n if n in path:\n raise workflows.Error(\n \"Invalid recipe: Recipe contains cycle (%s -> %s)\"\n % (str(path), str(n))\n )\n find_cycles(path + [n])\n\n for link in self.recipe[\"start\"]:\n find_cycles([\"start\", link[0]])\n if \"error\" in self.recipe:\n if isinstance(self.recipe[\"error\"], (list, tuple)):\n for link in self.recipe[\"error\"]:\n find_cycles([\"error\", link])\n else:\n find_cycles([\"error\", self.recipe[\"error\"]])\n\n # Test recipe for unreferenced nodes\n for node in self.recipe:\n if node not in touched_nodes:\n raise workflows.Error(\n 'Invalid recipe: Recipe contains unreferenced node \"%s\"' % str(node)\n )", "response": "Check whether the encoded recipe is valid."} {"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a new Recipe object containing all the information from 
the current recipe and the other recipe.", "response": "def merge(self, other):\n \"\"\"Merge two recipes together, returning a single recipe containing all\n nodes.\n Note: This does NOT yet return a minimal recipe.\n :param other: A Recipe object that should be merged with the current\n Recipe object.\n :return: A new Recipe object containing information from both recipes.\n \"\"\"\n\n # Merging empty values returns a copy of the original\n if not other:\n return Recipe(self.recipe)\n\n # When a string is passed, merge with a constructed recipe object\n if isinstance(other, basestring):\n return self.merge(Recipe(other))\n\n # Merging empty recipes returns a copy of the original\n if not other.recipe:\n return Recipe(self.recipe)\n\n # If own recipe empty, use other recipe\n if not self.recipe:\n return Recipe(other.recipe)\n\n # Assuming both recipes are valid\n self.validate()\n other.validate()\n\n # Start from current recipe\n new_recipe = self.recipe\n\n # Find the maximum index of the current recipe\n max_index = max(1, *filter(lambda x: isinstance(x, int), self.recipe.keys()))\n next_index = max_index + 1\n\n # Set up a translation table for indices and copy all entries\n translation = {}\n for key, value in other.recipe.items():\n if isinstance(key, int):\n if key not in translation:\n translation[key] = next_index\n next_index = next_index + 1\n new_recipe[translation[key]] = value\n\n # Rewrite all copied entries to point to new keys\n def translate(x):\n if isinstance(x, list):\n return list(map(translate, x))\n elif isinstance(x, tuple):\n return tuple(map(translate, x))\n elif isinstance(x, dict):\n return {k: translate(v) for k, v in x.items()}\n else:\n return translation[x]\n\n for idx in translation.values():\n if \"output\" in new_recipe[idx]:\n new_recipe[idx][\"output\"] = translate(new_recipe[idx][\"output\"])\n if \"error\" in new_recipe[idx]:\n new_recipe[idx][\"error\"] = translate(new_recipe[idx][\"error\"])\n\n # Join 'start' nodes\n for 
(idx, param) in other.recipe[\"start\"]:\n new_recipe[\"start\"].append((translate(idx), param))\n\n # Join 'error' nodes\n if \"error\" in other.recipe:\n if \"error\" not in new_recipe:\n new_recipe[\"error\"] = translate(other.recipe[\"error\"])\n else:\n if isinstance(new_recipe[\"error\"], (list, tuple)):\n new_recipe[\"error\"] = list(new_recipe[\"error\"])\n else:\n new_recipe[\"error\"] = list([new_recipe[\"error\"]])\n if isinstance(other.recipe[\"error\"], (list, tuple)):\n new_recipe[\"error\"].extend(translate(other.recipe[\"error\"]))\n else:\n new_recipe[\"error\"].append(translate(other.recipe[\"error\"]))\n\n # # Minimize DAG\n # queuehash, topichash = {}, {}\n # for k, v in new_recipe.items():\n # if isinstance(v, dict):\n # if 'queue' in v:\n # queuehash[v['queue']] = queuehash.get(v['queue'], [])\n # queuehash[v['queue']].append(k)\n # if 'topic' in v:\n # topichash[v['topic']] = topichash.get(v['topic'], [])\n # topichash[v['topic']].append(k)\n #\n # print queuehash\n # print topichash\n\n return Recipe(new_recipe)"} {"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef strip_spaces_and_quotes(value):\n value = value.strip() if value else \"\"\n if value and len(value) > 1 and (value[0] == value[-1] == '\"'):\n value = value[1:-1]\n if not value:\n value = \"\"\n return value", "response": "Remove invalid whitespace and single pair of dquotes and return None for empty strings."} {"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef parse_string(data, unquote=default_unquote):\n if data is None:\n return None\n\n # We'll soon need to unquote to recover our UTF-8 data.\n # In Python 2, unquote crashes on chars beyond ASCII. 
So encode functions\n # had better not include anything beyond ASCII in data.\n # In Python 3, unquote crashes on bytes objects, requiring conversion to\n # str objects (unicode) using decode().\n # But in Python 2, the same decode causes unquote to butcher the data.\n # So in that case, just leave the bytes.\n if isinstance(data, bytes):\n if sys.version_info > (3, 0, 0): # pragma: no cover\n data = data.decode('ascii')\n # Recover URL encoded data\n unquoted = unquote(data)\n # Without this step, Python 2 may have good URL decoded *bytes*,\n # which will therefore not normalize as unicode and not compare to\n # the original.\n if isinstance(unquoted, bytes):\n unquoted = unquoted.decode('utf-8')\n return unquoted", "response": "Decode URL - encoded strings to UTF - 8 containing the escaped chars."}