_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q261600
build
validation
def build(ctx, project, build):  # pylint:disable=redefined-outer-name
    """Commands for build jobs."""
    # Lazily create the shared click payload, then record the current
    # project/build selection for the subcommands to read back.
    ctx.obj = ctx.obj or {}
    ctx.obj.update(project=project, build=build)
python
{ "resource": "" }
q261601
get
validation
def get(ctx):
    """Get build job.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon build -b 1 get
    ```

    \b
    ```bash
    $ polyaxon build --build=1 --project=project_name get
    ```
    """
    # Resolve the target from explicit options or the cached local config.
    user, project_name, _build = get_build_or_local(ctx.obj.get('project'),
                                                    ctx.obj.get('build'))
    try:
        # Fetch the job and refresh the local cache in one step.
        job_api = PolyaxonClient().build_job
        response = job_api.get_build(user, project_name, _build)
        cache.cache(config_manager=BuildJobManager, response=response)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not get build job `{}`.'.format(_build))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    get_build_details(response)
python
{ "resource": "" }
q261602
delete
validation
def delete(ctx):
    """Delete build job.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon build delete
    ```

    \b
    ```bash
    $ polyaxon build -b 2 delete
    ```
    """
    user, project_name, _build = get_build_or_local(ctx.obj.get('project'),
                                                    ctx.obj.get('build'))
    if not click.confirm("Are sure you want to delete build job `{}`".format(_build)):
        # Fixed typo: message previously read 'Existing without deleting build job.'
        click.echo('Exiting without deleting build job.')
        sys.exit(1)
    try:
        response = PolyaxonClient().build_job.delete_build(
            user, project_name, _build)
        # Purge caching
        BuildJobManager.purge()
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        # Fixed message: previously said 'Could not delete job' — this is a
        # build job, matching the other messages in this command group.
        Printer.print_error('Could not delete build job `{}`.'.format(_build))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)
    if response.status_code == 204:
        Printer.print_success("Build job `{}` was deleted successfully".format(_build))
python
{ "resource": "" }
q261603
update
validation
def update(ctx, name, description, tags):
    """Update build.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon build -b 2 update --description="new description for my build"
    ```
    """
    user, project_name, _build = get_build_or_local(ctx.obj.get('project'),
                                                    ctx.obj.get('build'))
    # Collect only the fields the user actually provided.
    patch = {}
    if name:
        patch['name'] = name
    if description:
        patch['description'] = description
    tags = validate_tags(tags)
    if tags:
        patch['tags'] = tags
    if not patch:
        Printer.print_warning('No argument was provided to update the build.')
        sys.exit(0)
    try:
        response = PolyaxonClient().build_job.update_build(
            user, project_name, _build, patch)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not update build `{}`.'.format(_build))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success("Build updated.")
    get_build_details(response)
python
{ "resource": "" }
q261604
stop
validation
def stop(ctx, yes):
    """Stop build job.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon build stop
    ```

    \b
    ```bash
    $ polyaxon build -b 2 stop
    ```
    """
    user, project_name, _build = get_build_or_local(ctx.obj.get('project'),
                                                    ctx.obj.get('build'))
    if not yes and not click.confirm("Are sure you want to stop "
                                     "job `{}`".format(_build)):
        # Fixed typo: message previously read 'Existing without stopping build job.'
        click.echo('Exiting without stopping build job.')
        sys.exit(0)
    try:
        PolyaxonClient().build_job.stop(user, project_name, _build)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not stop build job `{}`.'.format(_build))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)
    Printer.print_success("Build job is being stopped.")
python
{ "resource": "" }
q261605
bookmark
validation
def bookmark(ctx):
    """Bookmark build job.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon build bookmark
    ```

    \b
    ```bash
    $ polyaxon build -b 2 bookmark
    ```
    """
    user, project_name, _build = get_build_or_local(ctx.obj.get('project'),
                                                    ctx.obj.get('build'))
    try:
        # Bookmarking is fire-and-forget; no response payload is used.
        PolyaxonClient().build_job.bookmark(user, project_name, _build)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not bookmark build job `{}`.'.format(_build))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success("Build job bookmarked.")
python
{ "resource": "" }
q261606
resources
validation
def resources(ctx, gpu):
    """Get build job resources.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon build -b 2 resources
    ```

    For GPU resources

    \b
    ```bash
    $ polyaxon build -b 2 resources --gpu
    ```
    """
    user, project_name, _build = get_build_or_local(ctx.obj.get('project'),
                                                    ctx.obj.get('build'))
    # Select the renderer: GPU-specific formatting when --gpu is passed.
    if gpu:
        handler = Printer.gpu_resources
    else:
        handler = Printer.resources
    try:
        PolyaxonClient().build_job.resources(user,
                                             project_name,
                                             _build,
                                             message_handler=handler)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not get resources for build job `{}`.'.format(_build))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
python
{ "resource": "" }
q261607
init
validation
def init(project, polyaxonfile):
    """Initialize a new polyaxonfile specification."""
    user, project_name = get_project_or_local(project)
    try:
        project_config = PolyaxonClient().project.get_project(user, project_name)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Make sure you have a project with this name `{}`'.format(project))
        # Fixed grammar: message previously read 'You can a create new project'.
        Printer.print_error(
            'You can create a new project with this command: '
            'polyaxon project create '
            '--name={} [--description=...] [--tags=...]'.format(project_name))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)

    # Initialize the local project config, asking before clobbering an
    # existing one.
    init_project = False
    if ProjectManager.is_initialized():
        local_project = ProjectManager.get_config()
        click.echo('Warning! This project is already initialized with the following project:')
        with clint.textui.indent(4):
            clint.textui.puts('User: {}'.format(local_project.user))
            clint.textui.puts('Project: {}'.format(local_project.name))
        if click.confirm('Would you like to override this current config?', default=False):
            init_project = True
    else:
        init_project = True

    if init_project:
        ProjectManager.purge()
        ProjectManager.set_config(project_config, init=True)
        Printer.print_success('Project was initialized')
    else:
        Printer.print_header('Project config was not changed.')

    # Same dance for the .polyaxonignore file.
    init_ignore = False
    if IgnoreManager.is_initialized():
        click.echo('Warning! Found a .polyaxonignore file.')
        if click.confirm('Would you like to override it?', default=False):
            init_ignore = True
    else:
        init_ignore = True

    if init_ignore:
        IgnoreManager.init_config()
        Printer.print_success('New .polyaxonignore file was created.')
    else:
        Printer.print_header('.polyaxonignore file was not changed.')

    if polyaxonfile:
        create_polyaxonfile()
python
{ "resource": "" }
q261608
bookmark
validation
def bookmark(ctx, username):  # pylint:disable=redefined-outer-name
    """Commands for bookmarks."""
    # Stash the requested username on the click context for subcommands.
    if ctx.obj is None:
        ctx.obj = {}
    ctx.obj['username'] = username
python
{ "resource": "" }
q261609
projects
validation
def projects(ctx, page):
    """List bookmarked projects for user.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon bookmark projects
    ```

    \b
    ```bash
    $ polyaxon bookmark -u adam projects
    ```
    """
    user = get_username_or_local(ctx.obj.get('username'))
    # Default to the first page when no page was requested.
    page = page or 1
    try:
        response = PolyaxonClient().bookmark.projects(username=user, page=page)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error(
            'Could not get bookmarked projects for user `{}`.'.format(user))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)

    meta = get_meta_response(response)
    if meta:
        Printer.print_header('Bookmarked projects for user `{}`.'.format(user))
        Printer.print_header('Navigation:')
        dict_tabulate(meta)
    else:
        Printer.print_header('No bookmarked projects found for user `{}`.'.format(user))

    # Colorize by status, then flatten for tabulation.
    rows = list_dicts_to_tabulate(
        [Printer.add_status_color(o.to_light_dict(humanize_values=True))
         for o in response['results']])
    if rows:
        Printer.print_header("Projects:")
        dict_tabulate(rows, is_list_dict=True)
python
{ "resource": "" }
q261610
IgnoreManager._remove_trailing_spaces
validation
def _remove_trailing_spaces(line): """Remove trailing spaces unless they are quoted with a backslash.""" while line.endswith(' ') and not line.endswith('\\ '): line = line[:-1] return line.replace('\\ ', ' ')
python
{ "resource": "" }
q261611
IgnoreManager.find_matching
validation
def find_matching(cls, path, patterns):
    """Yield all matching patterns for path."""
    # Lazily filter: only patterns whose .match() accepts the path.
    yield from (pattern for pattern in patterns if pattern.match(path))
python
{ "resource": "" }
q261612
IgnoreManager.is_ignored
validation
def is_ignored(cls, path, patterns):
    """Check whether a path is ignored. For directories, include a trailing slash."""
    # The last matching pattern wins; None means nothing matched at all.
    status = None
    for matched in cls.find_matching(path, patterns):
        status = matched.is_exclude
    return status
python
{ "resource": "" }
q261613
IgnoreManager._matches_patterns
validation
def _matches_patterns(path, patterns): """Given a list of patterns, returns a if a path matches any pattern.""" for glob in patterns: try: if PurePath(path).match(glob): return True except TypeError: pass return False
python
{ "resource": "" }
q261614
IgnoreManager._ignore_path
validation
def _ignore_path(cls, path, ignore_list=None, white_list=None): """Returns a whether a path should be ignored or not.""" ignore_list = ignore_list or [] white_list = white_list or [] return (cls._matches_patterns(path, ignore_list) and not cls._matches_patterns(path, white_list))
python
{ "resource": "" }
q261615
group
validation
def group(ctx, project, group):  # pylint:disable=redefined-outer-name
    """Commands for experiment groups."""
    # Record the project/group scope on the shared click payload.
    if ctx.obj is None:
        ctx.obj = {}
    ctx.obj.update(project=project, group=group)
python
{ "resource": "" }
q261616
get
validation
def get(ctx):
    """Get experiment group by uuid.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon group -g 13 get
    ```
    """
    user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),
                                                            ctx.obj.get('group'))
    try:
        # Fetch the group and refresh the local cache in one step.
        group_api = PolyaxonClient().experiment_group
        response = group_api.get_experiment_group(user, project_name, _group)
        cache.cache(config_manager=GroupManager, response=response)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not get experiment group `{}`.'.format(_group))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    get_group_details(response)
python
{ "resource": "" }
q261617
delete
validation
def delete(ctx):
    """Delete experiment group.

    Uses [Caching](/references/polyaxon-cli/#caching)
    """
    user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),
                                                            ctx.obj.get('group'))
    if not click.confirm("Are sure you want to delete experiment group `{}`".format(_group)):
        # Fixed typo: message previously read 'Existing without deleting ...'.
        click.echo('Exiting without deleting experiment group.')
        sys.exit(0)
    try:
        response = PolyaxonClient().experiment_group.delete_experiment_group(
            user, project_name, _group)
        # Purge caching
        GroupManager.purge()
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not delete experiment group `{}`.'.format(_group))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)
    if response.status_code == 204:
        # Fixed grammar: previously read 'was delete successfully'.
        Printer.print_success("Experiment group `{}` was deleted successfully".format(_group))
python
{ "resource": "" }
q261618
update
validation
def update(ctx, name, description, tags):
    """Update experiment group.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon group -g 2 update --description="new description for this group"
    ```

    \b
    ```bash
    $ polyaxon update --tags="foo, bar"
    ```
    """
    user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),
                                                            ctx.obj.get('group'))
    # Collect only the fields the user actually provided.
    patch = {}
    if name:
        patch['name'] = name
    if description:
        patch['description'] = description
    tags = validate_tags(tags)
    if tags:
        patch['tags'] = tags
    if not patch:
        Printer.print_warning('No argument was provided to update the experiment group.')
        sys.exit(0)
    try:
        response = PolyaxonClient().experiment_group.update_experiment_group(
            user, project_name, _group, patch)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not update experiment group `{}`.'.format(_group))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success("Experiment group updated.")
    get_group_details(response)
python
{ "resource": "" }
q261619
stop
validation
def stop(ctx, yes, pending):
    """Stop experiments in the group.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples: stop only pending experiments

    \b
    ```bash
    $ polyaxon group stop --pending
    ```

    Examples: stop all unfinished

    \b
    ```bash
    $ polyaxon group stop
    ```

    \b
    ```bash
    $ polyaxon group -g 2 stop
    ```
    """
    user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),
                                                            ctx.obj.get('group'))
    if not yes and not click.confirm("Are sure you want to stop experiments "
                                     "in group `{}`".format(_group)):
        # Fixed typo: message previously read 'Existing without stopping ...'.
        click.echo('Exiting without stopping experiments in group.')
        sys.exit(0)
    try:
        PolyaxonClient().experiment_group.stop(user, project_name, _group, pending=pending)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not stop experiments in group `{}`.'.format(_group))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)
    Printer.print_success("Experiments in group are being stopped.")
python
{ "resource": "" }
q261620
bookmark
validation
def bookmark(ctx):
    """Bookmark group.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    \b
    ```bash
    $ polyaxon group bookmark
    ```

    \b
    ```bash
    $ polyaxon group -g 2 bookmark
    ```
    """
    user, project_name, _group = get_project_group_or_local(ctx.obj.get('project'),
                                                            ctx.obj.get('group'))
    try:
        # Bookmarking is fire-and-forget; no response payload is used.
        PolyaxonClient().experiment_group.bookmark(user, project_name, _group)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not bookmark group `{}`.'.format(_group))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success("Experiments group is bookmarked.")
python
{ "resource": "" }
q261621
config
validation
def config(list):  # pylint:disable=redefined-builtin
    """Set and get the global configurations."""
    # Only the --list flag does anything at this group level.
    if not list:
        return
    _config = GlobalConfigManager.get_config_or_default()
    Printer.print_header('Current config:')
    dict_tabulate(_config.to_dict())
python
{ "resource": "" }
q261622
get
validation
def get(keys):
    """Get the global config values by keys.

    Example:

    \b
    ```bash
    $ polyaxon config get host http_port
    ```
    """
    # Nothing requested: nothing to show.
    if not keys:
        return
    _config = GlobalConfigManager.get_config_or_default()
    values = {}
    for key in keys:
        # Unknown keys are reported but do not abort the command.
        if not hasattr(_config, key):
            click.echo('Key `{}` is not recognised.'.format(key))
            continue
        values[key] = getattr(_config, key)
    dict_tabulate(values)
python
{ "resource": "" }
q261623
set
validation
def set(verbose,  # pylint:disable=redefined-builtin
        host,
        http_port,
        ws_port,
        use_https,
        verify_ssl):
    """Set the global config values.

    Example:

    \b
    ```bash
    $ polyaxon config set --host=localhost --http_port=80
    ```
    """
    # Note: docstring example previously read '--hots=localhost http_port=80'.
    _config = GlobalConfigManager.get_config_or_default()

    if verbose is not None:
        _config.verbose = verbose

    if host is not None:
        _config.host = host

    if http_port is not None:
        _config.http_port = http_port

    if ws_port is not None:
        _config.ws_port = ws_port

    if use_https is not None:
        _config.use_https = use_https

    # NOTE(review): unlike the other options this only applies an explicit
    # False (can disable but never re-enable verification) — confirm intent.
    if verify_ssl is False:
        _config.verify_ssl = verify_ssl

    GlobalConfigManager.set_config(_config)
    Printer.print_success('Config was updated.')
    # Reset cli config
    CliConfigManager.purge()
python
{ "resource": "" }
q261624
activate
validation
def activate(username):
    """Activate a user.

    Example:

    \b
    ```bash
    $ polyaxon user activate david
    ```
    """
    try:
        PolyaxonClient().user.activate_user(username)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not activate user `{}`.'.format(username))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success("User `{}` was activated successfully.".format(username))
python
{ "resource": "" }
q261625
delete
validation
def delete(username):
    """Delete a user.

    Example:

    \b
    ```bash
    $ polyaxon user delete david
    ```
    """
    try:
        PolyaxonClient().user.delete_user(username)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not delete user `{}`.'.format(username))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success("User `{}` was deleted successfully.".format(username))
python
{ "resource": "" }
q261626
deploy
validation
def deploy(file, manager_path, check, dry_run):  # pylint:disable=redefined-builtin
    """Deploy polyaxon."""
    config = read_deployment_config(file)
    manager = DeployManager(config=config,
                            filepath=file,
                            manager_path=manager_path,
                            dry_run=dry_run)
    failure = None
    if check:
        # Validation only: let any config error propagate as usual.
        manager.check()
        Printer.print_success('Polyaxon deployment file is valid.')
    else:
        try:
            manager.install()
        except Exception as err:  # pylint:disable=broad-except
            Printer.print_error('Polyaxon could not be installed.')
            failure = err
    if failure:
        Printer.print_error('Error message `{}`.'.format(failure))
python
{ "resource": "" }
q261627
teardown
validation
def teardown(file):  # pylint:disable=redefined-builtin
    """Teardown a polyaxon deployment given a config file."""
    config = read_deployment_config(file)
    manager = DeployManager(config=config, filepath=file)
    failure = None
    try:
        # Let the user decide whether pre-delete hooks should run.
        run_hooks = click.confirm('Would you like to execute pre-delete hooks?',
                                  default=True)
        manager.teardown(hooks=run_hooks)
    except Exception as err:  # pylint:disable=broad-except
        Printer.print_error('Polyaxon could not teardown the deployment.')
        failure = err
    if failure:
        Printer.print_error('Error message `{}`.'.format(failure))
python
{ "resource": "" }
q261628
create_tarfile
validation
def create_tarfile(files, project_name):
    """Create a tar file based on the list of files passed.

    Context-style generator: yields the path of a temporary ``.tar.gz``
    archive containing *files*, and removes it once the consumer resumes
    the generator (or closes it / raises into it).
    """
    fd, filename = tempfile.mkstemp(prefix="polyaxon_{}".format(project_name),
                                    suffix='.tar.gz')
    try:
        with tarfile.open(filename, "w:gz") as tar:
            for f in files:
                tar.add(f)
        yield filename
    finally:
        # Previously this cleanup ran only on the happy path, leaking the
        # descriptor and the temp archive when the consumer raised or when
        # tar.add failed. finally guarantees cleanup in every case.
        os.close(fd)
        os.remove(filename)
python
{ "resource": "" }
q261629
version
validation
def version(cli, platform):
    """Print the current version of the cli and platform."""
    version_client = PolyaxonClient().version
    # With no flags at all, default to showing the cli version.
    if not cli and not platform:
        cli = True

    if cli:
        try:
            server_version = version_client.get_cli_version()
        except AuthorizationError:
            session_expired()
            sys.exit(1)
        except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
            Printer.print_error('Could not get cli version.')
            Printer.print_error('Error message `{}`.'.format(err))
            sys.exit(1)
        cli_version = get_version(PROJECT_CLI_NAME)
        Printer.print_header('Current cli version: {}.'.format(cli_version))
        Printer.print_header('Supported cli versions:')
        dict_tabulate(server_version.to_dict())

    if platform:
        try:
            platform_version = version_client.get_platform_version()
        except AuthorizationError:
            session_expired()
            sys.exit(1)
        except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
            Printer.print_error('Could not get platform version.')
            Printer.print_error('Error message `{}`.'.format(err))
            sys.exit(1)
        chart_version = version_client.get_chart_version()
        Printer.print_header('Current platform version: {}.'.format(chart_version.version))
        Printer.print_header('Supported platform versions:')
        dict_tabulate(platform_version.to_dict())
python
{ "resource": "" }
q261630
dashboard
validation
def dashboard(yes, url):
    """Open dashboard in browser."""
    dashboard_url = "{}/app".format(PolyaxonClient().api_config.http_host)
    # --url just prints the address instead of opening a browser.
    if url:
        click.echo(dashboard_url)
        sys.exit(0)
    if not yes:
        click.confirm('Dashboard page will now open in your browser. Continue?',
                      abort=True,
                      default=True)
    click.launch(dashboard_url)
python
{ "resource": "" }
q261631
grant
validation
def grant(username):
    """Grant superuser role to a user.

    Example:

    \b
    ```bash
    $ polyaxon superuser grant david
    ```
    """
    try:
        PolyaxonClient().user.grant_superuser(username)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not grant superuser role to user `{}`.'.format(username))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success(
        "Superuser role was granted successfully to user `{}`.".format(username))
python
{ "resource": "" }
q261632
revoke
validation
def revoke(username):
    """Revoke superuser role to a user.

    Example:

    \b
    ```bash
    $ polyaxon superuser revoke david
    ```
    """
    try:
        PolyaxonClient().user.revoke_superuser(username)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not revoke superuser role from user `{}`.'.format(username))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success(
        "Superuser role was revoked successfully from user `{}`.".format(username))
python
{ "resource": "" }
q261633
url
validation
def url(ctx):
    """Prints the notebook url for this project.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon notebook url
    ```
    """
    user, project_name = get_project_or_local(ctx.obj.get('project'))
    try:
        response = PolyaxonClient().project.get_project(user, project_name)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not get project `{}`.'.format(project_name))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)

    # No running notebook: point the user at the start command instead.
    if not response.has_notebook:
        Printer.print_warning(
            'This project `{}` does not have a running notebook.'.format(project_name))
        click.echo('You can start a notebook with this command: polyaxon notebook start --help')
        return
    click.echo(get_notebook_url(user, project_name))
python
{ "resource": "" }
q261634
start
validation
def start(ctx, file, u):  # pylint:disable=redefined-builtin
    """Start a notebook deployment for this project.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon notebook start -f file -f file_override ...
    ```

    Example: upload before running

    \b
    ```bash
    $ polyaxon -p user12/mnist notebook start -f file -u
    ```
    """
    specification = check_polyaxonfile(file, log=False).specification if file else None

    # Check if we need to upload
    if u:
        ctx.invoke(upload, sync=False)

    job_config = None
    if specification:
        # pylint:disable=protected-access
        check_polyaxonfile_kind(specification=specification, kind=specification._NOTEBOOK)
        job_config = specification.parsed_data

    user, project_name = get_project_or_local(ctx.obj.get('project'))
    try:
        response = PolyaxonClient().project.start_notebook(user, project_name, job_config)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not start notebook project `{}`.'.format(project_name))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)

    # 200 means a notebook is already running for this project.
    if response.status_code == 200:
        Printer.print_header("A notebook for this project is already running on:")
        click.echo(get_notebook_url(user, project_name))
        sys.exit(0)

    if response.status_code != 201:
        Printer.print_error('Something went wrong, Notebook was not created.')
        sys.exit(1)

    Printer.print_success('Notebook is being deployed for project `{}`'.format(project_name))
    clint.textui.puts("It may take some time before you can access the notebook.\n")
    clint.textui.puts("Your notebook will be available on:\n")
    with clint.textui.indent(4):
        clint.textui.puts(get_notebook_url(user, project_name))
python
{ "resource": "" }
q261635
stop
validation
def stop(ctx, commit, yes):
    """Stops the notebook deployment for this project if it exists.

    Uses [Caching](/references/polyaxon-cli/#caching)
    """
    user, project_name = get_project_or_local(ctx.obj.get('project'))

    if not yes and not click.confirm("Are sure you want to stop notebook "
                                     "for project `{}/{}`".format(user, project_name)):
        # Fixed typo: message previously read 'Existing without stopping notebook.'
        click.echo('Exiting without stopping notebook.')
        sys.exit(1)

    # Committing the notebook's code is the default behavior.
    if commit is None:
        commit = True

    try:
        PolyaxonClient().project.stop_notebook(user, project_name, commit)
        Printer.print_success('Notebook is being deleted')
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not stop notebook project `{}`.'.format(project_name))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)
python
{ "resource": "" }
q261636
DeployManager.check
validation
def check(self):
    """Add platform specific checks"""
    if not self.is_valid:
        raise PolyaxonDeploymentConfigError(
            'Deployment type `{}` not supported'.format(self.deployment_type))

    # Dispatch to the platform-specific validation; exactly one branch runs.
    passed = False
    if self.is_kubernetes:
        passed = self.check_for_kubernetes()
    elif self.is_docker_compose:
        passed = self.check_for_docker_compose()
    elif self.is_docker:
        passed = self.check_for_docker()
    elif self.is_heroku:
        passed = self.check_for_heroku()

    if not passed:
        raise PolyaxonDeploymentConfigError(
            'Deployment `{}` is not valid'.format(self.deployment_type))
python
{ "resource": "" }
q261637
DeployManager.install
validation
def install(self):
    """Install polyaxon using the current config to the correct platform."""
    if not self.is_valid:
        raise PolyaxonDeploymentConfigError(
            'Deployment type `{}` not supported'.format(self.deployment_type))
    # Guard-return dispatch: first matching platform wins.
    if self.is_kubernetes:
        self.install_on_kubernetes()
        return
    if self.is_docker_compose:
        self.install_on_docker_compose()
        return
    if self.is_docker:
        self.install_on_docker()
        return
    if self.is_heroku:
        self.install_on_heroku()
python
{ "resource": "" }
q261638
DeployManager.upgrade
validation
def upgrade(self):
    """Upgrade deployment."""
    if not self.is_valid:
        raise PolyaxonDeploymentConfigError(
            'Deployment type `{}` not supported'.format(self.deployment_type))
    # Guard-return dispatch: first matching platform wins.
    if self.is_kubernetes:
        self.upgrade_on_kubernetes()
        return
    if self.is_docker_compose:
        self.upgrade_on_docker_compose()
        return
    if self.is_docker:
        self.upgrade_on_docker()
        return
    if self.is_heroku:
        self.upgrade_on_heroku()
python
{ "resource": "" }
q261639
DeployManager.teardown
validation
def teardown(self, hooks=True): """Teardown Polyaxon.""" if not self.is_valid: raise PolyaxonDeploymentConfigError( 'Deployment type `{}` not supported'.format(self.deployment_type)) if self.is_kubernetes: self.teardown_on_kubernetes(hooks=hooks) elif self.is_docker_compose: self.teardown_on_docker_compose() elif self.is_docker: self.teardown_on_docker(hooks=hooks) elif self.is_heroku: self.teardown_on_heroku(hooks=hooks)
python
{ "resource": "" }
q261640
project
validation
def project(ctx, project):  # pylint:disable=redefined-outer-name
    """Commands for projects."""
    # 'create' and 'list' do not operate on a specific project, so the
    # context payload is only populated for the other subcommands.
    if ctx.invoked_subcommand in ['create', 'list']:
        return
    ctx.obj = ctx.obj or {}
    ctx.obj['project'] = project
python
{ "resource": "" }
q261641
create
validation
def create(ctx, name, description, tags, private, init):
    """Create a new project.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon project create --name=cats-vs-dogs --description="Image Classification with DL"
    ```
    """
    # Build and validate the project config; an invalid name raises
    # ValidationError from the schema layer.
    try:
        tag_list = tags.split(',') if tags else None
        project_config = ProjectConfig.from_dict(dict(name=name,
                                                      description=description,
                                                      is_public=not private,
                                                      tags=tag_list))
    except ValidationError:
        Printer.print_error('Project name should contain only alpha numerical, "-", and "_".')
        sys.exit(1)

    try:
        _project = PolyaxonClient().project.create_project(project_config)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not create project `{}`.'.format(name))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)
    Printer.print_success("Project `{}` was created successfully.".format(_project.name))

    if init:
        ctx.obj = {}
        ctx.invoke(init_project, project=name)
python
{ "resource": "" }
q261642
list
validation
def list(page):  # pylint:disable=redefined-builtin
    """List projects.

    Uses [Caching](/references/polyaxon-cli/#caching)
    """
    user = AuthConfigManager.get_value('username')
    if not user:
        Printer.print_error('Please login first. `polyaxon login --help`')
        # Fix: previously fell through and queried the API with user=None.
        sys.exit(1)

    page = page or 1
    try:
        response = PolyaxonClient().project.list_projects(user, page=page)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not get list of projects.')
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)

    meta = get_meta_response(response)
    if meta:
        Printer.print_header('Projects for current user')
        Printer.print_header('Navigation:')
        dict_tabulate(meta)
    else:
        Printer.print_header('No projects found for current user')

    objects = list_dicts_to_tabulate(
        [o.to_light_dict(
            humanize_values=True,
            exclude_attrs=['uuid', 'experiment_groups', 'experiments', 'description',
                           'num_experiments', 'num_independent_experiments',
                           'num_experiment_groups', 'num_jobs', 'num_builds', 'unique_name'])
         for o in response['results']])
    if objects:
        Printer.print_header("Projects:")
        dict_tabulate(objects, is_list_dict=True)
python
{ "resource": "" }
q261643
delete
validation
def delete(ctx):
    """Delete project.

    Uses [Caching](/references/polyaxon-cli/#caching)
    """
    user, project_name = get_project_or_local(ctx.obj.get('project'))

    if not click.confirm("Are sure you want to delete project `{}/{}`".format(user, project_name)):
        # Fixed typo: message previously read 'Existing without deleting project.'
        click.echo('Exiting without deleting project.')
        sys.exit(1)

    try:
        response = PolyaxonClient().project.delete_project(user, project_name)
        local_project = ProjectManager.get_config()
        if local_project and (user, project_name) == (local_project.user, local_project.name):
            # Purge caching
            ProjectManager.purge()
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not delete project `{}/{}`.'.format(user, project_name))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)

    if response.status_code == 204:
        # Fixed grammar: previously read 'was delete successfully'.
        Printer.print_success("Project `{}/{}` was deleted successfully".format(user, project_name))
python
{ "resource": "" }
q261644
update
validation
def update(ctx, name, description, tags, private):
    """Update project.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Example:

    \b
    ```bash
    $ polyaxon update foobar --description="Image Classification with DL using TensorFlow"
    ```

    \b
    ```bash
    $ polyaxon update mike1/foobar --description="Image Classification with DL using TensorFlow"
    ```

    \b
    ```bash
    $ polyaxon update --tags="foo, bar"
    ```
    """
    user, project_name = get_project_or_local(ctx.obj.get('project'))

    # Collect only the fields the user actually provided.
    patch = {}
    if name:
        patch['name'] = name
    if description:
        patch['description'] = description
    if private is not None:
        patch['is_public'] = not private
    tags = validate_tags(tags)
    if tags:
        patch['tags'] = tags

    if not patch:
        Printer.print_warning('No argument was provided to update the project.')
        sys.exit(1)

    try:
        response = PolyaxonClient().project.update_project(user, project_name, patch)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as err:
        Printer.print_error('Could not update project `{}`.'.format(project_name))
        Printer.print_error('Error message `{}`.'.format(err))
        sys.exit(1)

    Printer.print_success("Project updated.")
    get_project_details(response)
python
{ "resource": "" }
q261645
groups
validation
def groups(ctx, query, sort, page):
    """List experiment groups for this project.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    Get all groups:

    \b
    ```bash
    $ polyaxon project groups
    ```

    Get all groups with with status {created or running}, and
    creation date between 2018-01-01 and 2018-01-02, and search algorithm
    not in {grid or random search}

    \b
    ```bash
    $ polyaxon project groups \
      -q "status:created|running, started_at:2018-01-01..2018-01-02, search_algorithm:~grid|random"
    ```

    Get all groups sorted by update date

    \b
    ```bash
    $ polyaxon project groups -s "-updated_at"
    ```
    """
    user, project_name = get_project_or_local(ctx.obj.get('project'))
    # Pagination is 1-based; default to the first page.
    page = page or 1
    try:
        response = PolyaxonClient().project.list_experiment_groups(username=user,
                                                                   project_name=project_name,
                                                                   query=query,
                                                                   sort=sort,
                                                                   page=page)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error(
            'Could not get experiment groups for project `{}`.'.format(project_name))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)

    # `meta` carries the pagination/navigation info for the result set.
    meta = get_meta_response(response)
    if meta:
        Printer.print_header('Experiment groups for project `{}/{}`.'.format(user, project_name))
        Printer.print_header('Navigation:')
        dict_tabulate(meta)
    else:
        Printer.print_header('No experiment groups found for project `{}/{}`.'.format(
            user, project_name))

    objects = [Printer.add_status_color(o.to_light_dict(humanize_values=True))
               for o in response['results']]
    objects = list_dicts_to_tabulate(objects)
    if objects:
        Printer.print_header("Experiment groups:")
        # Drop columns that are redundant in a single-project listing.
        objects.pop('project', None)
        objects.pop('user', None)
        dict_tabulate(objects, is_list_dict=True)
python
{ "resource": "" }
q261646
experiments
validation
def experiments(ctx, metrics, declarations, independent, group, query, sort, page):
    """List experiments for this project.

    Uses [Caching](/references/polyaxon-cli/#caching)

    Examples:

    Get all experiments:

    \b
    ```bash
    $ polyaxon project experiments
    ```

    Get all experiments with with status {created or running}, and
    creation date between 2018-01-01 and 2018-01-02, and declarations activation equal
    to sigmoid and metric loss less or equal to 0.2

    \b
    ```bash
    $ polyaxon project experiments \
      -q "status:created|running, started_at:2018-01-01..2018-01-02, \
          declarations.activation:sigmoid, metric.loss:<=0.2"
    ```

    Get all experiments sorted by update date

    \b
    ```bash
    $ polyaxon project experiments -s "-updated_at"
    ```
    """
    user, project_name = get_project_or_local(ctx.obj.get('project'))
    # Pagination is 1-based; default to the first page.
    page = page or 1
    try:
        response = PolyaxonClient().project.list_experiments(username=user,
                                                             project_name=project_name,
                                                             independent=independent,
                                                             group=group,
                                                             metrics=metrics,
                                                             declarations=declarations,
                                                             query=query,
                                                             sort=sort,
                                                             page=page)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not get experiments for project `{}`.'.format(project_name))
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)

    meta = get_meta_response(response)
    if meta:
        Printer.print_header('Experiments for project `{}/{}`.'.format(user, project_name))
        Printer.print_header('Navigation:')
        dict_tabulate(meta)
    else:
        Printer.print_header('No experiments found for project `{}/{}`.'.format(user,
                                                                                project_name))

    # The --metrics / --declarations flags change which columns are tabulated.
    if metrics:
        objects = get_experiments_with_metrics(response)
    elif declarations:
        objects = get_experiments_with_declarations(response)
    else:
        objects = [Printer.add_status_color(o.to_light_dict(humanize_values=True))
                   for o in response['results']]
    objects = list_dicts_to_tabulate(objects)
    if objects:
        Printer.print_header("Experiments:")
        objects.pop('project_name', None)
        dict_tabulate(objects, is_list_dict=True)
python
{ "resource": "" }
q261647
download
validation
def download(ctx):
    """Download the code of the current project to the local machine."""
    user, project_name = get_project_or_local(ctx.obj.get('project'))
    try:
        PolyaxonClient().project.download_repo(user, project_name)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as exc:
        # Print both error lines before bailing out.
        for message in ('Could not download code for project `{}`.'.format(project_name),
                        'Error message `{}`.'.format(exc)):
            Printer.print_error(message)
        sys.exit(1)
    Printer.print_success('Files downloaded.')
python
{ "resource": "" }
q261648
DXclass.write
validation
def write(self, file, optstring="", quote=False):
    """Write the DX 'object' header line for this class to *file*.

    Additional arguments are packed into the line via *optstring*;
    with ``quote=True`` the object id is wrapped in double quotes.
    """
    ident = '"{0}"'.format(self.id) if quote else str(self.id)
    # Exactly one space between tokens: Chimera 1.4.1 and PyMOL 1.3 do not
    # properly implement the OpenDX spec and produce garbage when tokens
    # are separated by multiple spaces.
    file.write('object {0} class {1} {2}\n'.format(ident, self.name, optstring))
python
{ "resource": "" }
q261649
gridpositions.edges
validation
def edges(self):
    """Edges of the grid cells, origin at centre of 0,0,..,0 grid cell.

    Only works for regular, orthonormal grids.
    """
    edge_list = []
    for axis in range(self.rank):
        step = self.delta[axis, axis]
        # Cell centres sit at origin + i*step, so the edges are the centre
        # positions shifted down by half a step.
        edge_list.append(step * numpy.arange(self.shape[axis] + 1)
                         + self.origin[axis] - 0.5 * step)
    return edge_list
python
{ "resource": "" }
q261650
field.write
validation
def write(self, filename):
    """Write the complete dx object to the file.

    This is the simple OpenDX format which includes the data into the
    header via the 'object array ... data follows' statement.  Only
    simple regular arrays are supported.

    The format should be compatible with VMD's dx reader plugin.
    """
    maxcol = 80  # VMD chokes on comment lines longer than 80 characters
    with open(filename, 'w') as outfile:
        for line in self.comments:
            outfile.write(('# ' + str(line))[:maxcol] + '\n')
        # Write every component object first ...
        for component, obj in self.sorted_components():
            obj.write(outfile)
        # ... then the field object itself ...
        DXclass.write(self, outfile, quote=True)
        # ... and finally the component references.
        for component, obj in self.sorted_components():
            outfile.write('component "%s" value %s\n' % (component, str(obj.id)))
python
{ "resource": "" }
q261651
field.read
validation
def read(self, file):
    """Read DX field from file.

    dx = OpenDX.field.read(dxfile)

    The classid is discarded and replaced with the one from the file.
    """
    # The parser populates this field instance in place.
    DXParser(file).parse(self)
python
{ "resource": "" }
q261652
Token.value
validation
def value(self, ascode=None):
    """Return the token text cast to its own type, or to *ascode* if given."""
    code = self.code if ascode is None else ascode
    return self.cast[code](self.text)
python
{ "resource": "" }
q261653
DXInitObject.initialize
validation
def initialize(self):
    """Instantiate and return the DXclass corresponding to this init object.

    class = DXInitObject.initialize()
    """
    factory = self.DXclasses[self.type]
    return factory(self.id, **self.args)
python
{ "resource": "" }
q261654
DXParser.parse
validation
def parse(self, DXfield):
    """Parse the dx file and construct a DX field object with component classes.

    A :class:`field` instance *DXfield* must be provided to be
    filled by the parser::

       DXfield_object = OpenDX.field(*args)
       parse(DXfield_object)

    A tokenizer turns the dx file into a stream of tokens. A
    hierarchy of parsers examines the stream. The level-0 parser
    ('general') distinguishes comments and objects (level-1). The
    object parser calls level-3 parsers depending on the object
    found. The basic idea is that of a 'state machine'. There is one
    parser active at any time. The main loop is the general parser.

    * Constructing the dx objects with classtype and classid is
      not implemented yet.
    * Unknown tokens raise an exception.
    """
    self.DXfield = DXfield              # OpenDX.field (used by comment parser)
    self.currentobject = None           # containers for data
    self.objects = []                   # |
    self.tokens = []                    # token buffer
    with open(self.filename, 'r') as self.dxfile:
        self.use_parser('general')      # parse the whole file and populate self.objects

    # assemble field from objects
    for o in self.objects:
        if o.type == 'field':
            # Almost ignore the field object; VMD, for instance,
            # does not write components. To make this work
            # seamlessly I have to think harder how to organize
            # and use the data, eg preping the field object
            # properly and the initializing. Probably should also
            # check uniqueness of ids etc.
            DXfield.id = o.id
            continue
        c = o.initialize()
        self.DXfield.add(c.component, c)

    # free space
    del self.currentobject, self.objects
python
{ "resource": "" }
q261655
DXParser.__general
validation
def __general(self):
    """Level-0 parser and main loop.

    Look for a token that matches a level-1 parser and hand over control.
    """
    while 1:                            # main loop
        try:
            tok = self.__peek()         # only peek, apply_parser() will consume
        except DXParserNoTokens:
            # save previous DXInitObject
            # (kludge in here as the last level-2 parser usually does not return
            # via the object parser)
            if self.currentobject and self.currentobject not in self.objects:
                self.objects.append(self.currentobject)
            return                      # stop parsing and finish
        # decision branches for all level-1 parsers:
        # (the only way to get out of the lower level parsers!)
        if tok.iscode('COMMENT'):
            self.set_parser('comment')  # switch the state
        elif tok.iscode('WORD') and tok.equals('object'):
            self.set_parser('object')   # switch the state
        elif self.__parser is self.__general:
            # Either a level-2 parser screwed up or some level-1
            # construct is not implemented. (Note: this elif can
            # be only reached at the beginning or after comments;
            # later we never formally switch back to __general
            # (would create an infinite loop)
            raise DXParseError('Unknown level-1 construct at '+str(tok))

        self.apply_parser()
python
{ "resource": "" }
q261656
DXParser.__comment
validation
def __comment(self):
    """Level-1 parser for comments.

    pattern: #.*

    Append comment (with initial '# ' stripped) to all comments.
    """
    comment_tok = self.__consume()
    self.DXfield.add_comment(comment_tok.value())
    # Comments never nest; hand control straight back to the main loop.
    self.set_parser('general')
python
{ "resource": "" }
q261657
DXParser.__object
validation
def __object(self):
    """Level-1 parser for objects.

    pattern: 'object' id 'class' type ...

    id ::= integer|string|'"'white space string'"'
    type ::= string
    """
    self.__consume()                    # 'object'
    classid = self.__consume().text
    word = self.__consume().text
    if word != "class":
        raise DXParseError("reserved word %s should have been 'class'." % word)
    # save previous DXInitObject
    if self.currentobject:
        self.objects.append(self.currentobject)
    # setup new DXInitObject; the level-2 parser named after the class
    # type takes over from here
    classtype = self.__consume().text
    self.currentobject = DXInitObject(classtype=classtype, classid=classid)
    self.use_parser(classtype)
python
{ "resource": "" }
q261658
DXParser.__gridpositions
validation
def __gridpositions(self):
    """Level-2 parser for gridpositions.

    pattern:
    object 1 class gridpositions counts 97 93 99
    origin -46.5 -45.5 -48.5
    delta 1 0 0
    delta 0 1 0
    delta 0 0 1

    Each call consumes one keyword group ('counts', 'origin' or 'delta')
    and stores it on the current DXInitObject.
    """
    try:
        tok = self.__consume()
    except DXParserNoTokens:
        return

    if tok.equals('counts'):
        shape = []
        try:
            while True:
                # raises exception if not an int
                self.__peek().value('INTEGER')
                tok = self.__consume()
                shape.append(tok.value('INTEGER'))
        except (DXParserNoTokens, ValueError):
            pass
        if len(shape) == 0:
            raise DXParseError('gridpositions: no shape parameters')
        self.currentobject['shape'] = shape
    elif tok.equals('origin'):
        origin = []
        try:
            while (self.__peek().iscode('INTEGER') or
                   self.__peek().iscode('REAL')):
                tok = self.__consume()
                origin.append(tok.value())
        except DXParserNoTokens:
            pass
        if len(origin) == 0:
            raise DXParseError('gridpositions: no origin parameters')
        self.currentobject['origin'] = origin
    elif tok.equals('delta'):
        d = []
        try:
            while (self.__peek().iscode('INTEGER') or
                   self.__peek().iscode('REAL')):
                tok = self.__consume()
                d.append(tok.value())
        except DXParserNoTokens:
            pass
        if len(d) == 0:
            raise DXParseError('gridpositions: missing delta parameters')
        try:
            self.currentobject['delta'].append(d)
        except KeyError:
            # first delta row seen for this object
            self.currentobject['delta'] = [d]
    else:
        raise DXParseError('gridpositions: '+str(tok)+' not recognized.')
python
{ "resource": "" }
q261659
DXParser.__gridconnections
validation
def __gridconnections(self):
    """Level-2 parser for gridconnections.

    pattern:
    object 2 class gridconnections counts 97 93 99
    """
    try:
        tok = self.__consume()
    except DXParserNoTokens:
        return

    if tok.equals('counts'):
        shape = []
        try:
            while True:
                # raises exception if not an int
                self.__peek().value('INTEGER')
                tok = self.__consume()
                shape.append(tok.value('INTEGER'))
        except (DXParserNoTokens, ValueError):
            pass
        if len(shape) == 0:
            raise DXParseError('gridconnections: no shape parameters')
        self.currentobject['shape'] = shape
    else:
        raise DXParseError('gridconnections: '+str(tok)+' not recognized.')
python
{ "resource": "" }
q261660
DXParser.__array
validation
def __array(self):
    """Level-2 parser for arrays.

    pattern:
    object 3 class array type double rank 0 items 12 data follows
    0 2 0
    0 0 3.6
    0 -2.0 1e-12
    +4.534e+01 .34534 0.43654
    attribute "dep" string "positions"

    Each call consumes one keyword group ('type', 'rank', 'items',
    'data' or 'attribute').  The 'data follows' branch reads the raw
    numbers into a plain list of strings; conversion to floats happens
    later when the numpy array is built.
    """
    try:
        tok = self.__consume()
    except DXParserNoTokens:
        return

    if tok.equals('type'):
        tok = self.__consume()
        if not tok.iscode('STRING'):
            raise DXParseError('array: type was "%s", not a string.' % tok.text)
        self.currentobject['type'] = tok.value()
    elif tok.equals('rank'):
        tok = self.__consume()
        try:
            self.currentobject['rank'] = tok.value('INTEGER')
        except ValueError:
            raise DXParseError('array: rank was "%s", not an integer.' % tok.text)
    elif tok.equals('items'):
        tok = self.__consume()
        try:
            self.currentobject['size'] = tok.value('INTEGER')
        except ValueError:
            raise DXParseError('array: items was "%s", not an integer.' % tok.text)
    elif tok.equals('data'):
        tok = self.__consume()
        if not tok.iscode('STRING'):
            raise DXParseError('array: data was "%s", not a string.' % tok.text)
        if tok.text != 'follows':
            raise NotImplementedError(
                'array: Only the "data follows header" format is supported.')
        if not self.currentobject['size']:
            raise DXParseError("array: missing number of items")
        # This is the slow part.  Once we get here, we are just
        # reading in a long list of numbers.  Conversion to floats
        # will be done later when the numpy array is created.
        # Don't assume anything about whitespace or the number of
        # elements per row.
        self.currentobject['array'] = []
        while len(self.currentobject['array']) < self.currentobject['size']:
            self.currentobject['array'].extend(self.dxfile.readline().strip().split())
        # If you assume that there are three elements per row
        # (except the last) the following version works and is a little faster.
        # for i in range(int(numpy.ceil(self.currentobject['size']/3))):
        #     self.currentobject['array'].append(self.dxfile.readline())
        # self.currentobject['array'] = ' '.join(self.currentobject['array']).split()
    elif tok.equals('attribute'):
        # attributes are parsed but not used at the moment
        attribute = self.__consume().value()
        if not self.__consume().equals('string'):
            raise DXParseError('array: "string" expected.')
        value = self.__consume().value()
    else:
        raise DXParseError('array: '+str(tok)+' not recognized.')
python
{ "resource": "" }
q261661
DXParser.__field
validation
def __field(self):
    """Level-2 parser for a DX field object.

    pattern:
    object "site map 1" class field
    component "positions" value 1
    component "connections" value 2
    component "data" value 3
    """
    try:
        tok = self.__consume()
    except DXParserNoTokens:
        return

    if tok.equals('component'):
        component = self.__consume().value()
        if not self.__consume().equals('value'):
            raise DXParseError('field: "value" expected')
        classid = self.__consume().value()
        try:
            self.currentobject['components'][component] = classid
        except KeyError:
            # first component encountered for this field
            self.currentobject['components'] = {component: classid}
    else:
        raise DXParseError('field: '+str(tok)+' not recognized.')
python
{ "resource": "" }
q261662
DXParser.use_parser
validation
def use_parser(self, parsername):
    """Make *parsername* the current parser and run it immediately."""
    parser_func = self.parsers[parsername]
    self.__parser = parser_func
    parser_func()
python
{ "resource": "" }
q261663
DXParser.__tokenize
validation
def __tokenize(self, string):
    """Split s into tokens and update the token buffer.

    __tokenize(string)

    New tokens are appended to the token buffer, discarding white space.

    Based on http://effbot.org/zone/xml-scanner.htm
    """
    for m in self.dx_regex.finditer(string.strip()):
        code = m.lastgroup
        text = m.group(m.lastgroup)
        tok = Token(code, text)
        # whitespace tokens are dropped; everything else is buffered
        if not tok.iscode('WHITESPACE'):
            self.tokens.append(tok)
python
{ "resource": "" }
q261664
DXParser.__refill_tokenbuffer
validation
def __refill_tokenbuffer(self):
    """Top up the token buffer with one tokenized line from the file.

    Only reads a new line when the buffer is empty, so it is safe to
    call repeatedly.  At end of file ``readline()`` returns empty
    strings and it is up to __peek and __consume to flag the end of
    the stream.
    """
    if not self.tokens:
        self.__tokenize(self.dxfile.readline())
python
{ "resource": "" }
q261665
ndmeshgrid
validation
def ndmeshgrid(*arrs):
    """Return a mesh grid for N dimensions.

    The input are N arrays, each of which contains the values along one axis of
    the coordinate system. The arrays do not have to have the same number of
    entries. The function returns arrays that can be fed into numpy functions
    so that they produce values for *all* points spanned by the axes *arrs*.

    Original from
    http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d and fixed.

    .. SeeAlso: :func:`numpy.meshgrid` for the 2D case.
    """
    # arrs = tuple(reversed(arrs)) <-- wrong on stackoverflow.com
    arrs = tuple(arrs)
    lens = list(map(len, arrs))
    dim = len(arrs)
    # NOTE: the original version computed a total-size accumulator `sz`
    # that was never used and whose name was then shadowed by the inner
    # loop variable; the dead code has been removed.
    ans = []
    for i, arr in enumerate(arrs):
        # Reshape the i-th axis array so it varies only along axis i ...
        slc = [1] * dim
        slc[i] = lens[i]
        arr2 = numpy.asanyarray(arr).reshape(slc)
        # ... then repeat it along every other axis to fill the grid.
        for j, length in enumerate(lens):
            if j != i:
                arr2 = arr2.repeat(length, axis=j)
        ans.append(arr2)
    return tuple(ans)
python
{ "resource": "" }
q261666
Grid.resample_factor
validation
def resample_factor(self, factor):
    """Resample to a new regular grid.

    Parameters
    ----------
    factor : float
        The number of grid cells are scaled with `factor` in each
        dimension, i.e., ``factor * N_i`` cells along each dimension i.

    Returns
    -------
    Grid

    See Also
    --------
    resample
    """
    # New number of edges along each axis: N' = (N - 1) * factor + 1
    counts = [(n_edges - 1) * float(factor) + 1 for n_edges in self._len_edges()]
    new_edges = [
        numpy.linspace(lo, hi, num=int(num), endpoint=True)
        for lo, hi, num in zip(self._min_edges(), self._max_edges(), counts)
    ]
    return self.resample(new_edges)
python
{ "resource": "" }
q261667
Grid._load_cpp4
validation
def _load_cpp4(self, filename):
    """Initializes Grid from a CCP4 file."""
    reader = CCP4.CCP4()
    reader.read(filename)
    grid, edges = reader.histogramdd()
    # Re-run __init__ with the freshly parsed data, keeping existing metadata.
    self.__init__(grid=grid, edges=edges, metadata=self.metadata)
python
{ "resource": "" }
q261668
Grid._load_dx
validation
def _load_dx(self, filename):
    """Initializes Grid from a OpenDX file."""
    reader = OpenDX.field(0)
    reader.read(filename)
    grid, edges = reader.histogramdd()
    # Re-run __init__ with the freshly parsed data, keeping existing metadata.
    self.__init__(grid=grid, edges=edges, metadata=self.metadata)
python
{ "resource": "" }
q261669
Grid._load_plt
validation
def _load_plt(self, filename):
    """Initialize Grid from gOpenMol plt file."""
    reader = gOpenMol.Plt()
    reader.read(filename)
    grid, edges = reader.histogramdd()
    # Re-run __init__ with the freshly parsed data, keeping existing metadata.
    self.__init__(grid=grid, edges=edges, metadata=self.metadata)
python
{ "resource": "" }
q261670
Grid.export
validation
def export(self, filename, file_format=None, type=None, typequote='"'):
    """export density to file using the given format.

    The format can also be deduced from the suffix of the filename
    though the *format* keyword takes precedence.

    The default format for export() is 'dx'.  Use 'dx' for
    visualization.

    Implemented formats:

    dx
        :mod:`OpenDX`
    pickle
        pickle (use :meth:`Grid.load` to restore); :meth:`Grid.save`
        is simpler than ``export(format='python')``.

    Parameters
    ----------
    filename : str
        name of the output file

    file_format : {'dx', 'pickle', None} (optional)
        output file format, the default is "dx"

    type : str (optional)
        for DX, set the output DX array type, e.g., "double" or "float".
        By default (``None``), the DX type is determined from the numpy
        dtype of the array of the grid (and this will typically result in
        "double").

        .. versionadded:: 0.4.0

    typequote : str (optional)
        For DX, set the character used to quote the type string;
        by default this is a double-quote character, '"'.
        Custom parsers like the one from NAMD-GridForces (backend for MDFF)
        expect no quotes, and typequote='' may be used to appease them.

        .. versionadded:: 0.5.0
    """
    # Dispatch to the per-format writer selected from the filename/format.
    writer = self._get_exporter(filename, file_format=file_format)
    writer(filename, type=type, typequote=typequote)
python
{ "resource": "" }
q261671
Grid._export_python
validation
def _export_python(self, filename, **kwargs):
    """Pickle the Grid object.

    The object is dumped as a dictionary with grid and edges: This
    is sufficient to recreate the grid object with __init__().
    """
    state = {'grid': self.grid, 'edges': self.edges, 'metadata': self.metadata}
    with open(filename, 'wb') as outfile:
        cPickle.dump(state, outfile, cPickle.HIGHEST_PROTOCOL)
python
{ "resource": "" }
q261672
Grid._export_dx
validation
def _export_dx(self, filename, type=None, typequote='"', **kwargs):
    """Export the density grid to an OpenDX file.

    The file format is the simplest regular grid array and it is also
    understood by VMD's and Chimera's DX reader; PyMOL requires the dx
    `type` to be set to "double".

    For the file format see
    http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#HDREDF
    """
    # Always write with a '.dx' suffix, whatever the caller supplied.
    root, ext = os.path.splitext(filename)
    filename = root + '.dx'

    comments = [
        'OpenDX density file written by gridDataFormats.Grid.export()',
        'File format: http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#HDREDF',
        'Data are embedded in the header and tied to the grid positions.',
        'Data is written in C array order: In grid[x,y,z] the axis z is fastest',
        'varying, then y, then finally x, i.e. z is the innermost loop.'
    ]
    # write metadata in comments section
    if self.metadata:
        comments.append('Meta data stored with the python Grid object:')
    for k in self.metadata:
        comments.append(' ' + str(k) + ' = ' + str(self.metadata[k]))
    comments.append(
        '(Note: the VMD dx-reader chokes on comments below this line)')

    # Three DX objects: grid positions, grid topology, and the data array.
    components = dict(
        positions=OpenDX.gridpositions(1, self.grid.shape, self.origin,
                                       self.delta),
        connections=OpenDX.gridconnections(2, self.grid.shape),
        data=OpenDX.array(3, self.grid, type=type, typequote=typequote),
    )
    dx = OpenDX.field('density', components=components, comments=comments)
    dx.write(filename)
python
{ "resource": "" }
q261673
Grid.centers
validation
def centers(self):
    """Yield the coordinates of the centres of all grid cells, one at a time."""
    for index in numpy.ndindex(self.grid.shape):
        yield self.origin + self.delta * numpy.array(index)
python
{ "resource": "" }
q261674
CCP4._detect_byteorder
validation
def _detect_byteorder(ccp4file): """Detect the byteorder of stream `ccp4file` and return format character. Try all endinaness and alignment options until we find something that looks sensible ("MAPS " in the first 4 bytes). (The ``machst`` field could be used to obtain endianness, but it does not specify alignment.) .. SeeAlso:: :mod:`struct` """ bsaflag = None ccp4file.seek(52 * 4) mapbin = ccp4file.read(4) for flag in '@=<>': mapstr = struct.unpack(flag + '4s', mapbin)[0].decode('utf-8') if mapstr.upper() == 'MAP ': bsaflag = flag break # Only possible value according to spec. else: raise TypeError( "Cannot decode header --- corrupted or wrong format?") ccp4file.seek(0) return bsaflag
python
{ "resource": "" }
q261675
CCP4._read_header
validation
def _read_header(self, ccp4file):
    """Read header bytes.

    Parses the fixed CCP4 header (256 4-byte words) from *ccp4file* and
    returns it as a dict keyed by the record names in
    ``self._header_struct``.  Leaves the file positioned just past the
    header (at byte 1024).
    """
    bsaflag = self._detect_byteorder(ccp4file)

    # Parse the top of the header (4-byte words, 1 to 25).
    nheader = struct.calcsize(self._headerfmt)
    names = [r.key for r in self._header_struct]
    bintopheader = ccp4file.read(25 * 4)

    def decode_header(header, bsaflag='@'):
        # Unpack the raw words with the detected byteorder/alignment flag.
        h = dict(zip(names, struct.unpack(bsaflag + self._headerfmt, header)))
        h['bsaflag'] = bsaflag
        return h
    header = decode_header(bintopheader, bsaflag)
    for rec in self._header_struct:
        if not rec.is_legal_dict(header):
            warnings.warn(
                "Key %s: Illegal value %r" % (rec.key, header[rec.key]))

    # Parse the latter half of the header (4-byte words, 26 to 256).
    if (header['lskflg']):
        skewmatrix = np.fromfile(ccp4file, dtype=np.float32, count=9)
        header['skwmat'] = skewmatrix.reshape((3, 3))
        header['skwtrn'] = np.fromfile(ccp4file, dtype=np.float32, count=3)
    else:
        header['skwmat'] = header['skwtrn'] = None
        ccp4file.seek(12 * 4, 1)
    ccp4file.seek(15 * 4, 1)  # Skip future use section.
    ccp4file.seek(4, 1)  # Skip map text, already used above to verify format.
    # TODO: Compare file specified endianness to one obtained above.
    endiancode = struct.unpack(bsaflag + '4b', ccp4file.read(4))
    header['endianness'] = 'little' if endiancode == (0x44, 0x41, 0, 0
                                                      ) else 'big'
    header['arms'] = struct.unpack(bsaflag + 'f', ccp4file.read(4))[0]
    header['nlabl'] = struct.unpack(bsaflag + 'I', ccp4file.read(4))[0]
    if header['nlabl']:
        binlabel = ccp4file.read(80 * header['nlabl'])
        flag = bsaflag + str(80 * header['nlabl']) + 's'
        label = struct.unpack(flag, binlabel)[0]
        header['label'] = label.decode('utf-8').rstrip('\x00')
    else:
        header['label'] = None
    # Jump to the end of the fixed-size header block.
    ccp4file.seek(256 * 4)
    # TODO: Parse symmetry records, if any.
    return header
python
{ "resource": "" }
q261676
AmbientWeatherStation.get_data
validation
def get_data(self, **kwargs):
    """
    Get the data for a specific device for a specific end date

    Keyword Arguments:
        limit - max 288
        end_date - is Epoch in milliseconds

    :return:
    """
    limit = int(kwargs.get('limit', 288))
    end_date = kwargs.get('end_date', False)

    # datetime objects are converted to the API's epoch-millisecond format.
    if end_date and isinstance(end_date, datetime.datetime):
        end_date = self.convert_datetime(end_date)

    if self.mac_address is None:
        return None

    service_address = 'devices/%s' % self.mac_address
    self.api_instance.log('SERVICE ADDRESS: %s' % service_address)

    payload = dict(limit=limit)
    # If endDate is left blank (not passed in), the most recent results will be returned.
    if end_date:
        payload.update({'endDate': end_date})

    self.api_instance.log('DATA:')
    self.api_instance.log(payload)
    return self.api_instance.api_call(service_address, **payload)
python
{ "resource": "" }
q261677
AmbientAPI.get_devices
validation
def get_devices(self):
    """
    Get all devices

    :return:
        A list of AmbientWeatherStation instances.
    """
    raw_devices = self.api_call('devices')
    self.log('DEVICES:')
    self.log(raw_devices)
    # Wrap every raw device record in a station instance.
    stations = [AmbientWeatherStation(self, device) for device in raw_devices]
    self.log('DEVICE INSTANCE LIST:')
    self.log(stations)
    return stations
python
{ "resource": "" }
q261678
UrlBuilder.create_url
validation
def create_url(self, path, params=None, opts=None):
    """
    Create URL with supplied path and `params` parameters dict.

    Parameters
    ----------
    path : str
    params : dict
        Dictionary specifying URL parameters. Non-imgix parameters are
        added to the URL unprocessed. For a complete list of imgix
        supported parameters, visit https://docs.imgix.com/apis/url .
        (default None, treated as {})
    opts : dict
        Deprecated alias for `params`.

    Returns
    -------
    str
        imgix URL
    """
    # NOTE: the defaults were mutable `{}` literals, which are shared
    # across all calls; use None sentinels instead (same behavior for
    # callers, since empty dicts and None are both falsy below).
    if opts:
        warnings.warn('`opts` has been deprecated. Use `params` instead.',
                      DeprecationWarning, stacklevel=2)
    params = params or opts or {}

    if self._shard_strategy == SHARD_STRATEGY_CRC:
        crc = zlib.crc32(path.encode('utf-8')) & 0xffffffff
        index = crc % len(self._domains)  # Deterministically choose domain
        domain = self._domains[index]

    elif self._shard_strategy == SHARD_STRATEGY_CYCLE:
        domain = self._domains[self._shard_next_index]
        self._shard_next_index = (
            self._shard_next_index + 1) % len(self._domains)

    else:
        domain = self._domains[0]

    scheme = "https" if self._use_https else "http"

    url_obj = UrlHelper(
        domain,
        path,
        scheme,
        sign_key=self._sign_key,
        include_library_param=self._include_library_param,
        params=params)
    return str(url_obj)
python
{ "resource": "" }
q261679
UrlHelper.set_parameter
validation
def set_parameter(self, key, value):
    """Set a url parameter.

    Parameters
    ----------
    key : str
        If key ends with '64', the value provided will be automatically
        base64 encoded.
    """
    # Normalize scalars (and None) to their string representation.
    if value is None or isinstance(value, (int, float, bool)):
        value = str(value)

    if key.endswith('64'):
        # Base64-variant parameters are URL-safe encoded with padding stripped.
        encoded = urlsafe_b64encode(value.encode('utf-8'))
        value = encoded.replace(b('='), b(''))

    self._parameters[key] = value
python
{ "resource": "" }
q261680
Tibber.rt_connect
validation
async def rt_connect(self, loop):
    """Start subscription manager for real time data."""
    if self.sub_manager is not None:
        # Already connected; never create a second manager.
        return
    token_header = "token={}".format(self._access_token)
    self.sub_manager = SubscriptionManager(loop, token_header, SUB_ENDPOINT)
    self.sub_manager.start()
python
{ "resource": "" }
q261681
Tibber.sync_update_info
validation
def sync_update_info(self, *_):
    """Update home info (blocking wrapper around :meth:`update_info`)."""
    event_loop = asyncio.get_event_loop()
    task = event_loop.create_task(self.update_info())
    event_loop.run_until_complete(task)
python
{ "resource": "" }
q261682
Tibber.update_info
validation
async def update_info(self, *_):
    """Update home info async.

    Populates ``self._name``, ``self._all_home_ids`` (every home returned
    by the API) and ``self._home_ids`` (only homes with a running
    subscription).  Raises :class:`InvalidLogin` when the GraphQL
    response carries errors.
    """
    query = gql(
        """
        {
          viewer {
            name
            homes {
              subscriptions {
                status
              }
              id
            }
          }
        }
        """
    )

    res = await self._execute(query)
    if res is None:
        return
    errors = res.get("errors", [])
    if errors:
        msg = errors[0].get("message", "failed to login")
        _LOGGER.error(msg)
        raise InvalidLogin(msg)

    data = res.get("data")
    if not data:
        return
    viewer = data.get("viewer")
    if not viewer:
        return
    self._name = viewer.get("name")
    homes = viewer.get("homes", [])
    self._home_ids = []
    for _home in homes:
        home_id = _home.get("id")
        # Every home is recorded in _all_home_ids *before* the status
        # filter; _home_ids only keeps homes with a running subscription.
        self._all_home_ids += [home_id]
        subs = _home.get("subscriptions")
        if subs:
            status = subs[0].get("status", "ended").lower()
            if not home_id or status != "running":
                continue
            self._home_ids += [home_id]
python
{ "resource": "" }
q261683
Tibber.get_homes
validation
def get_homes(self, only_active=True):
    """Return list of Tibber homes."""
    home_ids = self.get_home_ids(only_active)
    return [self.get_home(hid) for hid in home_ids]
python
{ "resource": "" }
q261684
Tibber.get_home
validation
def get_home(self, home_id):
    """Return an instance of TibberHome for the given home id."""
    if home_id not in self._all_home_ids:
        _LOGGER.error("Could not find any Tibber home with id: %s", home_id)
        return None
    if home_id not in self._homes:
        # Lazily create and cache the TibberHome wrapper.
        self._homes[home_id] = TibberHome(home_id, self)
    return self._homes[home_id]
python
{ "resource": "" }
q261685
TibberHome.currency
validation
def currency(self):
    """Return the currency."""
    try:
        subscription = self.info["viewer"]["home"]["currentSubscription"]
        return subscription["priceInfo"]["current"]["currency"]
    except (KeyError, TypeError, IndexError):
        # Any hole in the nested response means the currency is unknown.
        _LOGGER.error("Could not find currency.")
        return ""
python
{ "resource": "" }
q261686
TibberHome.price_unit
validation
def price_unit(self):
    """Return the price unit (e.g. ``NOK/kWh``)."""
    cur = self.currency
    unit = self.consumption_unit
    if not cur or not unit:
        _LOGGER.error("Could not find price_unit.")
        return " "
    return "{}/{}".format(cur, unit)
python
{ "resource": "" }
q261687
TibberHome.rt_unsubscribe
validation
async def rt_unsubscribe(self):
    """Unsubscribe from the Tibber real time subscription."""
    sub_id = self._subscription_id
    if sub_id is None:
        _LOGGER.error("Not subscribed.")
        return
    await self._tibber_control.sub_manager.unsubscribe(sub_id)
python
{ "resource": "" }
q261688
TibberHome.rt_subscription_running
validation
def rt_subscription_running(self):
    """Return True when the real time subscription is up and running."""
    manager = self._tibber_control.sub_manager
    if manager is None or not manager.is_running:
        return False
    return self._subscription_id is not None
python
{ "resource": "" }
q261689
Activity.cleanup_none
validation
def cleanup_none(self):
    """Replace the temporary '_None' placeholder on attributes with real None."""
    for attr_name in self.defaults:
        if getattr(self, attr_name) == '_None':
            setattr(self, attr_name, None)
python
{ "resource": "" }
q261690
WSGIWorker.build_environ
validation
def build_environ(self, sock_file, conn): """ Build the execution environment. """ # Grab the request line request = self.read_request_line(sock_file) # Copy the Base Environment environ = self.base_environ.copy() # Grab the headers for k, v in self.read_headers(sock_file).items(): environ[str('HTTP_'+k)] = v # Add CGI Variables environ['REQUEST_METHOD'] = request['method'] environ['PATH_INFO'] = request['path'] environ['SERVER_PROTOCOL'] = request['protocol'] environ['SERVER_PORT'] = str(conn.server_port) environ['REMOTE_PORT'] = str(conn.client_port) environ['REMOTE_ADDR'] = str(conn.client_addr) environ['QUERY_STRING'] = request['query_string'] if 'HTTP_CONTENT_LENGTH' in environ: environ['CONTENT_LENGTH'] = environ['HTTP_CONTENT_LENGTH'] if 'HTTP_CONTENT_TYPE' in environ: environ['CONTENT_TYPE'] = environ['HTTP_CONTENT_TYPE'] # Save the request method for later self.request_method = environ['REQUEST_METHOD'] # Add Dynamic WSGI Variables if conn.ssl: environ['wsgi.url_scheme'] = 'https' environ['HTTPS'] = 'on' else: environ['wsgi.url_scheme'] = 'http' if environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked': environ['wsgi.input'] = ChunkedReader(sock_file) else: environ['wsgi.input'] = sock_file return environ
python
{ "resource": "" }
q261691
WSGIWorker.write
validation
def write(self, data, sections=None):
    """Write one chunk of response data to the client socket.

    Sends the headers first if they have not gone out yet, suppresses
    the body for HEAD requests, and applies chunked transfer encoding
    when self.chunked is set. Socket timeouts/errors do not raise; they
    flag the connection for closing instead.

    :param data: bytes to send; replaced by the stored error body when
        start_response recorded a header error in self.error.
    :param sections: passed through to send_headers; semantics defined
        there — TODO confirm.
    """
    # If an error was recorded (e.g. bad headers), override both the
    # status and the payload with the stored error before sending.
    if self.error[0]:
        self.status = self.error[0]
        data = b(self.error[1])
    if not self.headers_sent:
        self.send_headers(data, sections)
    # HEAD responses carry headers only, never a body.
    if self.request_method != 'HEAD':
        try:
            if self.chunked:
                # Chunked coding: hex length, CRLF, payload, CRLF.
                self.conn.sendall(b('%x\r\n%s\r\n' % (len(data), data)))
            else:
                self.conn.sendall(data)
        except socket.timeout:
            self.closeConnection = True
        except socket.error:
            # But some clients will close the connection before that
            # resulting in a socket error.
            self.closeConnection = True
python
{ "resource": "" }
q261692
WSGIWorker.start_response
validation
def start_response(self, status, response_headers, exc_info=None):
    """Store the HTTP status and headers to be sent when self.write is called.

    Implements the WSGI start_response callable (PEP 3333): validates
    that headers are not set twice, handles the exc_info re-raise rule,
    and normalises the status line to a native str.

    :param status: HTTP status line, e.g. '200 OK'.
    :param response_headers: list of (name, value) header pairs.
    :param exc_info: optional sys.exc_info() tuple from the application.
    :return: self.write_warning, the callable handed back to the app.
    """
    if exc_info:
        try:
            if self.headers_sent:
                # Re-raise original exception if headers sent
                # because this violates WSGI specification.
                # NOTE(review): bare `raise` re-raises the exception
                # currently being handled, not exc_info itself; this
                # only works when called from inside an except block —
                # confirm against PEP 3333's recommended pattern of
                # raising exc_info explicitly.
                raise
        finally:
            # Break the reference cycle per PEP 3333.
            exc_info = None
    elif self.header_set:
        raise AssertionError("Headers already set!")
    # Normalise the status line to a native str on Python 3; bytes are
    # decoded as ISO-8859-1 (the HTTP wire encoding).
    if PY3K and not isinstance(status, str):
        self.status = str(status, 'ISO-8859-1')
    else:
        self.status = status
    # Make sure headers are bytes objects
    try:
        self.header_set = Headers(response_headers)
    except UnicodeDecodeError:
        # Defer the failure: record a 500 so the next write() sends the
        # error response instead of the application's payload.
        self.error = ('500 Internal Server Error',
                      'HTTP Headers should be bytes')
        self.err_log.error('Received HTTP Headers from client that contain'
                           ' invalid characters for Latin-1 encoding.')
    return self.write_warning
python
{ "resource": "" }
q261693
CherryPyWSGIServer
validation
def CherryPyWSGIServer(bind_addr, wsgi_app, numthreads=10, server_name=None,
                       max=-1, request_queue_size=5, timeout=10,
                       shutdown_timeout=5):
    """A CherryPy wsgiserver-compatible wrapper around Rocket.

    server_name and shutdown_timeout are accepted only for signature
    compatibility and are not used. A negative max (the default) means
    "no thread limit" and is mapped to Rocket's 0.
    """
    thread_cap = 0 if max < 0 else max
    return Rocket(bind_addr,
                  'wsgi',
                  {'wsgi_app': wsgi_app},
                  min_threads=numthreads,
                  max_threads=thread_cap,
                  queue_size=request_queue_size,
                  timeout=timeout)
python
{ "resource": "" }
q261694
aggregate
validation
def aggregate(l):
    """Aggregate a list of IP prefixes into the minimal covering set.

    l -- a python list of prefix strings

    >>> aggregate(["10.0.0.0/8", "10.0.0.0/24"])
    ['10.0.0.0/8']
    """
    prefix_tree = radix.Radix()
    for item in l:
        try:
            prefix_tree.add(item)
        except ValueError:
            raise Exception("ERROR: invalid IP prefix: {}".format(item))
    return aggregate_tree(prefix_tree).prefixes()
python
{ "resource": "" }
q261695
aggregate_tree
validation
def aggregate_tree(l_tree):
    """Walk a py-radix tree and aggregate it.

    Phase 1 drops prefixes already covered by a shorter supplied prefix;
    phase 2 merges adjacent sibling prefixes into their common supernet,
    repeated until no further merge is possible (fixed point).

    Arguments
    l_tree -- radix.Radix() object

    Returns the aggregated radix.Radix() tree.
    """
    def _aggregate_phase1(tree):
        # phase1 removes any supplied prefixes which are superfluous because
        # they are already included in another supplied prefix. For example,
        # 2001:67c:208c:10::/64 would be removed if 2001:67c:208c::/48 was
        # also supplied.
        n_tree = radix.Radix()
        for prefix in tree.prefixes():
            # search_worst returns the shortest covering prefix; if that
            # is the prefix itself, nothing else covers it — keep it.
            if tree.search_worst(prefix).prefix == prefix:
                n_tree.add(prefix)
        return n_tree

    def _aggregate_phase2(tree):
        # phase2 identifies adjacent prefixes that can be combined under a
        # single, shorter-length prefix. For example, 2001:67c:208c::/48 and
        # 2001:67c:208d::/48 can be combined into the single prefix
        # 2001:67c:208c::/47.
        n_tree = radix.Radix()
        for rnode in tree:
            # Candidate supernet one bit shorter than this prefix.
            p = text(ip_network(text(rnode.prefix)).supernet())
            r = tree.search_covered(p)
            # Merge only when the supernet covers exactly two prefixes of
            # equal length (i.e. both halves are present).
            if len(r) == 2:
                if r[0].prefixlen == r[1].prefixlen == rnode.prefixlen:
                    n_tree.add(p)
                else:
                    n_tree.add(rnode.prefix)
            else:
                n_tree.add(rnode.prefix)
        return n_tree

    l_tree = _aggregate_phase1(l_tree)

    # A single remaining prefix cannot be merged any further.
    if len(l_tree.prefixes()) == 1:
        return l_tree

    # Iterate phase2 to a fixed point: each pass may enable new merges
    # (two /48s becoming a /47 may pair with another /47, etc.).
    while True:
        r_tree = _aggregate_phase2(l_tree)
        if l_tree.prefixes() == r_tree.prefixes():
            break
        else:
            l_tree = r_tree
            del r_tree

    return l_tree
python
{ "resource": "" }
q261696
_ordinal_metric
validation
def _ordinal_metric(_v1, _v2, i1, i2, n_v): """Metric for ordinal data.""" if i1 > i2: i1, i2 = i2, i1 return (np.sum(n_v[i1:(i2 + 1)]) - (n_v[i1] + n_v[i2]) / 2) ** 2
python
{ "resource": "" }
q261697
_ratio_metric
validation
def _ratio_metric(v1, v2, **_kwargs): """Metric for ratio data.""" return (((v1 - v2) / (v1 + v2)) ** 2) if v1 + v2 != 0 else 0
python
{ "resource": "" }
q261698
_coincidences
validation
def _coincidences(value_counts, value_domain, dtype=np.float64):
    """Coincidence matrix.

    Parameters
    ----------
    value_counts : ndarray, with shape (N, V)
        Number of coders that assigned a certain value to a determined unit,
        where N is the number of units and V is the value count.

    value_domain : array_like, with shape (V,)
        Possible values V the units can take.
        If the level of measurement is not nominal, it must be ordered.

    dtype : data-type
        Result and computation data-type.

    Returns
    -------
    o : ndarray, with shape (V, V)
        Coincidence matrix.
    """
    # (N, V, 1) view so a per-unit V x V outer product can be formed by
    # broadcasting against its (N, 1, V) transpose below.
    value_counts_matrices = value_counts.reshape(value_counts.shape + (1,))
    # Coders per unit, floored at 2 so the (pairable - 1) divisor below
    # can never be zero.
    pairable = np.maximum(np.sum(value_counts, axis=1), 2)
    # Per unit: V x V matrix holding that unit's counts on the diagonal;
    # subtracting it removes self-pairings from the outer products.
    diagonals = np.tile(np.eye(len(value_domain)), (len(value_counts), 1, 1)) \
        * value_counts.reshape((value_counts.shape[0], 1, value_counts.shape[1]))
    # Per-unit pair counts: n_uv * n_uv' minus the self-pairs.
    unnormalized_coincidences = value_counts_matrices * value_counts_matrices.transpose((0, 2, 1)) - diagonals
    # Normalise each unit by (m_u - 1) and sum over the N units.
    return np.sum(np.divide(unnormalized_coincidences, (pairable - 1).reshape((-1, 1, 1)), dtype=dtype), axis=0)
python
{ "resource": "" }
q261699
_random_coincidences
validation
def _random_coincidences(value_domain, n, n_v): """Random coincidence matrix. Parameters ---------- value_domain : array_like, with shape (V,) Possible values V the units can take. If the level of measurement is not nominal, it must be ordered. n : scalar Number of pairable values. n_v : ndarray, with shape (V,) Number of pairable elements for each value. Returns ------- e : ndarray, with shape (V, V) Random coincidence matrix. """ n_v_column = n_v.reshape(-1, 1) return (n_v_column.dot(n_v_column.T) - np.eye(len(value_domain)) * n_v_column) / (n - 1)
python
{ "resource": "" }