id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
13,300
Dallinger/Dallinger
dallinger/registration.py
_create_osf_project
def _create_osf_project(dlgr_id, description=None): """Create a project on the OSF.""" if not description: description = "Experiment {} registered by Dallinger.".format(dlgr_id) r = requests.post( "{}/nodes/".format(root), data={ "type": "nodes", "category": "project", "title": "Experiment dlgr-{}".format(dlgr_id[0:8]), "description": description, }, headers={"Authorization": "Bearer {}".format(config.get("osf_access_token"))}, ) r.raise_for_status() osf_id = r.json()["data"]["id"] logger.info("Project registered on OSF at http://osf.io/{}".format(osf_id)) return osf_id
python
def _create_osf_project(dlgr_id, description=None): if not description: description = "Experiment {} registered by Dallinger.".format(dlgr_id) r = requests.post( "{}/nodes/".format(root), data={ "type": "nodes", "category": "project", "title": "Experiment dlgr-{}".format(dlgr_id[0:8]), "description": description, }, headers={"Authorization": "Bearer {}".format(config.get("osf_access_token"))}, ) r.raise_for_status() osf_id = r.json()["data"]["id"] logger.info("Project registered on OSF at http://osf.io/{}".format(osf_id)) return osf_id
[ "def", "_create_osf_project", "(", "dlgr_id", ",", "description", "=", "None", ")", ":", "if", "not", "description", ":", "description", "=", "\"Experiment {} registered by Dallinger.\"", ".", "format", "(", "dlgr_id", ")", "r", "=", "requests", ".", "post", "("...
Create a project on the OSF.
[ "Create", "a", "project", "on", "the", "OSF", "." ]
76ca8217c709989c116d0ebd8fca37bd22f591af
https://github.com/Dallinger/Dallinger/blob/76ca8217c709989c116d0ebd8fca37bd22f591af/dallinger/registration.py#L26-L47
13,301
Dallinger/Dallinger
dallinger/registration.py
_upload_assets_to_OSF
def _upload_assets_to_OSF(dlgr_id, osf_id, provider="osfstorage"): """Upload experimental assets to the OSF.""" root = "https://files.osf.io/v1" snapshot_filename = "{}-code.zip".format(dlgr_id) snapshot_path = os.path.join("snapshots", snapshot_filename) r = requests.put( "{}/resources/{}/providers/{}/".format(root, osf_id, provider), params={"kind": "file", "name": snapshot_filename}, headers={ "Authorization": "Bearer {}".format(config.get("osf_access_token")), "Content-Type": "text/plain", }, data=open(snapshot_path, "rb"), ) r.raise_for_status()
python
def _upload_assets_to_OSF(dlgr_id, osf_id, provider="osfstorage"): root = "https://files.osf.io/v1" snapshot_filename = "{}-code.zip".format(dlgr_id) snapshot_path = os.path.join("snapshots", snapshot_filename) r = requests.put( "{}/resources/{}/providers/{}/".format(root, osf_id, provider), params={"kind": "file", "name": snapshot_filename}, headers={ "Authorization": "Bearer {}".format(config.get("osf_access_token")), "Content-Type": "text/plain", }, data=open(snapshot_path, "rb"), ) r.raise_for_status()
[ "def", "_upload_assets_to_OSF", "(", "dlgr_id", ",", "osf_id", ",", "provider", "=", "\"osfstorage\"", ")", ":", "root", "=", "\"https://files.osf.io/v1\"", "snapshot_filename", "=", "\"{}-code.zip\"", ".", "format", "(", "dlgr_id", ")", "snapshot_path", "=", "os", ...
Upload experimental assets to the OSF.
[ "Upload", "experimental", "assets", "to", "the", "OSF", "." ]
76ca8217c709989c116d0ebd8fca37bd22f591af
https://github.com/Dallinger/Dallinger/blob/76ca8217c709989c116d0ebd8fca37bd22f591af/dallinger/registration.py#L50-L64
13,302
globus/globus-cli
globus_cli/commands/task/update.py
update_task
def update_task(deadline, label, task_id): """ Executor for `globus task update` """ client = get_client() task_doc = assemble_generic_doc("task", label=label, deadline=deadline) res = client.update_task(task_id, task_doc) formatted_print(res, simple_text="Success")
python
def update_task(deadline, label, task_id): client = get_client() task_doc = assemble_generic_doc("task", label=label, deadline=deadline) res = client.update_task(task_id, task_doc) formatted_print(res, simple_text="Success")
[ "def", "update_task", "(", "deadline", ",", "label", ",", "task_id", ")", ":", "client", "=", "get_client", "(", ")", "task_doc", "=", "assemble_generic_doc", "(", "\"task\"", ",", "label", "=", "label", ",", "deadline", "=", "deadline", ")", "res", "=", ...
Executor for `globus task update`
[ "Executor", "for", "globus", "task", "update" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/task/update.py#L17-L26
13,303
globus/globus-cli
globus_cli/commands/task/show.py
show_task
def show_task(successful_transfers, task_id): """ Executor for `globus task show` """ client = get_client() if successful_transfers: print_successful_transfers(client, task_id) else: print_task_detail(client, task_id)
python
def show_task(successful_transfers, task_id): client = get_client() if successful_transfers: print_successful_transfers(client, task_id) else: print_task_detail(client, task_id)
[ "def", "show_task", "(", "successful_transfers", ",", "task_id", ")", ":", "client", "=", "get_client", "(", ")", "if", "successful_transfers", ":", "print_successful_transfers", "(", "client", ",", "task_id", ")", "else", ":", "print_task_detail", "(", "client", ...
Executor for `globus task show`
[ "Executor", "for", "globus", "task", "show" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/task/show.py#L82-L91
13,304
globus/globus-cli
globus_cli/commands/bookmark/create.py
bookmark_create
def bookmark_create(endpoint_plus_path, bookmark_name): """ Executor for `globus bookmark create` """ endpoint_id, path = endpoint_plus_path client = get_client() submit_data = {"endpoint_id": str(endpoint_id), "path": path, "name": bookmark_name} res = client.create_bookmark(submit_data) formatted_print(res, simple_text="Bookmark ID: {}".format(res["id"]))
python
def bookmark_create(endpoint_plus_path, bookmark_name): endpoint_id, path = endpoint_plus_path client = get_client() submit_data = {"endpoint_id": str(endpoint_id), "path": path, "name": bookmark_name} res = client.create_bookmark(submit_data) formatted_print(res, simple_text="Bookmark ID: {}".format(res["id"]))
[ "def", "bookmark_create", "(", "endpoint_plus_path", ",", "bookmark_name", ")", ":", "endpoint_id", ",", "path", "=", "endpoint_plus_path", "client", "=", "get_client", "(", ")", "submit_data", "=", "{", "\"endpoint_id\"", ":", "str", "(", "endpoint_id", ")", ",...
Executor for `globus bookmark create`
[ "Executor", "for", "globus", "bookmark", "create" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/bookmark/create.py#L16-L26
13,305
globus/globus-cli
globus_cli/commands/endpoint/server/list.py
server_list
def server_list(endpoint_id): """ Executor for `globus endpoint server list` """ # raises usage error on shares for us endpoint, server_list = get_endpoint_w_server_list(endpoint_id) if server_list == "S3": # not GCS -- this is an S3 endpoint server_list = {"s3_url": endpoint["s3_url"]} fields = [("S3 URL", "s3_url")] text_format = FORMAT_TEXT_RECORD else: # regular GCS host endpoint fields = ( ("ID", "id"), ("URI", lambda s: (s["uri"] or "none (Globus Connect Personal)")), ) text_format = FORMAT_TEXT_TABLE formatted_print(server_list, text_format=text_format, fields=fields)
python
def server_list(endpoint_id): # raises usage error on shares for us endpoint, server_list = get_endpoint_w_server_list(endpoint_id) if server_list == "S3": # not GCS -- this is an S3 endpoint server_list = {"s3_url": endpoint["s3_url"]} fields = [("S3 URL", "s3_url")] text_format = FORMAT_TEXT_RECORD else: # regular GCS host endpoint fields = ( ("ID", "id"), ("URI", lambda s: (s["uri"] or "none (Globus Connect Personal)")), ) text_format = FORMAT_TEXT_TABLE formatted_print(server_list, text_format=text_format, fields=fields)
[ "def", "server_list", "(", "endpoint_id", ")", ":", "# raises usage error on shares for us", "endpoint", ",", "server_list", "=", "get_endpoint_w_server_list", "(", "endpoint_id", ")", "if", "server_list", "==", "\"S3\"", ":", "# not GCS -- this is an S3 endpoint", "server_...
Executor for `globus endpoint server list`
[ "Executor", "for", "globus", "endpoint", "server", "list" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/server/list.py#L11-L28
13,306
globus/globus-cli
globus_cli/commands/task/list.py
task_list
def task_list( limit, filter_task_id, filter_status, filter_type, filter_label, filter_not_label, inexact, filter_requested_after, filter_requested_before, filter_completed_after, filter_completed_before, ): """ Executor for `globus task-list` """ def _process_filterval(prefix, value, default=None): if value: if isinstance(value, six.string_types): return "{}:{}/".format(prefix, value) return "{}:{}/".format(prefix, ",".join(str(x) for x in value)) else: return default or "" # make filter string filter_string = "" filter_string += _process_filterval("task_id", filter_task_id) filter_string += _process_filterval("status", filter_status) filter_string += _process_filterval( "type", filter_type, default="type:TRANSFER,DELETE/" ) # combine data into one list for easier processing if inexact: label_data = ["~" + s for s in filter_label] + [ "!~" + s for s in filter_not_label ] else: label_data = ["=" + s for s in filter_label] + [ "!" + s for s in filter_not_label ] filter_string += _process_filterval("label", label_data) filter_string += _process_filterval( "request_time", [(filter_requested_after or ""), (filter_requested_before or "")], ) filter_string += _process_filterval( "completion_time", [(filter_completed_after or ""), (filter_completed_before or "")], ) client = get_client() task_iterator = client.task_list( num_results=limit, filter=filter_string[:-1] ) # ignore trailing / fields = [ ("Task ID", "task_id"), ("Status", "status"), ("Type", "type"), ("Source Display Name", "source_endpoint_display_name"), ("Dest Display Name", "destination_endpoint_display_name"), ("Label", "label"), ] formatted_print( task_iterator, fields=fields, json_converter=iterable_response_to_dict )
python
def task_list( limit, filter_task_id, filter_status, filter_type, filter_label, filter_not_label, inexact, filter_requested_after, filter_requested_before, filter_completed_after, filter_completed_before, ): def _process_filterval(prefix, value, default=None): if value: if isinstance(value, six.string_types): return "{}:{}/".format(prefix, value) return "{}:{}/".format(prefix, ",".join(str(x) for x in value)) else: return default or "" # make filter string filter_string = "" filter_string += _process_filterval("task_id", filter_task_id) filter_string += _process_filterval("status", filter_status) filter_string += _process_filterval( "type", filter_type, default="type:TRANSFER,DELETE/" ) # combine data into one list for easier processing if inexact: label_data = ["~" + s for s in filter_label] + [ "!~" + s for s in filter_not_label ] else: label_data = ["=" + s for s in filter_label] + [ "!" + s for s in filter_not_label ] filter_string += _process_filterval("label", label_data) filter_string += _process_filterval( "request_time", [(filter_requested_after or ""), (filter_requested_before or "")], ) filter_string += _process_filterval( "completion_time", [(filter_completed_after or ""), (filter_completed_before or "")], ) client = get_client() task_iterator = client.task_list( num_results=limit, filter=filter_string[:-1] ) # ignore trailing / fields = [ ("Task ID", "task_id"), ("Status", "status"), ("Type", "type"), ("Source Display Name", "source_endpoint_display_name"), ("Dest Display Name", "destination_endpoint_display_name"), ("Label", "label"), ] formatted_print( task_iterator, fields=fields, json_converter=iterable_response_to_dict )
[ "def", "task_list", "(", "limit", ",", "filter_task_id", ",", "filter_status", ",", "filter_type", ",", "filter_label", ",", "filter_not_label", ",", "inexact", ",", "filter_requested_after", ",", "filter_requested_before", ",", "filter_completed_after", ",", "filter_co...
Executor for `globus task-list`
[ "Executor", "for", "globus", "task", "-", "list" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/task/list.py#L75-L143
13,307
globus/globus-cli
globus_cli/commands/delete.py
delete_command
def delete_command( batch, ignore_missing, star_silent, recursive, enable_globs, endpoint_plus_path, label, submission_id, dry_run, deadline, skip_activation_check, notify, ): """ Executor for `globus delete` """ endpoint_id, path = endpoint_plus_path if path is None and (not batch): raise click.UsageError("delete requires either a PATH OR --batch") client = get_client() # attempt to activate unless --skip-activation-check is given if not skip_activation_check: autoactivate(client, endpoint_id, if_expires_in=60) delete_data = DeleteData( client, endpoint_id, label=label, recursive=recursive, ignore_missing=ignore_missing, submission_id=submission_id, deadline=deadline, skip_activation_check=skip_activation_check, interpret_globs=enable_globs, **notify ) if batch: # although this sophisticated structure (like that in transfer) # isn't strictly necessary, it gives us the ability to add options in # the future to these lines with trivial modifications @click.command() @click.argument("path", type=TaskPath(base_dir=path)) def process_batch_line(path): """ Parse a line of batch input and add it to the delete submission item. """ delete_data.add_item(str(path)) shlex_process_stdin(process_batch_line, "Enter paths to delete, line by line.") else: if not star_silent and enable_globs and path.endswith("*"): # not intuitive, but `click.confirm(abort=True)` prints to stdout # unnecessarily, which we don't really want... 
# only do this check if stderr is a pty if ( err_is_terminal() and term_is_interactive() and not click.confirm( 'Are you sure you want to delete all files matching "{}"?'.format( path ), err=True, ) ): safeprint("Aborted.", write_to_stderr=True) click.get_current_context().exit(1) delete_data.add_item(path) if dry_run: formatted_print(delete_data, response_key="DATA", fields=[("Path", "path")]) # exit safely return res = client.submit_delete(delete_data) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=(("Message", "message"), ("Task ID", "task_id")), )
python
def delete_command( batch, ignore_missing, star_silent, recursive, enable_globs, endpoint_plus_path, label, submission_id, dry_run, deadline, skip_activation_check, notify, ): endpoint_id, path = endpoint_plus_path if path is None and (not batch): raise click.UsageError("delete requires either a PATH OR --batch") client = get_client() # attempt to activate unless --skip-activation-check is given if not skip_activation_check: autoactivate(client, endpoint_id, if_expires_in=60) delete_data = DeleteData( client, endpoint_id, label=label, recursive=recursive, ignore_missing=ignore_missing, submission_id=submission_id, deadline=deadline, skip_activation_check=skip_activation_check, interpret_globs=enable_globs, **notify ) if batch: # although this sophisticated structure (like that in transfer) # isn't strictly necessary, it gives us the ability to add options in # the future to these lines with trivial modifications @click.command() @click.argument("path", type=TaskPath(base_dir=path)) def process_batch_line(path): """ Parse a line of batch input and add it to the delete submission item. """ delete_data.add_item(str(path)) shlex_process_stdin(process_batch_line, "Enter paths to delete, line by line.") else: if not star_silent and enable_globs and path.endswith("*"): # not intuitive, but `click.confirm(abort=True)` prints to stdout # unnecessarily, which we don't really want... # only do this check if stderr is a pty if ( err_is_terminal() and term_is_interactive() and not click.confirm( 'Are you sure you want to delete all files matching "{}"?'.format( path ), err=True, ) ): safeprint("Aborted.", write_to_stderr=True) click.get_current_context().exit(1) delete_data.add_item(path) if dry_run: formatted_print(delete_data, response_key="DATA", fields=[("Path", "path")]) # exit safely return res = client.submit_delete(delete_data) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=(("Message", "message"), ("Task ID", "task_id")), )
[ "def", "delete_command", "(", "batch", ",", "ignore_missing", ",", "star_silent", ",", "recursive", ",", "enable_globs", ",", "endpoint_plus_path", ",", "label", ",", "submission_id", ",", "dry_run", ",", "deadline", ",", "skip_activation_check", ",", "notify", ",...
Executor for `globus delete`
[ "Executor", "for", "globus", "delete" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/delete.py#L35-L118
13,308
globus/globus-cli
globus_cli/commands/config/set.py
set_command
def set_command(value, parameter): """ Executor for `globus config set` """ conf = get_config_obj() section = "cli" if "." in parameter: section, parameter = parameter.split(".", 1) # ensure that the section exists if section not in conf: conf[section] = {} # set the value for the given parameter conf[section][parameter] = value # write to disk safeprint("Writing updated config to {}".format(conf.filename)) conf.write()
python
def set_command(value, parameter): conf = get_config_obj() section = "cli" if "." in parameter: section, parameter = parameter.split(".", 1) # ensure that the section exists if section not in conf: conf[section] = {} # set the value for the given parameter conf[section][parameter] = value # write to disk safeprint("Writing updated config to {}".format(conf.filename)) conf.write()
[ "def", "set_command", "(", "value", ",", "parameter", ")", ":", "conf", "=", "get_config_obj", "(", ")", "section", "=", "\"cli\"", "if", "\".\"", "in", "parameter", ":", "section", ",", "parameter", "=", "parameter", ".", "split", "(", "\".\"", ",", "1"...
Executor for `globus config set`
[ "Executor", "for", "globus", "config", "set" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/config/set.py#L12-L30
13,309
globus/globus-cli
globus_cli/parsing/process_stdin.py
shlex_process_stdin
def shlex_process_stdin(process_command, helptext): """ Use shlex to process stdin line-by-line. Also prints help text. Requires that @process_command be a Click command object, used for processing single lines of input. helptext is prepended to the standard message printed to interactive sessions. """ # if input is interactive, print help to stderr if sys.stdin.isatty(): safeprint( ( "{}\n".format(helptext) + "Lines are split with shlex in POSIX mode: " "https://docs.python.org/library/shlex.html#parsing-rules\n" "Terminate input with Ctrl+D or <EOF>\n" ), write_to_stderr=True, ) # use readlines() rather than implicit file read line looping to force # python to properly capture EOF (otherwise, EOF acts as a flush and # things get weird) for line in sys.stdin.readlines(): # get the argument vector: # do a shlex split to handle quoted paths with spaces in them # also lets us have comments with # argv = shlex.split(line, comments=True) if argv: try: process_command.main(args=argv) except SystemExit as e: if e.code != 0: raise
python
def shlex_process_stdin(process_command, helptext): # if input is interactive, print help to stderr if sys.stdin.isatty(): safeprint( ( "{}\n".format(helptext) + "Lines are split with shlex in POSIX mode: " "https://docs.python.org/library/shlex.html#parsing-rules\n" "Terminate input with Ctrl+D or <EOF>\n" ), write_to_stderr=True, ) # use readlines() rather than implicit file read line looping to force # python to properly capture EOF (otherwise, EOF acts as a flush and # things get weird) for line in sys.stdin.readlines(): # get the argument vector: # do a shlex split to handle quoted paths with spaces in them # also lets us have comments with # argv = shlex.split(line, comments=True) if argv: try: process_command.main(args=argv) except SystemExit as e: if e.code != 0: raise
[ "def", "shlex_process_stdin", "(", "process_command", ",", "helptext", ")", ":", "# if input is interactive, print help to stderr", "if", "sys", ".", "stdin", ".", "isatty", "(", ")", ":", "safeprint", "(", "(", "\"{}\\n\"", ".", "format", "(", "helptext", ")", ...
Use shlex to process stdin line-by-line. Also prints help text. Requires that @process_command be a Click command object, used for processing single lines of input. helptext is prepended to the standard message printed to interactive sessions.
[ "Use", "shlex", "to", "process", "stdin", "line", "-", "by", "-", "line", ".", "Also", "prints", "help", "text", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/process_stdin.py#L7-L40
13,310
globus/globus-cli
globus_cli/commands/endpoint/local_id.py
local_id
def local_id(personal): """ Executor for `globus endpoint local-id` """ if personal: try: ep_id = LocalGlobusConnectPersonal().endpoint_id except IOError as e: safeprint(e, write_to_stderr=True) click.get_current_context().exit(1) if ep_id is not None: safeprint(ep_id) else: safeprint("No Globus Connect Personal installation found.") click.get_current_context().exit(1)
python
def local_id(personal): if personal: try: ep_id = LocalGlobusConnectPersonal().endpoint_id except IOError as e: safeprint(e, write_to_stderr=True) click.get_current_context().exit(1) if ep_id is not None: safeprint(ep_id) else: safeprint("No Globus Connect Personal installation found.") click.get_current_context().exit(1)
[ "def", "local_id", "(", "personal", ")", ":", "if", "personal", ":", "try", ":", "ep_id", "=", "LocalGlobusConnectPersonal", "(", ")", ".", "endpoint_id", "except", "IOError", "as", "e", ":", "safeprint", "(", "e", ",", "write_to_stderr", "=", "True", ")",...
Executor for `globus endpoint local-id`
[ "Executor", "for", "globus", "endpoint", "local", "-", "id" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/local_id.py#L16-L31
13,311
globus/globus-cli
globus_cli/parsing/custom_group.py
globus_group
def globus_group(*args, **kwargs): """ Wrapper over click.group which sets GlobusCommandGroup as the Class Caution! Don't get snake-bitten by this. `globus_group` is a decorator which MUST take arguments. It is not wrapped in our common detect-and-decorate pattern to allow it to be used bare -- that wouldn't work (unnamed groups? weird stuff) """ def inner_decorator(f): f = click.group(*args, cls=GlobusCommandGroup, **kwargs)(f) f = common_options(f) return f return inner_decorator
python
def globus_group(*args, **kwargs): def inner_decorator(f): f = click.group(*args, cls=GlobusCommandGroup, **kwargs)(f) f = common_options(f) return f return inner_decorator
[ "def", "globus_group", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "inner_decorator", "(", "f", ")", ":", "f", "=", "click", ".", "group", "(", "*", "args", ",", "cls", "=", "GlobusCommandGroup", ",", "*", "*", "kwargs", ")", "(", ...
Wrapper over click.group which sets GlobusCommandGroup as the Class Caution! Don't get snake-bitten by this. `globus_group` is a decorator which MUST take arguments. It is not wrapped in our common detect-and-decorate pattern to allow it to be used bare -- that wouldn't work (unnamed groups? weird stuff)
[ "Wrapper", "over", "click", ".", "group", "which", "sets", "GlobusCommandGroup", "as", "the", "Class" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/custom_group.py#L32-L48
13,312
globus/globus-cli
globus_cli/helpers/version.py
_get_package_data
def _get_package_data(): """ Import a set of important packages and return relevant data about them in a dict. Imports are done in here to avoid potential for circular imports and other problems, and to make iteration simpler. """ moddata = [] modlist = ( "click", "configobj", "cryptography", "globus_cli", "globus_sdk", "jmespath", "requests", "six", ) if verbosity() < 2: modlist = ("globus_cli", "globus_sdk", "requests") for mod in modlist: cur = [mod] try: loaded_mod = __import__(mod) except ImportError: loaded_mod = None for attr in ("__version__", "__file__", "__path__"): # if loading failed, be sure to pad with error messages if loaded_mod is None: cur.append("[import failed]") continue try: attrval = getattr(loaded_mod, attr) except AttributeError: attrval = "" cur.append(attrval) moddata.append(cur) return moddata
python
def _get_package_data(): moddata = [] modlist = ( "click", "configobj", "cryptography", "globus_cli", "globus_sdk", "jmespath", "requests", "six", ) if verbosity() < 2: modlist = ("globus_cli", "globus_sdk", "requests") for mod in modlist: cur = [mod] try: loaded_mod = __import__(mod) except ImportError: loaded_mod = None for attr in ("__version__", "__file__", "__path__"): # if loading failed, be sure to pad with error messages if loaded_mod is None: cur.append("[import failed]") continue try: attrval = getattr(loaded_mod, attr) except AttributeError: attrval = "" cur.append(attrval) moddata.append(cur) return moddata
[ "def", "_get_package_data", "(", ")", ":", "moddata", "=", "[", "]", "modlist", "=", "(", "\"click\"", ",", "\"configobj\"", ",", "\"cryptography\"", ",", "\"globus_cli\"", ",", "\"globus_sdk\"", ",", "\"jmespath\"", ",", "\"requests\"", ",", "\"six\"", ",", "...
Import a set of important packages and return relevant data about them in a dict. Imports are done in here to avoid potential for circular imports and other problems, and to make iteration simpler.
[ "Import", "a", "set", "of", "important", "packages", "and", "return", "relevant", "data", "about", "them", "in", "a", "dict", ".", "Imports", "are", "done", "in", "here", "to", "avoid", "potential", "for", "circular", "imports", "and", "other", "problems", ...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/helpers/version.py#L9-L50
13,313
globus/globus-cli
globus_cli/helpers/version.py
print_version
def print_version(): """ Print out the current version, and at least try to fetch the latest from PyPi to print alongside it. It may seem odd that this isn't in globus_cli.version , but it's done this way to separate concerns over printing the version from looking it up. """ latest, current = get_versions() if latest is None: safeprint( ("Installed Version: {0}\n" "Failed to lookup latest version.").format( current ) ) else: safeprint( ("Installed Version: {0}\n" "Latest Version: {1}\n" "\n{2}").format( current, latest, "You are running the latest version of the Globus CLI" if current == latest else ( "You should update your version of the Globus CLI with\n" " globus update" ) if current < latest else "You are running a preview version of the Globus CLI", ) ) # verbose shows more platform and python info # it also includes versions of some CLI dependencies if is_verbose(): moddata = _get_package_data() safeprint("\nVerbose Data\n---") safeprint("platform:") safeprint(" platform: {}".format(platform.platform())) safeprint(" py_implementation: {}".format(platform.python_implementation())) safeprint(" py_version: {}".format(platform.python_version())) safeprint(" sys.executable: {}".format(sys.executable)) safeprint(" site.USER_BASE: {}".format(site.USER_BASE)) safeprint("modules:") for mod, modversion, modfile, modpath in moddata: safeprint(" {}:".format(mod)) safeprint(" __version__: {}".format(modversion)) safeprint(" __file__: {}".format(modfile)) safeprint(" __path__: {}".format(modpath))
python
def print_version(): latest, current = get_versions() if latest is None: safeprint( ("Installed Version: {0}\n" "Failed to lookup latest version.").format( current ) ) else: safeprint( ("Installed Version: {0}\n" "Latest Version: {1}\n" "\n{2}").format( current, latest, "You are running the latest version of the Globus CLI" if current == latest else ( "You should update your version of the Globus CLI with\n" " globus update" ) if current < latest else "You are running a preview version of the Globus CLI", ) ) # verbose shows more platform and python info # it also includes versions of some CLI dependencies if is_verbose(): moddata = _get_package_data() safeprint("\nVerbose Data\n---") safeprint("platform:") safeprint(" platform: {}".format(platform.platform())) safeprint(" py_implementation: {}".format(platform.python_implementation())) safeprint(" py_version: {}".format(platform.python_version())) safeprint(" sys.executable: {}".format(sys.executable)) safeprint(" site.USER_BASE: {}".format(site.USER_BASE)) safeprint("modules:") for mod, modversion, modfile, modpath in moddata: safeprint(" {}:".format(mod)) safeprint(" __version__: {}".format(modversion)) safeprint(" __file__: {}".format(modfile)) safeprint(" __path__: {}".format(modpath))
[ "def", "print_version", "(", ")", ":", "latest", ",", "current", "=", "get_versions", "(", ")", "if", "latest", "is", "None", ":", "safeprint", "(", "(", "\"Installed Version: {0}\\n\"", "\"Failed to lookup latest version.\"", ")", ".", "format", "(", "current", ...
Print out the current version, and at least try to fetch the latest from PyPi to print alongside it. It may seem odd that this isn't in globus_cli.version , but it's done this way to separate concerns over printing the version from looking it up.
[ "Print", "out", "the", "current", "version", "and", "at", "least", "try", "to", "fetch", "the", "latest", "from", "PyPi", "to", "print", "alongside", "it", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/helpers/version.py#L53-L103
13,314
globus/globus-cli
globus_cli/commands/update.py
update_command
def update_command(yes, development, development_version): """ Executor for `globus update` """ # enforce that pip MUST be installed # Why not just include it in the setup.py requirements? Mostly weak # reasons, but it shouldn't matter much. # - if someone has installed the CLI without pip, then they haven't # followed our install instructions, so it's mostly a non-issue # - we don't want to have `pip install -U globus-cli` upgrade pip -- that's # a little bit invasive and easy to do by accident on modern versions of # pip where `--upgrade-strategy` defaults to `eager` # - we may want to do distributions in the future with dependencies baked # into a package, but we'd never want to do that with pip. More changes # would be needed to support that use-case, which we've discussed, but # not depending directly on pip gives us a better escape hatch # - if we depend on pip, we need to start thinking about what versions we # support. In point of fact, that becomes an issue as soon as we add this # command, but not being explicit about it lets us punt for now (maybe # indefinitely) on figuring out version requirements. All of that is to # say: not including it is bad, and from that badness we reap the rewards # of procrastination and non-explicit requirements # - Advanced usage, like `pip install -t` can produce an installed version # of the CLI which can't import its installing `pip`. If we depend on # pip, anyone doing this is forced to get two copies of pip, which seems # kind of nasty (even if "they're asking for it") if not _check_pip_installed(): safeprint("`globus update` requires pip. 
" "Please install pip and try again") click.get_current_context().exit(1) # --development-version implies --development development = development or (development_version is not None) # if we're running with `--development`, then the target version is a # tarball from GitHub, and we can skip out on the safety checks if development: # default to master development_version = development_version or "master" target_version = ( "https://github.com/globus/globus-cli/archive/{}" ".tar.gz#egg=globus-cli" ).format(development_version) else: # lookup version from PyPi, abort if we can't get it latest, current = get_versions() if latest is None: safeprint("Failed to lookup latest version. Aborting.") click.get_current_context().exit(1) # in the case where we're already up to date, do nothing and exit if current == latest: safeprint("You are already running the latest version: {}".format(current)) return # if we're up to date (or ahead, meaning a dev version was installed) # then prompt before continuing, respecting `--yes` else: safeprint( ( "You are already running version {0}\n" "The latest version is {1}" ).format(current, latest) ) if not yes and ( not click.confirm("Continue with the upgrade?", default=True) ): click.get_current_context().exit(1) # if we make it through to here, it means we didn't hit any safe (or # unsafe) abort conditions, so set the target version for upgrade to # the latest target_version = "globus-cli=={}".format(latest) # print verbose warning/help message, to guide less fortunate souls who hit # Ctrl+C at a foolish time, lose connectivity, or don't invoke with `sudo` # on a global install of the CLI safeprint( ( "The Globus CLI will now update itself.\n" "In the event that an error occurs or the update is interrupted, we " "recommend uninstalling and reinstalling the CLI.\n" "Update Target: {}\n" ).format(target_version) ) # register the upgrade activity as an atexit function # this ensures that most library teardown (other than whatever libs might 
# jam into atexit themselves...) has already run, and therefore protects us # against most potential bugs resulting from upgrading click while a click # command is running # # NOTE: there is a risk that we will see bugs on upgrade if the act of # doing a pip upgrade install changes state on disk and we (or a lib we # use) rely on that via pkg_resources, lazy/deferred imports, or good # old-fashioned direct inspection of `__file__` and the like DURING an # atexit method. Anything outside of atexit methods remains safe! @atexit.register def do_upgrade(): install_args = ["install", "--upgrade", target_version] if IS_USER_INSTALL: install_args.insert(1, "--user") _call_pip(*install_args)
python
def update_command(yes, development, development_version): # enforce that pip MUST be installed # Why not just include it in the setup.py requirements? Mostly weak # reasons, but it shouldn't matter much. # - if someone has installed the CLI without pip, then they haven't # followed our install instructions, so it's mostly a non-issue # - we don't want to have `pip install -U globus-cli` upgrade pip -- that's # a little bit invasive and easy to do by accident on modern versions of # pip where `--upgrade-strategy` defaults to `eager` # - we may want to do distributions in the future with dependencies baked # into a package, but we'd never want to do that with pip. More changes # would be needed to support that use-case, which we've discussed, but # not depending directly on pip gives us a better escape hatch # - if we depend on pip, we need to start thinking about what versions we # support. In point of fact, that becomes an issue as soon as we add this # command, but not being explicit about it lets us punt for now (maybe # indefinitely) on figuring out version requirements. All of that is to # say: not including it is bad, and from that badness we reap the rewards # of procrastination and non-explicit requirements # - Advanced usage, like `pip install -t` can produce an installed version # of the CLI which can't import its installing `pip`. If we depend on # pip, anyone doing this is forced to get two copies of pip, which seems # kind of nasty (even if "they're asking for it") if not _check_pip_installed(): safeprint("`globus update` requires pip. 
" "Please install pip and try again") click.get_current_context().exit(1) # --development-version implies --development development = development or (development_version is not None) # if we're running with `--development`, then the target version is a # tarball from GitHub, and we can skip out on the safety checks if development: # default to master development_version = development_version or "master" target_version = ( "https://github.com/globus/globus-cli/archive/{}" ".tar.gz#egg=globus-cli" ).format(development_version) else: # lookup version from PyPi, abort if we can't get it latest, current = get_versions() if latest is None: safeprint("Failed to lookup latest version. Aborting.") click.get_current_context().exit(1) # in the case where we're already up to date, do nothing and exit if current == latest: safeprint("You are already running the latest version: {}".format(current)) return # if we're up to date (or ahead, meaning a dev version was installed) # then prompt before continuing, respecting `--yes` else: safeprint( ( "You are already running version {0}\n" "The latest version is {1}" ).format(current, latest) ) if not yes and ( not click.confirm("Continue with the upgrade?", default=True) ): click.get_current_context().exit(1) # if we make it through to here, it means we didn't hit any safe (or # unsafe) abort conditions, so set the target version for upgrade to # the latest target_version = "globus-cli=={}".format(latest) # print verbose warning/help message, to guide less fortunate souls who hit # Ctrl+C at a foolish time, lose connectivity, or don't invoke with `sudo` # on a global install of the CLI safeprint( ( "The Globus CLI will now update itself.\n" "In the event that an error occurs or the update is interrupted, we " "recommend uninstalling and reinstalling the CLI.\n" "Update Target: {}\n" ).format(target_version) ) # register the upgrade activity as an atexit function # this ensures that most library teardown (other than whatever libs might 
# jam into atexit themselves...) has already run, and therefore protects us # against most potential bugs resulting from upgrading click while a click # command is running # # NOTE: there is a risk that we will see bugs on upgrade if the act of # doing a pip upgrade install changes state on disk and we (or a lib we # use) rely on that via pkg_resources, lazy/deferred imports, or good # old-fashioned direct inspection of `__file__` and the like DURING an # atexit method. Anything outside of atexit methods remains safe! @atexit.register def do_upgrade(): install_args = ["install", "--upgrade", target_version] if IS_USER_INSTALL: install_args.insert(1, "--user") _call_pip(*install_args)
[ "def", "update_command", "(", "yes", ",", "development", ",", "development_version", ")", ":", "# enforce that pip MUST be installed", "# Why not just include it in the setup.py requirements? Mostly weak", "# reasons, but it shouldn't matter much.", "# - if someone has installed the CLI wi...
Executor for `globus update`
[ "Executor", "for", "globus", "update" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/update.py#L55-L155
13,315
globus/globus-cli
globus_cli/commands/whoami.py
whoami_command
def whoami_command(linked_identities): """ Executor for `globus whoami` """ client = get_auth_client() # get userinfo from auth. # if we get back an error the user likely needs to log in again try: res = client.oauth2_userinfo() except AuthAPIError: safeprint( "Unable to get user information. Please try " "logging in again.", write_to_stderr=True, ) click.get_current_context().exit(1) print_command_hint( "For information on which identities are in session see\n" " globus session show\n" ) # --linked-identities either displays all usernames or a table if verbose if linked_identities: try: formatted_print( res["identity_set"], fields=[ ("Username", "username"), ("Name", "name"), ("ID", "sub"), ("Email", "email"), ], simple_text=( None if is_verbose() else "\n".join([x["username"] for x in res["identity_set"]]) ), ) except KeyError: safeprint( "Your current login does not have the consents required " "to view your full identity set. Please log in again " "to agree to the required consents.", write_to_stderr=True, ) # Default output is the top level data else: formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=[ ("Username", "preferred_username"), ("Name", "name"), ("ID", "sub"), ("Email", "email"), ], simple_text=(None if is_verbose() else res["preferred_username"]), )
python
def whoami_command(linked_identities): client = get_auth_client() # get userinfo from auth. # if we get back an error the user likely needs to log in again try: res = client.oauth2_userinfo() except AuthAPIError: safeprint( "Unable to get user information. Please try " "logging in again.", write_to_stderr=True, ) click.get_current_context().exit(1) print_command_hint( "For information on which identities are in session see\n" " globus session show\n" ) # --linked-identities either displays all usernames or a table if verbose if linked_identities: try: formatted_print( res["identity_set"], fields=[ ("Username", "username"), ("Name", "name"), ("ID", "sub"), ("Email", "email"), ], simple_text=( None if is_verbose() else "\n".join([x["username"] for x in res["identity_set"]]) ), ) except KeyError: safeprint( "Your current login does not have the consents required " "to view your full identity set. Please log in again " "to agree to the required consents.", write_to_stderr=True, ) # Default output is the top level data else: formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=[ ("Username", "preferred_username"), ("Name", "name"), ("ID", "sub"), ("Email", "email"), ], simple_text=(None if is_verbose() else res["preferred_username"]), )
[ "def", "whoami_command", "(", "linked_identities", ")", ":", "client", "=", "get_auth_client", "(", ")", "# get userinfo from auth.", "# if we get back an error the user likely needs to log in again", "try", ":", "res", "=", "client", ".", "oauth2_userinfo", "(", ")", "ex...
Executor for `globus whoami`
[ "Executor", "for", "globus", "whoami" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/whoami.py#L24-L83
13,316
globus/globus-cli
globus_cli/parsing/shell_completion.py
get_completion_context
def get_completion_context(args): """ Walk the tree of commands to a terminal command or multicommand, using the Click Context system. Effectively, we'll be using the resilient_parsing mode of commands to stop evaluation, then having them capture their options and arguments, passing us on to the next subcommand. If we walk "off the tree" with a command that we don't recognize, we have a hardstop condition, but otherwise, we walk as far as we can go and that's the location from which we should do our completion work. """ # get the "globus" command as a click.Command root_command = click.get_current_context().find_root().command # build a new context object off of it, with resilient_parsing set so that # no callbacks are invoked ctx = root_command.make_context("globus", list(args), resilient_parsing=True) # walk down multicommands until we've matched on everything and are at a # terminal context that holds all of our completed args while isinstance(ctx.command, click.MultiCommand) and args: # trim out any params that are capturable at this level of the command # tree by resetting the argument list args = ctx.protected_args + ctx.args # if there were no remaining args, stop walking the tree if not args: break # check for a matching command, and if one isn't found stop the # traversal and abort the whole process -- this would mean that a # completed command was entered which doesn't match a known command # there's nothing completion can do in this case unless it implements # sophisticated fuzzy matching command = ctx.command.get_command(ctx, args[0]) if not command: return None # otherwise, grab that command, and build a subcontext to continue the # tree walk else: ctx = command.make_context( args[0], args[1:], parent=ctx, resilient_parsing=True ) # return the context we found return ctx
python
def get_completion_context(args): # get the "globus" command as a click.Command root_command = click.get_current_context().find_root().command # build a new context object off of it, with resilient_parsing set so that # no callbacks are invoked ctx = root_command.make_context("globus", list(args), resilient_parsing=True) # walk down multicommands until we've matched on everything and are at a # terminal context that holds all of our completed args while isinstance(ctx.command, click.MultiCommand) and args: # trim out any params that are capturable at this level of the command # tree by resetting the argument list args = ctx.protected_args + ctx.args # if there were no remaining args, stop walking the tree if not args: break # check for a matching command, and if one isn't found stop the # traversal and abort the whole process -- this would mean that a # completed command was entered which doesn't match a known command # there's nothing completion can do in this case unless it implements # sophisticated fuzzy matching command = ctx.command.get_command(ctx, args[0]) if not command: return None # otherwise, grab that command, and build a subcontext to continue the # tree walk else: ctx = command.make_context( args[0], args[1:], parent=ctx, resilient_parsing=True ) # return the context we found return ctx
[ "def", "get_completion_context", "(", "args", ")", ":", "# get the \"globus\" command as a click.Command", "root_command", "=", "click", ".", "get_current_context", "(", ")", ".", "find_root", "(", ")", ".", "command", "# build a new context object off of it, with resilient_p...
Walk the tree of commands to a terminal command or multicommand, using the Click Context system. Effectively, we'll be using the resilient_parsing mode of commands to stop evaluation, then having them capture their options and arguments, passing us on to the next subcommand. If we walk "off the tree" with a command that we don't recognize, we have a hardstop condition, but otherwise, we walk as far as we can go and that's the location from which we should do our completion work.
[ "Walk", "the", "tree", "of", "commands", "to", "a", "terminal", "command", "or", "multicommand", "using", "the", "Click", "Context", "system", ".", "Effectively", "we", "ll", "be", "using", "the", "resilient_parsing", "mode", "of", "commands", "to", "stop", ...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/shell_completion.py#L33-L78
13,317
globus/globus-cli
globus_cli/commands/endpoint/is_activated.py
endpoint_is_activated
def endpoint_is_activated(endpoint_id, until, absolute_time): """ Executor for `globus endpoint is-activated` """ client = get_client() res = client.endpoint_get_activation_requirements(endpoint_id) def fail(deadline=None): exp_string = "" if deadline is not None: exp_string = " or will expire within {} seconds".format(deadline) message = "The endpoint is not activated{}.\n\n".format( exp_string ) + activation_requirements_help_text(res, endpoint_id) formatted_print(res, simple_text=message) click.get_current_context().exit(1) def success(msg, *format_params): formatted_print(res, simple_text=(msg.format(endpoint_id, *format_params))) click.get_current_context().exit(0) # eternally active endpoints have a special expires_in value if res["expires_in"] == -1: success("{} does not require activation") # autoactivation is not supported and --until was not passed if until is None: # and we are active right now (0s in the future)... if res.active_until(0): success("{} is activated") # or we are not active fail() # autoactivation is not supported and --until was passed if res.active_until(until, relative_time=not absolute_time): success("{} will be active for at least {} seconds", until) else: fail(deadline=until)
python
def endpoint_is_activated(endpoint_id, until, absolute_time): client = get_client() res = client.endpoint_get_activation_requirements(endpoint_id) def fail(deadline=None): exp_string = "" if deadline is not None: exp_string = " or will expire within {} seconds".format(deadline) message = "The endpoint is not activated{}.\n\n".format( exp_string ) + activation_requirements_help_text(res, endpoint_id) formatted_print(res, simple_text=message) click.get_current_context().exit(1) def success(msg, *format_params): formatted_print(res, simple_text=(msg.format(endpoint_id, *format_params))) click.get_current_context().exit(0) # eternally active endpoints have a special expires_in value if res["expires_in"] == -1: success("{} does not require activation") # autoactivation is not supported and --until was not passed if until is None: # and we are active right now (0s in the future)... if res.active_until(0): success("{} is activated") # or we are not active fail() # autoactivation is not supported and --until was passed if res.active_until(until, relative_time=not absolute_time): success("{} will be active for at least {} seconds", until) else: fail(deadline=until)
[ "def", "endpoint_is_activated", "(", "endpoint_id", ",", "until", ",", "absolute_time", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "client", ".", "endpoint_get_activation_requirements", "(", "endpoint_id", ")", "def", "fail", "(", "deadline", "...
Executor for `globus endpoint is-activated`
[ "Executor", "for", "globus", "endpoint", "is", "-", "activated" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/is_activated.py#L38-L76
13,318
globus/globus-cli
globus_cli/commands/endpoint/create.py
endpoint_create
def endpoint_create(**kwargs): """ Executor for `globus endpoint create` """ client = get_client() # get endpoint type, ensure unambiguous. personal = kwargs.pop("personal") server = kwargs.pop("server") shared = kwargs.pop("shared") if personal and (not server) and (not shared): endpoint_type = "personal" elif server and (not personal) and (not shared): endpoint_type = "server" elif shared and (not personal) and (not server): endpoint_type = "shared" else: raise click.UsageError( "Exactly one of --personal, --server, or --shared is required." ) # validate options kwargs["is_globus_connect"] = personal or None validate_endpoint_create_and_update_params(endpoint_type, False, kwargs) # shared endpoint creation if shared: endpoint_id, host_path = shared kwargs["host_endpoint"] = endpoint_id kwargs["host_path"] = host_path ep_doc = assemble_generic_doc("shared_endpoint", **kwargs) autoactivate(client, endpoint_id, if_expires_in=60) res = client.create_shared_endpoint(ep_doc) # non shared endpoint creation else: # omit `is_globus_connect` key if not GCP, otherwise include as `True` ep_doc = assemble_generic_doc("endpoint", **kwargs) res = client.create_endpoint(ep_doc) # output formatted_print( res, fields=(COMMON_FIELDS + GCP_FIELDS if personal else COMMON_FIELDS), text_format=FORMAT_TEXT_RECORD, )
python
def endpoint_create(**kwargs): client = get_client() # get endpoint type, ensure unambiguous. personal = kwargs.pop("personal") server = kwargs.pop("server") shared = kwargs.pop("shared") if personal and (not server) and (not shared): endpoint_type = "personal" elif server and (not personal) and (not shared): endpoint_type = "server" elif shared and (not personal) and (not server): endpoint_type = "shared" else: raise click.UsageError( "Exactly one of --personal, --server, or --shared is required." ) # validate options kwargs["is_globus_connect"] = personal or None validate_endpoint_create_and_update_params(endpoint_type, False, kwargs) # shared endpoint creation if shared: endpoint_id, host_path = shared kwargs["host_endpoint"] = endpoint_id kwargs["host_path"] = host_path ep_doc = assemble_generic_doc("shared_endpoint", **kwargs) autoactivate(client, endpoint_id, if_expires_in=60) res = client.create_shared_endpoint(ep_doc) # non shared endpoint creation else: # omit `is_globus_connect` key if not GCP, otherwise include as `True` ep_doc = assemble_generic_doc("endpoint", **kwargs) res = client.create_endpoint(ep_doc) # output formatted_print( res, fields=(COMMON_FIELDS + GCP_FIELDS if personal else COMMON_FIELDS), text_format=FORMAT_TEXT_RECORD, )
[ "def", "endpoint_create", "(", "*", "*", "kwargs", ")", ":", "client", "=", "get_client", "(", ")", "# get endpoint type, ensure unambiguous.", "personal", "=", "kwargs", ".", "pop", "(", "\"personal\"", ")", "server", "=", "kwargs", ".", "pop", "(", "\"server...
Executor for `globus endpoint create`
[ "Executor", "for", "globus", "endpoint", "create" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/create.py#L56-L103
13,319
globus/globus-cli
globus_cli/commands/endpoint/server/update.py
server_update
def server_update( endpoint_id, server_id, subject, port, scheme, hostname, incoming_data_ports, outgoing_data_ports, ): """ Executor for `globus endpoint server update` """ client = get_client() server_doc = assemble_generic_doc( "server", subject=subject, port=port, scheme=scheme, hostname=hostname ) # n.b. must be done after assemble_generic_doc(), as that function filters # out `None`s, which we need to be able to set for `'unspecified'` if incoming_data_ports: server_doc.update( incoming_data_port_start=incoming_data_ports[0], incoming_data_port_end=incoming_data_ports[1], ) if outgoing_data_ports: server_doc.update( outgoing_data_port_start=outgoing_data_ports[0], outgoing_data_port_end=outgoing_data_ports[1], ) res = client.update_endpoint_server(endpoint_id, server_id, server_doc) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def server_update( endpoint_id, server_id, subject, port, scheme, hostname, incoming_data_ports, outgoing_data_ports, ): client = get_client() server_doc = assemble_generic_doc( "server", subject=subject, port=port, scheme=scheme, hostname=hostname ) # n.b. must be done after assemble_generic_doc(), as that function filters # out `None`s, which we need to be able to set for `'unspecified'` if incoming_data_ports: server_doc.update( incoming_data_port_start=incoming_data_ports[0], incoming_data_port_end=incoming_data_ports[1], ) if outgoing_data_ports: server_doc.update( outgoing_data_port_start=outgoing_data_ports[0], outgoing_data_port_end=outgoing_data_ports[1], ) res = client.update_endpoint_server(endpoint_id, server_id, server_doc) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "server_update", "(", "endpoint_id", ",", "server_id", ",", "subject", ",", "port", ",", "scheme", ",", "hostname", ",", "incoming_data_ports", ",", "outgoing_data_ports", ",", ")", ":", "client", "=", "get_client", "(", ")", "server_doc", "=", "assembl...
Executor for `globus endpoint server update`
[ "Executor", "for", "globus", "endpoint", "server", "update" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/server/update.py#L18-L51
13,320
globus/globus-cli
globus_cli/commands/config/filename.py
filename_command
def filename_command(): """ Executor for `globus config filename` """ try: config = get_config_obj(file_error=True) except IOError as e: safeprint(e, write_to_stderr=True) click.get_current_context().exit(1) else: safeprint(config.filename)
python
def filename_command(): try: config = get_config_obj(file_error=True) except IOError as e: safeprint(e, write_to_stderr=True) click.get_current_context().exit(1) else: safeprint(config.filename)
[ "def", "filename_command", "(", ")", ":", "try", ":", "config", "=", "get_config_obj", "(", "file_error", "=", "True", ")", "except", "IOError", "as", "e", ":", "safeprint", "(", "e", ",", "write_to_stderr", "=", "True", ")", "click", ".", "get_current_con...
Executor for `globus config filename`
[ "Executor", "for", "globus", "config", "filename" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/config/filename.py#L10-L20
13,321
globus/globus-cli
globus_cli/commands/endpoint/my_shared_endpoint_list.py
my_shared_endpoint_list
def my_shared_endpoint_list(endpoint_id): """ Executor for `globus endpoint my-shared-endpoint-list` """ client = get_client() ep_iterator = client.my_shared_endpoint_list(endpoint_id) formatted_print(ep_iterator, fields=ENDPOINT_LIST_FIELDS)
python
def my_shared_endpoint_list(endpoint_id): client = get_client() ep_iterator = client.my_shared_endpoint_list(endpoint_id) formatted_print(ep_iterator, fields=ENDPOINT_LIST_FIELDS)
[ "def", "my_shared_endpoint_list", "(", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "ep_iterator", "=", "client", ".", "my_shared_endpoint_list", "(", "endpoint_id", ")", "formatted_print", "(", "ep_iterator", ",", "fields", "=", "ENDPOINT_LIST_FI...
Executor for `globus endpoint my-shared-endpoint-list`
[ "Executor", "for", "globus", "endpoint", "my", "-", "shared", "-", "endpoint", "-", "list" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/my_shared_endpoint_list.py#L14-L21
13,322
globus/globus-cli
globus_cli/commands/get_identities.py
_try_b32_decode
def _try_b32_decode(v): """ Attempt to decode a b32-encoded username which is sometimes generated by internal Globus components. The expectation is that the string is a valid ID, username, or b32-encoded name. Therefore, we can do some simple checking on it. If it does not appear to be formatted correctly, return None. """ # should start with "u_" if not v.startswith("u_"): return None # usernames have @ , we want to allow `u_foo@example.com` # b32 names never have @ if "@" in v: return None # trim "u_" v = v[2:] # wrong length if len(v) != 26: return None # append padding and uppercase so that b32decode will work v = v.upper() + (6 * "=") # try to decode try: return str(uuid.UUID(bytes=base64.b32decode(v))) # if it fails, I guess it's a username? Not much left to do except ValueError: return None
python
def _try_b32_decode(v): # should start with "u_" if not v.startswith("u_"): return None # usernames have @ , we want to allow `u_foo@example.com` # b32 names never have @ if "@" in v: return None # trim "u_" v = v[2:] # wrong length if len(v) != 26: return None # append padding and uppercase so that b32decode will work v = v.upper() + (6 * "=") # try to decode try: return str(uuid.UUID(bytes=base64.b32decode(v))) # if it fails, I guess it's a username? Not much left to do except ValueError: return None
[ "def", "_try_b32_decode", "(", "v", ")", ":", "# should start with \"u_\"", "if", "not", "v", ".", "startswith", "(", "\"u_\"", ")", ":", "return", "None", "# usernames have @ , we want to allow `u_foo@example.com`", "# b32 names never have @", "if", "\"@\"", "in", "v",...
Attempt to decode a b32-encoded username which is sometimes generated by internal Globus components. The expectation is that the string is a valid ID, username, or b32-encoded name. Therefore, we can do some simple checking on it. If it does not appear to be formatted correctly, return None.
[ "Attempt", "to", "decode", "a", "b32", "-", "encoded", "username", "which", "is", "sometimes", "generated", "by", "internal", "Globus", "components", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/get_identities.py#L12-L43
13,323
globus/globus-cli
globus_cli/commands/get_identities.py
get_identities_command
def get_identities_command(values): """ Executor for `globus get-identities` """ client = get_auth_client() resolved_values = [_try_b32_decode(v) or v for v in values] # since API doesn't accept mixed ids and usernames, # split input values into separate lists ids = [] usernames = [] for val in resolved_values: try: uuid.UUID(val) ids.append(val) except ValueError: usernames.append(val) # make two calls to get_identities with ids and usernames # then combine the calls into one response results = [] if len(ids): results += client.get_identities(ids=ids)["identities"] if len(usernames): results += client.get_identities(usernames=usernames)["identities"] res = GlobusResponse({"identities": results}) def _custom_text_format(identities): """ Non-verbose text output is customized """ def resolve_identity(value): """ helper to deal with variable inputs and uncertain response order """ for identity in identities: if identity["id"] == value: return identity["username"] if identity["username"] == value: return identity["id"] return "NO_SUCH_IDENTITY" # standard output is one resolved identity per line in the same order # as the inputs. A resolved identity is either a username if given a # UUID vice versa, or "NO_SUCH_IDENTITY" if the identity could not be # found for val in resolved_values: safeprint(resolve_identity(val)) formatted_print( res, response_key="identities", fields=[ ("ID", "id"), ("Username", "username"), ("Full Name", "name"), ("Organization", "organization"), ("Email Address", "email"), ], # verbose output is a table. Order not guaranteed, may contain # duplicates text_format=(FORMAT_TEXT_TABLE if is_verbose() else _custom_text_format), )
python
def get_identities_command(values): client = get_auth_client() resolved_values = [_try_b32_decode(v) or v for v in values] # since API doesn't accept mixed ids and usernames, # split input values into separate lists ids = [] usernames = [] for val in resolved_values: try: uuid.UUID(val) ids.append(val) except ValueError: usernames.append(val) # make two calls to get_identities with ids and usernames # then combine the calls into one response results = [] if len(ids): results += client.get_identities(ids=ids)["identities"] if len(usernames): results += client.get_identities(usernames=usernames)["identities"] res = GlobusResponse({"identities": results}) def _custom_text_format(identities): """ Non-verbose text output is customized """ def resolve_identity(value): """ helper to deal with variable inputs and uncertain response order """ for identity in identities: if identity["id"] == value: return identity["username"] if identity["username"] == value: return identity["id"] return "NO_SUCH_IDENTITY" # standard output is one resolved identity per line in the same order # as the inputs. A resolved identity is either a username if given a # UUID vice versa, or "NO_SUCH_IDENTITY" if the identity could not be # found for val in resolved_values: safeprint(resolve_identity(val)) formatted_print( res, response_key="identities", fields=[ ("ID", "id"), ("Username", "username"), ("Full Name", "name"), ("Organization", "organization"), ("Email Address", "email"), ], # verbose output is a table. Order not guaranteed, may contain # duplicates text_format=(FORMAT_TEXT_TABLE if is_verbose() else _custom_text_format), )
[ "def", "get_identities_command", "(", "values", ")", ":", "client", "=", "get_auth_client", "(", ")", "resolved_values", "=", "[", "_try_b32_decode", "(", "v", ")", "or", "v", "for", "v", "in", "values", "]", "# since API doesn't accept mixed ids and usernames,", ...
Executor for `globus get-identities`
[ "Executor", "for", "globus", "get", "-", "identities" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/get_identities.py#L55-L119
13,324
globus/globus-cli
globus_cli/services/transfer.py
supported_activation_methods
def supported_activation_methods(res): """ Given an activation_requirements document returns a list of activation methods supported by this endpoint. """ supported = ["web"] # web activation is always supported. # oauth if res["oauth_server"]: supported.append("oauth") for req in res["DATA"]: # myproxy if ( req["type"] == "myproxy" and req["name"] == "hostname" and req["value"] != "myproxy.globusonline.org" ): supported.append("myproxy") # delegate_proxy if req["type"] == "delegate_proxy" and req["name"] == "public_key": supported.append("delegate_proxy") return supported
python
def supported_activation_methods(res): supported = ["web"] # web activation is always supported. # oauth if res["oauth_server"]: supported.append("oauth") for req in res["DATA"]: # myproxy if ( req["type"] == "myproxy" and req["name"] == "hostname" and req["value"] != "myproxy.globusonline.org" ): supported.append("myproxy") # delegate_proxy if req["type"] == "delegate_proxy" and req["name"] == "public_key": supported.append("delegate_proxy") return supported
[ "def", "supported_activation_methods", "(", "res", ")", ":", "supported", "=", "[", "\"web\"", "]", "# web activation is always supported.", "# oauth", "if", "res", "[", "\"oauth_server\"", "]", ":", "supported", ".", "append", "(", "\"oauth\"", ")", "for", "req",...
Given an activation_requirements document returns a list of activation methods supported by this endpoint.
[ "Given", "an", "activation_requirements", "document", "returns", "a", "list", "of", "activation", "methods", "supported", "by", "this", "endpoint", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/services/transfer.py#L159-L183
13,325
globus/globus-cli
globus_cli/services/transfer.py
activation_requirements_help_text
def activation_requirements_help_text(res, ep_id): """ Given an activation requirements document and an endpoint_id returns a string of help text for how to activate the endpoint """ methods = supported_activation_methods(res) lines = [ "This endpoint supports the following activation methods: ", ", ".join(methods).replace("_", " "), "\n", ( "For web activation use:\n" "'globus endpoint activate --web {}'\n".format(ep_id) if "web" in methods else "" ), ( "For myproxy activation use:\n" "'globus endpoint activate --myproxy {}'\n".format(ep_id) if "myproxy" in methods else "" ), ( "For oauth activation use web activation:\n" "'globus endpoint activate --web {}'\n".format(ep_id) if "oauth" in methods else "" ), ( "For delegate proxy activation use:\n" "'globus endpoint activate --delegate-proxy " "X.509_PEM_FILE {}'\n".format(ep_id) if "delegate_proxy" in methods else "" ), ( "Delegate proxy activation requires an additional dependency on " "cryptography. See the docs for details:\n" "https://docs.globus.org/cli/reference/endpoint_activate/\n" if "delegate_proxy" in methods else "" ), ] return "".join(lines)
python
def activation_requirements_help_text(res, ep_id): methods = supported_activation_methods(res) lines = [ "This endpoint supports the following activation methods: ", ", ".join(methods).replace("_", " "), "\n", ( "For web activation use:\n" "'globus endpoint activate --web {}'\n".format(ep_id) if "web" in methods else "" ), ( "For myproxy activation use:\n" "'globus endpoint activate --myproxy {}'\n".format(ep_id) if "myproxy" in methods else "" ), ( "For oauth activation use web activation:\n" "'globus endpoint activate --web {}'\n".format(ep_id) if "oauth" in methods else "" ), ( "For delegate proxy activation use:\n" "'globus endpoint activate --delegate-proxy " "X.509_PEM_FILE {}'\n".format(ep_id) if "delegate_proxy" in methods else "" ), ( "Delegate proxy activation requires an additional dependency on " "cryptography. See the docs for details:\n" "https://docs.globus.org/cli/reference/endpoint_activate/\n" if "delegate_proxy" in methods else "" ), ] return "".join(lines)
[ "def", "activation_requirements_help_text", "(", "res", ",", "ep_id", ")", ":", "methods", "=", "supported_activation_methods", "(", "res", ")", "lines", "=", "[", "\"This endpoint supports the following activation methods: \"", ",", "\", \"", ".", "join", "(", "methods...
Given an activation requirements document and an endpoint_id returns a string of help text for how to activate the endpoint
[ "Given", "an", "activation", "requirements", "document", "and", "an", "endpoint_id", "returns", "a", "string", "of", "help", "text", "for", "how", "to", "activate", "the", "endpoint" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/services/transfer.py#L186-L231
13,326
globus/globus-cli
globus_cli/services/transfer.py
get_endpoint_w_server_list
def get_endpoint_w_server_list(endpoint_id): """ A helper for handling endpoint server list lookups correctly accounting for various endpoint types. - Raises click.UsageError when used on Shares - Returns (<get_endpoint_response>, "S3") for S3 endpoints - Returns (<get_endpoint_response>, <server_list_response>) for all other Endpoints """ client = get_client() endpoint = client.get_endpoint(endpoint_id) if endpoint["host_endpoint_id"]: # not GCS -- this is a share endpoint raise click.UsageError( dedent( u"""\ {id} ({0}) is a share and does not have servers. To see details of the share, use globus endpoint show {id} To list the servers on the share's host endpoint, use globus endpoint server list {host_endpoint_id} """ ).format(display_name_or_cname(endpoint), **endpoint.data) ) if endpoint["s3_url"]: # not GCS -- legacy S3 endpoint type return (endpoint, "S3") else: return (endpoint, client.endpoint_server_list(endpoint_id))
python
def get_endpoint_w_server_list(endpoint_id): client = get_client() endpoint = client.get_endpoint(endpoint_id) if endpoint["host_endpoint_id"]: # not GCS -- this is a share endpoint raise click.UsageError( dedent( u"""\ {id} ({0}) is a share and does not have servers. To see details of the share, use globus endpoint show {id} To list the servers on the share's host endpoint, use globus endpoint server list {host_endpoint_id} """ ).format(display_name_or_cname(endpoint), **endpoint.data) ) if endpoint["s3_url"]: # not GCS -- legacy S3 endpoint type return (endpoint, "S3") else: return (endpoint, client.endpoint_server_list(endpoint_id))
[ "def", "get_endpoint_w_server_list", "(", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "endpoint", "=", "client", ".", "get_endpoint", "(", "endpoint_id", ")", "if", "endpoint", "[", "\"host_endpoint_id\"", "]", ":", "# not GCS -- this is a share ...
A helper for handling endpoint server list lookups correctly accounting for various endpoint types. - Raises click.UsageError when used on Shares - Returns (<get_endpoint_response>, "S3") for S3 endpoints - Returns (<get_endpoint_response>, <server_list_response>) for all other Endpoints
[ "A", "helper", "for", "handling", "endpoint", "server", "list", "lookups", "correctly", "accounting", "for", "various", "endpoint", "types", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/services/transfer.py#L261-L294
13,327
globus/globus-cli
globus_cli/services/transfer.py
RetryingTransferClient.retry
def retry(self, f, *args, **kwargs): """ Retries the given function self.tries times on NetworkErros """ backoff = random.random() / 100 # 5ms on average for _ in range(self.tries - 1): try: return f(*args, **kwargs) except NetworkError: time.sleep(backoff) backoff *= 2 return f(*args, **kwargs)
python
def retry(self, f, *args, **kwargs): backoff = random.random() / 100 # 5ms on average for _ in range(self.tries - 1): try: return f(*args, **kwargs) except NetworkError: time.sleep(backoff) backoff *= 2 return f(*args, **kwargs)
[ "def", "retry", "(", "self", ",", "f", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "backoff", "=", "random", ".", "random", "(", ")", "/", "100", "# 5ms on average", "for", "_", "in", "range", "(", "self", ".", "tries", "-", "1", ")", ...
Retries the given function self.tries times on NetworkErros
[ "Retries", "the", "given", "function", "self", ".", "tries", "times", "on", "NetworkErros" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/services/transfer.py#L32-L43
13,328
globus/globus-cli
globus_cli/commands/task/cancel.py
cancel_task
def cancel_task(all, task_id): """ Executor for `globus task cancel` """ if bool(all) + bool(task_id) != 1: raise click.UsageError( "You must pass EITHER the special --all flag " "to cancel all in-progress tasks OR a single " "task ID to cancel." ) client = get_client() if all: from sys import maxsize task_ids = [ task_row["task_id"] for task_row in client.task_list( filter="type:TRANSFER,DELETE/status:ACTIVE,INACTIVE", fields="task_id", num_results=maxsize, # FIXME want to ask for "unlimited" set ) ] task_count = len(task_ids) if not task_ids: raise click.ClickException("You have no in-progress tasks.") def cancellation_iterator(): for i in task_ids: yield (i, client.cancel_task(i).data) def json_converter(res): return { "results": [x for i, x in cancellation_iterator()], "task_ids": task_ids, } def _custom_text(res): for (i, (task_id, data)) in enumerate(cancellation_iterator(), start=1): safeprint( u"{} ({} of {}): {}".format(task_id, i, task_count, data["message"]) ) # FIXME: this is kind of an abuse of formatted_print because the # text format and json converter are doing their own thing, not really # interacting with the "response data" (None). Is there a better way of # handling this? formatted_print(None, text_format=_custom_text, json_converter=json_converter) else: res = client.cancel_task(task_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def cancel_task(all, task_id): if bool(all) + bool(task_id) != 1: raise click.UsageError( "You must pass EITHER the special --all flag " "to cancel all in-progress tasks OR a single " "task ID to cancel." ) client = get_client() if all: from sys import maxsize task_ids = [ task_row["task_id"] for task_row in client.task_list( filter="type:TRANSFER,DELETE/status:ACTIVE,INACTIVE", fields="task_id", num_results=maxsize, # FIXME want to ask for "unlimited" set ) ] task_count = len(task_ids) if not task_ids: raise click.ClickException("You have no in-progress tasks.") def cancellation_iterator(): for i in task_ids: yield (i, client.cancel_task(i).data) def json_converter(res): return { "results": [x for i, x in cancellation_iterator()], "task_ids": task_ids, } def _custom_text(res): for (i, (task_id, data)) in enumerate(cancellation_iterator(), start=1): safeprint( u"{} ({} of {}): {}".format(task_id, i, task_count, data["message"]) ) # FIXME: this is kind of an abuse of formatted_print because the # text format and json converter are doing their own thing, not really # interacting with the "response data" (None). Is there a better way of # handling this? formatted_print(None, text_format=_custom_text, json_converter=json_converter) else: res = client.cancel_task(task_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "cancel_task", "(", "all", ",", "task_id", ")", ":", "if", "bool", "(", "all", ")", "+", "bool", "(", "task_id", ")", "!=", "1", ":", "raise", "click", ".", "UsageError", "(", "\"You must pass EITHER the special --all flag \"", "\"to cancel all in-progres...
Executor for `globus task cancel`
[ "Executor", "for", "globus", "task", "cancel" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/task/cancel.py#L16-L71
13,329
globus/globus-cli
globus_cli/commands/task/wait.py
task_wait
def task_wait(meow, heartbeat, polling_interval, timeout, task_id, timeout_exit_code): """ Executor for `globus task wait` """ task_wait_with_io( meow, heartbeat, polling_interval, timeout, task_id, timeout_exit_code )
python
def task_wait(meow, heartbeat, polling_interval, timeout, task_id, timeout_exit_code): task_wait_with_io( meow, heartbeat, polling_interval, timeout, task_id, timeout_exit_code )
[ "def", "task_wait", "(", "meow", ",", "heartbeat", ",", "polling_interval", ",", "timeout", ",", "task_id", ",", "timeout_exit_code", ")", ":", "task_wait_with_io", "(", "meow", ",", "heartbeat", ",", "polling_interval", ",", "timeout", ",", "task_id", ",", "t...
Executor for `globus task wait`
[ "Executor", "for", "globus", "task", "wait" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/task/wait.py#L15-L21
13,330
globus/globus-cli
globus_cli/parsing/one_use_option.py
one_use_option
def one_use_option(*args, **kwargs): """ Wrapper of the click.option decorator that replaces any instances of the Option class with the custom OneUseOption class """ # cannot force a multiple or count option to be single use if "multiple" in kwargs or "count" in kwargs: raise ValueError( "Internal error, one_use_option cannot be used " "with multiple or count." ) # cannot force a non Option Paramater (argument) to be a OneUseOption if kwargs.get("cls"): raise TypeError( "Internal error, one_use_option cannot overwrite " "cls {}.".format(kwargs.get("cls")) ) # use our OneUseOption class instead of a normal Option kwargs["cls"] = OneUseOption # if dealing with a flag, switch to a counting option, # and then assert if the count is not greater than 1 and cast to a bool if kwargs.get("is_flag"): kwargs["is_flag"] = False # mutually exclusive with count kwargs["count"] = True # if not a flag, this option takes an argument(s), switch to a multiple # option, assert the len is 1, and treat the first element as the value else: kwargs["multiple"] = True # decorate with the click.option decorator, but with our custom kwargs def decorator(f): return click.option(*args, **kwargs)(f) return decorator
python
def one_use_option(*args, **kwargs): # cannot force a multiple or count option to be single use if "multiple" in kwargs or "count" in kwargs: raise ValueError( "Internal error, one_use_option cannot be used " "with multiple or count." ) # cannot force a non Option Paramater (argument) to be a OneUseOption if kwargs.get("cls"): raise TypeError( "Internal error, one_use_option cannot overwrite " "cls {}.".format(kwargs.get("cls")) ) # use our OneUseOption class instead of a normal Option kwargs["cls"] = OneUseOption # if dealing with a flag, switch to a counting option, # and then assert if the count is not greater than 1 and cast to a bool if kwargs.get("is_flag"): kwargs["is_flag"] = False # mutually exclusive with count kwargs["count"] = True # if not a flag, this option takes an argument(s), switch to a multiple # option, assert the len is 1, and treat the first element as the value else: kwargs["multiple"] = True # decorate with the click.option decorator, but with our custom kwargs def decorator(f): return click.option(*args, **kwargs)(f) return decorator
[ "def", "one_use_option", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# cannot force a multiple or count option to be single use", "if", "\"multiple\"", "in", "kwargs", "or", "\"count\"", "in", "kwargs", ":", "raise", "ValueError", "(", "\"Internal error, one...
Wrapper of the click.option decorator that replaces any instances of the Option class with the custom OneUseOption class
[ "Wrapper", "of", "the", "click", ".", "option", "decorator", "that", "replaces", "any", "instances", "of", "the", "Option", "class", "with", "the", "custom", "OneUseOption", "class" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/one_use_option.py#L43-L79
13,331
globus/globus-cli
globus_cli/commands/config/remove.py
remove_command
def remove_command(parameter): """ Executor for `globus config remove` """ conf = get_config_obj() section = "cli" if "." in parameter: section, parameter = parameter.split(".", 1) # ensure that the section exists if section not in conf: conf[section] = {} # remove the value for the given parameter del conf[section][parameter] # write to disk safeprint("Writing updated config to {}".format(conf.filename)) conf.write()
python
def remove_command(parameter): conf = get_config_obj() section = "cli" if "." in parameter: section, parameter = parameter.split(".", 1) # ensure that the section exists if section not in conf: conf[section] = {} # remove the value for the given parameter del conf[section][parameter] # write to disk safeprint("Writing updated config to {}".format(conf.filename)) conf.write()
[ "def", "remove_command", "(", "parameter", ")", ":", "conf", "=", "get_config_obj", "(", ")", "section", "=", "\"cli\"", "if", "\".\"", "in", "parameter", ":", "section", ",", "parameter", "=", "parameter", ".", "split", "(", "\".\"", ",", "1", ")", "# e...
Executor for `globus config remove`
[ "Executor", "for", "globus", "config", "remove" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/config/remove.py#L11-L29
13,332
globus/globus-cli
globus_cli/commands/endpoint/permission/show.py
show_command
def show_command(endpoint_id, rule_id): """ Executor for `globus endpoint permission show` """ client = get_client() rule = client.get_endpoint_acl_rule(endpoint_id, rule_id) formatted_print( rule, text_format=FORMAT_TEXT_RECORD, fields=( ("Rule ID", "id"), ("Permissions", "permissions"), ("Shared With", _shared_with_keyfunc), ("Path", "path"), ), )
python
def show_command(endpoint_id, rule_id): client = get_client() rule = client.get_endpoint_acl_rule(endpoint_id, rule_id) formatted_print( rule, text_format=FORMAT_TEXT_RECORD, fields=( ("Rule ID", "id"), ("Permissions", "permissions"), ("Shared With", _shared_with_keyfunc), ("Path", "path"), ), )
[ "def", "show_command", "(", "endpoint_id", ",", "rule_id", ")", ":", "client", "=", "get_client", "(", ")", "rule", "=", "client", ".", "get_endpoint_acl_rule", "(", "endpoint_id", ",", "rule_id", ")", "formatted_print", "(", "rule", ",", "text_format", "=", ...
Executor for `globus endpoint permission show`
[ "Executor", "for", "globus", "endpoint", "permission", "show" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/permission/show.py#L22-L38
13,333
globus/globus-cli
globus_cli/commands/bookmark/rename.py
bookmark_rename
def bookmark_rename(bookmark_id_or_name, new_bookmark_name): """ Executor for `globus bookmark rename` """ client = get_client() bookmark_id = resolve_id_or_name(client, bookmark_id_or_name)["id"] submit_data = {"name": new_bookmark_name} res = client.update_bookmark(bookmark_id, submit_data) formatted_print(res, simple_text="Success")
python
def bookmark_rename(bookmark_id_or_name, new_bookmark_name): client = get_client() bookmark_id = resolve_id_or_name(client, bookmark_id_or_name)["id"] submit_data = {"name": new_bookmark_name} res = client.update_bookmark(bookmark_id, submit_data) formatted_print(res, simple_text="Success")
[ "def", "bookmark_rename", "(", "bookmark_id_or_name", ",", "new_bookmark_name", ")", ":", "client", "=", "get_client", "(", ")", "bookmark_id", "=", "resolve_id_or_name", "(", "client", ",", "bookmark_id_or_name", ")", "[", "\"id\"", "]", "submit_data", "=", "{", ...
Executor for `globus bookmark rename`
[ "Executor", "for", "globus", "bookmark", "rename" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/bookmark/rename.py#L13-L23
13,334
globus/globus-cli
globus_cli/commands/config/show.py
show_command
def show_command(parameter): """ Executor for `globus config show` """ section = "cli" if "." in parameter: section, parameter = parameter.split(".", 1) value = lookup_option(parameter, section=section) if value is None: safeprint("{} not set".format(parameter)) else: safeprint("{} = {}".format(parameter, value))
python
def show_command(parameter): section = "cli" if "." in parameter: section, parameter = parameter.split(".", 1) value = lookup_option(parameter, section=section) if value is None: safeprint("{} not set".format(parameter)) else: safeprint("{} = {}".format(parameter, value))
[ "def", "show_command", "(", "parameter", ")", ":", "section", "=", "\"cli\"", "if", "\".\"", "in", "parameter", ":", "section", ",", "parameter", "=", "parameter", ".", "split", "(", "\".\"", ",", "1", ")", "value", "=", "lookup_option", "(", "parameter", ...
Executor for `globus config show`
[ "Executor", "for", "globus", "config", "show" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/config/show.py#L11-L24
13,335
globus/globus-cli
globus_cli/commands/rename.py
rename_command
def rename_command(source, destination): """ Executor for `globus rename` """ source_ep, source_path = source dest_ep, dest_path = destination if source_ep != dest_ep: raise click.UsageError( ( "rename requires that the source and dest " "endpoints are the same, {} != {}" ).format(source_ep, dest_ep) ) endpoint_id = source_ep client = get_client() autoactivate(client, endpoint_id, if_expires_in=60) res = client.operation_rename(endpoint_id, oldpath=source_path, newpath=dest_path) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def rename_command(source, destination): source_ep, source_path = source dest_ep, dest_path = destination if source_ep != dest_ep: raise click.UsageError( ( "rename requires that the source and dest " "endpoints are the same, {} != {}" ).format(source_ep, dest_ep) ) endpoint_id = source_ep client = get_client() autoactivate(client, endpoint_id, if_expires_in=60) res = client.operation_rename(endpoint_id, oldpath=source_path, newpath=dest_path) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "rename_command", "(", "source", ",", "destination", ")", ":", "source_ep", ",", "source_path", "=", "source", "dest_ep", ",", "dest_path", "=", "destination", "if", "source_ep", "!=", "dest_ep", ":", "raise", "click", ".", "UsageError", "(", "(", "\"...
Executor for `globus rename`
[ "Executor", "for", "globus", "rename" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/rename.py#L14-L34
13,336
globus/globus-cli
globus_cli/commands/endpoint/show.py
endpoint_show
def endpoint_show(endpoint_id): """ Executor for `globus endpoint show` """ client = get_client() res = client.get_endpoint(endpoint_id) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=GCP_FIELDS if res["is_globus_connect"] else STANDARD_FIELDS, )
python
def endpoint_show(endpoint_id): client = get_client() res = client.get_endpoint(endpoint_id) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=GCP_FIELDS if res["is_globus_connect"] else STANDARD_FIELDS, )
[ "def", "endpoint_show", "(", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "client", ".", "get_endpoint", "(", "endpoint_id", ")", "formatted_print", "(", "res", ",", "text_format", "=", "FORMAT_TEXT_RECORD", ",", "fields", "=", ...
Executor for `globus endpoint show`
[ "Executor", "for", "globus", "endpoint", "show" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/show.py#L11-L23
13,337
globus/globus-cli
globus_cli/commands/endpoint/permission/update.py
update_command
def update_command(permissions, rule_id, endpoint_id): """ Executor for `globus endpoint permission update` """ client = get_client() rule_data = assemble_generic_doc("access", permissions=permissions) res = client.update_endpoint_acl_rule(endpoint_id, rule_id, rule_data) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def update_command(permissions, rule_id, endpoint_id): client = get_client() rule_data = assemble_generic_doc("access", permissions=permissions) res = client.update_endpoint_acl_rule(endpoint_id, rule_id, rule_data) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "update_command", "(", "permissions", ",", "rule_id", ",", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "rule_data", "=", "assemble_generic_doc", "(", "\"access\"", ",", "permissions", "=", "permissions", ")", "res", "=", "client", "....
Executor for `globus endpoint permission update`
[ "Executor", "for", "globus", "endpoint", "permission", "update" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/permission/update.py#L21-L29
13,338
globus/globus-cli
globus_cli/safeio/errors.py
PrintableErrorField._format_value
def _format_value(self, val): """ formats a value to be good for textmode printing val must be unicode """ name = self.name + ":" if not self.multiline or "\n" not in val: val = u"{0} {1}".format(name.ljust(self._text_prefix_len), val) else: spacer = "\n" + " " * (self._text_prefix_len + 1) val = u"{0}{1}{2}".format(name, spacer, spacer.join(val.split("\n"))) return val
python
def _format_value(self, val): name = self.name + ":" if not self.multiline or "\n" not in val: val = u"{0} {1}".format(name.ljust(self._text_prefix_len), val) else: spacer = "\n" + " " * (self._text_prefix_len + 1) val = u"{0}{1}{2}".format(name, spacer, spacer.join(val.split("\n"))) return val
[ "def", "_format_value", "(", "self", ",", "val", ")", ":", "name", "=", "self", ".", "name", "+", "\":\"", "if", "not", "self", ".", "multiline", "or", "\"\\n\"", "not", "in", "val", ":", "val", "=", "u\"{0} {1}\"", ".", "format", "(", "name", ".", ...
formats a value to be good for textmode printing val must be unicode
[ "formats", "a", "value", "to", "be", "good", "for", "textmode", "printing", "val", "must", "be", "unicode" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/safeio/errors.py#L27-L39
13,339
globus/globus-cli
globus_cli/commands/endpoint/permission/delete.py
delete_command
def delete_command(endpoint_id, rule_id): """ Executor for `globus endpoint permission delete` """ client = get_client() res = client.delete_endpoint_acl_rule(endpoint_id, rule_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def delete_command(endpoint_id, rule_id): client = get_client() res = client.delete_endpoint_acl_rule(endpoint_id, rule_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "delete_command", "(", "endpoint_id", ",", "rule_id", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "client", ".", "delete_endpoint_acl_rule", "(", "endpoint_id", ",", "rule_id", ")", "formatted_print", "(", "res", ",", "text_format", "="...
Executor for `globus endpoint permission delete`
[ "Executor", "for", "globus", "endpoint", "permission", "delete" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/permission/delete.py#L14-L21
13,340
globus/globus-cli
globus_cli/commands/transfer.py
transfer_command
def transfer_command( batch, sync_level, recursive, destination, source, label, preserve_mtime, verify_checksum, encrypt, submission_id, dry_run, delete, deadline, skip_activation_check, notify, perf_cc, perf_p, perf_pp, perf_udt, ): """ Executor for `globus transfer` """ source_endpoint, cmd_source_path = source dest_endpoint, cmd_dest_path = destination if recursive and batch: raise click.UsageError( ( "You cannot use --recursive in addition to --batch. " "Instead, use --recursive on lines of --batch input " "which need it" ) ) if (cmd_source_path is None or cmd_dest_path is None) and (not batch): raise click.UsageError( ("transfer requires either SOURCE_PATH and DEST_PATH or " "--batch") ) # because python can't handle multiple **kwargs expansions in a single # call, we need to get a little bit clever # both the performance options (of which there are a few), and the # notification options (also there are a few) have elements which should be # omitted in some cases # notify comes to us clean, perf opts need more care # put them together into a dict before passing to TransferData kwargs = {} perf_opts = dict( (k, v) for (k, v) in dict( perf_cc=perf_cc, perf_p=perf_p, perf_pp=perf_pp, perf_udt=perf_udt ).items() if v is not None ) kwargs.update(perf_opts) kwargs.update(notify) client = get_client() transfer_data = TransferData( client, source_endpoint, dest_endpoint, label=label, sync_level=sync_level, verify_checksum=verify_checksum, preserve_timestamp=preserve_mtime, encrypt_data=encrypt, submission_id=submission_id, delete_destination_extra=delete, deadline=deadline, skip_activation_check=skip_activation_check, **kwargs ) if batch: @click.command() @click.option("--recursive", "-r", is_flag=True) @click.argument("source_path", type=TaskPath(base_dir=cmd_source_path)) @click.argument("dest_path", type=TaskPath(base_dir=cmd_dest_path)) def process_batch_line(dest_path, source_path, recursive): """ Parse a line of batch input and turn it into a transfer submission 
item. """ transfer_data.add_item( str(source_path), str(dest_path), recursive=recursive ) shlex_process_stdin( process_batch_line, ( "Enter transfers, line by line, as\n\n" " [--recursive] SOURCE_PATH DEST_PATH\n" ), ) else: transfer_data.add_item(cmd_source_path, cmd_dest_path, recursive=recursive) if dry_run: formatted_print( transfer_data, response_key="DATA", fields=( ("Source Path", "source_path"), ("Dest Path", "destination_path"), ("Recursive", "recursive"), ), ) # exit safely return # autoactivate after parsing all args and putting things together # skip this if skip-activation-check is given if not skip_activation_check: autoactivate(client, source_endpoint, if_expires_in=60) autoactivate(client, dest_endpoint, if_expires_in=60) res = client.submit_transfer(transfer_data) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=(("Message", "message"), ("Task ID", "task_id")), )
python
def transfer_command( batch, sync_level, recursive, destination, source, label, preserve_mtime, verify_checksum, encrypt, submission_id, dry_run, delete, deadline, skip_activation_check, notify, perf_cc, perf_p, perf_pp, perf_udt, ): source_endpoint, cmd_source_path = source dest_endpoint, cmd_dest_path = destination if recursive and batch: raise click.UsageError( ( "You cannot use --recursive in addition to --batch. " "Instead, use --recursive on lines of --batch input " "which need it" ) ) if (cmd_source_path is None or cmd_dest_path is None) and (not batch): raise click.UsageError( ("transfer requires either SOURCE_PATH and DEST_PATH or " "--batch") ) # because python can't handle multiple **kwargs expansions in a single # call, we need to get a little bit clever # both the performance options (of which there are a few), and the # notification options (also there are a few) have elements which should be # omitted in some cases # notify comes to us clean, perf opts need more care # put them together into a dict before passing to TransferData kwargs = {} perf_opts = dict( (k, v) for (k, v) in dict( perf_cc=perf_cc, perf_p=perf_p, perf_pp=perf_pp, perf_udt=perf_udt ).items() if v is not None ) kwargs.update(perf_opts) kwargs.update(notify) client = get_client() transfer_data = TransferData( client, source_endpoint, dest_endpoint, label=label, sync_level=sync_level, verify_checksum=verify_checksum, preserve_timestamp=preserve_mtime, encrypt_data=encrypt, submission_id=submission_id, delete_destination_extra=delete, deadline=deadline, skip_activation_check=skip_activation_check, **kwargs ) if batch: @click.command() @click.option("--recursive", "-r", is_flag=True) @click.argument("source_path", type=TaskPath(base_dir=cmd_source_path)) @click.argument("dest_path", type=TaskPath(base_dir=cmd_dest_path)) def process_batch_line(dest_path, source_path, recursive): """ Parse a line of batch input and turn it into a transfer submission item. 
""" transfer_data.add_item( str(source_path), str(dest_path), recursive=recursive ) shlex_process_stdin( process_batch_line, ( "Enter transfers, line by line, as\n\n" " [--recursive] SOURCE_PATH DEST_PATH\n" ), ) else: transfer_data.add_item(cmd_source_path, cmd_dest_path, recursive=recursive) if dry_run: formatted_print( transfer_data, response_key="DATA", fields=( ("Source Path", "source_path"), ("Dest Path", "destination_path"), ("Recursive", "recursive"), ), ) # exit safely return # autoactivate after parsing all args and putting things together # skip this if skip-activation-check is given if not skip_activation_check: autoactivate(client, source_endpoint, if_expires_in=60) autoactivate(client, dest_endpoint, if_expires_in=60) res = client.submit_transfer(transfer_data) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=(("Message", "message"), ("Task ID", "task_id")), )
[ "def", "transfer_command", "(", "batch", ",", "sync_level", ",", "recursive", ",", "destination", ",", "source", ",", "label", ",", "preserve_mtime", ",", "verify_checksum", ",", "encrypt", ",", "submission_id", ",", "dry_run", ",", "delete", ",", "deadline", ...
Executor for `globus transfer`
[ "Executor", "for", "globus", "transfer" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/transfer.py#L143-L268
13,341
globus/globus-cli
globus_cli/commands/endpoint/delete.py
endpoint_delete
def endpoint_delete(endpoint_id): """ Executor for `globus endpoint delete` """ client = get_client() res = client.delete_endpoint(endpoint_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def endpoint_delete(endpoint_id): client = get_client() res = client.delete_endpoint(endpoint_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "endpoint_delete", "(", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "client", ".", "delete_endpoint", "(", "endpoint_id", ")", "formatted_print", "(", "res", ",", "text_format", "=", "FORMAT_TEXT_RAW", ",", "response_key", ...
Executor for `globus endpoint delete`
[ "Executor", "for", "globus", "endpoint", "delete" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/delete.py#L11-L17
13,342
globus/globus-cli
globus_cli/commands/bookmark/delete.py
bookmark_delete
def bookmark_delete(bookmark_id_or_name): """ Executor for `globus bookmark delete` """ client = get_client() bookmark_id = resolve_id_or_name(client, bookmark_id_or_name)["id"] res = client.delete_bookmark(bookmark_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def bookmark_delete(bookmark_id_or_name): client = get_client() bookmark_id = resolve_id_or_name(client, bookmark_id_or_name)["id"] res = client.delete_bookmark(bookmark_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "bookmark_delete", "(", "bookmark_id_or_name", ")", ":", "client", "=", "get_client", "(", ")", "bookmark_id", "=", "resolve_id_or_name", "(", "client", ",", "bookmark_id_or_name", ")", "[", "\"id\"", "]", "res", "=", "client", ".", "delete_bookmark", "("...
Executor for `globus bookmark delete`
[ "Executor", "for", "globus", "bookmark", "delete" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/bookmark/delete.py#L12-L20
13,343
globus/globus-cli
globus_cli/commands/mkdir.py
mkdir_command
def mkdir_command(endpoint_plus_path): """ Executor for `globus mkdir` """ endpoint_id, path = endpoint_plus_path client = get_client() autoactivate(client, endpoint_id, if_expires_in=60) res = client.operation_mkdir(endpoint_id, path=path) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def mkdir_command(endpoint_plus_path): endpoint_id, path = endpoint_plus_path client = get_client() autoactivate(client, endpoint_id, if_expires_in=60) res = client.operation_mkdir(endpoint_id, path=path) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "mkdir_command", "(", "endpoint_plus_path", ")", ":", "endpoint_id", ",", "path", "=", "endpoint_plus_path", "client", "=", "get_client", "(", ")", "autoactivate", "(", "client", ",", "endpoint_id", ",", "if_expires_in", "=", "60", ")", "res", "=", "cli...
Executor for `globus mkdir`
[ "Executor", "for", "globus", "mkdir" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/mkdir.py#L15-L25
13,344
globus/globus-cli
globus_cli/commands/config/init.py
init_command
def init_command(default_output_format, default_myproxy_username): """ Executor for `globus config init` """ # now handle the output format, requires a little bit more care # first, prompt if it isn't given, but be clear that we have a sensible # default if they don't set it # then, make sure that if it is given, it's a valid format (discard # otherwise) # finally, set it only if given and valid if not default_output_format: safeprint( textwrap.fill( 'This must be one of "json" or "text". Other values will be ' "ignored. ENTER to skip." ) ) default_output_format = ( click.prompt( "Default CLI output format (cli.output_format)", default="text" ) .strip() .lower() ) if default_output_format not in ("json", "text"): default_output_format = None if not default_myproxy_username: safeprint(textwrap.fill("ENTER to skip.")) default_myproxy_username = click.prompt( "Default myproxy username (cli.default_myproxy_username)", default="", show_default=False, ).strip() # write to disk safeprint( "\n\nWriting updated config to {0}".format(os.path.expanduser("~/.globus.cfg")) ) write_option(OUTPUT_FORMAT_OPTNAME, default_output_format) write_option(MYPROXY_USERNAME_OPTNAME, default_myproxy_username)
python
def init_command(default_output_format, default_myproxy_username): # now handle the output format, requires a little bit more care # first, prompt if it isn't given, but be clear that we have a sensible # default if they don't set it # then, make sure that if it is given, it's a valid format (discard # otherwise) # finally, set it only if given and valid if not default_output_format: safeprint( textwrap.fill( 'This must be one of "json" or "text". Other values will be ' "ignored. ENTER to skip." ) ) default_output_format = ( click.prompt( "Default CLI output format (cli.output_format)", default="text" ) .strip() .lower() ) if default_output_format not in ("json", "text"): default_output_format = None if not default_myproxy_username: safeprint(textwrap.fill("ENTER to skip.")) default_myproxy_username = click.prompt( "Default myproxy username (cli.default_myproxy_username)", default="", show_default=False, ).strip() # write to disk safeprint( "\n\nWriting updated config to {0}".format(os.path.expanduser("~/.globus.cfg")) ) write_option(OUTPUT_FORMAT_OPTNAME, default_output_format) write_option(MYPROXY_USERNAME_OPTNAME, default_myproxy_username)
[ "def", "init_command", "(", "default_output_format", ",", "default_myproxy_username", ")", ":", "# now handle the output format, requires a little bit more care", "# first, prompt if it isn't given, but be clear that we have a sensible", "# default if they don't set it", "# then, make sure tha...
Executor for `globus config init`
[ "Executor", "for", "globus", "config", "init" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/config/init.py#L43-L83
13,345
globus/globus-cli
globus_cli/parsing/detect_and_decorate.py
detect_and_decorate
def detect_and_decorate(decorator, args, kwargs): """ Helper for applying a decorator when it is applied directly, and also applying it when it is given arguments and then applied to a function. """ # special behavior when invoked with only one non-keyword argument: act as # a normal decorator, decorating and returning that argument with # click.option if len(args) == 1 and len(kwargs) == 0 and callable(args[0]): return decorator(args[0]) # if we're not doing that, we should see no positional args # the alternative behavior is to fall through and discard *args, but this # will probably confuse someone in the future when their arguments are # silently discarded elif len(args) != 0: raise ValueError("this decorator cannot take positional args") # final case: got 0 or more kwargs, no positionals # do the function-which-returns-a-decorator dance to produce a # new decorator based on the arguments given else: def inner_decorator(f): return decorator(f, **kwargs) return inner_decorator
python
def detect_and_decorate(decorator, args, kwargs): # special behavior when invoked with only one non-keyword argument: act as # a normal decorator, decorating and returning that argument with # click.option if len(args) == 1 and len(kwargs) == 0 and callable(args[0]): return decorator(args[0]) # if we're not doing that, we should see no positional args # the alternative behavior is to fall through and discard *args, but this # will probably confuse someone in the future when their arguments are # silently discarded elif len(args) != 0: raise ValueError("this decorator cannot take positional args") # final case: got 0 or more kwargs, no positionals # do the function-which-returns-a-decorator dance to produce a # new decorator based on the arguments given else: def inner_decorator(f): return decorator(f, **kwargs) return inner_decorator
[ "def", "detect_and_decorate", "(", "decorator", ",", "args", ",", "kwargs", ")", ":", "# special behavior when invoked with only one non-keyword argument: act as", "# a normal decorator, decorating and returning that argument with", "# click.option", "if", "len", "(", "args", ")", ...
Helper for applying a decorator when it is applied directly, and also applying it when it is given arguments and then applied to a function.
[ "Helper", "for", "applying", "a", "decorator", "when", "it", "is", "applied", "directly", "and", "also", "applying", "it", "when", "it", "is", "given", "arguments", "and", "then", "applied", "to", "a", "function", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/detect_and_decorate.py#L1-L27
13,346
globus/globus-cli
globus_cli/commands/endpoint/permission/list.py
list_command
def list_command(endpoint_id): """ Executor for `globus endpoint permission list` """ client = get_client() rules = client.endpoint_acl_list(endpoint_id) resolved_ids = LazyIdentityMap( x["principal"] for x in rules if x["principal_type"] == "identity" ) def principal_str(rule): principal = rule["principal"] if rule["principal_type"] == "identity": username = resolved_ids.get(principal) return username or principal elif rule["principal_type"] == "group": return (u"https://app.globus.org/groups/{}").format(principal) else: principal = rule["principal_type"] return principal formatted_print( rules, fields=[ ("Rule ID", "id"), ("Permissions", "permissions"), ("Shared With", principal_str), ("Path", "path"), ], )
python
def list_command(endpoint_id): client = get_client() rules = client.endpoint_acl_list(endpoint_id) resolved_ids = LazyIdentityMap( x["principal"] for x in rules if x["principal_type"] == "identity" ) def principal_str(rule): principal = rule["principal"] if rule["principal_type"] == "identity": username = resolved_ids.get(principal) return username or principal elif rule["principal_type"] == "group": return (u"https://app.globus.org/groups/{}").format(principal) else: principal = rule["principal_type"] return principal formatted_print( rules, fields=[ ("Rule ID", "id"), ("Permissions", "permissions"), ("Shared With", principal_str), ("Path", "path"), ], )
[ "def", "list_command", "(", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "rules", "=", "client", ".", "endpoint_acl_list", "(", "endpoint_id", ")", "resolved_ids", "=", "LazyIdentityMap", "(", "x", "[", "\"principal\"", "]", "for", "x", "i...
Executor for `globus endpoint permission list`
[ "Executor", "for", "globus", "endpoint", "permission", "list" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/permission/list.py#L12-L44
13,347
globus/globus-cli
globus_cli/commands/bookmark/list.py
bookmark_list
def bookmark_list(): """ Executor for `globus bookmark list` """ client = get_client() bookmark_iterator = client.bookmark_list() def get_ep_name(item): ep_id = item["endpoint_id"] try: ep_doc = client.get_endpoint(ep_id) return display_name_or_cname(ep_doc) except TransferAPIError as err: if err.code == "EndpointDeleted": return "[DELETED ENDPOINT]" else: raise err formatted_print( bookmark_iterator, fields=[ ("Name", "name"), ("Bookmark ID", "id"), ("Endpoint ID", "endpoint_id"), ("Endpoint Name", get_ep_name), ("Path", "path"), ], response_key="DATA", json_converter=iterable_response_to_dict, )
python
def bookmark_list(): client = get_client() bookmark_iterator = client.bookmark_list() def get_ep_name(item): ep_id = item["endpoint_id"] try: ep_doc = client.get_endpoint(ep_id) return display_name_or_cname(ep_doc) except TransferAPIError as err: if err.code == "EndpointDeleted": return "[DELETED ENDPOINT]" else: raise err formatted_print( bookmark_iterator, fields=[ ("Name", "name"), ("Bookmark ID", "id"), ("Endpoint ID", "endpoint_id"), ("Endpoint Name", get_ep_name), ("Path", "path"), ], response_key="DATA", json_converter=iterable_response_to_dict, )
[ "def", "bookmark_list", "(", ")", ":", "client", "=", "get_client", "(", ")", "bookmark_iterator", "=", "client", ".", "bookmark_list", "(", ")", "def", "get_ep_name", "(", "item", ")", ":", "ep_id", "=", "item", "[", "\"endpoint_id\"", "]", "try", ":", ...
Executor for `globus bookmark list`
[ "Executor", "for", "globus", "bookmark", "list" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/bookmark/list.py#L15-L45
13,348
globus/globus-cli
globus_cli/commands/rm.py
rm_command
def rm_command( ignore_missing, star_silent, recursive, enable_globs, endpoint_plus_path, label, submission_id, dry_run, deadline, skip_activation_check, notify, meow, heartbeat, polling_interval, timeout, timeout_exit_code, ): """ Executor for `globus rm` """ endpoint_id, path = endpoint_plus_path client = get_client() # attempt to activate unless --skip-activation-check is given if not skip_activation_check: autoactivate(client, endpoint_id, if_expires_in=60) delete_data = DeleteData( client, endpoint_id, label=label, recursive=recursive, ignore_missing=ignore_missing, submission_id=submission_id, deadline=deadline, skip_activation_check=skip_activation_check, interpret_globs=enable_globs, **notify ) if not star_silent and enable_globs and path.endswith("*"): # not intuitive, but `click.confirm(abort=True)` prints to stdout # unnecessarily, which we don't really want... # only do this check if stderr is a pty if ( err_is_terminal() and term_is_interactive() and not click.confirm( 'Are you sure you want to delete all files matching "{}"?'.format(path), err=True, ) ): safeprint("Aborted.", write_to_stderr=True) click.get_current_context().exit(1) delete_data.add_item(path) if dry_run: formatted_print(delete_data, response_key="DATA", fields=[("Path", "path")]) # exit safely return # Print task submission to stderr so that `-Fjson` is still correctly # respected, as it will be by `task wait` res = client.submit_delete(delete_data) task_id = res["task_id"] safeprint( 'Delete task submitted under ID "{}"'.format(task_id), write_to_stderr=True ) # do a `task wait` equivalent, including printing and correct exit status task_wait_with_io( meow, heartbeat, polling_interval, timeout, task_id, timeout_exit_code, client=client, )
python
def rm_command( ignore_missing, star_silent, recursive, enable_globs, endpoint_plus_path, label, submission_id, dry_run, deadline, skip_activation_check, notify, meow, heartbeat, polling_interval, timeout, timeout_exit_code, ): endpoint_id, path = endpoint_plus_path client = get_client() # attempt to activate unless --skip-activation-check is given if not skip_activation_check: autoactivate(client, endpoint_id, if_expires_in=60) delete_data = DeleteData( client, endpoint_id, label=label, recursive=recursive, ignore_missing=ignore_missing, submission_id=submission_id, deadline=deadline, skip_activation_check=skip_activation_check, interpret_globs=enable_globs, **notify ) if not star_silent and enable_globs and path.endswith("*"): # not intuitive, but `click.confirm(abort=True)` prints to stdout # unnecessarily, which we don't really want... # only do this check if stderr is a pty if ( err_is_terminal() and term_is_interactive() and not click.confirm( 'Are you sure you want to delete all files matching "{}"?'.format(path), err=True, ) ): safeprint("Aborted.", write_to_stderr=True) click.get_current_context().exit(1) delete_data.add_item(path) if dry_run: formatted_print(delete_data, response_key="DATA", fields=[("Path", "path")]) # exit safely return # Print task submission to stderr so that `-Fjson` is still correctly # respected, as it will be by `task wait` res = client.submit_delete(delete_data) task_id = res["task_id"] safeprint( 'Delete task submitted under ID "{}"'.format(task_id), write_to_stderr=True ) # do a `task wait` equivalent, including printing and correct exit status task_wait_with_io( meow, heartbeat, polling_interval, timeout, task_id, timeout_exit_code, client=client, )
[ "def", "rm_command", "(", "ignore_missing", ",", "star_silent", ",", "recursive", ",", "enable_globs", ",", "endpoint_plus_path", ",", "label", ",", "submission_id", ",", "dry_run", ",", "deadline", ",", "skip_activation_check", ",", "notify", ",", "meow", ",", ...
Executor for `globus rm`
[ "Executor", "for", "globus", "rm" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/rm.py#L38-L118
13,349
globus/globus-cli
globus_cli/helpers/auth_flows.py
do_link_auth_flow
def do_link_auth_flow(session_params=None, force_new_client=False): """ Prompts the user with a link to authenticate with globus auth and authorize the CLI to act on their behalf. """ session_params = session_params or {} # get the ConfidentialApp client object auth_client = internal_auth_client( requires_instance=True, force_new_client=force_new_client ) # start the Confidential App Grant flow auth_client.oauth2_start_flow( redirect_uri=auth_client.base_url + "v2/web/auth-code", refresh_tokens=True, requested_scopes=SCOPES, ) # prompt additional_params = {"prompt": "login"} additional_params.update(session_params) linkprompt = "Please authenticate with Globus here" safeprint( "{0}:\n{1}\n{2}\n{1}\n".format( linkprompt, "-" * len(linkprompt), auth_client.oauth2_get_authorize_url(additional_params=additional_params), ) ) # come back with auth code auth_code = click.prompt("Enter the resulting Authorization Code here").strip() # finish auth flow exchange_code_and_store_config(auth_client, auth_code) return True
python
def do_link_auth_flow(session_params=None, force_new_client=False): session_params = session_params or {} # get the ConfidentialApp client object auth_client = internal_auth_client( requires_instance=True, force_new_client=force_new_client ) # start the Confidential App Grant flow auth_client.oauth2_start_flow( redirect_uri=auth_client.base_url + "v2/web/auth-code", refresh_tokens=True, requested_scopes=SCOPES, ) # prompt additional_params = {"prompt": "login"} additional_params.update(session_params) linkprompt = "Please authenticate with Globus here" safeprint( "{0}:\n{1}\n{2}\n{1}\n".format( linkprompt, "-" * len(linkprompt), auth_client.oauth2_get_authorize_url(additional_params=additional_params), ) ) # come back with auth code auth_code = click.prompt("Enter the resulting Authorization Code here").strip() # finish auth flow exchange_code_and_store_config(auth_client, auth_code) return True
[ "def", "do_link_auth_flow", "(", "session_params", "=", "None", ",", "force_new_client", "=", "False", ")", ":", "session_params", "=", "session_params", "or", "{", "}", "# get the ConfidentialApp client object", "auth_client", "=", "internal_auth_client", "(", "require...
Prompts the user with a link to authenticate with globus auth and authorize the CLI to act on their behalf.
[ "Prompts", "the", "user", "with", "a", "link", "to", "authenticate", "with", "globus", "auth", "and", "authorize", "the", "CLI", "to", "act", "on", "their", "behalf", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/helpers/auth_flows.py#L26-L62
13,350
globus/globus-cli
globus_cli/helpers/auth_flows.py
exchange_code_and_store_config
def exchange_code_and_store_config(auth_client, auth_code): """ Finishes auth flow after code is gotten from command line or local server. Exchanges code for tokens and gets user info from auth. Stores tokens and user info in config. """ # do a token exchange with the given code tkn = auth_client.oauth2_exchange_code_for_tokens(auth_code) tkn = tkn.by_resource_server # extract access tokens from final response transfer_at = tkn["transfer.api.globus.org"]["access_token"] transfer_at_expires = tkn["transfer.api.globus.org"]["expires_at_seconds"] transfer_rt = tkn["transfer.api.globus.org"]["refresh_token"] auth_at = tkn["auth.globus.org"]["access_token"] auth_at_expires = tkn["auth.globus.org"]["expires_at_seconds"] auth_rt = tkn["auth.globus.org"]["refresh_token"] # revoke any existing tokens for token_opt in ( TRANSFER_RT_OPTNAME, TRANSFER_AT_OPTNAME, AUTH_RT_OPTNAME, AUTH_AT_OPTNAME, ): token = lookup_option(token_opt) if token: auth_client.oauth2_revoke_token(token) # write new tokens to config write_option(TRANSFER_RT_OPTNAME, transfer_rt) write_option(TRANSFER_AT_OPTNAME, transfer_at) write_option(TRANSFER_AT_EXPIRES_OPTNAME, transfer_at_expires) write_option(AUTH_RT_OPTNAME, auth_rt) write_option(AUTH_AT_OPTNAME, auth_at) write_option(AUTH_AT_EXPIRES_OPTNAME, auth_at_expires)
python
def exchange_code_and_store_config(auth_client, auth_code): # do a token exchange with the given code tkn = auth_client.oauth2_exchange_code_for_tokens(auth_code) tkn = tkn.by_resource_server # extract access tokens from final response transfer_at = tkn["transfer.api.globus.org"]["access_token"] transfer_at_expires = tkn["transfer.api.globus.org"]["expires_at_seconds"] transfer_rt = tkn["transfer.api.globus.org"]["refresh_token"] auth_at = tkn["auth.globus.org"]["access_token"] auth_at_expires = tkn["auth.globus.org"]["expires_at_seconds"] auth_rt = tkn["auth.globus.org"]["refresh_token"] # revoke any existing tokens for token_opt in ( TRANSFER_RT_OPTNAME, TRANSFER_AT_OPTNAME, AUTH_RT_OPTNAME, AUTH_AT_OPTNAME, ): token = lookup_option(token_opt) if token: auth_client.oauth2_revoke_token(token) # write new tokens to config write_option(TRANSFER_RT_OPTNAME, transfer_rt) write_option(TRANSFER_AT_OPTNAME, transfer_at) write_option(TRANSFER_AT_EXPIRES_OPTNAME, transfer_at_expires) write_option(AUTH_RT_OPTNAME, auth_rt) write_option(AUTH_AT_OPTNAME, auth_at) write_option(AUTH_AT_EXPIRES_OPTNAME, auth_at_expires)
[ "def", "exchange_code_and_store_config", "(", "auth_client", ",", "auth_code", ")", ":", "# do a token exchange with the given code", "tkn", "=", "auth_client", ".", "oauth2_exchange_code_for_tokens", "(", "auth_code", ")", "tkn", "=", "tkn", ".", "by_resource_server", "#...
Finishes auth flow after code is gotten from command line or local server. Exchanges code for tokens and gets user info from auth. Stores tokens and user info in config.
[ "Finishes", "auth", "flow", "after", "code", "is", "gotten", "from", "command", "line", "or", "local", "server", ".", "Exchanges", "code", "for", "tokens", "and", "gets", "user", "info", "from", "auth", ".", "Stores", "tokens", "and", "user", "info", "in",...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/helpers/auth_flows.py#L107-L142
13,351
globus/globus-cli
globus_cli/commands/endpoint/search.py
endpoint_search
def endpoint_search(filter_fulltext, filter_owner_id, filter_scope): """ Executor for `globus endpoint search` """ if filter_scope == "all" and not filter_fulltext: raise click.UsageError( "When searching all endpoints (--filter-scope=all, the default), " "a full-text search filter is required. Other scopes (e.g. " "--filter-scope=recently-used) may be used without specifying " "an additional filter." ) client = get_client() owner_id = filter_owner_id if owner_id: owner_id = maybe_lookup_identity_id(owner_id) search_iterator = client.endpoint_search( filter_fulltext=filter_fulltext, filter_scope=filter_scope, filter_owner_id=owner_id, ) formatted_print( search_iterator, fields=ENDPOINT_LIST_FIELDS, json_converter=iterable_response_to_dict, )
python
def endpoint_search(filter_fulltext, filter_owner_id, filter_scope): if filter_scope == "all" and not filter_fulltext: raise click.UsageError( "When searching all endpoints (--filter-scope=all, the default), " "a full-text search filter is required. Other scopes (e.g. " "--filter-scope=recently-used) may be used without specifying " "an additional filter." ) client = get_client() owner_id = filter_owner_id if owner_id: owner_id = maybe_lookup_identity_id(owner_id) search_iterator = client.endpoint_search( filter_fulltext=filter_fulltext, filter_scope=filter_scope, filter_owner_id=owner_id, ) formatted_print( search_iterator, fields=ENDPOINT_LIST_FIELDS, json_converter=iterable_response_to_dict, )
[ "def", "endpoint_search", "(", "filter_fulltext", ",", "filter_owner_id", ",", "filter_scope", ")", ":", "if", "filter_scope", "==", "\"all\"", "and", "not", "filter_fulltext", ":", "raise", "click", ".", "UsageError", "(", "\"When searching all endpoints (--filter-scop...
Executor for `globus endpoint search`
[ "Executor", "for", "globus", "endpoint", "search" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/search.py#L42-L70
13,352
globus/globus-cli
globus_cli/commands/endpoint/update.py
endpoint_update
def endpoint_update(**kwargs): """ Executor for `globus endpoint update` """ # validate params. Requires a get call to check the endpoint type client = get_client() endpoint_id = kwargs.pop("endpoint_id") get_res = client.get_endpoint(endpoint_id) if get_res["host_endpoint_id"]: endpoint_type = "shared" elif get_res["is_globus_connect"]: endpoint_type = "personal" elif get_res["s3_url"]: endpoint_type = "s3" else: endpoint_type = "server" validate_endpoint_create_and_update_params( endpoint_type, get_res["subscription_id"], kwargs ) # make the update ep_doc = assemble_generic_doc("endpoint", **kwargs) res = client.update_endpoint(endpoint_id, ep_doc) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def endpoint_update(**kwargs): # validate params. Requires a get call to check the endpoint type client = get_client() endpoint_id = kwargs.pop("endpoint_id") get_res = client.get_endpoint(endpoint_id) if get_res["host_endpoint_id"]: endpoint_type = "shared" elif get_res["is_globus_connect"]: endpoint_type = "personal" elif get_res["s3_url"]: endpoint_type = "s3" else: endpoint_type = "server" validate_endpoint_create_and_update_params( endpoint_type, get_res["subscription_id"], kwargs ) # make the update ep_doc = assemble_generic_doc("endpoint", **kwargs) res = client.update_endpoint(endpoint_id, ep_doc) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "endpoint_update", "(", "*", "*", "kwargs", ")", ":", "# validate params. Requires a get call to check the endpoint type", "client", "=", "get_client", "(", ")", "endpoint_id", "=", "kwargs", ".", "pop", "(", "\"endpoint_id\"", ")", "get_res", "=", "client", ...
Executor for `globus endpoint update`
[ "Executor", "for", "globus", "endpoint", "update" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/update.py#L17-L41
13,353
globus/globus-cli
globus_cli/safeio/write.py
safeprint
def safeprint(message, write_to_stderr=False, newline=True): """ Wrapper around click.echo used to encapsulate its functionality. Also protects against EPIPE during click.echo calls, as this can happen normally in piped commands when the consumer closes before the producer. """ try: click.echo(message, nl=newline, err=write_to_stderr) except IOError as err: if err.errno is errno.EPIPE: pass else: raise
python
def safeprint(message, write_to_stderr=False, newline=True): try: click.echo(message, nl=newline, err=write_to_stderr) except IOError as err: if err.errno is errno.EPIPE: pass else: raise
[ "def", "safeprint", "(", "message", ",", "write_to_stderr", "=", "False", ",", "newline", "=", "True", ")", ":", "try", ":", "click", ".", "echo", "(", "message", ",", "nl", "=", "newline", ",", "err", "=", "write_to_stderr", ")", "except", "IOError", ...
Wrapper around click.echo used to encapsulate its functionality. Also protects against EPIPE during click.echo calls, as this can happen normally in piped commands when the consumer closes before the producer.
[ "Wrapper", "around", "click", ".", "echo", "used", "to", "encapsulate", "its", "functionality", ".", "Also", "protects", "against", "EPIPE", "during", "click", ".", "echo", "calls", "as", "this", "can", "happen", "normally", "in", "piped", "commands", "when", ...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/safeio/write.py#L12-L24
13,354
globus/globus-cli
globus_cli/parsing/shared_options.py
common_options
def common_options(*args, **kwargs): """ This is a multi-purpose decorator for applying a "base" set of options shared by all commands. It can be applied either directly, or given keyword arguments. Usage: >>> @common_options >>> def mycommand(abc, xyz): >>> ... or >>> @common_options(no_format_option=True) >>> def mycommand(abc, xyz): >>> ... """ def decorate(f, **kwargs): """ Work of actually decorating a function -- wrapped in here because we want to dispatch depending on how `common_options` is invoked """ f = version_option(f) f = debug_option(f) f = verbose_option(f) f = click.help_option("-h", "--help")(f) # if the format option is being allowed, it needs to be applied to `f` if not kwargs.get("no_format_option"): f = format_option(f) # if the --map-http-status option is being allowed, ... if not kwargs.get("no_map_http_status_option"): f = map_http_status_option(f) return f return detect_and_decorate(decorate, args, kwargs)
python
def common_options(*args, **kwargs): def decorate(f, **kwargs): """ Work of actually decorating a function -- wrapped in here because we want to dispatch depending on how `common_options` is invoked """ f = version_option(f) f = debug_option(f) f = verbose_option(f) f = click.help_option("-h", "--help")(f) # if the format option is being allowed, it needs to be applied to `f` if not kwargs.get("no_format_option"): f = format_option(f) # if the --map-http-status option is being allowed, ... if not kwargs.get("no_map_http_status_option"): f = map_http_status_option(f) return f return detect_and_decorate(decorate, args, kwargs)
[ "def", "common_options", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "decorate", "(", "f", ",", "*", "*", "kwargs", ")", ":", "\"\"\"\n Work of actually decorating a function -- wrapped in here because we\n want to dispatch depending on how `co...
This is a multi-purpose decorator for applying a "base" set of options shared by all commands. It can be applied either directly, or given keyword arguments. Usage: >>> @common_options >>> def mycommand(abc, xyz): >>> ... or >>> @common_options(no_format_option=True) >>> def mycommand(abc, xyz): >>> ...
[ "This", "is", "a", "multi", "-", "purpose", "decorator", "for", "applying", "a", "base", "set", "of", "options", "shared", "by", "all", "commands", ".", "It", "can", "be", "applied", "either", "directly", "or", "given", "keyword", "arguments", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/shared_options.py#L18-L57
13,355
globus/globus-cli
globus_cli/parsing/shared_options.py
validate_endpoint_create_and_update_params
def validate_endpoint_create_and_update_params(endpoint_type, managed, params): """ Given an endpoint type of "shared" "server" or "personal" and option values Confirms the option values are valid for the given endpoint """ # options only allowed for GCS endpoints if endpoint_type != "server": # catch params with two option flags if params["public"] is False: raise click.UsageError( "Option --private only allowed " "for Globus Connect Server endpoints" ) # catch any params only usable with GCS for option in [ "public", "myproxy_dn", "myproxy_server", "oauth_server", "location", "network_use", "max_concurrency", "preferred_concurrency", "max_parallelism", "preferred_parallelism", ]: if params[option] is not None: raise click.UsageError( ( "Option --{} can only be used with Globus Connect Server " "endpoints".format(option.replace("_", "-")) ) ) # if the endpoint was not previously managed, and is not being passed # a subscription id, it cannot use managed endpoint only fields if (not managed) and not (params["subscription_id"] or params["managed"]): for option in [ "network_use", "max_concurrency", "preferred_concurrency", "max_parallelism", "preferred_parallelism", ]: if params[option] is not None: raise click.UsageError( ( "Option --{} can only be used with managed " "endpoints".format(option.replace("_", "-")) ) ) # because the Transfer service doesn't do network use level updates in a # patchy way, *both* endpoint `POST`s *and* `PUT`s must either use # - `network_use='custom'` with *every* other parameter specified (which # is validated by the service), or # - a preset/absent `network_use` with *no* other parameter specified # (which is *not* validated by the service; in this case, Transfer will # accept but ignore the others parameters if given, leading to user # confusion if we don't do this validation check) custom_network_use_params = ( "max_concurrency", "preferred_concurrency", "max_parallelism", "preferred_parallelism", ) if params["network_use"] != 
"custom": for option in custom_network_use_params: if params[option] is not None: raise click.UsageError( "The {} options require you use --network-use=custom.".format( "/".join( "--" + option.replace("_", "-") for option in custom_network_use_params ) ) ) # make sure --(no-)managed and --subscription-id are mutually exclusive # if --managed given pass DEFAULT as the subscription_id # if --no-managed given, pass None managed_flag = params.get("managed") if managed_flag is not None: params.pop("managed") if managed_flag: params["subscription_id"] = params.get("subscription_id") or "DEFAULT" else: if params.get("subscription_id"): raise click.UsageError( "Cannot specify --subscription-id and " "use the --no-managed option." ) params["subscription_id"] = EXPLICIT_NULL # make sure --no-default-directory are mutually exclusive # if --no-managed given, pass an EXPLICIT_NULL as the default directory if params.get("no_default_directory"): if params.get("default_directory"): raise click.UsageError( "--no-default-directory and --default-directory are mutually " "exclusive." ) else: params["default_directory"] = EXPLICIT_NULL params.pop("no_default_directory")
python
def validate_endpoint_create_and_update_params(endpoint_type, managed, params): # options only allowed for GCS endpoints if endpoint_type != "server": # catch params with two option flags if params["public"] is False: raise click.UsageError( "Option --private only allowed " "for Globus Connect Server endpoints" ) # catch any params only usable with GCS for option in [ "public", "myproxy_dn", "myproxy_server", "oauth_server", "location", "network_use", "max_concurrency", "preferred_concurrency", "max_parallelism", "preferred_parallelism", ]: if params[option] is not None: raise click.UsageError( ( "Option --{} can only be used with Globus Connect Server " "endpoints".format(option.replace("_", "-")) ) ) # if the endpoint was not previously managed, and is not being passed # a subscription id, it cannot use managed endpoint only fields if (not managed) and not (params["subscription_id"] or params["managed"]): for option in [ "network_use", "max_concurrency", "preferred_concurrency", "max_parallelism", "preferred_parallelism", ]: if params[option] is not None: raise click.UsageError( ( "Option --{} can only be used with managed " "endpoints".format(option.replace("_", "-")) ) ) # because the Transfer service doesn't do network use level updates in a # patchy way, *both* endpoint `POST`s *and* `PUT`s must either use # - `network_use='custom'` with *every* other parameter specified (which # is validated by the service), or # - a preset/absent `network_use` with *no* other parameter specified # (which is *not* validated by the service; in this case, Transfer will # accept but ignore the others parameters if given, leading to user # confusion if we don't do this validation check) custom_network_use_params = ( "max_concurrency", "preferred_concurrency", "max_parallelism", "preferred_parallelism", ) if params["network_use"] != "custom": for option in custom_network_use_params: if params[option] is not None: raise click.UsageError( "The {} options require you use 
--network-use=custom.".format( "/".join( "--" + option.replace("_", "-") for option in custom_network_use_params ) ) ) # make sure --(no-)managed and --subscription-id are mutually exclusive # if --managed given pass DEFAULT as the subscription_id # if --no-managed given, pass None managed_flag = params.get("managed") if managed_flag is not None: params.pop("managed") if managed_flag: params["subscription_id"] = params.get("subscription_id") or "DEFAULT" else: if params.get("subscription_id"): raise click.UsageError( "Cannot specify --subscription-id and " "use the --no-managed option." ) params["subscription_id"] = EXPLICIT_NULL # make sure --no-default-directory are mutually exclusive # if --no-managed given, pass an EXPLICIT_NULL as the default directory if params.get("no_default_directory"): if params.get("default_directory"): raise click.UsageError( "--no-default-directory and --default-directory are mutually " "exclusive." ) else: params["default_directory"] = EXPLICIT_NULL params.pop("no_default_directory")
[ "def", "validate_endpoint_create_and_update_params", "(", "endpoint_type", ",", "managed", ",", "params", ")", ":", "# options only allowed for GCS endpoints", "if", "endpoint_type", "!=", "\"server\"", ":", "# catch params with two option flags", "if", "params", "[", "\"publ...
Given an endpoint type of "shared" "server" or "personal" and option values Confirms the option values are valid for the given endpoint
[ "Given", "an", "endpoint", "type", "of", "shared", "server", "or", "personal", "and", "option", "values", "Confirms", "the", "option", "values", "are", "valid", "for", "the", "given", "endpoint" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/shared_options.py#L296-L400
13,356
globus/globus-cli
globus_cli/parsing/shared_options.py
task_id_arg
def task_id_arg(*args, **kwargs): """ This is the `TASK_ID` argument consumed by many Transfer Task operations. It accept a toggle on whether or not it is required Usage: >>> @task_id_option >>> def command_func(task_id): >>> ... or >>> @task_id_option(required=False) >>> def command_func(task_id): >>> ... By default, the task ID is made required; pass `required=False` to the decorator arguments to make it optional. """ def inner_decorator(f, required=True): f = click.argument("TASK_ID", required=required)(f) return f return detect_and_decorate(inner_decorator, args, kwargs)
python
def task_id_arg(*args, **kwargs): def inner_decorator(f, required=True): f = click.argument("TASK_ID", required=required)(f) return f return detect_and_decorate(inner_decorator, args, kwargs)
[ "def", "task_id_arg", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "inner_decorator", "(", "f", ",", "required", "=", "True", ")", ":", "f", "=", "click", ".", "argument", "(", "\"TASK_ID\"", ",", "required", "=", "required", ")", "(",...
This is the `TASK_ID` argument consumed by many Transfer Task operations. It accept a toggle on whether or not it is required Usage: >>> @task_id_option >>> def command_func(task_id): >>> ... or >>> @task_id_option(required=False) >>> def command_func(task_id): >>> ... By default, the task ID is made required; pass `required=False` to the decorator arguments to make it optional.
[ "This", "is", "the", "TASK_ID", "argument", "consumed", "by", "many", "Transfer", "Task", "operations", ".", "It", "accept", "a", "toggle", "on", "whether", "or", "not", "it", "is", "required" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/shared_options.py#L403-L428
13,357
globus/globus-cli
globus_cli/parsing/shared_options.py
task_submission_options
def task_submission_options(f): """ Options shared by both transfer and delete task submission """ def notify_opt_callback(ctx, param, value): """ Parse --notify - "" is the same as "off" - parse by lowercase, comma-split, strip spaces - "off,x" is invalid for any x - "on,x" is valid for any valid x (other than "off") - "failed", "succeeded", "inactive" are normal vals In code, produces True, False, or a set """ # if no value was set, don't set any explicit options # the API default is "everything on" if value is None: return {} value = value.lower() value = [x.strip() for x in value.split(",")] # [""] is what you'll get if value is "" to start with # special-case it into "off", which helps avoid surprising scripts # which take a notification settings as inputs and build --notify if value == [""]: value = ["off"] off = "off" in value on = "on" in value # set-ize it -- duplicates are fine vals = set([x for x in value if x not in ("off", "on")]) if (vals or on) and off: raise click.UsageError('--notify cannot accept "off" and another value') allowed_vals = set(("on", "succeeded", "failed", "inactive")) if not vals <= allowed_vals: raise click.UsageError( "--notify received at least one invalid value among {}".format( list(vals) ) ) # return the notification options to send! # on means don't set anything (default) if on: return {} # off means turn off everything if off: return { "notify_on_succeeded": False, "notify_on_failed": False, "notify_on_inactive": False, } # otherwise, return the exact set of values seen else: return { "notify_on_succeeded": "succeeded" in vals, "notify_on_failed": "failed" in vals, "notify_on_inactive": "inactive" in vals, } f = click.option( "--dry-run", is_flag=True, help=("Don't actually submit the task, print submission " "data instead"), )(f) f = click.option( "--notify", callback=notify_opt_callback, help=( "Comma separated list of task events which notify by email. 
" "'on' and 'off' may be used to enable or disable notifications " "for all event types. Otherwise, use 'succeeded', 'failed', or " "'inactive'" ), )(f) f = click.option( "--submission-id", help=( "Task submission ID, as generated by `globus task " "generate-submission-id`. Used for safe resubmission in the " "presence of network failures." ), )(f) f = click.option("--label", default=None, help="Set a label for this task.")(f) f = click.option( "--deadline", default=None, type=ISOTimeType(), help="Set a deadline for this to be canceled if not completed by.", )(f) f = click.option( "--skip-activation-check", is_flag=True, help=("Submit the task even if the endpoint(s) " "aren't currently activated."), )(f) return f
python
def task_submission_options(f): def notify_opt_callback(ctx, param, value): """ Parse --notify - "" is the same as "off" - parse by lowercase, comma-split, strip spaces - "off,x" is invalid for any x - "on,x" is valid for any valid x (other than "off") - "failed", "succeeded", "inactive" are normal vals In code, produces True, False, or a set """ # if no value was set, don't set any explicit options # the API default is "everything on" if value is None: return {} value = value.lower() value = [x.strip() for x in value.split(",")] # [""] is what you'll get if value is "" to start with # special-case it into "off", which helps avoid surprising scripts # which take a notification settings as inputs and build --notify if value == [""]: value = ["off"] off = "off" in value on = "on" in value # set-ize it -- duplicates are fine vals = set([x for x in value if x not in ("off", "on")]) if (vals or on) and off: raise click.UsageError('--notify cannot accept "off" and another value') allowed_vals = set(("on", "succeeded", "failed", "inactive")) if not vals <= allowed_vals: raise click.UsageError( "--notify received at least one invalid value among {}".format( list(vals) ) ) # return the notification options to send! # on means don't set anything (default) if on: return {} # off means turn off everything if off: return { "notify_on_succeeded": False, "notify_on_failed": False, "notify_on_inactive": False, } # otherwise, return the exact set of values seen else: return { "notify_on_succeeded": "succeeded" in vals, "notify_on_failed": "failed" in vals, "notify_on_inactive": "inactive" in vals, } f = click.option( "--dry-run", is_flag=True, help=("Don't actually submit the task, print submission " "data instead"), )(f) f = click.option( "--notify", callback=notify_opt_callback, help=( "Comma separated list of task events which notify by email. " "'on' and 'off' may be used to enable or disable notifications " "for all event types. 
Otherwise, use 'succeeded', 'failed', or " "'inactive'" ), )(f) f = click.option( "--submission-id", help=( "Task submission ID, as generated by `globus task " "generate-submission-id`. Used for safe resubmission in the " "presence of network failures." ), )(f) f = click.option("--label", default=None, help="Set a label for this task.")(f) f = click.option( "--deadline", default=None, type=ISOTimeType(), help="Set a deadline for this to be canceled if not completed by.", )(f) f = click.option( "--skip-activation-check", is_flag=True, help=("Submit the task even if the endpoint(s) " "aren't currently activated."), )(f) return f
[ "def", "task_submission_options", "(", "f", ")", ":", "def", "notify_opt_callback", "(", "ctx", ",", "param", ",", "value", ")", ":", "\"\"\"\n Parse --notify\n - \"\" is the same as \"off\"\n - parse by lowercase, comma-split, strip spaces\n - \"off,x\" i...
Options shared by both transfer and delete task submission
[ "Options", "shared", "by", "both", "transfer", "and", "delete", "task", "submission" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/shared_options.py#L431-L531
13,358
globus/globus-cli
globus_cli/parsing/shared_options.py
delete_and_rm_options
def delete_and_rm_options(*args, **kwargs): """ Options which apply both to `globus delete` and `globus rm` """ def inner_decorator(f, supports_batch=True, default_enable_globs=False): f = click.option( "--recursive", "-r", is_flag=True, help="Recursively delete dirs" )(f) f = click.option( "--ignore-missing", "-f", is_flag=True, help="Don't throw errors if the file or dir is absent", )(f) f = click.option( "--star-silent", "--unsafe", "star_silent", is_flag=True, help=( 'Don\'t prompt when the trailing character is a "*".' + (" Implicit in --batch" if supports_batch else "") ), )(f) f = click.option( "--enable-globs/--no-enable-globs", is_flag=True, default=default_enable_globs, show_default=True, help=( "Enable expansion of *, ?, and [ ] characters in the last " "component of file paths, unless they are escaped with " "a preceeding backslash, \\" ), )(f) if supports_batch: f = click.option( "--batch", is_flag=True, help=( "Accept a batch of paths on stdin (i.e. run in " "batchmode). Uses ENDPOINT_ID as passed on the " "commandline. Any commandline PATH given will be used " "as a prefix to all paths given" ), )(f) return f return detect_and_decorate(inner_decorator, args, kwargs)
python
def delete_and_rm_options(*args, **kwargs): def inner_decorator(f, supports_batch=True, default_enable_globs=False): f = click.option( "--recursive", "-r", is_flag=True, help="Recursively delete dirs" )(f) f = click.option( "--ignore-missing", "-f", is_flag=True, help="Don't throw errors if the file or dir is absent", )(f) f = click.option( "--star-silent", "--unsafe", "star_silent", is_flag=True, help=( 'Don\'t prompt when the trailing character is a "*".' + (" Implicit in --batch" if supports_batch else "") ), )(f) f = click.option( "--enable-globs/--no-enable-globs", is_flag=True, default=default_enable_globs, show_default=True, help=( "Enable expansion of *, ?, and [ ] characters in the last " "component of file paths, unless they are escaped with " "a preceeding backslash, \\" ), )(f) if supports_batch: f = click.option( "--batch", is_flag=True, help=( "Accept a batch of paths on stdin (i.e. run in " "batchmode). Uses ENDPOINT_ID as passed on the " "commandline. Any commandline PATH given will be used " "as a prefix to all paths given" ), )(f) return f return detect_and_decorate(inner_decorator, args, kwargs)
[ "def", "delete_and_rm_options", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "inner_decorator", "(", "f", ",", "supports_batch", "=", "True", ",", "default_enable_globs", "=", "False", ")", ":", "f", "=", "click", ".", "option", "(", "\"--...
Options which apply both to `globus delete` and `globus rm`
[ "Options", "which", "apply", "both", "to", "globus", "delete", "and", "globus", "rm" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/shared_options.py#L534-L583
13,359
globus/globus-cli
globus_cli/parsing/shared_options.py
server_add_and_update_opts
def server_add_and_update_opts(*args, **kwargs): """ shared collection of options for `globus transfer endpoint server add` and `globus transfer endpoint server update`. Accepts a toggle to know if it's being used as `add` or `update`. usage: >>> @server_add_and_update_opts >>> def command_func(subject, port, scheme, hostname): >>> ... or >>> @server_add_and_update_opts(add=True) >>> def command_func(subject, port, scheme, hostname): >>> ... """ def port_range_callback(ctx, param, value): if not value: return None value = value.lower().strip() if value == "unspecified": return None, None if value == "unrestricted": return 1024, 65535 try: lower, upper = map(int, value.split("-")) except ValueError: # too many/few values from split or non-integer(s) raise click.BadParameter( "must specify as 'unspecified', " "'unrestricted', or as range separated " "by a hyphen (e.g. '50000-51000')" ) if not 1024 <= lower <= 65535 or not 1024 <= upper <= 65535: raise click.BadParameter("must be within the 1024-65535 range") return (lower, upper) if lower <= upper else (upper, lower) def inner_decorator(f, add=False): f = click.option("--hostname", required=add, help="Server Hostname.")(f) default_scheme = "gsiftp" if add else None f = click.option( "--scheme", help="Scheme for the Server.", type=CaseInsensitiveChoice(("gsiftp", "ftp")), default=default_scheme, show_default=add, )(f) default_port = 2811 if add else None f = click.option( "--port", help="Port for Globus control channel connections.", type=int, default=default_port, show_default=add, )(f) f = click.option( "--subject", help=( "Subject of the X509 Certificate of the server. When " "unspecified, the CN must match the server hostname." 
), )(f) for adjective, our_preposition, their_preposition in [ ("incoming", "to", "from"), ("outgoing", "from", "to"), ]: f = click.option( "--{}-data-ports".format(adjective), callback=port_range_callback, help="Indicate to firewall administrators at other sites how to " "allow {} traffic {} this server {} their own. Specify as " "either 'unspecified', 'unrestricted', or as range of " "ports separated by a hyphen (e.g. '50000-51000') within " "the 1024-65535 range.".format( adjective, our_preposition, their_preposition ), )(f) return f return detect_and_decorate(inner_decorator, args, kwargs)
python
def server_add_and_update_opts(*args, **kwargs): def port_range_callback(ctx, param, value): if not value: return None value = value.lower().strip() if value == "unspecified": return None, None if value == "unrestricted": return 1024, 65535 try: lower, upper = map(int, value.split("-")) except ValueError: # too many/few values from split or non-integer(s) raise click.BadParameter( "must specify as 'unspecified', " "'unrestricted', or as range separated " "by a hyphen (e.g. '50000-51000')" ) if not 1024 <= lower <= 65535 or not 1024 <= upper <= 65535: raise click.BadParameter("must be within the 1024-65535 range") return (lower, upper) if lower <= upper else (upper, lower) def inner_decorator(f, add=False): f = click.option("--hostname", required=add, help="Server Hostname.")(f) default_scheme = "gsiftp" if add else None f = click.option( "--scheme", help="Scheme for the Server.", type=CaseInsensitiveChoice(("gsiftp", "ftp")), default=default_scheme, show_default=add, )(f) default_port = 2811 if add else None f = click.option( "--port", help="Port for Globus control channel connections.", type=int, default=default_port, show_default=add, )(f) f = click.option( "--subject", help=( "Subject of the X509 Certificate of the server. When " "unspecified, the CN must match the server hostname." ), )(f) for adjective, our_preposition, their_preposition in [ ("incoming", "to", "from"), ("outgoing", "from", "to"), ]: f = click.option( "--{}-data-ports".format(adjective), callback=port_range_callback, help="Indicate to firewall administrators at other sites how to " "allow {} traffic {} this server {} their own. Specify as " "either 'unspecified', 'unrestricted', or as range of " "ports separated by a hyphen (e.g. '50000-51000') within " "the 1024-65535 range.".format( adjective, our_preposition, their_preposition ), )(f) return f return detect_and_decorate(inner_decorator, args, kwargs)
[ "def", "server_add_and_update_opts", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "port_range_callback", "(", "ctx", ",", "param", ",", "value", ")", ":", "if", "not", "value", ":", "return", "None", "value", "=", "value", ".", "lower", ...
shared collection of options for `globus transfer endpoint server add` and `globus transfer endpoint server update`. Accepts a toggle to know if it's being used as `add` or `update`. usage: >>> @server_add_and_update_opts >>> def command_func(subject, port, scheme, hostname): >>> ... or >>> @server_add_and_update_opts(add=True) >>> def command_func(subject, port, scheme, hostname): >>> ...
[ "shared", "collection", "of", "options", "for", "globus", "transfer", "endpoint", "server", "add", "and", "globus", "transfer", "endpoint", "server", "update", ".", "Accepts", "a", "toggle", "to", "know", "if", "it", "s", "being", "used", "as", "add", "or", ...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/shared_options.py#L664-L753
13,360
globus/globus-cli
globus_cli/commands/endpoint/deactivate.py
endpoint_deactivate
def endpoint_deactivate(endpoint_id): """ Executor for `globus endpoint deactivate` """ client = get_client() res = client.endpoint_deactivate(endpoint_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def endpoint_deactivate(endpoint_id): client = get_client() res = client.endpoint_deactivate(endpoint_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "endpoint_deactivate", "(", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "client", ".", "endpoint_deactivate", "(", "endpoint_id", ")", "formatted_print", "(", "res", ",", "text_format", "=", "FORMAT_TEXT_RAW", ",", "respons...
Executor for `globus endpoint deactivate`
[ "Executor", "for", "globus", "endpoint", "deactivate" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/deactivate.py#L11-L17
13,361
globus/globus-cli
globus_cli/services/auth.py
LazyIdentityMap._lookup_identity_names
def _lookup_identity_names(self): """ Batch resolve identities to usernames. Returns a dict mapping IDs to Usernames """ id_batch_size = 100 # fetch in batches of 100, store in a dict ac = get_auth_client() self._resolved_map = {} for i in range(0, len(self.identity_ids), id_batch_size): chunk = self.identity_ids[i : i + id_batch_size] resolved_result = ac.get_identities(ids=chunk) for x in resolved_result["identities"]: self._resolved_map[x["id"]] = x["username"]
python
def _lookup_identity_names(self): id_batch_size = 100 # fetch in batches of 100, store in a dict ac = get_auth_client() self._resolved_map = {} for i in range(0, len(self.identity_ids), id_batch_size): chunk = self.identity_ids[i : i + id_batch_size] resolved_result = ac.get_identities(ids=chunk) for x in resolved_result["identities"]: self._resolved_map[x["id"]] = x["username"]
[ "def", "_lookup_identity_names", "(", "self", ")", ":", "id_batch_size", "=", "100", "# fetch in batches of 100, store in a dict", "ac", "=", "get_auth_client", "(", ")", "self", ".", "_resolved_map", "=", "{", "}", "for", "i", "in", "range", "(", "0", ",", "l...
Batch resolve identities to usernames. Returns a dict mapping IDs to Usernames
[ "Batch", "resolve", "identities", "to", "usernames", ".", "Returns", "a", "dict", "mapping", "IDs", "to", "Usernames" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/services/auth.py#L96-L110
13,362
globus/globus-cli
globus_cli/config.py
internal_auth_client
def internal_auth_client(requires_instance=False, force_new_client=False): """ Looks up the values for this CLI's Instance Client in config If none exists and requires_instance is True or force_new_client is True, registers a new Instance Client with GLobus Auth If none exists and requires_instance is false, defaults to a Native Client for backwards compatibility Returns either a NativeAppAuthClient or a ConfidentialAppAuthClient """ client_id = lookup_option(CLIENT_ID_OPTNAME) client_secret = lookup_option(CLIENT_SECRET_OPTNAME) template_id = lookup_option(TEMPLATE_ID_OPTNAME) or DEFAULT_TEMPLATE_ID template_client = internal_native_client() existing = client_id and client_secret # if we are forcing a new client, delete any existing client if force_new_client and existing: existing_client = globus_sdk.ConfidentialAppAuthClient(client_id, client_secret) try: existing_client.delete("/v2/api/clients/{}".format(client_id)) # if the client secret has been invalidated or the client has # already been removed, we continue on except globus_sdk.exc.AuthAPIError: pass # if we require a new client to be made if force_new_client or (requires_instance and not existing): # register a new instance client with auth body = {"client": {"template_id": template_id, "name": "Globus CLI"}} res = template_client.post("/v2/api/clients", json_body=body) # get values and write to config credential_data = res["included"]["client_credential"] client_id = credential_data["client"] client_secret = credential_data["secret"] write_option(CLIENT_ID_OPTNAME, client_id) write_option(CLIENT_SECRET_OPTNAME, client_secret) return globus_sdk.ConfidentialAppAuthClient( client_id, client_secret, app_name="Globus CLI" ) # if we already have a client, just return it elif existing: return globus_sdk.ConfidentialAppAuthClient( client_id, client_secret, app_name="Globus CLI" ) # fall-back to a native client to not break old logins # TOOD: eventually remove this behavior else: return template_client
python
def internal_auth_client(requires_instance=False, force_new_client=False): client_id = lookup_option(CLIENT_ID_OPTNAME) client_secret = lookup_option(CLIENT_SECRET_OPTNAME) template_id = lookup_option(TEMPLATE_ID_OPTNAME) or DEFAULT_TEMPLATE_ID template_client = internal_native_client() existing = client_id and client_secret # if we are forcing a new client, delete any existing client if force_new_client and existing: existing_client = globus_sdk.ConfidentialAppAuthClient(client_id, client_secret) try: existing_client.delete("/v2/api/clients/{}".format(client_id)) # if the client secret has been invalidated or the client has # already been removed, we continue on except globus_sdk.exc.AuthAPIError: pass # if we require a new client to be made if force_new_client or (requires_instance and not existing): # register a new instance client with auth body = {"client": {"template_id": template_id, "name": "Globus CLI"}} res = template_client.post("/v2/api/clients", json_body=body) # get values and write to config credential_data = res["included"]["client_credential"] client_id = credential_data["client"] client_secret = credential_data["secret"] write_option(CLIENT_ID_OPTNAME, client_id) write_option(CLIENT_SECRET_OPTNAME, client_secret) return globus_sdk.ConfidentialAppAuthClient( client_id, client_secret, app_name="Globus CLI" ) # if we already have a client, just return it elif existing: return globus_sdk.ConfidentialAppAuthClient( client_id, client_secret, app_name="Globus CLI" ) # fall-back to a native client to not break old logins # TOOD: eventually remove this behavior else: return template_client
[ "def", "internal_auth_client", "(", "requires_instance", "=", "False", ",", "force_new_client", "=", "False", ")", ":", "client_id", "=", "lookup_option", "(", "CLIENT_ID_OPTNAME", ")", "client_secret", "=", "lookup_option", "(", "CLIENT_SECRET_OPTNAME", ")", "templat...
Looks up the values for this CLI's Instance Client in config If none exists and requires_instance is True or force_new_client is True, registers a new Instance Client with GLobus Auth If none exists and requires_instance is false, defaults to a Native Client for backwards compatibility Returns either a NativeAppAuthClient or a ConfidentialAppAuthClient
[ "Looks", "up", "the", "values", "for", "this", "CLI", "s", "Instance", "Client", "in", "config" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/config.py#L187-L242
13,363
globus/globus-cli
globus_cli/parsing/excepthook.py
exit_with_mapped_status
def exit_with_mapped_status(http_status): """ Given an HTTP Status, exit with either an error status of 1 or the status mapped by what we were given. """ # get the mapping by looking up the state and getting the mapping attr mapping = click.get_current_context().ensure_object(CommandState).http_status_map # if there is a mapped exit code, exit with that. Otherwise, exit 1 if http_status in mapping: sys.exit(mapping[http_status]) else: sys.exit(1)
python
def exit_with_mapped_status(http_status): # get the mapping by looking up the state and getting the mapping attr mapping = click.get_current_context().ensure_object(CommandState).http_status_map # if there is a mapped exit code, exit with that. Otherwise, exit 1 if http_status in mapping: sys.exit(mapping[http_status]) else: sys.exit(1)
[ "def", "exit_with_mapped_status", "(", "http_status", ")", ":", "# get the mapping by looking up the state and getting the mapping attr", "mapping", "=", "click", ".", "get_current_context", "(", ")", ".", "ensure_object", "(", "CommandState", ")", ".", "http_status_map", "...
Given an HTTP Status, exit with either an error status of 1 or the status mapped by what we were given.
[ "Given", "an", "HTTP", "Status", "exit", "with", "either", "an", "error", "status", "of", "1", "or", "the", "status", "mapped", "by", "what", "we", "were", "given", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/excepthook.py#L20-L32
13,364
globus/globus-cli
globus_cli/parsing/excepthook.py
session_hook
def session_hook(exception): """ Expects an exception with an authorization_paramaters field in its raw_json """ safeprint( "The resource you are trying to access requires you to " "re-authenticate with specific identities." ) params = exception.raw_json["authorization_parameters"] message = params.get("session_message") if message: safeprint("message: {}".format(message)) identities = params.get("session_required_identities") if identities: id_str = " ".join(identities) safeprint( "Please run\n\n" " globus session update {}\n\n" "to re-authenticate with the required identities".format(id_str) ) else: safeprint( 'Please use "globus session update" to re-authenticate ' "with specific identities".format(id_str) ) exit_with_mapped_status(exception.http_status)
python
def session_hook(exception): safeprint( "The resource you are trying to access requires you to " "re-authenticate with specific identities." ) params = exception.raw_json["authorization_parameters"] message = params.get("session_message") if message: safeprint("message: {}".format(message)) identities = params.get("session_required_identities") if identities: id_str = " ".join(identities) safeprint( "Please run\n\n" " globus session update {}\n\n" "to re-authenticate with the required identities".format(id_str) ) else: safeprint( 'Please use "globus session update" to re-authenticate ' "with specific identities".format(id_str) ) exit_with_mapped_status(exception.http_status)
[ "def", "session_hook", "(", "exception", ")", ":", "safeprint", "(", "\"The resource you are trying to access requires you to \"", "\"re-authenticate with specific identities.\"", ")", "params", "=", "exception", ".", "raw_json", "[", "\"authorization_parameters\"", "]", "messa...
Expects an exception with an authorization_paramaters field in its raw_json
[ "Expects", "an", "exception", "with", "an", "authorization_paramaters", "field", "in", "its", "raw_json" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/excepthook.py#L35-L63
13,365
globus/globus-cli
globus_cli/parsing/excepthook.py
custom_except_hook
def custom_except_hook(exc_info): """ A custom excepthook to present python errors produced by the CLI. We don't want to show end users big scary stacktraces if they aren't python programmers, so slim it down to some basic info. We keep a "DEBUGMODE" env variable kicking around to let us turn on stacktraces if we ever need them. Additionally, does global suppression of EPIPE errors, which often occur when a python command is piped to a consumer like `head` which closes its input stream before python has sent all of its output. DANGER: There is a (small) risk that this will bite us if there are EPIPE errors produced within the Globus SDK. We should keep an eye on this possibility, as it may demand more sophisticated handling of EPIPE. Possible TODO item to reduce this risk: inspect the exception and only hide EPIPE if it comes from within the globus_cli package. """ exception_type, exception, traceback = exc_info # check if we're in debug mode, and run the real excepthook if we are ctx = click.get_current_context() state = ctx.ensure_object(CommandState) if state.debug: sys.excepthook(exception_type, exception, traceback) # we're not in debug mode, do custom handling else: # if it's a click exception, re-raise as original -- Click's main # execution context will handle pretty-printing if isinstance(exception, click.ClickException): reraise(exception_type, exception, traceback) # catch any session errors to give helpful instructions # on how to use globus session update elif ( isinstance(exception, exc.GlobusAPIError) and exception.raw_json and "authorization_parameters" in exception.raw_json ): session_hook(exception) # handle the Globus-raised errors with our special hooks # these will present the output (on stderr) as JSON elif isinstance(exception, exc.TransferAPIError): if exception.code == "ClientError.AuthenticationFailed": authentication_hook(exception) else: transferapi_hook(exception) elif isinstance(exception, exc.AuthAPIError): if exception.code == 
"UNAUTHORIZED": authentication_hook(exception) # invalid_grant occurs when the users refresh tokens are not valid elif exception.message == "invalid_grant": invalidrefresh_hook(exception) else: authapi_hook(exception) elif isinstance(exception, exc.GlobusAPIError): globusapi_hook(exception) # specific checks fell through -- now check if it's any kind of # GlobusError elif isinstance(exception, exc.GlobusError): globus_generic_hook(exception) # not a GlobusError, not a ClickException -- something like ValueError # or NotImplementedError bubbled all the way up here: just print it # out, basically else: safeprint(u"{}: {}".format(exception_type.__name__, exception)) sys.exit(1)
python
def custom_except_hook(exc_info): exception_type, exception, traceback = exc_info # check if we're in debug mode, and run the real excepthook if we are ctx = click.get_current_context() state = ctx.ensure_object(CommandState) if state.debug: sys.excepthook(exception_type, exception, traceback) # we're not in debug mode, do custom handling else: # if it's a click exception, re-raise as original -- Click's main # execution context will handle pretty-printing if isinstance(exception, click.ClickException): reraise(exception_type, exception, traceback) # catch any session errors to give helpful instructions # on how to use globus session update elif ( isinstance(exception, exc.GlobusAPIError) and exception.raw_json and "authorization_parameters" in exception.raw_json ): session_hook(exception) # handle the Globus-raised errors with our special hooks # these will present the output (on stderr) as JSON elif isinstance(exception, exc.TransferAPIError): if exception.code == "ClientError.AuthenticationFailed": authentication_hook(exception) else: transferapi_hook(exception) elif isinstance(exception, exc.AuthAPIError): if exception.code == "UNAUTHORIZED": authentication_hook(exception) # invalid_grant occurs when the users refresh tokens are not valid elif exception.message == "invalid_grant": invalidrefresh_hook(exception) else: authapi_hook(exception) elif isinstance(exception, exc.GlobusAPIError): globusapi_hook(exception) # specific checks fell through -- now check if it's any kind of # GlobusError elif isinstance(exception, exc.GlobusError): globus_generic_hook(exception) # not a GlobusError, not a ClickException -- something like ValueError # or NotImplementedError bubbled all the way up here: just print it # out, basically else: safeprint(u"{}: {}".format(exception_type.__name__, exception)) sys.exit(1)
[ "def", "custom_except_hook", "(", "exc_info", ")", ":", "exception_type", ",", "exception", ",", "traceback", "=", "exc_info", "# check if we're in debug mode, and run the real excepthook if we are", "ctx", "=", "click", ".", "get_current_context", "(", ")", "state", "=",...
A custom excepthook to present python errors produced by the CLI. We don't want to show end users big scary stacktraces if they aren't python programmers, so slim it down to some basic info. We keep a "DEBUGMODE" env variable kicking around to let us turn on stacktraces if we ever need them. Additionally, does global suppression of EPIPE errors, which often occur when a python command is piped to a consumer like `head` which closes its input stream before python has sent all of its output. DANGER: There is a (small) risk that this will bite us if there are EPIPE errors produced within the Globus SDK. We should keep an eye on this possibility, as it may demand more sophisticated handling of EPIPE. Possible TODO item to reduce this risk: inspect the exception and only hide EPIPE if it comes from within the globus_cli package.
[ "A", "custom", "excepthook", "to", "present", "python", "errors", "produced", "by", "the", "CLI", ".", "We", "don", "t", "want", "to", "show", "end", "users", "big", "scary", "stacktraces", "if", "they", "aren", "t", "python", "programmers", "so", "slim", ...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/parsing/excepthook.py#L146-L217
13,366
globus/globus-cli
globus_cli/commands/task/event_list.py
task_event_list
def task_event_list(task_id, limit, filter_errors, filter_non_errors): """ Executor for `globus task-event-list` """ client = get_client() # cannot filter by both errors and non errors if filter_errors and filter_non_errors: raise click.UsageError("Cannot filter by both errors and non errors") elif filter_errors: filter_string = "is_error:1" elif filter_non_errors: filter_string = "is_error:0" else: filter_string = "" event_iterator = client.task_event_list( task_id, num_results=limit, filter=filter_string ) def squashed_json_details(x): is_json = False try: loaded = json.loads(x["details"]) is_json = True except ValueError: loaded = x["details"] if is_json: return json.dumps(loaded, separators=(",", ":"), sort_keys=True) else: return loaded.replace("\n", "\\n") formatted_print( event_iterator, fields=( ("Time", "time"), ("Code", "code"), ("Is Error", "is_error"), ("Details", squashed_json_details), ), json_converter=iterable_response_to_dict, )
python
def task_event_list(task_id, limit, filter_errors, filter_non_errors): client = get_client() # cannot filter by both errors and non errors if filter_errors and filter_non_errors: raise click.UsageError("Cannot filter by both errors and non errors") elif filter_errors: filter_string = "is_error:1" elif filter_non_errors: filter_string = "is_error:0" else: filter_string = "" event_iterator = client.task_event_list( task_id, num_results=limit, filter=filter_string ) def squashed_json_details(x): is_json = False try: loaded = json.loads(x["details"]) is_json = True except ValueError: loaded = x["details"] if is_json: return json.dumps(loaded, separators=(",", ":"), sort_keys=True) else: return loaded.replace("\n", "\\n") formatted_print( event_iterator, fields=( ("Time", "time"), ("Code", "code"), ("Is Error", "is_error"), ("Details", squashed_json_details), ), json_converter=iterable_response_to_dict, )
[ "def", "task_event_list", "(", "task_id", ",", "limit", ",", "filter_errors", ",", "filter_non_errors", ")", ":", "client", "=", "get_client", "(", ")", "# cannot filter by both errors and non errors", "if", "filter_errors", "and", "filter_non_errors", ":", "raise", "...
Executor for `globus task-event-list`
[ "Executor", "for", "globus", "task", "-", "event", "-", "list" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/task/event_list.py#L16-L61
13,367
globus/globus-cli
globus_cli/commands/endpoint/role/delete.py
role_delete
def role_delete(role_id, endpoint_id): """ Executor for `globus endpoint role delete` """ client = get_client() res = client.delete_endpoint_role(endpoint_id, role_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
python
def role_delete(role_id, endpoint_id): client = get_client() res = client.delete_endpoint_role(endpoint_id, role_id) formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
[ "def", "role_delete", "(", "role_id", ",", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "client", ".", "delete_endpoint_role", "(", "endpoint_id", ",", "role_id", ")", "formatted_print", "(", "res", ",", "text_format", "=", "FO...
Executor for `globus endpoint role delete`
[ "Executor", "for", "globus", "endpoint", "role", "delete" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/role/delete.py#L12-L18
13,368
globus/globus-cli
globus_cli/commands/ls.py
ls_command
def ls_command( endpoint_plus_path, recursive_depth_limit, recursive, long_output, show_hidden, filter_val, ): """ Executor for `globus ls` """ endpoint_id, path = endpoint_plus_path # do autoactivation before the `ls` call so that recursive invocations # won't do this repeatedly, and won't have to instantiate new clients client = get_client() autoactivate(client, endpoint_id, if_expires_in=60) # create the query paramaters to send to operation_ls ls_params = {"show_hidden": int(show_hidden)} if path: ls_params["path"] = path if filter_val: # this char has special meaning in the LS API's filter clause # can't be part of the pattern (but we don't support globbing across # dir structures anyway) if "/" in filter_val: raise click.UsageError('--filter cannot contain "/"') # format into a simple filter clause which operates on filenames ls_params["filter"] = "name:{}".format(filter_val) # get the `ls` result if recursive: # NOTE: # --recursive and --filter have an interplay that some users may find # surprising # if we're asked to change or "improve" the behavior in the future, we # could do so with "type:dir" or "type:file" filters added in, and # potentially work out some viable behavior based on what people want res = client.recursive_operation_ls( endpoint_id, depth=recursive_depth_limit, **ls_params ) else: res = client.operation_ls(endpoint_id, **ls_params) def cleaned_item_name(item): return item["name"] + ("/" if item["type"] == "dir" else "") # and then print it, per formatting rules formatted_print( res, fields=[ ("Permissions", "permissions"), ("User", "user"), ("Group", "group"), ("Size", "size"), ("Last Modified", "last_modified"), ("File Type", "type"), ("Filename", cleaned_item_name), ], simple_text=( None if long_output or is_verbose() or not outformat_is_text() else "\n".join(cleaned_item_name(x) for x in res) ), json_converter=iterable_response_to_dict, )
python
def ls_command( endpoint_plus_path, recursive_depth_limit, recursive, long_output, show_hidden, filter_val, ): endpoint_id, path = endpoint_plus_path # do autoactivation before the `ls` call so that recursive invocations # won't do this repeatedly, and won't have to instantiate new clients client = get_client() autoactivate(client, endpoint_id, if_expires_in=60) # create the query paramaters to send to operation_ls ls_params = {"show_hidden": int(show_hidden)} if path: ls_params["path"] = path if filter_val: # this char has special meaning in the LS API's filter clause # can't be part of the pattern (but we don't support globbing across # dir structures anyway) if "/" in filter_val: raise click.UsageError('--filter cannot contain "/"') # format into a simple filter clause which operates on filenames ls_params["filter"] = "name:{}".format(filter_val) # get the `ls` result if recursive: # NOTE: # --recursive and --filter have an interplay that some users may find # surprising # if we're asked to change or "improve" the behavior in the future, we # could do so with "type:dir" or "type:file" filters added in, and # potentially work out some viable behavior based on what people want res = client.recursive_operation_ls( endpoint_id, depth=recursive_depth_limit, **ls_params ) else: res = client.operation_ls(endpoint_id, **ls_params) def cleaned_item_name(item): return item["name"] + ("/" if item["type"] == "dir" else "") # and then print it, per formatting rules formatted_print( res, fields=[ ("Permissions", "permissions"), ("User", "user"), ("Group", "group"), ("Size", "size"), ("Last Modified", "last_modified"), ("File Type", "type"), ("Filename", cleaned_item_name), ], simple_text=( None if long_output or is_verbose() or not outformat_is_text() else "\n".join(cleaned_item_name(x) for x in res) ), json_converter=iterable_response_to_dict, )
[ "def", "ls_command", "(", "endpoint_plus_path", ",", "recursive_depth_limit", ",", "recursive", ",", "long_output", ",", "show_hidden", ",", "filter_val", ",", ")", ":", "endpoint_id", ",", "path", "=", "endpoint_plus_path", "# do autoactivation before the `ls` call so th...
Executor for `globus ls`
[ "Executor", "for", "globus", "ls" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/ls.py#L84-L150
13,369
globus/globus-cli
globus_cli/commands/task/generate_submission_id.py
generate_submission_id
def generate_submission_id(): """ Executor for `globus task generate-submission-id` """ client = get_client() res = client.get_submission_id() formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="value")
python
def generate_submission_id(): client = get_client() res = client.get_submission_id() formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="value")
[ "def", "generate_submission_id", "(", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "client", ".", "get_submission_id", "(", ")", "formatted_print", "(", "res", ",", "text_format", "=", "FORMAT_TEXT_RAW", ",", "response_key", "=", "\"value\"", "...
Executor for `globus task generate-submission-id`
[ "Executor", "for", "globus", "task", "generate", "-", "submission", "-", "id" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/task/generate_submission_id.py#L22-L29
13,370
globus/globus-cli
globus_cli/commands/endpoint/permission/create.py
create_command
def create_command( principal, permissions, endpoint_plus_path, notify_email, notify_message ): """ Executor for `globus endpoint permission create` """ if not principal: raise click.UsageError("A security principal is required for this command") endpoint_id, path = endpoint_plus_path principal_type, principal_val = principal client = get_client() if principal_type == "identity": principal_val = maybe_lookup_identity_id(principal_val) if not principal_val: raise click.UsageError( "Identity does not exist. " "Use --provision-identity to auto-provision an identity." ) elif principal_type == "provision-identity": principal_val = maybe_lookup_identity_id(principal_val, provision=True) principal_type = "identity" if not notify_email: notify_message = None rule_data = assemble_generic_doc( "access", permissions=permissions, principal=principal_val, principal_type=principal_type, path=path, notify_email=notify_email, notify_message=notify_message, ) res = client.add_endpoint_acl_rule(endpoint_id, rule_data) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=[("Message", "message"), ("Rule ID", "access_id")], )
python
def create_command( principal, permissions, endpoint_plus_path, notify_email, notify_message ): if not principal: raise click.UsageError("A security principal is required for this command") endpoint_id, path = endpoint_plus_path principal_type, principal_val = principal client = get_client() if principal_type == "identity": principal_val = maybe_lookup_identity_id(principal_val) if not principal_val: raise click.UsageError( "Identity does not exist. " "Use --provision-identity to auto-provision an identity." ) elif principal_type == "provision-identity": principal_val = maybe_lookup_identity_id(principal_val, provision=True) principal_type = "identity" if not notify_email: notify_message = None rule_data = assemble_generic_doc( "access", permissions=permissions, principal=principal_val, principal_type=principal_type, path=path, notify_email=notify_email, notify_message=notify_message, ) res = client.add_endpoint_acl_rule(endpoint_id, rule_data) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=[("Message", "message"), ("Rule ID", "access_id")], )
[ "def", "create_command", "(", "principal", ",", "permissions", ",", "endpoint_plus_path", ",", "notify_email", ",", "notify_message", ")", ":", "if", "not", "principal", ":", "raise", "click", ".", "UsageError", "(", "\"A security principal is required for this command\...
Executor for `globus endpoint permission create`
[ "Executor", "for", "globus", "endpoint", "permission", "create" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/permission/create.py#L42-L85
13,371
globus/globus-cli
globus_cli/commands/bookmark/show.py
bookmark_show
def bookmark_show(bookmark_id_or_name): """ Executor for `globus bookmark show` """ client = get_client() res = resolve_id_or_name(client, bookmark_id_or_name) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=( ("ID", "id"), ("Name", "name"), ("Endpoint ID", "endpoint_id"), ("Path", "path"), ), simple_text=( # standard output is endpoint:path format "{}:{}".format(res["endpoint_id"], res["path"]) # verbose output includes all fields if not is_verbose() else None ), )
python
def bookmark_show(bookmark_id_or_name): client = get_client() res = resolve_id_or_name(client, bookmark_id_or_name) formatted_print( res, text_format=FORMAT_TEXT_RECORD, fields=( ("ID", "id"), ("Name", "name"), ("Endpoint ID", "endpoint_id"), ("Path", "path"), ), simple_text=( # standard output is endpoint:path format "{}:{}".format(res["endpoint_id"], res["path"]) # verbose output includes all fields if not is_verbose() else None ), )
[ "def", "bookmark_show", "(", "bookmark_id_or_name", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "resolve_id_or_name", "(", "client", ",", "bookmark_id_or_name", ")", "formatted_print", "(", "res", ",", "text_format", "=", "FORMAT_TEXT_RECORD", ","...
Executor for `globus bookmark show`
[ "Executor", "for", "globus", "bookmark", "show" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/bookmark/show.py#L18-L40
13,372
globus/globus-cli
globus_cli/commands/endpoint/role/list.py
role_list
def role_list(endpoint_id): """ Executor for `globus access endpoint-role-list` """ client = get_client() roles = client.endpoint_role_list(endpoint_id) resolved_ids = LazyIdentityMap( x["principal"] for x in roles if x["principal_type"] == "identity" ) def principal_str(role): principal = role["principal"] if role["principal_type"] == "identity": username = resolved_ids.get(principal) return username or principal elif role["principal_type"] == "group": return (u"https://app.globus.org/groups/{}").format(principal) else: return principal formatted_print( roles, fields=[ ("Principal Type", "principal_type"), ("Role ID", "id"), ("Principal", principal_str), ("Role", "role"), ], )
python
def role_list(endpoint_id): client = get_client() roles = client.endpoint_role_list(endpoint_id) resolved_ids = LazyIdentityMap( x["principal"] for x in roles if x["principal_type"] == "identity" ) def principal_str(role): principal = role["principal"] if role["principal_type"] == "identity": username = resolved_ids.get(principal) return username or principal elif role["principal_type"] == "group": return (u"https://app.globus.org/groups/{}").format(principal) else: return principal formatted_print( roles, fields=[ ("Principal Type", "principal_type"), ("Role ID", "id"), ("Principal", principal_str), ("Role", "role"), ], )
[ "def", "role_list", "(", "endpoint_id", ")", ":", "client", "=", "get_client", "(", ")", "roles", "=", "client", ".", "endpoint_role_list", "(", "endpoint_id", ")", "resolved_ids", "=", "LazyIdentityMap", "(", "x", "[", "\"principal\"", "]", "for", "x", "in"...
Executor for `globus access endpoint-role-list`
[ "Executor", "for", "globus", "access", "endpoint", "-", "role", "-", "list" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/endpoint/role/list.py#L12-L41
13,373
globus/globus-cli
globus_cli/helpers/delegate_proxy.py
fill_delegate_proxy_activation_requirements
def fill_delegate_proxy_activation_requirements( requirements_data, cred_file, lifetime_hours=12 ): """ Given the activation requirements for an endpoint and a filename for X.509 credentials, extracts the public key from the activation requirements, uses the key and the credentials to make a proxy credential, and returns the requirements data with the proxy chain filled in. """ # get the public key from the activation requirements for data in requirements_data["DATA"]: if data["type"] == "delegate_proxy" and data["name"] == "public_key": public_key = data["value"] break else: raise ValueError( ( "No public_key found in activation requirements, this endpoint " "does not support Delegate Proxy activation." ) ) # get user credentials from user credential file" with open(cred_file) as f: issuer_cred = f.read() # create the proxy credentials proxy = create_proxy_credentials(issuer_cred, public_key, lifetime_hours) # return the activation requirements document with the proxy_chain filled for data in requirements_data["DATA"]: if data["type"] == "delegate_proxy" and data["name"] == "proxy_chain": data["value"] = proxy return requirements_data else: raise ValueError( ( "No proxy_chain found in activation requirements, this endpoint " "does not support Delegate Proxy activation." ) )
python
def fill_delegate_proxy_activation_requirements( requirements_data, cred_file, lifetime_hours=12 ): # get the public key from the activation requirements for data in requirements_data["DATA"]: if data["type"] == "delegate_proxy" and data["name"] == "public_key": public_key = data["value"] break else: raise ValueError( ( "No public_key found in activation requirements, this endpoint " "does not support Delegate Proxy activation." ) ) # get user credentials from user credential file" with open(cred_file) as f: issuer_cred = f.read() # create the proxy credentials proxy = create_proxy_credentials(issuer_cred, public_key, lifetime_hours) # return the activation requirements document with the proxy_chain filled for data in requirements_data["DATA"]: if data["type"] == "delegate_proxy" and data["name"] == "proxy_chain": data["value"] = proxy return requirements_data else: raise ValueError( ( "No proxy_chain found in activation requirements, this endpoint " "does not support Delegate Proxy activation." ) )
[ "def", "fill_delegate_proxy_activation_requirements", "(", "requirements_data", ",", "cred_file", ",", "lifetime_hours", "=", "12", ")", ":", "# get the public key from the activation requirements", "for", "data", "in", "requirements_data", "[", "\"DATA\"", "]", ":", "if", ...
Given the activation requirements for an endpoint and a filename for X.509 credentials, extracts the public key from the activation requirements, uses the key and the credentials to make a proxy credential, and returns the requirements data with the proxy chain filled in.
[ "Given", "the", "activation", "requirements", "for", "an", "endpoint", "and", "a", "filename", "for", "X", ".", "509", "credentials", "extracts", "the", "public", "key", "from", "the", "activation", "requirements", "uses", "the", "key", "and", "the", "credenti...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/helpers/delegate_proxy.py#L12-L52
13,374
globus/globus-cli
globus_cli/helpers/delegate_proxy.py
create_proxy_credentials
def create_proxy_credentials(issuer_cred, public_key, lifetime_hours): """ Given an issuer credentials PEM file in the form of a string, a public_key string from an activation requirements document, and an int for the proxy lifetime, returns credentials as a unicode string in PEM format containing a new proxy certificate and an extended proxy chain. """ # parse the issuer credential loaded_cert, loaded_private_key, issuer_chain = parse_issuer_cred(issuer_cred) # load the public_key into a cryptography object loaded_public_key = serialization.load_pem_public_key( public_key.encode("ascii"), backend=default_backend() ) # check that the issuer certificate is not an old proxy # and is using the keyUsage section as required confirm_not_old_proxy(loaded_cert) validate_key_usage(loaded_cert) # create the proxy cert cryptography object new_cert = create_proxy_cert( loaded_cert, loaded_private_key, loaded_public_key, lifetime_hours ) # extend the proxy chain as a unicode string extended_chain = loaded_cert.public_bytes(serialization.Encoding.PEM).decode( "ascii" ) + six.u(issuer_chain) # return in PEM format as a unicode string return ( new_cert.public_bytes(serialization.Encoding.PEM).decode("ascii") + extended_chain )
python
def create_proxy_credentials(issuer_cred, public_key, lifetime_hours): # parse the issuer credential loaded_cert, loaded_private_key, issuer_chain = parse_issuer_cred(issuer_cred) # load the public_key into a cryptography object loaded_public_key = serialization.load_pem_public_key( public_key.encode("ascii"), backend=default_backend() ) # check that the issuer certificate is not an old proxy # and is using the keyUsage section as required confirm_not_old_proxy(loaded_cert) validate_key_usage(loaded_cert) # create the proxy cert cryptography object new_cert = create_proxy_cert( loaded_cert, loaded_private_key, loaded_public_key, lifetime_hours ) # extend the proxy chain as a unicode string extended_chain = loaded_cert.public_bytes(serialization.Encoding.PEM).decode( "ascii" ) + six.u(issuer_chain) # return in PEM format as a unicode string return ( new_cert.public_bytes(serialization.Encoding.PEM).decode("ascii") + extended_chain )
[ "def", "create_proxy_credentials", "(", "issuer_cred", ",", "public_key", ",", "lifetime_hours", ")", ":", "# parse the issuer credential", "loaded_cert", ",", "loaded_private_key", ",", "issuer_chain", "=", "parse_issuer_cred", "(", "issuer_cred", ")", "# load the public_k...
Given an issuer credentials PEM file in the form of a string, a public_key string from an activation requirements document, and an int for the proxy lifetime, returns credentials as a unicode string in PEM format containing a new proxy certificate and an extended proxy chain.
[ "Given", "an", "issuer", "credentials", "PEM", "file", "in", "the", "form", "of", "a", "string", "a", "public_key", "string", "from", "an", "activation", "requirements", "document", "and", "an", "int", "for", "the", "proxy", "lifetime", "returns", "credentials...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/helpers/delegate_proxy.py#L55-L89
13,375
globus/globus-cli
globus_cli/helpers/delegate_proxy.py
create_proxy_cert
def create_proxy_cert( loaded_cert, loaded_private_key, loaded_public_key, lifetime_hours ): """ Given cryptography objects for an issuing certificate, a public_key, a private_key, and an int for lifetime in hours, creates a proxy cert from the issuer and public key signed by the private key. """ builder = x509.CertificateBuilder() # create a serial number for the new proxy # Under RFC 3820 there are many ways to generate the serial number. However # making the number unpredictable has security benefits, e.g. it can make # this style of attack more difficult: # http://www.win.tue.nl/hashclash/rogue-ca serial = struct.unpack("<Q", os.urandom(8))[0] builder = builder.serial_number(serial) # set the new proxy as valid from now until lifetime_hours have passed builder = builder.not_valid_before(datetime.datetime.utcnow()) builder = builder.not_valid_after( datetime.datetime.utcnow() + datetime.timedelta(hours=lifetime_hours) ) # set the public key of the new proxy to the given public key builder = builder.public_key(loaded_public_key) # set the issuer of the new cert to the subject of the issuing cert builder = builder.issuer_name(loaded_cert.subject) # set the new proxy's subject # append a CommonName to the new proxy's subject # with the serial as the value of the CN new_atribute = x509.NameAttribute(x509.oid.NameOID.COMMON_NAME, six.u(str(serial))) subject_attributes = list(loaded_cert.subject) subject_attributes.append(new_atribute) builder = builder.subject_name(x509.Name(subject_attributes)) # add proxyCertInfo extension to the new proxy (We opt not to add keyUsage) # For RFC proxies the effective usage is defined as the intersection # of the usage of each cert in the chain. See section 4.2 of RFC 3820. 
# the constants 'oid' and 'value' are gotten from # examining output from a call to the open ssl function: # X509V3_EXT_conf(NULL, ctx, name, value) # ctx set by X509V3_set_nconf(&ctx, NCONF_new(NULL)) # name = "proxyCertInfo" # value = "critical,language:Inherit all" oid = x509.ObjectIdentifier("1.3.6.1.5.5.7.1.14") value = b"0\x0c0\n\x06\x08+\x06\x01\x05\x05\x07\x15\x01" extension = x509.extensions.UnrecognizedExtension(oid, value) builder = builder.add_extension(extension, critical=True) # sign the new proxy with the issuer's private key new_certificate = builder.sign( private_key=loaded_private_key, algorithm=hashes.SHA256(), backend=default_backend(), ) # return the new proxy as a cryptography object return new_certificate
python
def create_proxy_cert( loaded_cert, loaded_private_key, loaded_public_key, lifetime_hours ): builder = x509.CertificateBuilder() # create a serial number for the new proxy # Under RFC 3820 there are many ways to generate the serial number. However # making the number unpredictable has security benefits, e.g. it can make # this style of attack more difficult: # http://www.win.tue.nl/hashclash/rogue-ca serial = struct.unpack("<Q", os.urandom(8))[0] builder = builder.serial_number(serial) # set the new proxy as valid from now until lifetime_hours have passed builder = builder.not_valid_before(datetime.datetime.utcnow()) builder = builder.not_valid_after( datetime.datetime.utcnow() + datetime.timedelta(hours=lifetime_hours) ) # set the public key of the new proxy to the given public key builder = builder.public_key(loaded_public_key) # set the issuer of the new cert to the subject of the issuing cert builder = builder.issuer_name(loaded_cert.subject) # set the new proxy's subject # append a CommonName to the new proxy's subject # with the serial as the value of the CN new_atribute = x509.NameAttribute(x509.oid.NameOID.COMMON_NAME, six.u(str(serial))) subject_attributes = list(loaded_cert.subject) subject_attributes.append(new_atribute) builder = builder.subject_name(x509.Name(subject_attributes)) # add proxyCertInfo extension to the new proxy (We opt not to add keyUsage) # For RFC proxies the effective usage is defined as the intersection # of the usage of each cert in the chain. See section 4.2 of RFC 3820. 
# the constants 'oid' and 'value' are gotten from # examining output from a call to the open ssl function: # X509V3_EXT_conf(NULL, ctx, name, value) # ctx set by X509V3_set_nconf(&ctx, NCONF_new(NULL)) # name = "proxyCertInfo" # value = "critical,language:Inherit all" oid = x509.ObjectIdentifier("1.3.6.1.5.5.7.1.14") value = b"0\x0c0\n\x06\x08+\x06\x01\x05\x05\x07\x15\x01" extension = x509.extensions.UnrecognizedExtension(oid, value) builder = builder.add_extension(extension, critical=True) # sign the new proxy with the issuer's private key new_certificate = builder.sign( private_key=loaded_private_key, algorithm=hashes.SHA256(), backend=default_backend(), ) # return the new proxy as a cryptography object return new_certificate
[ "def", "create_proxy_cert", "(", "loaded_cert", ",", "loaded_private_key", ",", "loaded_public_key", ",", "lifetime_hours", ")", ":", "builder", "=", "x509", ".", "CertificateBuilder", "(", ")", "# create a serial number for the new proxy", "# Under RFC 3820 there are many wa...
Given cryptography objects for an issuing certificate, a public_key, a private_key, and an int for lifetime in hours, creates a proxy cert from the issuer and public key signed by the private key.
[ "Given", "cryptography", "objects", "for", "an", "issuing", "certificate", "a", "public_key", "a", "private_key", "and", "an", "int", "for", "lifetime", "in", "hours", "creates", "a", "proxy", "cert", "from", "the", "issuer", "and", "public", "key", "signed", ...
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/helpers/delegate_proxy.py#L140-L201
13,376
globus/globus-cli
globus_cli/helpers/delegate_proxy.py
confirm_not_old_proxy
def confirm_not_old_proxy(loaded_cert): """ Given a cryptography object for the issuer cert, checks if the cert is an "old proxy" and raise an error if so. """ # Examine the last CommonName to see if it looks like an old proxy. last_cn = loaded_cert.subject.get_attributes_for_oid(x509.oid.NameOID.COMMON_NAME)[ -1 ] # if the last CN is 'proxy' or 'limited proxy' we are in an old proxy if last_cn.value in ("proxy", "limited proxy"): raise ValueError( "Proxy certificate is in an outdated format " "that is no longer supported" )
python
def confirm_not_old_proxy(loaded_cert): # Examine the last CommonName to see if it looks like an old proxy. last_cn = loaded_cert.subject.get_attributes_for_oid(x509.oid.NameOID.COMMON_NAME)[ -1 ] # if the last CN is 'proxy' or 'limited proxy' we are in an old proxy if last_cn.value in ("proxy", "limited proxy"): raise ValueError( "Proxy certificate is in an outdated format " "that is no longer supported" )
[ "def", "confirm_not_old_proxy", "(", "loaded_cert", ")", ":", "# Examine the last CommonName to see if it looks like an old proxy.", "last_cn", "=", "loaded_cert", ".", "subject", ".", "get_attributes_for_oid", "(", "x509", ".", "oid", ".", "NameOID", ".", "COMMON_NAME", ...
Given a cryptography object for the issuer cert, checks if the cert is an "old proxy" and raise an error if so.
[ "Given", "a", "cryptography", "object", "for", "the", "issuer", "cert", "checks", "if", "the", "cert", "is", "an", "old", "proxy", "and", "raise", "an", "error", "if", "so", "." ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/helpers/delegate_proxy.py#L204-L217
13,377
globus/globus-cli
globus_cli/commands/task/pause_info.py
task_pause_info
def task_pause_info(task_id): """ Executor for `globus task pause-info` """ client = get_client() res = client.task_pause_info(task_id) def _custom_text_format(res): explicit_pauses = [ field for field in EXPLICIT_PAUSE_MSG_FIELDS # n.b. some keys are absent for completed tasks if res.get(field[1]) ] effective_pause_rules = res["pause_rules"] if not explicit_pauses and not effective_pause_rules: safeprint("Task {} is not paused.".format(task_id)) click.get_current_context().exit(0) if explicit_pauses: formatted_print( res, fields=explicit_pauses, text_format=FORMAT_TEXT_RECORD, text_preamble="This task has been explicitly paused.\n", text_epilog="\n" if effective_pause_rules else None, ) if effective_pause_rules: formatted_print( effective_pause_rules, fields=PAUSE_RULE_DISPLAY_FIELDS, text_preamble=( "The following pause rules are effective on this task:\n" ), ) formatted_print(res, text_format=_custom_text_format)
python
def task_pause_info(task_id): client = get_client() res = client.task_pause_info(task_id) def _custom_text_format(res): explicit_pauses = [ field for field in EXPLICIT_PAUSE_MSG_FIELDS # n.b. some keys are absent for completed tasks if res.get(field[1]) ] effective_pause_rules = res["pause_rules"] if not explicit_pauses and not effective_pause_rules: safeprint("Task {} is not paused.".format(task_id)) click.get_current_context().exit(0) if explicit_pauses: formatted_print( res, fields=explicit_pauses, text_format=FORMAT_TEXT_RECORD, text_preamble="This task has been explicitly paused.\n", text_epilog="\n" if effective_pause_rules else None, ) if effective_pause_rules: formatted_print( effective_pause_rules, fields=PAUSE_RULE_DISPLAY_FIELDS, text_preamble=( "The following pause rules are effective on this task:\n" ), ) formatted_print(res, text_format=_custom_text_format)
[ "def", "task_pause_info", "(", "task_id", ")", ":", "client", "=", "get_client", "(", ")", "res", "=", "client", ".", "task_pause_info", "(", "task_id", ")", "def", "_custom_text_format", "(", "res", ")", ":", "explicit_pauses", "=", "[", "field", "for", "...
Executor for `globus task pause-info`
[ "Executor", "for", "globus", "task", "pause", "-", "info" ]
336675ff24da64c5ee487243f39ae39fc49a7e14
https://github.com/globus/globus-cli/blob/336675ff24da64c5ee487243f39ae39fc49a7e14/globus_cli/commands/task/pause_info.py#L45-L83
13,378
instana/python-sensor
instana/recorder.py
InstanaRecorder.run
def run(self): """ Span a background thread to periodically report queued spans """ self.timer = t.Thread(target=self.report_spans) self.timer.daemon = True self.timer.name = "Instana Span Reporting" self.timer.start()
python
def run(self): self.timer = t.Thread(target=self.report_spans) self.timer.daemon = True self.timer.name = "Instana Span Reporting" self.timer.start()
[ "def", "run", "(", "self", ")", ":", "self", ".", "timer", "=", "t", ".", "Thread", "(", "target", "=", "self", ".", "report_spans", ")", "self", ".", "timer", ".", "daemon", "=", "True", "self", ".", "timer", ".", "name", "=", "\"Instana Span Report...
Span a background thread to periodically report queued spans
[ "Span", "a", "background", "thread", "to", "periodically", "report", "queued", "spans" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/recorder.py#L45-L50
13,379
instana/python-sensor
instana/recorder.py
InstanaRecorder.report_spans
def report_spans(self): """ Periodically report the queued spans """ logger.debug("Span reporting thread is now alive") def span_work(): queue_size = self.queue.qsize() if queue_size > 0 and instana.singletons.agent.can_send(): response = instana.singletons.agent.report_traces(self.queued_spans()) if response: logger.debug("reported %d spans" % queue_size) return True every(2, span_work, "Span Reporting")
python
def report_spans(self): logger.debug("Span reporting thread is now alive") def span_work(): queue_size = self.queue.qsize() if queue_size > 0 and instana.singletons.agent.can_send(): response = instana.singletons.agent.report_traces(self.queued_spans()) if response: logger.debug("reported %d spans" % queue_size) return True every(2, span_work, "Span Reporting")
[ "def", "report_spans", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"Span reporting thread is now alive\"", ")", "def", "span_work", "(", ")", ":", "queue_size", "=", "self", ".", "queue", ".", "qsize", "(", ")", "if", "queue_size", ">", "0", "and...
Periodically report the queued spans
[ "Periodically", "report", "the", "queued", "spans" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/recorder.py#L52-L64
13,380
instana/python-sensor
instana/recorder.py
InstanaRecorder.queued_spans
def queued_spans(self): """ Get all of the spans in the queue """ spans = [] while True: try: s = self.queue.get(False) except queue.Empty: break else: spans.append(s) return spans
python
def queued_spans(self): spans = [] while True: try: s = self.queue.get(False) except queue.Empty: break else: spans.append(s) return spans
[ "def", "queued_spans", "(", "self", ")", ":", "spans", "=", "[", "]", "while", "True", ":", "try", ":", "s", "=", "self", ".", "queue", ".", "get", "(", "False", ")", "except", "queue", ".", "Empty", ":", "break", "else", ":", "spans", ".", "appe...
Get all of the spans in the queue
[ "Get", "all", "of", "the", "spans", "in", "the", "queue" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/recorder.py#L70-L80
13,381
instana/python-sensor
instana/recorder.py
InstanaRecorder.record_span
def record_span(self, span): """ Convert the passed BasicSpan into an JsonSpan and add it to the span queue """ if instana.singletons.agent.can_send() or "INSTANA_TEST" in os.environ: json_span = None if span.operation_name in self.registered_spans: json_span = self.build_registered_span(span) else: json_span = self.build_sdk_span(span) self.queue.put(json_span)
python
def record_span(self, span): if instana.singletons.agent.can_send() or "INSTANA_TEST" in os.environ: json_span = None if span.operation_name in self.registered_spans: json_span = self.build_registered_span(span) else: json_span = self.build_sdk_span(span) self.queue.put(json_span)
[ "def", "record_span", "(", "self", ",", "span", ")", ":", "if", "instana", ".", "singletons", ".", "agent", ".", "can_send", "(", ")", "or", "\"INSTANA_TEST\"", "in", "os", ".", "environ", ":", "json_span", "=", "None", "if", "span", ".", "operation_name...
Convert the passed BasicSpan into an JsonSpan and add it to the span queue
[ "Convert", "the", "passed", "BasicSpan", "into", "an", "JsonSpan", "and", "add", "it", "to", "the", "span", "queue" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/recorder.py#L86-L99
13,382
instana/python-sensor
instana/recorder.py
InstanaRecorder.build_sdk_span
def build_sdk_span(self, span): """ Takes a BasicSpan and converts into an SDK type JsonSpan """ custom_data = CustomData(tags=span.tags, logs=self.collect_logs(span)) sdk_data = SDKData(name=span.operation_name, custom=custom_data, Type=self.get_span_kind_as_string(span)) if "arguments" in span.tags: sdk_data.arguments = span.tags["arguments"] if "return" in span.tags: sdk_data.Return = span.tags["return"] data = Data(service=instana.singletons.agent.sensor.options.service_name, sdk=sdk_data) entity_from = {'e': instana.singletons.agent.from_.pid, 'h': instana.singletons.agent.from_.agentUuid} json_span = JsonSpan( t=span.context.trace_id, p=span.parent_id, s=span.context.span_id, ts=int(round(span.start_time * 1000)), d=int(round(span.duration * 1000)), k=self.get_span_kind_as_int(span), n="sdk", f=entity_from, data=data) error = span.tags.pop("error", False) ec = span.tags.pop("ec", None) if error and ec: json_span.error = error json_span.ec = ec return json_span
python
def build_sdk_span(self, span): custom_data = CustomData(tags=span.tags, logs=self.collect_logs(span)) sdk_data = SDKData(name=span.operation_name, custom=custom_data, Type=self.get_span_kind_as_string(span)) if "arguments" in span.tags: sdk_data.arguments = span.tags["arguments"] if "return" in span.tags: sdk_data.Return = span.tags["return"] data = Data(service=instana.singletons.agent.sensor.options.service_name, sdk=sdk_data) entity_from = {'e': instana.singletons.agent.from_.pid, 'h': instana.singletons.agent.from_.agentUuid} json_span = JsonSpan( t=span.context.trace_id, p=span.parent_id, s=span.context.span_id, ts=int(round(span.start_time * 1000)), d=int(round(span.duration * 1000)), k=self.get_span_kind_as_int(span), n="sdk", f=entity_from, data=data) error = span.tags.pop("error", False) ec = span.tags.pop("ec", None) if error and ec: json_span.error = error json_span.ec = ec return json_span
[ "def", "build_sdk_span", "(", "self", ",", "span", ")", ":", "custom_data", "=", "CustomData", "(", "tags", "=", "span", ".", "tags", ",", "logs", "=", "self", ".", "collect_logs", "(", "span", ")", ")", "sdk_data", "=", "SDKData", "(", "name", "=", ...
Takes a BasicSpan and converts into an SDK type JsonSpan
[ "Takes", "a", "BasicSpan", "and", "converts", "into", "an", "SDK", "type", "JsonSpan" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/recorder.py#L211-L249
13,383
instana/python-sensor
instana/recorder.py
InstanaRecorder.get_span_kind_as_string
def get_span_kind_as_string(self, span): """ Will retrieve the `span.kind` tag and return the appropriate string value for the Instana backend or None if the tag is set to something we don't recognize. :param span: The span to search for the `span.kind` tag :return: String """ kind = None if "span.kind" in span.tags: if span.tags["span.kind"] in self.entry_kind: kind = "entry" elif span.tags["span.kind"] in self.exit_kind: kind = "exit" else: kind = "intermediate" return kind
python
def get_span_kind_as_string(self, span): kind = None if "span.kind" in span.tags: if span.tags["span.kind"] in self.entry_kind: kind = "entry" elif span.tags["span.kind"] in self.exit_kind: kind = "exit" else: kind = "intermediate" return kind
[ "def", "get_span_kind_as_string", "(", "self", ",", "span", ")", ":", "kind", "=", "None", "if", "\"span.kind\"", "in", "span", ".", "tags", ":", "if", "span", ".", "tags", "[", "\"span.kind\"", "]", "in", "self", ".", "entry_kind", ":", "kind", "=", "...
Will retrieve the `span.kind` tag and return the appropriate string value for the Instana backend or None if the tag is set to something we don't recognize. :param span: The span to search for the `span.kind` tag :return: String
[ "Will", "retrieve", "the", "span", ".", "kind", "tag", "and", "return", "the", "appropriate", "string", "value", "for", "the", "Instana", "backend", "or", "None", "if", "the", "tag", "is", "set", "to", "something", "we", "don", "t", "recognize", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/recorder.py#L251-L267
13,384
instana/python-sensor
instana/recorder.py
InstanaRecorder.get_span_kind_as_int
def get_span_kind_as_int(self, span): """ Will retrieve the `span.kind` tag and return the appropriate integer value for the Instana backend or None if the tag is set to something we don't recognize. :param span: The span to search for the `span.kind` tag :return: Integer """ kind = None if "span.kind" in span.tags: if span.tags["span.kind"] in self.entry_kind: kind = 1 elif span.tags["span.kind"] in self.exit_kind: kind = 2 else: kind = 3 return kind
python
def get_span_kind_as_int(self, span): kind = None if "span.kind" in span.tags: if span.tags["span.kind"] in self.entry_kind: kind = 1 elif span.tags["span.kind"] in self.exit_kind: kind = 2 else: kind = 3 return kind
[ "def", "get_span_kind_as_int", "(", "self", ",", "span", ")", ":", "kind", "=", "None", "if", "\"span.kind\"", "in", "span", ".", "tags", ":", "if", "span", ".", "tags", "[", "\"span.kind\"", "]", "in", "self", ".", "entry_kind", ":", "kind", "=", "1",...
Will retrieve the `span.kind` tag and return the appropriate integer value for the Instana backend or None if the tag is set to something we don't recognize. :param span: The span to search for the `span.kind` tag :return: Integer
[ "Will", "retrieve", "the", "span", ".", "kind", "tag", "and", "return", "the", "appropriate", "integer", "value", "for", "the", "Instana", "backend", "or", "None", "if", "the", "tag", "is", "set", "to", "something", "we", "don", "t", "recognize", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/recorder.py#L269-L285
13,385
instana/python-sensor
instana/meter.py
Meter.run
def run(self): """ Spawns the metric reporting thread """ self.thr = threading.Thread(target=self.collect_and_report) self.thr.daemon = True self.thr.name = "Instana Metric Collection" self.thr.start()
python
def run(self): self.thr = threading.Thread(target=self.collect_and_report) self.thr.daemon = True self.thr.name = "Instana Metric Collection" self.thr.start()
[ "def", "run", "(", "self", ")", ":", "self", ".", "thr", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "collect_and_report", ")", "self", ".", "thr", ".", "daemon", "=", "True", "self", ".", "thr", ".", "name", "=", "\"Instana Met...
Spawns the metric reporting thread
[ "Spawns", "the", "metric", "reporting", "thread" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/meter.py#L126-L131
13,386
instana/python-sensor
instana/meter.py
Meter.reset
def reset(self): """" Reset the state as new """ self.last_usage = None self.last_collect = None self.last_metrics = None self.snapshot_countdown = 0 self.run()
python
def reset(self): "self.last_usage = None self.last_collect = None self.last_metrics = None self.snapshot_countdown = 0 self.run()
[ "def", "reset", "(", "self", ")", ":", "self", ".", "last_usage", "=", "None", "self", ".", "last_collect", "=", "None", "self", ".", "last_metrics", "=", "None", "self", ".", "snapshot_countdown", "=", "0", "self", ".", "run", "(", ")" ]
Reset the state as new
[ "Reset", "the", "state", "as", "new" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/meter.py#L133-L139
13,387
instana/python-sensor
instana/meter.py
Meter.collect_and_report
def collect_and_report(self): """ Target function for the metric reporting thread. This is a simple loop to collect and report entity data every 1 second. """ logger.debug("Metric reporting thread is now alive") def metric_work(): self.process() if self.agent.is_timed_out(): logger.warn("Host agent offline for >1 min. Going to sit in a corner...") self.agent.reset() return False return True every(1, metric_work, "Metrics Collection")
python
def collect_and_report(self): logger.debug("Metric reporting thread is now alive") def metric_work(): self.process() if self.agent.is_timed_out(): logger.warn("Host agent offline for >1 min. Going to sit in a corner...") self.agent.reset() return False return True every(1, metric_work, "Metrics Collection")
[ "def", "collect_and_report", "(", "self", ")", ":", "logger", ".", "debug", "(", "\"Metric reporting thread is now alive\"", ")", "def", "metric_work", "(", ")", ":", "self", ".", "process", "(", ")", "if", "self", ".", "agent", ".", "is_timed_out", "(", ")"...
Target function for the metric reporting thread. This is a simple loop to collect and report entity data every 1 second.
[ "Target", "function", "for", "the", "metric", "reporting", "thread", ".", "This", "is", "a", "simple", "loop", "to", "collect", "and", "report", "entity", "data", "every", "1", "second", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/meter.py#L141-L156
13,388
instana/python-sensor
instana/meter.py
Meter.process
def process(self): """ Collects, processes & reports metrics """ if self.agent.machine.fsm.current is "wait4init": # Test the host agent if we're ready to send data if self.agent.is_agent_ready(): self.agent.machine.fsm.ready() else: return if self.agent.can_send(): self.snapshot_countdown = self.snapshot_countdown - 1 ss = None cm = self.collect_metrics() if self.snapshot_countdown < 1: logger.debug("Sending process snapshot data") self.snapshot_countdown = self.SNAPSHOT_PERIOD ss = self.collect_snapshot() md = copy.deepcopy(cm).delta_data(None) else: md = copy.deepcopy(cm).delta_data(self.last_metrics) ed = EntityData(pid=self.agent.from_.pid, snapshot=ss, metrics=md) response = self.agent.report_data(ed) if response: if response.status_code is 200 and len(response.content) > 2: # The host agent returned something indicating that is has a request for us that we # need to process. self.handle_agent_tasks(json.loads(response.content)[0]) self.last_metrics = cm.__dict__
python
def process(self): if self.agent.machine.fsm.current is "wait4init": # Test the host agent if we're ready to send data if self.agent.is_agent_ready(): self.agent.machine.fsm.ready() else: return if self.agent.can_send(): self.snapshot_countdown = self.snapshot_countdown - 1 ss = None cm = self.collect_metrics() if self.snapshot_countdown < 1: logger.debug("Sending process snapshot data") self.snapshot_countdown = self.SNAPSHOT_PERIOD ss = self.collect_snapshot() md = copy.deepcopy(cm).delta_data(None) else: md = copy.deepcopy(cm).delta_data(self.last_metrics) ed = EntityData(pid=self.agent.from_.pid, snapshot=ss, metrics=md) response = self.agent.report_data(ed) if response: if response.status_code is 200 and len(response.content) > 2: # The host agent returned something indicating that is has a request for us that we # need to process. self.handle_agent_tasks(json.loads(response.content)[0]) self.last_metrics = cm.__dict__
[ "def", "process", "(", "self", ")", ":", "if", "self", ".", "agent", ".", "machine", ".", "fsm", ".", "current", "is", "\"wait4init\"", ":", "# Test the host agent if we're ready to send data", "if", "self", ".", "agent", ".", "is_agent_ready", "(", ")", ":", ...
Collects, processes & reports metrics
[ "Collects", "processes", "&", "reports", "metrics" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/meter.py#L158-L189
13,389
instana/python-sensor
instana/meter.py
Meter.collect_snapshot
def collect_snapshot(self): """ Collects snapshot related information to this process and environment """ try: if "INSTANA_SERVICE_NAME" in os.environ: appname = os.environ["INSTANA_SERVICE_NAME"] elif "FLASK_APP" in os.environ: appname = os.environ["FLASK_APP"] elif "DJANGO_SETTINGS_MODULE" in os.environ: appname = os.environ["DJANGO_SETTINGS_MODULE"].split('.')[0] elif os.path.basename(sys.argv[0]) == '' and sys.stdout.isatty(): appname = "Interactive Console" else: if os.path.basename(sys.argv[0]) == '': appname = os.path.basename(sys.executable) else: appname = os.path.basename(sys.argv[0]) s = Snapshot(name=appname, version=platform.version(), f=platform.python_implementation(), a=platform.architecture()[0], djmw=self.djmw) s.version = sys.version s.versions = self.collect_modules() except Exception as e: logger.debug(e.message) else: return s
python
def collect_snapshot(self): try: if "INSTANA_SERVICE_NAME" in os.environ: appname = os.environ["INSTANA_SERVICE_NAME"] elif "FLASK_APP" in os.environ: appname = os.environ["FLASK_APP"] elif "DJANGO_SETTINGS_MODULE" in os.environ: appname = os.environ["DJANGO_SETTINGS_MODULE"].split('.')[0] elif os.path.basename(sys.argv[0]) == '' and sys.stdout.isatty(): appname = "Interactive Console" else: if os.path.basename(sys.argv[0]) == '': appname = os.path.basename(sys.executable) else: appname = os.path.basename(sys.argv[0]) s = Snapshot(name=appname, version=platform.version(), f=platform.python_implementation(), a=platform.architecture()[0], djmw=self.djmw) s.version = sys.version s.versions = self.collect_modules() except Exception as e: logger.debug(e.message) else: return s
[ "def", "collect_snapshot", "(", "self", ")", ":", "try", ":", "if", "\"INSTANA_SERVICE_NAME\"", "in", "os", ".", "environ", ":", "appname", "=", "os", ".", "environ", "[", "\"INSTANA_SERVICE_NAME\"", "]", "elif", "\"FLASK_APP\"", "in", "os", ".", "environ", ...
Collects snapshot related information to this process and environment
[ "Collects", "snapshot", "related", "information", "to", "this", "process", "and", "environment" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/meter.py#L209-L235
13,390
instana/python-sensor
instana/meter.py
Meter.collect_modules
def collect_modules(self):
    """
    Collect the list of loaded top-level modules and their versions.

    Returns a dict mapping module name to version string, or None if
    collection failed entirely.  Per-module failures are skipped so one
    broken module cannot ruin the whole report.
    """
    try:
        res = {}
        # Snapshot sys.modules before iterating: imports on other threads
        # can mutate the dict and a live iteration would raise
        # RuntimeError("dictionary changed size during iteration").
        for k, mod in list(sys.modules.items()):
            # Don't report submodules (e.g. django.x, django.y, django.z)
            # Skip modules that begin with underscore (and guard against
            # an empty key before indexing k[0]).
            if not k or ('.' in k) or k[0] == '_':
                continue
            if mod:
                try:
                    d = mod.__dict__
                    if "version" in d and d["version"]:
                        res[k] = self.jsonable(d["version"])
                    elif "__version__" in d and d["__version__"]:
                        res[k] = self.jsonable(d["__version__"])
                    else:
                        # Fall back to the installed distribution metadata.
                        res[k] = get_distribution(k).version
                except DistributionNotFound:
                    pass
                except Exception:
                    logger.debug("collect_modules: could not process module: %s" % k)
    except Exception:
        logger.debug("collect_modules", exc_info=True)
    else:
        return res
python
def collect_modules(self): try: res = {} m = sys.modules for k in m: # Don't report submodules (e.g. django.x, django.y, django.z) # Skip modules that begin with underscore if ('.' in k) or k[0] == '_': continue if m[k]: try: d = m[k].__dict__ if "version" in d and d["version"]: res[k] = self.jsonable(d["version"]) elif "__version__" in d and d["__version__"]: res[k] = self.jsonable(d["__version__"]) else: res[k] = get_distribution(k).version except DistributionNotFound: pass except Exception: logger.debug("collect_modules: could not process module: %s" % k) except Exception: logger.debug("collect_modules", exc_info=True) else: return res
[ "def", "collect_modules", "(", "self", ")", ":", "try", ":", "res", "=", "{", "}", "m", "=", "sys", ".", "modules", "for", "k", "in", "m", ":", "# Don't report submodules (e.g. django.x, django.y, django.z)", "# Skip modules that begin with underscore", "if", "(", ...
Collect up the list of modules in use
[ "Collect", "up", "the", "list", "of", "modules", "in", "use" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/meter.py#L249-L276
13,391
instana/python-sensor
instana/agent.py
Agent.start
def start(self, e):
    """Kick off the background metric and trace reporting threads."""
    logger.debug("Spawning metric & trace reporting threads")
    meter = self.sensor.meter
    meter.run()
    recorder = instana.singletons.tracer.recorder
    recorder.run()
python
def start(self, e): logger.debug("Spawning metric & trace reporting threads") self.sensor.meter.run() instana.singletons.tracer.recorder.run()
[ "def", "start", "(", "self", ",", "e", ")", ":", "logger", ".", "debug", "(", "\"Spawning metric & trace reporting threads\"", ")", "self", ".", "sensor", ".", "meter", ".", "run", "(", ")", "instana", ".", "singletons", ".", "tracer", ".", "recorder", "."...
Starts the agent and required threads
[ "Starts", "the", "agent", "and", "required", "threads" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L46-L50
13,392
instana/python-sensor
instana/agent.py
Agent.handle_fork
def handle_fork(self):
    """Re-initialize agent state in the child process after a fork()."""
    self.reset()
    sensor = self.sensor
    sensor.handle_fork()
    tracer = instana.singletons.tracer
    tracer.handle_fork()
python
def handle_fork(self): self.reset() self.sensor.handle_fork() instana.singletons.tracer.handle_fork()
[ "def", "handle_fork", "(", "self", ")", ":", "self", ".", "reset", "(", ")", "self", ".", "sensor", ".", "handle_fork", "(", ")", "instana", ".", "singletons", ".", "tracer", ".", "handle_fork", "(", ")" ]
Forks happen. Here we handle them.
[ "Forks", "happen", ".", "Here", "we", "handle", "them", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L104-L110
13,393
instana/python-sensor
instana/agent.py
Agent.announce
def announce(self, discovery):
    """
    With the passed in Discovery class, attempt to announce to the host agent.

    Returns the requests Response on (attempted) contact, or None when the
    agent could not be reached; connection failures are logged at debug
    level and swallowed.
    """
    response = None
    try:
        url = self.__discovery_url()
        # Lazy %-args: the message is only formatted if debug is enabled.
        logger.debug("making announce request to %s", url)
        response = self.client.put(url,
                                   data=self.to_json(discovery),
                                   headers={"Content-Type": "application/json"},
                                   timeout=0.8)
        # BUG FIX: compare status codes with `==`, not `is`.  Identity
        # comparison against an int literal only works by accident of
        # CPython's small-int caching and is not guaranteed.
        if response.status_code == 200:
            self.last_seen = datetime.now()
    except (requests.ConnectTimeout, requests.ConnectionError):
        logger.debug("announce", exc_info=True)
    return response
python
def announce(self, discovery): try: url = self.__discovery_url() logger.debug("making announce request to %s" % (url)) response = None response = self.client.put(url, data=self.to_json(discovery), headers={"Content-Type": "application/json"}, timeout=0.8) if response.status_code is 200: self.last_seen = datetime.now() except (requests.ConnectTimeout, requests.ConnectionError): logger.debug("announce", exc_info=True) finally: return response
[ "def", "announce", "(", "self", ",", "discovery", ")", ":", "try", ":", "url", "=", "self", ".", "__discovery_url", "(", ")", "logger", ".", "debug", "(", "\"making announce request to %s\"", "%", "(", "url", ")", ")", "response", "=", "None", "response", ...
With the passed in Discovery class, attempt to announce to the host agent.
[ "With", "the", "passed", "in", "Discovery", "class", "attempt", "to", "announce", "to", "the", "host", "agent", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L134-L152
13,394
instana/python-sensor
instana/agent.py
Agent.is_agent_ready
def is_agent_ready(self):
    """
    Used after making a successful announce to test when the agent is ready
    to accept data.

    Returns True when the host agent answers HTTP 200, False otherwise
    (including on connection errors, which are logged at debug level).
    """
    try:
        response = self.client.head(self.__data_url(), timeout=0.8)
        # BUG FIX: compare status codes with `==`, not `is`; identity
        # checks against int literals rely on CPython small-int caching.
        return response.status_code == 200
    except (requests.ConnectTimeout, requests.ConnectionError):
        logger.debug("is_agent_ready: host agent connection error")
        # Explicit False instead of an implicit None fall-through;
        # backward compatible since both are falsy.
        return False
python
def is_agent_ready(self): try: response = self.client.head(self.__data_url(), timeout=0.8) if response.status_code is 200: return True return False except (requests.ConnectTimeout, requests.ConnectionError): logger.debug("is_agent_ready: host agent connection error")
[ "def", "is_agent_ready", "(", "self", ")", ":", "try", ":", "response", "=", "self", ".", "client", ".", "head", "(", "self", ".", "__data_url", "(", ")", ",", "timeout", "=", "0.8", ")", "if", "response", ".", "status_code", "is", "200", ":", "retur...
Used after making a successful announce to test when the agent is ready to accept data.
[ "Used", "after", "making", "a", "successful", "announce", "to", "test", "when", "the", "agent", "is", "ready", "to", "accept", "data", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L154-L165
13,395
instana/python-sensor
instana/agent.py
Agent.task_response
def task_response(self, message_id, data):
    """
    When the host agent passes us a task and we do it, this function is
    used to respond with the results of the task.

    Returns the requests Response, or None when the post failed; all
    request errors are logged at debug level and swallowed.
    """
    response = None
    try:
        payload = json.dumps(data)
        # Lazy %-args avoid building the log string unless debug is on.
        logger.debug("Task response is %s: %s",
                     self.__response_url(message_id), payload)
        response = self.client.post(self.__response_url(message_id),
                                    data=payload,
                                    headers={"Content-Type": "application/json"},
                                    timeout=0.8)
    except (requests.ConnectTimeout, requests.ConnectionError):
        logger.debug("task_response", exc_info=True)
    except Exception:
        logger.debug("task_response Exception", exc_info=True)
    # BUG FIX: the original returned from a `finally` block, which
    # silently swallows BaseExceptions such as KeyboardInterrupt and
    # SystemExit (flake8 B012 / pylint W0150).  A plain return does not.
    return response
python
def task_response(self, message_id, data): try: response = None payload = json.dumps(data) logger.debug("Task response is %s: %s" % (self.__response_url(message_id), payload)) response = self.client.post(self.__response_url(message_id), data=payload, headers={"Content-Type": "application/json"}, timeout=0.8) except (requests.ConnectTimeout, requests.ConnectionError): logger.debug("task_response", exc_info=True) except Exception: logger.debug("task_response Exception", exc_info=True) finally: return response
[ "def", "task_response", "(", "self", ",", "message_id", ",", "data", ")", ":", "try", ":", "response", "=", "None", "payload", "=", "json", ".", "dumps", "(", "data", ")", "logger", ".", "debug", "(", "\"Task response is %s: %s\"", "%", "(", "self", ".",...
When the host agent passes us a task and we do it, this function is used to respond with the results of the task.
[ "When", "the", "host", "agent", "passes", "us", "a", "task", "and", "we", "do", "it", "this", "function", "is", "used", "to", "respond", "with", "the", "results", "of", "the", "task", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L207-L227
13,396
instana/python-sensor
instana/agent.py
Agent.__discovery_url
def __discovery_url(self):
    """Return the host agent URL used for the announce handshake."""
    configured_port = self.sensor.options.agent_port
    # Port 0 means "unconfigured"; substitute the agent's default port.
    port = AGENT_DEFAULT_PORT if configured_port == 0 else configured_port
    return "http://{0}:{1}/{2}".format(self.host, port, AGENT_DISCOVERY_PATH)
python
def __discovery_url(self): port = self.sensor.options.agent_port if port == 0: port = AGENT_DEFAULT_PORT return "http://%s:%s/%s" % (self.host, port, AGENT_DISCOVERY_PATH)
[ "def", "__discovery_url", "(", "self", ")", ":", "port", "=", "self", ".", "sensor", ".", "options", ".", "agent_port", "if", "port", "==", "0", ":", "port", "=", "AGENT_DEFAULT_PORT", "return", "\"http://%s:%s/%s\"", "%", "(", "self", ".", "host", ",", ...
URL for announcing to the host agent
[ "URL", "for", "announcing", "to", "the", "host", "agent" ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L229-L237
13,397
instana/python-sensor
instana/agent.py
Agent.__data_url
def __data_url(self):
    """Return the metrics-posting URL.  Only valid once announced."""
    pid_path = AGENT_DATA_PATH % self.from_.pid
    return "http://{0}:{1}/{2}".format(self.host, self.port, pid_path)
python
def __data_url(self): path = AGENT_DATA_PATH % self.from_.pid return "http://%s:%s/%s" % (self.host, self.port, path)
[ "def", "__data_url", "(", "self", ")", ":", "path", "=", "AGENT_DATA_PATH", "%", "self", ".", "from_", ".", "pid", "return", "\"http://%s:%s/%s\"", "%", "(", "self", ".", "host", ",", "self", ".", "port", ",", "path", ")" ]
URL for posting metrics to the host agent. Only valid when announced.
[ "URL", "for", "posting", "metrics", "to", "the", "host", "agent", ".", "Only", "valid", "when", "announced", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L239-L244
13,398
instana/python-sensor
instana/agent.py
Agent.__traces_url
def __traces_url(self):
    """Return the trace-posting URL.  Only valid once announced."""
    pid_path = AGENT_TRACES_PATH % self.from_.pid
    return "http://{0}:{1}/{2}".format(self.host, self.port, pid_path)
python
def __traces_url(self): path = AGENT_TRACES_PATH % self.from_.pid return "http://%s:%s/%s" % (self.host, self.port, path)
[ "def", "__traces_url", "(", "self", ")", ":", "path", "=", "AGENT_TRACES_PATH", "%", "self", ".", "from_", ".", "pid", "return", "\"http://%s:%s/%s\"", "%", "(", "self", ".", "host", ",", "self", ".", "port", ",", "path", ")" ]
URL for posting traces to the host agent. Only valid when announced.
[ "URL", "for", "posting", "traces", "to", "the", "host", "agent", ".", "Only", "valid", "when", "announced", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L246-L251
13,399
instana/python-sensor
instana/agent.py
Agent.__response_url
def __response_url(self, message_id):
    """
    Build the URL for responding to host agent task requests.

    Returns None when this process has not yet been announced (pid is 0),
    since no valid response endpoint exists in that state.
    """
    if self.from_.pid == 0:
        # BUG FIX: the original only assigned `path` inside the pid != 0
        # branch but fell through to a return that used it, raising
        # UnboundLocalError when unannounced.  Fail gracefully instead.
        return None
    path = AGENT_RESPONSE_PATH % (self.from_.pid, message_id)
    return "http://%s:%s/%s" % (self.host, self.port, path)
python
def __response_url(self, message_id): if self.from_.pid != 0: path = AGENT_RESPONSE_PATH % (self.from_.pid, message_id) return "http://%s:%s/%s" % (self.host, self.port, path)
[ "def", "__response_url", "(", "self", ",", "message_id", ")", ":", "if", "self", ".", "from_", ".", "pid", "!=", "0", ":", "path", "=", "AGENT_RESPONSE_PATH", "%", "(", "self", ".", "from_", ".", "pid", ",", "message_id", ")", "return", "\"http://%s:%s/%...
URL for responding to agent requests.
[ "URL", "for", "responding", "to", "agent", "requests", "." ]
58aecb90924c48bafcbc4f93bd9b7190980918bc
https://github.com/instana/python-sensor/blob/58aecb90924c48bafcbc4f93bd9b7190980918bc/instana/agent.py#L253-L260