_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q272400
check_call
test
def check_call(state, callstr, argstr=None, expand_msg=None):
    """When checking a function definition of lambda function,
    prepare has_equal_x for checking the call of a user-defined function.

    Args:
        callstr (str): call string that specifies how the function should
            be called, e.g. `f(1, a = 2)`. ``check_call()`` will replace
            ``f`` with the function/lambda you're targeting.
        argstr (str): If specified, this overrides the way the function
            call is referred to in the expand message.
        expand_msg (str): If specified, this overrides any messages that
            are prepended by previous SCT chains.
        state (State): state object that is chained from.

    :Example:

        Student and solution code::

            def my_power(x):
                print("calculating sqrt...")
                return(x * x)

        SCT::

            Ex().check_function_def('my_power').multi(
                check_call("f(3)").has_equal_value(),
                check_call("f(3)").has_equal_output()
            )
    """
    # check_call only makes sense when chained off a function def or lambda check.
    state.assert_is(
        ["function_defs", "lambda_functions"],
        "check_call",
        ["check_function_def", "check_lambda_function"],
    )

    if expand_msg is None:
        expand_msg = "To verify it, we reran {{argstr}}. "

    # Build the call AST for both student and solution parts; _argstr is the
    # human-readable rendering of the call used in feedback messages.
    stu_part, _argstr = build_call(callstr, state.student_parts["node"])
    sol_part, _ = build_call(callstr, state.solution_parts["node"])

    append_message = {"msg": expand_msg, "kwargs": {"argstr": argstr or _argstr}}
    child = part_to_child(stu_part, sol_part, append_message, state)
    return child
python
{ "resource": "" }
q272401
TransitModel.get_true_anomaly
test
def get_true_anomaly(self):
    """Compute the true anomaly at each supersampled time.

    The result is cached on ``self.f`` and returned.
    """
    # Convert the inclination and argument of periastron to radians,
    # keeping the exact original expression order for float equality.
    inc_rad = self.inc*pi/180.
    w_rad = self.w*pi/180.
    self.f = _rsky._getf(
        self.t_supersample,
        self.t0,
        self.per,
        self.a,
        inc_rad,
        self.ecc,
        w_rad,
        self.transittype,
        self.nthreads,
    )
    return self.f
python
{ "resource": "" }
q272402
LDAP3LoginManager.init_app
test
def init_app(self, app):
    '''Configure this extension for use with the given Flask app.

    Registers ``self.teardown`` as a teardown callback and attaches this
    ``LDAP3LoginManager`` to the app as ``app.ldap3_login_manager``.

    Args:
        app (flask.Flask): The flask app to initialise with
    '''
    app.ldap3_login_manager = self

    # Empty the server pool before re-reading the configuration.
    for server in list(self._server_pool):
        self._server_pool.remove(server)

    self.init_config(app.config)

    teardown_hook = getattr(app, 'teardown_appcontext', None)
    if teardown_hook is None:  # pragma: no cover
        teardown_hook = app.teardown_request
    teardown_hook(self.teardown)

    self.app = app
python
{ "resource": "" }
q272403
LDAP3LoginManager.init_config
test
def init_config(self, config):
    '''Configures this extension with a given configuration dictionary.
    This allows use of this extension without a flask app.

    Args:
        config (dict): A dictionary with configuration keys
    '''
    # Caller-supplied values win; everything below only fills in defaults.
    self.config.update(config)

    # Connection defaults.
    self.config.setdefault('LDAP_PORT', 389)
    self.config.setdefault('LDAP_HOST', None)
    self.config.setdefault('LDAP_USE_SSL', False)
    self.config.setdefault('LDAP_READONLY', True)
    self.config.setdefault('LDAP_CHECK_NAMES', True)

    # Direct-credentials bind options.
    self.config.setdefault('LDAP_BIND_DIRECT_CREDENTIALS', False)
    self.config.setdefault('LDAP_BIND_DIRECT_PREFIX', '')
    self.config.setdefault('LDAP_BIND_DIRECT_SUFFIX', '')
    self.config.setdefault('LDAP_BIND_DIRECT_GET_USER_INFO', True)

    # Search-bind options.
    self.config.setdefault('LDAP_ALWAYS_SEARCH_BIND', False)
    self.config.setdefault('LDAP_BASE_DN', '')
    self.config.setdefault('LDAP_BIND_USER_DN', None)
    self.config.setdefault('LDAP_BIND_USER_PASSWORD', None)
    self.config.setdefault('LDAP_SEARCH_FOR_GROUPS', True)
    self.config.setdefault('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND', False)

    # Prepended to the Base DN to limit scope when searching for
    # Users/Groups.
    self.config.setdefault('LDAP_USER_DN', '')
    self.config.setdefault('LDAP_GROUP_DN', '')

    self.config.setdefault('LDAP_BIND_AUTHENTICATION_TYPE', 'SIMPLE')

    # Ldap Filters
    self.config.setdefault('LDAP_USER_SEARCH_SCOPE', 'LEVEL')
    self.config.setdefault('LDAP_USER_OBJECT_FILTER', '(objectclass=person)')
    self.config.setdefault('LDAP_USER_LOGIN_ATTR', 'uid')
    self.config.setdefault('LDAP_USER_RDN_ATTR', 'uid')
    self.config.setdefault(
        'LDAP_GET_USER_ATTRIBUTES', ldap3.ALL_ATTRIBUTES)

    self.config.setdefault('LDAP_GROUP_SEARCH_SCOPE', 'LEVEL')
    self.config.setdefault(
        'LDAP_GROUP_OBJECT_FILTER', '(objectclass=group)')
    self.config.setdefault('LDAP_GROUP_MEMBERS_ATTR', 'uniqueMember')
    self.config.setdefault(
        'LDAP_GET_GROUP_ATTRIBUTES', ldap3.ALL_ATTRIBUTES)

    # Automatically register the configured host with the server pool
    # unless the caller opted out.
    self.config.setdefault('LDAP_ADD_SERVER', True)
    if self.config['LDAP_ADD_SERVER']:
        self.add_server(
            hostname=self.config['LDAP_HOST'],
            port=self.config['LDAP_PORT'],
            use_ssl=self.config['LDAP_USE_SSL']
        )
python
{ "resource": "" }
q272404
LDAP3LoginManager.add_server
test
def add_server(self, hostname, port, use_ssl, tls_ctx=None):
    """Add an additional server to the server pool and return the
    freshly created server.

    Args:
        hostname (str): Hostname of the server
        port (int): Port of the server
        use_ssl (bool): True if SSL is to be used when connecting.
        tls_ctx (ldap3.Tls): An optional TLS context object to use
            when connecting.

    Returns:
        ldap3.Server: The freshly created server object.

    Raises:
        ValueError: If a TLS context is supplied without enabling SSL.
    """
    if tls_ctx and not use_ssl:
        raise ValueError("Cannot specify a TLS context and not use SSL!")

    new_server = ldap3.Server(
        hostname,
        port=port,
        use_ssl=use_ssl,
        tls=tls_ctx,
    )
    self._server_pool.add(new_server)
    return new_server
python
{ "resource": "" }
q272405
LDAP3LoginManager._decontextualise_connection
test
def _decontextualise_connection(self, connection):
    '''Remove a connection from the appcontext, when one is active and
    the connection is tracked there.

    Args:
        connection (ldap3.Connection): connection to remove from the
            appcontext
    '''
    ctx = stack.top
    if ctx is None:
        return
    if connection in ctx.ldap3_manager_connections:
        ctx.ldap3_manager_connections.remove(connection)
python
{ "resource": "" }
q272406
LDAP3LoginManager.teardown
test
def teardown(self, exception):
    '''Cleanup after a request. Close any open connections.

    Args:
        exception: the exception (if any) that ended the request;
            unused here but required by the Flask teardown signature.
    '''
    ctx = stack.top
    if ctx is None:
        return

    if hasattr(ctx, 'ldap3_manager_connections'):
        # BUG FIX: destroy_connection removes the connection from
        # ctx.ldap3_manager_connections, so iterating the live list
        # skipped every other entry. Iterate a snapshot instead.
        for connection in list(ctx.ldap3_manager_connections):
            self.destroy_connection(connection)

    if hasattr(ctx, 'ldap3_manager_main_connection'):
        log.debug(
            "Unbinding a connection used within the request context.")
        ctx.ldap3_manager_main_connection.unbind()
        ctx.ldap3_manager_main_connection = None
python
{ "resource": "" }
q272407
LDAP3LoginManager.authenticate
test
def authenticate(self, username, password):
    """An abstracted authentication method. Decides whether to perform a
    direct bind or a search bind based upon the login attribute configured
    in the config.

    Args:
        username (str): Username of the user to bind
        password (str): User's password to bind with.

    Returns:
        AuthenticationResponse
    """
    if self.config.get('LDAP_BIND_DIRECT_CREDENTIALS'):
        return self.authenticate_direct_credentials(username, password)

    rdn_attr = self.config.get('LDAP_USER_RDN_ATTR')
    login_attr = self.config.get('LDAP_USER_LOGIN_ATTR')
    if not self.config.get('LDAP_ALWAYS_SEARCH_BIND') and rdn_attr == login_attr:
        # The login field doubles as the RDN, so the user's DN can be
        # assembled directly and we can bind without searching first.
        return self.authenticate_direct_bind(username, password)

    # Otherwise we must search for the user's DN before attempting to
    # bind with their password.
    return self.authenticate_search_bind(username, password)
python
{ "resource": "" }
q272408
LDAP3LoginManager.authenticate_direct_bind
test
def authenticate_direct_bind(self, username, password):
    """Performs a direct bind. We can do this since the RDN is the same
    as the login attribute. Hence we just string together a dn to find
    this user with.

    Args:
        username (str): Username of the user to bind (the field specified
            as LDAP_BIND_RDN_ATTR)
        password (str): User's password to bind with.

    Returns:
        AuthenticationResponse
    """
    # Assemble the user's DN directly from the RDN attribute and the
    # configured search base — no search needed in this mode.
    bind_user = '{rdn}={username},{user_search_dn}'.format(
        rdn=self.config.get('LDAP_USER_RDN_ATTR'),
        username=username,
        user_search_dn=self.full_user_search_dn,
    )

    connection = self._make_connection(
        bind_user=bind_user,
        bind_password=password,
    )

    response = AuthenticationResponse()

    try:
        # A successful bind IS the authentication check.
        connection.bind()
        log.debug(
            "Authentication was successful for user '{0}'".format(username))
        response.status = AuthenticationResponseStatus.success
        # Get user info here. The already-bound connection is reused so no
        # extra bind is performed.
        user_info = self.get_user_info(
            dn=bind_user, _connection=connection)
        response.user_dn = bind_user
        response.user_id = username
        response.user_info = user_info
        if self.config.get('LDAP_SEARCH_FOR_GROUPS'):
            response.user_groups = self.get_user_groups(
                dn=bind_user, _connection=connection)

    except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
        # Wrong password (or unknown DN): an expected failure mode.
        log.debug(
            "Authentication was not successful for user '{0}'".format(username))
        response.status = AuthenticationResponseStatus.fail
    except Exception as e:
        # Any other LDAP/network error also yields a failed response
        # rather than propagating to the caller.
        log.error(e)
        response.status = AuthenticationResponseStatus.fail

    # Always tear the connection down, success or failure.
    self.destroy_connection(connection)
    return response
python
{ "resource": "" }
q272409
LDAP3LoginManager.authenticate_search_bind
test
def authenticate_search_bind(self, username, password):
    """Performs a search bind to authenticate a user. This is
    required when a the login attribute is not the same
    as the RDN, since we cannot string together their DN on
    the fly, instead we have to find it in the LDAP, then attempt
    to bind with their credentials.

    Args:
        username (str): Username of the user to bind (the field specified
            as LDAP_BIND_LOGIN_ATTR)
        password (str): User's password to bind with when we find their dn.

    Returns:
        AuthenticationResponse
    """
    # First bind with the (optionally anonymous) service account so we
    # can search for the user's DN.
    connection = self._make_connection(
        bind_user=self.config.get('LDAP_BIND_USER_DN'),
        bind_password=self.config.get('LDAP_BIND_USER_PASSWORD'),
    )

    try:
        connection.bind()
        log.debug("Successfully bound to LDAP as '{0}' for search_bind method".format(
            self.config.get('LDAP_BIND_USER_DN') or 'Anonymous'
        ))
    except Exception as e:
        # Service bind failed: clean up and return an (empty, failed)
        # response rather than raising.
        self.destroy_connection(connection)
        log.error(e)
        return AuthenticationResponse()

    # Find the user in the search path.
    user_filter = '({search_attr}={username})'.format(
        search_attr=self.config.get('LDAP_USER_LOGIN_ATTR'),
        username=username
    )
    search_filter = '(&{0}{1})'.format(
        self.config.get('LDAP_USER_OBJECT_FILTER'),
        user_filter,
    )

    log.debug(
        "Performing an LDAP Search using filter '{0}', base '{1}', "
        "and scope '{2}'".format(
            search_filter,
            self.full_user_search_dn,
            self.config.get('LDAP_USER_SEARCH_SCOPE')
        ))

    connection.search(
        search_base=self.full_user_search_dn,
        search_filter=search_filter,
        search_scope=getattr(
            ldap3, self.config.get('LDAP_USER_SEARCH_SCOPE')),
        attributes=self.config.get('LDAP_GET_USER_ATTRIBUTES')
    )

    response = AuthenticationResponse()

    # Reject when no user matched, or (if configured) when the search is
    # ambiguous because multiple users matched.
    if len(connection.response) == 0 or \
        (self.config.get('LDAP_FAIL_AUTH_ON_MULTIPLE_FOUND') and
         len(connection.response) > 1):
        # Don't allow them to log in.
        log.debug(
            "Authentication was not successful for user '{0}'".format(username))
    else:
        for user in connection.response:
            # Attempt to bind with each user we find until we can find
            # one that works.

            if 'type' not in user or user.get('type') != 'searchResEntry':
                # Issue #13 - Don't return non-entry results.
                continue

            user_connection = self._make_connection(
                bind_user=user['dn'],
                bind_password=password
            )

            log.debug(
                "Directly binding a connection to a server with "
                "user:'{0}'".format(user['dn']))
            try:
                user_connection.bind()
                log.debug(
                    "Authentication was successful for user '{0}'".format(username))
                response.status = AuthenticationResponseStatus.success

                # Populate User Data
                user['attributes']['dn'] = user['dn']
                response.user_info = user['attributes']
                response.user_id = username
                response.user_dn = user['dn']
                if self.config.get('LDAP_SEARCH_FOR_GROUPS'):
                    # NOTE: groups are searched on the service connection,
                    # not the per-user one.
                    response.user_groups = self.get_user_groups(
                        dn=user['dn'], _connection=connection)

                self.destroy_connection(user_connection)
                break

            except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
                log.debug(
                    "Authentication was not successful for "
                    "user '{0}'".format(username))
                response.status = AuthenticationResponseStatus.fail
            except Exception as e:  # pragma: no cover
                # This should never happen, however in case ldap3 does ever
                # throw an error here, we catch it and log it
                log.error(e)
                response.status = AuthenticationResponseStatus.fail

            # Failed attempt: destroy the per-user connection and move on
            # to the next candidate entry.
            self.destroy_connection(user_connection)

    self.destroy_connection(connection)
    return response
python
{ "resource": "" }
q272410
LDAP3LoginManager.get_user_groups
test
def get_user_groups(self, dn, group_search_dn=None, _connection=None):
    """Gets a list of groups a user at dn is a member of

    Args:
        dn (str): The dn of the user to find memberships for.
        _connection (ldap3.Connection): A connection object to use when
            searching. If not given, a temporary connection will be
            created, and destroyed after use.
        group_search_dn (str): The search dn for groups. Defaults to
            ``'{LDAP_GROUP_DN},{LDAP_BASE_DN}'``.

    Returns:
        list: A list of LDAP groups the user is a member of.
    """
    # Track whether this call owns the connection and must clean it up.
    owns_connection = not _connection
    connection = _connection
    if owns_connection:
        connection = self._make_connection(
            bind_user=self.config.get('LDAP_BIND_USER_DN'),
            bind_password=self.config.get('LDAP_BIND_USER_PASSWORD')
        )
        connection.bind()

    # Escape the DN before interpolating it into the search filter.
    safe_dn = ldap3.utils.conv.escape_filter_chars(dn)
    search_filter = '(&{group_filter}({members_attr}={user_dn}))'.format(
        group_filter=self.config.get('LDAP_GROUP_OBJECT_FILTER'),
        members_attr=self.config.get('LDAP_GROUP_MEMBERS_ATTR'),
        user_dn=safe_dn
    )

    search_base = group_search_dn or self.full_group_search_dn
    log.debug(
        "Searching for groups for specific user with filter '{0}' "
        ", base '{1}' and scope '{2}'".format(
            search_filter,
            search_base,
            self.config.get('LDAP_GROUP_SEARCH_SCOPE')
        ))

    connection.search(
        search_base=search_base,
        search_filter=search_filter,
        attributes=self.config.get('LDAP_GET_GROUP_ATTRIBUTES'),
        search_scope=getattr(
            ldap3, self.config.get('LDAP_GROUP_SEARCH_SCOPE'))
    )

    results = []
    for entry in connection.response:
        # Issue #13 - Don't return non-entry results.
        if entry.get('type') != 'searchResEntry':
            continue
        group_data = entry['attributes']
        group_data['dn'] = entry['dn']
        results.append(group_data)

    if owns_connection:
        # We made a connection, so we need to kill it.
        self.destroy_connection(connection)

    return results
python
{ "resource": "" }
q272411
LDAP3LoginManager.get_user_info
test
def get_user_info(self, dn, _connection=None):
    """Gets info about a user specified at dn.

    Args:
        dn (str): The dn of the user to find
        _connection (ldap3.Connection): A connection object to use when
            searching. If not given, a temporary connection will be
            created, and destroyed after use.

    Returns:
        dict: A dictionary of the user info from LDAP
    """
    user_filter = self.config.get('LDAP_USER_OBJECT_FILTER')
    user_attributes = self.config.get("LDAP_GET_USER_ATTRIBUTES")
    return self.get_object(
        dn=dn,
        filter=user_filter,
        attributes=user_attributes,
        _connection=_connection,
    )
python
{ "resource": "" }
q272412
LDAP3LoginManager.get_user_info_for_username
test
def get_user_info_for_username(self, username, _connection=None):
    """Gets info about a user at a specified username by searching the
    Users DN. Username attribute is the same as specified as
    LDAP_USER_LOGIN_ATTR.

    Args:
        username (str): Username of the user to search for.
        _connection (ldap3.Connection): A connection object to use when
            searching. If not given, a temporary connection will be
            created, and destroyed after use.

    Returns:
        dict: A dictionary of the user info from LDAP
    """
    login_attr = self.config.get('LDAP_USER_LOGIN_ATTR')
    object_filter = self.config.get('LDAP_USER_OBJECT_FILTER')
    # AND the username match with the configured user object filter.
    ldap_filter = '(&({0}={1}){2})'.format(login_attr, username, object_filter)

    return self.get_object(
        dn=self.full_user_search_dn,
        filter=ldap_filter,
        attributes=self.config.get("LDAP_GET_USER_ATTRIBUTES"),
        _connection=_connection,
    )
python
{ "resource": "" }
q272413
LDAP3LoginManager.get_object
test
def get_object(self, dn, filter, attributes, _connection=None):
    """Gets an object at the specified dn and returns it.

    Args:
        dn (str): The dn of the object to find.
        filter (str): The LDAP syntax search filter.
        attributes (list): A list of LDAP attributes to get when searching.
        _connection (ldap3.Connection): A connection object to use when
            searching. If not given, a temporary connection will be
            created, and destroyed after use.

    Returns:
        dict: A dictionary of the object info from LDAP, or None when
        nothing was found.
    """
    # Track whether this call owns the connection and must clean it up.
    owns_connection = not _connection
    connection = _connection
    if owns_connection:
        connection = self._make_connection(
            bind_user=self.config.get('LDAP_BIND_USER_DN'),
            bind_password=self.config.get('LDAP_BIND_USER_PASSWORD')
        )
        connection.bind()

    connection.search(
        search_base=dn,
        search_filter=filter,
        attributes=attributes,
    )

    data = None
    if connection.response:
        first = connection.response[0]
        data = first['attributes']
        data['dn'] = first['dn']

    if owns_connection:
        # We made a connection, so we need to kill it.
        self.destroy_connection(connection)

    return data
python
{ "resource": "" }
q272414
LDAP3LoginManager.connection
test
def connection(self):
    """Convenience property for externally accessing an authenticated
    connection to the server. This connection is automatically
    handled by the appcontext, so you do not have to perform an unbind.

    Returns:
        ldap3.Connection: A bound ldap3.Connection

    Raises:
        ldap3.core.exceptions.LDAPException: Since this method is performing
            a bind on behalf of the caller. You should handle this case
            occuring, such as invalid service credentials.
    """
    ctx = stack.top
    if ctx is None:
        raise Exception("Working outside of the Flask application "
                        "context. If you wish to make a connection outside of a flask"
                        " application context, please handle your connections "
                        "and use manager.make_connection()")

    # Reuse the request-scoped connection when one already exists.
    if hasattr(ctx, 'ldap3_manager_main_connection'):
        return ctx.ldap3_manager_main_connection

    connection = self._make_connection(
        bind_user=self.config.get('LDAP_BIND_USER_DN'),
        bind_password=self.config.get('LDAP_BIND_USER_PASSWORD'),
        contextualise=False
    )
    connection.bind()
    # FIX: removed a redundant `if ctx is not None` re-check — ctx cannot
    # be None here because we already raised above.
    ctx.ldap3_manager_main_connection = connection
    return connection
python
{ "resource": "" }
q272415
LDAP3LoginManager.make_connection
test
def make_connection(self, bind_user=None, bind_password=None, **kwargs):
    """Make a connection to the LDAP Directory.

    Args:
        bind_user (str): User to bind with. If `None`, AUTH_ANONYMOUS is
            used, otherwise authentication specified with
            config['LDAP_BIND_AUTHENTICATION_TYPE'] is used.
        bind_password (str): Password to bind to the directory with
        **kwargs (dict): Additional arguments to pass to the
            ``ldap3.Connection``

    Returns:
        ldap3.Connection: An unbound ldap3.Connection. You should handle
            exceptions upon bind if you use this internal method.
    """
    # Public variant: never attach the connection to the appcontext.
    return self._make_connection(
        bind_user,
        bind_password,
        contextualise=False,
        **kwargs
    )
python
{ "resource": "" }
q272416
LDAP3LoginManager._make_connection
test
def _make_connection(self, bind_user=None, bind_password=None,
                     contextualise=True, **kwargs):
    """Make a connection.

    Args:
        bind_user (str): User to bind with. If `None`, AUTH_ANONYMOUS is
            used, otherwise authentication specified with
            config['LDAP_BIND_AUTHENTICATION_TYPE'] is used.
        bind_password (str): Password to bind to the directory with
        contextualise (bool): If true (default), will add this connection
            to the appcontext so it can be unbound upon app_teardown.

    Returns:
        ldap3.Connection: An unbound ldap3.Connection. You should handle
            exceptions upon bind if you use this internal method.
    """
    # Anonymous unless a bind user is given, in which case use the
    # configured authentication type.
    if bind_user:
        authentication = getattr(ldap3, self.config.get(
            'LDAP_BIND_AUTHENTICATION_TYPE'))
    else:
        authentication = ldap3.ANONYMOUS

    log.debug("Opening connection with bind user '{0}'".format(
        bind_user or 'Anonymous'))

    connection = ldap3.Connection(
        server=self._server_pool,
        read_only=self.config.get('LDAP_READONLY'),
        user=bind_user,
        password=bind_password,
        client_strategy=ldap3.SYNC,
        authentication=authentication,
        check_names=self.config['LDAP_CHECK_NAMES'],
        raise_exceptions=True,
        **kwargs
    )

    if contextualise:
        self._contextualise_connection(connection)
    return connection
python
{ "resource": "" }
q272417
LDAP3LoginManager.destroy_connection
test
def destroy_connection(self, connection):
    """Destroys a connection. Removes the connection from the appcontext,
    and unbinds it.

    Args:
        connection (ldap3.Connection): The connnection to destroy
    """
    conn_addr = hex(id(connection))
    log.debug("Destroying connection at <{0}>".format(conn_addr))
    self._decontextualise_connection(connection)
    connection.unbind()
python
{ "resource": "" }
q272418
search
test
def search(self, query=None, args=None):
    '''query a s3 endpoint for an image based on a string

    EXAMPLE QUERIES:

    [empty]          list all container collections
    vsoch/dinosaur   look for containers with name vsoch/dinosaur
    '''
    if query is None:
        # Search collections across all fields
        return self._search_all()
    return self._container_search(query)
python
{ "resource": "" }
q272419
label_search
test
def label_search(self, key=None, value=None):
    '''search across labels, optionally constrained by a key and/or a
    value. With neither, all labels are listed in summary form; with a
    key and/or value, matching labels are shown with their containers.

    Returns the table rows that were printed; exits with status 0 when
    nothing matched.
    '''
    # Labels are matched case-insensitively on the server side.
    if key is not None:
        key = key.lower()
    if value is not None:
        value = value.lower()

    show_details = True

    # Pick the endpoint based on which of key/value were supplied.
    # NOTE(review): the key-only and value-only URLs interpolate the term
    # *before* the '/key' or '/value' suffix — looks intentional for this
    # API but verify against the server routes.
    if key is None and value is None:
        url = '%s/labels/search' % (self.base)
        show_details = False

    elif key is not None and value is not None:
        url = '%s/labels/search/%s/key/%s/value' % (self.base, key, value)

    elif key is None:
        url = '%s/labels/search/%s/value' % (self.base, value)

    else:
        url = '%s/labels/search/%s/key' % (self.base, key)

    result = self._get(url)
    if len(result) == 0:
        bot.info("No labels found.")
        sys.exit(0)

    bot.info("Labels\n")

    rows = []
    for l in result:
        if show_details is True:
            # Detailed view: label plus the full container listing.
            entry = ["%s:%s" %(l['key'],l['value']),
                     "\n%s\n\n" %"\n".join(l['containers'])]
        else:
            # Summary view: container count plus the label itself.
            entry = ["N=%s" %len(l['containers']),
                     "%s:%s" %(l['key'],l['value']) ]
        rows.append(entry)
    bot.table(rows)
    return rows
python
{ "resource": "" }
q272420
search
test
def search(self, query=None, args=None):
    '''query a GitLab artifacts folder for a list of images.

    A collection query of the form <collection>/<repo> is required;
    without one the client exits with an error message.
    '''
    if query is None:
        bot.exit('You must include a collection query, <collection>/<repo>')
    # or default to listing (searching) all things.
    return self._search_all(query)
python
{ "resource": "" }
q272421
search_all
test
def search_all(self, collection, job_id=None):
    '''a "show all" search that doesn't require a query. The user is
    shown browse URLs for each successful job's zip artifacts, since the
    GitLab API cannot list artifact contents directly.

    Returns the printed table rows ([job_id, browser_url] pairs); exits
    with status 0 when nothing was found.
    '''
    results = [['job_id', 'browser']]

    url = "%s/projects/%s/jobs" %(self.api_base,
        quote_plus(collection.strip('/')))

    response = requests.get(url, headers=self.headers)

    # Anything other than 200 falls through to the "none found" exit.
    if response.status_code == 200:
        jobs = response.json()

        # We can't get a listing of artifacts
        # https://gitlab.com/gitlab-org/gitlab-ce/issues/51515
        # Parse through jobs (each can have different tags for a collection):
        for job in jobs:

            # Only show jobs that are successful
            if job['status'] == 'success':
                name = job['name']
                for artifact in job['artifacts']:
                    if artifact['filename'].endswith('zip'):

                        # The user must browse to see the names
                        artifact_url = ("%s/%s/-/jobs/%s/artifacts/browse/%s"
                            %(self.base , collection, job['id'], name))
                        results.append([str(job['id']), artifact_url])

    # Only the header row means no artifacts were found.
    if len(results) == 1:
        bot.info("No potential archives found in artifacts.")
        sys.exit(0)

    bot.info("Artifact Browsers (you will need path and job id for pull)")
    bot.table(results)
    return results
python
{ "resource": "" }
q272422
ApiConnection.speak
test
def speak(self):
    '''announce the client name and active database, then give the
    subclass a chance to add its own output by implementing ``_speak``.
    '''
    # Note: only an explicit quiet=False speaks, matching the original
    # `is False` identity check.
    if self.quiet is not False:
        return
    bot.info('[client|%s] [database|%s]' %(self.client_name,
                                           self.database))
    self._speak()
python
{ "resource": "" }
q272423
ApiConnection.announce
test
def announce(self, command=None):
    '''the client will announce itself given that a command is not in a
    particular predefined list.
    '''
    quiet_commands = ['get']
    if command is None:
        return
    if command in quiet_commands:
        return
    if self.quiet is False:
        self.speak()
python
{ "resource": "" }
q272424
Client._update_secrets
test
def _update_secrets(self): '''The user is required to have an application secrets file in his or her environment. The client exists with error if the variable isn't found. ''' env = 'SREGISTRY_GOOGLE_DRIVE_CREDENTIALS' self._secrets = self._get_and_update_setting(env) self._base = self._get_and_update_setting('SREGISTRY_GOOGLE_DRIVE_ROOT') if self._base is None: self._base = 'sregistry' if self._secrets is None: bot.error('You must export %s to use Google Drive client' %env) bot.info("https://singularityhub.github.io/sregistry-cli/client-google-drive") sys.exit(1)
python
{ "resource": "" }
q272425
update_headers
test
def update_headers(self, fields=None):
    '''update headers with a token & other fields. The headers are reset
    first when none exist yet, then any extra fields are merged in.
    '''
    # Only reset when there is no usable headers attribute yet.
    if getattr(self, 'headers', None) is None:
        self._reset_headers()

    if fields is not None:
        for name, val in fields.items():
            self.headers[name] = val

    header_names = ",".join(list(self.headers.keys()))
    bot.debug("Headers found: %s" %header_names)
python
{ "resource": "" }
q272426
require_secrets
test
def require_secrets(self, params=None):
    '''require secrets ensures that the client has the secrets file, and
    specifically has one or more parameters defined. If params is None,
    only a check is done for the file.

    Parameters
    ==========
    params: a list of keys to lookup in the client secrets, eg:
            secrets[client_name][params1] should not be in [None,''] or
            not set
    '''
    name = self.client_name

    # Check 1: a secrets file with an entry for this client must exist.
    secrets = getattr(self, 'secrets', None)
    if secrets is None or name not in secrets:
        bot.error('%s requires client secrets.' %name)
        sys.exit(1)

    # Check 2: each requested parameter must be present and non-empty.
    if params is None:
        return
    if not isinstance(params, list):
        params = [params]
    for param in params:
        entry = secrets[name]
        if param not in entry or entry[param] in [None, '']:
            bot.error('Missing %s in client secrets.' %param)
            sys.exit(1)
python
{ "resource": "" }
q272427
download
test
def download(url, file_name, headers=None, show_progress=True):
    '''stream to a temporary file, rename on successful completion

    Parameters
    ==========
    file_name: the file name to stream to
    url: the url to stream from
    headers: additional headers to add
    show_progress: kept for interface compatibility (progress display is
        handled inside stream based on the response headers)
    '''
    # Stream into a sibling temp file so a partial download never
    # clobbers an existing file_name.
    fd, tmp_file = tempfile.mkstemp(prefix=("%s.tmp." % file_name))
    os.close(fd)

    if DISABLE_SSL_CHECK is True:
        bot.warning('Verify of certificates disabled! ::TESTING USE ONLY::')

    # FIX: removed an unused local (`verify = not DISABLE_SSL_CHECK`);
    # stream() applies the SSL-check setting itself.
    response = stream(url, headers=headers, stream_to=tmp_file)

    shutil.move(tmp_file, file_name)
    return file_name
python
{ "resource": "" }
q272428
stream
test
def stream(url, headers, stream_to=None, retry=True):
    '''stream is a get that will stream to file_name. Since this is a
    worker task, it differs from the client provided version in that it
    requires headers.

    Parameters
    ==========
    url: the url to stream from
    headers: request headers (required here, unlike the client version)
    stream_to: the file path to stream the content to
    retry: when True, refresh the token once on a 401/403 and retry

    Returns
    =======
    stream_to on success; exits the process on any other failure.
    '''
    bot.debug("GET %s" % url)

    if DISABLE_SSL_CHECK is True:
        bot.warning('Verify of certificates disabled! ::TESTING USE ONLY::')

    # Ensure headers are present, update if not
    response = requests.get(url,
                            headers=headers,
                            verify=not DISABLE_SSL_CHECK,
                            stream=True)

    # If we get permissions error, one more try with updated token.
    # BUG FIX: the retry flag was never consulted, so repeated 401/403
    # responses recursed without bound; now a second failure falls
    # through to the error exit below.
    if response.status_code in [401, 403] and retry:
        headers = update_token(headers)
        return stream(url, headers, stream_to, retry=False)

    # Successful Response
    if response.status_code == 200:

        # Keep user updated with Progress Bar when the size is known
        content_size = None
        if 'Content-Length' in response.headers:
            progress = 0
            content_size = int(response.headers['Content-Length'])
            bot.show_progress(progress,content_size,length=35)

        chunk_size = 1 << 20
        with open(stream_to,'wb') as filey:
            for chunk in response.iter_content(chunk_size=chunk_size):
                filey.write(chunk)
                if content_size is not None:
                    progress+=chunk_size
                    bot.show_progress(iteration=progress,
                                      total=content_size,
                                      length=35,
                                      carriage_return=False)

        # Newline to finish download
        sys.stdout.write('\n')
        return stream_to

    bot.error("Problem with stream, response %s" %(response.status_code))
    sys.exit(1)
python
{ "resource": "" }
q272429
update_token
test
def update_token(headers):
    '''update_token uses HTTP basic authentication to attempt to
    authenticate given a 401 response. We take as input previous headers,
    and update them.

    Parameters
    ==========
    headers: the previous request headers, updated in place with a fresh
        ECR Basic authorization token and returned.
    '''
    # FIX: narrowed a bare `except:` to ImportError — only a missing
    # awscli should trigger the install hint.
    try:
        from awscli.clidriver import create_clidriver
    except ImportError:
        bot.exit('Please install pip install sregistry[aws]')

    driver = create_clidriver()
    aws = driver.session.create_client('ecr')
    tokens = aws.get_authorization_token()
    token = tokens['authorizationData'][0]['authorizationToken']

    try:
        token = {"Authorization": "Basic %s" % token}
        headers.update(token)
    except Exception:
        bot.error("Error getting token.")
        sys.exit(1)

    return headers
python
{ "resource": "" }
q272430
get_or_create_folder
test
def get_or_create_folder(self, folder):
    '''Return the metadata for a folder at the drive root, creating the
    folder first when it does not already exist.

    folder = self._get_or_create_folder(self._base)
    $ folder
    {'id': '1pXR5S8wufELh9Q-jDkhCoYu-BL1NqN9y'}
    '''
    q = "mimeType='application/vnd.google-apps.folder' and name='%s'" %folder
    listing = self._service.files().list(q=q, spaces='drive').execute()
    matches = listing.get('files',[])

    # An empty listing means the folder must be created first.
    if not matches:
        return self._create_folder(folder)
    return matches[0]
python
{ "resource": "" }
q272431
Client._read_response
test
def _read_response(self,response, field="detail"): '''attempt to read the detail provided by the response. If none, default to using the reason''' try: message = json.loads(response._content.decode('utf-8'))[field] except: message = response.reason return message
python
{ "resource": "" }
q272432
Client.get_bucket
test
def get_bucket(self):
    '''given a bucket name and a client that is initialized, get or
    create the bucket. The result is cached on self.bucket and returned.
    '''
    for attr in ['bucket_name', 's3']:
        if not hasattr(self, attr):
            bot.exit('client is missing attribute %s' %(attr))

    # See if the bucket is already existing
    self.bucket = None
    matches = [b for b in self.s3.buckets.all() if b.name == self.bucket_name]
    if matches:
        self.bucket = matches[-1]

    # If the bucket doesn't exist, create it
    if self.bucket is None:
        self.bucket = self.s3.create_bucket(Bucket=self.bucket_name)
        bot.info('Created bucket %s' % self.bucket.name )

    return self.bucket
python
{ "resource": "" }
q272433
Client._update_secrets
test
def _update_secrets(self):
    '''update secrets will take a secrets credential file either located
    at .sregistry or the environment variable SREGISTRY_CLIENT_SECRETS
    and update the current client secrets as well as the associated API
    base.
    '''
    self.secrets = read_client_secrets()
    if self.secrets is None:
        return
    if "registry" not in self.secrets:
        return
    registry = self.secrets['registry']
    if "base" in registry:
        self.base = registry['base']
        self._update_base()
python
{ "resource": "" }
q272434
Client._init_clients
test
def _init_clients(self):
    '''create the native app auth client for this client id, then load
    any cached transfer and auth tokens from settings.
    '''
    self._client = globus_sdk.NativeAppAuthClient(self._client_id)
    self._load_secrets()
python
{ "resource": "" }
q272435
Client._load_secrets
test
def _load_secrets(self): '''load the secrets credentials file with the Globus OAuthTokenResponse ''' # Second priority: load from cache self.auth = self._get_and_update_setting('GLOBUS_AUTH_RESPONSE') self.transfer = self._get_and_update_setting('GLOBUS_TRANSFER_RESPONSE')
python
{ "resource": "" }
q272436
logs
test
def logs(self, name=None):
    '''Print logs for a particular container, or the most recent log.

    The logs file is equivalent to the container name, but with
    extension .log.

    Parameters
    ==========
    name: the container name to print logs for. If None, the log with
          the most recent time_created is printed instead.

    Returns
    =======
    content: the printed log content, or None if nothing matched
    '''
    content = None
    results = self._list_logs()

    # If we are searching for a name
    if name is not None:
        for result in results:
            matches = False

            # Case 1: the name is in the storage path
            if name in result.name:
                matches = True

            # Case 2: match in metadata values
            for val in result.metadata.values():
                if name in val:
                    matches = True

            if matches is True:
                content = self._print_log(result.name)

    # Otherwise return the most recently created log
    else:
        if len(results) > 0:
            latest = results[0]
            for result in results:
                if result.time_created >= latest.time_created:
                    latest = result
            # Fixed: previously printed the last iterated result rather
            # than the computed most-recent one; also removed a leftover
            # debug print of the raw results list.
            content = self._print_log(latest.name)

    return content
python
{ "resource": "" }
q272437
list_logs
test
def list_logs(self): '''return a list of logs. We return any file that ends in .log ''' results = [] for image in self._bucket.list_blobs(): if image.name.endswith('log'): results.append(image) if len(results) == 0: bot.info("No containers found, based on extension .log") return results
python
{ "resource": "" }
q272438
create_endpoint_folder
test
def create_endpoint_folder(self, endpoint_id, folder): '''create an endpoint folder, catching the error if it exists. Parameters ========== endpoint_id: the endpoint id parameters folder: the relative path of the folder to create ''' try: res = self.transfer_client.operation_mkdir(endpoint_id, folder) bot.info("%s --> %s" %(res['message'], folder)) except TransferAPIError: bot.info('%s already exists at endpoint' %folder)
python
{ "resource": "" }
q272439
init_transfer_client
test
def init_transfer_client(self): '''return a transfer client for the user''' if self._tokens_need_update(): self._update_tokens() access_token = self.transfer['access_token'] # Createe Refresh Token Authorizer authorizer = globus_sdk.RefreshTokenAuthorizer( self.transfer['refresh_token'], self._client, access_token=self.transfer['access_token'], expires_at=self.transfer['expires_at_seconds']) self.transfer_client = globus_sdk.TransferClient(authorizer=authorizer)
python
{ "resource": "" }
q272440
search_all
test
def search_all(self): '''a "list all" search that doesn't require a query. Here we return to the user all objects that have custom properties value type set to container, which is set when the image is pushed. IMPORTANT: the upload function adds this metadata. For a container to be found by the client, it must have the properties value with type as container. It also should have a "uri" in properties to show the user, otherwise the user will have to query / download based on the id ''' results = self._list_containers() matches = [] bot.info("[drive://%s] Containers" %self._base) rows = [] for i in results: # Fallback to the image name without the extension uri = i['name'].replace('.simg','') # However the properties should include the uri if 'properties' in i: if 'uri' in i['properties']: uri = i['properties']['uri'] rows.append([i['id'],uri]) # Give the user back a uri i['uri'] = uri matches.append(i) bot.custom(prefix=" [drive://%s]" %self._base, message="\t\t[id]\t[uri]", color="PURPLE") bot.table(rows) return matches
python
{ "resource": "" }
q272441
status
test
def status(backend): '''print the status for all or one of the backends. ''' print('[backend status]') settings = read_client_secrets() print('There are %s clients found in secrets.' %len(settings)) if 'SREGISTRY_CLIENT' in settings: print('active: %s' %settings['SREGISTRY_CLIENT']) update_secrets(settings) else: print('There is no active client.')
python
{ "resource": "" }
q272442
add
test
def add(backend, variable, value, force=False): '''add the variable to the config ''' print('[add]') settings = read_client_secrets() # If the variable begins with the SREGISTRY_<CLIENT> don't add it prefix = 'SREGISTRY_%s_' %backend.upper() if not variable.startswith(prefix): variable = '%s%s' %(prefix, variable) # All must be uppercase variable = variable.upper() bot.info("%s %s" %(variable, value)) # Does the setting already exist? if backend in settings: if variable in settings[backend] and force is False: previous = settings[backend][variable] bot.error('%s is already set as %s. Use --force to override.' %(variable, previous)) sys.exit(1) if backend not in settings: settings[backend] = {} settings[backend][variable] = value update_secrets(settings)
python
{ "resource": "" }
q272443
remove
test
def remove(backend, variable):
    '''Remove a variable from the config, if found.

    Settings are stored uppercase (see add), so the variable is
    uppercased *before* the prefixed form is derived; previously a
    lowercase input produced a lowercase prefixed key that could
    never match a stored setting.

    Parameters
    ==========
    backend: the backend section to remove the variable from
    variable: the variable name, with or without the client prefix
    '''
    print('[remove]')
    settings = read_client_secrets()

    # All stored settings are uppercase, so normalize first
    variable = variable.upper()

    # If the variable doesn't begin with SREGISTRY_<CLIENT>_, also
    # derive the prefixed spelling so either form gets removed
    prefixed = variable
    prefix = 'SREGISTRY_%s_' % backend.upper()
    if not variable.startswith(prefix):
        prefixed = '%s%s' % (prefix, variable)

    bot.info(variable)

    # Remove both the bare and the prefixed spelling, if present
    if backend in settings:
        if variable in settings[backend]:
            del settings[backend][variable]
        if prefixed in settings[backend]:
            del settings[backend][prefixed]
        update_secrets(settings)
python
{ "resource": "" }
q272444
activate
test
def activate(backend): '''activate a backend by adding it to the .sregistry configuration file. ''' settings = read_client_secrets() if backend is not None: settings['SREGISTRY_CLIENT'] = backend update_secrets(settings) print('[activate] %s' %backend)
python
{ "resource": "" }
q272445
delete_backend
test
def delete_backend(backend): '''delete a backend, and update the secrets file ''' settings = read_client_secrets() if backend in settings: del settings[backend] # If the backend was the active client, remove too if 'SREGISTRY_CLIENT' in settings: if settings['SREGISTRY_CLIENT'] == backend: del settings['SREGISTRY_CLIENT'] update_secrets(settings) print('[delete] %s' %backend) else: if backend is not None: print('%s is not a known client.' %backend) else: print('Please specify a backend to delete.')
python
{ "resource": "" }
q272446
Client._update_base
test
def _update_base(self, image): ''' update a base based on an image name, meaning detecting a particular registry and if necessary, updating the self.base. When the image name is parsed, the base will be given to remove the registry. ''' base = None # Google Container Cloud if "gcr.io" in image: base = 'gcr.io' self._set_base(default_base=base) self._update_secrets() return base
python
{ "resource": "" }
q272447
basic_auth_header
test
def basic_auth_header(username, password):
    '''Build a base64-encoded Basic Authorization header.

    The username and password are joined with a colon, base64
    encoded, and returned inside a dictionary suitable for merging
    into request headers to identify the client.

    Parameters
    ==========
    username: the username
    password: the password
    '''
    pair = "%s:%s" % (username, password)
    if sys.version_info[0] >= 3:
        encoded = base64.b64encode(bytes(pair, 'utf-8')).decode('utf-8')
    else:
        encoded = base64.b64encode(pair)
    return {"Authorization": "Basic %s" % encoded}
python
{ "resource": "" }
q272448
generate_signature
test
def generate_signature(payload, secret):
    '''Compute an HMAC-SHA256 hex digest of the endpoint-specific
    payload, keyed by the client secret, for signing a request.
    '''
    encoded_payload = _encode(payload)
    encoded_secret = _encode(secret)
    digest = hmac.new(encoded_secret,
                      digestmod=hashlib.sha256,
                      msg=encoded_payload)
    return digest.hexdigest()
python
{ "resource": "" }
q272449
generate_header_signature
test
def generate_header_signature(secret, payload, request_type):
    '''Build an SREGISTRY-HMAC-SHA256 Authorization header by signing
    the payload with the client secret, stamped with the request type
    and time of signing.
    '''
    # The credential identifies the request type and timestamp;
    # the signature covers the payload itself.
    timestamp = generate_timestamp()
    credential = "%s/%s" % (request_type, timestamp)
    signature = generate_signature(payload, secret)
    header = "SREGISTRY-HMAC-SHA256 Credential=%s,Signature=%s" % (credential, signature)
    return header
python
{ "resource": "" }
q272450
delete
test
def delete(self, url, headers=None, return_json=True, default_headers=True): '''delete request, use with caution ''' bot.debug('DELETE %s' %url) return self._call(url, headers=headers, func=requests.delete, return_json=return_json, default_headers=default_headers)
python
{ "resource": "" }
q272451
head
test
def head(self, url):
    '''Perform a HEAD request against the url, typically used for
    status code retrieval, etc.
    '''
    bot.debug('HEAD %s' %url)
    return self._call(url, func=requests.head)
python
{ "resource": "" }
q272452
paginate_get
test
def paginate_get(self, url, headers=None, return_json=True, start_page=None):
    '''Wrapper around get that follows paginated results.

    The endpoint is queried page by page (starting at page 1, or at
    start_page when given) and the "results" lists are concatenated.
    Endpoints that return a plain list are returned as-is.

    Parameters
    ==========
    url: the base url to paginate (assumed to already carry a query
         string, since '&page=N' is appended)
    headers: optional headers for the request
    return_json: return json if the response has it
    start_page: the page to start at (defaults to 1)
    '''
    geturl = '%s&page=1' %(url)
    if start_page is not None:
        geturl = '%s&page=%s' %(url, start_page)

    results = []
    while geturl is not None:
        # Fixed: previously this fetched the original url on every
        # iteration, so pagination never advanced past the first page
        # (and could loop forever when 'next' was non-null).
        result = self._get(geturl, headers=headers, return_json=return_json)

        # If we have pagination:
        if isinstance(result, dict):
            if 'results' in result:
                results = results + result['results']
            geturl = result['next']

        # No pagination is a list
        else:
            return result

    return results
python
{ "resource": "" }
q272453
verify
test
def verify(self):
    '''Decide whether requests should verify SSL certificates.

    Returns False (and warns the user) only when certificate checks
    have been explicitly disabled via DISABLE_SSL_CHECK -- intended
    for testing only, never for production.
    '''
    from sregistry.defaults import DISABLE_SSL_CHECK

    if DISABLE_SSL_CHECK is True:
        bot.warning('Verify of certificates disabled! ::TESTING USE ONLY::')

    return not DISABLE_SSL_CHECK
python
{ "resource": "" }
q272454
remove
test
def remove(self, image, force=False): '''delete an image to Singularity Registry''' q = parse_image_name(remove_uri(image)) # If the registry is provided in the uri, use it if q['registry'] == None: q['registry'] = self.base # If the base doesn't start with http or https, add it q = self._add_https(q) url = '%s/container/%s/%s:%s' % (q['registry'], q["collection"], q["image"], q["tag"]) SREGISTRY_EVENT = self.authorize(request_type="delete", names=q) headers = {'Authorization': SREGISTRY_EVENT } self._update_headers(fields=headers) continue_delete = True if force is False: response = input("Are you sure you want to delete %s?" % q['uri']) while len(response) < 1 or response[0].lower().strip() not in "ynyesno": response = input("Please answer yes or no: ") if response[0].lower().strip() in "no": continue_delete = False if continue_delete is True: response = self._delete(url) message = self._read_response(response) bot.info("Response %s, %s" %(response.status_code, message)) else: bot.info("Delete cancelled.")
python
{ "resource": "" }
q272455
get_lookup
test
def get_lookup():
    '''Execute sregistry/version.py and return its globals as a
    dictionary, exposing version metadata without needing to import
    the singularity package itself.
    '''
    namespace = {}
    path = os.path.join('sregistry', 'version.py')
    with open(path) as handle:
        source = handle.read()
    exec(source, namespace)
    return namespace
python
{ "resource": "" }
q272456
get_reqs
test
def get_reqs(lookup=None, key='INSTALL_REQUIRES'):
    '''Build a list of pip requirement strings from the version lookup.

    Each entry under the given key is a (name, meta) pair, where meta
    may pin an "exact_version" or a "min_version" (possibly None, in
    which case the bare name is used).

    Parameters
    ==========
    lookup: a lookup dict (from get_lookup); computed when not given
    key: the lookup key holding the requirements list

    Returns
    =======
    install_requires: e.g. ["requests==2.18.4", "six", "pygments>=2.1"]
    '''
    if lookup is None:  # identity check (was `== None`)
        lookup = get_lookup()

    install_requires = []
    for module_name, module_meta in lookup[key]:
        if "exact_version" in module_meta:
            dependency = "%s==%s" % (module_name, module_meta['exact_version'])
        elif module_meta.get('min_version') is None:
            # Covers both an explicit None and a missing key; previously
            # a missing key left `dependency` undefined.
            dependency = module_name
        else:
            dependency = "%s>=%s" % (module_name, module_meta['min_version'])
        install_requires.append(dependency)
    return install_requires
python
{ "resource": "" }
q272457
get_singularity_version
test
def get_singularity_version(singularity_version=None):
    '''Determine the installed Singularity version.

    Order of precedence: the argument, the SINGULARITY_VERSION
    environment variable, and finally asking the singularity binary
    itself. Returns None only when none of these are available.

    Parameters
    ==========
    singularity_version: if not defined, look for in environment. If
        still not found, try finding via executing --version.
    '''
    if singularity_version is None:
        singularity_version = os.environ.get("SINGULARITY_VERSION")

    # Not in environment: ask the installed binary directly
    if singularity_version is None:
        try:
            cmd = ['singularity', '--version']
            output = run_command(cmd)
            if isinstance(output['message'], bytes):
                output['message'] = output['message'].decode('utf-8')
            singularity_version = output['message'].strip('\n')
            bot.info("Singularity %s being used." % singularity_version)

        # Narrowed from a bare except: any failure to run or parse
        # the command means Singularity is likely not installed.
        except Exception:
            singularity_version = None
            bot.warning("Singularity version not found, so it's likely not installed.")

    return singularity_version
python
{ "resource": "" }
q272458
check_install
test
def check_install(software=None, quiet=True):
    '''Return True if the given software (default singularity) is
    installed, determined by attempting to run its --version command.
    The command line utils will not run without this check.

    Parameters
    ==========
    software: the executable to check for (default "singularity")
    quiet: when False, also print the discovered version (default True)
    '''
    if software is None:
        software = "singularity"

    cmd = [software, '--version']
    try:
        version = run_command(cmd, software)
    except: # FileNotFoundError
        return False

    if version is None:
        return False

    if quiet is False and version['return_code'] == 0:
        bot.info("Found %s version %s" % (software.upper(), version['message']))
    return True
python
{ "resource": "" }
q272459
get_installdir
test
def get_installdir():
    '''Return the installation directory of the application: the
    parent of the directory containing this module, as an absolute
    path.
    '''
    here = os.path.dirname(__file__)
    return os.path.abspath(os.path.dirname(here))
python
{ "resource": "" }
q272460
get_thumbnail
test
def get_thumbnail(): '''return the robot.png thumbnail from the database folder. if the user has exported a different image, use that instead. ''' from sregistry.defaults import SREGISTRY_THUMBNAIL if SREGISTRY_THUMBNAIL is not None: if os.path.exists(SREGISTRY_THUMBNAIL): return SREGISTRY_THUMBNAIL return "%s/database/robot.png" %get_installdir()
python
{ "resource": "" }
q272461
run_command
test
def run_command(cmd, sudo=False):
    '''Send a command to the terminal via subprocess, capturing its
    combined stdout/stderr and return code.

    Parameters
    ==========
    cmd: the command to send, should be a list for subprocess
    sudo: when True, prefix the command with sudo

    Returns
    =======
    dict with "message" (decoded output) and "return_code"
    '''
    if sudo is True:
        cmd = ['sudo'] + cmd

    try:
        process = Popen(cmd, stderr=STDOUT, stdout=PIPE)
    except FileNotFoundError:
        # Retry without the leading entry (e.g., sudo unavailable)
        cmd.pop(0)
        process = Popen(cmd, stderr=STDOUT, stdout=PIPE)

    message, return_code = process.communicate()[0], process.returncode
    output = {'message': message, 'return_code': return_code}

    if isinstance(output['message'], bytes):
        output['message'] = output['message'].decode('utf-8')

    return output
python
{ "resource": "" }
q272462
Client._get_metadata
test
def _get_metadata(self, image_file=None, dbx_metadata=None): '''this is a wrapper around the main client.get_metadata to first parse a Dropbox FileMetadata into a dicionary, then pass it on to the primary get_metadata function. Parameters ========== image_file: the full path to the image file that had metadata extracted metadata: the Dropbox FileMetadata to parse. ''' metadata = dict() if dbx_metadata is not None: for key in dbx_metadata.__dir__(): value = getattr(dbx_metadata, key) if type(value) in [str, datetime.datetime, bool, int, float]: metadata[key.strip('_')] = value return self.get_metadata(image_file, names=metadata)
python
{ "resource": "" }
q272463
Client._update_secrets
test
def _update_secrets(self): '''update secrets will look for a dropbox token in the environment at SREGISTRY_DROPBOX_TOKEN and if found, create a client. If not, an error message is returned and the client exits. ''' # Retrieve the user token. Exit if not found token = self._required_get_and_update('SREGISTRY_DROPBOX_TOKEN') # Create the dropbox client self.dbx = Dropbox(token) # Verify that the account is valid try: self.account = self.dbx.users_get_current_account() except AuthError as err: bot.error('Account invalid. Exiting.') sys.exit(1)
python
{ "resource": "" }
q272464
print_output
test
def print_output(response, output_file=None):
    '''Show build results on the console, and optionally mirror the
    same content to an output file.

    Parameters
    ==========
    response: the response from the builder, with metadata added
    output_file: if defined, write output also to file
    '''
    success = response['status'] == 'SUCCESS'

    # A successful build reports hash, size, and container location
    if success:
        bucket = response['artifacts']['objects']['location']
        obj = response['artifacts']['objects']['paths'][0]
        bot.custom("MD5HASH", response['file_hash'], 'CYAN')
        bot.custom("SIZE", response['size'], 'CYAN')
        bot.custom(response['status'], bucket + obj , 'CYAN')
    else:
        bot.custom(response['status'], 'see logs for details', 'CYAN')

    # The logs are shown regardless of build status
    bot.custom("LOGS", response['logUrl'], 'CYAN')

    # A public container also reports its url
    if "public_url" in response:
        bot.custom('URL', response['public_url'], 'CYAN')

    # Mirror the same lines to the output file, if requested
    if output_file is not None:
        with open(output_file, 'w') as filey:
            if success:
                filey.writelines('MD5HASH %s\n' % response['file_hash'])
                filey.writelines('SIZE %s\n' % response['size'])
                filey.writelines('%s %s%s\n' % (response['status'], bucket, obj))
            filey.writelines('LOGS %s\n' % response['logUrl'])
            if "public_url" in response:
                filey.writelines('URL %s\n' % response['public_url'])
python
{ "resource": "" }
q272465
kill
test
def kill(args): '''kill is a helper function to call the "kill" function of the client, meaning we bring down an instance. ''' from sregistry.main import Client as cli if len(args.commands) > 0: for name in args.commands: cli.destroy(name) sys.exit(0)
python
{ "resource": "" }
q272466
list_logs
test
def list_logs(args, container_name=None): '''list a specific log for a builder, or the latest log if none provided Parameters ========== args: the argparse object to look for a container name container_name: a default container name set to be None (show latest log) ''' from sregistry.main import Client as cli if len(args.commands) > 0: container_name = args.commands.pop(0) cli.logs(container_name) sys.exit(0)
python
{ "resource": "" }
q272467
Client.get_collections
test
def get_collections(self): '''get a listing of collections that the user has access to. ''' collections = [] for container in self.conn.get_account()[1]: collections.append(container['name']) return collections
python
{ "resource": "" }
q272468
Client._update_secrets
test
def _update_secrets(self): '''update secrets will look for a user and token in the environment If we find the values, cache and continue. Otherwise, exit with error ''' # Get the swift authentication type first. That will determine what we # will need to collect for proper authentication self.config['SREGISTRY_SWIFT_AUTHTYPE'] = self._required_get_and_update( 'SREGISTRY_SWIFT_AUTHTYPE') # Check what auth version is requested and setup the connection if self.config['SREGISTRY_SWIFT_AUTHTYPE'] == 'preauth': # Pre-Authenticated Token/URL - Use OS_AUTH_TOKEN/OS_STORAGE_URL # Retrieve the user token, user, and base. Exit if not found for envar in ['SREGISTRY_SWIFT_OS_AUTH_TOKEN', 'SREGISTRY_SWIFT_OS_STORAGE_URL' ]: self.config[envar] = self._required_get_and_update(envar) self.conn = swiftclient.Connection( preauthurl=self.config['SREGISTRY_SWIFT_OS_STORAGE_URL'], preauthtoken=self.config['SREGISTRY_SWIFT_OS_AUTH_TOKEN'] ) elif self.config['SREGISTRY_SWIFT_AUTHTYPE'] == 'keystonev3': # Keystone v3 Authentication # Retrieve the user token, user, and base. Exit if not found for envar in ['SREGISTRY_SWIFT_USER', 'SREGISTRY_SWIFT_TOKEN', 'SREGISTRY_SWIFT_URL']: self.config[envar] = self._required_get_and_update(envar) auth_url = '%s/v3' % self.config['SREGISTRY_SWIFT_URL'] # Setting to default as a safety. No v3 environment to test # May require ENV vars for real use. - M. Moore _os_options = { 'user_domain_name': 'Default', 'project_domain_name': 'Default', 'project_name': 'Default' } # Save the connection to use for some command self.conn = swiftclient.Connection( user=self.config['SREGISTRY_SWIFT_USER'], key=self.config['SREGISTRY_SWIFT_TOKEN'], os_options=_os_options, authurl=auth_url, auth_version='3' ) elif self.config['SREGISTRY_SWIFT_AUTHTYPE'] == 'keystonev2': # Keystone v2 Authentication # Retrieve the user token, user, and base. 
Exit if not found for envar in ['SREGISTRY_SWIFT_USER', 'SREGISTRY_SWIFT_TOKEN', 'SREGISTRY_SWIFT_TENANT', 'SREGISTRY_SWIFT_REGION', 'SREGISTRY_SWIFT_URL']: self.config[envar] = self._required_get_and_update(envar) # More human friendly to interact with auth_url = '%s/v2.0/' % self.config['SREGISTRY_SWIFT_URL'] # Set required OpenStack options for tenant/region _os_options = { 'tenant_name': self.config['SREGISTRY_SWIFT_TENANT'], 'region_name': self.config['SREGISTRY_SWIFT_REGION'] } # Save the connection to use for some command self.conn = swiftclient.Connection( user=self.config['SREGISTRY_SWIFT_USER'], key=self.config['SREGISTRY_SWIFT_TOKEN'], os_options=_os_options, authurl=auth_url, auth_version='2' ) else: # Legacy Authentication # Retrieve the user token, user, and base. Exit if not found for envar in ['SREGISTRY_SWIFT_USER', 'SREGISTRY_SWIFT_TOKEN', 'SREGISTRY_SWIFT_URL']: self.config[envar] = self._required_get_and_update(envar) # More human friendly to interact with auth_url = '%s/auth/' % self.config['SREGISTRY_SWIFT_URL'] # Save the connection to use for some command self.conn = swiftclient.Connection( user=self.config['SREGISTRY_SWIFT_USER'], key=self.config['SREGISTRY_SWIFT_TOKEN'], authurl=auth_url, )
python
{ "resource": "" }
q272469
Client._update_secrets
test
def _update_secrets(self): '''The user is required to have an application secrets file in his or her environment. The information isn't saved to the secrets file, but the client exists with error if the variable isn't found. ''' env = 'GOOGLE_APPLICATION_CREDENTIALS' self._secrets = self._get_and_update_setting(env) if self._secrets is None: bot.error('You must export %s to use Google Storage client' %env) sys.exit(1)
python
{ "resource": "" }
q272470
get_client
test
def get_client(image=None, quiet=False, **kwargs): ''' get the correct client depending on the driver of interest. The selected client can be chosen based on the environment variable SREGISTRY_CLIENT, and later changed based on the image uri parsed If there is no preference, the default is to load the singularity hub client. Parameters ========== image: if provided, we derive the correct client based on the uri of an image. If not provided, we default to environment, then hub. quiet: if True, suppress most output about the client (e.g. speak) ''' from sregistry.defaults import SREGISTRY_CLIENT # Give the user a warning: if not check_install(): bot.warning('Singularity is not installed, function might be limited.') # If an image is provided, use to determine client client_name = get_uri(image) if client_name is not None: SREGISTRY_CLIENT = client_name # If no obvious credential provided, we can use SREGISTRY_CLIENT if SREGISTRY_CLIENT == 'aws': from .aws import Client elif SREGISTRY_CLIENT == 'docker': from .docker import Client elif SREGISTRY_CLIENT == 'dropbox': from .dropbox import Client elif SREGISTRY_CLIENT == 'gitlab': from .gitlab import Client elif SREGISTRY_CLIENT == 'globus': from .globus import Client elif SREGISTRY_CLIENT == 'nvidia': from .nvidia import Client elif SREGISTRY_CLIENT == 'hub': from .hub import Client elif SREGISTRY_CLIENT == 'google-drive': from .google_drive import Client elif SREGISTRY_CLIENT == 'google-compute': from .google_storage import Client elif SREGISTRY_CLIENT == 'google-storage': from .google_storage import Client elif SREGISTRY_CLIENT == 'google-build': from .google_build import Client elif SREGISTRY_CLIENT == 'registry': from .registry import Client elif SREGISTRY_CLIENT == 's3': from .s3 import Client elif SREGISTRY_CLIENT == 'swift': from .swift import Client else: from .hub import Client Client.client_name = SREGISTRY_CLIENT Client.quiet = quiet # Create credentials cache, if it doesn't exist Client._credential_cache = 
get_credential_cache() # Add the database, if wanted if SREGISTRY_DATABASE is not None: # These are global functions used across modules from sregistry.database import ( init_db, add, cp, get, mv, rm, rmi, images, inspect, rename, get_container, get_collection, get_or_create_collection ) # Actions Client._init_db = init_db Client.add = add Client.cp = cp Client.get = get Client.inspect = inspect Client.mv = mv Client.rename = rename Client.rm = rm Client.rmi = rmi Client.images = images # Collections Client.get_or_create_collection = get_or_create_collection Client.get_container = get_container Client.get_collection = get_collection # If no database, import dummy functions that return the equivalent else: from sregistry.database import ( add, init_db ) Client.add = add Client._init_db = init_db # Initialize the database cli = Client() if hasattr(Client, '_init_db'): cli._init_db(SREGISTRY_DATABASE) return cli
python
{ "resource": "" }
q272471
ipython
test
def ipython(args): '''give the user an ipython shell, optionally with an endpoint of choice. ''' # The client will announce itself (backend/database) unless it's get from sregistry.main import get_client client = get_client(args.endpoint) client.announce(args.command) from IPython import embed embed()
python
{ "resource": "" }
q272472
get_manifests
test
def get_manifests(self, repo_name, digest=None): '''get_manifests calls get_manifest for each of the schema versions, including v2 and v1. Version 1 includes image layers and metadata, and version 2 must be parsed for a specific manifest, and the 2nd call includes the layers. If a digest is not provided latest is used. Parameters ========== repo_name: reference to the <username>/<repository>:<tag> to obtain digest: a tag or shasum version ''' if not hasattr(self, 'manifests'): self.manifests = {} # Obtain schema version 1 (metadata) and 2, and image config schemaVersions = ['v1', 'v2', 'config'] for schemaVersion in schemaVersions: manifest = self._get_manifest(repo_name, digest, schemaVersion) if manifest is not None: # If we don't have a config yet, try to get from version 2 manifest if schemaVersion == "v2" and "config" in manifest: bot.debug('Attempting to get config as blob in verison 2 manifest') url = self._get_layerLink(repo_name, manifest['config']['digest']) headers = {'Accept': manifest['config']['mediaType']} self.manifests['config'] = self._get(url, headers=headers) self.manifests[schemaVersion] = manifest return self.manifests
python
{ "resource": "" }
q272473
get_manifest
test
def get_manifest(self, repo_name, digest=None, version="v1"):
    '''Return an image manifest for a particular repo and tag. The
    image details are extracted when the client is generated.

    Parameters
    ==========
    repo_name: reference to the <username>/<repository>:<tag> to obtain
    digest: a tag or shasum version
    version: one of v1, v2, and config (for image config)

    Returns
    =======
    manifest: the manifest dict with selfLink added, or None when the
              request fails or returns a non-dict payload
    '''
    accepts = {'config': "application/vnd.docker.container.image.v1+json",
               'v1': "application/vnd.docker.distribution.manifest.v1+json",
               'v2': "application/vnd.docker.distribution.manifest.v2+json" }

    url = self._get_manifest_selfLink(repo_name, digest)

    bot.verbose("Obtaining manifest: %s %s" % (url, version))
    headers = {'Accept': accepts[version] }

    try:
        manifest = self._get(url, headers=headers, quiet=True)
        manifest['selfLink'] = url
    # Narrowed from a bare except: a failed request or a non-dict
    # payload both mean no manifest is available.
    except Exception:
        manifest = None

    return manifest
python
{ "resource": "" }
q272474
get_download_cache
test
def get_download_cache(self, destination, subfolder='docker'): '''determine the user preference for atomic download of layers. If the user has set a singularity cache directory, honor it. Otherwise, use the Singularity default. ''' # First priority after user specification is Singularity Cache if destination is None: destination = self._get_setting('SINGULARITY_CACHEDIR', SINGULARITY_CACHE) # If not set, the user has disabled (use tmp) destination = get_tmpdir(destination) if not destination.endswith(subfolder): destination = "%s/%s" %(destination, subfolder) # Create subfolders, if don't exist mkdir_p(destination) return destination
python
{ "resource": "" }
q272475
extract_env
test
def extract_env(self):
    '''Extract the environment from the manifest as a newline-joined
    block of export statements, or return None when the manifest has
    no environment. Used by env_extract_image and env_extract_tar.
    '''
    environ = self._get_config('Env')
    if environ is None:
        return environ

    if not isinstance(environ, list):
        environ = [environ]

    exports = []
    for entry in environ:
        pairs = re.findall("(?P<var_name>.+?)=(?P<var_value>.+)", entry)
        exports += ['export %s="%s"' % (name, value) for name, value in pairs]

    environ = "\n".join(exports)
    bot.verbose3("Found Docker container environment!")
    return environ
python
{ "resource": "" }
q272476
Client._update_base
test
def _update_base(self): '''update the base, including the URL for GitLab and the API endpoint. ''' self.base = self._get_and_update_setting('SREGISTRY_GITLAB_BASE', "https://gitlab.com/") self.api_base = "%s/api/v4" % self.base.strip('/') self.artifacts = self._get_and_update_setting('SREGISTRY_GITLAB_FOLDER', 'build') self.job = self._get_and_update_setting('SREGISTRY_GITLAB_JOB', 'build') bot.debug(' Api: %s' % self.api_base) bot.debug('Artifacts: %s' % self.artifacts) bot.debug(' Job: %s' % self.job)
python
{ "resource": "" }
q272477
Client._update_secrets
test
def _update_secrets(self): '''update secrets will update metadata needed for pull and search ''' self.token = self._required_get_and_update('SREGISTRY_GITLAB_TOKEN') self.headers["Private-Token"] = self.token
python
{ "resource": "" }
q272478
Client._get_metadata
test
def _get_metadata(self): '''since the user needs a job id and other parameters, save this for them. ''' metadata = {'SREGISTRY_GITLAB_FOLDER': self.artifacts, 'api_base': self.api_base, 'SREGISTRY_GITLAB_BASE': self.base, 'SREGISTRY_GITLAB_JOB': self.job } return metadata
python
{ "resource": "" }
q272479
get_settings
test
def get_settings(self, client_name=None):
    '''Return settings across all clients, or for a single client
    when a name is provided and present in the secrets.

    Parameters
    ==========
    client_name: the client name to return settings for (optional)
    '''
    settings = read_client_secrets()
    if client_name is None:
        return settings
    if client_name in settings:
        return settings[client_name]
    return settings
python
{ "resource": "" }
q272480
required_get_and_update
test
def required_get_and_update(self, name, default=None):
    '''Fetch and persist a setting like get_and_update, but exit
    with an error when the value is missing or empty.

    Parameters
    ==========
    name: the setting name to look up
    default: a fallback value passed through to the lookup
             (previously accepted but silently ignored)
    '''
    setting = self._get_and_update_setting(name, default=default)
    if setting in [None, ""]:
        bot.exit('You must export %s' % name)
    return setting
python
{ "resource": "" }
q272481
update_setting
test
def update_setting(self, name, value):
    '''Persist a single setting for this client backend. A None
    value is ignored; nothing is returned.
    '''
    if value is None:
        return
    update_client_secrets(backend=self.client_name,
                          updates={name: value})
python
{ "resource": "" }
q272482
authorize
test
def authorize(self, names, payload=None, request_type="push"): '''Authorize a client based on encrypting the payload with the client token, which should be matched on the receiving server''' if self.secrets is not None: if "registry" in self.secrets: # Use the payload to generate a digest push|collection|name|tag|user timestamp = generate_timestamp() credential = generate_credential(self.secrets['registry']['username']) credential = "%s/%s/%s" %(request_type,credential,timestamp) if payload is None: payload = "%s|%s|%s|%s|%s|" %(request_type, names['collection'], timestamp, names['image'], names['tag']) signature = generate_signature(payload,self.secrets['registry']['token']) return "SREGISTRY-HMAC-SHA256 Credential=%s,Signature=%s" %(credential,signature)
python
{ "resource": "" }
q272483
list_builders
test
def list_builders(self, project=None, zone='us-west1-a'):
    '''Print a table of builder instances (name and status) for the
       project. They should start with sregistry-builder.

       Parameters
       ==========
       project: specify a project, will default to environment first
       zone: the zone to use, defaults to us-west1-a if environment not set
    '''
    instances = self._get_instances(project, zone)

    # One row per instance: name plus current status
    rows = [[item['name'], item['status']] for item in instances['items']]

    bot.info("[google-compute] Found %s instances" %(len(rows)))
    bot.table(rows)
    bot.newline()
python
{ "resource": "" }
q272484
load_templates
test
def load_templates(self, name):
    '''Retrieve full template records whose names contain the query.
       We look for the name IN the stored data, so the query can be a
       partial string of the full name.

       Parameters
       ==========
       name: the name of a template to look up
    '''
    configs = self._get_templates()

    # A partial match on the template name selects it
    matches = [entry for entry in configs['data'] if name in entry['name']]

    if matches:
        # The user wants the full record for each matching template
        return [self._get(entry['id']) for entry in matches]

    bot.info('No matches found for %s' %name)
python
{ "resource": "" }
q272485
get_ipaddress
test
def get_ipaddress(self, name, retries=3, delay=3):
    '''Look up the external (NAT) ip address of an inserted instance,
       retrying with a delay to give the instance time to start up.

       Parameters
       ==========
       name: the name of the instance to get the ip address for.
       retries: the number of retries before giving up
       delay: the delay between retry
    '''
    def find_nat_ip(instance):
        # Walk networkInterfaces -> accessConfigs for the External NAT ip
        for interface in instance['networkInterfaces']:
            if interface['name'] != 'nic0':
                continue
            for config in interface['accessConfigs']:
                if config['name'] == 'External NAT' and 'natIP' in config:
                    return config['natIP']

    for _ in range(retries):
        for instance in self._get_instances()['items']:
            if instance['name'] == name:
                address = find_nat_ip(instance)
                if address is not None:
                    return address
        sleep(delay)

    bot.warning('Did not find IP address, check Cloud Console!')
python
{ "resource": "" }
q272486
run_build
test
def run_build(self, config):
    '''Run a build, meaning inserting an instance, and return the API
       response.

       Parameters
       ==========
       config: the configuration dictionary generated by setup_build
    '''
    project = self._get_project()
    zone = self._get_zone()

    bot.custom(prefix='INSTANCE', message=config['name'], color="CYAN")
    bot.info(config['description'])

    response = (self._compute_service.instances()
                .insert(project=project, zone=zone, body=config)
                .execute())

    # Direct the user to the web portal with log
    ipaddress = self._get_ipaddress(config['name'])
    bot.info('Robot Logger: http://%s' %ipaddress)
    bot.info('Allow a few minutes for web server install, beepboop!')
    return response
python
{ "resource": "" }
q272487
list_containers
test
def list_containers(self):
    '''Return the blobs in the bucket whose metadata "type" field has
       value "container." We alert the user when no containers are
       found, and the (possibly empty) list is returned either way.

       {'metadata': {'items': [{'key': 'type', 'value': 'container'}, ...]}}
    '''
    containers = []
    for blob in self._bucket.list_blobs():
        metadata = blob.metadata
        if metadata is not None and "type" in metadata:
            if metadata['type'] == "container":
                containers.append(blob)

    if not containers:
        bot.info("No containers found, based on metadata type:container")

    return containers
python
{ "resource": "" }
q272488
search_all
test
def search_all(self):
    '''A "list all" search that doesn't require a query: return every
       object that has custom metadata value of "container".

       IMPORTANT: the upload function adds this metadata. For a
       container to be found by the client, it must have the type as
       container in metadata.
    '''
    results = self._list_containers()

    bot.info("[gs://%s] Containers" %self._bucket_name)

    rows = []
    for item in results:
        # Right-justified size in MB next to each container name
        size = ("%s MB" % round(item.size / (1024*1024.0))).rjust(10)
        rows.append([size, item.metadata['name']])

    bot.table(rows)
    return rows
python
{ "resource": "" }
q272489
main
test
def main(args,parser,subparser):
    '''The list command corresponds with listing images for an external
       resource. This is different from listing images that are local
       to the database, which should be done with "images".
    '''
    from sregistry.main import get_client
    cli = get_client(quiet=args.quiet)

    for query in args.query:
        # An empty or wildcard query means "list everything"
        cli.ls(query=None if query in ['','*'] else query)
python
{ "resource": "" }
q272490
main
test
def main(args, parser, subparser):
    '''Sharing an image means sending a remote share from an image you
       control to a contact, usually an email.
    '''
    from sregistry.main import get_client

    images = args.image
    if not isinstance(images,list):
        images = [images]

    for image in images:

        # Removed a leftover debug print(image) here.
        # Detect any uri, and refresh client if necessary
        cli = get_client(image, quiet=args.quiet)
        cli.announce(args.command)
        cli.share(image, share_to=args.share_to)
python
{ "resource": "" }
q272491
init_db
test
def init_db(self, db_path): '''initialize the database, with the default database path or custom of the format sqlite:////scif/data/expfactory.db The custom path can be set with the environment variable SREGISTRY_DATABASE when a user creates the client, we must initialize this db the database should use the .singularity cache folder to cache layers and images, and .singularity/sregistry.db as a database ''' # Database Setup, use default if uri not provided self.database = 'sqlite:///%s' % db_path self.storage = SREGISTRY_STORAGE bot.debug("Database located at %s" % self.database) self.engine = create_engine(self.database, convert_unicode=True) self.session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=self.engine)) Base.query = self.session.query_property() # import all modules here that might define models so that # they will be registered properly on the metadata. Otherwise # you will have to import them first before calling init_db() Base.metadata.create_all(bind=self.engine) self.Base = Base
python
{ "resource": "" }
q272492
get_build_template
test
def get_build_template():
    '''Locate and return the default cloudbuild template, warning the
       user when it cannot be found.
    '''
    template = "%s/main/templates/build/singularity-cloudbuild.json" % get_installdir()

    if os.path.exists(template):
        bot.debug("Found template %s" %template)
        return read_json(template)

    bot.warning("Template %s not found." % template)
python
{ "resource": "" }
q272493
search
test
def search(self, query=None, args=None):
    '''Search endpoints for containers (files ending in img or simg).

       Parameters
       ==========
       query: the container name (path) or uri to search for
       args.endpoint: can be an endpoint id and optional path, e.g.:
          --endpoint 6881ae2e-db26-11e5-9772-22000b9da45e:.singularity'
          --endpoint 6881ae2e-db26-11e5-9772-22000b9da45e'
          if not defined, we show the user endpoints to choose from

       Usage
       =====
       With an endpoint and a query, we search that endpoint for
       containers of interest. With only a query, we use it to search
       endpoints. With neither, we list shared and personal endpoints.
    '''
    has_query = query is not None
    has_endpoint = args.endpoint is not None

    # Option 1: no query or endpoint lists all shared and personal
    if not has_query and not has_endpoint:
        bot.info('Listing shared endpoints. Add query to expand search.')
        return self._list_endpoints()

    # Option 2: an endpoint without query lists containers there
    if not has_query:
        return self._list_endpoint(args.endpoint)

    # Option 3: a query without an endpoint searches endpoints for it
    if not has_endpoint:
        bot.info('You must specify an endpoint id to query!')
        return self._list_endpoints(query)

    # Option 4: a query with an endpoint searches the endpoint
    return self._list_endpoint(endpoint=args.endpoint, query=query)
python
{ "resource": "" }
q272494
list_endpoints
test
def list_endpoints(self, query=None):
    '''List all endpoints so the user can pick an endpoint id to better
       filter the search; used when no endpoint id was provided.
    '''
    bot.info('Please select an endpoint id to query from')
    endpoints = self._get_endpoints(query)

    # One row per endpoint: id, [kind], and display name
    bot.custom(prefix="Globus", message="Endpoints", color="CYAN")
    rows = [[endpoint_id, '[%s]' %kind, meta['name']]
            for kind, eps in endpoints.items()
            for endpoint_id, meta in eps.items()]
    bot.table(rows)
    return rows
python
{ "resource": "" }
q272495
list_endpoint
test
def list_endpoint(self, endpoint, query=None):
    '''List files within an endpoint, optionally under a path relative
       to the endpoint root and filtered by a query string.

       Parameters
       ==========
       endpoint: a single endpoint ID or an endpoint id and relative
                 path. If no path is provided, we use '', which
                 defaults to scratch.
       query: if defined, limit files to those that have query match
    '''
    if not hasattr(self, 'transfer_client'):
        self._init_transfer_client()

    # Separate endpoint id from the desired path
    endpoint, path = self._parse_endpoint_name(endpoint)

    # Get a list of files at endpoint, under specific path
    try:
        listing = self.transfer_client.operation_ls(endpoint, path=path)
    except TransferAPIError as err:
        # Tell the user what went wrong!
        bot.custom(prefix='ERROR', message=err, color='RED')
        sys.exit(1)

    rows = []
    for entry in listing:
        name = entry['name']
        if query is not None and query not in name:
            continue
        # Highlight container contenders with purple
        if name.endswith('img'):
            name = bot.addColor('PURPLE', name)
        rows.append([entry['type'],
                     entry['permissions'],
                     str(entry['size']),
                     name])

    if not rows:
        bot.info('No content was found at the selected endpoint.')
        return rows

    rows = [["type","[perm]","[size]","[name]"]] + rows
    bot.custom(prefix="Endpoint Listing %s" %path, message='', color="CYAN")
    bot.table(rows)
    return rows
python
{ "resource": "" }
q272496
share
test
def share(self, query, share_to=None):
    '''Use the client to get a shareable link for an image of choice;
       return a url to send to a recipient.
    '''
    names = parse_image_name(remove_uri(query))

    # Dropbox path is the path in storage with a slash
    dropbox_path = '/%s' % names['storage']

    # Nothing to share if the image isn't present in storage
    if self.exists(dropbox_path) is not True:
        return None

    try:
        # Create new shared link
        share = self.dbx.sharing_create_shared_link_with_settings(dropbox_path)
    except ApiError as err:
        # Already exists!
        share = self.dbx.sharing_create_shared_link(dropbox_path)

    bot.info(share.url)
    return share.url
python
{ "resource": "" }
q272497
read_client_secrets
test
def read_client_secrets():
    '''For private or protected registries, a client secrets file is
       required to be located at .sregistry. If no secrets are found,
       we use default of Singularity Hub, and return a dummy secrets.
    '''
    # Start from the default (Singularity Hub) secrets
    client_secrets = _default_client_secrets()

    # If token file not provided, check environment
    secrets = get_secrets_file()

    if secrets is None:
        # Nothing on disk yet: initialize the file with the defaults
        from sregistry.defaults import SREGISTRY_CLIENT_SECRETS
        write_json(client_secrets, SREGISTRY_CLIENT_SECRETS)
    else:
        # Otherwise, load the existing secrets
        client_secrets = read_json(secrets)

    return client_secrets
python
{ "resource": "" }
q272498
Client._get_services
test
def _get_services(self, version='v1'):
    '''Instantiate the google storage and compute services (version v1
       by default) using application default credentials.

       Parameters
       ==========
       version: version to use (default is v1)
    '''
    creds = GoogleCredentials.get_application_default()
    self._bucket_service = storage.Client()
    self._storage_service = discovery_build('storage', version,
                                            credentials=creds)
    self._compute_service = discovery_build('compute', version,
                                            credentials=creds)
python
{ "resource": "" }
q272499
delete_object
test
def delete_object(service, bucket_name, object_name):
    '''Delete a file from a bucket, best-effort: on failure the HttpError
       is returned to the caller instead of being raised.

       Parameters
       ==========
       service: the service obtained with get_storage_service
       bucket_name: the name of the bucket
       object_name: the "name" parameter of the object.

       Returns
       =======
       the delete operation response on success, or the HttpError
    '''
    try:
        operation = service.objects().delete(bucket=bucket_name,
                                             object=object_name).execute()
    except HttpError as e:
        # Surface the error object to the caller (removed a dead `pass`
        # statement that preceded this assignment).
        operation = e
    return operation
python
{ "resource": "" }