repo
stringlengths
7
55
path
stringlengths
4
223
func_name
stringlengths
1
134
original_string
stringlengths
75
104k
language
stringclasses
1 value
code
stringlengths
75
104k
code_tokens
listlengths
19
28.4k
docstring
stringlengths
1
46.9k
docstring_tokens
listlengths
1
1.97k
sha
stringlengths
40
40
url
stringlengths
87
315
partition
stringclasses
1 value
MediaFire/mediafire-python-open-sdk
mediafire/subsetio.py
SubsetIO.read
def read(self, limit=-1):
    """Read up to *limit* bytes from the subset window, see file.read

    limit -- maximum number of bytes to read. Following the file.read
        convention, ANY negative value means "read everything remaining
        in the subset", not just -1. Reads never extend past the end of
        the subset window even if the parent stream has more data.
    """
    # Bytes left between the current parent position and the window end.
    remaining = self.len - self.parent_fd.tell() + self.offset
    # Clamp: negative limit means "read all remaining"; a too-large
    # limit must not leak bytes that lie beyond the subset window.
    # (The original only special-cased -1, so e.g. read(-2) would read
    # to the parent's EOF, escaping the window.)
    if limit < 0 or limit > remaining:
        limit = remaining
    return self.parent_fd.read(limit)
python
def read(self, limit=-1): """Read content. See file.read""" remaining = self.len - self.parent_fd.tell() + self.offset if limit > remaining or limit == -1: limit = remaining return self.parent_fd.read(limit)
[ "def", "read", "(", "self", ",", "limit", "=", "-", "1", ")", ":", "remaining", "=", "self", ".", "len", "-", "self", ".", "parent_fd", ".", "tell", "(", ")", "+", "self", ".", "offset", "if", "limit", ">", "remaining", "or", "limit", "==", "-", "1", ":", "limit", "=", "remaining", "return", "self", ".", "parent_fd", ".", "read", "(", "limit", ")" ]
Read content. See file.read
[ "Read", "content", ".", "See", "file", ".", "read" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/subsetio.py#L48-L55
train
MediaFire/mediafire-python-open-sdk
mediafire/subsetio.py
SubsetIO.seek
def seek(self, offset, whence=os.SEEK_SET):
    """Seek to position in stream, see file.seek

    offset -- displacement, interpreted according to *whence*
    whence -- os.SEEK_SET / os.SEEK_CUR / os.SEEK_END

    Raises ValueError for an unknown *whence* or for a target that
    falls outside the subset window.
    """
    # Translate the request into an absolute position on the parent.
    if whence == os.SEEK_SET:
        target = self.offset + offset
    elif whence == os.SEEK_CUR:
        target = self.tell() + offset
    elif whence == os.SEEK_END:
        target = self.offset + self.len + offset
    else:
        raise ValueError("invalid whence {}".format(whence))

    # The target must stay inside [offset, offset + len].
    if not (self.offset <= target <= self.offset + self.len):
        raise ValueError("seek position beyond chunk area")

    self.parent_fd.seek(target, os.SEEK_SET)
python
def seek(self, offset, whence=os.SEEK_SET): """Seek to position in stream, see file.seek""" pos = None if whence == os.SEEK_SET: pos = self.offset + offset elif whence == os.SEEK_CUR: pos = self.tell() + offset elif whence == os.SEEK_END: pos = self.offset + self.len + offset else: raise ValueError("invalid whence {}".format(whence)) if pos > self.offset + self.len or pos < self.offset: raise ValueError("seek position beyond chunk area") self.parent_fd.seek(pos, os.SEEK_SET)
[ "def", "seek", "(", "self", ",", "offset", ",", "whence", "=", "os", ".", "SEEK_SET", ")", ":", "pos", "=", "None", "if", "whence", "==", "os", ".", "SEEK_SET", ":", "pos", "=", "self", ".", "offset", "+", "offset", "elif", "whence", "==", "os", ".", "SEEK_CUR", ":", "pos", "=", "self", ".", "tell", "(", ")", "+", "offset", "elif", "whence", "==", "os", ".", "SEEK_END", ":", "pos", "=", "self", ".", "offset", "+", "self", ".", "len", "+", "offset", "else", ":", "raise", "ValueError", "(", "\"invalid whence {}\"", ".", "format", "(", "whence", ")", ")", "if", "pos", ">", "self", ".", "offset", "+", "self", ".", "len", "or", "pos", "<", "self", ".", "offset", ":", "raise", "ValueError", "(", "\"seek position beyond chunk area\"", ")", "self", ".", "parent_fd", ".", "seek", "(", "pos", ",", "os", ".", "SEEK_SET", ")" ]
Seek to position in stream, see file.seek
[ "Seek", "to", "position", "in", "stream", "see", "file", ".", "seek" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/subsetio.py#L57-L73
train
MediaFire/mediafire-python-open-sdk
mediafire/subsetio.py
SubsetIO.close
def close(self):
    """Close file, see file.close

    Only closes the parent stream when it is backed by a real file
    descriptor; wrapped in-memory/reused streams are left open so the
    owner can keep using them.
    """
    has_real_fd = True
    try:
        self.parent_fd.fileno()
    except io.UnsupportedOperation:
        has_real_fd = False

    if has_real_fd:
        self.parent_fd.close()
    else:
        logger.debug("Not closing parent_fd - reusing existing")
python
def close(self): """Close file, see file.close""" try: self.parent_fd.fileno() except io.UnsupportedOperation: logger.debug("Not closing parent_fd - reusing existing") else: self.parent_fd.close()
[ "def", "close", "(", "self", ")", ":", "try", ":", "self", ".", "parent_fd", ".", "fileno", "(", ")", "except", "io", ".", "UnsupportedOperation", ":", "logger", ".", "debug", "(", "\"Not closing parent_fd - reusing existing\"", ")", "else", ":", "self", ".", "parent_fd", ".", "close", "(", ")" ]
Close file, see file.close
[ "Close", "file", "see", "file", ".", "close" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/subsetio.py#L80-L87
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi._build_query
def _build_query(self, uri, params=None, action_token_type=None): """Prepare query string""" if params is None: params = QueryParams() params['response_format'] = 'json' session_token = None if action_token_type in self._action_tokens: # Favor action token using_action_token = True session_token = self._action_tokens[action_token_type] else: using_action_token = False if self._session: session_token = self._session['session_token'] if session_token: params['session_token'] = session_token # make order of parameters predictable for testing keys = list(params.keys()) keys.sort() query = urlencode([tuple([key, params[key]]) for key in keys]) if not using_action_token and self._session: secret_key_mod = int(self._session['secret_key']) % 256 signature_base = (str(secret_key_mod) + self._session['time'] + uri + '?' + query).encode('ascii') query += '&signature=' + hashlib.md5(signature_base).hexdigest() return query
python
def _build_query(self, uri, params=None, action_token_type=None): """Prepare query string""" if params is None: params = QueryParams() params['response_format'] = 'json' session_token = None if action_token_type in self._action_tokens: # Favor action token using_action_token = True session_token = self._action_tokens[action_token_type] else: using_action_token = False if self._session: session_token = self._session['session_token'] if session_token: params['session_token'] = session_token # make order of parameters predictable for testing keys = list(params.keys()) keys.sort() query = urlencode([tuple([key, params[key]]) for key in keys]) if not using_action_token and self._session: secret_key_mod = int(self._session['secret_key']) % 256 signature_base = (str(secret_key_mod) + self._session['time'] + uri + '?' + query).encode('ascii') query += '&signature=' + hashlib.md5(signature_base).hexdigest() return query
[ "def", "_build_query", "(", "self", ",", "uri", ",", "params", "=", "None", ",", "action_token_type", "=", "None", ")", ":", "if", "params", "is", "None", ":", "params", "=", "QueryParams", "(", ")", "params", "[", "'response_format'", "]", "=", "'json'", "session_token", "=", "None", "if", "action_token_type", "in", "self", ".", "_action_tokens", ":", "# Favor action token", "using_action_token", "=", "True", "session_token", "=", "self", ".", "_action_tokens", "[", "action_token_type", "]", "else", ":", "using_action_token", "=", "False", "if", "self", ".", "_session", ":", "session_token", "=", "self", ".", "_session", "[", "'session_token'", "]", "if", "session_token", ":", "params", "[", "'session_token'", "]", "=", "session_token", "# make order of parameters predictable for testing", "keys", "=", "list", "(", "params", ".", "keys", "(", ")", ")", "keys", ".", "sort", "(", ")", "query", "=", "urlencode", "(", "[", "tuple", "(", "[", "key", ",", "params", "[", "key", "]", "]", ")", "for", "key", "in", "keys", "]", ")", "if", "not", "using_action_token", "and", "self", ".", "_session", ":", "secret_key_mod", "=", "int", "(", "self", ".", "_session", "[", "'secret_key'", "]", ")", "%", "256", "signature_base", "=", "(", "str", "(", "secret_key_mod", ")", "+", "self", ".", "_session", "[", "'time'", "]", "+", "uri", "+", "'?'", "+", "query", ")", ".", "encode", "(", "'ascii'", ")", "query", "+=", "'&signature='", "+", "hashlib", ".", "md5", "(", "signature_base", ")", ".", "hexdigest", "(", ")", "return", "query" ]
Prepare query string
[ "Prepare", "query", "string" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L97-L134
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.request
def request(self, action, params=None, action_token_type=None,
            upload_info=None, headers=None):
    """Perform request to MediaFire API

    action -- "category/name" of method to call
    params -- dict of parameters or query string
    action_token_type -- action token to use: None, "upload", "image"
    upload_info -- in case of upload, dict of "fd" and "filename"
    headers -- additional headers to send (used for upload)

    session_token and signature generation/update is handled automatically
    """
    uri = self._build_uri(action)

    # A string params is treated as a pre-built, already-signed query
    # (see user_get_session_token); otherwise build and sign it here.
    if isinstance(params, six.text_type):
        query = params
    else:
        query = self._build_query(uri, params, action_token_type)

    if headers is None:
        headers = {}

    if upload_info is None:
        # Use request body for query
        data = query
        headers['Content-Type'] = FORM_MIMETYPE
    else:
        # Use query string for query since payload is file
        uri += '?' + query

        if "filename" in upload_info:
            # Named upload: multipart/form-data body built by the
            # streaming encoder, which also supplies the boundary in
            # its content type.
            data = MultipartEncoder(
                fields={'file': (
                    upload_info["filename"],
                    upload_info["fd"],
                    UPLOAD_MIMETYPE
                )}
            )
            headers["Content-Type"] = data.content_type
        else:
            # Raw upload: stream the file handle as the request body.
            data = upload_info["fd"]
            headers["Content-Type"] = UPLOAD_MIMETYPE

    # Query may contain credentials, so it is only logged for
    # non-upload requests where it is part of the body anyway.
    logger.debug("uri=%s query=%s",
                 uri, query if not upload_info else None)

    try:
        # bytes from now on
        url = (API_BASE + uri).encode('utf-8')
        if isinstance(data, six.text_type):
            # request's data is bytes, dict, or filehandle
            data = data.encode('utf-8')

        response = self.http.post(url, data=data,
                                  headers=headers, stream=True)
    except RequestException as ex:
        logger.exception("HTTP request failed")
        raise MediaFireConnectionError(
            "RequestException: {}".format(ex))

    return self._process_response(response)
python
def request(self, action, params=None, action_token_type=None, upload_info=None, headers=None): """Perform request to MediaFire API action -- "category/name" of method to call params -- dict of parameters or query string action_token_type -- action token to use: None, "upload", "image" upload_info -- in case of upload, dict of "fd" and "filename" headers -- additional headers to send (used for upload) session_token and signature generation/update is handled automatically """ uri = self._build_uri(action) if isinstance(params, six.text_type): query = params else: query = self._build_query(uri, params, action_token_type) if headers is None: headers = {} if upload_info is None: # Use request body for query data = query headers['Content-Type'] = FORM_MIMETYPE else: # Use query string for query since payload is file uri += '?' + query if "filename" in upload_info: data = MultipartEncoder( fields={'file': ( upload_info["filename"], upload_info["fd"], UPLOAD_MIMETYPE )} ) headers["Content-Type"] = data.content_type else: data = upload_info["fd"] headers["Content-Type"] = UPLOAD_MIMETYPE logger.debug("uri=%s query=%s", uri, query if not upload_info else None) try: # bytes from now on url = (API_BASE + uri).encode('utf-8') if isinstance(data, six.text_type): # request's data is bytes, dict, or filehandle data = data.encode('utf-8') response = self.http.post(url, data=data, headers=headers, stream=True) except RequestException as ex: logger.exception("HTTP request failed") raise MediaFireConnectionError( "RequestException: {}".format(ex)) return self._process_response(response)
[ "def", "request", "(", "self", ",", "action", ",", "params", "=", "None", ",", "action_token_type", "=", "None", ",", "upload_info", "=", "None", ",", "headers", "=", "None", ")", ":", "uri", "=", "self", ".", "_build_uri", "(", "action", ")", "if", "isinstance", "(", "params", ",", "six", ".", "text_type", ")", ":", "query", "=", "params", "else", ":", "query", "=", "self", ".", "_build_query", "(", "uri", ",", "params", ",", "action_token_type", ")", "if", "headers", "is", "None", ":", "headers", "=", "{", "}", "if", "upload_info", "is", "None", ":", "# Use request body for query", "data", "=", "query", "headers", "[", "'Content-Type'", "]", "=", "FORM_MIMETYPE", "else", ":", "# Use query string for query since payload is file", "uri", "+=", "'?'", "+", "query", "if", "\"filename\"", "in", "upload_info", ":", "data", "=", "MultipartEncoder", "(", "fields", "=", "{", "'file'", ":", "(", "upload_info", "[", "\"filename\"", "]", ",", "upload_info", "[", "\"fd\"", "]", ",", "UPLOAD_MIMETYPE", ")", "}", ")", "headers", "[", "\"Content-Type\"", "]", "=", "data", ".", "content_type", "else", ":", "data", "=", "upload_info", "[", "\"fd\"", "]", "headers", "[", "\"Content-Type\"", "]", "=", "UPLOAD_MIMETYPE", "logger", ".", "debug", "(", "\"uri=%s query=%s\"", ",", "uri", ",", "query", "if", "not", "upload_info", "else", "None", ")", "try", ":", "# bytes from now on", "url", "=", "(", "API_BASE", "+", "uri", ")", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "data", ",", "six", ".", "text_type", ")", ":", "# request's data is bytes, dict, or filehandle", "data", "=", "data", ".", "encode", "(", "'utf-8'", ")", "response", "=", "self", ".", "http", ".", "post", "(", "url", ",", "data", "=", "data", ",", "headers", "=", "headers", ",", "stream", "=", "True", ")", "except", "RequestException", "as", "ex", ":", "logger", ".", "exception", "(", "\"HTTP request failed\"", ")", "raise", "MediaFireConnectionError", "(", "\"RequestException: {}\"", ".", "format", 
"(", "ex", ")", ")", "return", "self", ".", "_process_response", "(", "response", ")" ]
Perform request to MediaFire API action -- "category/name" of method to call params -- dict of parameters or query string action_token_type -- action token to use: None, "upload", "image" upload_info -- in case of upload, dict of "fd" and "filename" headers -- additional headers to send (used for upload) session_token and signature generation/update is handled automatically
[ "Perform", "request", "to", "MediaFire", "API" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L136-L197
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi._process_response
def _process_response(self, response):
    """Parse response

    response -- requests response object

    Returns the parsed 'response' node for JSON payloads; returns the
    raw response object untouched for non-JSON payloads. Raises
    MediaFireApiError on API-level failure.
    """
    content_type = response.headers['Content-Type']
    forward_raw = False

    if content_type != 'application/json':
        logger.debug("headers: %s", response.headers)
        # API BUG: text/xml content-type with json payload
        # http://forum.mediafiredev.com/showthread.php?136
        if content_type == 'text/xml':
            # we never request xml, so accept it only when the body
            # quacks like JSON
            forward_raw = not response.text.lstrip().startswith('{')
        else:
            # can't deal with non-json here, hand the response back
            forward_raw = True

    if forward_raw:
        response.raise_for_status()
        return response

    logger.debug("response: %s", response.text)

    # if we are here, then most likely have json
    try:
        response_node = response.json()['response']
    except ValueError:
        # promised JSON but failed to parse it
        raise MediaFireApiError("JSON decode failure")

    # Server asked us to rotate the call-signature secret key.
    if response_node.get('new_key', 'no') == 'yes':
        self._regenerate_secret_key()

    # check for errors
    if response_node['result'] != 'Success':
        raise MediaFireApiError(response_node['message'],
                                response_node['error'])

    return response_node
python
def _process_response(self, response): """Parse response""" forward_raw = False content_type = response.headers['Content-Type'] if content_type != 'application/json': logger.debug("headers: %s", response.headers) # API BUG: text/xml content-type with json payload # http://forum.mediafiredev.com/showthread.php?136 if content_type == 'text/xml': # we never request xml, so check it quacks like JSON if not response.text.lstrip().startswith('{'): forward_raw = True else: # _process_response can't deal with non-json, # return response as is forward_raw = True if forward_raw: response.raise_for_status() return response logger.debug("response: %s", response.text) # if we are here, then most likely have json try: response_node = response.json()['response'] except ValueError: # promised JSON but failed raise MediaFireApiError("JSON decode failure") if response_node.get('new_key', 'no') == 'yes': self._regenerate_secret_key() # check for errors if response_node['result'] != 'Success': raise MediaFireApiError(response_node['message'], response_node['error']) return response_node
[ "def", "_process_response", "(", "self", ",", "response", ")", ":", "forward_raw", "=", "False", "content_type", "=", "response", ".", "headers", "[", "'Content-Type'", "]", "if", "content_type", "!=", "'application/json'", ":", "logger", ".", "debug", "(", "\"headers: %s\"", ",", "response", ".", "headers", ")", "# API BUG: text/xml content-type with json payload", "# http://forum.mediafiredev.com/showthread.php?136", "if", "content_type", "==", "'text/xml'", ":", "# we never request xml, so check it quacks like JSON", "if", "not", "response", ".", "text", ".", "lstrip", "(", ")", ".", "startswith", "(", "'{'", ")", ":", "forward_raw", "=", "True", "else", ":", "# _process_response can't deal with non-json,", "# return response as is", "forward_raw", "=", "True", "if", "forward_raw", ":", "response", ".", "raise_for_status", "(", ")", "return", "response", "logger", ".", "debug", "(", "\"response: %s\"", ",", "response", ".", "text", ")", "# if we are here, then most likely have json", "try", ":", "response_node", "=", "response", ".", "json", "(", ")", "[", "'response'", "]", "except", "ValueError", ":", "# promised JSON but failed", "raise", "MediaFireApiError", "(", "\"JSON decode failure\"", ")", "if", "response_node", ".", "get", "(", "'new_key'", ",", "'no'", ")", "==", "'yes'", ":", "self", ".", "_regenerate_secret_key", "(", ")", "# check for errors", "if", "response_node", "[", "'result'", "]", "!=", "'Success'", ":", "raise", "MediaFireApiError", "(", "response_node", "[", "'message'", "]", ",", "response_node", "[", "'error'", "]", ")", "return", "response_node" ]
Parse response
[ "Parse", "response" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L199-L238
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi._regenerate_secret_key
def _regenerate_secret_key(self): """Regenerate secret key http://www.mediafire.com/developers/core_api/1.3/getting_started/#call_signature """ # Don't regenerate the key if we have none if self._session and 'secret_key' in self._session: self._session['secret_key'] = ( int(self._session['secret_key']) * 16807) % 2147483647
python
def _regenerate_secret_key(self): """Regenerate secret key http://www.mediafire.com/developers/core_api/1.3/getting_started/#call_signature """ # Don't regenerate the key if we have none if self._session and 'secret_key' in self._session: self._session['secret_key'] = ( int(self._session['secret_key']) * 16807) % 2147483647
[ "def", "_regenerate_secret_key", "(", "self", ")", ":", "# Don't regenerate the key if we have none", "if", "self", ".", "_session", "and", "'secret_key'", "in", "self", ".", "_session", ":", "self", ".", "_session", "[", "'secret_key'", "]", "=", "(", "int", "(", "self", ".", "_session", "[", "'secret_key'", "]", ")", "*", "16807", ")", "%", "2147483647" ]
Regenerate secret key http://www.mediafire.com/developers/core_api/1.3/getting_started/#call_signature
[ "Regenerate", "secret", "key" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L240-L248
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.session
def session(self, value):
    """Set session token

    value -- dict returned by user/get_session_token, or None to
        unset the current session

    Raises ValueError when value is not a dict or is missing any of
    the mandatory session fields.
    """
    if value is None:
        # unset session token
        self._session = None
        return

    if not isinstance(value, dict):
        raise ValueError("session info is required")

    required = ("session_token", "time", "secret_key")
    for name in required:
        if name not in value:
            raise ValueError("Missing parameter: {}".format(name))

    parsed = {name: value[name] for name in required}

    # nice to have, but not mandatory
    for name in ("ekey", "pkey"):
        if name in value:
            parsed[name] = value[name]

    self._session = parsed
python
def session(self, value): """Set session token value -- dict returned by user/get_session_token""" # unset session token if value is None: self._session = None return if not isinstance(value, dict): raise ValueError("session info is required") session_parsed = {} for key in ["session_token", "time", "secret_key"]: if key not in value: raise ValueError("Missing parameter: {}".format(key)) session_parsed[key] = value[key] for key in ["ekey", "pkey"]: # nice to have, but not mandatory if key in value: session_parsed[key] = value[key] self._session = session_parsed
[ "def", "session", "(", "self", ",", "value", ")", ":", "# unset session token", "if", "value", "is", "None", ":", "self", ".", "_session", "=", "None", "return", "if", "not", "isinstance", "(", "value", ",", "dict", ")", ":", "raise", "ValueError", "(", "\"session info is required\"", ")", "session_parsed", "=", "{", "}", "for", "key", "in", "[", "\"session_token\"", ",", "\"time\"", ",", "\"secret_key\"", "]", ":", "if", "key", "not", "in", "value", ":", "raise", "ValueError", "(", "\"Missing parameter: {}\"", ".", "format", "(", "key", ")", ")", "session_parsed", "[", "key", "]", "=", "value", "[", "key", "]", "for", "key", "in", "[", "\"ekey\"", ",", "\"pkey\"", "]", ":", "# nice to have, but not mandatory", "if", "key", "in", "value", ":", "session_parsed", "[", "key", "]", "=", "value", "[", "key", "]", "self", ".", "_session", "=", "session_parsed" ]
Set session token value -- dict returned by user/get_session_token
[ "Set", "session", "token" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L256-L281
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.set_action_token
def set_action_token(self, type_=None, action_token=None):
    """Set action tokens

    type_ -- either "upload" or "image"
    action_token -- string obtained from user/get_action_token,
                    set None to remove the token
    """
    if action_token is None:
        # Removal is idempotent: dropping a token that was never set
        # is not an error (a plain `del` would raise KeyError here).
        self._action_tokens.pop(type_, None)
    else:
        self._action_tokens[type_] = action_token
python
def set_action_token(self, type_=None, action_token=None): """Set action tokens type_ -- either "upload" or "image" action_token -- string obtained from user/get_action_token, set None to remove the token """ if action_token is None: del self._action_tokens[type_] else: self._action_tokens[type_] = action_token
[ "def", "set_action_token", "(", "self", ",", "type_", "=", "None", ",", "action_token", "=", "None", ")", ":", "if", "action_token", "is", "None", ":", "del", "self", ".", "_action_tokens", "[", "type_", "]", "else", ":", "self", ".", "_action_tokens", "[", "type_", "]", "=", "action_token" ]
Set action tokens type_ -- either "upload" or "image" action_token -- string obtained from user/get_action_token, set None to remove the token
[ "Set", "action", "tokens" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L288-L298
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.user_get_session_token
def user_get_session_token(self, app_id=None, email=None, password=None,
                           ekey=None, fb_access_token=None,
                           tw_oauth_token=None, tw_oauth_token_secret=None,
                           api_key=None):
    """user/get_session_token

    http://www.mediafire.com/developers/core_api/1.3/user/#get_session_token
    """
    if app_id is None:
        raise ValueError("app_id must be defined")

    params = QueryParams({
        'application_id': str(app_id),
        'token_version': 2,
        'response_format': 'json'
    })

    # Each credential flavor contributes its own fields to the
    # signature, in a fixed order; application_id always comes last.
    if fb_access_token:
        params['fb_access_token'] = fb_access_token
        signature_keys = ['fb_access_token']
    elif tw_oauth_token and tw_oauth_token_secret:
        params['tw_oauth_token'] = tw_oauth_token
        params['tw_oauth_token_secret'] = tw_oauth_token_secret
        signature_keys = ['tw_oauth_token', 'tw_oauth_token_secret']
    elif (email or ekey) and password:
        signature_keys = []
        if email:
            params['email'] = email
            signature_keys.append('email')
        if ekey:
            params['ekey'] = ekey
            signature_keys.append('ekey')
        params['password'] = password
        signature_keys.append('password')
    else:
        raise ValueError("Credentials not provided")

    signature_keys.append('application_id')

    digest = hashlib.sha1()
    for key in signature_keys:
        digest.update(str(params[key]).encode('ascii'))

    # Note: If the app uses a callback URL to provide its API key,
    # or if it does not have the "Require Secret Key" option checked,
    # then the API key may be omitted from the signature
    if api_key:
        digest.update(api_key.encode('ascii'))

    query = urlencode(params) + '&signature=' + digest.hexdigest()

    return self.request('user/get_session_token', params=query)
python
def user_get_session_token(self, app_id=None, email=None, password=None, ekey=None, fb_access_token=None, tw_oauth_token=None, tw_oauth_token_secret=None, api_key=None): """user/get_session_token http://www.mediafire.com/developers/core_api/1.3/user/#get_session_token """ if app_id is None: raise ValueError("app_id must be defined") params = QueryParams({ 'application_id': str(app_id), 'token_version': 2, 'response_format': 'json' }) if fb_access_token: params['fb_access_token'] = fb_access_token signature_keys = ['fb_access_token'] elif tw_oauth_token and tw_oauth_token_secret: params['tw_oauth_token'] = tw_oauth_token params['tw_oauth_token_secret'] = tw_oauth_token_secret signature_keys = ['tw_oauth_token', 'tw_oauth_token_secret'] elif (email or ekey) and password: signature_keys = [] if email: signature_keys.append('email') params['email'] = email if ekey: signature_keys.append('ekey') params['ekey'] = ekey params['password'] = password signature_keys.append('password') else: raise ValueError("Credentials not provided") signature_keys.append('application_id') signature = hashlib.sha1() for key in signature_keys: signature.update(str(params[key]).encode('ascii')) # Note: If the app uses a callback URL to provide its API key, # or if it does not have the "Require Secret Key" option checked, # then the API key may be omitted from the signature if api_key: signature.update(api_key.encode('ascii')) query = urlencode(params) query += '&signature=' + signature.hexdigest() return self.request('user/get_session_token', params=query)
[ "def", "user_get_session_token", "(", "self", ",", "app_id", "=", "None", ",", "email", "=", "None", ",", "password", "=", "None", ",", "ekey", "=", "None", ",", "fb_access_token", "=", "None", ",", "tw_oauth_token", "=", "None", ",", "tw_oauth_token_secret", "=", "None", ",", "api_key", "=", "None", ")", ":", "if", "app_id", "is", "None", ":", "raise", "ValueError", "(", "\"app_id must be defined\"", ")", "params", "=", "QueryParams", "(", "{", "'application_id'", ":", "str", "(", "app_id", ")", ",", "'token_version'", ":", "2", ",", "'response_format'", ":", "'json'", "}", ")", "if", "fb_access_token", ":", "params", "[", "'fb_access_token'", "]", "=", "fb_access_token", "signature_keys", "=", "[", "'fb_access_token'", "]", "elif", "tw_oauth_token", "and", "tw_oauth_token_secret", ":", "params", "[", "'tw_oauth_token'", "]", "=", "tw_oauth_token", "params", "[", "'tw_oauth_token_secret'", "]", "=", "tw_oauth_token_secret", "signature_keys", "=", "[", "'tw_oauth_token'", ",", "'tw_oauth_token_secret'", "]", "elif", "(", "email", "or", "ekey", ")", "and", "password", ":", "signature_keys", "=", "[", "]", "if", "email", ":", "signature_keys", ".", "append", "(", "'email'", ")", "params", "[", "'email'", "]", "=", "email", "if", "ekey", ":", "signature_keys", ".", "append", "(", "'ekey'", ")", "params", "[", "'ekey'", "]", "=", "ekey", "params", "[", "'password'", "]", "=", "password", "signature_keys", ".", "append", "(", "'password'", ")", "else", ":", "raise", "ValueError", "(", "\"Credentials not provided\"", ")", "signature_keys", ".", "append", "(", "'application_id'", ")", "signature", "=", "hashlib", ".", "sha1", "(", ")", "for", "key", "in", "signature_keys", ":", "signature", ".", "update", "(", "str", "(", "params", "[", "key", "]", ")", ".", "encode", "(", "'ascii'", ")", ")", "# Note: If the app uses a callback URL to provide its API key,", "# or if it does not have the \"Require Secret Key\" option checked,", "# then the API key may be omitted from 
the signature", "if", "api_key", ":", "signature", ".", "update", "(", "api_key", ".", "encode", "(", "'ascii'", ")", ")", "query", "=", "urlencode", "(", "params", ")", "query", "+=", "'&signature='", "+", "signature", ".", "hexdigest", "(", ")", "return", "self", ".", "request", "(", "'user/get_session_token'", ",", "params", "=", "query", ")" ]
user/get_session_token http://www.mediafire.com/developers/core_api/1.3/user/#get_session_token
[ "user", "/", "get_session_token" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L318-L374
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.user_set_avatar
def user_set_avatar(self, action=None, quick_key=None, url=None):
    """user/set_avatar

    http://www.mediafire.com/developers/core_api/1.3/user/#set_avatar
    """
    params = QueryParams({
        "action": action,
        "quick_key": quick_key,
        "url": url
    })
    return self.request("user/set_avatar", params)
python
def user_set_avatar(self, action=None, quick_key=None, url=None): """user/set_avatar http://www.mediafire.com/developers/core_api/1.3/user/#set_avatar """ return self.request("user/set_avatar", QueryParams({ "action": action, "quick_key": quick_key, "url": url }))
[ "def", "user_set_avatar", "(", "self", ",", "action", "=", "None", ",", "quick_key", "=", "None", ",", "url", "=", "None", ")", ":", "return", "self", ".", "request", "(", "\"user/set_avatar\"", ",", "QueryParams", "(", "{", "\"action\"", ":", "action", ",", "\"quick_key\"", ":", "quick_key", ",", "\"url\"", ":", "url", "}", ")", ")" ]
user/set_avatar http://www.mediafire.com/developers/core_api/1.3/user/#set_avatar
[ "user", "/", "set_avatar" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L430-L439
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.user_update
def user_update(self, display_name=None, first_name=None, last_name=None,
                email=None, password=None, current_password=None,
                birth_date=None, gender=None, website=None, subdomain=None,
                location=None, newsletter=None, primary_usage=None,
                timezone=None):
    """user/update

    http://www.mediafire.com/developers/core_api/1.3/user/#update
    """
    fields = {
        "display_name": display_name,
        "first_name": first_name,
        "last_name": last_name,
        "email": email,
        "password": password,
        "current_password": current_password,
        "birth_date": birth_date,
        "gender": gender,
        "website": website,
        "subdomain": subdomain,
        "location": location,
        "newsletter": newsletter,
        "primary_usage": primary_usage,
        "timezone": timezone
    }
    return self.request("user/update", QueryParams(fields))
python
def user_update(self, display_name=None, first_name=None, last_name=None, email=None, password=None, current_password=None, birth_date=None, gender=None, website=None, subdomain=None, location=None, newsletter=None, primary_usage=None, timezone=None): """ user/update http://www.mediafire.com/developers/core_api/1.3/user/#update """ return self.request("user/update", QueryParams({ "display_name": display_name, "first_name": first_name, "last_name": last_name, "email": email, "password": password, "current_password": current_password, "birth_date": birth_date, "gender": gender, "website": website, "subdomain": subdomain, "location": location, "newsletter": newsletter, "primary_usage": primary_usage, "timezone": timezone }))
[ "def", "user_update", "(", "self", ",", "display_name", "=", "None", ",", "first_name", "=", "None", ",", "last_name", "=", "None", ",", "email", "=", "None", ",", "password", "=", "None", ",", "current_password", "=", "None", ",", "birth_date", "=", "None", ",", "gender", "=", "None", ",", "website", "=", "None", ",", "subdomain", "=", "None", ",", "location", "=", "None", ",", "newsletter", "=", "None", ",", "primary_usage", "=", "None", ",", "timezone", "=", "None", ")", ":", "return", "self", ".", "request", "(", "\"user/update\"", ",", "QueryParams", "(", "{", "\"display_name\"", ":", "display_name", ",", "\"first_name\"", ":", "first_name", ",", "\"last_name\"", ":", "last_name", ",", "\"email\"", ":", "email", ",", "\"password\"", ":", "password", ",", "\"current_password\"", ":", "current_password", ",", "\"birth_date\"", ":", "birth_date", ",", "\"gender\"", ":", "gender", ",", "\"website\"", ":", "website", ",", "\"subdomain\"", ":", "subdomain", ",", "\"location\"", ":", "location", ",", "\"newsletter\"", ":", "newsletter", ",", "\"primary_usage\"", ":", "primary_usage", ",", "\"timezone\"", ":", "timezone", "}", ")", ")" ]
user/update http://www.mediafire.com/developers/core_api/1.3/user/#update
[ "user", "/", "update" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L441-L466
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.folder_get_info
def folder_get_info(self, folder_key=None, device_id=None, details=None): """folder/get_info http://www.mediafire.com/developers/core_api/1.3/folder/#get_info """ return self.request('folder/get_info', QueryParams({ 'folder_key': folder_key, 'device_id': device_id, 'details': details }))
python
def folder_get_info(self, folder_key=None, device_id=None, details=None): """folder/get_info http://www.mediafire.com/developers/core_api/1.3/folder/#get_info """ return self.request('folder/get_info', QueryParams({ 'folder_key': folder_key, 'device_id': device_id, 'details': details }))
[ "def", "folder_get_info", "(", "self", ",", "folder_key", "=", "None", ",", "device_id", "=", "None", ",", "details", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'folder/get_info'", ",", "QueryParams", "(", "{", "'folder_key'", ":", "folder_key", ",", "'device_id'", ":", "device_id", ",", "'details'", ":", "details", "}", ")", ")" ]
folder/get_info http://www.mediafire.com/developers/core_api/1.3/folder/#get_info
[ "folder", "/", "get_info" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L468-L477
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.folder_get_content
def folder_get_content(self, folder_key=None, content_type=None, filter_=None, device_id=None, order_by=None, order_direction=None, chunk=None, details=None, chunk_size=None): """folder/get_content http://www.mediafire.com/developers/core_api/1.3/folder/#get_content """ return self.request('folder/get_content', QueryParams({ 'folder_key': folder_key, 'content_type': content_type, 'filter': filter_, 'device_id': device_id, 'order_by': order_by, 'order_direction': order_direction, 'chunk': chunk, 'details': details, 'chunk_size': chunk_size }))
python
def folder_get_content(self, folder_key=None, content_type=None, filter_=None, device_id=None, order_by=None, order_direction=None, chunk=None, details=None, chunk_size=None): """folder/get_content http://www.mediafire.com/developers/core_api/1.3/folder/#get_content """ return self.request('folder/get_content', QueryParams({ 'folder_key': folder_key, 'content_type': content_type, 'filter': filter_, 'device_id': device_id, 'order_by': order_by, 'order_direction': order_direction, 'chunk': chunk, 'details': details, 'chunk_size': chunk_size }))
[ "def", "folder_get_content", "(", "self", ",", "folder_key", "=", "None", ",", "content_type", "=", "None", ",", "filter_", "=", "None", ",", "device_id", "=", "None", ",", "order_by", "=", "None", ",", "order_direction", "=", "None", ",", "chunk", "=", "None", ",", "details", "=", "None", ",", "chunk_size", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'folder/get_content'", ",", "QueryParams", "(", "{", "'folder_key'", ":", "folder_key", ",", "'content_type'", ":", "content_type", ",", "'filter'", ":", "filter_", ",", "'device_id'", ":", "device_id", ",", "'order_by'", ":", "order_by", ",", "'order_direction'", ":", "order_direction", ",", "'chunk'", ":", "chunk", ",", "'details'", ":", "details", ",", "'chunk_size'", ":", "chunk_size", "}", ")", ")" ]
folder/get_content http://www.mediafire.com/developers/core_api/1.3/folder/#get_content
[ "folder", "/", "get_content" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L479-L497
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.folder_update
def folder_update(self, folder_key, foldername=None, description=None, privacy=None, privacy_recursive=None, mtime=None): """folder/update http://www.mediafire.com/developers/core_api/1.3/folder/#update """ return self.request('folder/update', QueryParams({ 'folder_key': folder_key, 'foldername': foldername, 'description': description, 'privacy': privacy, 'privacy_recursive': privacy_recursive, 'mtime': mtime }))
python
def folder_update(self, folder_key, foldername=None, description=None, privacy=None, privacy_recursive=None, mtime=None): """folder/update http://www.mediafire.com/developers/core_api/1.3/folder/#update """ return self.request('folder/update', QueryParams({ 'folder_key': folder_key, 'foldername': foldername, 'description': description, 'privacy': privacy, 'privacy_recursive': privacy_recursive, 'mtime': mtime }))
[ "def", "folder_update", "(", "self", ",", "folder_key", ",", "foldername", "=", "None", ",", "description", "=", "None", ",", "privacy", "=", "None", ",", "privacy_recursive", "=", "None", ",", "mtime", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'folder/update'", ",", "QueryParams", "(", "{", "'folder_key'", ":", "folder_key", ",", "'foldername'", ":", "foldername", ",", "'description'", ":", "description", ",", "'privacy'", ":", "privacy", ",", "'privacy_recursive'", ":", "privacy_recursive", ",", "'mtime'", ":", "mtime", "}", ")", ")" ]
folder/update http://www.mediafire.com/developers/core_api/1.3/folder/#update
[ "folder", "/", "update" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L499-L512
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.folder_create
def folder_create(self, foldername=None, parent_key=None, action_on_duplicate=None, mtime=None): """folder/create http://www.mediafire.com/developers/core_api/1.3/folder/#create """ return self.request('folder/create', QueryParams({ 'foldername': foldername, 'parent_key': parent_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime }))
python
def folder_create(self, foldername=None, parent_key=None, action_on_duplicate=None, mtime=None): """folder/create http://www.mediafire.com/developers/core_api/1.3/folder/#create """ return self.request('folder/create', QueryParams({ 'foldername': foldername, 'parent_key': parent_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime }))
[ "def", "folder_create", "(", "self", ",", "foldername", "=", "None", ",", "parent_key", "=", "None", ",", "action_on_duplicate", "=", "None", ",", "mtime", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'folder/create'", ",", "QueryParams", "(", "{", "'foldername'", ":", "foldername", ",", "'parent_key'", ":", "parent_key", ",", "'action_on_duplicate'", ":", "action_on_duplicate", ",", "'mtime'", ":", "mtime", "}", ")", ")" ]
folder/create http://www.mediafire.com/developers/core_api/1.3/folder/#create
[ "folder", "/", "create" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L514-L525
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.upload_check
def upload_check(self, filename=None, folder_key=None, filedrop_key=None, size=None, hash_=None, path=None, resumable=None): """upload/check http://www.mediafire.com/developers/core_api/1.3/upload/#check """ return self.request('upload/check', QueryParams({ 'filename': filename, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'size': size, 'hash': hash_, 'path': path, 'resumable': resumable }))
python
def upload_check(self, filename=None, folder_key=None, filedrop_key=None, size=None, hash_=None, path=None, resumable=None): """upload/check http://www.mediafire.com/developers/core_api/1.3/upload/#check """ return self.request('upload/check', QueryParams({ 'filename': filename, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'size': size, 'hash': hash_, 'path': path, 'resumable': resumable }))
[ "def", "upload_check", "(", "self", ",", "filename", "=", "None", ",", "folder_key", "=", "None", ",", "filedrop_key", "=", "None", ",", "size", "=", "None", ",", "hash_", "=", "None", ",", "path", "=", "None", ",", "resumable", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'upload/check'", ",", "QueryParams", "(", "{", "'filename'", ":", "filename", ",", "'folder_key'", ":", "folder_key", ",", "'filedrop_key'", ":", "filedrop_key", ",", "'size'", ":", "size", ",", "'hash'", ":", "hash_", ",", "'path'", ":", "path", ",", "'resumable'", ":", "resumable", "}", ")", ")" ]
upload/check http://www.mediafire.com/developers/core_api/1.3/upload/#check
[ "upload", "/", "check" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L555-L569
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.upload_simple
def upload_simple(self, fd, filename, folder_key=None, path=None, filedrop_key=None, action_on_duplicate=None, mtime=None, file_size=None, file_hash=None): """upload/simple http://www.mediafire.com/developers/core_api/1.3/upload/#simple """ action = 'upload/simple' params = QueryParams({ 'folder_key': folder_key, 'path': path, 'filedrop_key': filedrop_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime }) headers = QueryParams({ 'X-Filesize': str(file_size), 'X-Filehash': file_hash, 'X-Filename': filename.encode('utf-8') }) upload_info = { "fd": fd, } return self.request(action, params, action_token_type="upload", upload_info=upload_info, headers=headers)
python
def upload_simple(self, fd, filename, folder_key=None, path=None, filedrop_key=None, action_on_duplicate=None, mtime=None, file_size=None, file_hash=None): """upload/simple http://www.mediafire.com/developers/core_api/1.3/upload/#simple """ action = 'upload/simple' params = QueryParams({ 'folder_key': folder_key, 'path': path, 'filedrop_key': filedrop_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime }) headers = QueryParams({ 'X-Filesize': str(file_size), 'X-Filehash': file_hash, 'X-Filename': filename.encode('utf-8') }) upload_info = { "fd": fd, } return self.request(action, params, action_token_type="upload", upload_info=upload_info, headers=headers)
[ "def", "upload_simple", "(", "self", ",", "fd", ",", "filename", ",", "folder_key", "=", "None", ",", "path", "=", "None", ",", "filedrop_key", "=", "None", ",", "action_on_duplicate", "=", "None", ",", "mtime", "=", "None", ",", "file_size", "=", "None", ",", "file_hash", "=", "None", ")", ":", "action", "=", "'upload/simple'", "params", "=", "QueryParams", "(", "{", "'folder_key'", ":", "folder_key", ",", "'path'", ":", "path", ",", "'filedrop_key'", ":", "filedrop_key", ",", "'action_on_duplicate'", ":", "action_on_duplicate", ",", "'mtime'", ":", "mtime", "}", ")", "headers", "=", "QueryParams", "(", "{", "'X-Filesize'", ":", "str", "(", "file_size", ")", ",", "'X-Filehash'", ":", "file_hash", ",", "'X-Filename'", ":", "filename", ".", "encode", "(", "'utf-8'", ")", "}", ")", "upload_info", "=", "{", "\"fd\"", ":", "fd", ",", "}", "return", "self", ".", "request", "(", "action", ",", "params", ",", "action_token_type", "=", "\"upload\"", ",", "upload_info", "=", "upload_info", ",", "headers", "=", "headers", ")" ]
upload/simple http://www.mediafire.com/developers/core_api/1.3/upload/#simple
[ "upload", "/", "simple" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L571-L599
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.upload_resumable
def upload_resumable(self, fd, filesize, filehash, unit_hash, unit_id, unit_size, quick_key=None, action_on_duplicate=None, mtime=None, version_control=None, folder_key=None, filedrop_key=None, path=None, previous_hash=None): """upload/resumable http://www.mediafire.com/developers/core_api/1.3/upload/#resumable """ action = 'upload/resumable' headers = { 'x-filesize': str(filesize), 'x-filehash': filehash, 'x-unit-hash': unit_hash, 'x-unit-id': str(unit_id), 'x-unit-size': str(unit_size) } params = QueryParams({ 'quick_key': quick_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime, 'version_control': version_control, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'path': path, 'previous_hash': previous_hash }) upload_info = { "fd": fd, "filename": "chunk" } return self.request(action, params, action_token_type="upload", upload_info=upload_info, headers=headers)
python
def upload_resumable(self, fd, filesize, filehash, unit_hash, unit_id, unit_size, quick_key=None, action_on_duplicate=None, mtime=None, version_control=None, folder_key=None, filedrop_key=None, path=None, previous_hash=None): """upload/resumable http://www.mediafire.com/developers/core_api/1.3/upload/#resumable """ action = 'upload/resumable' headers = { 'x-filesize': str(filesize), 'x-filehash': filehash, 'x-unit-hash': unit_hash, 'x-unit-id': str(unit_id), 'x-unit-size': str(unit_size) } params = QueryParams({ 'quick_key': quick_key, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime, 'version_control': version_control, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'path': path, 'previous_hash': previous_hash }) upload_info = { "fd": fd, "filename": "chunk" } return self.request(action, params, action_token_type="upload", upload_info=upload_info, headers=headers)
[ "def", "upload_resumable", "(", "self", ",", "fd", ",", "filesize", ",", "filehash", ",", "unit_hash", ",", "unit_id", ",", "unit_size", ",", "quick_key", "=", "None", ",", "action_on_duplicate", "=", "None", ",", "mtime", "=", "None", ",", "version_control", "=", "None", ",", "folder_key", "=", "None", ",", "filedrop_key", "=", "None", ",", "path", "=", "None", ",", "previous_hash", "=", "None", ")", ":", "action", "=", "'upload/resumable'", "headers", "=", "{", "'x-filesize'", ":", "str", "(", "filesize", ")", ",", "'x-filehash'", ":", "filehash", ",", "'x-unit-hash'", ":", "unit_hash", ",", "'x-unit-id'", ":", "str", "(", "unit_id", ")", ",", "'x-unit-size'", ":", "str", "(", "unit_size", ")", "}", "params", "=", "QueryParams", "(", "{", "'quick_key'", ":", "quick_key", ",", "'action_on_duplicate'", ":", "action_on_duplicate", ",", "'mtime'", ":", "mtime", ",", "'version_control'", ":", "version_control", ",", "'folder_key'", ":", "folder_key", ",", "'filedrop_key'", ":", "filedrop_key", ",", "'path'", ":", "path", ",", "'previous_hash'", ":", "previous_hash", "}", ")", "upload_info", "=", "{", "\"fd\"", ":", "fd", ",", "\"filename\"", ":", "\"chunk\"", "}", "return", "self", ".", "request", "(", "action", ",", "params", ",", "action_token_type", "=", "\"upload\"", ",", "upload_info", "=", "upload_info", ",", "headers", "=", "headers", ")" ]
upload/resumable http://www.mediafire.com/developers/core_api/1.3/upload/#resumable
[ "upload", "/", "resumable" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L603-L638
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.upload_instant
def upload_instant(self, filename, size, hash_, quick_key=None, folder_key=None, filedrop_key=None, path=None, action_on_duplicate=None, mtime=None, version_control=None, previous_hash=None): """upload/instant http://www.mediafire.com/developers/core_api/1.3/upload/#instant """ return self.request('upload/instant', QueryParams({ 'filename': filename, 'size': size, 'hash': hash_, 'quick_key': quick_key, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'path': path, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime, 'version_control': version_control, 'previous_hash': previous_hash }))
python
def upload_instant(self, filename, size, hash_, quick_key=None, folder_key=None, filedrop_key=None, path=None, action_on_duplicate=None, mtime=None, version_control=None, previous_hash=None): """upload/instant http://www.mediafire.com/developers/core_api/1.3/upload/#instant """ return self.request('upload/instant', QueryParams({ 'filename': filename, 'size': size, 'hash': hash_, 'quick_key': quick_key, 'folder_key': folder_key, 'filedrop_key': filedrop_key, 'path': path, 'action_on_duplicate': action_on_duplicate, 'mtime': mtime, 'version_control': version_control, 'previous_hash': previous_hash }))
[ "def", "upload_instant", "(", "self", ",", "filename", ",", "size", ",", "hash_", ",", "quick_key", "=", "None", ",", "folder_key", "=", "None", ",", "filedrop_key", "=", "None", ",", "path", "=", "None", ",", "action_on_duplicate", "=", "None", ",", "mtime", "=", "None", ",", "version_control", "=", "None", ",", "previous_hash", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'upload/instant'", ",", "QueryParams", "(", "{", "'filename'", ":", "filename", ",", "'size'", ":", "size", ",", "'hash'", ":", "hash_", ",", "'quick_key'", ":", "quick_key", ",", "'folder_key'", ":", "folder_key", ",", "'filedrop_key'", ":", "filedrop_key", ",", "'path'", ":", "path", ",", "'action_on_duplicate'", ":", "action_on_duplicate", ",", "'mtime'", ":", "mtime", ",", "'version_control'", ":", "version_control", ",", "'previous_hash'", ":", "previous_hash", "}", ")", ")" ]
upload/instant http://www.mediafire.com/developers/core_api/1.3/upload/#instant
[ "upload", "/", "instant" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L641-L661
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.file_update
def file_update(self, quick_key, filename=None, description=None, mtime=None, privacy=None): """file/update http://www.mediafire.com/developers/core_api/1.3/file/#update """ return self.request('file/update', QueryParams({ 'quick_key': quick_key, 'filename': filename, 'description': description, 'mtime': mtime, 'privacy': privacy }))
python
def file_update(self, quick_key, filename=None, description=None, mtime=None, privacy=None): """file/update http://www.mediafire.com/developers/core_api/1.3/file/#update """ return self.request('file/update', QueryParams({ 'quick_key': quick_key, 'filename': filename, 'description': description, 'mtime': mtime, 'privacy': privacy }))
[ "def", "file_update", "(", "self", ",", "quick_key", ",", "filename", "=", "None", ",", "description", "=", "None", ",", "mtime", "=", "None", ",", "privacy", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'file/update'", ",", "QueryParams", "(", "{", "'quick_key'", ":", "quick_key", ",", "'filename'", ":", "filename", ",", "'description'", ":", "description", ",", "'mtime'", ":", "mtime", ",", "'privacy'", ":", "privacy", "}", ")", ")" ]
file/update http://www.mediafire.com/developers/core_api/1.3/file/#update
[ "file", "/", "update" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L691-L703
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.file_update_file
def file_update_file(self, quick_key, file_extension=None, filename=None, description=None, mtime=None, privacy=None, timezone=None): """file/update_file http://www.mediafire.com/developers/core_api/1.3/file/#update_file """ return self.request('file/update', QueryParams({ 'quick_key': quick_key, 'file_extension': file_extension, 'filename': filename, 'description': description, 'mtime': mtime, 'privacy': privacy, 'timezone': timezone }))
python
def file_update_file(self, quick_key, file_extension=None, filename=None, description=None, mtime=None, privacy=None, timezone=None): """file/update_file http://www.mediafire.com/developers/core_api/1.3/file/#update_file """ return self.request('file/update', QueryParams({ 'quick_key': quick_key, 'file_extension': file_extension, 'filename': filename, 'description': description, 'mtime': mtime, 'privacy': privacy, 'timezone': timezone }))
[ "def", "file_update_file", "(", "self", ",", "quick_key", ",", "file_extension", "=", "None", ",", "filename", "=", "None", ",", "description", "=", "None", ",", "mtime", "=", "None", ",", "privacy", "=", "None", ",", "timezone", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'file/update'", ",", "QueryParams", "(", "{", "'quick_key'", ":", "quick_key", ",", "'file_extension'", ":", "file_extension", ",", "'filename'", ":", "filename", ",", "'description'", ":", "description", ",", "'mtime'", ":", "mtime", ",", "'privacy'", ":", "privacy", ",", "'timezone'", ":", "timezone", "}", ")", ")" ]
file/update_file http://www.mediafire.com/developers/core_api/1.3/file/#update_file
[ "file", "/", "update_file" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L705-L720
train
MediaFire/mediafire-python-open-sdk
mediafire/api.py
MediaFireApi.file_zip
def file_zip(self, keys, confirm_download=None, meta_only=None): """file/zip http://www.mediafire.com/developers/core_api/1.3/file/#zip """ return self.request('file/zip', QueryParams({ 'keys': keys, 'confirm_download': confirm_download, 'meta_only': meta_only }))
python
def file_zip(self, keys, confirm_download=None, meta_only=None): """file/zip http://www.mediafire.com/developers/core_api/1.3/file/#zip """ return self.request('file/zip', QueryParams({ 'keys': keys, 'confirm_download': confirm_download, 'meta_only': meta_only }))
[ "def", "file_zip", "(", "self", ",", "keys", ",", "confirm_download", "=", "None", ",", "meta_only", "=", "None", ")", ":", "return", "self", ".", "request", "(", "'file/zip'", ",", "QueryParams", "(", "{", "'keys'", ":", "keys", ",", "'confirm_download'", ":", "confirm_download", ",", "'meta_only'", ":", "meta_only", "}", ")", ")" ]
file/zip http://www.mediafire.com/developers/core_api/1.3/file/#zip
[ "file", "/", "zip" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/mediafire/api.py#L750-L759
train
dlecocq/nsq-py
nsq/connection.py
Connection._reset
def _reset(self): '''Reset all of our stateful variables''' self._socket = None # The pending messages we have to send, and the current buffer we're # sending self._pending = deque() self._out_buffer = '' # Our read buffer self._buffer = '' # The identify response we last received from the server self._identify_response = {} # Our ready state self.last_ready_sent = 0 self.ready = 0
python
def _reset(self): '''Reset all of our stateful variables''' self._socket = None # The pending messages we have to send, and the current buffer we're # sending self._pending = deque() self._out_buffer = '' # Our read buffer self._buffer = '' # The identify response we last received from the server self._identify_response = {} # Our ready state self.last_ready_sent = 0 self.ready = 0
[ "def", "_reset", "(", "self", ")", ":", "self", ".", "_socket", "=", "None", "# The pending messages we have to send, and the current buffer we're", "# sending", "self", ".", "_pending", "=", "deque", "(", ")", "self", ".", "_out_buffer", "=", "''", "# Our read buffer", "self", ".", "_buffer", "=", "''", "# The identify response we last received from the server", "self", ".", "_identify_response", "=", "{", "}", "# Our ready state", "self", ".", "last_ready_sent", "=", "0", "self", ".", "ready", "=", "0" ]
Reset all of our stateful variables
[ "Reset", "all", "of", "our", "stateful", "variables" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L91-L104
train
dlecocq/nsq-py
nsq/connection.py
Connection.connect
def connect(self, force=False): '''Establish a connection''' # Don't re-establish existing connections if not force and self.alive(): return True self._reset() # Otherwise, try to connect with self._socket_lock: try: logger.info('Creating socket...') self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self._socket.settimeout(self._timeout) logger.info('Connecting to %s, %s', self.host, self.port) self._socket.connect((self.host, self.port)) # Set our socket's blocking state to whatever ours is self._socket.setblocking(self._blocking) # Safely write our magic self._pending.append(constants.MAGIC_V2) while self.pending(): self.flush() # And send our identify command self.identify(self._identify_options) while self.pending(): self.flush() self._reconnnection_counter.success() # Wait until we've gotten a response to IDENTIFY, try to read # one. Also, only spend up to the provided timeout waiting to # establish the connection. limit = time.time() + self._timeout responses = self._read(1) while (not responses) and (time.time() < limit): responses = self._read(1) if not responses: raise ConnectionTimeoutException( 'Read identify response timed out (%ss)' % self._timeout) self.identified(responses[0]) return True except: logger.exception('Failed to connect') if self._socket: self._socket.close() self._reconnnection_counter.failed() self._reset() return False
python
def connect(self, force=False): '''Establish a connection''' # Don't re-establish existing connections if not force and self.alive(): return True self._reset() # Otherwise, try to connect with self._socket_lock: try: logger.info('Creating socket...') self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self._socket.settimeout(self._timeout) logger.info('Connecting to %s, %s', self.host, self.port) self._socket.connect((self.host, self.port)) # Set our socket's blocking state to whatever ours is self._socket.setblocking(self._blocking) # Safely write our magic self._pending.append(constants.MAGIC_V2) while self.pending(): self.flush() # And send our identify command self.identify(self._identify_options) while self.pending(): self.flush() self._reconnnection_counter.success() # Wait until we've gotten a response to IDENTIFY, try to read # one. Also, only spend up to the provided timeout waiting to # establish the connection. limit = time.time() + self._timeout responses = self._read(1) while (not responses) and (time.time() < limit): responses = self._read(1) if not responses: raise ConnectionTimeoutException( 'Read identify response timed out (%ss)' % self._timeout) self.identified(responses[0]) return True except: logger.exception('Failed to connect') if self._socket: self._socket.close() self._reconnnection_counter.failed() self._reset() return False
[ "def", "connect", "(", "self", ",", "force", "=", "False", ")", ":", "# Don't re-establish existing connections", "if", "not", "force", "and", "self", ".", "alive", "(", ")", ":", "return", "True", "self", ".", "_reset", "(", ")", "# Otherwise, try to connect", "with", "self", ".", "_socket_lock", ":", "try", ":", "logger", ".", "info", "(", "'Creating socket...'", ")", "self", ".", "_socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "self", ".", "_socket", ".", "settimeout", "(", "self", ".", "_timeout", ")", "logger", ".", "info", "(", "'Connecting to %s, %s'", ",", "self", ".", "host", ",", "self", ".", "port", ")", "self", ".", "_socket", ".", "connect", "(", "(", "self", ".", "host", ",", "self", ".", "port", ")", ")", "# Set our socket's blocking state to whatever ours is", "self", ".", "_socket", ".", "setblocking", "(", "self", ".", "_blocking", ")", "# Safely write our magic", "self", ".", "_pending", ".", "append", "(", "constants", ".", "MAGIC_V2", ")", "while", "self", ".", "pending", "(", ")", ":", "self", ".", "flush", "(", ")", "# And send our identify command", "self", ".", "identify", "(", "self", ".", "_identify_options", ")", "while", "self", ".", "pending", "(", ")", ":", "self", ".", "flush", "(", ")", "self", ".", "_reconnnection_counter", ".", "success", "(", ")", "# Wait until we've gotten a response to IDENTIFY, try to read", "# one. 
Also, only spend up to the provided timeout waiting to", "# establish the connection.", "limit", "=", "time", ".", "time", "(", ")", "+", "self", ".", "_timeout", "responses", "=", "self", ".", "_read", "(", "1", ")", "while", "(", "not", "responses", ")", "and", "(", "time", ".", "time", "(", ")", "<", "limit", ")", ":", "responses", "=", "self", ".", "_read", "(", "1", ")", "if", "not", "responses", ":", "raise", "ConnectionTimeoutException", "(", "'Read identify response timed out (%ss)'", "%", "self", ".", "_timeout", ")", "self", ".", "identified", "(", "responses", "[", "0", "]", ")", "return", "True", "except", ":", "logger", ".", "exception", "(", "'Failed to connect'", ")", "if", "self", ".", "_socket", ":", "self", ".", "_socket", ".", "close", "(", ")", "self", ".", "_reconnnection_counter", ".", "failed", "(", ")", "self", ".", "_reset", "(", ")", "return", "False" ]
Establish a connection
[ "Establish", "a", "connection" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L106-L151
train
dlecocq/nsq-py
nsq/connection.py
Connection.close
def close(self): '''Close our connection''' # Flush any unsent message try: while self.pending(): self.flush() except socket.error: pass with self._socket_lock: try: if self._socket: self._socket.close() finally: self._reset()
python
def close(self): '''Close our connection''' # Flush any unsent message try: while self.pending(): self.flush() except socket.error: pass with self._socket_lock: try: if self._socket: self._socket.close() finally: self._reset()
[ "def", "close", "(", "self", ")", ":", "# Flush any unsent message", "try", ":", "while", "self", ".", "pending", "(", ")", ":", "self", ".", "flush", "(", ")", "except", "socket", ".", "error", ":", "pass", "with", "self", ".", "_socket_lock", ":", "try", ":", "if", "self", ".", "_socket", ":", "self", ".", "_socket", ".", "close", "(", ")", "finally", ":", "self", ".", "_reset", "(", ")" ]
Close our connection
[ "Close", "our", "connection" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L153-L166
train
dlecocq/nsq-py
nsq/connection.py
Connection.socket
def socket(self, blocking=True): '''Blockingly yield the socket''' # If the socket is available, then yield it. Otherwise, yield nothing if self._socket_lock.acquire(blocking): try: yield self._socket finally: self._socket_lock.release()
python
def socket(self, blocking=True): '''Blockingly yield the socket''' # If the socket is available, then yield it. Otherwise, yield nothing if self._socket_lock.acquire(blocking): try: yield self._socket finally: self._socket_lock.release()
[ "def", "socket", "(", "self", ",", "blocking", "=", "True", ")", ":", "# If the socket is available, then yield it. Otherwise, yield nothing", "if", "self", ".", "_socket_lock", ".", "acquire", "(", "blocking", ")", ":", "try", ":", "yield", "self", ".", "_socket", "finally", ":", "self", ".", "_socket_lock", ".", "release", "(", ")" ]
Blockingly yield the socket
[ "Blockingly", "yield", "the", "socket" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L168-L175
train
dlecocq/nsq-py
nsq/connection.py
Connection.identified
def identified(self, res): '''Handle a response to our 'identify' command. Returns response''' # If they support it, they should give us a JSON blob which we should # inspect. try: res.data = json.loads(res.data) self._identify_response = res.data logger.info('Got identify response: %s', res.data) except: logger.warn('Server does not support feature negotiation') self._identify_response = {} # Save our max ready count unless it's not provided self.max_rdy_count = self._identify_response.get( 'max_rdy_count', self.max_rdy_count) if self._identify_options.get('tls_v1', False): if not self._identify_response.get('tls_v1', False): raise UnsupportedException( 'NSQd instance does not support TLS') else: self._socket = TLSSocket.wrap_socket(self._socket) # Now is the appropriate time to send auth if self._identify_response.get('auth_required', False): if not self._auth_secret: raise UnsupportedException( 'Auth required but not provided') else: self.auth(self._auth_secret) # If we're not talking over TLS, warn the user if not self._identify_response.get('tls_v1', False): logger.warn('Using AUTH without TLS') elif self._auth_secret: logger.warn('Authentication secret provided but not required') return res
python
def identified(self, res): '''Handle a response to our 'identify' command. Returns response''' # If they support it, they should give us a JSON blob which we should # inspect. try: res.data = json.loads(res.data) self._identify_response = res.data logger.info('Got identify response: %s', res.data) except: logger.warn('Server does not support feature negotiation') self._identify_response = {} # Save our max ready count unless it's not provided self.max_rdy_count = self._identify_response.get( 'max_rdy_count', self.max_rdy_count) if self._identify_options.get('tls_v1', False): if not self._identify_response.get('tls_v1', False): raise UnsupportedException( 'NSQd instance does not support TLS') else: self._socket = TLSSocket.wrap_socket(self._socket) # Now is the appropriate time to send auth if self._identify_response.get('auth_required', False): if not self._auth_secret: raise UnsupportedException( 'Auth required but not provided') else: self.auth(self._auth_secret) # If we're not talking over TLS, warn the user if not self._identify_response.get('tls_v1', False): logger.warn('Using AUTH without TLS') elif self._auth_secret: logger.warn('Authentication secret provided but not required') return res
[ "def", "identified", "(", "self", ",", "res", ")", ":", "# If they support it, they should give us a JSON blob which we should", "# inspect.", "try", ":", "res", ".", "data", "=", "json", ".", "loads", "(", "res", ".", "data", ")", "self", ".", "_identify_response", "=", "res", ".", "data", "logger", ".", "info", "(", "'Got identify response: %s'", ",", "res", ".", "data", ")", "except", ":", "logger", ".", "warn", "(", "'Server does not support feature negotiation'", ")", "self", ".", "_identify_response", "=", "{", "}", "# Save our max ready count unless it's not provided", "self", ".", "max_rdy_count", "=", "self", ".", "_identify_response", ".", "get", "(", "'max_rdy_count'", ",", "self", ".", "max_rdy_count", ")", "if", "self", ".", "_identify_options", ".", "get", "(", "'tls_v1'", ",", "False", ")", ":", "if", "not", "self", ".", "_identify_response", ".", "get", "(", "'tls_v1'", ",", "False", ")", ":", "raise", "UnsupportedException", "(", "'NSQd instance does not support TLS'", ")", "else", ":", "self", ".", "_socket", "=", "TLSSocket", ".", "wrap_socket", "(", "self", ".", "_socket", ")", "# Now is the appropriate time to send auth", "if", "self", ".", "_identify_response", ".", "get", "(", "'auth_required'", ",", "False", ")", ":", "if", "not", "self", ".", "_auth_secret", ":", "raise", "UnsupportedException", "(", "'Auth required but not provided'", ")", "else", ":", "self", ".", "auth", "(", "self", ".", "_auth_secret", ")", "# If we're not talking over TLS, warn the user", "if", "not", "self", ".", "_identify_response", ".", "get", "(", "'tls_v1'", ",", "False", ")", ":", "logger", ".", "warn", "(", "'Using AUTH without TLS'", ")", "elif", "self", ".", "_auth_secret", ":", "logger", ".", "warn", "(", "'Authentication secret provided but not required'", ")", "return", "res" ]
Handle a response to our 'identify' command. Returns response
[ "Handle", "a", "response", "to", "our", "identify", "command", ".", "Returns", "response" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L177-L211
train
dlecocq/nsq-py
nsq/connection.py
Connection.setblocking
def setblocking(self, blocking): '''Set whether or not this message is blocking''' for sock in self.socket(): sock.setblocking(blocking) self._blocking = blocking
python
def setblocking(self, blocking): '''Set whether or not this message is blocking''' for sock in self.socket(): sock.setblocking(blocking) self._blocking = blocking
[ "def", "setblocking", "(", "self", ",", "blocking", ")", ":", "for", "sock", "in", "self", ".", "socket", "(", ")", ":", "sock", ".", "setblocking", "(", "blocking", ")", "self", ".", "_blocking", "=", "blocking" ]
Set whether or not this message is blocking
[ "Set", "whether", "or", "not", "this", "message", "is", "blocking" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L217-L221
train
dlecocq/nsq-py
nsq/connection.py
Connection.flush
def flush(self): '''Flush some of the waiting messages, returns count written''' # When profiling, we found that while there was some efficiency to be # gained elsewhere, the big performance hit is sending lots of small # messages at a time. In particular, consumers send many 'FIN' messages # which are very small indeed and the cost of dispatching so many system # calls is very high. Instead, we prefer to glom together many messages # into a single string to send at once. total = 0 for sock in self.socket(blocking=False): # If there's nothing left in the out buffer, take whatever's in the # pending queue. # # When using SSL, if the socket throws 'SSL_WANT_WRITE', then the # subsequent send requests have to send the same buffer. pending = self._pending data = self._out_buffer or ''.join( pending.popleft() for _ in xrange(len(pending))) try: # Try to send as much of the first message as possible total = sock.send(data) except socket.error as exc: # Catch (errno, message)-type socket.errors if exc.args[0] not in self.WOULD_BLOCK_ERRS: raise self._out_buffer = data else: self._out_buffer = None finally: if total < len(data): # Save the rest of the message that could not be sent self._pending.appendleft(data[total:]) return total
python
def flush(self): '''Flush some of the waiting messages, returns count written''' # When profiling, we found that while there was some efficiency to be # gained elsewhere, the big performance hit is sending lots of small # messages at a time. In particular, consumers send many 'FIN' messages # which are very small indeed and the cost of dispatching so many system # calls is very high. Instead, we prefer to glom together many messages # into a single string to send at once. total = 0 for sock in self.socket(blocking=False): # If there's nothing left in the out buffer, take whatever's in the # pending queue. # # When using SSL, if the socket throws 'SSL_WANT_WRITE', then the # subsequent send requests have to send the same buffer. pending = self._pending data = self._out_buffer or ''.join( pending.popleft() for _ in xrange(len(pending))) try: # Try to send as much of the first message as possible total = sock.send(data) except socket.error as exc: # Catch (errno, message)-type socket.errors if exc.args[0] not in self.WOULD_BLOCK_ERRS: raise self._out_buffer = data else: self._out_buffer = None finally: if total < len(data): # Save the rest of the message that could not be sent self._pending.appendleft(data[total:]) return total
[ "def", "flush", "(", "self", ")", ":", "# When profiling, we found that while there was some efficiency to be", "# gained elsewhere, the big performance hit is sending lots of small", "# messages at a time. In particular, consumers send many 'FIN' messages", "# which are very small indeed and the cost of dispatching so many system", "# calls is very high. Instead, we prefer to glom together many messages", "# into a single string to send at once.", "total", "=", "0", "for", "sock", "in", "self", ".", "socket", "(", "blocking", "=", "False", ")", ":", "# If there's nothing left in the out buffer, take whatever's in the", "# pending queue.", "#", "# When using SSL, if the socket throws 'SSL_WANT_WRITE', then the", "# subsequent send requests have to send the same buffer.", "pending", "=", "self", ".", "_pending", "data", "=", "self", ".", "_out_buffer", "or", "''", ".", "join", "(", "pending", ".", "popleft", "(", ")", "for", "_", "in", "xrange", "(", "len", "(", "pending", ")", ")", ")", "try", ":", "# Try to send as much of the first message as possible", "total", "=", "sock", ".", "send", "(", "data", ")", "except", "socket", ".", "error", "as", "exc", ":", "# Catch (errno, message)-type socket.errors", "if", "exc", ".", "args", "[", "0", "]", "not", "in", "self", ".", "WOULD_BLOCK_ERRS", ":", "raise", "self", ".", "_out_buffer", "=", "data", "else", ":", "self", ".", "_out_buffer", "=", "None", "finally", ":", "if", "total", "<", "len", "(", "data", ")", ":", "# Save the rest of the message that could not be sent", "self", ".", "_pending", ".", "appendleft", "(", "data", "[", "total", ":", "]", ")", "return", "total" ]
Flush some of the waiting messages, returns count written
[ "Flush", "some", "of", "the", "waiting", "messages", "returns", "count", "written" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L234-L266
train
dlecocq/nsq-py
nsq/connection.py
Connection.send
def send(self, command, message=None): '''Send a command over the socket with length endcoded''' if message: joined = command + constants.NL + util.pack(message) else: joined = command + constants.NL if self._blocking: for sock in self.socket(): sock.sendall(joined) else: self._pending.append(joined)
python
def send(self, command, message=None): '''Send a command over the socket with length endcoded''' if message: joined = command + constants.NL + util.pack(message) else: joined = command + constants.NL if self._blocking: for sock in self.socket(): sock.sendall(joined) else: self._pending.append(joined)
[ "def", "send", "(", "self", ",", "command", ",", "message", "=", "None", ")", ":", "if", "message", ":", "joined", "=", "command", "+", "constants", ".", "NL", "+", "util", ".", "pack", "(", "message", ")", "else", ":", "joined", "=", "command", "+", "constants", ".", "NL", "if", "self", ".", "_blocking", ":", "for", "sock", "in", "self", ".", "socket", "(", ")", ":", "sock", ".", "sendall", "(", "joined", ")", "else", ":", "self", ".", "_pending", ".", "append", "(", "joined", ")" ]
Send a command over the socket with length endcoded
[ "Send", "a", "command", "over", "the", "socket", "with", "length", "endcoded" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L268-L278
train
dlecocq/nsq-py
nsq/connection.py
Connection.identify
def identify(self, data): '''Send an identification message''' return self.send(constants.IDENTIFY, json.dumps(data))
python
def identify(self, data): '''Send an identification message''' return self.send(constants.IDENTIFY, json.dumps(data))
[ "def", "identify", "(", "self", ",", "data", ")", ":", "return", "self", ".", "send", "(", "constants", ".", "IDENTIFY", ",", "json", ".", "dumps", "(", "data", ")", ")" ]
Send an identification message
[ "Send", "an", "identification", "message" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L280-L282
train
dlecocq/nsq-py
nsq/connection.py
Connection.sub
def sub(self, topic, channel): '''Subscribe to a topic/channel''' return self.send(' '.join((constants.SUB, topic, channel)))
python
def sub(self, topic, channel): '''Subscribe to a topic/channel''' return self.send(' '.join((constants.SUB, topic, channel)))
[ "def", "sub", "(", "self", ",", "topic", ",", "channel", ")", ":", "return", "self", ".", "send", "(", "' '", ".", "join", "(", "(", "constants", ".", "SUB", ",", "topic", ",", "channel", ")", ")", ")" ]
Subscribe to a topic/channel
[ "Subscribe", "to", "a", "topic", "/", "channel" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L288-L290
train
dlecocq/nsq-py
nsq/connection.py
Connection.pub
def pub(self, topic, message): '''Publish to a topic''' return self.send(' '.join((constants.PUB, topic)), message)
python
def pub(self, topic, message): '''Publish to a topic''' return self.send(' '.join((constants.PUB, topic)), message)
[ "def", "pub", "(", "self", ",", "topic", ",", "message", ")", ":", "return", "self", ".", "send", "(", "' '", ".", "join", "(", "(", "constants", ".", "PUB", ",", "topic", ")", ")", ",", "message", ")" ]
Publish to a topic
[ "Publish", "to", "a", "topic" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L292-L294
train
dlecocq/nsq-py
nsq/connection.py
Connection.mpub
def mpub(self, topic, *messages): '''Publish multiple messages to a topic''' return self.send(constants.MPUB + ' ' + topic, messages)
python
def mpub(self, topic, *messages): '''Publish multiple messages to a topic''' return self.send(constants.MPUB + ' ' + topic, messages)
[ "def", "mpub", "(", "self", ",", "topic", ",", "*", "messages", ")", ":", "return", "self", ".", "send", "(", "constants", ".", "MPUB", "+", "' '", "+", "topic", ",", "messages", ")" ]
Publish multiple messages to a topic
[ "Publish", "multiple", "messages", "to", "a", "topic" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L296-L298
train
dlecocq/nsq-py
nsq/connection.py
Connection.rdy
def rdy(self, count): '''Indicate that you're ready to receive''' self.ready = count self.last_ready_sent = count return self.send(constants.RDY + ' ' + str(count))
python
def rdy(self, count): '''Indicate that you're ready to receive''' self.ready = count self.last_ready_sent = count return self.send(constants.RDY + ' ' + str(count))
[ "def", "rdy", "(", "self", ",", "count", ")", ":", "self", ".", "ready", "=", "count", "self", ".", "last_ready_sent", "=", "count", "return", "self", ".", "send", "(", "constants", ".", "RDY", "+", "' '", "+", "str", "(", "count", ")", ")" ]
Indicate that you're ready to receive
[ "Indicate", "that", "you", "re", "ready", "to", "receive" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L300-L304
train
dlecocq/nsq-py
nsq/connection.py
Connection.req
def req(self, message_id, timeout): '''Re-queue a message''' return self.send(constants.REQ + ' ' + message_id + ' ' + str(timeout))
python
def req(self, message_id, timeout): '''Re-queue a message''' return self.send(constants.REQ + ' ' + message_id + ' ' + str(timeout))
[ "def", "req", "(", "self", ",", "message_id", ",", "timeout", ")", ":", "return", "self", ".", "send", "(", "constants", ".", "REQ", "+", "' '", "+", "message_id", "+", "' '", "+", "str", "(", "timeout", ")", ")" ]
Re-queue a message
[ "Re", "-", "queue", "a", "message" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L310-L312
train
dlecocq/nsq-py
nsq/connection.py
Connection._read
def _read(self, limit=1000): '''Return all the responses read''' # It's important to know that it may return no responses or multiple # responses. It depends on how the buffering works out. First, read from # the socket for sock in self.socket(): if sock is None: # Race condition. Connection has been closed. return [] try: packet = sock.recv(4096) except socket.timeout: # If the socket times out, return nothing return [] except socket.error as exc: # Catch (errno, message)-type socket.errors if exc.args[0] in self.WOULD_BLOCK_ERRS: return [] else: raise # Append our newly-read data to our buffer self._buffer += packet responses = [] total = 0 buf = self._buffer remaining = len(buf) while limit and (remaining >= 4): size = struct.unpack('>l', buf[total:(total + 4)])[0] # Now check to see if there's enough left in the buffer to read # the message. if (remaining - 4) >= size: responses.append(Response.from_raw( self, buf[(total + 4):(total + size + 4)])) total += (size + 4) remaining -= (size + 4) limit -= 1 else: break self._buffer = self._buffer[total:] return responses
python
def _read(self, limit=1000): '''Return all the responses read''' # It's important to know that it may return no responses or multiple # responses. It depends on how the buffering works out. First, read from # the socket for sock in self.socket(): if sock is None: # Race condition. Connection has been closed. return [] try: packet = sock.recv(4096) except socket.timeout: # If the socket times out, return nothing return [] except socket.error as exc: # Catch (errno, message)-type socket.errors if exc.args[0] in self.WOULD_BLOCK_ERRS: return [] else: raise # Append our newly-read data to our buffer self._buffer += packet responses = [] total = 0 buf = self._buffer remaining = len(buf) while limit and (remaining >= 4): size = struct.unpack('>l', buf[total:(total + 4)])[0] # Now check to see if there's enough left in the buffer to read # the message. if (remaining - 4) >= size: responses.append(Response.from_raw( self, buf[(total + 4):(total + size + 4)])) total += (size + 4) remaining -= (size + 4) limit -= 1 else: break self._buffer = self._buffer[total:] return responses
[ "def", "_read", "(", "self", ",", "limit", "=", "1000", ")", ":", "# It's important to know that it may return no responses or multiple", "# responses. It depends on how the buffering works out. First, read from", "# the socket", "for", "sock", "in", "self", ".", "socket", "(", ")", ":", "if", "sock", "is", "None", ":", "# Race condition. Connection has been closed.", "return", "[", "]", "try", ":", "packet", "=", "sock", ".", "recv", "(", "4096", ")", "except", "socket", ".", "timeout", ":", "# If the socket times out, return nothing", "return", "[", "]", "except", "socket", ".", "error", "as", "exc", ":", "# Catch (errno, message)-type socket.errors", "if", "exc", ".", "args", "[", "0", "]", "in", "self", ".", "WOULD_BLOCK_ERRS", ":", "return", "[", "]", "else", ":", "raise", "# Append our newly-read data to our buffer", "self", ".", "_buffer", "+=", "packet", "responses", "=", "[", "]", "total", "=", "0", "buf", "=", "self", ".", "_buffer", "remaining", "=", "len", "(", "buf", ")", "while", "limit", "and", "(", "remaining", ">=", "4", ")", ":", "size", "=", "struct", ".", "unpack", "(", "'>l'", ",", "buf", "[", "total", ":", "(", "total", "+", "4", ")", "]", ")", "[", "0", "]", "# Now check to see if there's enough left in the buffer to read", "# the message.", "if", "(", "remaining", "-", "4", ")", ">=", "size", ":", "responses", ".", "append", "(", "Response", ".", "from_raw", "(", "self", ",", "buf", "[", "(", "total", "+", "4", ")", ":", "(", "total", "+", "size", "+", "4", ")", "]", ")", ")", "total", "+=", "(", "size", "+", "4", ")", "remaining", "-=", "(", "size", "+", "4", ")", "limit", "-=", "1", "else", ":", "break", "self", ".", "_buffer", "=", "self", ".", "_buffer", "[", "total", ":", "]", "return", "responses" ]
Return all the responses read
[ "Return", "all", "the", "responses", "read" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L330-L371
train
dlecocq/nsq-py
nsq/connection.py
Connection.read
def read(self): '''Responses from an established socket''' responses = self._read() # Determine the number of messages in here and decrement our ready # count appropriately self.ready -= sum( map(int, (r.frame_type == Message.FRAME_TYPE for r in responses))) return responses
python
def read(self): '''Responses from an established socket''' responses = self._read() # Determine the number of messages in here and decrement our ready # count appropriately self.ready -= sum( map(int, (r.frame_type == Message.FRAME_TYPE for r in responses))) return responses
[ "def", "read", "(", "self", ")", ":", "responses", "=", "self", ".", "_read", "(", ")", "# Determine the number of messages in here and decrement our ready", "# count appropriately", "self", ".", "ready", "-=", "sum", "(", "map", "(", "int", ",", "(", "r", ".", "frame_type", "==", "Message", ".", "FRAME_TYPE", "for", "r", "in", "responses", ")", ")", ")", "return", "responses" ]
Responses from an established socket
[ "Responses", "from", "an", "established", "socket" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/connection.py#L373-L380
train
dlecocq/nsq-py
nsq/client.py
Client.discover
def discover(self, topic): '''Run the discovery mechanism''' logger.info('Discovering on topic %s', topic) producers = [] for lookupd in self._lookupd: logger.info('Discovering on %s', lookupd) try: # Find all the current producers on this instance for producer in lookupd.lookup(topic)['producers']: logger.info('Found producer %s on %s', producer, lookupd) producers.append( (producer['broadcast_address'], producer['tcp_port'])) except ClientException: logger.exception('Failed to query %s', lookupd) new = [] for host, port in producers: conn = self._connections.get((host, port)) if not conn: logger.info('Discovered %s:%s', host, port) new.append(self.connect(host, port)) elif not conn.alive(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn) else: logger.debug('Connection to %s:%s still alive', host, port) # And return all the new connections return [conn for conn in new if conn]
python
def discover(self, topic): '''Run the discovery mechanism''' logger.info('Discovering on topic %s', topic) producers = [] for lookupd in self._lookupd: logger.info('Discovering on %s', lookupd) try: # Find all the current producers on this instance for producer in lookupd.lookup(topic)['producers']: logger.info('Found producer %s on %s', producer, lookupd) producers.append( (producer['broadcast_address'], producer['tcp_port'])) except ClientException: logger.exception('Failed to query %s', lookupd) new = [] for host, port in producers: conn = self._connections.get((host, port)) if not conn: logger.info('Discovered %s:%s', host, port) new.append(self.connect(host, port)) elif not conn.alive(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn) else: logger.debug('Connection to %s:%s still alive', host, port) # And return all the new connections return [conn for conn in new if conn]
[ "def", "discover", "(", "self", ",", "topic", ")", ":", "logger", ".", "info", "(", "'Discovering on topic %s'", ",", "topic", ")", "producers", "=", "[", "]", "for", "lookupd", "in", "self", ".", "_lookupd", ":", "logger", ".", "info", "(", "'Discovering on %s'", ",", "lookupd", ")", "try", ":", "# Find all the current producers on this instance", "for", "producer", "in", "lookupd", ".", "lookup", "(", "topic", ")", "[", "'producers'", "]", ":", "logger", ".", "info", "(", "'Found producer %s on %s'", ",", "producer", ",", "lookupd", ")", "producers", ".", "append", "(", "(", "producer", "[", "'broadcast_address'", "]", ",", "producer", "[", "'tcp_port'", "]", ")", ")", "except", "ClientException", ":", "logger", ".", "exception", "(", "'Failed to query %s'", ",", "lookupd", ")", "new", "=", "[", "]", "for", "host", ",", "port", "in", "producers", ":", "conn", "=", "self", ".", "_connections", ".", "get", "(", "(", "host", ",", "port", ")", ")", "if", "not", "conn", ":", "logger", ".", "info", "(", "'Discovered %s:%s'", ",", "host", ",", "port", ")", "new", ".", "append", "(", "self", ".", "connect", "(", "host", ",", "port", ")", ")", "elif", "not", "conn", ".", "alive", "(", ")", ":", "logger", ".", "info", "(", "'Reconnecting to %s:%s'", ",", "host", ",", "port", ")", "if", "conn", ".", "connect", "(", ")", ":", "conn", ".", "setblocking", "(", "0", ")", "self", ".", "reconnected", "(", "conn", ")", "else", ":", "logger", ".", "debug", "(", "'Connection to %s:%s still alive'", ",", "host", ",", "port", ")", "# And return all the new connections", "return", "[", "conn", "for", "conn", "in", "new", "if", "conn", "]" ]
Run the discovery mechanism
[ "Run", "the", "discovery", "mechanism" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L59-L89
train
dlecocq/nsq-py
nsq/client.py
Client.check_connections
def check_connections(self): '''Connect to all the appropriate instances''' logger.info('Checking connections') if self._lookupd: self.discover(self._topic) # Make sure we're connected to all the prescribed hosts for hostspec in self._nsqd_tcp_addresses: logger.debug('Checking nsqd instance %s', hostspec) host, port = hostspec.split(':') port = int(port) conn = self._connections.get((host, port), None) # If there is no connection to it, we have to try to connect if not conn: logger.info('Connecting to %s:%s', host, port) self.connect(host, port) elif not conn.alive(): # If we've connected to it before, but it's no longer alive, # we'll have to make a decision about when to try to reconnect # to it, if we need to reconnect to it at all if conn.ready_to_reconnect(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn) else: logger.debug('Checking freshness') now = time.time() time_check = math.ceil(now - self.last_recv_timestamp) if time_check >= ((self.heartbeat_interval * 2) / 1000.0): if conn.ready_to_reconnect(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn)
python
def check_connections(self): '''Connect to all the appropriate instances''' logger.info('Checking connections') if self._lookupd: self.discover(self._topic) # Make sure we're connected to all the prescribed hosts for hostspec in self._nsqd_tcp_addresses: logger.debug('Checking nsqd instance %s', hostspec) host, port = hostspec.split(':') port = int(port) conn = self._connections.get((host, port), None) # If there is no connection to it, we have to try to connect if not conn: logger.info('Connecting to %s:%s', host, port) self.connect(host, port) elif not conn.alive(): # If we've connected to it before, but it's no longer alive, # we'll have to make a decision about when to try to reconnect # to it, if we need to reconnect to it at all if conn.ready_to_reconnect(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn) else: logger.debug('Checking freshness') now = time.time() time_check = math.ceil(now - self.last_recv_timestamp) if time_check >= ((self.heartbeat_interval * 2) / 1000.0): if conn.ready_to_reconnect(): logger.info('Reconnecting to %s:%s', host, port) if conn.connect(): conn.setblocking(0) self.reconnected(conn)
[ "def", "check_connections", "(", "self", ")", ":", "logger", ".", "info", "(", "'Checking connections'", ")", "if", "self", ".", "_lookupd", ":", "self", ".", "discover", "(", "self", ".", "_topic", ")", "# Make sure we're connected to all the prescribed hosts", "for", "hostspec", "in", "self", ".", "_nsqd_tcp_addresses", ":", "logger", ".", "debug", "(", "'Checking nsqd instance %s'", ",", "hostspec", ")", "host", ",", "port", "=", "hostspec", ".", "split", "(", "':'", ")", "port", "=", "int", "(", "port", ")", "conn", "=", "self", ".", "_connections", ".", "get", "(", "(", "host", ",", "port", ")", ",", "None", ")", "# If there is no connection to it, we have to try to connect", "if", "not", "conn", ":", "logger", ".", "info", "(", "'Connecting to %s:%s'", ",", "host", ",", "port", ")", "self", ".", "connect", "(", "host", ",", "port", ")", "elif", "not", "conn", ".", "alive", "(", ")", ":", "# If we've connected to it before, but it's no longer alive,", "# we'll have to make a decision about when to try to reconnect", "# to it, if we need to reconnect to it at all", "if", "conn", ".", "ready_to_reconnect", "(", ")", ":", "logger", ".", "info", "(", "'Reconnecting to %s:%s'", ",", "host", ",", "port", ")", "if", "conn", ".", "connect", "(", ")", ":", "conn", ".", "setblocking", "(", "0", ")", "self", ".", "reconnected", "(", "conn", ")", "else", ":", "logger", ".", "debug", "(", "'Checking freshness'", ")", "now", "=", "time", ".", "time", "(", ")", "time_check", "=", "math", ".", "ceil", "(", "now", "-", "self", ".", "last_recv_timestamp", ")", "if", "time_check", ">=", "(", "(", "self", ".", "heartbeat_interval", "*", "2", ")", "/", "1000.0", ")", ":", "if", "conn", ".", "ready_to_reconnect", "(", ")", ":", "logger", ".", "info", "(", "'Reconnecting to %s:%s'", ",", "host", ",", "port", ")", "if", "conn", ".", "connect", "(", ")", ":", "conn", ".", "setblocking", "(", "0", ")", "self", ".", "reconnected", "(", "conn", ")" ]
Connect to all the appropriate instances
[ "Connect", "to", "all", "the", "appropriate", "instances" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L91-L125
train
dlecocq/nsq-py
nsq/client.py
Client.connection_checker
def connection_checker(self): '''Run periodic reconnection checks''' thread = ConnectionChecker(self) logger.info('Starting connection-checker thread') thread.start() try: yield thread finally: logger.info('Stopping connection-checker') thread.stop() logger.info('Joining connection-checker') thread.join()
python
def connection_checker(self): '''Run periodic reconnection checks''' thread = ConnectionChecker(self) logger.info('Starting connection-checker thread') thread.start() try: yield thread finally: logger.info('Stopping connection-checker') thread.stop() logger.info('Joining connection-checker') thread.join()
[ "def", "connection_checker", "(", "self", ")", ":", "thread", "=", "ConnectionChecker", "(", "self", ")", "logger", ".", "info", "(", "'Starting connection-checker thread'", ")", "thread", ".", "start", "(", ")", "try", ":", "yield", "thread", "finally", ":", "logger", ".", "info", "(", "'Stopping connection-checker'", ")", "thread", ".", "stop", "(", ")", "logger", ".", "info", "(", "'Joining connection-checker'", ")", "thread", ".", "join", "(", ")" ]
Run periodic reconnection checks
[ "Run", "periodic", "reconnection", "checks" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L128-L139
train
dlecocq/nsq-py
nsq/client.py
Client.connect
def connect(self, host, port): '''Connect to the provided host, port''' conn = connection.Connection(host, port, reconnection_backoff=self._reconnection_backoff, auth_secret=self._auth_secret, timeout=self._connect_timeout, **self._identify_options) if conn.alive(): conn.setblocking(0) self.add(conn) return conn
python
def connect(self, host, port): '''Connect to the provided host, port''' conn = connection.Connection(host, port, reconnection_backoff=self._reconnection_backoff, auth_secret=self._auth_secret, timeout=self._connect_timeout, **self._identify_options) if conn.alive(): conn.setblocking(0) self.add(conn) return conn
[ "def", "connect", "(", "self", ",", "host", ",", "port", ")", ":", "conn", "=", "connection", ".", "Connection", "(", "host", ",", "port", ",", "reconnection_backoff", "=", "self", ".", "_reconnection_backoff", ",", "auth_secret", "=", "self", ".", "_auth_secret", ",", "timeout", "=", "self", ".", "_connect_timeout", ",", "*", "*", "self", ".", "_identify_options", ")", "if", "conn", ".", "alive", "(", ")", ":", "conn", ".", "setblocking", "(", "0", ")", "self", ".", "add", "(", "conn", ")", "return", "conn" ]
Connect to the provided host, port
[ "Connect", "to", "the", "provided", "host", "port" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L141-L151
train
dlecocq/nsq-py
nsq/client.py
Client.add
def add(self, connection): '''Add a connection''' key = (connection.host, connection.port) with self._lock: if key not in self._connections: self._connections[key] = connection self.added(connection) return connection else: return None
python
def add(self, connection): '''Add a connection''' key = (connection.host, connection.port) with self._lock: if key not in self._connections: self._connections[key] = connection self.added(connection) return connection else: return None
[ "def", "add", "(", "self", ",", "connection", ")", ":", "key", "=", "(", "connection", ".", "host", ",", "connection", ".", "port", ")", "with", "self", ".", "_lock", ":", "if", "key", "not", "in", "self", ".", "_connections", ":", "self", ".", "_connections", "[", "key", "]", "=", "connection", "self", ".", "added", "(", "connection", ")", "return", "connection", "else", ":", "return", "None" ]
Add a connection
[ "Add", "a", "connection" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L164-L173
train
dlecocq/nsq-py
nsq/client.py
Client.remove
def remove(self, connection): '''Remove a connection''' key = (connection.host, connection.port) with self._lock: found = self._connections.pop(key, None) try: self.close_connection(found) except Exception as exc: logger.warn('Failed to close %s: %s', connection, exc) return found
python
def remove(self, connection): '''Remove a connection''' key = (connection.host, connection.port) with self._lock: found = self._connections.pop(key, None) try: self.close_connection(found) except Exception as exc: logger.warn('Failed to close %s: %s', connection, exc) return found
[ "def", "remove", "(", "self", ",", "connection", ")", ":", "key", "=", "(", "connection", ".", "host", ",", "connection", ".", "port", ")", "with", "self", ".", "_lock", ":", "found", "=", "self", ".", "_connections", ".", "pop", "(", "key", ",", "None", ")", "try", ":", "self", ".", "close_connection", "(", "found", ")", "except", "Exception", "as", "exc", ":", "logger", ".", "warn", "(", "'Failed to close %s: %s'", ",", "connection", ",", "exc", ")", "return", "found" ]
Remove a connection
[ "Remove", "a", "connection" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L175-L184
train
dlecocq/nsq-py
nsq/client.py
Client.read
def read(self): '''Read from any of the connections that need it''' # We'll check all living connections connections = [c for c in self.connections() if c.alive()] if not connections: # If there are no connections, obviously we return no messages, but # we should wait the duration of the timeout time.sleep(self._timeout) return [] # Not all connections need to be written to, so we'll only concern # ourselves with those that require writes writes = [c for c in connections if c.pending()] try: readable, writable, exceptable = select.select( connections, writes, connections, self._timeout) except exceptions.ConnectionClosedException: logger.exception('Tried selecting on closed client') return [] except select.error: logger.exception('Error running select') return [] # If we returned because the timeout interval passed, log it and return if not (readable or writable or exceptable): logger.debug('Timed out...') return [] responses = [] # For each readable socket, we'll try to read some responses for conn in readable: try: for res in conn.read(): # We'll capture heartbeats and respond to them automatically if (isinstance(res, Response) and res.data == HEARTBEAT): logger.info('Sending heartbeat to %s', conn) conn.nop() logger.debug('Setting last_recv_timestamp') self.last_recv_timestamp = time.time() continue elif isinstance(res, Error): nonfatal = ( exceptions.FinFailedException, exceptions.ReqFailedException, exceptions.TouchFailedException ) if not isinstance(res.exception(), nonfatal): # If it's not any of the non-fatal exceptions, then # we have to close this connection logger.error( 'Closing %s: %s', conn, res.exception()) self.close_connection(conn) responses.append(res) logger.debug('Setting last_recv_timestamp') self.last_recv_timestamp = time.time() except exceptions.NSQException: logger.exception('Failed to read from %s', conn) self.close_connection(conn) except socket.error: logger.exception('Failed to read from %s', conn) self.close_connection(conn) # For 
each writable socket, flush some data out for conn in writable: try: conn.flush() except socket.error: logger.exception('Failed to flush %s', conn) self.close_connection(conn) # For each connection with an exception, try to close it and remove it # from our connections for conn in exceptable: self.close_connection(conn) return responses
python
def read(self): '''Read from any of the connections that need it''' # We'll check all living connections connections = [c for c in self.connections() if c.alive()] if not connections: # If there are no connections, obviously we return no messages, but # we should wait the duration of the timeout time.sleep(self._timeout) return [] # Not all connections need to be written to, so we'll only concern # ourselves with those that require writes writes = [c for c in connections if c.pending()] try: readable, writable, exceptable = select.select( connections, writes, connections, self._timeout) except exceptions.ConnectionClosedException: logger.exception('Tried selecting on closed client') return [] except select.error: logger.exception('Error running select') return [] # If we returned because the timeout interval passed, log it and return if not (readable or writable or exceptable): logger.debug('Timed out...') return [] responses = [] # For each readable socket, we'll try to read some responses for conn in readable: try: for res in conn.read(): # We'll capture heartbeats and respond to them automatically if (isinstance(res, Response) and res.data == HEARTBEAT): logger.info('Sending heartbeat to %s', conn) conn.nop() logger.debug('Setting last_recv_timestamp') self.last_recv_timestamp = time.time() continue elif isinstance(res, Error): nonfatal = ( exceptions.FinFailedException, exceptions.ReqFailedException, exceptions.TouchFailedException ) if not isinstance(res.exception(), nonfatal): # If it's not any of the non-fatal exceptions, then # we have to close this connection logger.error( 'Closing %s: %s', conn, res.exception()) self.close_connection(conn) responses.append(res) logger.debug('Setting last_recv_timestamp') self.last_recv_timestamp = time.time() except exceptions.NSQException: logger.exception('Failed to read from %s', conn) self.close_connection(conn) except socket.error: logger.exception('Failed to read from %s', conn) self.close_connection(conn) # For 
each writable socket, flush some data out for conn in writable: try: conn.flush() except socket.error: logger.exception('Failed to flush %s', conn) self.close_connection(conn) # For each connection with an exception, try to close it and remove it # from our connections for conn in exceptable: self.close_connection(conn) return responses
[ "def", "read", "(", "self", ")", ":", "# We'll check all living connections", "connections", "=", "[", "c", "for", "c", "in", "self", ".", "connections", "(", ")", "if", "c", ".", "alive", "(", ")", "]", "if", "not", "connections", ":", "# If there are no connections, obviously we return no messages, but", "# we should wait the duration of the timeout", "time", ".", "sleep", "(", "self", ".", "_timeout", ")", "return", "[", "]", "# Not all connections need to be written to, so we'll only concern", "# ourselves with those that require writes", "writes", "=", "[", "c", "for", "c", "in", "connections", "if", "c", ".", "pending", "(", ")", "]", "try", ":", "readable", ",", "writable", ",", "exceptable", "=", "select", ".", "select", "(", "connections", ",", "writes", ",", "connections", ",", "self", ".", "_timeout", ")", "except", "exceptions", ".", "ConnectionClosedException", ":", "logger", ".", "exception", "(", "'Tried selecting on closed client'", ")", "return", "[", "]", "except", "select", ".", "error", ":", "logger", ".", "exception", "(", "'Error running select'", ")", "return", "[", "]", "# If we returned because the timeout interval passed, log it and return", "if", "not", "(", "readable", "or", "writable", "or", "exceptable", ")", ":", "logger", ".", "debug", "(", "'Timed out...'", ")", "return", "[", "]", "responses", "=", "[", "]", "# For each readable socket, we'll try to read some responses", "for", "conn", "in", "readable", ":", "try", ":", "for", "res", "in", "conn", ".", "read", "(", ")", ":", "# We'll capture heartbeats and respond to them automatically", "if", "(", "isinstance", "(", "res", ",", "Response", ")", "and", "res", ".", "data", "==", "HEARTBEAT", ")", ":", "logger", ".", "info", "(", "'Sending heartbeat to %s'", ",", "conn", ")", "conn", ".", "nop", "(", ")", "logger", ".", "debug", "(", "'Setting last_recv_timestamp'", ")", "self", ".", "last_recv_timestamp", "=", "time", ".", "time", "(", ")", "continue", "elif", "isinstance", "(", 
"res", ",", "Error", ")", ":", "nonfatal", "=", "(", "exceptions", ".", "FinFailedException", ",", "exceptions", ".", "ReqFailedException", ",", "exceptions", ".", "TouchFailedException", ")", "if", "not", "isinstance", "(", "res", ".", "exception", "(", ")", ",", "nonfatal", ")", ":", "# If it's not any of the non-fatal exceptions, then", "# we have to close this connection", "logger", ".", "error", "(", "'Closing %s: %s'", ",", "conn", ",", "res", ".", "exception", "(", ")", ")", "self", ".", "close_connection", "(", "conn", ")", "responses", ".", "append", "(", "res", ")", "logger", ".", "debug", "(", "'Setting last_recv_timestamp'", ")", "self", ".", "last_recv_timestamp", "=", "time", ".", "time", "(", ")", "except", "exceptions", ".", "NSQException", ":", "logger", ".", "exception", "(", "'Failed to read from %s'", ",", "conn", ")", "self", ".", "close_connection", "(", "conn", ")", "except", "socket", ".", "error", ":", "logger", ".", "exception", "(", "'Failed to read from %s'", ",", "conn", ")", "self", ".", "close_connection", "(", "conn", ")", "# For each writable socket, flush some data out", "for", "conn", "in", "writable", ":", "try", ":", "conn", ".", "flush", "(", ")", "except", "socket", ".", "error", ":", "logger", ".", "exception", "(", "'Failed to flush %s'", ",", "conn", ")", "self", ".", "close_connection", "(", "conn", ")", "# For each connection with an exception, try to close it and remove it", "# from our connections", "for", "conn", "in", "exceptable", ":", "self", ".", "close_connection", "(", "conn", ")", "return", "responses" ]
Read from any of the connections that need it
[ "Read", "from", "any", "of", "the", "connections", "that", "need", "it" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L194-L270
train
dlecocq/nsq-py
nsq/client.py
Client.random_connection
def random_connection(self): '''Pick a random living connection''' # While at the moment there's no need for this to be a context manager # per se, I would like to use that interface since I anticipate # adding some wrapping around it at some point. yield random.choice( [conn for conn in self.connections() if conn.alive()])
python
def random_connection(self): '''Pick a random living connection''' # While at the moment there's no need for this to be a context manager # per se, I would like to use that interface since I anticipate # adding some wrapping around it at some point. yield random.choice( [conn for conn in self.connections() if conn.alive()])
[ "def", "random_connection", "(", "self", ")", ":", "# While at the moment there's no need for this to be a context manager", "# per se, I would like to use that interface since I anticipate", "# adding some wrapping around it at some point.", "yield", "random", ".", "choice", "(", "[", "conn", "for", "conn", "in", "self", ".", "connections", "(", ")", "if", "conn", ".", "alive", "(", ")", "]", ")" ]
Pick a random living connection
[ "Pick", "a", "random", "living", "connection" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L273-L279
train
dlecocq/nsq-py
nsq/client.py
Client.wait_response
def wait_response(self): '''Wait for a response''' responses = self.read() while not responses: responses = self.read() return responses
python
def wait_response(self): '''Wait for a response''' responses = self.read() while not responses: responses = self.read() return responses
[ "def", "wait_response", "(", "self", ")", ":", "responses", "=", "self", ".", "read", "(", ")", "while", "not", "responses", ":", "responses", "=", "self", ".", "read", "(", ")", "return", "responses" ]
Wait for a response
[ "Wait", "for", "a", "response" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L281-L286
train
dlecocq/nsq-py
nsq/client.py
Client.pub
def pub(self, topic, message): '''Publish the provided message to the provided topic''' with self.random_connection() as client: client.pub(topic, message) return self.wait_response()
python
def pub(self, topic, message): '''Publish the provided message to the provided topic''' with self.random_connection() as client: client.pub(topic, message) return self.wait_response()
[ "def", "pub", "(", "self", ",", "topic", ",", "message", ")", ":", "with", "self", ".", "random_connection", "(", ")", "as", "client", ":", "client", ".", "pub", "(", "topic", ",", "message", ")", "return", "self", ".", "wait_response", "(", ")" ]
Publish the provided message to the provided topic
[ "Publish", "the", "provided", "message", "to", "the", "provided", "topic" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L293-L297
train
dlecocq/nsq-py
nsq/client.py
Client.mpub
def mpub(self, topic, *messages): '''Publish messages to a topic''' with self.random_connection() as client: client.mpub(topic, *messages) return self.wait_response()
python
def mpub(self, topic, *messages): '''Publish messages to a topic''' with self.random_connection() as client: client.mpub(topic, *messages) return self.wait_response()
[ "def", "mpub", "(", "self", ",", "topic", ",", "*", "messages", ")", ":", "with", "self", ".", "random_connection", "(", ")", "as", "client", ":", "client", ".", "mpub", "(", "topic", ",", "*", "messages", ")", "return", "self", ".", "wait_response", "(", ")" ]
Publish messages to a topic
[ "Publish", "messages", "to", "a", "topic" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/client.py#L299-L303
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.create_socket
def create_socket(self): """Create a socket for the daemon, depending on the directory location. Args: config_dir (str): The absolute path to the config directory used by the daemon. Returns: socket.socket: The daemon socket. Clients connect to this socket. """ socket_path = os.path.join(self.config_dir, 'pueue.sock') # Create Socket and exit with 1, if socket can't be created try: if os.path.exists(socket_path): os.remove(socket_path) self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.socket.bind(socket_path) self.socket.setblocking(0) self.socket.listen(0) # Set file permissions os.chmod(socket_path, stat.S_IRWXU) except Exception: self.logger.error("Daemon couldn't socket. Aborting") self.logger.exception() sys.exit(1) return self.socket
python
def create_socket(self): """Create a socket for the daemon, depending on the directory location. Args: config_dir (str): The absolute path to the config directory used by the daemon. Returns: socket.socket: The daemon socket. Clients connect to this socket. """ socket_path = os.path.join(self.config_dir, 'pueue.sock') # Create Socket and exit with 1, if socket can't be created try: if os.path.exists(socket_path): os.remove(socket_path) self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.socket.bind(socket_path) self.socket.setblocking(0) self.socket.listen(0) # Set file permissions os.chmod(socket_path, stat.S_IRWXU) except Exception: self.logger.error("Daemon couldn't socket. Aborting") self.logger.exception() sys.exit(1) return self.socket
[ "def", "create_socket", "(", "self", ")", ":", "socket_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config_dir", ",", "'pueue.sock'", ")", "# Create Socket and exit with 1, if socket can't be created", "try", ":", "if", "os", ".", "path", ".", "exists", "(", "socket_path", ")", ":", "os", ".", "remove", "(", "socket_path", ")", "self", ".", "socket", "=", "socket", ".", "socket", "(", "socket", ".", "AF_UNIX", ",", "socket", ".", "SOCK_STREAM", ")", "self", ".", "socket", ".", "setsockopt", "(", "socket", ".", "SOL_SOCKET", ",", "socket", ".", "SO_REUSEADDR", ",", "1", ")", "self", ".", "socket", ".", "bind", "(", "socket_path", ")", "self", ".", "socket", ".", "setblocking", "(", "0", ")", "self", ".", "socket", ".", "listen", "(", "0", ")", "# Set file permissions", "os", ".", "chmod", "(", "socket_path", ",", "stat", ".", "S_IRWXU", ")", "except", "Exception", ":", "self", ".", "logger", ".", "error", "(", "\"Daemon couldn't socket. Aborting\"", ")", "self", ".", "logger", ".", "exception", "(", ")", "sys", ".", "exit", "(", "1", ")", "return", "self", ".", "socket" ]
Create a socket for the daemon, depending on the directory location. Args: config_dir (str): The absolute path to the config directory used by the daemon. Returns: socket.socket: The daemon socket. Clients connect to this socket.
[ "Create", "a", "socket", "for", "the", "daemon", "depending", "on", "the", "directory", "location", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L82-L109
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.initialize_directories
def initialize_directories(self, root_dir): """Create all directories needed for logs and configs.""" if not root_dir: root_dir = os.path.expanduser('~') # Create config directory, if it doesn't exist self.config_dir = os.path.join(root_dir, '.config/pueue') if not os.path.exists(self.config_dir): os.makedirs(self.config_dir)
python
def initialize_directories(self, root_dir): """Create all directories needed for logs and configs.""" if not root_dir: root_dir = os.path.expanduser('~') # Create config directory, if it doesn't exist self.config_dir = os.path.join(root_dir, '.config/pueue') if not os.path.exists(self.config_dir): os.makedirs(self.config_dir)
[ "def", "initialize_directories", "(", "self", ",", "root_dir", ")", ":", "if", "not", "root_dir", ":", "root_dir", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "# Create config directory, if it doesn't exist", "self", ".", "config_dir", "=", "os", ".", "path", ".", "join", "(", "root_dir", ",", "'.config/pueue'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "config_dir", ")", ":", "os", ".", "makedirs", "(", "self", ".", "config_dir", ")" ]
Create all directories needed for logs and configs.
[ "Create", "all", "directories", "needed", "for", "logs", "and", "configs", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L111-L119
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.respond_client
def respond_client(self, answer, socket): """Send an answer to the client.""" response = pickle.dumps(answer, -1) socket.sendall(response) self.read_list.remove(socket) socket.close()
python
def respond_client(self, answer, socket): """Send an answer to the client.""" response = pickle.dumps(answer, -1) socket.sendall(response) self.read_list.remove(socket) socket.close()
[ "def", "respond_client", "(", "self", ",", "answer", ",", "socket", ")", ":", "response", "=", "pickle", ".", "dumps", "(", "answer", ",", "-", "1", ")", "socket", ".", "sendall", "(", "response", ")", "self", ".", "read_list", ".", "remove", "(", "socket", ")", "socket", ".", "close", "(", ")" ]
Send an answer to the client.
[ "Send", "an", "answer", "to", "the", "client", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L121-L126
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.read_config
def read_config(self): """Read a previous configuration file or create a new with default values.""" config_file = os.path.join(self.config_dir, 'pueue.ini') self.config = configparser.ConfigParser() # Try to get configuration file and return it # If this doesn't work, a new default config file will be created if os.path.exists(config_file): try: self.config.read(config_file) return except Exception: self.logger.error('Error while parsing config file. Deleting old config') self.logger.exception() self.config['default'] = { 'resumeAfterStart': False, 'maxProcesses': 1, 'customShell': 'default', } self.config['log'] = { 'logTime': 60*60*24*14, } self.write_config()
python
def read_config(self): """Read a previous configuration file or create a new with default values.""" config_file = os.path.join(self.config_dir, 'pueue.ini') self.config = configparser.ConfigParser() # Try to get configuration file and return it # If this doesn't work, a new default config file will be created if os.path.exists(config_file): try: self.config.read(config_file) return except Exception: self.logger.error('Error while parsing config file. Deleting old config') self.logger.exception() self.config['default'] = { 'resumeAfterStart': False, 'maxProcesses': 1, 'customShell': 'default', } self.config['log'] = { 'logTime': 60*60*24*14, } self.write_config()
[ "def", "read_config", "(", "self", ")", ":", "config_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config_dir", ",", "'pueue.ini'", ")", "self", ".", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "# Try to get configuration file and return it", "# If this doesn't work, a new default config file will be created", "if", "os", ".", "path", ".", "exists", "(", "config_file", ")", ":", "try", ":", "self", ".", "config", ".", "read", "(", "config_file", ")", "return", "except", "Exception", ":", "self", ".", "logger", ".", "error", "(", "'Error while parsing config file. Deleting old config'", ")", "self", ".", "logger", ".", "exception", "(", ")", "self", ".", "config", "[", "'default'", "]", "=", "{", "'resumeAfterStart'", ":", "False", ",", "'maxProcesses'", ":", "1", ",", "'customShell'", ":", "'default'", ",", "}", "self", ".", "config", "[", "'log'", "]", "=", "{", "'logTime'", ":", "60", "*", "60", "*", "24", "*", "14", ",", "}", "self", ".", "write_config", "(", ")" ]
Read a previous configuration file or create a new with default values.
[ "Read", "a", "previous", "configuration", "file", "or", "create", "a", "new", "with", "default", "values", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L128-L150
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.write_config
def write_config(self): """Write the current configuration to the config file.""" config_file = os.path.join(self.config_dir, 'pueue.ini') with open(config_file, 'w') as file_descriptor: self.config.write(file_descriptor)
python
def write_config(self): """Write the current configuration to the config file.""" config_file = os.path.join(self.config_dir, 'pueue.ini') with open(config_file, 'w') as file_descriptor: self.config.write(file_descriptor)
[ "def", "write_config", "(", "self", ")", ":", "config_file", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config_dir", ",", "'pueue.ini'", ")", "with", "open", "(", "config_file", ",", "'w'", ")", "as", "file_descriptor", ":", "self", ".", "config", ".", "write", "(", "file_descriptor", ")" ]
Write the current configuration to the config file.
[ "Write", "the", "current", "configuration", "to", "the", "config", "file", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L152-L156
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.main
def main(self): """The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit """ try: while self.running: # Trigger the processing of finished processes by the ProcessHandler. # If there are finished processes we write the log to keep it up to date. if self.process_handler.check_finished(): self.logger.write(self.queue) if self.reset and self.process_handler.all_finished(): # Rotate log and reset queue self.logger.rotate(self.queue) self.queue.reset() self.reset = False # Check if the ProcessHandler has any free slots to spawn a new process if not self.paused and not self.reset and self.running: self.process_handler.check_for_new() # This is the communication section of the daemon. # 1. Receive message from the client # 2. Check payload and call respective function with payload as parameter. # 3. Execute logic # 4. Return payload with response to client # Create list for waitable objects readable, writable, failed = select.select(self.read_list, [], [], 1) for waiting_socket in readable: if waiting_socket is self.socket: # Listening for clients to connect. # Client sockets are added to readlist to be processed. try: client_socket, client_address = self.socket.accept() self.read_list.append(client_socket) except Exception: self.logger.warning('Daemon rejected client') else: # Trying to receive instruction from client socket try: instruction = waiting_socket.recv(1048576) except (EOFError, OSError): self.logger.warning('Client died while sending message, dropping received data.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None # Check for valid instruction if instruction is not None: # Check if received data can be unpickled. 
try: payload = pickle.loads(instruction) except EOFError: # Instruction is ignored if it can't be unpickled self.logger.error('Received message is incomplete, dropping received data.') self.read_list.remove(waiting_socket) waiting_socket.close() # Set invalid payload payload = {'mode': ''} functions = { 'add': self.add, 'remove': self.remove, 'edit': self.edit_command, 'switch': self.switch, 'send': self.pipe_to_process, 'status': self.send_status, 'start': self.start, 'pause': self.pause, 'stash': self.stash, 'enqueue': self.enqueue, 'restart': self.restart, 'kill': self.kill_process, 'reset': self.reset_everything, 'clear': self.clear, 'config': self.set_config, 'STOPDAEMON': self.stop_daemon, } if payload['mode'] in functions.keys(): self.logger.debug('Payload received:') self.logger.debug(payload) response = functions[payload['mode']](payload) self.logger.debug('Sending payload:') self.logger.debug(response) try: self.respond_client(response, waiting_socket) except (BrokenPipeError): self.logger.warning('Client disconnected during message dispatching. Function successfully executed anyway.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None else: self.respond_client({'message': 'Unknown Command', 'status': 'error'}, waiting_socket) except Exception: self.logger.exception() # Wait for killed or stopped processes to finish (cleanup) self.process_handler.wait_for_finish() # Close socket, clean everything up and exit self.socket.close() cleanup(self.config_dir) sys.exit(0)
python
def main(self): """The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit """ try: while self.running: # Trigger the processing of finished processes by the ProcessHandler. # If there are finished processes we write the log to keep it up to date. if self.process_handler.check_finished(): self.logger.write(self.queue) if self.reset and self.process_handler.all_finished(): # Rotate log and reset queue self.logger.rotate(self.queue) self.queue.reset() self.reset = False # Check if the ProcessHandler has any free slots to spawn a new process if not self.paused and not self.reset and self.running: self.process_handler.check_for_new() # This is the communication section of the daemon. # 1. Receive message from the client # 2. Check payload and call respective function with payload as parameter. # 3. Execute logic # 4. Return payload with response to client # Create list for waitable objects readable, writable, failed = select.select(self.read_list, [], [], 1) for waiting_socket in readable: if waiting_socket is self.socket: # Listening for clients to connect. # Client sockets are added to readlist to be processed. try: client_socket, client_address = self.socket.accept() self.read_list.append(client_socket) except Exception: self.logger.warning('Daemon rejected client') else: # Trying to receive instruction from client socket try: instruction = waiting_socket.recv(1048576) except (EOFError, OSError): self.logger.warning('Client died while sending message, dropping received data.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None # Check for valid instruction if instruction is not None: # Check if received data can be unpickled. 
try: payload = pickle.loads(instruction) except EOFError: # Instruction is ignored if it can't be unpickled self.logger.error('Received message is incomplete, dropping received data.') self.read_list.remove(waiting_socket) waiting_socket.close() # Set invalid payload payload = {'mode': ''} functions = { 'add': self.add, 'remove': self.remove, 'edit': self.edit_command, 'switch': self.switch, 'send': self.pipe_to_process, 'status': self.send_status, 'start': self.start, 'pause': self.pause, 'stash': self.stash, 'enqueue': self.enqueue, 'restart': self.restart, 'kill': self.kill_process, 'reset': self.reset_everything, 'clear': self.clear, 'config': self.set_config, 'STOPDAEMON': self.stop_daemon, } if payload['mode'] in functions.keys(): self.logger.debug('Payload received:') self.logger.debug(payload) response = functions[payload['mode']](payload) self.logger.debug('Sending payload:') self.logger.debug(response) try: self.respond_client(response, waiting_socket) except (BrokenPipeError): self.logger.warning('Client disconnected during message dispatching. Function successfully executed anyway.') # Remove client socket self.read_list.remove(waiting_socket) waiting_socket.close() instruction = None else: self.respond_client({'message': 'Unknown Command', 'status': 'error'}, waiting_socket) except Exception: self.logger.exception() # Wait for killed or stopped processes to finish (cleanup) self.process_handler.wait_for_finish() # Close socket, clean everything up and exit self.socket.close() cleanup(self.config_dir) sys.exit(0)
[ "def", "main", "(", "self", ")", ":", "try", ":", "while", "self", ".", "running", ":", "# Trigger the processing of finished processes by the ProcessHandler.", "# If there are finished processes we write the log to keep it up to date.", "if", "self", ".", "process_handler", ".", "check_finished", "(", ")", ":", "self", ".", "logger", ".", "write", "(", "self", ".", "queue", ")", "if", "self", ".", "reset", "and", "self", ".", "process_handler", ".", "all_finished", "(", ")", ":", "# Rotate log and reset queue", "self", ".", "logger", ".", "rotate", "(", "self", ".", "queue", ")", "self", ".", "queue", ".", "reset", "(", ")", "self", ".", "reset", "=", "False", "# Check if the ProcessHandler has any free slots to spawn a new process", "if", "not", "self", ".", "paused", "and", "not", "self", ".", "reset", "and", "self", ".", "running", ":", "self", ".", "process_handler", ".", "check_for_new", "(", ")", "# This is the communication section of the daemon.", "# 1. Receive message from the client", "# 2. Check payload and call respective function with payload as parameter.", "# 3. Execute logic", "# 4. 
Return payload with response to client", "# Create list for waitable objects", "readable", ",", "writable", ",", "failed", "=", "select", ".", "select", "(", "self", ".", "read_list", ",", "[", "]", ",", "[", "]", ",", "1", ")", "for", "waiting_socket", "in", "readable", ":", "if", "waiting_socket", "is", "self", ".", "socket", ":", "# Listening for clients to connect.", "# Client sockets are added to readlist to be processed.", "try", ":", "client_socket", ",", "client_address", "=", "self", ".", "socket", ".", "accept", "(", ")", "self", ".", "read_list", ".", "append", "(", "client_socket", ")", "except", "Exception", ":", "self", ".", "logger", ".", "warning", "(", "'Daemon rejected client'", ")", "else", ":", "# Trying to receive instruction from client socket", "try", ":", "instruction", "=", "waiting_socket", ".", "recv", "(", "1048576", ")", "except", "(", "EOFError", ",", "OSError", ")", ":", "self", ".", "logger", ".", "warning", "(", "'Client died while sending message, dropping received data.'", ")", "# Remove client socket", "self", ".", "read_list", ".", "remove", "(", "waiting_socket", ")", "waiting_socket", ".", "close", "(", ")", "instruction", "=", "None", "# Check for valid instruction", "if", "instruction", "is", "not", "None", ":", "# Check if received data can be unpickled.", "try", ":", "payload", "=", "pickle", ".", "loads", "(", "instruction", ")", "except", "EOFError", ":", "# Instruction is ignored if it can't be unpickled", "self", ".", "logger", ".", "error", "(", "'Received message is incomplete, dropping received data.'", ")", "self", ".", "read_list", ".", "remove", "(", "waiting_socket", ")", "waiting_socket", ".", "close", "(", ")", "# Set invalid payload", "payload", "=", "{", "'mode'", ":", "''", "}", "functions", "=", "{", "'add'", ":", "self", ".", "add", ",", "'remove'", ":", "self", ".", "remove", ",", "'edit'", ":", "self", ".", "edit_command", ",", "'switch'", ":", "self", ".", "switch", ",", "'send'", ":", "self", ".", 
"pipe_to_process", ",", "'status'", ":", "self", ".", "send_status", ",", "'start'", ":", "self", ".", "start", ",", "'pause'", ":", "self", ".", "pause", ",", "'stash'", ":", "self", ".", "stash", ",", "'enqueue'", ":", "self", ".", "enqueue", ",", "'restart'", ":", "self", ".", "restart", ",", "'kill'", ":", "self", ".", "kill_process", ",", "'reset'", ":", "self", ".", "reset_everything", ",", "'clear'", ":", "self", ".", "clear", ",", "'config'", ":", "self", ".", "set_config", ",", "'STOPDAEMON'", ":", "self", ".", "stop_daemon", ",", "}", "if", "payload", "[", "'mode'", "]", "in", "functions", ".", "keys", "(", ")", ":", "self", ".", "logger", ".", "debug", "(", "'Payload received:'", ")", "self", ".", "logger", ".", "debug", "(", "payload", ")", "response", "=", "functions", "[", "payload", "[", "'mode'", "]", "]", "(", "payload", ")", "self", ".", "logger", ".", "debug", "(", "'Sending payload:'", ")", "self", ".", "logger", ".", "debug", "(", "response", ")", "try", ":", "self", ".", "respond_client", "(", "response", ",", "waiting_socket", ")", "except", "(", "BrokenPipeError", ")", ":", "self", ".", "logger", ".", "warning", "(", "'Client disconnected during message dispatching. Function successfully executed anyway.'", ")", "# Remove client socket", "self", ".", "read_list", ".", "remove", "(", "waiting_socket", ")", "waiting_socket", ".", "close", "(", ")", "instruction", "=", "None", "else", ":", "self", ".", "respond_client", "(", "{", "'message'", ":", "'Unknown Command'", ",", "'status'", ":", "'error'", "}", ",", "waiting_socket", ")", "except", "Exception", ":", "self", ".", "logger", ".", "exception", "(", ")", "# Wait for killed or stopped processes to finish (cleanup)", "self", ".", "process_handler", ".", "wait_for_finish", "(", ")", "# Close socket, clean everything up and exit", "self", ".", "socket", ".", "close", "(", ")", "cleanup", "(", "self", ".", "config_dir", ")", "sys", ".", "exit", "(", "0", ")" ]
The main function containing the loop for communication and process management. This function is the heart of the daemon. It is responsible for: - Client communication - Executing commands from clients - Update the status of processes by polling the ProcessHandler. - Logging - Cleanup on exit
[ "The", "main", "function", "containing", "the", "loop", "for", "communication", "and", "process", "management", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L158-L273
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.stop_daemon
def stop_daemon(self, payload=None): """Kill current processes and initiate daemon shutdown. The daemon will shut down after a last check on all killed processes. """ kill_signal = signals['9'] self.process_handler.kill_all(kill_signal, True) self.running = False return {'message': 'Pueue daemon shutting down', 'status': 'success'}
python
def stop_daemon(self, payload=None): """Kill current processes and initiate daemon shutdown. The daemon will shut down after a last check on all killed processes. """ kill_signal = signals['9'] self.process_handler.kill_all(kill_signal, True) self.running = False return {'message': 'Pueue daemon shutting down', 'status': 'success'}
[ "def", "stop_daemon", "(", "self", ",", "payload", "=", "None", ")", ":", "kill_signal", "=", "signals", "[", "'9'", "]", "self", ".", "process_handler", ".", "kill_all", "(", "kill_signal", ",", "True", ")", "self", ".", "running", "=", "False", "return", "{", "'message'", ":", "'Pueue daemon shutting down'", ",", "'status'", ":", "'success'", "}" ]
Kill current processes and initiate daemon shutdown. The daemon will shut down after a last check on all killed processes.
[ "Kill", "current", "processes", "and", "initiate", "daemon", "shutdown", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L275-L285
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.set_config
def set_config(self, payload): """Update the current config depending on the payload and save it.""" self.config['default'][payload['option']] = str(payload['value']) if payload['option'] == 'maxProcesses': self.process_handler.set_max(payload['value']) if payload['option'] == 'customShell': path = payload['value'] if os.path.isfile(path) and os.access(path, os.X_OK): self.process_handler.set_shell(path) elif path == 'default': self.process_handler.set_shell() else: return {'message': "File in path doesn't exist or is not executable.", 'status': 'error'} self.write_config() return {'message': 'Configuration successfully updated.', 'status': 'success'}
python
def set_config(self, payload): """Update the current config depending on the payload and save it.""" self.config['default'][payload['option']] = str(payload['value']) if payload['option'] == 'maxProcesses': self.process_handler.set_max(payload['value']) if payload['option'] == 'customShell': path = payload['value'] if os.path.isfile(path) and os.access(path, os.X_OK): self.process_handler.set_shell(path) elif path == 'default': self.process_handler.set_shell() else: return {'message': "File in path doesn't exist or is not executable.", 'status': 'error'} self.write_config() return {'message': 'Configuration successfully updated.', 'status': 'success'}
[ "def", "set_config", "(", "self", ",", "payload", ")", ":", "self", ".", "config", "[", "'default'", "]", "[", "payload", "[", "'option'", "]", "]", "=", "str", "(", "payload", "[", "'value'", "]", ")", "if", "payload", "[", "'option'", "]", "==", "'maxProcesses'", ":", "self", ".", "process_handler", ".", "set_max", "(", "payload", "[", "'value'", "]", ")", "if", "payload", "[", "'option'", "]", "==", "'customShell'", ":", "path", "=", "payload", "[", "'value'", "]", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", "and", "os", ".", "access", "(", "path", ",", "os", ".", "X_OK", ")", ":", "self", ".", "process_handler", ".", "set_shell", "(", "path", ")", "elif", "path", "==", "'default'", ":", "self", ".", "process_handler", ".", "set_shell", "(", ")", "else", ":", "return", "{", "'message'", ":", "\"File in path doesn't exist or is not executable.\"", ",", "'status'", ":", "'error'", "}", "self", ".", "write_config", "(", ")", "return", "{", "'message'", ":", "'Configuration successfully updated.'", ",", "'status'", ":", "'success'", "}" ]
Update the current config depending on the payload and save it.
[ "Update", "the", "current", "config", "depending", "on", "the", "payload", "and", "save", "it", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L287-L306
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.pipe_to_process
def pipe_to_process(self, payload): """Send something to stdin of a specific process.""" message = payload['input'] key = payload['key'] if not self.process_handler.is_running(key): return {'message': 'No running process for this key', 'status': 'error'} self.process_handler.send_to_process(message, key) return {'message': 'Message sent', 'status': 'success'}
python
def pipe_to_process(self, payload): """Send something to stdin of a specific process.""" message = payload['input'] key = payload['key'] if not self.process_handler.is_running(key): return {'message': 'No running process for this key', 'status': 'error'} self.process_handler.send_to_process(message, key) return {'message': 'Message sent', 'status': 'success'}
[ "def", "pipe_to_process", "(", "self", ",", "payload", ")", ":", "message", "=", "payload", "[", "'input'", "]", "key", "=", "payload", "[", "'key'", "]", "if", "not", "self", ".", "process_handler", ".", "is_running", "(", "key", ")", ":", "return", "{", "'message'", ":", "'No running process for this key'", ",", "'status'", ":", "'error'", "}", "self", ".", "process_handler", ".", "send_to_process", "(", "message", ",", "key", ")", "return", "{", "'message'", ":", "'Message sent'", ",", "'status'", ":", "'success'", "}" ]
Send something to stdin of a specific process.
[ "Send", "something", "to", "stdin", "of", "a", "specific", "process", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L308-L317
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.send_status
def send_status(self, payload): """Send the daemon status and the current queue for displaying.""" answer = {} data = [] # Get daemon status if self.paused: answer['status'] = 'paused' else: answer['status'] = 'running' # Add current queue or a message, that queue is empty if len(self.queue) > 0: data = deepcopy(self.queue.queue) # Remove stderr and stdout output for transfer # Some outputs are way to big for the socket buffer # and this is not needed by the client for key, item in data.items(): if 'stderr' in item: del item['stderr'] if 'stdout' in item: del item['stdout'] else: data = 'Queue is empty' answer['data'] = data return answer
python
def send_status(self, payload): """Send the daemon status and the current queue for displaying.""" answer = {} data = [] # Get daemon status if self.paused: answer['status'] = 'paused' else: answer['status'] = 'running' # Add current queue or a message, that queue is empty if len(self.queue) > 0: data = deepcopy(self.queue.queue) # Remove stderr and stdout output for transfer # Some outputs are way to big for the socket buffer # and this is not needed by the client for key, item in data.items(): if 'stderr' in item: del item['stderr'] if 'stdout' in item: del item['stdout'] else: data = 'Queue is empty' answer['data'] = data return answer
[ "def", "send_status", "(", "self", ",", "payload", ")", ":", "answer", "=", "{", "}", "data", "=", "[", "]", "# Get daemon status", "if", "self", ".", "paused", ":", "answer", "[", "'status'", "]", "=", "'paused'", "else", ":", "answer", "[", "'status'", "]", "=", "'running'", "# Add current queue or a message, that queue is empty", "if", "len", "(", "self", ".", "queue", ")", ">", "0", ":", "data", "=", "deepcopy", "(", "self", ".", "queue", ".", "queue", ")", "# Remove stderr and stdout output for transfer", "# Some outputs are way to big for the socket buffer", "# and this is not needed by the client", "for", "key", ",", "item", "in", "data", ".", "items", "(", ")", ":", "if", "'stderr'", "in", "item", ":", "del", "item", "[", "'stderr'", "]", "if", "'stdout'", "in", "item", ":", "del", "item", "[", "'stdout'", "]", "else", ":", "data", "=", "'Queue is empty'", "answer", "[", "'data'", "]", "=", "data", "return", "answer" ]
Send the daemon status and the current queue for displaying.
[ "Send", "the", "daemon", "status", "and", "the", "current", "queue", "for", "displaying", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L319-L344
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.reset_everything
def reset_everything(self, payload): """Kill all processes, delete the queue and clean everything up.""" kill_signal = signals['9'] self.process_handler.kill_all(kill_signal, True) self.process_handler.wait_for_finish() self.reset = True answer = {'message': 'Resetting current queue', 'status': 'success'} return answer
python
def reset_everything(self, payload): """Kill all processes, delete the queue and clean everything up.""" kill_signal = signals['9'] self.process_handler.kill_all(kill_signal, True) self.process_handler.wait_for_finish() self.reset = True answer = {'message': 'Resetting current queue', 'status': 'success'} return answer
[ "def", "reset_everything", "(", "self", ",", "payload", ")", ":", "kill_signal", "=", "signals", "[", "'9'", "]", "self", ".", "process_handler", ".", "kill_all", "(", "kill_signal", ",", "True", ")", "self", ".", "process_handler", ".", "wait_for_finish", "(", ")", "self", ".", "reset", "=", "True", "answer", "=", "{", "'message'", ":", "'Resetting current queue'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Kill all processes, delete the queue and clean everything up.
[ "Kill", "all", "processes", "delete", "the", "queue", "and", "clean", "everything", "up", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L346-L354
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.clear
def clear(self, payload): """Clear queue from any `done` or `failed` entries. The log will be rotated once. Otherwise we would loose all logs from thoes finished processes. """ self.logger.rotate(self.queue) self.queue.clear() self.logger.write(self.queue) answer = {'message': 'Finished entries have been removed.', 'status': 'success'} return answer
python
def clear(self, payload): """Clear queue from any `done` or `failed` entries. The log will be rotated once. Otherwise we would loose all logs from thoes finished processes. """ self.logger.rotate(self.queue) self.queue.clear() self.logger.write(self.queue) answer = {'message': 'Finished entries have been removed.', 'status': 'success'} return answer
[ "def", "clear", "(", "self", ",", "payload", ")", ":", "self", ".", "logger", ".", "rotate", "(", "self", ".", "queue", ")", "self", ".", "queue", ".", "clear", "(", ")", "self", ".", "logger", ".", "write", "(", "self", ".", "queue", ")", "answer", "=", "{", "'message'", ":", "'Finished entries have been removed.'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Clear queue from any `done` or `failed` entries. The log will be rotated once. Otherwise we would loose all logs from thoes finished processes.
[ "Clear", "queue", "from", "any", "done", "or", "failed", "entries", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L356-L367
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.start
def start(self, payload): """Start the daemon and all processes or only specific processes.""" # Start specific processes, if `keys` is given in the payload if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.start_process(key) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Started processes: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo paused, queued or stashed process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Start a all processes and the daemon else: self.process_handler.start_all() if self.paused: self.paused = False answer = {'message': 'Daemon and all processes started.', 'status': 'success'} else: answer = {'message': 'Daemon already running, starting all processes.', 'status': 'success'} return answer
python
def start(self, payload): """Start the daemon and all processes or only specific processes.""" # Start specific processes, if `keys` is given in the payload if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.start_process(key) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Started processes: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo paused, queued or stashed process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Start a all processes and the daemon else: self.process_handler.start_all() if self.paused: self.paused = False answer = {'message': 'Daemon and all processes started.', 'status': 'success'} else: answer = {'message': 'Daemon already running, starting all processes.', 'status': 'success'} return answer
[ "def", "start", "(", "self", ",", "payload", ")", ":", "# Start specific processes, if `keys` is given in the payload", "if", "payload", ".", "get", "(", "'keys'", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", ".", "get", "(", "'keys'", ")", ":", "success", "=", "self", ".", "process_handler", ".", "start_process", "(", "key", ")", "if", "success", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Started processes: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo paused, queued or stashed process for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "# Start a all processes and the daemon", "else", ":", "self", ".", "process_handler", ".", "start_all", "(", ")", "if", "self", ".", "paused", ":", "self", ".", "paused", "=", "False", "answer", "=", "{", "'message'", ":", "'Daemon and all processes started.'", ",", "'status'", ":", "'success'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "'Daemon already running, starting all processes.'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Start the daemon and all processes or only specific processes.
[ "Start", "the", "daemon", "and", "all", "processes", "or", "only", "specific", "processes", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L369-L402
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.pause
def pause(self, payload): """Start the daemon and all processes or only specific processes.""" # Pause specific processes, if `keys` is given in the payload if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.pause_process(key) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Paused processes: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo running process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Pause all processes and the daemon else: if payload.get('wait'): self.paused = True answer = {'message': 'Pausing daemon, but waiting for processes to finish.', 'status': 'success'} else: self.process_handler.pause_all() if not self.paused: self.paused = True answer = {'message': 'Daemon and all processes paused.', 'status': 'success'} else: answer = {'message': 'Daemon already paused, pausing all processes anyway.', 'status': 'success'} return answer
python
def pause(self, payload): """Start the daemon and all processes or only specific processes.""" # Pause specific processes, if `keys` is given in the payload if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.pause_process(key) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Paused processes: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo running process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Pause all processes and the daemon else: if payload.get('wait'): self.paused = True answer = {'message': 'Pausing daemon, but waiting for processes to finish.', 'status': 'success'} else: self.process_handler.pause_all() if not self.paused: self.paused = True answer = {'message': 'Daemon and all processes paused.', 'status': 'success'} else: answer = {'message': 'Daemon already paused, pausing all processes anyway.', 'status': 'success'} return answer
[ "def", "pause", "(", "self", ",", "payload", ")", ":", "# Pause specific processes, if `keys` is given in the payload", "if", "payload", ".", "get", "(", "'keys'", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", ".", "get", "(", "'keys'", ")", ":", "success", "=", "self", ".", "process_handler", ".", "pause_process", "(", "key", ")", "if", "success", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Paused processes: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo running process for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "# Pause all processes and the daemon", "else", ":", "if", "payload", ".", "get", "(", "'wait'", ")", ":", "self", ".", "paused", "=", "True", "answer", "=", "{", "'message'", ":", "'Pausing daemon, but waiting for processes to finish.'", ",", "'status'", ":", "'success'", "}", "else", ":", "self", ".", "process_handler", ".", "pause_all", "(", ")", "if", "not", "self", ".", "paused", ":", "self", ".", "paused", "=", "True", "answer", "=", "{", "'message'", ":", "'Daemon and all processes paused.'", ",", "'status'", ":", "'success'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "'Daemon already paused, pausing all processes anyway.'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Start the daemon and all processes or only specific processes.
[ "Start", "the", "daemon", "and", "all", "processes", "or", "only", "specific", "processes", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L404-L443
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.edit_command
def edit_command(self, payload): """Edit the command of a specific entry.""" key = payload['key'] command = payload['command'] if self.queue[key]: if self.queue[key]['status'] in ['queued', 'stashed']: self.queue[key]['command'] = command answer = {'message': 'Command updated', 'status': 'error'} else: answer = {'message': "Entry is not 'queued' or 'stashed'", 'status': 'error'} else: answer = {'message': 'No entry with this key', 'status': 'error'} # Pause all processes and the daemon return answer
python
def edit_command(self, payload): """Edit the command of a specific entry.""" key = payload['key'] command = payload['command'] if self.queue[key]: if self.queue[key]['status'] in ['queued', 'stashed']: self.queue[key]['command'] = command answer = {'message': 'Command updated', 'status': 'error'} else: answer = {'message': "Entry is not 'queued' or 'stashed'", 'status': 'error'} else: answer = {'message': 'No entry with this key', 'status': 'error'} # Pause all processes and the daemon return answer
[ "def", "edit_command", "(", "self", ",", "payload", ")", ":", "key", "=", "payload", "[", "'key'", "]", "command", "=", "payload", "[", "'command'", "]", "if", "self", ".", "queue", "[", "key", "]", ":", "if", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "in", "[", "'queued'", ",", "'stashed'", "]", ":", "self", ".", "queue", "[", "key", "]", "[", "'command'", "]", "=", "command", "answer", "=", "{", "'message'", ":", "'Command updated'", ",", "'status'", ":", "'error'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "\"Entry is not 'queued' or 'stashed'\"", ",", "'status'", ":", "'error'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "'No entry with this key'", ",", "'status'", ":", "'error'", "}", "# Pause all processes and the daemon", "return", "answer" ]
Edit the command of a specific entry.
[ "Edit", "the", "command", "of", "a", "specific", "entry", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L445-L460
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.stash
def stash(self, payload): """Stash the specified processes.""" succeeded = [] failed = [] for key in payload['keys']: if self.queue.get(key) is not None: if self.queue[key]['status'] == 'queued': self.queue[key]['status'] = 'stashed' succeeded.append(str(key)) else: failed.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Stashed entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo queued entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
python
def stash(self, payload): """Stash the specified processes.""" succeeded = [] failed = [] for key in payload['keys']: if self.queue.get(key) is not None: if self.queue[key]['status'] == 'queued': self.queue[key]['status'] = 'stashed' succeeded.append(str(key)) else: failed.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Stashed entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo queued entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
[ "def", "stash", "(", "self", ",", "payload", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", "[", "'keys'", "]", ":", "if", "self", ".", "queue", ".", "get", "(", "key", ")", "is", "not", "None", ":", "if", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "==", "'queued'", ":", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'stashed'", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Stashed entries: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo queued entry for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "return", "answer" ]
Stash the specified processes.
[ "Stash", "the", "specified", "processes", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L462-L486
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.kill_process
def kill_process(self, payload): """Pause the daemon and kill all processes or kill a specific process.""" # Kill specific processes, if `keys` is given in the payload kill_signal = signals[payload['signal'].lower()] kill_shell = payload.get('all', False) if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.kill_process(key, kill_signal, kill_shell) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += "Signal '{}' sent to processes: {}.".format(payload['signal'], ', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo running process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Kill all processes and the daemon else: self.process_handler.kill_all(kill_signal, kill_shell) if kill_signal == signal.SIGINT or \ kill_signal == signal.SIGTERM or \ kill_signal == signal.SIGKILL: self.paused = True answer = {'message': 'Signal send to all processes.', 'status': 'success'} return answer
python
def kill_process(self, payload): """Pause the daemon and kill all processes or kill a specific process.""" # Kill specific processes, if `keys` is given in the payload kill_signal = signals[payload['signal'].lower()] kill_shell = payload.get('all', False) if payload.get('keys'): succeeded = [] failed = [] for key in payload.get('keys'): success = self.process_handler.kill_process(key, kill_signal, kill_shell) if success: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += "Signal '{}' sent to processes: {}.".format(payload['signal'], ', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo running process for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} # Kill all processes and the daemon else: self.process_handler.kill_all(kill_signal, kill_shell) if kill_signal == signal.SIGINT or \ kill_signal == signal.SIGTERM or \ kill_signal == signal.SIGKILL: self.paused = True answer = {'message': 'Signal send to all processes.', 'status': 'success'} return answer
[ "def", "kill_process", "(", "self", ",", "payload", ")", ":", "# Kill specific processes, if `keys` is given in the payload", "kill_signal", "=", "signals", "[", "payload", "[", "'signal'", "]", ".", "lower", "(", ")", "]", "kill_shell", "=", "payload", ".", "get", "(", "'all'", ",", "False", ")", "if", "payload", ".", "get", "(", "'keys'", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", ".", "get", "(", "'keys'", ")", ":", "success", "=", "self", ".", "process_handler", ".", "kill_process", "(", "key", ",", "kill_signal", ",", "kill_shell", ")", "if", "success", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "\"Signal '{}' sent to processes: {}.\"", ".", "format", "(", "payload", "[", "'signal'", "]", ",", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo running process for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "# Kill all processes and the daemon", "else", ":", "self", ".", "process_handler", ".", "kill_all", "(", "kill_signal", ",", "kill_shell", ")", "if", "kill_signal", "==", "signal", ".", "SIGINT", "or", "kill_signal", "==", "signal", ".", "SIGTERM", "or", "kill_signal", "==", "signal", ".", "SIGKILL", ":", "self", ".", "paused", "=", "True", "answer", "=", "{", "'message'", ":", "'Signal send to all processes.'", ",", "'status'", ":", "'success'", "}", "return", "answer" ]
Pause the daemon and kill all processes or kill a specific process.
[ "Pause", "the", "daemon", "and", "kill", "all", "processes", "or", "kill", "a", "specific", "process", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L514-L548
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.remove
def remove(self, payload): """Remove specified entries from the queue.""" succeeded = [] failed = [] for key in payload['keys']: running = self.process_handler.is_running(key) if not running: removed = self.queue.remove(key) if removed: succeeded.append(str(key)) else: failed.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Removed entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nRunning or non-existing entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
python
def remove(self, payload): """Remove specified entries from the queue.""" succeeded = [] failed = [] for key in payload['keys']: running = self.process_handler.is_running(key) if not running: removed = self.queue.remove(key) if removed: succeeded.append(str(key)) else: failed.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Removed entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nRunning or non-existing entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
[ "def", "remove", "(", "self", ",", "payload", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", "[", "'keys'", "]", ":", "running", "=", "self", ".", "process_handler", ".", "is_running", "(", "key", ")", "if", "not", "running", ":", "removed", "=", "self", ".", "queue", ".", "remove", "(", "key", ")", "if", "removed", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Removed entries: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nRunning or non-existing entry for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "return", "answer" ]
Remove specified entries from the queue.
[ "Remove", "specified", "entries", "from", "the", "queue", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L555-L580
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.switch
def switch(self, payload): """Switch the two specified entry positions in the queue.""" first = payload['first'] second = payload['second'] running = self.process_handler.is_running(first) or self.process_handler.is_running(second) if running: answer = { 'message': "Can't switch running processes, " "please stop the processes before switching them.", 'status': 'error' } else: switched = self.queue.switch(first, second) if switched: answer = { 'message': 'Entries #{} and #{} switched'.format(first, second), 'status': 'success' } else: answer = {'message': "One or both entries do not exist or are not queued/stashed.", 'status': 'error'} return answer
python
def switch(self, payload): """Switch the two specified entry positions in the queue.""" first = payload['first'] second = payload['second'] running = self.process_handler.is_running(first) or self.process_handler.is_running(second) if running: answer = { 'message': "Can't switch running processes, " "please stop the processes before switching them.", 'status': 'error' } else: switched = self.queue.switch(first, second) if switched: answer = { 'message': 'Entries #{} and #{} switched'.format(first, second), 'status': 'success' } else: answer = {'message': "One or both entries do not exist or are not queued/stashed.", 'status': 'error'} return answer
[ "def", "switch", "(", "self", ",", "payload", ")", ":", "first", "=", "payload", "[", "'first'", "]", "second", "=", "payload", "[", "'second'", "]", "running", "=", "self", ".", "process_handler", ".", "is_running", "(", "first", ")", "or", "self", ".", "process_handler", ".", "is_running", "(", "second", ")", "if", "running", ":", "answer", "=", "{", "'message'", ":", "\"Can't switch running processes, \"", "\"please stop the processes before switching them.\"", ",", "'status'", ":", "'error'", "}", "else", ":", "switched", "=", "self", ".", "queue", ".", "switch", "(", "first", ",", "second", ")", "if", "switched", ":", "answer", "=", "{", "'message'", ":", "'Entries #{} and #{} switched'", ".", "format", "(", "first", ",", "second", ")", ",", "'status'", ":", "'success'", "}", "else", ":", "answer", "=", "{", "'message'", ":", "\"One or both entries do not exist or are not queued/stashed.\"", ",", "'status'", ":", "'error'", "}", "return", "answer" ]
Switch the two specified entry positions in the queue.
[ "Switch", "the", "two", "specified", "entry", "positions", "in", "the", "queue", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L582-L604
train
Nukesor/pueue
pueue/daemon/daemon.py
Daemon.restart
def restart(self, payload): """Restart the specified entries.""" succeeded = [] failed = [] for key in payload['keys']: restarted = self.queue.restart(key) if restarted: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Restarted entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo finished entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
python
def restart(self, payload): """Restart the specified entries.""" succeeded = [] failed = [] for key in payload['keys']: restarted = self.queue.restart(key) if restarted: succeeded.append(str(key)) else: failed.append(str(key)) message = '' if len(succeeded) > 0: message += 'Restarted entries: {}.'.format(', '.join(succeeded)) status = 'success' if len(failed) > 0: message += '\nNo finished entry for keys: {}'.format(', '.join(failed)) status = 'error' answer = {'message': message.strip(), 'status': status} return answer
[ "def", "restart", "(", "self", ",", "payload", ")", ":", "succeeded", "=", "[", "]", "failed", "=", "[", "]", "for", "key", "in", "payload", "[", "'keys'", "]", ":", "restarted", "=", "self", ".", "queue", ".", "restart", "(", "key", ")", "if", "restarted", ":", "succeeded", ".", "append", "(", "str", "(", "key", ")", ")", "else", ":", "failed", ".", "append", "(", "str", "(", "key", ")", ")", "message", "=", "''", "if", "len", "(", "succeeded", ")", ">", "0", ":", "message", "+=", "'Restarted entries: {}.'", ".", "format", "(", "', '", ".", "join", "(", "succeeded", ")", ")", "status", "=", "'success'", "if", "len", "(", "failed", ")", ">", "0", ":", "message", "+=", "'\\nNo finished entry for keys: {}'", ".", "format", "(", "', '", ".", "join", "(", "failed", ")", ")", "status", "=", "'error'", "answer", "=", "{", "'message'", ":", "message", ".", "strip", "(", ")", ",", "'status'", ":", "status", "}", "return", "answer" ]
Restart the specified entries.
[ "Restart", "the", "specified", "entries", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/daemon.py#L606-L626
train
dlecocq/nsq-py
nsq/sockets/base.py
SocketWrapper.sendall
def sendall(self, data, flags=0): '''Same as socket.sendall''' count = len(data) while count: sent = self.send(data, flags) # This could probably be a buffer object data = data[sent:] count -= sent
python
def sendall(self, data, flags=0): '''Same as socket.sendall''' count = len(data) while count: sent = self.send(data, flags) # This could probably be a buffer object data = data[sent:] count -= sent
[ "def", "sendall", "(", "self", ",", "data", ",", "flags", "=", "0", ")", ":", "count", "=", "len", "(", "data", ")", "while", "count", ":", "sent", "=", "self", ".", "send", "(", "data", ",", "flags", ")", "# This could probably be a buffer object", "data", "=", "data", "[", "sent", ":", "]", "count", "-=", "sent" ]
Same as socket.sendall
[ "Same", "as", "socket", ".", "sendall" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/sockets/base.py#L31-L38
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_ls
def do_ls(client, args): """List directory""" for item in client.get_folder_contents_iter(args.uri): # privacy flag if item['privacy'] == 'public': item['pf'] = '@' else: item['pf'] = '-' if isinstance(item, Folder): # type flag item['tf'] = 'd' item['key'] = item['folderkey'] item['size'] = '' else: item['tf'] = '-' item['key'] = item['quickkey'] item['name'] = item['filename'] print("{tf}{pf} {key:>15} {size:>10} {created} {name}".format(**item)) return True
python
def do_ls(client, args): """List directory""" for item in client.get_folder_contents_iter(args.uri): # privacy flag if item['privacy'] == 'public': item['pf'] = '@' else: item['pf'] = '-' if isinstance(item, Folder): # type flag item['tf'] = 'd' item['key'] = item['folderkey'] item['size'] = '' else: item['tf'] = '-' item['key'] = item['quickkey'] item['name'] = item['filename'] print("{tf}{pf} {key:>15} {size:>10} {created} {name}".format(**item)) return True
[ "def", "do_ls", "(", "client", ",", "args", ")", ":", "for", "item", "in", "client", ".", "get_folder_contents_iter", "(", "args", ".", "uri", ")", ":", "# privacy flag", "if", "item", "[", "'privacy'", "]", "==", "'public'", ":", "item", "[", "'pf'", "]", "=", "'@'", "else", ":", "item", "[", "'pf'", "]", "=", "'-'", "if", "isinstance", "(", "item", ",", "Folder", ")", ":", "# type flag", "item", "[", "'tf'", "]", "=", "'d'", "item", "[", "'key'", "]", "=", "item", "[", "'folderkey'", "]", "item", "[", "'size'", "]", "=", "''", "else", ":", "item", "[", "'tf'", "]", "=", "'-'", "item", "[", "'key'", "]", "=", "item", "[", "'quickkey'", "]", "item", "[", "'name'", "]", "=", "item", "[", "'filename'", "]", "print", "(", "\"{tf}{pf} {key:>15} {size:>10} {created} {name}\"", ".", "format", "(", "*", "*", "item", ")", ")", "return", "True" ]
List directory
[ "List", "directory" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L28-L50
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_file_upload
def do_file_upload(client, args): """Upload files""" # Sanity check if len(args.paths) > 1: # destination must be a directory try: resource = client.get_resource_by_uri(args.dest_uri) except ResourceNotFoundError: resource = None if resource and not isinstance(resource, Folder): print("file-upload: " "target '{}' is not a directory".format(args.dest_uri)) return None with client.upload_session(): for src_path in args.paths: print("Uploading {} to {}".format(src_path, args.dest_uri)) result = client.upload_file(src_path, args.dest_uri) print("Uploaded {}, result={}".format(src_path, result)) return True
python
def do_file_upload(client, args): """Upload files""" # Sanity check if len(args.paths) > 1: # destination must be a directory try: resource = client.get_resource_by_uri(args.dest_uri) except ResourceNotFoundError: resource = None if resource and not isinstance(resource, Folder): print("file-upload: " "target '{}' is not a directory".format(args.dest_uri)) return None with client.upload_session(): for src_path in args.paths: print("Uploading {} to {}".format(src_path, args.dest_uri)) result = client.upload_file(src_path, args.dest_uri) print("Uploaded {}, result={}".format(src_path, result)) return True
[ "def", "do_file_upload", "(", "client", ",", "args", ")", ":", "# Sanity check", "if", "len", "(", "args", ".", "paths", ")", ">", "1", ":", "# destination must be a directory", "try", ":", "resource", "=", "client", ".", "get_resource_by_uri", "(", "args", ".", "dest_uri", ")", "except", "ResourceNotFoundError", ":", "resource", "=", "None", "if", "resource", "and", "not", "isinstance", "(", "resource", ",", "Folder", ")", ":", "print", "(", "\"file-upload: \"", "\"target '{}' is not a directory\"", ".", "format", "(", "args", ".", "dest_uri", ")", ")", "return", "None", "with", "client", ".", "upload_session", "(", ")", ":", "for", "src_path", "in", "args", ".", "paths", ":", "print", "(", "\"Uploading {} to {}\"", ".", "format", "(", "src_path", ",", "args", ".", "dest_uri", ")", ")", "result", "=", "client", ".", "upload_file", "(", "src_path", ",", "args", ".", "dest_uri", ")", "print", "(", "\"Uploaded {}, result={}\"", ".", "format", "(", "src_path", ",", "result", ")", ")", "return", "True" ]
Upload files
[ "Upload", "files" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L53-L76
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_file_download
def do_file_download(client, args): """Download file""" # Sanity check if not os.path.isdir(args.dest_path) and not args.dest_path.endswith('/'): print("file-download: " "target '{}' is not a directory".format(args.dest_path)) if not os.path.exists(args.dest_path): print("\tHint: add trailing / to create one") return None for src_uri in args.uris: print("Downloading {} to {}".format(src_uri, args.dest_path)) client.download_file(src_uri, args.dest_path) print("Downloaded {}".format(src_uri)) return True
python
def do_file_download(client, args): """Download file""" # Sanity check if not os.path.isdir(args.dest_path) and not args.dest_path.endswith('/'): print("file-download: " "target '{}' is not a directory".format(args.dest_path)) if not os.path.exists(args.dest_path): print("\tHint: add trailing / to create one") return None for src_uri in args.uris: print("Downloading {} to {}".format(src_uri, args.dest_path)) client.download_file(src_uri, args.dest_path) print("Downloaded {}".format(src_uri)) return True
[ "def", "do_file_download", "(", "client", ",", "args", ")", ":", "# Sanity check", "if", "not", "os", ".", "path", ".", "isdir", "(", "args", ".", "dest_path", ")", "and", "not", "args", ".", "dest_path", ".", "endswith", "(", "'/'", ")", ":", "print", "(", "\"file-download: \"", "\"target '{}' is not a directory\"", ".", "format", "(", "args", ".", "dest_path", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "args", ".", "dest_path", ")", ":", "print", "(", "\"\\tHint: add trailing / to create one\"", ")", "return", "None", "for", "src_uri", "in", "args", ".", "uris", ":", "print", "(", "\"Downloading {} to {}\"", ".", "format", "(", "src_uri", ",", "args", ".", "dest_path", ")", ")", "client", ".", "download_file", "(", "src_uri", ",", "args", ".", "dest_path", ")", "print", "(", "\"Downloaded {}\"", ".", "format", "(", "src_uri", ")", ")", "return", "True" ]
Download file
[ "Download", "file" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L79-L95
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_file_show
def do_file_show(client, args): """Output file contents to stdout""" for src_uri in args.uris: client.download_file(src_uri, sys.stdout.buffer) return True
python
def do_file_show(client, args): """Output file contents to stdout""" for src_uri in args.uris: client.download_file(src_uri, sys.stdout.buffer) return True
[ "def", "do_file_show", "(", "client", ",", "args", ")", ":", "for", "src_uri", "in", "args", ".", "uris", ":", "client", ".", "download_file", "(", "src_uri", ",", "sys", ".", "stdout", ".", "buffer", ")", "return", "True" ]
Output file contents to stdout
[ "Output", "file", "contents", "to", "stdout" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L98-L103
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_folder_create
def do_folder_create(client, args): """Create directory""" for folder_uri in args.uris: client.create_folder(folder_uri, recursive=True) return True
python
def do_folder_create(client, args): """Create directory""" for folder_uri in args.uris: client.create_folder(folder_uri, recursive=True) return True
[ "def", "do_folder_create", "(", "client", ",", "args", ")", ":", "for", "folder_uri", "in", "args", ".", "uris", ":", "client", ".", "create_folder", "(", "folder_uri", ",", "recursive", "=", "True", ")", "return", "True" ]
Create directory
[ "Create", "directory" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L106-L110
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_resource_delete
def do_resource_delete(client, args): """Remove resource""" for resource_uri in args.uris: client.delete_resource(resource_uri, purge=args.purge) print("Deleted {}".format(resource_uri)) return True
python
def do_resource_delete(client, args): """Remove resource""" for resource_uri in args.uris: client.delete_resource(resource_uri, purge=args.purge) print("Deleted {}".format(resource_uri)) return True
[ "def", "do_resource_delete", "(", "client", ",", "args", ")", ":", "for", "resource_uri", "in", "args", ".", "uris", ":", "client", ".", "delete_resource", "(", "resource_uri", ",", "purge", "=", "args", ".", "purge", ")", "print", "(", "\"Deleted {}\"", ".", "format", "(", "resource_uri", ")", ")", "return", "True" ]
Remove resource
[ "Remove", "resource" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L113-L118
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_file_update_metadata
def do_file_update_metadata(client, args): """Update file metadata""" client.update_file_metadata(args.uri, filename=args.filename, description=args.description, mtime=args.mtime, privacy=args.privacy) return True
python
def do_file_update_metadata(client, args): """Update file metadata""" client.update_file_metadata(args.uri, filename=args.filename, description=args.description, mtime=args.mtime, privacy=args.privacy) return True
[ "def", "do_file_update_metadata", "(", "client", ",", "args", ")", ":", "client", ".", "update_file_metadata", "(", "args", ".", "uri", ",", "filename", "=", "args", ".", "filename", ",", "description", "=", "args", ".", "description", ",", "mtime", "=", "args", ".", "mtime", ",", "privacy", "=", "args", ".", "privacy", ")", "return", "True" ]
Update file metadata
[ "Update", "file", "metadata" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L121-L126
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
do_folder_update_metadata
def do_folder_update_metadata(client, args): """Update file metadata""" client.update_folder_metadata(args.uri, foldername=args.foldername, description=args.description, mtime=args.mtime, privacy=args.privacy, privacy_recursive=args.recursive) return True
python
def do_folder_update_metadata(client, args): """Update file metadata""" client.update_folder_metadata(args.uri, foldername=args.foldername, description=args.description, mtime=args.mtime, privacy=args.privacy, privacy_recursive=args.recursive) return True
[ "def", "do_folder_update_metadata", "(", "client", ",", "args", ")", ":", "client", ".", "update_folder_metadata", "(", "args", ".", "uri", ",", "foldername", "=", "args", ".", "foldername", ",", "description", "=", "args", ".", "description", ",", "mtime", "=", "args", ".", "mtime", ",", "privacy", "=", "args", ".", "privacy", ",", "privacy_recursive", "=", "args", ".", "recursive", ")", "return", "True" ]
Update file metadata
[ "Update", "file", "metadata" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L129-L135
train
MediaFire/mediafire-python-open-sdk
examples/mediafire-cli.py
main
def main(): # pylint: disable=too-many-statements """Main entry point""" parser = argparse.ArgumentParser(prog='mediafire-cli', description=__doc__) parser.add_argument('--debug', dest='debug', action='store_true', default=False, help='Enable debug output') parser.add_argument('--email', dest='email', required=False, default=os.environ.get('MEDIAFIRE_EMAIL', None)) parser.add_argument('--password', dest='password', required=False, default=os.environ.get('MEDIAFIRE_PASSWORD', None)) actions = parser.add_subparsers(title='Actions', dest='action') # http://bugs.python.org/issue9253#msg186387 actions.required = True # ls subparser = actions.add_parser('ls', help=do_ls.__doc__) subparser.add_argument('uri', nargs='?', help='MediaFire URI', default='mf:///') # file-upload subparser = actions.add_parser('file-upload', help=do_file_upload.__doc__) subparser.add_argument('paths', nargs='+', help='Path[s] to upload') subparser.add_argument('dest_uri', help='Destination MediaFire URI') # file-download subparser = actions.add_parser('file-download', help=do_file_download.__doc__) subparser.add_argument('uris', nargs='+', help='MediaFire File URI[s] to download') subparser.add_argument('dest_path', help='Destination path') # file-show subparser = actions.add_parser('file-show', help=do_file_show.__doc__) subparser.add_argument('uris', nargs='+', help='MediaFire File URI[s] to print out') # folder-create subparser = actions.add_parser('folder-create', help=do_folder_create.__doc__) subparser.add_argument('uris', nargs='+', help='MediaFire folder path URI[s]') # resource-delete subparser = actions.add_parser('resource-delete', help=do_resource_delete.__doc__) subparser.add_argument('uris', nargs='+', help='MediaFire resource URI[s]') subparser.add_argument('--purge', help="Purge, don't send to trash", dest="purge", action="store_true", default=False) # file-update-metadata subparser = actions.add_parser('file-update-metadata', help=do_file_update_metadata.__doc__) 
subparser.add_argument('uri', help='MediaFire file URI') subparser.add_argument('--filename', help='Set file name', default=None, dest='filename') subparser.add_argument('--privacy', help='Set file privacy', choices=['public', 'private'], default=None, dest='privacy') subparser.add_argument('--description', help='Set file description', dest='description', default=None) subparser.add_argument('--mtime', help="Set file modification time", dest='mtime', default=None) # folder-update-metadata subparser = actions.add_parser('folder-update-metadata', help=do_folder_update_metadata.__doc__) subparser.add_argument('uri', help='MediaFire folder URI') subparser.add_argument('--foldername', help='Set folder name', default=None, dest='foldername') subparser.add_argument('--privacy', help='Set folder privacy', choices=['public', 'private'], default=None, dest='privacy') subparser.add_argument('--recursive', help='Set privacy recursively', action='store_true', default=None, dest='recursive') subparser.add_argument('--description', help='Set folder description', dest='description', default=None) subparser.add_argument('--mtime', help='Set folder mtime', default=None, dest='mtime') # debug-get-resource subparser = actions.add_parser('debug-get-resource', help=do_debug_get_resource.__doc__) subparser.add_argument('uri', help='MediaFire resource URI', default='mediafire:/', nargs='?') args = parser.parse_args() if args.debug: logger = logging.getLogger() logger.setLevel(logging.DEBUG) logging.getLogger("mediafire.client").setLevel(logging.DEBUG) client = MediaFireClient() if args.email and args.password: client.login(args.email, args.password, app_id=APP_ID) router = { "file-upload": do_file_upload, "file-download": do_file_download, "file-show": do_file_show, "ls": do_ls, "folder-create": do_folder_create, "resource-delete": do_resource_delete, "file-update-metadata": do_file_update_metadata, "folder-update-metadata": do_folder_update_metadata, "debug-get-resource": 
do_debug_get_resource } if args.action in router: result = router[args.action](client, args) if not result: sys.exit(1) else: print('Unsupported action: {}'.format(args.action)) sys.exit(1)
python
def main(): # pylint: disable=too-many-statements """Main entry point""" parser = argparse.ArgumentParser(prog='mediafire-cli', description=__doc__) parser.add_argument('--debug', dest='debug', action='store_true', default=False, help='Enable debug output') parser.add_argument('--email', dest='email', required=False, default=os.environ.get('MEDIAFIRE_EMAIL', None)) parser.add_argument('--password', dest='password', required=False, default=os.environ.get('MEDIAFIRE_PASSWORD', None)) actions = parser.add_subparsers(title='Actions', dest='action') # http://bugs.python.org/issue9253#msg186387 actions.required = True # ls subparser = actions.add_parser('ls', help=do_ls.__doc__) subparser.add_argument('uri', nargs='?', help='MediaFire URI', default='mf:///') # file-upload subparser = actions.add_parser('file-upload', help=do_file_upload.__doc__) subparser.add_argument('paths', nargs='+', help='Path[s] to upload') subparser.add_argument('dest_uri', help='Destination MediaFire URI') # file-download subparser = actions.add_parser('file-download', help=do_file_download.__doc__) subparser.add_argument('uris', nargs='+', help='MediaFire File URI[s] to download') subparser.add_argument('dest_path', help='Destination path') # file-show subparser = actions.add_parser('file-show', help=do_file_show.__doc__) subparser.add_argument('uris', nargs='+', help='MediaFire File URI[s] to print out') # folder-create subparser = actions.add_parser('folder-create', help=do_folder_create.__doc__) subparser.add_argument('uris', nargs='+', help='MediaFire folder path URI[s]') # resource-delete subparser = actions.add_parser('resource-delete', help=do_resource_delete.__doc__) subparser.add_argument('uris', nargs='+', help='MediaFire resource URI[s]') subparser.add_argument('--purge', help="Purge, don't send to trash", dest="purge", action="store_true", default=False) # file-update-metadata subparser = actions.add_parser('file-update-metadata', help=do_file_update_metadata.__doc__) 
subparser.add_argument('uri', help='MediaFire file URI') subparser.add_argument('--filename', help='Set file name', default=None, dest='filename') subparser.add_argument('--privacy', help='Set file privacy', choices=['public', 'private'], default=None, dest='privacy') subparser.add_argument('--description', help='Set file description', dest='description', default=None) subparser.add_argument('--mtime', help="Set file modification time", dest='mtime', default=None) # folder-update-metadata subparser = actions.add_parser('folder-update-metadata', help=do_folder_update_metadata.__doc__) subparser.add_argument('uri', help='MediaFire folder URI') subparser.add_argument('--foldername', help='Set folder name', default=None, dest='foldername') subparser.add_argument('--privacy', help='Set folder privacy', choices=['public', 'private'], default=None, dest='privacy') subparser.add_argument('--recursive', help='Set privacy recursively', action='store_true', default=None, dest='recursive') subparser.add_argument('--description', help='Set folder description', dest='description', default=None) subparser.add_argument('--mtime', help='Set folder mtime', default=None, dest='mtime') # debug-get-resource subparser = actions.add_parser('debug-get-resource', help=do_debug_get_resource.__doc__) subparser.add_argument('uri', help='MediaFire resource URI', default='mediafire:/', nargs='?') args = parser.parse_args() if args.debug: logger = logging.getLogger() logger.setLevel(logging.DEBUG) logging.getLogger("mediafire.client").setLevel(logging.DEBUG) client = MediaFireClient() if args.email and args.password: client.login(args.email, args.password, app_id=APP_ID) router = { "file-upload": do_file_upload, "file-download": do_file_download, "file-show": do_file_show, "ls": do_ls, "folder-create": do_folder_create, "resource-delete": do_resource_delete, "file-update-metadata": do_file_update_metadata, "folder-update-metadata": do_folder_update_metadata, "debug-get-resource": 
do_debug_get_resource } if args.action in router: result = router[args.action](client, args) if not result: sys.exit(1) else: print('Unsupported action: {}'.format(args.action)) sys.exit(1)
[ "def", "main", "(", ")", ":", "# pylint: disable=too-many-statements", "parser", "=", "argparse", ".", "ArgumentParser", "(", "prog", "=", "'mediafire-cli'", ",", "description", "=", "__doc__", ")", "parser", ".", "add_argument", "(", "'--debug'", ",", "dest", "=", "'debug'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ",", "help", "=", "'Enable debug output'", ")", "parser", ".", "add_argument", "(", "'--email'", ",", "dest", "=", "'email'", ",", "required", "=", "False", ",", "default", "=", "os", ".", "environ", ".", "get", "(", "'MEDIAFIRE_EMAIL'", ",", "None", ")", ")", "parser", ".", "add_argument", "(", "'--password'", ",", "dest", "=", "'password'", ",", "required", "=", "False", ",", "default", "=", "os", ".", "environ", ".", "get", "(", "'MEDIAFIRE_PASSWORD'", ",", "None", ")", ")", "actions", "=", "parser", ".", "add_subparsers", "(", "title", "=", "'Actions'", ",", "dest", "=", "'action'", ")", "# http://bugs.python.org/issue9253#msg186387", "actions", ".", "required", "=", "True", "# ls", "subparser", "=", "actions", ".", "add_parser", "(", "'ls'", ",", "help", "=", "do_ls", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'uri'", ",", "nargs", "=", "'?'", ",", "help", "=", "'MediaFire URI'", ",", "default", "=", "'mf:///'", ")", "# file-upload", "subparser", "=", "actions", ".", "add_parser", "(", "'file-upload'", ",", "help", "=", "do_file_upload", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'paths'", ",", "nargs", "=", "'+'", ",", "help", "=", "'Path[s] to upload'", ")", "subparser", ".", "add_argument", "(", "'dest_uri'", ",", "help", "=", "'Destination MediaFire URI'", ")", "# file-download", "subparser", "=", "actions", ".", "add_parser", "(", "'file-download'", ",", "help", "=", "do_file_download", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'uris'", ",", "nargs", "=", "'+'", ",", "help", "=", "'MediaFire File URI[s] to download'", ")", "subparser", ".", "add_argument", "(", 
"'dest_path'", ",", "help", "=", "'Destination path'", ")", "# file-show", "subparser", "=", "actions", ".", "add_parser", "(", "'file-show'", ",", "help", "=", "do_file_show", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'uris'", ",", "nargs", "=", "'+'", ",", "help", "=", "'MediaFire File URI[s] to print out'", ")", "# folder-create", "subparser", "=", "actions", ".", "add_parser", "(", "'folder-create'", ",", "help", "=", "do_folder_create", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'uris'", ",", "nargs", "=", "'+'", ",", "help", "=", "'MediaFire folder path URI[s]'", ")", "# resource-delete", "subparser", "=", "actions", ".", "add_parser", "(", "'resource-delete'", ",", "help", "=", "do_resource_delete", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'uris'", ",", "nargs", "=", "'+'", ",", "help", "=", "'MediaFire resource URI[s]'", ")", "subparser", ".", "add_argument", "(", "'--purge'", ",", "help", "=", "\"Purge, don't send to trash\"", ",", "dest", "=", "\"purge\"", ",", "action", "=", "\"store_true\"", ",", "default", "=", "False", ")", "# file-update-metadata", "subparser", "=", "actions", ".", "add_parser", "(", "'file-update-metadata'", ",", "help", "=", "do_file_update_metadata", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'uri'", ",", "help", "=", "'MediaFire file URI'", ")", "subparser", ".", "add_argument", "(", "'--filename'", ",", "help", "=", "'Set file name'", ",", "default", "=", "None", ",", "dest", "=", "'filename'", ")", "subparser", ".", "add_argument", "(", "'--privacy'", ",", "help", "=", "'Set file privacy'", ",", "choices", "=", "[", "'public'", ",", "'private'", "]", ",", "default", "=", "None", ",", "dest", "=", "'privacy'", ")", "subparser", ".", "add_argument", "(", "'--description'", ",", "help", "=", "'Set file description'", ",", "dest", "=", "'description'", ",", "default", "=", "None", ")", "subparser", ".", "add_argument", "(", "'--mtime'", ",", "help", "=", 
"\"Set file modification time\"", ",", "dest", "=", "'mtime'", ",", "default", "=", "None", ")", "# folder-update-metadata", "subparser", "=", "actions", ".", "add_parser", "(", "'folder-update-metadata'", ",", "help", "=", "do_folder_update_metadata", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'uri'", ",", "help", "=", "'MediaFire folder URI'", ")", "subparser", ".", "add_argument", "(", "'--foldername'", ",", "help", "=", "'Set folder name'", ",", "default", "=", "None", ",", "dest", "=", "'foldername'", ")", "subparser", ".", "add_argument", "(", "'--privacy'", ",", "help", "=", "'Set folder privacy'", ",", "choices", "=", "[", "'public'", ",", "'private'", "]", ",", "default", "=", "None", ",", "dest", "=", "'privacy'", ")", "subparser", ".", "add_argument", "(", "'--recursive'", ",", "help", "=", "'Set privacy recursively'", ",", "action", "=", "'store_true'", ",", "default", "=", "None", ",", "dest", "=", "'recursive'", ")", "subparser", ".", "add_argument", "(", "'--description'", ",", "help", "=", "'Set folder description'", ",", "dest", "=", "'description'", ",", "default", "=", "None", ")", "subparser", ".", "add_argument", "(", "'--mtime'", ",", "help", "=", "'Set folder mtime'", ",", "default", "=", "None", ",", "dest", "=", "'mtime'", ")", "# debug-get-resource", "subparser", "=", "actions", ".", "add_parser", "(", "'debug-get-resource'", ",", "help", "=", "do_debug_get_resource", ".", "__doc__", ")", "subparser", ".", "add_argument", "(", "'uri'", ",", "help", "=", "'MediaFire resource URI'", ",", "default", "=", "'mediafire:/'", ",", "nargs", "=", "'?'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "if", "args", ".", "debug", ":", "logger", "=", "logging", ".", "getLogger", "(", ")", "logger", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "logging", ".", "getLogger", "(", "\"mediafire.client\"", ")", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "client", "=", "MediaFireClient", "(", ")", "if", "args", ".", 
"email", "and", "args", ".", "password", ":", "client", ".", "login", "(", "args", ".", "email", ",", "args", ".", "password", ",", "app_id", "=", "APP_ID", ")", "router", "=", "{", "\"file-upload\"", ":", "do_file_upload", ",", "\"file-download\"", ":", "do_file_download", ",", "\"file-show\"", ":", "do_file_show", ",", "\"ls\"", ":", "do_ls", ",", "\"folder-create\"", ":", "do_folder_create", ",", "\"resource-delete\"", ":", "do_resource_delete", ",", "\"file-update-metadata\"", ":", "do_file_update_metadata", ",", "\"folder-update-metadata\"", ":", "do_folder_update_metadata", ",", "\"debug-get-resource\"", ":", "do_debug_get_resource", "}", "if", "args", ".", "action", "in", "router", ":", "result", "=", "router", "[", "args", ".", "action", "]", "(", "client", ",", "args", ")", "if", "not", "result", ":", "sys", ".", "exit", "(", "1", ")", "else", ":", "print", "(", "'Unsupported action: {}'", ".", "format", "(", "args", ".", "action", ")", ")", "sys", ".", "exit", "(", "1", ")" ]
Main entry point
[ "Main", "entry", "point" ]
8f1f23db1b16f16e026f5c6777aec32d00baa05f
https://github.com/MediaFire/mediafire-python-open-sdk/blob/8f1f23db1b16f16e026f5c6777aec32d00baa05f/examples/mediafire-cli.py#L145-L273
train
dlecocq/nsq-py
nsq/http/nsqd.py
Client.pub
def pub(self, topic, message): '''Publish a message to a topic''' return self.post('pub', params={'topic': topic}, data=message)
python
def pub(self, topic, message): '''Publish a message to a topic''' return self.post('pub', params={'topic': topic}, data=message)
[ "def", "pub", "(", "self", ",", "topic", ",", "message", ")", ":", "return", "self", ".", "post", "(", "'pub'", ",", "params", "=", "{", "'topic'", ":", "topic", "}", ",", "data", "=", "message", ")" ]
Publish a message to a topic
[ "Publish", "a", "message", "to", "a", "topic" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/http/nsqd.py#L19-L21
train
dlecocq/nsq-py
nsq/http/nsqd.py
Client.mpub
def mpub(self, topic, messages, binary=True): '''Send multiple messages to a topic. Optionally pack the messages''' if binary: # Pack and ship the data return self.post('mpub', data=pack(messages)[4:], params={'topic': topic, 'binary': True}) elif any('\n' in m for m in messages): # If any of the messages has a newline, then you must use the binary # calling format raise ClientException( 'Use `binary` flag in mpub for messages with newlines') else: return self.post( '/mpub', params={'topic': topic}, data='\n'.join(messages))
python
def mpub(self, topic, messages, binary=True): '''Send multiple messages to a topic. Optionally pack the messages''' if binary: # Pack and ship the data return self.post('mpub', data=pack(messages)[4:], params={'topic': topic, 'binary': True}) elif any('\n' in m for m in messages): # If any of the messages has a newline, then you must use the binary # calling format raise ClientException( 'Use `binary` flag in mpub for messages with newlines') else: return self.post( '/mpub', params={'topic': topic}, data='\n'.join(messages))
[ "def", "mpub", "(", "self", ",", "topic", ",", "messages", ",", "binary", "=", "True", ")", ":", "if", "binary", ":", "# Pack and ship the data", "return", "self", ".", "post", "(", "'mpub'", ",", "data", "=", "pack", "(", "messages", ")", "[", "4", ":", "]", ",", "params", "=", "{", "'topic'", ":", "topic", ",", "'binary'", ":", "True", "}", ")", "elif", "any", "(", "'\\n'", "in", "m", "for", "m", "in", "messages", ")", ":", "# If any of the messages has a newline, then you must use the binary", "# calling format", "raise", "ClientException", "(", "'Use `binary` flag in mpub for messages with newlines'", ")", "else", ":", "return", "self", ".", "post", "(", "'/mpub'", ",", "params", "=", "{", "'topic'", ":", "topic", "}", ",", "data", "=", "'\\n'", ".", "join", "(", "messages", ")", ")" ]
Send multiple messages to a topic. Optionally pack the messages
[ "Send", "multiple", "messages", "to", "a", "topic", ".", "Optionally", "pack", "the", "messages" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/http/nsqd.py#L24-L37
train
dlecocq/nsq-py
nsq/http/nsqd.py
Client.clean_stats
def clean_stats(self): '''Stats with topics and channels keyed on topic and channel names''' stats = self.stats() if 'topics' in stats: # pragma: no branch topics = stats['topics'] topics = dict((t.pop('topic_name'), t) for t in topics) for topic, data in topics.items(): if 'channels' in data: # pragma: no branch channels = data['channels'] channels = dict( (c.pop('channel_name'), c) for c in channels) data['channels'] = channels stats['topics'] = topics return stats
python
def clean_stats(self): '''Stats with topics and channels keyed on topic and channel names''' stats = self.stats() if 'topics' in stats: # pragma: no branch topics = stats['topics'] topics = dict((t.pop('topic_name'), t) for t in topics) for topic, data in topics.items(): if 'channels' in data: # pragma: no branch channels = data['channels'] channels = dict( (c.pop('channel_name'), c) for c in channels) data['channels'] = channels stats['topics'] = topics return stats
[ "def", "clean_stats", "(", "self", ")", ":", "stats", "=", "self", ".", "stats", "(", ")", "if", "'topics'", "in", "stats", ":", "# pragma: no branch", "topics", "=", "stats", "[", "'topics'", "]", "topics", "=", "dict", "(", "(", "t", ".", "pop", "(", "'topic_name'", ")", ",", "t", ")", "for", "t", "in", "topics", ")", "for", "topic", ",", "data", "in", "topics", ".", "items", "(", ")", ":", "if", "'channels'", "in", "data", ":", "# pragma: no branch", "channels", "=", "data", "[", "'channels'", "]", "channels", "=", "dict", "(", "(", "c", ".", "pop", "(", "'channel_name'", ")", ",", "c", ")", "for", "c", "in", "channels", ")", "data", "[", "'channels'", "]", "=", "channels", "stats", "[", "'topics'", "]", "=", "topics", "return", "stats" ]
Stats with topics and channels keyed on topic and channel names
[ "Stats", "with", "topics", "and", "channels", "keyed", "on", "topic", "and", "channel", "names" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/http/nsqd.py#L99-L112
train
Nukesor/pueue
pueue/client/manipulation.py
execute_add
def execute_add(args, root_dir=None): """Add a new command to the daemon queue. Args: args['command'] (list(str)): The actual programm call. Something like ['ls', '-a'] or ['ls -al'] root_dir (string): The path to the root directory the daemon is running in. """ # We accept a list of strings. # This is done to create a better commandline experience with argparse. command = ' '.join(args['command']) # Send new instruction to daemon instruction = { 'command': command, 'path': os.getcwd() } print_command_factory('add')(instruction, root_dir)
python
def execute_add(args, root_dir=None): """Add a new command to the daemon queue. Args: args['command'] (list(str)): The actual programm call. Something like ['ls', '-a'] or ['ls -al'] root_dir (string): The path to the root directory the daemon is running in. """ # We accept a list of strings. # This is done to create a better commandline experience with argparse. command = ' '.join(args['command']) # Send new instruction to daemon instruction = { 'command': command, 'path': os.getcwd() } print_command_factory('add')(instruction, root_dir)
[ "def", "execute_add", "(", "args", ",", "root_dir", "=", "None", ")", ":", "# We accept a list of strings.", "# This is done to create a better commandline experience with argparse.", "command", "=", "' '", ".", "join", "(", "args", "[", "'command'", "]", ")", "# Send new instruction to daemon", "instruction", "=", "{", "'command'", ":", "command", ",", "'path'", ":", "os", ".", "getcwd", "(", ")", "}", "print_command_factory", "(", "'add'", ")", "(", "instruction", ",", "root_dir", ")" ]
Add a new command to the daemon queue. Args: args['command'] (list(str)): The actual programm call. Something like ['ls', '-a'] or ['ls -al'] root_dir (string): The path to the root directory the daemon is running in.
[ "Add", "a", "new", "command", "to", "the", "daemon", "queue", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/client/manipulation.py#L9-L26
train
Nukesor/pueue
pueue/client/manipulation.py
execute_edit
def execute_edit(args, root_dir=None): """Edit a existing queue command in the daemon. Args: args['key'] int: The key of the queue entry to be edited root_dir (string): The path to the root directory the daemon is running in. """ # Get editor EDITOR = os.environ.get('EDITOR', 'vim') # Get command from server key = args['key'] status = command_factory('status')({}, root_dir=root_dir) # Check if queue is not empty, the entry exists and is queued or stashed if not isinstance(status['data'], str) and key in status['data']: if status['data'][key]['status'] in ['queued', 'stashed']: command = status['data'][key]['command'] else: print("Entry is not 'queued' or 'stashed'") sys.exit(1) else: print('No entry with this key') sys.exit(1) with tempfile.NamedTemporaryFile(suffix=".tmp") as tf: tf.write(command.encode('utf-8')) tf.flush() call([EDITOR, tf.name]) # do the parsing with `tf` using regular File operations. # for instance: tf.seek(0) edited_command = tf.read().decode('utf-8') print_command_factory('edit')({ 'key': key, 'command': edited_command, }, root_dir=root_dir)
python
def execute_edit(args, root_dir=None): """Edit a existing queue command in the daemon. Args: args['key'] int: The key of the queue entry to be edited root_dir (string): The path to the root directory the daemon is running in. """ # Get editor EDITOR = os.environ.get('EDITOR', 'vim') # Get command from server key = args['key'] status = command_factory('status')({}, root_dir=root_dir) # Check if queue is not empty, the entry exists and is queued or stashed if not isinstance(status['data'], str) and key in status['data']: if status['data'][key]['status'] in ['queued', 'stashed']: command = status['data'][key]['command'] else: print("Entry is not 'queued' or 'stashed'") sys.exit(1) else: print('No entry with this key') sys.exit(1) with tempfile.NamedTemporaryFile(suffix=".tmp") as tf: tf.write(command.encode('utf-8')) tf.flush() call([EDITOR, tf.name]) # do the parsing with `tf` using regular File operations. # for instance: tf.seek(0) edited_command = tf.read().decode('utf-8') print_command_factory('edit')({ 'key': key, 'command': edited_command, }, root_dir=root_dir)
[ "def", "execute_edit", "(", "args", ",", "root_dir", "=", "None", ")", ":", "# Get editor", "EDITOR", "=", "os", ".", "environ", ".", "get", "(", "'EDITOR'", ",", "'vim'", ")", "# Get command from server", "key", "=", "args", "[", "'key'", "]", "status", "=", "command_factory", "(", "'status'", ")", "(", "{", "}", ",", "root_dir", "=", "root_dir", ")", "# Check if queue is not empty, the entry exists and is queued or stashed", "if", "not", "isinstance", "(", "status", "[", "'data'", "]", ",", "str", ")", "and", "key", "in", "status", "[", "'data'", "]", ":", "if", "status", "[", "'data'", "]", "[", "key", "]", "[", "'status'", "]", "in", "[", "'queued'", ",", "'stashed'", "]", ":", "command", "=", "status", "[", "'data'", "]", "[", "key", "]", "[", "'command'", "]", "else", ":", "print", "(", "\"Entry is not 'queued' or 'stashed'\"", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "print", "(", "'No entry with this key'", ")", "sys", ".", "exit", "(", "1", ")", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "\".tmp\"", ")", "as", "tf", ":", "tf", ".", "write", "(", "command", ".", "encode", "(", "'utf-8'", ")", ")", "tf", ".", "flush", "(", ")", "call", "(", "[", "EDITOR", ",", "tf", ".", "name", "]", ")", "# do the parsing with `tf` using regular File operations.", "# for instance:", "tf", ".", "seek", "(", "0", ")", "edited_command", "=", "tf", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "print_command_factory", "(", "'edit'", ")", "(", "{", "'key'", ":", "key", ",", "'command'", ":", "edited_command", ",", "}", ",", "root_dir", "=", "root_dir", ")" ]
Edit a existing queue command in the daemon. Args: args['key'] int: The key of the queue entry to be edited root_dir (string): The path to the root directory the daemon is running in.
[ "Edit", "a", "existing", "queue", "command", "in", "the", "daemon", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/client/manipulation.py#L29-L66
train
Nukesor/pueue
pueue/client/factories.py
command_factory
def command_factory(command): """A factory which returns functions for direct daemon communication. This factory will create a function which sends a payload to the daemon and returns the unpickled object which is returned by the daemon. Args: command (string): The type of payload this should be. This determines as what kind of instruction this will be interpreted by the daemon. Returns: function: The created function. """ def communicate(body={}, root_dir=None): """Communicate with the daemon. This function sends a payload to the daemon and returns the unpickled object sent by the daemon. Args: body (dir): Any other arguments that should be put into the payload. root_dir (str): The root directory in which we expect the daemon. We need this to connect to the daemons socket. Returns: function: The returned payload. """ client = connect_socket(root_dir) body['mode'] = command # Delete the func entry we use to call the correct function with argparse # as functions can't be pickled and this shouldn't be send to the daemon. if 'func' in body: del body['func'] data_string = pickle.dumps(body, -1) client.send(data_string) # Receive message, unpickle and return it response = receive_data(client) return response return communicate
python
def command_factory(command): """A factory which returns functions for direct daemon communication. This factory will create a function which sends a payload to the daemon and returns the unpickled object which is returned by the daemon. Args: command (string): The type of payload this should be. This determines as what kind of instruction this will be interpreted by the daemon. Returns: function: The created function. """ def communicate(body={}, root_dir=None): """Communicate with the daemon. This function sends a payload to the daemon and returns the unpickled object sent by the daemon. Args: body (dir): Any other arguments that should be put into the payload. root_dir (str): The root directory in which we expect the daemon. We need this to connect to the daemons socket. Returns: function: The returned payload. """ client = connect_socket(root_dir) body['mode'] = command # Delete the func entry we use to call the correct function with argparse # as functions can't be pickled and this shouldn't be send to the daemon. if 'func' in body: del body['func'] data_string = pickle.dumps(body, -1) client.send(data_string) # Receive message, unpickle and return it response = receive_data(client) return response return communicate
[ "def", "command_factory", "(", "command", ")", ":", "def", "communicate", "(", "body", "=", "{", "}", ",", "root_dir", "=", "None", ")", ":", "\"\"\"Communicate with the daemon.\n\n This function sends a payload to the daemon and returns the unpickled\n object sent by the daemon.\n\n Args:\n body (dir): Any other arguments that should be put into the payload.\n root_dir (str): The root directory in which we expect the daemon.\n We need this to connect to the daemons socket.\n Returns:\n function: The returned payload.\n \"\"\"", "client", "=", "connect_socket", "(", "root_dir", ")", "body", "[", "'mode'", "]", "=", "command", "# Delete the func entry we use to call the correct function with argparse", "# as functions can't be pickled and this shouldn't be send to the daemon.", "if", "'func'", "in", "body", ":", "del", "body", "[", "'func'", "]", "data_string", "=", "pickle", ".", "dumps", "(", "body", ",", "-", "1", ")", "client", ".", "send", "(", "data_string", ")", "# Receive message, unpickle and return it", "response", "=", "receive_data", "(", "client", ")", "return", "response", "return", "communicate" ]
A factory which returns functions for direct daemon communication. This factory will create a function which sends a payload to the daemon and returns the unpickled object which is returned by the daemon. Args: command (string): The type of payload this should be. This determines as what kind of instruction this will be interpreted by the daemon. Returns: function: The created function.
[ "A", "factory", "which", "returns", "functions", "for", "direct", "daemon", "communication", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/client/factories.py#L6-L44
train
Nukesor/pueue
pueue/daemon/process_handler.py
ProcessHandler.get_descriptor
def get_descriptor(self, number): """Create file descriptors for process output.""" # Create stdout file and get file descriptor stdout_path = os.path.join(self.config_dir, 'pueue_process_{}.stdout'.format(number)) if os.path.exists(stdout_path): os.remove(stdout_path) out_descriptor = open(stdout_path, 'w+') # Create stderr file and get file descriptor stderr_path = os.path.join(self.config_dir, 'pueue_process_{}.stderr'.format(number)) if os.path.exists(stderr_path): os.remove(stderr_path) err_descriptor = open(stderr_path, 'w+') self.descriptors[number] = {} self.descriptors[number]['stdout'] = out_descriptor self.descriptors[number]['stdout_path'] = stdout_path self.descriptors[number]['stderr'] = err_descriptor self.descriptors[number]['stderr_path'] = stderr_path return out_descriptor, err_descriptor
python
def get_descriptor(self, number): """Create file descriptors for process output.""" # Create stdout file and get file descriptor stdout_path = os.path.join(self.config_dir, 'pueue_process_{}.stdout'.format(number)) if os.path.exists(stdout_path): os.remove(stdout_path) out_descriptor = open(stdout_path, 'w+') # Create stderr file and get file descriptor stderr_path = os.path.join(self.config_dir, 'pueue_process_{}.stderr'.format(number)) if os.path.exists(stderr_path): os.remove(stderr_path) err_descriptor = open(stderr_path, 'w+') self.descriptors[number] = {} self.descriptors[number]['stdout'] = out_descriptor self.descriptors[number]['stdout_path'] = stdout_path self.descriptors[number]['stderr'] = err_descriptor self.descriptors[number]['stderr_path'] = stderr_path return out_descriptor, err_descriptor
[ "def", "get_descriptor", "(", "self", ",", "number", ")", ":", "# Create stdout file and get file descriptor", "stdout_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config_dir", ",", "'pueue_process_{}.stdout'", ".", "format", "(", "number", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "stdout_path", ")", ":", "os", ".", "remove", "(", "stdout_path", ")", "out_descriptor", "=", "open", "(", "stdout_path", ",", "'w+'", ")", "# Create stderr file and get file descriptor", "stderr_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config_dir", ",", "'pueue_process_{}.stderr'", ".", "format", "(", "number", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "stderr_path", ")", ":", "os", ".", "remove", "(", "stderr_path", ")", "err_descriptor", "=", "open", "(", "stderr_path", ",", "'w+'", ")", "self", ".", "descriptors", "[", "number", "]", "=", "{", "}", "self", ".", "descriptors", "[", "number", "]", "[", "'stdout'", "]", "=", "out_descriptor", "self", ".", "descriptors", "[", "number", "]", "[", "'stdout_path'", "]", "=", "stdout_path", "self", ".", "descriptors", "[", "number", "]", "[", "'stderr'", "]", "=", "err_descriptor", "self", ".", "descriptors", "[", "number", "]", "[", "'stderr_path'", "]", "=", "stderr_path", "return", "out_descriptor", ",", "err_descriptor" ]
Create file descriptors for process output.
[ "Create", "file", "descriptors", "for", "process", "output", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/process_handler.py#L58-L79
train
Nukesor/pueue
pueue/daemon/process_handler.py
ProcessHandler.clean_descriptor
def clean_descriptor(self, number): """Close file descriptor and remove underlying files.""" self.descriptors[number]['stdout'].close() self.descriptors[number]['stderr'].close() if os.path.exists(self.descriptors[number]['stdout_path']): os.remove(self.descriptors[number]['stdout_path']) if os.path.exists(self.descriptors[number]['stderr_path']): os.remove(self.descriptors[number]['stderr_path'])
python
def clean_descriptor(self, number): """Close file descriptor and remove underlying files.""" self.descriptors[number]['stdout'].close() self.descriptors[number]['stderr'].close() if os.path.exists(self.descriptors[number]['stdout_path']): os.remove(self.descriptors[number]['stdout_path']) if os.path.exists(self.descriptors[number]['stderr_path']): os.remove(self.descriptors[number]['stderr_path'])
[ "def", "clean_descriptor", "(", "self", ",", "number", ")", ":", "self", ".", "descriptors", "[", "number", "]", "[", "'stdout'", "]", ".", "close", "(", ")", "self", ".", "descriptors", "[", "number", "]", "[", "'stderr'", "]", ".", "close", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "descriptors", "[", "number", "]", "[", "'stdout_path'", "]", ")", ":", "os", ".", "remove", "(", "self", ".", "descriptors", "[", "number", "]", "[", "'stdout_path'", "]", ")", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "descriptors", "[", "number", "]", "[", "'stderr_path'", "]", ")", ":", "os", ".", "remove", "(", "self", ".", "descriptors", "[", "number", "]", "[", "'stderr_path'", "]", ")" ]
Close file descriptor and remove underlying files.
[ "Close", "file", "descriptor", "and", "remove", "underlying", "files", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/process_handler.py#L81-L90
train
Nukesor/pueue
pueue/daemon/process_handler.py
ProcessHandler.check_finished
def check_finished(self): """Poll all processes and handle any finished processes.""" changed = False for key in list(self.processes.keys()): # Poll process and check if it finshed process = self.processes[key] process.poll() if process.returncode is not None: # If a process is terminated by `stop` or `kill` # we want to queue it again instead closing it as failed. if key not in self.stopping: # Get std_out and err_out output, error_output = process.communicate() descriptor = self.descriptors[key] descriptor['stdout'].seek(0) descriptor['stderr'].seek(0) output = get_descriptor_output(descriptor['stdout'], key, handler=self) error_output = get_descriptor_output(descriptor['stderr'], key, handler=self) # Mark queue entry as finished and save returncode self.queue[key]['returncode'] = process.returncode if process.returncode != 0: self.queue[key]['status'] = 'failed' else: self.queue[key]['status'] = 'done' # Add outputs to queue self.queue[key]['stdout'] = output self.queue[key]['stderr'] = error_output self.queue[key]['end'] = str(datetime.now().strftime("%H:%M")) self.queue.write() changed = True else: self.stopping.remove(key) if key in self.to_remove: self.to_remove.remove(key) del self.queue[key] else: if key in self.to_stash: self.to_stash.remove(key) self.queue[key]['status'] = 'stashed' else: self.queue[key]['status'] = 'queued' self.queue[key]['start'] = '' self.queue[key]['end'] = '' self.queue.write() self.clean_descriptor(key) del self.processes[key] # If anything should be logged we return True return changed
python
def check_finished(self): """Poll all processes and handle any finished processes.""" changed = False for key in list(self.processes.keys()): # Poll process and check if it finshed process = self.processes[key] process.poll() if process.returncode is not None: # If a process is terminated by `stop` or `kill` # we want to queue it again instead closing it as failed. if key not in self.stopping: # Get std_out and err_out output, error_output = process.communicate() descriptor = self.descriptors[key] descriptor['stdout'].seek(0) descriptor['stderr'].seek(0) output = get_descriptor_output(descriptor['stdout'], key, handler=self) error_output = get_descriptor_output(descriptor['stderr'], key, handler=self) # Mark queue entry as finished and save returncode self.queue[key]['returncode'] = process.returncode if process.returncode != 0: self.queue[key]['status'] = 'failed' else: self.queue[key]['status'] = 'done' # Add outputs to queue self.queue[key]['stdout'] = output self.queue[key]['stderr'] = error_output self.queue[key]['end'] = str(datetime.now().strftime("%H:%M")) self.queue.write() changed = True else: self.stopping.remove(key) if key in self.to_remove: self.to_remove.remove(key) del self.queue[key] else: if key in self.to_stash: self.to_stash.remove(key) self.queue[key]['status'] = 'stashed' else: self.queue[key]['status'] = 'queued' self.queue[key]['start'] = '' self.queue[key]['end'] = '' self.queue.write() self.clean_descriptor(key) del self.processes[key] # If anything should be logged we return True return changed
[ "def", "check_finished", "(", "self", ")", ":", "changed", "=", "False", "for", "key", "in", "list", "(", "self", ".", "processes", ".", "keys", "(", ")", ")", ":", "# Poll process and check if it finshed", "process", "=", "self", ".", "processes", "[", "key", "]", "process", ".", "poll", "(", ")", "if", "process", ".", "returncode", "is", "not", "None", ":", "# If a process is terminated by `stop` or `kill`", "# we want to queue it again instead closing it as failed.", "if", "key", "not", "in", "self", ".", "stopping", ":", "# Get std_out and err_out", "output", ",", "error_output", "=", "process", ".", "communicate", "(", ")", "descriptor", "=", "self", ".", "descriptors", "[", "key", "]", "descriptor", "[", "'stdout'", "]", ".", "seek", "(", "0", ")", "descriptor", "[", "'stderr'", "]", ".", "seek", "(", "0", ")", "output", "=", "get_descriptor_output", "(", "descriptor", "[", "'stdout'", "]", ",", "key", ",", "handler", "=", "self", ")", "error_output", "=", "get_descriptor_output", "(", "descriptor", "[", "'stderr'", "]", ",", "key", ",", "handler", "=", "self", ")", "# Mark queue entry as finished and save returncode", "self", ".", "queue", "[", "key", "]", "[", "'returncode'", "]", "=", "process", ".", "returncode", "if", "process", ".", "returncode", "!=", "0", ":", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'failed'", "else", ":", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'done'", "# Add outputs to queue", "self", ".", "queue", "[", "key", "]", "[", "'stdout'", "]", "=", "output", "self", ".", "queue", "[", "key", "]", "[", "'stderr'", "]", "=", "error_output", "self", ".", "queue", "[", "key", "]", "[", "'end'", "]", "=", "str", "(", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "\"%H:%M\"", ")", ")", "self", ".", "queue", ".", "write", "(", ")", "changed", "=", "True", "else", ":", "self", ".", "stopping", ".", "remove", "(", "key", ")", "if", "key", "in", "self", ".", "to_remove", ":", "self", 
".", "to_remove", ".", "remove", "(", "key", ")", "del", "self", ".", "queue", "[", "key", "]", "else", ":", "if", "key", "in", "self", ".", "to_stash", ":", "self", ".", "to_stash", ".", "remove", "(", "key", ")", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'stashed'", "else", ":", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'queued'", "self", ".", "queue", "[", "key", "]", "[", "'start'", "]", "=", "''", "self", ".", "queue", "[", "key", "]", "[", "'end'", "]", "=", "''", "self", ".", "queue", ".", "write", "(", ")", "self", ".", "clean_descriptor", "(", "key", ")", "del", "self", ".", "processes", "[", "key", "]", "# If anything should be logged we return True", "return", "changed" ]
Poll all processes and handle any finished processes.
[ "Poll", "all", "processes", "and", "handle", "any", "finished", "processes", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/process_handler.py#L92-L146
train
Nukesor/pueue
pueue/daemon/process_handler.py
ProcessHandler.check_for_new
def check_for_new(self): """Check if we can start a new process.""" free_slots = self.max_processes - len(self.processes) for item in range(free_slots): key = self.queue.next() if key is not None: self.spawn_new(key)
python
def check_for_new(self): """Check if we can start a new process.""" free_slots = self.max_processes - len(self.processes) for item in range(free_slots): key = self.queue.next() if key is not None: self.spawn_new(key)
[ "def", "check_for_new", "(", "self", ")", ":", "free_slots", "=", "self", ".", "max_processes", "-", "len", "(", "self", ".", "processes", ")", "for", "item", "in", "range", "(", "free_slots", ")", ":", "key", "=", "self", ".", "queue", ".", "next", "(", ")", "if", "key", "is", "not", "None", ":", "self", ".", "spawn_new", "(", "key", ")" ]
Check if we can start a new process.
[ "Check", "if", "we", "can", "start", "a", "new", "process", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/process_handler.py#L148-L154
train
Nukesor/pueue
pueue/daemon/process_handler.py
ProcessHandler.spawn_new
def spawn_new(self, key): """Spawn a new task and save it to the queue.""" # Check if path exists if not os.path.exists(self.queue[key]['path']): self.queue[key]['status'] = 'failed' error_msg = "The directory for this command doesn't exist anymore: {}".format(self.queue[key]['path']) self.logger.error(error_msg) self.queue[key]['stdout'] = '' self.queue[key]['stderr'] = error_msg else: # Get file descriptors stdout, stderr = self.get_descriptor(key) if self.custom_shell != 'default': # Create subprocess self.processes[key] = subprocess.Popen( [ self.custom_shell, '-i', '-c', self.queue[key]['command'], ], stdout=stdout, stderr=stderr, stdin=subprocess.PIPE, universal_newlines=True, preexec_fn=os.setsid, cwd=self.queue[key]['path'] ) else: # Create subprocess self.processes[key] = subprocess.Popen( self.queue[key]['command'], shell=True, stdout=stdout, stderr=stderr, stdin=subprocess.PIPE, universal_newlines=True, preexec_fn=os.setsid, cwd=self.queue[key]['path'] ) self.queue[key]['status'] = 'running' self.queue[key]['start'] = str(datetime.now().strftime("%H:%M")) self.queue.write()
python
def spawn_new(self, key): """Spawn a new task and save it to the queue.""" # Check if path exists if not os.path.exists(self.queue[key]['path']): self.queue[key]['status'] = 'failed' error_msg = "The directory for this command doesn't exist anymore: {}".format(self.queue[key]['path']) self.logger.error(error_msg) self.queue[key]['stdout'] = '' self.queue[key]['stderr'] = error_msg else: # Get file descriptors stdout, stderr = self.get_descriptor(key) if self.custom_shell != 'default': # Create subprocess self.processes[key] = subprocess.Popen( [ self.custom_shell, '-i', '-c', self.queue[key]['command'], ], stdout=stdout, stderr=stderr, stdin=subprocess.PIPE, universal_newlines=True, preexec_fn=os.setsid, cwd=self.queue[key]['path'] ) else: # Create subprocess self.processes[key] = subprocess.Popen( self.queue[key]['command'], shell=True, stdout=stdout, stderr=stderr, stdin=subprocess.PIPE, universal_newlines=True, preexec_fn=os.setsid, cwd=self.queue[key]['path'] ) self.queue[key]['status'] = 'running' self.queue[key]['start'] = str(datetime.now().strftime("%H:%M")) self.queue.write()
[ "def", "spawn_new", "(", "self", ",", "key", ")", ":", "# Check if path exists", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "queue", "[", "key", "]", "[", "'path'", "]", ")", ":", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'failed'", "error_msg", "=", "\"The directory for this command doesn't exist anymore: {}\"", ".", "format", "(", "self", ".", "queue", "[", "key", "]", "[", "'path'", "]", ")", "self", ".", "logger", ".", "error", "(", "error_msg", ")", "self", ".", "queue", "[", "key", "]", "[", "'stdout'", "]", "=", "''", "self", ".", "queue", "[", "key", "]", "[", "'stderr'", "]", "=", "error_msg", "else", ":", "# Get file descriptors", "stdout", ",", "stderr", "=", "self", ".", "get_descriptor", "(", "key", ")", "if", "self", ".", "custom_shell", "!=", "'default'", ":", "# Create subprocess", "self", ".", "processes", "[", "key", "]", "=", "subprocess", ".", "Popen", "(", "[", "self", ".", "custom_shell", ",", "'-i'", ",", "'-c'", ",", "self", ".", "queue", "[", "key", "]", "[", "'command'", "]", ",", "]", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "universal_newlines", "=", "True", ",", "preexec_fn", "=", "os", ".", "setsid", ",", "cwd", "=", "self", ".", "queue", "[", "key", "]", "[", "'path'", "]", ")", "else", ":", "# Create subprocess", "self", ".", "processes", "[", "key", "]", "=", "subprocess", ".", "Popen", "(", "self", ".", "queue", "[", "key", "]", "[", "'command'", "]", ",", "shell", "=", "True", ",", "stdout", "=", "stdout", ",", "stderr", "=", "stderr", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "universal_newlines", "=", "True", ",", "preexec_fn", "=", "os", ".", "setsid", ",", "cwd", "=", "self", ".", "queue", "[", "key", "]", "[", "'path'", "]", ")", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'running'", "self", ".", "queue", "[", "key", "]", "[", "'start'", "]", "=", "str", "(", "datetime", ".", 
"now", "(", ")", ".", "strftime", "(", "\"%H:%M\"", ")", ")", "self", ".", "queue", ".", "write", "(", ")" ]
Spawn a new task and save it to the queue.
[ "Spawn", "a", "new", "task", "and", "save", "it", "to", "the", "queue", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/process_handler.py#L156-L201
train
Nukesor/pueue
pueue/daemon/process_handler.py
ProcessHandler.kill_all
def kill_all(self, kill_signal, kill_shell=False): """Kill all running processes.""" for key in self.processes.keys(): self.kill_process(key, kill_signal, kill_shell)
python
def kill_all(self, kill_signal, kill_shell=False): """Kill all running processes.""" for key in self.processes.keys(): self.kill_process(key, kill_signal, kill_shell)
[ "def", "kill_all", "(", "self", ",", "kill_signal", ",", "kill_shell", "=", "False", ")", ":", "for", "key", "in", "self", ".", "processes", ".", "keys", "(", ")", ":", "self", ".", "kill_process", "(", "key", ",", "kill_signal", ",", "kill_shell", ")" ]
Kill all running processes.
[ "Kill", "all", "running", "processes", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/process_handler.py#L221-L224
train
Nukesor/pueue
pueue/daemon/process_handler.py
ProcessHandler.start_process
def start_process(self, key): """Start a specific processes.""" if key in self.processes and key in self.paused: os.killpg(os.getpgid(self.processes[key].pid), signal.SIGCONT) self.queue[key]['status'] = 'running' self.paused.remove(key) return True elif key not in self.processes: if self.queue[key]['status'] in ['queued', 'stashed']: self.spawn_new(key) return True return False
python
def start_process(self, key): """Start a specific processes.""" if key in self.processes and key in self.paused: os.killpg(os.getpgid(self.processes[key].pid), signal.SIGCONT) self.queue[key]['status'] = 'running' self.paused.remove(key) return True elif key not in self.processes: if self.queue[key]['status'] in ['queued', 'stashed']: self.spawn_new(key) return True return False
[ "def", "start_process", "(", "self", ",", "key", ")", ":", "if", "key", "in", "self", ".", "processes", "and", "key", "in", "self", ".", "paused", ":", "os", ".", "killpg", "(", "os", ".", "getpgid", "(", "self", ".", "processes", "[", "key", "]", ".", "pid", ")", ",", "signal", ".", "SIGCONT", ")", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'running'", "self", ".", "paused", ".", "remove", "(", "key", ")", "return", "True", "elif", "key", "not", "in", "self", ".", "processes", ":", "if", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "in", "[", "'queued'", ",", "'stashed'", "]", ":", "self", ".", "spawn_new", "(", "key", ")", "return", "True", "return", "False" ]
Start a specific processes.
[ "Start", "a", "specific", "processes", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/process_handler.py#L226-L238
train
Nukesor/pueue
pueue/daemon/process_handler.py
ProcessHandler.pause_process
def pause_process(self, key): """Pause a specific processes.""" if key in self.processes and key not in self.paused: os.killpg(os.getpgid(self.processes[key].pid), signal.SIGSTOP) self.queue[key]['status'] = 'paused' self.paused.append(key) return True return False
python
def pause_process(self, key): """Pause a specific processes.""" if key in self.processes and key not in self.paused: os.killpg(os.getpgid(self.processes[key].pid), signal.SIGSTOP) self.queue[key]['status'] = 'paused' self.paused.append(key) return True return False
[ "def", "pause_process", "(", "self", ",", "key", ")", ":", "if", "key", "in", "self", ".", "processes", "and", "key", "not", "in", "self", ".", "paused", ":", "os", ".", "killpg", "(", "os", ".", "getpgid", "(", "self", ".", "processes", "[", "key", "]", ".", "pid", ")", ",", "signal", ".", "SIGSTOP", ")", "self", ".", "queue", "[", "key", "]", "[", "'status'", "]", "=", "'paused'", "self", ".", "paused", ".", "append", "(", "key", ")", "return", "True", "return", "False" ]
Pause a specific processes.
[ "Pause", "a", "specific", "processes", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/daemon/process_handler.py#L240-L247
train
Nukesor/pueue
pueue/__init__.py
daemon_factory
def daemon_factory(path): """Create a closure which creates a running daemon. We need to create a closure that contains the correct path the daemon should be started with. This is needed as the `Daemonize` library requires a callable function for daemonization and doesn't accept any arguments. This function cleans up sockets and output files in case we encounter any exceptions. """ def start_daemon(): root_dir = path config_dir = os.path.join(root_dir, '.config/pueue') try: daemon = Daemon(root_dir=root_dir) daemon.main() except KeyboardInterrupt: print('Keyboard interrupt. Shutting down') daemon.stop_daemon() except Exception: try: daemon.stop_daemon() except Exception: pass cleanup(config_dir) raise return start_daemon
python
def daemon_factory(path): """Create a closure which creates a running daemon. We need to create a closure that contains the correct path the daemon should be started with. This is needed as the `Daemonize` library requires a callable function for daemonization and doesn't accept any arguments. This function cleans up sockets and output files in case we encounter any exceptions. """ def start_daemon(): root_dir = path config_dir = os.path.join(root_dir, '.config/pueue') try: daemon = Daemon(root_dir=root_dir) daemon.main() except KeyboardInterrupt: print('Keyboard interrupt. Shutting down') daemon.stop_daemon() except Exception: try: daemon.stop_daemon() except Exception: pass cleanup(config_dir) raise return start_daemon
[ "def", "daemon_factory", "(", "path", ")", ":", "def", "start_daemon", "(", ")", ":", "root_dir", "=", "path", "config_dir", "=", "os", ".", "path", ".", "join", "(", "root_dir", ",", "'.config/pueue'", ")", "try", ":", "daemon", "=", "Daemon", "(", "root_dir", "=", "root_dir", ")", "daemon", ".", "main", "(", ")", "except", "KeyboardInterrupt", ":", "print", "(", "'Keyboard interrupt. Shutting down'", ")", "daemon", ".", "stop_daemon", "(", ")", "except", "Exception", ":", "try", ":", "daemon", ".", "stop_daemon", "(", ")", "except", "Exception", ":", "pass", "cleanup", "(", "config_dir", ")", "raise", "return", "start_daemon" ]
Create a closure which creates a running daemon. We need to create a closure that contains the correct path the daemon should be started with. This is needed as the `Daemonize` library requires a callable function for daemonization and doesn't accept any arguments. This function cleans up sockets and output files in case we encounter any exceptions.
[ "Create", "a", "closure", "which", "creates", "a", "running", "daemon", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/__init__.py#L13-L37
train
Nukesor/pueue
pueue/__init__.py
main
def main(): """Execute entry function.""" args = parser.parse_args() args_dict = vars(args) root_dir = args_dict['root'] if 'root' in args else None # If a root directory is specified, get the absolute path and # check if it exists. Abort if it doesn't exist! if root_dir: root_dir = os.path.abspath(root_dir) if not os.path.exists(root_dir): print("The specified directory doesn't exist!") sys.exit(1) # Default to home directory if no root is specified else: root_dir = os.path.expanduser('~') if args.stopdaemon: print_command_factory('STOPDAEMON')(vars(args), root_dir) elif args.nodaemon: daemon_factory(root_dir)() elif args.daemon: config_dir = os.path.join(root_dir, '.config/pueue') os.makedirs(config_dir, exist_ok=True) daemon = Daemonize(app='pueue', pid=os.path.join(config_dir, 'pueue.pid'), action=daemon_factory(root_dir), chdir=root_dir) daemon.start() elif hasattr(args, 'func'): try: args.func(args_dict, root_dir) except EOFError: print('Apparently the daemon just died. Sorry for that :/') else: print('Invalid Command. Please check -h')
python
def main(): """Execute entry function.""" args = parser.parse_args() args_dict = vars(args) root_dir = args_dict['root'] if 'root' in args else None # If a root directory is specified, get the absolute path and # check if it exists. Abort if it doesn't exist! if root_dir: root_dir = os.path.abspath(root_dir) if not os.path.exists(root_dir): print("The specified directory doesn't exist!") sys.exit(1) # Default to home directory if no root is specified else: root_dir = os.path.expanduser('~') if args.stopdaemon: print_command_factory('STOPDAEMON')(vars(args), root_dir) elif args.nodaemon: daemon_factory(root_dir)() elif args.daemon: config_dir = os.path.join(root_dir, '.config/pueue') os.makedirs(config_dir, exist_ok=True) daemon = Daemonize(app='pueue', pid=os.path.join(config_dir, 'pueue.pid'), action=daemon_factory(root_dir), chdir=root_dir) daemon.start() elif hasattr(args, 'func'): try: args.func(args_dict, root_dir) except EOFError: print('Apparently the daemon just died. Sorry for that :/') else: print('Invalid Command. Please check -h')
[ "def", "main", "(", ")", ":", "args", "=", "parser", ".", "parse_args", "(", ")", "args_dict", "=", "vars", "(", "args", ")", "root_dir", "=", "args_dict", "[", "'root'", "]", "if", "'root'", "in", "args", "else", "None", "# If a root directory is specified, get the absolute path and", "# check if it exists. Abort if it doesn't exist!", "if", "root_dir", ":", "root_dir", "=", "os", ".", "path", ".", "abspath", "(", "root_dir", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "root_dir", ")", ":", "print", "(", "\"The specified directory doesn't exist!\"", ")", "sys", ".", "exit", "(", "1", ")", "# Default to home directory if no root is specified", "else", ":", "root_dir", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "if", "args", ".", "stopdaemon", ":", "print_command_factory", "(", "'STOPDAEMON'", ")", "(", "vars", "(", "args", ")", ",", "root_dir", ")", "elif", "args", ".", "nodaemon", ":", "daemon_factory", "(", "root_dir", ")", "(", ")", "elif", "args", ".", "daemon", ":", "config_dir", "=", "os", ".", "path", ".", "join", "(", "root_dir", ",", "'.config/pueue'", ")", "os", ".", "makedirs", "(", "config_dir", ",", "exist_ok", "=", "True", ")", "daemon", "=", "Daemonize", "(", "app", "=", "'pueue'", ",", "pid", "=", "os", ".", "path", ".", "join", "(", "config_dir", ",", "'pueue.pid'", ")", ",", "action", "=", "daemon_factory", "(", "root_dir", ")", ",", "chdir", "=", "root_dir", ")", "daemon", ".", "start", "(", ")", "elif", "hasattr", "(", "args", ",", "'func'", ")", ":", "try", ":", "args", ".", "func", "(", "args_dict", ",", "root_dir", ")", "except", "EOFError", ":", "print", "(", "'Apparently the daemon just died. Sorry for that :/'", ")", "else", ":", "print", "(", "'Invalid Command. Please check -h'", ")" ]
Execute entry function.
[ "Execute", "entry", "function", "." ]
f1d276360454d4dd2738658a13df1e20caa4b926
https://github.com/Nukesor/pueue/blob/f1d276360454d4dd2738658a13df1e20caa4b926/pueue/__init__.py#L40-L73
train
corpusops/pdbclone
lib/pdb_clone/pdbhandler.py
register
def register(host=DFLT_ADDRESS[0], port=DFLT_ADDRESS[1], signum=signal.SIGUSR1): """Register a pdb handler for signal 'signum'. The handler sets pdb to listen on the ('host', 'port') internet address and to start a remote debugging session on accepting a socket connection. """ _pdbhandler._register(host, port, signum)
python
def register(host=DFLT_ADDRESS[0], port=DFLT_ADDRESS[1], signum=signal.SIGUSR1): """Register a pdb handler for signal 'signum'. The handler sets pdb to listen on the ('host', 'port') internet address and to start a remote debugging session on accepting a socket connection. """ _pdbhandler._register(host, port, signum)
[ "def", "register", "(", "host", "=", "DFLT_ADDRESS", "[", "0", "]", ",", "port", "=", "DFLT_ADDRESS", "[", "1", "]", ",", "signum", "=", "signal", ".", "SIGUSR1", ")", ":", "_pdbhandler", ".", "_register", "(", "host", ",", "port", ",", "signum", ")" ]
Register a pdb handler for signal 'signum'. The handler sets pdb to listen on the ('host', 'port') internet address and to start a remote debugging session on accepting a socket connection.
[ "Register", "a", "pdb", "handler", "for", "signal", "signum", "." ]
f781537c243a4874b246d43dbdef8c4279f0094d
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdbhandler.py#L19-L26
train
corpusops/pdbclone
lib/pdb_clone/pdbhandler.py
get_handler
def get_handler(): """Return the handler as a named tuple. The named tuple attributes are 'host', 'port', 'signum'. Return None when no handler has been registered. """ host, port, signum = _pdbhandler._registered() if signum: return Handler(host if host else DFLT_ADDRESS[0].encode(), port if port else DFLT_ADDRESS[1], signum)
python
def get_handler(): """Return the handler as a named tuple. The named tuple attributes are 'host', 'port', 'signum'. Return None when no handler has been registered. """ host, port, signum = _pdbhandler._registered() if signum: return Handler(host if host else DFLT_ADDRESS[0].encode(), port if port else DFLT_ADDRESS[1], signum)
[ "def", "get_handler", "(", ")", ":", "host", ",", "port", ",", "signum", "=", "_pdbhandler", ".", "_registered", "(", ")", "if", "signum", ":", "return", "Handler", "(", "host", "if", "host", "else", "DFLT_ADDRESS", "[", "0", "]", ".", "encode", "(", ")", ",", "port", "if", "port", "else", "DFLT_ADDRESS", "[", "1", "]", ",", "signum", ")" ]
Return the handler as a named tuple. The named tuple attributes are 'host', 'port', 'signum'. Return None when no handler has been registered.
[ "Return", "the", "handler", "as", "a", "named", "tuple", "." ]
f781537c243a4874b246d43dbdef8c4279f0094d
https://github.com/corpusops/pdbclone/blob/f781537c243a4874b246d43dbdef8c4279f0094d/lib/pdb_clone/pdbhandler.py#L35-L44
train
dlecocq/nsq-py
nsq/checker.py
StoppableThread.wait
def wait(self, timeout): '''Wait for the provided time to elapse''' logger.debug('Waiting for %fs', timeout) return self._event.wait(timeout)
python
def wait(self, timeout): '''Wait for the provided time to elapse''' logger.debug('Waiting for %fs', timeout) return self._event.wait(timeout)
[ "def", "wait", "(", "self", ",", "timeout", ")", ":", "logger", ".", "debug", "(", "'Waiting for %fs'", ",", "timeout", ")", "return", "self", ".", "_event", ".", "wait", "(", "timeout", ")" ]
Wait for the provided time to elapse
[ "Wait", "for", "the", "provided", "time", "to", "elapse" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/checker.py#L14-L17
train
dlecocq/nsq-py
nsq/checker.py
PeriodicThread.delay
def delay(self): '''How long to wait before the next check''' if self._last_checked: return self._interval - (time.time() - self._last_checked) return self._interval
python
def delay(self): '''How long to wait before the next check''' if self._last_checked: return self._interval - (time.time() - self._last_checked) return self._interval
[ "def", "delay", "(", "self", ")", ":", "if", "self", ".", "_last_checked", ":", "return", "self", ".", "_interval", "-", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_last_checked", ")", "return", "self", ".", "_interval" ]
How long to wait before the next check
[ "How", "long", "to", "wait", "before", "the", "next", "check" ]
3ecacf6ab7719d38031179277113d875554a0c16
https://github.com/dlecocq/nsq-py/blob/3ecacf6ab7719d38031179277113d875554a0c16/nsq/checker.py#L34-L38
train