repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
brettcannon/caniusepython3
caniusepython3/dependencies.py
reasons_to_paths
def reasons_to_paths(reasons): """Calculate the dependency paths to the reasons of the blockers. Paths will be in reverse-dependency order (i.e. parent projects are in ascending order). """ blockers = set(reasons.keys()) - set(reasons.values()) paths = set() for blocker in blockers: path = [blocker] parent = reasons[blocker] while parent: if parent in path: raise CircularDependencyError(dict(parent=parent, blocker=blocker, path=path)) path.append(parent) parent = reasons.get(parent) paths.add(tuple(path)) return paths
python
def reasons_to_paths(reasons):
    """Calculate the dependency paths to the reasons of the blockers.

    Paths will be in reverse-dependency order (i.e. parent projects are in
    ascending order).
    """
    # Top-level blockers are projects that nothing else depends on.
    top_level = set(reasons) - set(reasons.values())
    all_paths = set()
    for project in top_level:
        chain = [project]
        ancestor = reasons[project]
        while ancestor:
            if ancestor in chain:
                raise CircularDependencyError(dict(parent=ancestor,
                                                   blocker=project,
                                                   path=chain))
            chain.append(ancestor)
            ancestor = reasons.get(ancestor)
        all_paths.add(tuple(chain))
    return all_paths
[ "def", "reasons_to_paths", "(", "reasons", ")", ":", "blockers", "=", "set", "(", "reasons", ".", "keys", "(", ")", ")", "-", "set", "(", "reasons", ".", "values", "(", ")", ")", "paths", "=", "set", "(", ")", "for", "blocker", "in", "blockers", ":...
Calculate the dependency paths to the reasons of the blockers. Paths will be in reverse-dependency order (i.e. parent projects are in ascending order).
[ "Calculate", "the", "dependency", "paths", "to", "the", "reasons", "of", "the", "blockers", "." ]
195775d8f1891f73eb90734f3edda0c57e08dbf3
https://github.com/brettcannon/caniusepython3/blob/195775d8f1891f73eb90734f3edda0c57e08dbf3/caniusepython3/dependencies.py#L31-L51
train
203,400
brettcannon/caniusepython3
caniusepython3/dependencies.py
dependencies
def dependencies(project_name): """Get the dependencies for a project.""" log = logging.getLogger('ciu') log.info('Locating dependencies for {}'.format(project_name)) located = distlib.locators.locate(project_name, prereleases=True) if not located: log.warning('{0} not found'.format(project_name)) return None return {packaging.utils.canonicalize_name(pypi.just_name(dep)) for dep in located.run_requires}
python
def dependencies(project_name):
    """Get the dependencies for a project."""
    log = logging.getLogger('ciu')
    log.info('Locating dependencies for {}'.format(project_name))
    located = distlib.locators.locate(project_name, prereleases=True)
    if not located:
        log.warning('{0} not found'.format(project_name))
        return None
    # Canonicalize each dependency name so comparisons are consistent.
    found = set()
    for dep in located.run_requires:
        found.add(packaging.utils.canonicalize_name(pypi.just_name(dep)))
    return found
[ "def", "dependencies", "(", "project_name", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "'ciu'", ")", "log", ".", "info", "(", "'Locating dependencies for {}'", ".", "format", "(", "project_name", ")", ")", "located", "=", "distlib", ".", "locato...
Get the dependencies for a project.
[ "Get", "the", "dependencies", "for", "a", "project", "." ]
195775d8f1891f73eb90734f3edda0c57e08dbf3
https://github.com/brettcannon/caniusepython3/blob/195775d8f1891f73eb90734f3edda0c57e08dbf3/caniusepython3/dependencies.py#L54-L63
train
203,401
brettcannon/caniusepython3
caniusepython3/projects.py
projects_from_requirements
def projects_from_requirements(requirements): """Extract the project dependencies from a Requirements specification.""" log = logging.getLogger('ciu') valid_reqs = [] for requirements_path in requirements: with io.open(requirements_path) as file: requirements_text = file.read() # Drop line continuations. requirements_text = re.sub(r"\\s*", "", requirements_text) # Drop comments. requirements_text = re.sub(r"#.*", "", requirements_text) reqs = [] for line in requirements_text.splitlines(): if not line: continue try: reqs.append(packaging.requirements.Requirement(line)) except packaging.requirements.InvalidRequirement: log.warning('Skipping {0!r}: could not parse requirement'.format(line)) for req in reqs: if not req.name: log.warning('A requirement lacks a name ' '(e.g. no `#egg` on a `file:` path)') elif req.url: log.warning( 'Skipping {0}: URL-specified projects unsupported'.format(req.name)) else: valid_reqs.append(req.name) return frozenset(map(packaging.utils.canonicalize_name, valid_reqs))
python
def projects_from_requirements(requirements):
    """Extract the project dependencies from a Requirements specification.

    :param requirements: iterable of paths to requirements files
    :return: frozenset of canonicalized project names

    Requirements that cannot be parsed, lack a name, or are specified by
    URL are skipped with a warning.
    """
    log = logging.getLogger('ciu')
    valid_reqs = []
    for requirements_path in requirements:
        with io.open(requirements_path) as file:
            requirements_text = file.read()
        # Drop line continuations: a literal backslash plus the trailing
        # whitespace/newline, joining continued lines back together.
        # (The previous pattern r"\\s*" matched a backslash followed by
        # literal 's' characters, so continuations were never joined.)
        requirements_text = re.sub(r"\\\s*", "", requirements_text)
        # Drop comments.
        requirements_text = re.sub(r"#.*", "", requirements_text)
        reqs = []
        for line in requirements_text.splitlines():
            if not line:
                continue
            try:
                reqs.append(packaging.requirements.Requirement(line))
            except packaging.requirements.InvalidRequirement:
                log.warning('Skipping {0!r}: could not parse requirement'.format(line))
        for req in reqs:
            if not req.name:
                log.warning('A requirement lacks a name '
                            '(e.g. no `#egg` on a `file:` path)')
            elif req.url:
                log.warning(
                    'Skipping {0}: URL-specified projects unsupported'.format(req.name))
            else:
                valid_reqs.append(req.name)
    return frozenset(map(packaging.utils.canonicalize_name, valid_reqs))
[ "def", "projects_from_requirements", "(", "requirements", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "'ciu'", ")", "valid_reqs", "=", "[", "]", "for", "requirements_path", "in", "requirements", ":", "with", "io", ".", "open", "(", "requirements_pa...
Extract the project dependencies from a Requirements specification.
[ "Extract", "the", "project", "dependencies", "from", "a", "Requirements", "specification", "." ]
195775d8f1891f73eb90734f3edda0c57e08dbf3
https://github.com/brettcannon/caniusepython3/blob/195775d8f1891f73eb90734f3edda0c57e08dbf3/caniusepython3/projects.py#L15-L43
train
203,402
brettcannon/caniusepython3
caniusepython3/projects.py
projects_from_metadata
def projects_from_metadata(metadata): """Extract the project dependencies from a metadata spec.""" projects = [] for data in metadata: meta = distlib.metadata.Metadata(fileobj=io.StringIO(data)) projects.extend(pypi.just_name(project) for project in meta.run_requires) return frozenset(map(packaging.utils.canonicalize_name, projects))
python
def projects_from_metadata(metadata):
    """Extract the project dependencies from a metadata spec."""
    projects = []
    for data in metadata:
        meta = distlib.metadata.Metadata(fileobj=io.StringIO(data))
        for project in meta.run_requires:
            projects.append(pypi.just_name(project))
    # Canonicalize so names compare consistently regardless of spelling.
    return frozenset(map(packaging.utils.canonicalize_name, projects))
[ "def", "projects_from_metadata", "(", "metadata", ")", ":", "projects", "=", "[", "]", "for", "data", "in", "metadata", ":", "meta", "=", "distlib", ".", "metadata", ".", "Metadata", "(", "fileobj", "=", "io", ".", "StringIO", "(", "data", ")", ")", "p...
Extract the project dependencies from a metadata spec.
[ "Extract", "the", "project", "dependencies", "from", "a", "metadata", "spec", "." ]
195775d8f1891f73eb90734f3edda0c57e08dbf3
https://github.com/brettcannon/caniusepython3/blob/195775d8f1891f73eb90734f3edda0c57e08dbf3/caniusepython3/projects.py#L46-L52
train
203,403
asweigart/pyscreeze
pyscreeze/__init__.py
locateOnScreen
def locateOnScreen(image, minSearchTime=0, **kwargs): """minSearchTime - amount of time in seconds to repeat taking screenshots and trying to locate a match. The default of 0 performs a single search. """ start = time.time() while True: try: screenshotIm = screenshot(region=None) # the locateAll() function must handle cropping to return accurate coordinates, so don't pass a region here. retVal = locate(image, screenshotIm, **kwargs) try: screenshotIm.fp.close() except AttributeError: # Screenshots on Windows won't have an fp since they came from # ImageGrab, not a file. Screenshots on Linux will have fp set # to None since the file has been unlinked pass if retVal or time.time() - start > minSearchTime: return retVal except ImageNotFoundException: if time.time() - start > minSearchTime: if USE_IMAGE_NOT_FOUND_EXCEPTION: raise else: return None
python
def locateOnScreen(image, minSearchTime=0, **kwargs):
    """minSearchTime - amount of time in seconds to keep taking screenshots
    and retrying the match.  The default of 0 performs a single search.
    """
    start = time.time()
    while True:
        try:
            # The locateAll() function must handle cropping to return
            # accurate coordinates, so don't pass a region here.
            im = screenshot(region=None)
            found = locate(image, im, **kwargs)
            try:
                im.fp.close()
            except AttributeError:
                # Screenshots on Windows won't have an fp since they came
                # from ImageGrab, not a file.  Screenshots on Linux will
                # have fp set to None since the file has been unlinked.
                pass
            if found or (time.time() - start) > minSearchTime:
                return found
        except ImageNotFoundException:
            if (time.time() - start) > minSearchTime:
                if not USE_IMAGE_NOT_FOUND_EXCEPTION:
                    return None
                raise
[ "def", "locateOnScreen", "(", "image", ",", "minSearchTime", "=", "0", ",", "*", "*", "kwargs", ")", ":", "start", "=", "time", ".", "time", "(", ")", "while", "True", ":", "try", ":", "screenshotIm", "=", "screenshot", "(", "region", "=", "None", ")...
minSearchTime - amount of time in seconds to repeat taking screenshots and trying to locate a match. The default of 0 performs a single search.
[ "minSearchTime", "-", "amount", "of", "time", "in", "seconds", "to", "repeat", "taking", "screenshots", "and", "trying", "to", "locate", "a", "match", ".", "The", "default", "of", "0", "performs", "a", "single", "search", "." ]
3cfd27a1357b12805e7267893fdaff5995cc4528
https://github.com/asweigart/pyscreeze/blob/3cfd27a1357b12805e7267893fdaff5995cc4528/pyscreeze/__init__.py#L277-L301
train
203,404
ipinfo/python
ipinfo/handler.py
Handler.getDetails
def getDetails(self, ip_address=None): """Get details for specified IP address as a Details object.""" raw_details = self._requestDetails(ip_address) raw_details['country_name'] = self.countries.get(raw_details.get('country')) raw_details['ip_address'] = ipaddress.ip_address(raw_details.get('ip')) raw_details['latitude'], raw_details['longitude'] = self._read_coords(raw_details.get('loc')) return Details(raw_details)
python
def getDetails(self, ip_address=None):
    """Get details for specified IP address as a Details object."""
    raw = self._requestDetails(ip_address)
    # Enrich the raw API response with derived fields.
    raw['country_name'] = self.countries.get(raw.get('country'))
    raw['ip_address'] = ipaddress.ip_address(raw.get('ip'))
    latitude, longitude = self._read_coords(raw.get('loc'))
    raw['latitude'] = latitude
    raw['longitude'] = longitude
    return Details(raw)
[ "def", "getDetails", "(", "self", ",", "ip_address", "=", "None", ")", ":", "raw_details", "=", "self", ".", "_requestDetails", "(", "ip_address", ")", "raw_details", "[", "'country_name'", "]", "=", "self", ".", "countries", ".", "get", "(", "raw_details", ...
Get details for specified IP address as a Details object.
[ "Get", "details", "for", "specified", "IP", "address", "as", "a", "Details", "object", "." ]
62fef9136069eab280806cc772dc578d3f1d8d63
https://github.com/ipinfo/python/blob/62fef9136069eab280806cc772dc578d3f1d8d63/ipinfo/handler.py#L44-L50
train
203,405
ipinfo/python
ipinfo/handler.py
Handler._requestDetails
def _requestDetails(self, ip_address=None): """Get IP address data by sending request to IPinfo API.""" if ip_address not in self.cache: url = self.API_URL if ip_address: url += '/' + ip_address response = requests.get(url, headers=self._get_headers(), **self.request_options) if response.status_code == 429: raise RequestQuotaExceededError() response.raise_for_status() self.cache[ip_address] = response.json() return self.cache[ip_address]
python
def _requestDetails(self, ip_address=None):
    """Get IP address data by sending request to IPinfo API."""
    # Serve repeated lookups from the in-memory cache.
    if ip_address in self.cache:
        return self.cache[ip_address]
    url = self.API_URL + ('/' + ip_address if ip_address else '')
    response = requests.get(url, headers=self._get_headers(), **self.request_options)
    if response.status_code == 429:
        raise RequestQuotaExceededError()
    response.raise_for_status()
    self.cache[ip_address] = response.json()
    return self.cache[ip_address]
[ "def", "_requestDetails", "(", "self", ",", "ip_address", "=", "None", ")", ":", "if", "ip_address", "not", "in", "self", ".", "cache", ":", "url", "=", "self", ".", "API_URL", "if", "ip_address", ":", "url", "+=", "'/'", "+", "ip_address", "response", ...
Get IP address data by sending request to IPinfo API.
[ "Get", "IP", "address", "data", "by", "sending", "request", "to", "IPinfo", "API", "." ]
62fef9136069eab280806cc772dc578d3f1d8d63
https://github.com/ipinfo/python/blob/62fef9136069eab280806cc772dc578d3f1d8d63/ipinfo/handler.py#L52-L65
train
203,406
ipinfo/python
ipinfo/handler.py
Handler._get_headers
def _get_headers(self): """Built headers for request to IPinfo API.""" headers = { 'user-agent': 'IPinfoClient/Python{version}/1.0'.format(version=sys.version_info[0]), 'accept': 'application/json' } if self.access_token: headers['authorization'] = 'Bearer {}'.format(self.access_token) return headers
python
def _get_headers(self): """Built headers for request to IPinfo API.""" headers = { 'user-agent': 'IPinfoClient/Python{version}/1.0'.format(version=sys.version_info[0]), 'accept': 'application/json' } if self.access_token: headers['authorization'] = 'Bearer {}'.format(self.access_token) return headers
[ "def", "_get_headers", "(", "self", ")", ":", "headers", "=", "{", "'user-agent'", ":", "'IPinfoClient/Python{version}/1.0'", ".", "format", "(", "version", "=", "sys", ".", "version_info", "[", "0", "]", ")", ",", "'accept'", ":", "'application/json'", "}", ...
Built headers for request to IPinfo API.
[ "Built", "headers", "for", "request", "to", "IPinfo", "API", "." ]
62fef9136069eab280806cc772dc578d3f1d8d63
https://github.com/ipinfo/python/blob/62fef9136069eab280806cc772dc578d3f1d8d63/ipinfo/handler.py#L67-L77
train
203,407
ipinfo/python
ipinfo/handler.py
Handler._read_country_names
def _read_country_names(self, countries_file=None): """Read list of countries from specified country file or default file.""" if not countries_file: countries_file = os.path.join(os.path.dirname(__file__), self.COUNTRY_FILE_DEFAULT) with open(countries_file) as f: countries_json = f.read() return json.loads(countries_json)
python
def _read_country_names(self, countries_file=None): """Read list of countries from specified country file or default file.""" if not countries_file: countries_file = os.path.join(os.path.dirname(__file__), self.COUNTRY_FILE_DEFAULT) with open(countries_file) as f: countries_json = f.read() return json.loads(countries_json)
[ "def", "_read_country_names", "(", "self", ",", "countries_file", "=", "None", ")", ":", "if", "not", "countries_file", ":", "countries_file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "self",...
Read list of countries from specified country file or default file.
[ "Read", "list", "of", "countries", "from", "specified", "country", "file", "or", "default", "file", "." ]
62fef9136069eab280806cc772dc578d3f1d8d63
https://github.com/ipinfo/python/blob/62fef9136069eab280806cc772dc578d3f1d8d63/ipinfo/handler.py#L86-L93
train
203,408
captin411/ofxclient
ofxclient/config.py
SecurableConfigParser.is_secure_option
def is_secure_option(self, section, option): """Test an option to see if it is secured or not. :param section: section id :type section: string :param option: option name :type option: string :rtype: boolean otherwise. """ if not self.has_section(section): return False if not self.has_option(section, option): return False if ConfigParser.get(self, section, option) == self._secure_placeholder: return True return False
python
def is_secure_option(self, section, option):
    """Return whether ``option`` in ``section`` is stored securely.

    :param section: section id
    :type section: string
    :param option: option name
    :type option: string
    :rtype: boolean otherwise.
    """
    # Unknown sections/options are never secure.
    if not self.has_section(section) or not self.has_option(section, option):
        return False
    stored = ConfigParser.get(self, section, option)
    return stored == self._secure_placeholder
[ "def", "is_secure_option", "(", "self", ",", "section", ",", "option", ")", ":", "if", "not", "self", ".", "has_section", "(", "section", ")", ":", "return", "False", "if", "not", "self", ".", "has_option", "(", "section", ",", "option", ")", ":", "ret...
Test an option to see if it is secured or not. :param section: section id :type section: string :param option: option name :type option: string :rtype: boolean otherwise.
[ "Test", "an", "option", "to", "see", "if", "it", "is", "secured", "or", "not", "." ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L70-L86
train
203,409
captin411/ofxclient
ofxclient/config.py
SecurableConfigParser.items
def items(self, section): """Get all items for a section. Subclassed, to ensure secure items come back with the unencrypted data. :param section: section id :type section: string """ items = [] for k, v in ConfigParser.items(self, section): if self.is_secure_option(section, k): v = self.get(section, k) if v == '!!False!!': v = False items.append((k, v)) return items
python
def items(self, section):
    """Get all items for a section.

    Subclassed, to ensure secure items come back with the unencrypted
    data.

    :param section: section id
    :type section: string
    """
    result = []
    for key, value in ConfigParser.items(self, section):
        if self.is_secure_option(section, key):
            # Replace the placeholder with the decrypted value.
            value = self.get(section, key)
        result.append((key, False) if value == '!!False!!' else (key, value))
    return result
[ "def", "items", "(", "self", ",", "section", ")", ":", "items", "=", "[", "]", "for", "k", ",", "v", "in", "ConfigParser", ".", "items", "(", "self", ",", "section", ")", ":", "if", "self", ".", "is_secure_option", "(", "section", ",", "k", ")", ...
Get all items for a section. Subclassed, to ensure secure items come back with the unencrypted data. :param section: section id :type section: string
[ "Get", "all", "items", "for", "a", "section", ".", "Subclassed", "to", "ensure", "secure", "items", "come", "back", "with", "the", "unencrypted", "data", "." ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L92-L106
train
203,410
captin411/ofxclient
ofxclient/config.py
SecurableConfigParser.set
def set(self, section, option, value): """Set an option value. Knows how to set options properly marked as secure.""" if not value: value = '!!False!!' if self.is_secure_option(section, option): self.set_secure(section, option, value) else: ConfigParser.set(self, section, option, value)
python
def set(self, section, option, value):
    """Set an option value, routing secured options to set_secure.

    Falsy values are stored via the '!!False!!' sentinel.
    """
    stored = value if value else '!!False!!'
    if self.is_secure_option(section, option):
        self.set_secure(section, option, stored)
    else:
        ConfigParser.set(self, section, option, stored)
[ "def", "set", "(", "self", ",", "section", ",", "option", ",", "value", ")", ":", "if", "not", "value", ":", "value", "=", "'!!False!!'", "if", "self", ".", "is_secure_option", "(", "section", ",", "option", ")", ":", "self", ".", "set_secure", "(", ...
Set an option value. Knows how to set options properly marked as secure.
[ "Set", "an", "option", "value", ".", "Knows", "how", "to", "set", "options", "properly", "marked", "as", "secure", "." ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L118-L126
train
203,411
captin411/ofxclient
ofxclient/config.py
SecurableConfigParser.set_secure
def set_secure(self, section, option, value): """Set an option and mark it as secure. Any subsequent uses of 'set' or 'get' will also now know that this option is secure as well. """ if self.keyring_available: s_option = "%s%s" % (section, option) self._unsaved[s_option] = ('set', value) value = self._secure_placeholder ConfigParser.set(self, section, option, value)
python
def set_secure(self, section, option, value):
    """Set an option and mark it as secure.

    Any subsequent uses of 'set' or 'get' will also now know that
    this option is secure as well.
    """
    if self.keyring_available:
        # Stash the real value for the keyring at save time and write
        # only a placeholder into the on-disk config.
        self._unsaved["%s%s" % (section, option)] = ('set', value)
        value = self._secure_placeholder
    ConfigParser.set(self, section, option, value)
[ "def", "set_secure", "(", "self", ",", "section", ",", "option", ",", "value", ")", ":", "if", "self", ".", "keyring_available", ":", "s_option", "=", "\"%s%s\"", "%", "(", "section", ",", "option", ")", "self", ".", "_unsaved", "[", "s_option", "]", "...
Set an option and mark it as secure. Any subsequent uses of 'set' or 'get' will also now know that this option is secure as well.
[ "Set", "an", "option", "and", "mark", "it", "as", "secure", "." ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L128-L138
train
203,412
captin411/ofxclient
ofxclient/config.py
SecurableConfigParser.get
def get(self, section, option, *args): """Get option value from section. If an option is secure, populates the plain text.""" if self.is_secure_option(section, option) and self.keyring_available: s_option = "%s%s" % (section, option) if self._unsaved.get(s_option, [''])[0] == 'set': res = self._unsaved[s_option][1] else: res = keyring.get_password(self.keyring_name, s_option) else: res = ConfigParser.get(self, section, option, *args) if res == '!!False!!': return False return res
python
def get(self, section, option, *args):
    """Get option value from section.

    If an option is secure, populates the plain text.
    """
    if self.is_secure_option(section, option) and self.keyring_available:
        s_option = "%s%s" % (section, option)
        # Prefer a pending (unsaved) value over the keyring contents.
        pending = self._unsaved.get(s_option, [''])
        if pending[0] == 'set':
            res = pending[1]
        else:
            res = keyring.get_password(self.keyring_name, s_option)
    else:
        res = ConfigParser.get(self, section, option, *args)
    return False if res == '!!False!!' else res
[ "def", "get", "(", "self", ",", "section", ",", "option", ",", "*", "args", ")", ":", "if", "self", ".", "is_secure_option", "(", "section", ",", "option", ")", "and", "self", ".", "keyring_available", ":", "s_option", "=", "\"%s%s\"", "%", "(", "secti...
Get option value from section. If an option is secure, populates the plain text.
[ "Get", "option", "value", "from", "section", ".", "If", "an", "option", "is", "secure", "populates", "the", "plain", "text", "." ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L140-L153
train
203,413
captin411/ofxclient
ofxclient/config.py
SecurableConfigParser.remove_option
def remove_option(self, section, option): """Removes the option from ConfigParser as well as the secure storage backend """ if self.is_secure_option(section, option) and self.keyring_available: s_option = "%s%s" % (section, option) self._unsaved[s_option] = ('delete', None) ConfigParser.remove_option(self, section, option)
python
def remove_option(self, section, option):
    """Removes the option from ConfigParser as well as
    the secure storage backend
    """
    if self.is_secure_option(section, option) and self.keyring_available:
        # Defer the keyring deletion until save time.
        self._unsaved["%s%s" % (section, option)] = ('delete', None)
    ConfigParser.remove_option(self, section, option)
[ "def", "remove_option", "(", "self", ",", "section", ",", "option", ")", ":", "if", "self", ".", "is_secure_option", "(", "section", ",", "option", ")", "and", "self", ".", "keyring_available", ":", "s_option", "=", "\"%s%s\"", "%", "(", "section", ",", ...
Removes the option from ConfigParser as well as the secure storage backend
[ "Removes", "the", "option", "from", "ConfigParser", "as", "well", "as", "the", "secure", "storage", "backend" ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L155-L162
train
203,414
captin411/ofxclient
ofxclient/config.py
OfxConfig.encrypt_account
def encrypt_account(self, id): """Make sure that certain fields are encrypted.""" for key in self.secured_field_names: value = self.parser.get(id, key) self.parser.set_secure(id, key, value) return self
python
def encrypt_account(self, id):
    """Make sure that certain fields are encrypted."""
    for field in self.secured_field_names:
        self.parser.set_secure(id, field, self.parser.get(id, field))
    return self
[ "def", "encrypt_account", "(", "self", ",", "id", ")", ":", "for", "key", "in", "self", ".", "secured_field_names", ":", "value", "=", "self", ".", "parser", ".", "get", "(", "id", ",", "key", ")", "self", ".", "parser", ".", "set_secure", "(", "id",...
Make sure that certain fields are encrypted.
[ "Make", "sure", "that", "certain", "fields", "are", "encrypted", "." ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L258-L263
train
203,415
captin411/ofxclient
ofxclient/config.py
OfxConfig.is_encrypted_account
def is_encrypted_account(self, id): """Are all fields for the account id encrypted?""" for key in self.secured_field_names: if not self.parser.is_secure_option(id, key): return False return True
python
def is_encrypted_account(self, id):
    """Are all fields for the account id encrypted?"""
    return all(self.parser.is_secure_option(id, field)
               for field in self.secured_field_names)
[ "def", "is_encrypted_account", "(", "self", ",", "id", ")", ":", "for", "key", "in", "self", ".", "secured_field_names", ":", "if", "not", "self", ".", "parser", ".", "is_secure_option", "(", "id", ",", "key", ")", ":", "return", "False", "return", "True...
Are all fields for the account id encrypted?
[ "Are", "all", "fields", "for", "the", "account", "id", "encrypted?" ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L265-L270
train
203,416
captin411/ofxclient
ofxclient/config.py
OfxConfig.save
def save(self): """Save changes to config file""" with open(self.file_name, 'w') as fp: self.parser.write(fp) return self
python
def save(self):
    """Write the current configuration out to ``self.file_name``."""
    with open(self.file_name, 'w') as config_file:
        self.parser.write(config_file)
    return self
[ "def", "save", "(", "self", ")", ":", "with", "open", "(", "self", ".", "file_name", ",", "'w'", ")", "as", "fp", ":", "self", ".", "parser", ".", "write", "(", "fp", ")", "return", "self" ]
Save changes to config file
[ "Save", "changes", "to", "config", "file" ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/config.py#L279-L283
train
203,417
captin411/ofxclient
ofxclient/institution.py
Institution.authenticate
def authenticate(self, username=None, password=None): """Test the authentication credentials Raises a ``ValueError`` if there is a problem authenticating with the human readable reason given by the institution. :param username: optional username (use self.username by default) :type username: string or None :param password: optional password (use self.password by default) :type password: string or None """ u = self.username p = self.password if username and password: u = username p = password client = self.client() query = client.authenticated_query(username=u, password=p) res = client.post(query) ofx = BeautifulSoup(res, 'lxml') sonrs = ofx.find('sonrs') code = int(sonrs.find('code').contents[0].strip()) try: status = sonrs.find('message').contents[0].strip() except Exception: status = '' if code == 0: return 1 raise ValueError(status)
python
def authenticate(self, username=None, password=None):
    """Test the authentication credentials

    Raises a ``ValueError`` if there is a problem authenticating
    with the human readable reason given by the institution.

    :param username: optional username (use self.username by default)
    :type username: string or None
    :param password: optional password (use self.password by default)
    :type password: string or None
    """
    # Only override the stored credentials when BOTH are supplied.
    if username and password:
        user, pwd = username, password
    else:
        user, pwd = self.username, self.password
    client = self.client()
    response = client.post(client.authenticated_query(username=user, password=pwd))
    sonrs = BeautifulSoup(response, 'lxml').find('sonrs')
    code = int(sonrs.find('code').contents[0].strip())
    try:
        status = sonrs.find('message').contents[0].strip()
    except Exception:
        # Some institutions omit the <message> element entirely.
        status = ''
    if code == 0:
        return 1
    raise ValueError(status)
[ "def", "authenticate", "(", "self", ",", "username", "=", "None", ",", "password", "=", "None", ")", ":", "u", "=", "self", ".", "username", "p", "=", "self", ".", "password", "if", "username", "and", "password", ":", "u", "=", "username", "p", "=", ...
Test the authentication credentials Raises a ``ValueError`` if there is a problem authenticating with the human readable reason given by the institution. :param username: optional username (use self.username by default) :type username: string or None :param password: optional password (use self.password by default) :type password: string or None
[ "Test", "the", "authentication", "credentials" ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/institution.py#L98-L132
train
203,418
captin411/ofxclient
ofxclient/account.py
Account.download
def download(self, days=60): """Downloaded OFX response for the given time range :param days: Number of days to look back at :type days: integer :rtype: :py:class:`StringIO` """ days_ago = datetime.datetime.now() - datetime.timedelta(days=days) as_of = time.strftime("%Y%m%d", days_ago.timetuple()) query = self._download_query(as_of=as_of) response = self.institution.client().post(query) return StringIO(response)
python
def download(self, days=60):
    """Downloaded OFX response for the given time range

    :param days: Number of days to look back at
    :type days: integer
    :rtype: :py:class:`StringIO`
    """
    lookback = datetime.datetime.now() - datetime.timedelta(days=days)
    as_of = time.strftime("%Y%m%d", lookback.timetuple())
    query = self._download_query(as_of=as_of)
    payload = self.institution.client().post(query)
    return StringIO(payload)
[ "def", "download", "(", "self", ",", "days", "=", "60", ")", ":", "days_ago", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "-", "datetime", ".", "timedelta", "(", "days", "=", "days", ")", "as_of", "=", "time", ".", "strftime", "(", "\"%...
Downloaded OFX response for the given time range :param days: Number of days to look back at :type days: integer :rtype: :py:class:`StringIO`
[ "Downloaded", "OFX", "response", "for", "the", "given", "time", "range" ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/account.py#L93-L105
train
203,419
captin411/ofxclient
ofxclient/util.py
combined_download
def combined_download(accounts, days=60): """Download OFX files and combine them into one It expects an 'accounts' list of ofxclient.Account objects as well as an optional 'days' specifier which defaults to 60 """ client = Client(institution=None) out_file = StringIO() out_file.write(client.header()) out_file.write('<OFX>') for a in accounts: ofx = a.download(days=days).read() stripped = ofx.partition('<OFX>')[2].partition('</OFX>')[0] out_file.write(stripped) out_file.write("</OFX>") out_file.seek(0) return out_file
python
def combined_download(accounts, days=60): """Download OFX files and combine them into one It expects an 'accounts' list of ofxclient.Account objects as well as an optional 'days' specifier which defaults to 60 """ client = Client(institution=None) out_file = StringIO() out_file.write(client.header()) out_file.write('<OFX>') for a in accounts: ofx = a.download(days=days).read() stripped = ofx.partition('<OFX>')[2].partition('</OFX>')[0] out_file.write(stripped) out_file.write("</OFX>") out_file.seek(0) return out_file
[ "def", "combined_download", "(", "accounts", ",", "days", "=", "60", ")", ":", "client", "=", "Client", "(", "institution", "=", "None", ")", "out_file", "=", "StringIO", "(", ")", "out_file", ".", "write", "(", "client", ".", "header", "(", ")", ")", ...
Download OFX files and combine them into one It expects an 'accounts' list of ofxclient.Account objects as well as an optional 'days' specifier which defaults to 60
[ "Download", "OFX", "files", "and", "combine", "them", "into", "one" ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/util.py#L13-L33
train
203,420
captin411/ofxclient
ofxclient/client.py
Client.bank_account_query
def bank_account_query(self, number, date, account_type, bank_id): """Bank account statement request""" return self.authenticated_query( self._bareq(number, date, account_type, bank_id) )
python
def bank_account_query(self, number, date, account_type, bank_id): """Bank account statement request""" return self.authenticated_query( self._bareq(number, date, account_type, bank_id) )
[ "def", "bank_account_query", "(", "self", ",", "number", ",", "date", ",", "account_type", ",", "bank_id", ")", ":", "return", "self", ".", "authenticated_query", "(", "self", ".", "_bareq", "(", "number", ",", "date", ",", "account_type", ",", "bank_id", ...
Bank account statement request
[ "Bank", "account", "statement", "request" ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/client.py#L111-L115
train
203,421
captin411/ofxclient
ofxclient/client.py
Client.credit_card_account_query
def credit_card_account_query(self, number, date): """CC Statement request""" return self.authenticated_query(self._ccreq(number, date))
python
def credit_card_account_query(self, number, date): """CC Statement request""" return self.authenticated_query(self._ccreq(number, date))
[ "def", "credit_card_account_query", "(", "self", ",", "number", ",", "date", ")", ":", "return", "self", ".", "authenticated_query", "(", "self", ".", "_ccreq", "(", "number", ",", "date", ")", ")" ]
CC Statement request
[ "CC", "Statement", "request" ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/client.py#L117-L119
train
203,422
captin411/ofxclient
ofxclient/client.py
Client._do_post
def _do_post(self, query, extra_headers=[]): """ Do a POST to the Institution. :param query: Body content to POST (OFX Query) :type query: str :param extra_headers: Extra headers to send with the request, as a list of (Name, Value) header 2-tuples. :type extra_headers: list :return: 2-tuple of (HTTPResponse, str response body) :rtype: tuple """ i = self.institution logging.debug('posting data to %s' % i.url) garbage, path = splittype(i.url) host, selector = splithost(path) h = HTTPSConnection(host, timeout=60) # Discover requires a particular ordering of headers, so send the # request step by step. h.putrequest('POST', selector, skip_host=True, skip_accept_encoding=True) headers = [ ('Content-Type', 'application/x-ofx'), ('Host', host), ('Content-Length', len(query)), ('Connection', 'Keep-Alive') ] if self.accept: headers.append(('Accept', self.accept)) if self.user_agent: headers.append(('User-Agent', self.user_agent)) for ehname, ehval in extra_headers: headers.append((ehname, ehval)) logging.debug('---- request headers ----') for hname, hval in headers: logging.debug('%s: %s', hname, hval) h.putheader(hname, hval) logging.debug('---- request body (query) ----') logging.debug(query) h.endheaders(query.encode()) res = h.getresponse() response = res.read().decode('ascii', 'ignore') logging.debug('---- response ----') logging.debug(res.__dict__) logging.debug('Headers: %s', res.getheaders()) logging.debug(response) res.close() return res, response
python
def _do_post(self, query, extra_headers=[]): """ Do a POST to the Institution. :param query: Body content to POST (OFX Query) :type query: str :param extra_headers: Extra headers to send with the request, as a list of (Name, Value) header 2-tuples. :type extra_headers: list :return: 2-tuple of (HTTPResponse, str response body) :rtype: tuple """ i = self.institution logging.debug('posting data to %s' % i.url) garbage, path = splittype(i.url) host, selector = splithost(path) h = HTTPSConnection(host, timeout=60) # Discover requires a particular ordering of headers, so send the # request step by step. h.putrequest('POST', selector, skip_host=True, skip_accept_encoding=True) headers = [ ('Content-Type', 'application/x-ofx'), ('Host', host), ('Content-Length', len(query)), ('Connection', 'Keep-Alive') ] if self.accept: headers.append(('Accept', self.accept)) if self.user_agent: headers.append(('User-Agent', self.user_agent)) for ehname, ehval in extra_headers: headers.append((ehname, ehval)) logging.debug('---- request headers ----') for hname, hval in headers: logging.debug('%s: %s', hname, hval) h.putheader(hname, hval) logging.debug('---- request body (query) ----') logging.debug(query) h.endheaders(query.encode()) res = h.getresponse() response = res.read().decode('ascii', 'ignore') logging.debug('---- response ----') logging.debug(res.__dict__) logging.debug('Headers: %s', res.getheaders()) logging.debug(response) res.close() return res, response
[ "def", "_do_post", "(", "self", ",", "query", ",", "extra_headers", "=", "[", "]", ")", ":", "i", "=", "self", ".", "institution", "logging", ".", "debug", "(", "'posting data to %s'", "%", "i", ".", "url", ")", "garbage", ",", "path", "=", "splittype"...
Do a POST to the Institution. :param query: Body content to POST (OFX Query) :type query: str :param extra_headers: Extra headers to send with the request, as a list of (Name, Value) header 2-tuples. :type extra_headers: list :return: 2-tuple of (HTTPResponse, str response body) :rtype: tuple
[ "Do", "a", "POST", "to", "the", "Institution", "." ]
4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e
https://github.com/captin411/ofxclient/blob/4da2719f0ecbbf5eee62fb82c1b3b34ec955ee5e/ofxclient/client.py#L141-L188
train
203,423
pnuckowski/aioresponses
aioresponses/core.py
RequestMatch._build_raw_headers
def _build_raw_headers(self, headers: Dict) -> Tuple: """ Convert a dict of headers to a tuple of tuples Mimics the format of ClientResponse. """ raw_headers = [] for k, v in headers.items(): raw_headers.append((k.encode('utf8'), v.encode('utf8'))) return tuple(raw_headers)
python
def _build_raw_headers(self, headers: Dict) -> Tuple: """ Convert a dict of headers to a tuple of tuples Mimics the format of ClientResponse. """ raw_headers = [] for k, v in headers.items(): raw_headers.append((k.encode('utf8'), v.encode('utf8'))) return tuple(raw_headers)
[ "def", "_build_raw_headers", "(", "self", ",", "headers", ":", "Dict", ")", "->", "Tuple", ":", "raw_headers", "=", "[", "]", "for", "k", ",", "v", "in", "headers", ".", "items", "(", ")", ":", "raw_headers", ".", "append", "(", "(", "k", ".", "enc...
Convert a dict of headers to a tuple of tuples Mimics the format of ClientResponse.
[ "Convert", "a", "dict", "of", "headers", "to", "a", "tuple", "of", "tuples" ]
566461a21a25757e313e0d4afaf338d53d66db03
https://github.com/pnuckowski/aioresponses/blob/566461a21a25757e313e0d4afaf338d53d66db03/aioresponses/core.py#L102-L111
train
203,424
pnuckowski/aioresponses
aioresponses/core.py
aioresponses._request_mock
async def _request_mock(self, orig_self: ClientSession, method: str, url: 'Union[URL, str]', *args: Tuple, **kwargs: Dict) -> 'ClientResponse': """Return mocked response object or raise connection error.""" url = normalize_url(merge_params(url, kwargs.get('params'))) url_str = str(url) for prefix in self._passthrough: if url_str.startswith(prefix): return (await self.patcher.temp_original( orig_self, method, url, *args, **kwargs )) response = await self.match(method, url, **kwargs) if response is None: raise ClientConnectionError( 'Connection refused: {} {}'.format(method, url) ) self._responses.append(response) key = (method, url) self.requests.setdefault(key, []) self.requests[key].append(RequestCall(args, kwargs)) return response
python
async def _request_mock(self, orig_self: ClientSession, method: str, url: 'Union[URL, str]', *args: Tuple, **kwargs: Dict) -> 'ClientResponse': """Return mocked response object or raise connection error.""" url = normalize_url(merge_params(url, kwargs.get('params'))) url_str = str(url) for prefix in self._passthrough: if url_str.startswith(prefix): return (await self.patcher.temp_original( orig_self, method, url, *args, **kwargs )) response = await self.match(method, url, **kwargs) if response is None: raise ClientConnectionError( 'Connection refused: {} {}'.format(method, url) ) self._responses.append(response) key = (method, url) self.requests.setdefault(key, []) self.requests[key].append(RequestCall(args, kwargs)) return response
[ "async", "def", "_request_mock", "(", "self", ",", "orig_self", ":", "ClientSession", ",", "method", ":", "str", ",", "url", ":", "'Union[URL, str]'", ",", "*", "args", ":", "Tuple", ",", "*", "*", "kwargs", ":", "Dict", ")", "->", "'ClientResponse'", ":...
Return mocked response object or raise connection error.
[ "Return", "mocked", "response", "object", "or", "raise", "connection", "error", "." ]
566461a21a25757e313e0d4afaf338d53d66db03
https://github.com/pnuckowski/aioresponses/blob/566461a21a25757e313e0d4afaf338d53d66db03/aioresponses/core.py#L312-L334
train
203,425
pnuckowski/aioresponses
aioresponses/compat.py
normalize_url
def normalize_url(url: 'Union[URL, str]') -> 'URL': """Normalize url to make comparisons.""" url = URL(url) return url.with_query(urlencode(sorted(parse_qsl(url.query_string))))
python
def normalize_url(url: 'Union[URL, str]') -> 'URL': """Normalize url to make comparisons.""" url = URL(url) return url.with_query(urlencode(sorted(parse_qsl(url.query_string))))
[ "def", "normalize_url", "(", "url", ":", "'Union[URL, str]'", ")", "->", "'URL'", ":", "url", "=", "URL", "(", "url", ")", "return", "url", ".", "with_query", "(", "urlencode", "(", "sorted", "(", "parse_qsl", "(", "url", ".", "query_string", ")", ")", ...
Normalize url to make comparisons.
[ "Normalize", "url", "to", "make", "comparisons", "." ]
566461a21a25757e313e0d4afaf338d53d66db03
https://github.com/pnuckowski/aioresponses/blob/566461a21a25757e313e0d4afaf338d53d66db03/aioresponses/compat.py#L44-L47
train
203,426
TheHive-Project/TheHive4py
thehive4py/api.py
TheHiveApi.promote_alert_to_case
def promote_alert_to_case(self, alert_id): """ This uses the TheHiveAPI to promote an alert to a case :param alert_id: Alert identifier :return: TheHive Case :rtype: json """ req = self.url + "/api/alert/{}/createCase".format(alert_id) try: return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert, data=json.dumps({})) except requests.exceptions.RequestException as the_exception: raise AlertException("Couldn't promote alert to case: {}".format(the_exception)) return None
python
def promote_alert_to_case(self, alert_id): """ This uses the TheHiveAPI to promote an alert to a case :param alert_id: Alert identifier :return: TheHive Case :rtype: json """ req = self.url + "/api/alert/{}/createCase".format(alert_id) try: return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert, data=json.dumps({})) except requests.exceptions.RequestException as the_exception: raise AlertException("Couldn't promote alert to case: {}".format(the_exception)) return None
[ "def", "promote_alert_to_case", "(", "self", ",", "alert_id", ")", ":", "req", "=", "self", ".", "url", "+", "\"/api/alert/{}/createCase\"", ".", "format", "(", "alert_id", ")", "try", ":", "return", "requests", ".", "post", "(", "req", ",", "headers", "="...
This uses the TheHiveAPI to promote an alert to a case :param alert_id: Alert identifier :return: TheHive Case :rtype: json
[ "This", "uses", "the", "TheHiveAPI", "to", "promote", "an", "alert", "to", "a", "case" ]
35762bbd50d8376943268464326b59c752d6241b
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L451-L470
train
203,427
bigchaindb/bigchaindb-driver
bigchaindb_driver/driver.py
TransactionsEndpoint.prepare
def prepare(*, operation='CREATE', signers=None, recipients=None, asset=None, metadata=None, inputs=None): """Prepares a transaction payload, ready to be fulfilled. Args: operation (str): The operation to perform. Must be ``'CREATE'`` or ``'TRANSFER'``. Case insensitive. Defaults to ``'CREATE'``. signers (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the issuer(s) of the asset being created. Only applies for ``'CREATE'`` operations. Defaults to ``None``. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created or transferred. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created or transferred. MUST be supplied for ``'TRANSFER'`` operations. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`, optional): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. Only applies to, and MUST be supplied for, ``'TRANSFER'`` operations. Returns: dict: The prepared transaction. Raises: :class:`~.exceptions.BigchaindbException`: If ``operation`` is not ``'CREATE'`` or ``'TRANSFER'``. .. important:: **CREATE operations** * ``signers`` MUST be set. * ``recipients``, ``asset``, and ``metadata`` MAY be set. * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * The argument ``inputs`` is ignored. * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers **TRANSFER operations** * ``recipients``, ``asset``, and ``inputs`` MUST be set. * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } * ``metadata`` MAY be set. * The argument ``signers`` is ignored. 
""" return prepare_transaction( operation=operation, signers=signers, recipients=recipients, asset=asset, metadata=metadata, inputs=inputs, )
python
def prepare(*, operation='CREATE', signers=None, recipients=None, asset=None, metadata=None, inputs=None): """Prepares a transaction payload, ready to be fulfilled. Args: operation (str): The operation to perform. Must be ``'CREATE'`` or ``'TRANSFER'``. Case insensitive. Defaults to ``'CREATE'``. signers (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the issuer(s) of the asset being created. Only applies for ``'CREATE'`` operations. Defaults to ``None``. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created or transferred. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created or transferred. MUST be supplied for ``'TRANSFER'`` operations. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`, optional): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. Only applies to, and MUST be supplied for, ``'TRANSFER'`` operations. Returns: dict: The prepared transaction. Raises: :class:`~.exceptions.BigchaindbException`: If ``operation`` is not ``'CREATE'`` or ``'TRANSFER'``. .. important:: **CREATE operations** * ``signers`` MUST be set. * ``recipients``, ``asset``, and ``metadata`` MAY be set. * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * The argument ``inputs`` is ignored. * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers **TRANSFER operations** * ``recipients``, ``asset``, and ``inputs`` MUST be set. * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } * ``metadata`` MAY be set. * The argument ``signers`` is ignored. 
""" return prepare_transaction( operation=operation, signers=signers, recipients=recipients, asset=asset, metadata=metadata, inputs=inputs, )
[ "def", "prepare", "(", "*", ",", "operation", "=", "'CREATE'", ",", "signers", "=", "None", ",", "recipients", "=", "None", ",", "asset", "=", "None", ",", "metadata", "=", "None", ",", "inputs", "=", "None", ")", ":", "return", "prepare_transaction", ...
Prepares a transaction payload, ready to be fulfilled. Args: operation (str): The operation to perform. Must be ``'CREATE'`` or ``'TRANSFER'``. Case insensitive. Defaults to ``'CREATE'``. signers (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the issuer(s) of the asset being created. Only applies for ``'CREATE'`` operations. Defaults to ``None``. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created or transferred. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created or transferred. MUST be supplied for ``'TRANSFER'`` operations. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`, optional): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. Only applies to, and MUST be supplied for, ``'TRANSFER'`` operations. Returns: dict: The prepared transaction. Raises: :class:`~.exceptions.BigchaindbException`: If ``operation`` is not ``'CREATE'`` or ``'TRANSFER'``. .. important:: **CREATE operations** * ``signers`` MUST be set. * ``recipients``, ``asset``, and ``metadata`` MAY be set. * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * The argument ``inputs`` is ignored. * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers **TRANSFER operations** * ``recipients``, ``asset``, and ``inputs`` MUST be set. * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } * ``metadata`` MAY be set. * The argument ``signers`` is ignored.
[ "Prepares", "a", "transaction", "payload", "ready", "to", "be", "fulfilled", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/driver.py#L181-L255
train
203,428
bigchaindb/bigchaindb-driver
bigchaindb_driver/driver.py
TransactionsEndpoint.send_async
def send_async(self, transaction, headers=None): """Submit a transaction to the Federation with the mode `async`. Args: transaction (dict): the transaction to be sent to the Federation node(s). headers (dict): Optional headers to pass to the request. Returns: dict: The transaction sent to the Federation node(s). """ return self.transport.forward_request( method='POST', path=self.path, json=transaction, params={'mode': 'async'}, headers=headers)
python
def send_async(self, transaction, headers=None): """Submit a transaction to the Federation with the mode `async`. Args: transaction (dict): the transaction to be sent to the Federation node(s). headers (dict): Optional headers to pass to the request. Returns: dict: The transaction sent to the Federation node(s). """ return self.transport.forward_request( method='POST', path=self.path, json=transaction, params={'mode': 'async'}, headers=headers)
[ "def", "send_async", "(", "self", ",", "transaction", ",", "headers", "=", "None", ")", ":", "return", "self", ".", "transport", ".", "forward_request", "(", "method", "=", "'POST'", ",", "path", "=", "self", ".", "path", ",", "json", "=", "transaction",...
Submit a transaction to the Federation with the mode `async`. Args: transaction (dict): the transaction to be sent to the Federation node(s). headers (dict): Optional headers to pass to the request. Returns: dict: The transaction sent to the Federation node(s).
[ "Submit", "a", "transaction", "to", "the", "Federation", "with", "the", "mode", "async", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/driver.py#L320-L337
train
203,429
bigchaindb/bigchaindb-driver
bigchaindb_driver/driver.py
TransactionsEndpoint.retrieve
def retrieve(self, txid, headers=None): """Retrieves the transaction with the given id. Args: txid (str): Id of the transaction to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The transaction with the given id. """ path = self.path + txid return self.transport.forward_request( method='GET', path=path, headers=None)
python
def retrieve(self, txid, headers=None): """Retrieves the transaction with the given id. Args: txid (str): Id of the transaction to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The transaction with the given id. """ path = self.path + txid return self.transport.forward_request( method='GET', path=path, headers=None)
[ "def", "retrieve", "(", "self", ",", "txid", ",", "headers", "=", "None", ")", ":", "path", "=", "self", ".", "path", "+", "txid", "return", "self", ".", "transport", ".", "forward_request", "(", "method", "=", "'GET'", ",", "path", "=", "path", ",",...
Retrieves the transaction with the given id. Args: txid (str): Id of the transaction to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The transaction with the given id.
[ "Retrieves", "the", "transaction", "with", "the", "given", "id", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/driver.py#L377-L390
train
203,430
bigchaindb/bigchaindb-driver
bigchaindb_driver/driver.py
OutputsEndpoint.get
def get(self, public_key, spent=None, headers=None): """Get transaction outputs by public key. The public_key parameter must be a base58 encoded ed25519 public key associated with transaction output ownership. Args: public_key (str): Public key for which unfulfilled conditions are sought. spent (bool): Indicate if the result set should include only spent or only unspent outputs. If not specified (``None``) the result includes all the outputs (both spent and unspent) associated with the public key. headers (dict): Optional headers to pass to the request. Returns: :obj:`list` of :obj:`str`: List of unfulfilled conditions. Example: Given a transaction with `id` ``da1b64a907ba54`` having an `ed25519` condition (at index ``0``) with alice's public key:: >>> bdb = BigchainDB() >>> bdb.outputs.get(alice_pubkey) ... ['../transactions/da1b64a907ba54/conditions/0'] """ return self.transport.forward_request( method='GET', path=self.path, params={'public_key': public_key, 'spent': spent}, headers=headers, )
python
def get(self, public_key, spent=None, headers=None): """Get transaction outputs by public key. The public_key parameter must be a base58 encoded ed25519 public key associated with transaction output ownership. Args: public_key (str): Public key for which unfulfilled conditions are sought. spent (bool): Indicate if the result set should include only spent or only unspent outputs. If not specified (``None``) the result includes all the outputs (both spent and unspent) associated with the public key. headers (dict): Optional headers to pass to the request. Returns: :obj:`list` of :obj:`str`: List of unfulfilled conditions. Example: Given a transaction with `id` ``da1b64a907ba54`` having an `ed25519` condition (at index ``0``) with alice's public key:: >>> bdb = BigchainDB() >>> bdb.outputs.get(alice_pubkey) ... ['../transactions/da1b64a907ba54/conditions/0'] """ return self.transport.forward_request( method='GET', path=self.path, params={'public_key': public_key, 'spent': spent}, headers=headers, )
[ "def", "get", "(", "self", ",", "public_key", ",", "spent", "=", "None", ",", "headers", "=", "None", ")", ":", "return", "self", ".", "transport", ".", "forward_request", "(", "method", "=", "'GET'", ",", "path", "=", "self", ".", "path", ",", "para...
Get transaction outputs by public key. The public_key parameter must be a base58 encoded ed25519 public key associated with transaction output ownership. Args: public_key (str): Public key for which unfulfilled conditions are sought. spent (bool): Indicate if the result set should include only spent or only unspent outputs. If not specified (``None``) the result includes all the outputs (both spent and unspent) associated with the public key. headers (dict): Optional headers to pass to the request. Returns: :obj:`list` of :obj:`str`: List of unfulfilled conditions. Example: Given a transaction with `id` ``da1b64a907ba54`` having an `ed25519` condition (at index ``0``) with alice's public key:: >>> bdb = BigchainDB() >>> bdb.outputs.get(alice_pubkey) ... ['../transactions/da1b64a907ba54/conditions/0']
[ "Get", "transaction", "outputs", "by", "public", "key", ".", "The", "public_key", "parameter", "must", "be", "a", "base58", "encoded", "ed25519", "public", "key", "associated", "with", "transaction", "output", "ownership", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/driver.py#L403-L435
train
203,431
bigchaindb/bigchaindb-driver
bigchaindb_driver/driver.py
BlocksEndpoint.retrieve
def retrieve(self, block_height, headers=None): """Retrieves the block with the given ``block_height``. Args: block_height (str): height of the block to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The block with the given ``block_height``. """ path = self.path + block_height return self.transport.forward_request( method='GET', path=path, headers=None)
python
def retrieve(self, block_height, headers=None): """Retrieves the block with the given ``block_height``. Args: block_height (str): height of the block to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The block with the given ``block_height``. """ path = self.path + block_height return self.transport.forward_request( method='GET', path=path, headers=None)
[ "def", "retrieve", "(", "self", ",", "block_height", ",", "headers", "=", "None", ")", ":", "path", "=", "self", ".", "path", "+", "block_height", "return", "self", ".", "transport", ".", "forward_request", "(", "method", "=", "'GET'", ",", "path", "=", ...
Retrieves the block with the given ``block_height``. Args: block_height (str): height of the block to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The block with the given ``block_height``.
[ "Retrieves", "the", "block", "with", "the", "given", "block_height", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/driver.py#L468-L481
train
203,432
bigchaindb/bigchaindb-driver
bigchaindb_driver/driver.py
AssetsEndpoint.get
def get(self, *, search, limit=0, headers=None): """Retrieves the assets that match a given text search string. Args: search (str): Text search string. limit (int): Limit the number of returned documents. Defaults to zero meaning that it returns all the matching assets. headers (dict): Optional headers to pass to the request. Returns: :obj:`list` of :obj:`dict`: List of assets that match the query. """ return self.transport.forward_request( method='GET', path=self.path, params={'search': search, 'limit': limit}, headers=headers )
python
def get(self, *, search, limit=0, headers=None): """Retrieves the assets that match a given text search string. Args: search (str): Text search string. limit (int): Limit the number of returned documents. Defaults to zero meaning that it returns all the matching assets. headers (dict): Optional headers to pass to the request. Returns: :obj:`list` of :obj:`dict`: List of assets that match the query. """ return self.transport.forward_request( method='GET', path=self.path, params={'search': search, 'limit': limit}, headers=headers )
[ "def", "get", "(", "self", ",", "*", ",", "search", ",", "limit", "=", "0", ",", "headers", "=", "None", ")", ":", "return", "self", ".", "transport", ".", "forward_request", "(", "method", "=", "'GET'", ",", "path", "=", "self", ".", "path", ",", ...
Retrieves the assets that match a given text search string. Args: search (str): Text search string. limit (int): Limit the number of returned documents. Defaults to zero meaning that it returns all the matching assets. headers (dict): Optional headers to pass to the request. Returns: :obj:`list` of :obj:`dict`: List of assets that match the query.
[ "Retrieves", "the", "assets", "that", "match", "a", "given", "text", "search", "string", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/driver.py#L494-L512
train
203,433
bigchaindb/bigchaindb-driver
bigchaindb_driver/offchain.py
prepare_create_transaction
def prepare_create_transaction(*, signers, recipients=None, asset=None, metadata=None): """Prepares a ``"CREATE"`` transaction payload, ready to be fulfilled. Args: signers (:obj:`list` | :obj:`tuple` | :obj:`str`): One or more public keys representing the issuer(s) of the asset being created. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. Returns: dict: The prepared ``"CREATE"`` transaction. .. important:: * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers """ if not isinstance(signers, (list, tuple)): signers = [signers] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 elif isinstance(signers, tuple): signers = list(signers) if not recipients: recipients = [(signers, 1)] elif not isinstance(recipients, (list, tuple)): recipients = [([recipients], 1)] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 elif isinstance(recipients, tuple): recipients = [(list(recipients), 1)] transaction = Transaction.create( signers, recipients, metadata=metadata, asset=asset['data'] if asset else None, ) return transaction.to_dict()
python
def prepare_create_transaction(*, signers, recipients=None, asset=None, metadata=None): """Prepares a ``"CREATE"`` transaction payload, ready to be fulfilled. Args: signers (:obj:`list` | :obj:`tuple` | :obj:`str`): One or more public keys representing the issuer(s) of the asset being created. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. Returns: dict: The prepared ``"CREATE"`` transaction. .. important:: * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers """ if not isinstance(signers, (list, tuple)): signers = [signers] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 elif isinstance(signers, tuple): signers = list(signers) if not recipients: recipients = [(signers, 1)] elif not isinstance(recipients, (list, tuple)): recipients = [([recipients], 1)] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 elif isinstance(recipients, tuple): recipients = [(list(recipients), 1)] transaction = Transaction.create( signers, recipients, metadata=metadata, asset=asset['data'] if asset else None, ) return transaction.to_dict()
[ "def", "prepare_create_transaction", "(", "*", ",", "signers", ",", "recipients", "=", "None", ",", "asset", "=", "None", ",", "metadata", "=", "None", ")", ":", "if", "not", "isinstance", "(", "signers", ",", "(", "list", ",", "tuple", ")", ")", ":", ...
Prepares a ``"CREATE"`` transaction payload, ready to be fulfilled. Args: signers (:obj:`list` | :obj:`tuple` | :obj:`str`): One or more public keys representing the issuer(s) of the asset being created. recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional): One or more public keys representing the new recipients(s) of the asset being created. Defaults to ``None``. asset (:obj:`dict`, optional): The asset to be created. Defaults to ``None``. metadata (:obj:`dict`, optional): Metadata associated with the transaction. Defaults to ``None``. Returns: dict: The prepared ``"CREATE"`` transaction. .. important:: * If ``asset`` is set, it MUST be in the form of:: { 'data': { ... } } * If ``recipients`` is not given, or evaluates to ``False``, it will be set equal to ``signers``:: if not recipients: recipients = signers
[ "Prepares", "a", "CREATE", "transaction", "payload", "ready", "to", "be", "fulfilled", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/offchain.py#L136-L198
train
203,434
bigchaindb/bigchaindb-driver
bigchaindb_driver/offchain.py
prepare_transfer_transaction
def prepare_transfer_transaction(*, inputs, recipients, asset, metadata=None): """Prepares a ``"TRANSFER"`` transaction payload, ready to be fulfilled. Args: inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. recipients (:obj:`str` | :obj:`list` | :obj:`tuple`): One or more public keys representing the new recipients(s) of the asset being transferred. asset (:obj:`dict`): A single-key dictionary holding the ``id`` of the asset being transferred with this transaction. metadata (:obj:`dict`): Metadata associated with the transaction. Defaults to ``None``. Returns: dict: The prepared ``"TRANSFER"`` transaction. .. important:: * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } Example: .. todo:: Replace this section with docs. In case it may not be clear what an input should look like, say Alice (public key: ``'3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf'``) wishes to transfer an asset over to Bob (public key: ``'EcRawy3Y22eAUSS94vLF8BVJi62wbqbD9iSUSUNU9wAA'``). 
Let the asset creation transaction payload be denoted by ``tx``:: # noqa E501 >>> tx {'asset': {'data': {'msg': 'Hello BigchainDB!'}}, 'id': '9650055df2539223586d33d273cb8fd05bd6d485b1fef1caf7c8901a49464c87', 'inputs': [{'fulfillment': {'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'fulfills': None, 'owners_before': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']}], 'metadata': None, 'operation': 'CREATE', 'outputs': [{'amount': '1', 'condition': {'details': {'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'uri': 'ni:///sha-256;7ApQLsLLQgj5WOUipJg1txojmge68pctwFxvc3iOl54?fpt=ed25519-sha-256&cost=131072'}, 'public_keys': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']}], 'version': '2.0'} Then, the input may be constructed in this way:: output_index output = tx['transaction']['outputs'][output_index] input_ = { 'fulfillment': output['condition']['details'], 'input': { 'output_index': output_index, 'transaction_id': tx['id'], }, 'owners_before': output['owners_after'], } Displaying the input on the prompt would look like:: >>> input_ {'fulfillment': { 'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'input': {'output_index': 0, 'transaction_id': '9650055df2539223586d33d273cb8fd05bd6d485b1fef1caf7c8901a49464c87'}, 'owners_before': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']} To prepare the transfer: >>> prepare_transfer_transaction( ... inputs=input_, ... recipients='EcRawy3Y22eAUSS94vLF8BVJi62wbqbD9iSUSUNU9wAA', ... asset=tx['transaction']['asset'], ... ) """ if not isinstance(inputs, (list, tuple)): inputs = (inputs, ) if not isinstance(recipients, (list, tuple)): recipients = [([recipients], 1)] # NOTE: Needed for the time being. 
See # https://github.com/bigchaindb/bigchaindb/issues/797 if isinstance(recipients, tuple): recipients = [(list(recipients), 1)] fulfillments = [ Input(_fulfillment_from_details(input_['fulfillment']), input_['owners_before'], fulfills=TransactionLink( txid=input_['fulfills']['transaction_id'], output=input_['fulfills']['output_index'])) for input_ in inputs ] transaction = Transaction.transfer( fulfillments, recipients, asset_id=asset['id'], metadata=metadata, ) return transaction.to_dict()
python
def prepare_transfer_transaction(*, inputs, recipients, asset, metadata=None): """Prepares a ``"TRANSFER"`` transaction payload, ready to be fulfilled. Args: inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. recipients (:obj:`str` | :obj:`list` | :obj:`tuple`): One or more public keys representing the new recipients(s) of the asset being transferred. asset (:obj:`dict`): A single-key dictionary holding the ``id`` of the asset being transferred with this transaction. metadata (:obj:`dict`): Metadata associated with the transaction. Defaults to ``None``. Returns: dict: The prepared ``"TRANSFER"`` transaction. .. important:: * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } Example: .. todo:: Replace this section with docs. In case it may not be clear what an input should look like, say Alice (public key: ``'3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf'``) wishes to transfer an asset over to Bob (public key: ``'EcRawy3Y22eAUSS94vLF8BVJi62wbqbD9iSUSUNU9wAA'``). 
Let the asset creation transaction payload be denoted by ``tx``:: # noqa E501 >>> tx {'asset': {'data': {'msg': 'Hello BigchainDB!'}}, 'id': '9650055df2539223586d33d273cb8fd05bd6d485b1fef1caf7c8901a49464c87', 'inputs': [{'fulfillment': {'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'fulfills': None, 'owners_before': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']}], 'metadata': None, 'operation': 'CREATE', 'outputs': [{'amount': '1', 'condition': {'details': {'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'uri': 'ni:///sha-256;7ApQLsLLQgj5WOUipJg1txojmge68pctwFxvc3iOl54?fpt=ed25519-sha-256&cost=131072'}, 'public_keys': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']}], 'version': '2.0'} Then, the input may be constructed in this way:: output_index output = tx['transaction']['outputs'][output_index] input_ = { 'fulfillment': output['condition']['details'], 'input': { 'output_index': output_index, 'transaction_id': tx['id'], }, 'owners_before': output['owners_after'], } Displaying the input on the prompt would look like:: >>> input_ {'fulfillment': { 'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'input': {'output_index': 0, 'transaction_id': '9650055df2539223586d33d273cb8fd05bd6d485b1fef1caf7c8901a49464c87'}, 'owners_before': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']} To prepare the transfer: >>> prepare_transfer_transaction( ... inputs=input_, ... recipients='EcRawy3Y22eAUSS94vLF8BVJi62wbqbD9iSUSUNU9wAA', ... asset=tx['transaction']['asset'], ... ) """ if not isinstance(inputs, (list, tuple)): inputs = (inputs, ) if not isinstance(recipients, (list, tuple)): recipients = [([recipients], 1)] # NOTE: Needed for the time being. 
See # https://github.com/bigchaindb/bigchaindb/issues/797 if isinstance(recipients, tuple): recipients = [(list(recipients), 1)] fulfillments = [ Input(_fulfillment_from_details(input_['fulfillment']), input_['owners_before'], fulfills=TransactionLink( txid=input_['fulfills']['transaction_id'], output=input_['fulfills']['output_index'])) for input_ in inputs ] transaction = Transaction.transfer( fulfillments, recipients, asset_id=asset['id'], metadata=metadata, ) return transaction.to_dict()
[ "def", "prepare_transfer_transaction", "(", "*", ",", "inputs", ",", "recipients", ",", "asset", ",", "metadata", "=", "None", ")", ":", "if", "not", "isinstance", "(", "inputs", ",", "(", "list", ",", "tuple", ")", ")", ":", "inputs", "=", "(", "input...
Prepares a ``"TRANSFER"`` transaction payload, ready to be fulfilled. Args: inputs (:obj:`dict` | :obj:`list` | :obj:`tuple`): One or more inputs holding the condition(s) that this transaction intends to fulfill. Each input is expected to be a :obj:`dict`. recipients (:obj:`str` | :obj:`list` | :obj:`tuple`): One or more public keys representing the new recipients(s) of the asset being transferred. asset (:obj:`dict`): A single-key dictionary holding the ``id`` of the asset being transferred with this transaction. metadata (:obj:`dict`): Metadata associated with the transaction. Defaults to ``None``. Returns: dict: The prepared ``"TRANSFER"`` transaction. .. important:: * ``asset`` MUST be in the form of:: { 'id': '<Asset ID (i.e. TX ID of its CREATE transaction)>' } Example: .. todo:: Replace this section with docs. In case it may not be clear what an input should look like, say Alice (public key: ``'3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf'``) wishes to transfer an asset over to Bob (public key: ``'EcRawy3Y22eAUSS94vLF8BVJi62wbqbD9iSUSUNU9wAA'``). 
Let the asset creation transaction payload be denoted by ``tx``:: # noqa E501 >>> tx {'asset': {'data': {'msg': 'Hello BigchainDB!'}}, 'id': '9650055df2539223586d33d273cb8fd05bd6d485b1fef1caf7c8901a49464c87', 'inputs': [{'fulfillment': {'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'fulfills': None, 'owners_before': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']}], 'metadata': None, 'operation': 'CREATE', 'outputs': [{'amount': '1', 'condition': {'details': {'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'uri': 'ni:///sha-256;7ApQLsLLQgj5WOUipJg1txojmge68pctwFxvc3iOl54?fpt=ed25519-sha-256&cost=131072'}, 'public_keys': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']}], 'version': '2.0'} Then, the input may be constructed in this way:: output_index output = tx['transaction']['outputs'][output_index] input_ = { 'fulfillment': output['condition']['details'], 'input': { 'output_index': output_index, 'transaction_id': tx['id'], }, 'owners_before': output['owners_after'], } Displaying the input on the prompt would look like:: >>> input_ {'fulfillment': { 'public_key': '3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf', 'type': 'ed25519-sha-256'}, 'input': {'output_index': 0, 'transaction_id': '9650055df2539223586d33d273cb8fd05bd6d485b1fef1caf7c8901a49464c87'}, 'owners_before': ['3Cxh1eKZk3Wp9KGBWFS7iVde465UvqUKnEqTg2MW4wNf']} To prepare the transfer: >>> prepare_transfer_transaction( ... inputs=input_, ... recipients='EcRawy3Y22eAUSS94vLF8BVJi62wbqbD9iSUSUNU9wAA', ... asset=tx['transaction']['asset'], ... )
[ "Prepares", "a", "TRANSFER", "transaction", "payload", "ready", "to", "be", "fulfilled", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/offchain.py#L201-L319
train
203,435
bigchaindb/bigchaindb-driver
bigchaindb_driver/offchain.py
fulfill_transaction
def fulfill_transaction(transaction, *, private_keys): """Fulfills the given transaction. Args: transaction (dict): The transaction to be fulfilled. private_keys (:obj:`str` | :obj:`list` | :obj:`tuple`): One or more private keys to be used for fulfilling the transaction. Returns: dict: The fulfilled transaction payload, ready to be sent to a BigchainDB federation. Raises: :exc:`~.exceptions.MissingPrivateKeyError`: If a private key is missing. """ if not isinstance(private_keys, (list, tuple)): private_keys = [private_keys] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 if isinstance(private_keys, tuple): private_keys = list(private_keys) transaction_obj = Transaction.from_dict(transaction) try: signed_transaction = transaction_obj.sign(private_keys) except KeypairMismatchException as exc: raise MissingPrivateKeyError('A private key is missing!') from exc return signed_transaction.to_dict()
python
def fulfill_transaction(transaction, *, private_keys): """Fulfills the given transaction. Args: transaction (dict): The transaction to be fulfilled. private_keys (:obj:`str` | :obj:`list` | :obj:`tuple`): One or more private keys to be used for fulfilling the transaction. Returns: dict: The fulfilled transaction payload, ready to be sent to a BigchainDB federation. Raises: :exc:`~.exceptions.MissingPrivateKeyError`: If a private key is missing. """ if not isinstance(private_keys, (list, tuple)): private_keys = [private_keys] # NOTE: Needed for the time being. See # https://github.com/bigchaindb/bigchaindb/issues/797 if isinstance(private_keys, tuple): private_keys = list(private_keys) transaction_obj = Transaction.from_dict(transaction) try: signed_transaction = transaction_obj.sign(private_keys) except KeypairMismatchException as exc: raise MissingPrivateKeyError('A private key is missing!') from exc return signed_transaction.to_dict()
[ "def", "fulfill_transaction", "(", "transaction", ",", "*", ",", "private_keys", ")", ":", "if", "not", "isinstance", "(", "private_keys", ",", "(", "list", ",", "tuple", ")", ")", ":", "private_keys", "=", "[", "private_keys", "]", "# NOTE: Needed for the tim...
Fulfills the given transaction. Args: transaction (dict): The transaction to be fulfilled. private_keys (:obj:`str` | :obj:`list` | :obj:`tuple`): One or more private keys to be used for fulfilling the transaction. Returns: dict: The fulfilled transaction payload, ready to be sent to a BigchainDB federation. Raises: :exc:`~.exceptions.MissingPrivateKeyError`: If a private key is missing.
[ "Fulfills", "the", "given", "transaction", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/offchain.py#L322-L354
train
203,436
bigchaindb/bigchaindb-driver
bigchaindb_driver/utils.py
normalize_url
def normalize_url(node): """Normalizes the given node url""" if not node: node = DEFAULT_NODE elif '://' not in node: node = '//{}'.format(node) parts = urlparse(node, scheme='http', allow_fragments=False) port = parts.port if parts.port else _get_default_port(parts.scheme) netloc = '{}:{}'.format(parts.hostname, port) return urlunparse((parts.scheme, netloc, parts.path, '', '', ''))
python
def normalize_url(node): """Normalizes the given node url""" if not node: node = DEFAULT_NODE elif '://' not in node: node = '//{}'.format(node) parts = urlparse(node, scheme='http', allow_fragments=False) port = parts.port if parts.port else _get_default_port(parts.scheme) netloc = '{}:{}'.format(parts.hostname, port) return urlunparse((parts.scheme, netloc, parts.path, '', '', ''))
[ "def", "normalize_url", "(", "node", ")", ":", "if", "not", "node", ":", "node", "=", "DEFAULT_NODE", "elif", "'://'", "not", "in", "node", ":", "node", "=", "'//{}'", ".", "format", "(", "node", ")", "parts", "=", "urlparse", "(", "node", ",", "sche...
Normalizes the given node url
[ "Normalizes", "the", "given", "node", "url" ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/utils.py#L66-L75
train
203,437
bigchaindb/bigchaindb-driver
bigchaindb_driver/utils.py
normalize_node
def normalize_node(node, headers=None): """Normalizes given node as str or dict with headers""" headers = {} if headers is None else headers if isinstance(node, str): url = normalize_url(node) return {'endpoint': url, 'headers': headers} url = normalize_url(node['endpoint']) node_headers = node.get('headers', {}) return {'endpoint': url, 'headers': {**headers, **node_headers}}
python
def normalize_node(node, headers=None): """Normalizes given node as str or dict with headers""" headers = {} if headers is None else headers if isinstance(node, str): url = normalize_url(node) return {'endpoint': url, 'headers': headers} url = normalize_url(node['endpoint']) node_headers = node.get('headers', {}) return {'endpoint': url, 'headers': {**headers, **node_headers}}
[ "def", "normalize_node", "(", "node", ",", "headers", "=", "None", ")", ":", "headers", "=", "{", "}", "if", "headers", "is", "None", "else", "headers", "if", "isinstance", "(", "node", ",", "str", ")", ":", "url", "=", "normalize_url", "(", "node", ...
Normalizes given node as str or dict with headers
[ "Normalizes", "given", "node", "as", "str", "or", "dict", "with", "headers" ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/utils.py#L78-L87
train
203,438
bigchaindb/bigchaindb-driver
bigchaindb_driver/utils.py
normalize_nodes
def normalize_nodes(*nodes, headers=None): """Normalizes given dict or array of driver nodes""" if not nodes: return (normalize_node(DEFAULT_NODE, headers),) normalized_nodes = () for node in nodes: normalized_nodes += (normalize_node(node, headers),) return normalized_nodes
python
def normalize_nodes(*nodes, headers=None): """Normalizes given dict or array of driver nodes""" if not nodes: return (normalize_node(DEFAULT_NODE, headers),) normalized_nodes = () for node in nodes: normalized_nodes += (normalize_node(node, headers),) return normalized_nodes
[ "def", "normalize_nodes", "(", "*", "nodes", ",", "headers", "=", "None", ")", ":", "if", "not", "nodes", ":", "return", "(", "normalize_node", "(", "DEFAULT_NODE", ",", "headers", ")", ",", ")", "normalized_nodes", "=", "(", ")", "for", "node", "in", ...
Normalizes given dict or array of driver nodes
[ "Normalizes", "given", "dict", "or", "array", "of", "driver", "nodes" ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/utils.py#L90-L98
train
203,439
bigchaindb/bigchaindb-driver
bigchaindb_driver/connection.py
Connection.request
def request(self, method, *, path=None, json=None, params=None, headers=None, timeout=None, backoff_cap=None, **kwargs): """Performs an HTTP request with the given parameters. Implements exponential backoff. If `ConnectionError` occurs, a timestamp equal to now + the default delay (`BACKOFF_DELAY`) is assigned to the object. The timestamp is in UTC. Next time the function is called, it either waits till the timestamp is passed or raises `TimeoutError`. If `ConnectionError` occurs two or more times in a row, the retry count is incremented and the new timestamp is calculated as now + the default delay multiplied by two to the power of the number of retries. If a request is successful, the backoff timestamp is removed, the retry count is back to zero. Args: method (str): HTTP method (e.g.: ``'GET'``). path (str): API endpoint path (e.g.: ``'/transactions'``). json (dict): JSON data to send along with the request. params (dict): Dictionary of URL (query) parameters. headers (dict): Optional headers to pass to the request. timeout (int): Optional timeout in seconds. backoff_cap (int): The maximal allowed backoff delay in seconds to be assigned to a node. kwargs: Optional keyword arguments. """ backoff_timedelta = self.get_backoff_timedelta() if timeout is not None and timeout < backoff_timedelta: raise TimeoutError if backoff_timedelta > 0: time.sleep(backoff_timedelta) connExc = None timeout = timeout if timeout is None else timeout - backoff_timedelta try: response = self._request( method=method, timeout=timeout, url=self.node_url + path if path else self.node_url, json=json, params=params, headers=headers, **kwargs, ) except ConnectionError as err: connExc = err raise err finally: self.update_backoff_time(success=connExc is None, backoff_cap=backoff_cap) return response
python
def request(self, method, *, path=None, json=None, params=None, headers=None, timeout=None, backoff_cap=None, **kwargs): """Performs an HTTP request with the given parameters. Implements exponential backoff. If `ConnectionError` occurs, a timestamp equal to now + the default delay (`BACKOFF_DELAY`) is assigned to the object. The timestamp is in UTC. Next time the function is called, it either waits till the timestamp is passed or raises `TimeoutError`. If `ConnectionError` occurs two or more times in a row, the retry count is incremented and the new timestamp is calculated as now + the default delay multiplied by two to the power of the number of retries. If a request is successful, the backoff timestamp is removed, the retry count is back to zero. Args: method (str): HTTP method (e.g.: ``'GET'``). path (str): API endpoint path (e.g.: ``'/transactions'``). json (dict): JSON data to send along with the request. params (dict): Dictionary of URL (query) parameters. headers (dict): Optional headers to pass to the request. timeout (int): Optional timeout in seconds. backoff_cap (int): The maximal allowed backoff delay in seconds to be assigned to a node. kwargs: Optional keyword arguments. """ backoff_timedelta = self.get_backoff_timedelta() if timeout is not None and timeout < backoff_timedelta: raise TimeoutError if backoff_timedelta > 0: time.sleep(backoff_timedelta) connExc = None timeout = timeout if timeout is None else timeout - backoff_timedelta try: response = self._request( method=method, timeout=timeout, url=self.node_url + path if path else self.node_url, json=json, params=params, headers=headers, **kwargs, ) except ConnectionError as err: connExc = err raise err finally: self.update_backoff_time(success=connExc is None, backoff_cap=backoff_cap) return response
[ "def", "request", "(", "self", ",", "method", ",", "*", ",", "path", "=", "None", ",", "json", "=", "None", ",", "params", "=", "None", ",", "headers", "=", "None", ",", "timeout", "=", "None", ",", "backoff_cap", "=", "None", ",", "*", "*", "kwa...
Performs an HTTP request with the given parameters. Implements exponential backoff. If `ConnectionError` occurs, a timestamp equal to now + the default delay (`BACKOFF_DELAY`) is assigned to the object. The timestamp is in UTC. Next time the function is called, it either waits till the timestamp is passed or raises `TimeoutError`. If `ConnectionError` occurs two or more times in a row, the retry count is incremented and the new timestamp is calculated as now + the default delay multiplied by two to the power of the number of retries. If a request is successful, the backoff timestamp is removed, the retry count is back to zero. Args: method (str): HTTP method (e.g.: ``'GET'``). path (str): API endpoint path (e.g.: ``'/transactions'``). json (dict): JSON data to send along with the request. params (dict): Dictionary of URL (query) parameters. headers (dict): Optional headers to pass to the request. timeout (int): Optional timeout in seconds. backoff_cap (int): The maximal allowed backoff delay in seconds to be assigned to a node. kwargs: Optional keyword arguments.
[ "Performs", "an", "HTTP", "request", "with", "the", "given", "parameters", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/connection.py#L41-L99
train
203,440
bigchaindb/bigchaindb-driver
bigchaindb_driver/pool.py
RoundRobinPicker.pick
def pick(self, connections): """Picks a connection with the earliest backoff time. As a result, the first connection is picked for as long as it has no backoff time. Otherwise, the connections are tried in a round robin fashion. Args: connections (:obj:list): List of :class:`~bigchaindb_driver.connection.Connection` instances. """ if len(connections) == 1: return connections[0] def key(conn): return (datetime.min if conn.backoff_time is None else conn.backoff_time) return min(*connections, key=key)
python
def pick(self, connections): """Picks a connection with the earliest backoff time. As a result, the first connection is picked for as long as it has no backoff time. Otherwise, the connections are tried in a round robin fashion. Args: connections (:obj:list): List of :class:`~bigchaindb_driver.connection.Connection` instances. """ if len(connections) == 1: return connections[0] def key(conn): return (datetime.min if conn.backoff_time is None else conn.backoff_time) return min(*connections, key=key)
[ "def", "pick", "(", "self", ",", "connections", ")", ":", "if", "len", "(", "connections", ")", "==", "1", ":", "return", "connections", "[", "0", "]", "def", "key", "(", "conn", ")", ":", "return", "(", "datetime", ".", "min", "if", "conn", ".", ...
Picks a connection with the earliest backoff time. As a result, the first connection is picked for as long as it has no backoff time. Otherwise, the connections are tried in a round robin fashion. Args: connections (:obj:list): List of :class:`~bigchaindb_driver.connection.Connection` instances.
[ "Picks", "a", "connection", "with", "the", "earliest", "backoff", "time", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/pool.py#L32-L52
train
203,441
bigchaindb/bigchaindb-driver
bigchaindb_driver/transport.py
Transport.forward_request
def forward_request(self, method, path=None, json=None, params=None, headers=None): """Makes HTTP requests to the configured nodes. Retries connection errors (e.g. DNS failures, refused connection, etc). A user may choose to retry other errors by catching the corresponding exceptions and retrying `forward_request`. Exponential backoff is implemented individually for each node. Backoff delays are expressed as timestamps stored on the object and they are not reset in between multiple function calls. Times out when `self.timeout` is expired, if not `None`. Args: method (str): HTTP method name (e.g.: ``'GET'``). path (str): Path to be appended to the base url of a node. E.g.: ``'/transactions'``). json (dict): Payload to be sent with the HTTP request. params (dict)): Dictionary of URL (query) parameters. headers (dict): Optional headers to pass to the request. Returns: dict: Result of :meth:`requests.models.Response.json` """ error_trace = [] timeout = self.timeout backoff_cap = NO_TIMEOUT_BACKOFF_CAP if timeout is None \ else timeout / 2 while timeout is None or timeout > 0: connection = self.connection_pool.get_connection() start = time() try: response = connection.request( method=method, path=path, params=params, json=json, headers=headers, timeout=timeout, backoff_cap=backoff_cap, ) except ConnectionError as err: error_trace.append(err) continue else: return response.data finally: elapsed = time() - start if timeout is not None: timeout -= elapsed raise TimeoutError(error_trace)
python
def forward_request(self, method, path=None, json=None, params=None, headers=None): """Makes HTTP requests to the configured nodes. Retries connection errors (e.g. DNS failures, refused connection, etc). A user may choose to retry other errors by catching the corresponding exceptions and retrying `forward_request`. Exponential backoff is implemented individually for each node. Backoff delays are expressed as timestamps stored on the object and they are not reset in between multiple function calls. Times out when `self.timeout` is expired, if not `None`. Args: method (str): HTTP method name (e.g.: ``'GET'``). path (str): Path to be appended to the base url of a node. E.g.: ``'/transactions'``). json (dict): Payload to be sent with the HTTP request. params (dict)): Dictionary of URL (query) parameters. headers (dict): Optional headers to pass to the request. Returns: dict: Result of :meth:`requests.models.Response.json` """ error_trace = [] timeout = self.timeout backoff_cap = NO_TIMEOUT_BACKOFF_CAP if timeout is None \ else timeout / 2 while timeout is None or timeout > 0: connection = self.connection_pool.get_connection() start = time() try: response = connection.request( method=method, path=path, params=params, json=json, headers=headers, timeout=timeout, backoff_cap=backoff_cap, ) except ConnectionError as err: error_trace.append(err) continue else: return response.data finally: elapsed = time() - start if timeout is not None: timeout -= elapsed raise TimeoutError(error_trace)
[ "def", "forward_request", "(", "self", ",", "method", ",", "path", "=", "None", ",", "json", "=", "None", ",", "params", "=", "None", ",", "headers", "=", "None", ")", ":", "error_trace", "=", "[", "]", "timeout", "=", "self", ".", "timeout", "backof...
Makes HTTP requests to the configured nodes. Retries connection errors (e.g. DNS failures, refused connection, etc). A user may choose to retry other errors by catching the corresponding exceptions and retrying `forward_request`. Exponential backoff is implemented individually for each node. Backoff delays are expressed as timestamps stored on the object and they are not reset in between multiple function calls. Times out when `self.timeout` is expired, if not `None`. Args: method (str): HTTP method name (e.g.: ``'GET'``). path (str): Path to be appended to the base url of a node. E.g.: ``'/transactions'``). json (dict): Payload to be sent with the HTTP request. params (dict)): Dictionary of URL (query) parameters. headers (dict): Optional headers to pass to the request. Returns: dict: Result of :meth:`requests.models.Response.json`
[ "Makes", "HTTP", "requests", "to", "the", "configured", "nodes", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/transport.py#L38-L94
train
203,442
bigchaindb/bigchaindb-driver
bigchaindb_driver/common/transaction.py
Transaction.inputs_valid
def inputs_valid(self, outputs=None): """Validates the Inputs in the Transaction against given Outputs. Note: Given a `CREATE` Transaction is passed, dummy values for Outputs are submitted for validation that evaluate parts of the validation-checks to `True`. Args: outputs (:obj:`list` of :class:`~bigchaindb.common. transaction.Output`): A list of Outputs to check the Inputs against. Returns: bool: If all Inputs are valid. """ if self.operation == Transaction.CREATE: # NOTE: Since in the case of a `CREATE`-transaction we do not have # to check for outputs, we're just submitting dummy # values to the actual method. This simplifies it's logic # greatly, as we do not have to check against `None` values. return self._inputs_valid(['dummyvalue' for _ in self.inputs]) elif self.operation == Transaction.TRANSFER: return self._inputs_valid([output.fulfillment.condition_uri for output in outputs]) else: allowed_ops = ', '.join(self.__class__.ALLOWED_OPERATIONS) raise TypeError('`operation` must be one of {}' .format(allowed_ops))
python
def inputs_valid(self, outputs=None): """Validates the Inputs in the Transaction against given Outputs. Note: Given a `CREATE` Transaction is passed, dummy values for Outputs are submitted for validation that evaluate parts of the validation-checks to `True`. Args: outputs (:obj:`list` of :class:`~bigchaindb.common. transaction.Output`): A list of Outputs to check the Inputs against. Returns: bool: If all Inputs are valid. """ if self.operation == Transaction.CREATE: # NOTE: Since in the case of a `CREATE`-transaction we do not have # to check for outputs, we're just submitting dummy # values to the actual method. This simplifies it's logic # greatly, as we do not have to check against `None` values. return self._inputs_valid(['dummyvalue' for _ in self.inputs]) elif self.operation == Transaction.TRANSFER: return self._inputs_valid([output.fulfillment.condition_uri for output in outputs]) else: allowed_ops = ', '.join(self.__class__.ALLOWED_OPERATIONS) raise TypeError('`operation` must be one of {}' .format(allowed_ops))
[ "def", "inputs_valid", "(", "self", ",", "outputs", "=", "None", ")", ":", "if", "self", ".", "operation", "==", "Transaction", ".", "CREATE", ":", "# NOTE: Since in the case of a `CREATE`-transaction we do not have", "# to check for outputs, we're just submitting dummy...
Validates the Inputs in the Transaction against given Outputs. Note: Given a `CREATE` Transaction is passed, dummy values for Outputs are submitted for validation that evaluate parts of the validation-checks to `True`. Args: outputs (:obj:`list` of :class:`~bigchaindb.common. transaction.Output`): A list of Outputs to check the Inputs against. Returns: bool: If all Inputs are valid.
[ "Validates", "the", "Inputs", "in", "the", "Transaction", "against", "given", "Outputs", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/common/transaction.py#L945-L975
train
203,443
bigchaindb/bigchaindb-driver
bigchaindb_driver/common/transaction.py
Transaction._input_valid
def _input_valid(input_, operation, message, output_condition_uri=None): """Validates a single Input against a single Output. Note: In case of a `CREATE` Transaction, this method does not validate against `output_condition_uri`. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. operation (str): The type of Transaction. message (str): The fulfillment message. output_condition_uri (str, optional): An Output to check the Input against. Returns: bool: If the Input is valid. """ ccffill = input_.fulfillment try: parsed_ffill = Fulfillment.from_uri(ccffill.serialize_uri()) except (TypeError, ValueError, ParsingError, ASN1DecodeError, ASN1EncodeError): return False if operation == Transaction.CREATE: # NOTE: In the case of a `CREATE` transaction, the # output is always valid. output_valid = True else: output_valid = output_condition_uri == ccffill.condition_uri message = sha3_256(message.encode()) if input_.fulfills: message.update('{}{}'.format( input_.fulfills.txid, input_.fulfills.output).encode()) # NOTE: We pass a timestamp to `.validate`, as in case of a timeout # condition we'll have to validate against it # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. It only accepts bytestrings ffill_valid = parsed_ffill.validate(message=message.digest()) return output_valid and ffill_valid
python
def _input_valid(input_, operation, message, output_condition_uri=None): """Validates a single Input against a single Output. Note: In case of a `CREATE` Transaction, this method does not validate against `output_condition_uri`. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. operation (str): The type of Transaction. message (str): The fulfillment message. output_condition_uri (str, optional): An Output to check the Input against. Returns: bool: If the Input is valid. """ ccffill = input_.fulfillment try: parsed_ffill = Fulfillment.from_uri(ccffill.serialize_uri()) except (TypeError, ValueError, ParsingError, ASN1DecodeError, ASN1EncodeError): return False if operation == Transaction.CREATE: # NOTE: In the case of a `CREATE` transaction, the # output is always valid. output_valid = True else: output_valid = output_condition_uri == ccffill.condition_uri message = sha3_256(message.encode()) if input_.fulfills: message.update('{}{}'.format( input_.fulfills.txid, input_.fulfills.output).encode()) # NOTE: We pass a timestamp to `.validate`, as in case of a timeout # condition we'll have to validate against it # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. It only accepts bytestrings ffill_valid = parsed_ffill.validate(message=message.digest()) return output_valid and ffill_valid
[ "def", "_input_valid", "(", "input_", ",", "operation", ",", "message", ",", "output_condition_uri", "=", "None", ")", ":", "ccffill", "=", "input_", ".", "fulfillment", "try", ":", "parsed_ffill", "=", "Fulfillment", ".", "from_uri", "(", "ccffill", ".", "s...
Validates a single Input against a single Output. Note: In case of a `CREATE` Transaction, this method does not validate against `output_condition_uri`. Args: input_ (:class:`~bigchaindb.common.transaction. Input`) The Input to be signed. operation (str): The type of Transaction. message (str): The fulfillment message. output_condition_uri (str, optional): An Output to check the Input against. Returns: bool: If the Input is valid.
[ "Validates", "a", "single", "Input", "against", "a", "single", "Output", "." ]
c294a535f0696bd19483ae11a4882b74e6fc061e
https://github.com/bigchaindb/bigchaindb-driver/blob/c294a535f0696bd19483ae11a4882b74e6fc061e/bigchaindb_driver/common/transaction.py#L1010-L1053
train
203,444
zapier/email-reply-parser
email_reply_parser/__init__.py
EmailMessage.read
def read(self): """ Creates new fragment for each line and labels as a signature, quote, or hidden. Returns EmailMessage instance """ self.found_visible = False is_multi_quote_header = self.MULTI_QUOTE_HDR_REGEX_MULTILINE.search(self.text) if is_multi_quote_header: self.text = self.MULTI_QUOTE_HDR_REGEX.sub(is_multi_quote_header.groups()[0].replace('\n', ''), self.text) # Fix any outlook style replies, with the reply immediately above the signature boundary line # See email_2_2.txt for an example self.text = re.sub('([^\n])(?=\n ?[_-]{7,})', '\\1\n', self.text, re.MULTILINE) self.lines = self.text.split('\n') self.lines.reverse() for line in self.lines: self._scan_line(line) self._finish_fragment() self.fragments.reverse() return self
python
def read(self): """ Creates new fragment for each line and labels as a signature, quote, or hidden. Returns EmailMessage instance """ self.found_visible = False is_multi_quote_header = self.MULTI_QUOTE_HDR_REGEX_MULTILINE.search(self.text) if is_multi_quote_header: self.text = self.MULTI_QUOTE_HDR_REGEX.sub(is_multi_quote_header.groups()[0].replace('\n', ''), self.text) # Fix any outlook style replies, with the reply immediately above the signature boundary line # See email_2_2.txt for an example self.text = re.sub('([^\n])(?=\n ?[_-]{7,})', '\\1\n', self.text, re.MULTILINE) self.lines = self.text.split('\n') self.lines.reverse() for line in self.lines: self._scan_line(line) self._finish_fragment() self.fragments.reverse() return self
[ "def", "read", "(", "self", ")", ":", "self", ".", "found_visible", "=", "False", "is_multi_quote_header", "=", "self", ".", "MULTI_QUOTE_HDR_REGEX_MULTILINE", ".", "search", "(", "self", ".", "text", ")", "if", "is_multi_quote_header", ":", "self", ".", "text...
Creates new fragment for each line and labels as a signature, quote, or hidden. Returns EmailMessage instance
[ "Creates", "new", "fragment", "for", "each", "line", "and", "labels", "as", "a", "signature", "quote", "or", "hidden", "." ]
0c0b73a9bf2188b079a191417b273fc2cf695bf2
https://github.com/zapier/email-reply-parser/blob/0c0b73a9bf2188b079a191417b273fc2cf695bf2/email_reply_parser/__init__.py#L53-L80
train
203,445
zapier/email-reply-parser
email_reply_parser/__init__.py
EmailMessage.reply
def reply(self): """ Captures reply message within email """ reply = [] for f in self.fragments: if not (f.hidden or f.quoted): reply.append(f.content) return '\n'.join(reply)
python
def reply(self): """ Captures reply message within email """ reply = [] for f in self.fragments: if not (f.hidden or f.quoted): reply.append(f.content) return '\n'.join(reply)
[ "def", "reply", "(", "self", ")", ":", "reply", "=", "[", "]", "for", "f", "in", "self", ".", "fragments", ":", "if", "not", "(", "f", ".", "hidden", "or", "f", ".", "quoted", ")", ":", "reply", ".", "append", "(", "f", ".", "content", ")", "...
Captures reply message within email
[ "Captures", "reply", "message", "within", "email" ]
0c0b73a9bf2188b079a191417b273fc2cf695bf2
https://github.com/zapier/email-reply-parser/blob/0c0b73a9bf2188b079a191417b273fc2cf695bf2/email_reply_parser/__init__.py#L83-L90
train
203,446
zapier/email-reply-parser
email_reply_parser/__init__.py
EmailMessage._scan_line
def _scan_line(self, line): """ Reviews each line in email message and determines fragment type line - a row of text from an email message """ is_quote_header = self.QUOTE_HDR_REGEX.match(line) is not None is_quoted = self.QUOTED_REGEX.match(line) is not None is_header = is_quote_header or self.HEADER_REGEX.match(line) is not None if self.fragment and len(line.strip()) == 0: if self.SIG_REGEX.match(self.fragment.lines[-1].strip()): self.fragment.signature = True self._finish_fragment() if self.fragment \ and ((self.fragment.headers == is_header and self.fragment.quoted == is_quoted) or (self.fragment.quoted and (is_quote_header or len(line.strip()) == 0))): self.fragment.lines.append(line) else: self._finish_fragment() self.fragment = Fragment(is_quoted, line, headers=is_header)
python
def _scan_line(self, line): """ Reviews each line in email message and determines fragment type line - a row of text from an email message """ is_quote_header = self.QUOTE_HDR_REGEX.match(line) is not None is_quoted = self.QUOTED_REGEX.match(line) is not None is_header = is_quote_header or self.HEADER_REGEX.match(line) is not None if self.fragment and len(line.strip()) == 0: if self.SIG_REGEX.match(self.fragment.lines[-1].strip()): self.fragment.signature = True self._finish_fragment() if self.fragment \ and ((self.fragment.headers == is_header and self.fragment.quoted == is_quoted) or (self.fragment.quoted and (is_quote_header or len(line.strip()) == 0))): self.fragment.lines.append(line) else: self._finish_fragment() self.fragment = Fragment(is_quoted, line, headers=is_header)
[ "def", "_scan_line", "(", "self", ",", "line", ")", ":", "is_quote_header", "=", "self", ".", "QUOTE_HDR_REGEX", ".", "match", "(", "line", ")", "is", "not", "None", "is_quoted", "=", "self", ".", "QUOTED_REGEX", ".", "match", "(", "line", ")", "is", "...
Reviews each line in email message and determines fragment type line - a row of text from an email message
[ "Reviews", "each", "line", "in", "email", "message", "and", "determines", "fragment", "type" ]
0c0b73a9bf2188b079a191417b273fc2cf695bf2
https://github.com/zapier/email-reply-parser/blob/0c0b73a9bf2188b079a191417b273fc2cf695bf2/email_reply_parser/__init__.py#L92-L113
train
203,447
zapier/email-reply-parser
email_reply_parser/__init__.py
Fragment.finish
def finish(self): """ Creates block of content with lines belonging to fragment. """ self.lines.reverse() self._content = '\n'.join(self.lines) self.lines = None
python
def finish(self): """ Creates block of content with lines belonging to fragment. """ self.lines.reverse() self._content = '\n'.join(self.lines) self.lines = None
[ "def", "finish", "(", "self", ")", ":", "self", ".", "lines", ".", "reverse", "(", ")", "self", ".", "_content", "=", "'\\n'", ".", "join", "(", "self", ".", "lines", ")", "self", ".", "lines", "=", "None" ]
Creates block of content with lines belonging to fragment.
[ "Creates", "block", "of", "content", "with", "lines", "belonging", "to", "fragment", "." ]
0c0b73a9bf2188b079a191417b273fc2cf695bf2
https://github.com/zapier/email-reply-parser/blob/0c0b73a9bf2188b079a191417b273fc2cf695bf2/email_reply_parser/__init__.py#L162-L168
train
203,448
OCA/vertical-hotel
hotel_reservation/models/hotel_reservation.py
HotelReservation.check_in_out_dates
def check_in_out_dates(self): """ When date_order is less then check-in date or Checkout date should be greater than the check-in date. """ if self.checkout and self.checkin: if self.checkin < self.date_order: raise ValidationError(_('Check-in date should be greater than \ the current date.')) if self.checkout < self.checkin: raise ValidationError(_('Check-out date should be greater \ than Check-in date.'))
python
def check_in_out_dates(self): """ When date_order is less then check-in date or Checkout date should be greater than the check-in date. """ if self.checkout and self.checkin: if self.checkin < self.date_order: raise ValidationError(_('Check-in date should be greater than \ the current date.')) if self.checkout < self.checkin: raise ValidationError(_('Check-out date should be greater \ than Check-in date.'))
[ "def", "check_in_out_dates", "(", "self", ")", ":", "if", "self", ".", "checkout", "and", "self", ".", "checkin", ":", "if", "self", ".", "checkin", "<", "self", ".", "date_order", ":", "raise", "ValidationError", "(", "_", "(", "'Check-in date should be gre...
When date_order is less then check-in date or Checkout date should be greater than the check-in date.
[ "When", "date_order", "is", "less", "then", "check", "-", "in", "date", "or", "Checkout", "date", "should", "be", "greater", "than", "the", "check", "-", "in", "date", "." ]
a01442e92b5ea1fda7fb9e6180b3211e8749a35a
https://github.com/OCA/vertical-hotel/blob/a01442e92b5ea1fda7fb9e6180b3211e8749a35a/hotel_reservation/models/hotel_reservation.py#L236-L247
train
203,449
OCA/vertical-hotel
hotel_reservation/models/hotel_reservation.py
HotelReservation.send_reservation_maill
def send_reservation_maill(self): ''' This function opens a window to compose an email, template message loaded by default. @param self: object pointer ''' assert len(self._ids) == 1, 'This is for a single id at a time.' ir_model_data = self.env['ir.model.data'] try: template_id = (ir_model_data.get_object_reference ('hotel_reservation', 'mail_template_hotel_reservation')[1]) except ValueError: template_id = False try: compose_form_id = (ir_model_data.get_object_reference ('mail', 'email_compose_message_wizard_form')[1]) except ValueError: compose_form_id = False ctx = dict() ctx.update({ 'default_model': 'hotel.reservation', 'default_res_id': self._ids[0], 'default_use_template': bool(template_id), 'default_template_id': template_id, 'default_composition_mode': 'comment', 'force_send': True, 'mark_so_as_sent': True }) return { 'type': 'ir.actions.act_window', 'view_type': 'form', 'view_mode': 'form', 'res_model': 'mail.compose.message', 'views': [(compose_form_id, 'form')], 'view_id': compose_form_id, 'target': 'new', 'context': ctx, 'force_send': True }
python
def send_reservation_maill(self): ''' This function opens a window to compose an email, template message loaded by default. @param self: object pointer ''' assert len(self._ids) == 1, 'This is for a single id at a time.' ir_model_data = self.env['ir.model.data'] try: template_id = (ir_model_data.get_object_reference ('hotel_reservation', 'mail_template_hotel_reservation')[1]) except ValueError: template_id = False try: compose_form_id = (ir_model_data.get_object_reference ('mail', 'email_compose_message_wizard_form')[1]) except ValueError: compose_form_id = False ctx = dict() ctx.update({ 'default_model': 'hotel.reservation', 'default_res_id': self._ids[0], 'default_use_template': bool(template_id), 'default_template_id': template_id, 'default_composition_mode': 'comment', 'force_send': True, 'mark_so_as_sent': True }) return { 'type': 'ir.actions.act_window', 'view_type': 'form', 'view_mode': 'form', 'res_model': 'mail.compose.message', 'views': [(compose_form_id, 'form')], 'view_id': compose_form_id, 'target': 'new', 'context': ctx, 'force_send': True }
[ "def", "send_reservation_maill", "(", "self", ")", ":", "assert", "len", "(", "self", ".", "_ids", ")", "==", "1", ",", "'This is for a single id at a time.'", "ir_model_data", "=", "self", ".", "env", "[", "'ir.model.data'", "]", "try", ":", "template_id", "=...
This function opens a window to compose an email, template message loaded by default. @param self: object pointer
[ "This", "function", "opens", "a", "window", "to", "compose", "an", "email", "template", "message", "loaded", "by", "default", "." ]
a01442e92b5ea1fda7fb9e6180b3211e8749a35a
https://github.com/OCA/vertical-hotel/blob/a01442e92b5ea1fda7fb9e6180b3211e8749a35a/hotel_reservation/models/hotel_reservation.py#L431-L471
train
203,450
jmcgeheeiv/pyfakefs
pyfakefs/fake_scandir.py
walk
def walk(filesystem, top, topdown=True, onerror=None, followlinks=False): """Perform an os.walk operation over the fake filesystem. Args: filesystem: The fake filesystem used for implementation top: The root directory from which to begin walk. topdown: Determines whether to return the tuples with the root as the first entry (`True`) or as the last, after all the child directory tuples (`False`). onerror: If not `None`, function which will be called to handle the `os.error` instance provided when `os.listdir()` fails. followlinks: If `True`, symbolic links are followed. Yields: (path, directories, nondirectories) for top and each of its subdirectories. See the documentation for the builtin os module for further details. """ def do_walk(top_dir, top_most=False): top_dir = filesystem.normpath(top_dir) if not top_most and not followlinks and filesystem.islink(top_dir): return try: top_contents = _classify_directory_contents(filesystem, top_dir) except OSError as exc: top_contents = None if onerror is not None: onerror(exc) if top_contents is not None: if topdown: yield top_contents for directory in top_contents[1]: if not followlinks and filesystem.islink(directory): continue for contents in do_walk(filesystem.joinpaths(top_dir, directory)): yield contents if not topdown: yield top_contents return do_walk(top, top_most=True)
python
def walk(filesystem, top, topdown=True, onerror=None, followlinks=False): """Perform an os.walk operation over the fake filesystem. Args: filesystem: The fake filesystem used for implementation top: The root directory from which to begin walk. topdown: Determines whether to return the tuples with the root as the first entry (`True`) or as the last, after all the child directory tuples (`False`). onerror: If not `None`, function which will be called to handle the `os.error` instance provided when `os.listdir()` fails. followlinks: If `True`, symbolic links are followed. Yields: (path, directories, nondirectories) for top and each of its subdirectories. See the documentation for the builtin os module for further details. """ def do_walk(top_dir, top_most=False): top_dir = filesystem.normpath(top_dir) if not top_most and not followlinks and filesystem.islink(top_dir): return try: top_contents = _classify_directory_contents(filesystem, top_dir) except OSError as exc: top_contents = None if onerror is not None: onerror(exc) if top_contents is not None: if topdown: yield top_contents for directory in top_contents[1]: if not followlinks and filesystem.islink(directory): continue for contents in do_walk(filesystem.joinpaths(top_dir, directory)): yield contents if not topdown: yield top_contents return do_walk(top, top_most=True)
[ "def", "walk", "(", "filesystem", ",", "top", ",", "topdown", "=", "True", ",", "onerror", "=", "None", ",", "followlinks", "=", "False", ")", ":", "def", "do_walk", "(", "top_dir", ",", "top_most", "=", "False", ")", ":", "top_dir", "=", "filesystem",...
Perform an os.walk operation over the fake filesystem. Args: filesystem: The fake filesystem used for implementation top: The root directory from which to begin walk. topdown: Determines whether to return the tuples with the root as the first entry (`True`) or as the last, after all the child directory tuples (`False`). onerror: If not `None`, function which will be called to handle the `os.error` instance provided when `os.listdir()` fails. followlinks: If `True`, symbolic links are followed. Yields: (path, directories, nondirectories) for top and each of its subdirectories. See the documentation for the builtin os module for further details.
[ "Perform", "an", "os", ".", "walk", "operation", "over", "the", "fake", "filesystem", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_scandir.py#L203-L247
train
203,451
jmcgeheeiv/pyfakefs
pyfakefs/fake_scandir.py
DirEntry.inode
def inode(self): """Return the inode number of the entry.""" if self._inode is None: self.stat(follow_symlinks=False) return self._inode
python
def inode(self): """Return the inode number of the entry.""" if self._inode is None: self.stat(follow_symlinks=False) return self._inode
[ "def", "inode", "(", "self", ")", ":", "if", "self", ".", "_inode", "is", "None", ":", "self", ".", "stat", "(", "follow_symlinks", "=", "False", ")", "return", "self", ".", "_inode" ]
Return the inode number of the entry.
[ "Return", "the", "inode", "number", "of", "the", "entry", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_scandir.py#L43-L47
train
203,452
jmcgeheeiv/pyfakefs
pyfakefs/fake_scandir.py
FakeScanDirModule.walk
def walk(self, top, topdown=True, onerror=None, followlinks=False): """Perform a walk operation over the fake filesystem. Args: top: The root directory from which to begin walk. topdown: Determines whether to return the tuples with the root as the first entry (`True`) or as the last, after all the child directory tuples (`False`). onerror: If not `None`, function which will be called to handle the `os.error` instance provided when `os.listdir()` fails. followlinks: If `True`, symbolic links are followed. Yields: (path, directories, nondirectories) for top and each of its subdirectories. See the documentation for the builtin os module for further details. """ return walk(self.filesystem, top, topdown, onerror, followlinks)
python
def walk(self, top, topdown=True, onerror=None, followlinks=False): """Perform a walk operation over the fake filesystem. Args: top: The root directory from which to begin walk. topdown: Determines whether to return the tuples with the root as the first entry (`True`) or as the last, after all the child directory tuples (`False`). onerror: If not `None`, function which will be called to handle the `os.error` instance provided when `os.listdir()` fails. followlinks: If `True`, symbolic links are followed. Yields: (path, directories, nondirectories) for top and each of its subdirectories. See the documentation for the builtin os module for further details. """ return walk(self.filesystem, top, topdown, onerror, followlinks)
[ "def", "walk", "(", "self", ",", "top", ",", "topdown", "=", "True", ",", "onerror", "=", "None", ",", "followlinks", "=", "False", ")", ":", "return", "walk", "(", "self", ".", "filesystem", ",", "top", ",", "topdown", ",", "onerror", ",", "followli...
Perform a walk operation over the fake filesystem. Args: top: The root directory from which to begin walk. topdown: Determines whether to return the tuples with the root as the first entry (`True`) or as the last, after all the child directory tuples (`False`). onerror: If not `None`, function which will be called to handle the `os.error` instance provided when `os.listdir()` fails. followlinks: If `True`, symbolic links are followed. Yields: (path, directories, nondirectories) for top and each of its subdirectories. See the documentation for the builtin os module for further details.
[ "Perform", "a", "walk", "operation", "over", "the", "fake", "filesystem", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_scandir.py#L288-L305
train
203,453
jmcgeheeiv/pyfakefs
pyfakefs/deprecator.py
Deprecator.add
def add(clss, func, deprecated_name): """Add the deprecated version of a member function to the given class. Gives a deprecation warning on usage. Args: clss: the class where the deprecated function is to be added func: the actual function that is called by the deprecated version deprecated_name: the deprecated name of the function """ @Deprecator(func.__name__, deprecated_name) def _old_function(*args, **kwargs): return func(*args, **kwargs) setattr(clss, deprecated_name, _old_function)
python
def add(clss, func, deprecated_name): """Add the deprecated version of a member function to the given class. Gives a deprecation warning on usage. Args: clss: the class where the deprecated function is to be added func: the actual function that is called by the deprecated version deprecated_name: the deprecated name of the function """ @Deprecator(func.__name__, deprecated_name) def _old_function(*args, **kwargs): return func(*args, **kwargs) setattr(clss, deprecated_name, _old_function)
[ "def", "add", "(", "clss", ",", "func", ",", "deprecated_name", ")", ":", "@", "Deprecator", "(", "func", ".", "__name__", ",", "deprecated_name", ")", "def", "_old_function", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "func", "(", ...
Add the deprecated version of a member function to the given class. Gives a deprecation warning on usage. Args: clss: the class where the deprecated function is to be added func: the actual function that is called by the deprecated version deprecated_name: the deprecated name of the function
[ "Add", "the", "deprecated", "version", "of", "a", "member", "function", "to", "the", "given", "class", ".", "Gives", "a", "deprecation", "warning", "on", "usage", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/deprecator.py#L55-L69
train
203,454
jmcgeheeiv/pyfakefs
pyfakefs/fake_pathlib.py
init_module
def init_module(filesystem): """Initializes the fake module with the fake file system.""" # pylint: disable=protected-access FakePath.filesystem = filesystem FakePathlibModule.PureWindowsPath._flavour = _FakeWindowsFlavour( filesystem) FakePathlibModule.PurePosixPath._flavour = _FakePosixFlavour(filesystem)
python
def init_module(filesystem): """Initializes the fake module with the fake file system.""" # pylint: disable=protected-access FakePath.filesystem = filesystem FakePathlibModule.PureWindowsPath._flavour = _FakeWindowsFlavour( filesystem) FakePathlibModule.PurePosixPath._flavour = _FakePosixFlavour(filesystem)
[ "def", "init_module", "(", "filesystem", ")", ":", "# pylint: disable=protected-access", "FakePath", ".", "filesystem", "=", "filesystem", "FakePathlibModule", ".", "PureWindowsPath", ".", "_flavour", "=", "_FakeWindowsFlavour", "(", "filesystem", ")", "FakePathlibModule"...
Initializes the fake module with the fake file system.
[ "Initializes", "the", "fake", "module", "with", "the", "fake", "file", "system", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_pathlib.py#L51-L57
train
203,455
jmcgeheeiv/pyfakefs
pyfakefs/fake_pathlib.py
_FakeFlavour.splitroot
def splitroot(self, path, sep=None): """Split path into drive, root and rest.""" if sep is None: sep = self.filesystem.path_separator if self.filesystem.is_windows_fs: return self._splitroot_with_drive(path, sep) return self._splitroot_posix(path, sep)
python
def splitroot(self, path, sep=None): """Split path into drive, root and rest.""" if sep is None: sep = self.filesystem.path_separator if self.filesystem.is_windows_fs: return self._splitroot_with_drive(path, sep) return self._splitroot_posix(path, sep)
[ "def", "splitroot", "(", "self", ",", "path", ",", "sep", "=", "None", ")", ":", "if", "sep", "is", "None", ":", "sep", "=", "self", ".", "filesystem", ".", "path_separator", "if", "self", ".", "filesystem", ".", "is_windows_fs", ":", "return", "self",...
Split path into drive, root and rest.
[ "Split", "path", "into", "drive", "root", "and", "rest", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_pathlib.py#L215-L221
train
203,456
jmcgeheeiv/pyfakefs
pyfakefs/fake_pathlib.py
_FakeFlavour.casefold_parts
def casefold_parts(self, parts): """Return the lower-case version of parts for a Windows filesystem.""" if self.filesystem.is_windows_fs: return [p.lower() for p in parts] return parts
python
def casefold_parts(self, parts): """Return the lower-case version of parts for a Windows filesystem.""" if self.filesystem.is_windows_fs: return [p.lower() for p in parts] return parts
[ "def", "casefold_parts", "(", "self", ",", "parts", ")", ":", "if", "self", ".", "filesystem", ".", "is_windows_fs", ":", "return", "[", "p", ".", "lower", "(", ")", "for", "p", "in", "parts", "]", "return", "parts" ]
Return the lower-case version of parts for a Windows filesystem.
[ "Return", "the", "lower", "-", "case", "version", "of", "parts", "for", "a", "Windows", "filesystem", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_pathlib.py#L229-L233
train
203,457
jmcgeheeiv/pyfakefs
pyfakefs/fake_pathlib.py
_FakeFlavour.resolve
def resolve(self, path, strict): """Make the path absolute, resolving any symlinks.""" if self.filesystem.is_windows_fs: return self._resolve_windows(path, strict) return self._resolve_posix(path, strict)
python
def resolve(self, path, strict): """Make the path absolute, resolving any symlinks.""" if self.filesystem.is_windows_fs: return self._resolve_windows(path, strict) return self._resolve_posix(path, strict)
[ "def", "resolve", "(", "self", ",", "path", ",", "strict", ")", ":", "if", "self", ".", "filesystem", ".", "is_windows_fs", ":", "return", "self", ".", "_resolve_windows", "(", "path", ",", "strict", ")", "return", "self", ".", "_resolve_posix", "(", "pa...
Make the path absolute, resolving any symlinks.
[ "Make", "the", "path", "absolute", "resolving", "any", "symlinks", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_pathlib.py#L303-L307
train
203,458
jmcgeheeiv/pyfakefs
pyfakefs/fake_pathlib.py
FakePath.open
def open(self, mode='r', buffering=-1, encoding=None, errors=None, newline=None): """Open the file pointed by this path and return a fake file object. Raises: IOError: if the target object is a directory, the path is invalid or permission is denied. """ if self._closed: self._raise_closed() return FakeFileOpen(self.filesystem, use_io=True)( self._path(), mode, buffering, encoding, errors, newline)
python
def open(self, mode='r', buffering=-1, encoding=None, errors=None, newline=None): """Open the file pointed by this path and return a fake file object. Raises: IOError: if the target object is a directory, the path is invalid or permission is denied. """ if self._closed: self._raise_closed() return FakeFileOpen(self.filesystem, use_io=True)( self._path(), mode, buffering, encoding, errors, newline)
[ "def", "open", "(", "self", ",", "mode", "=", "'r'", ",", "buffering", "=", "-", "1", ",", "encoding", "=", "None", ",", "errors", "=", "None", ",", "newline", "=", "None", ")", ":", "if", "self", ".", "_closed", ":", "self", ".", "_raise_closed", ...
Open the file pointed by this path and return a fake file object. Raises: IOError: if the target object is a directory, the path is invalid or permission is denied.
[ "Open", "the", "file", "pointed", "by", "this", "path", "and", "return", "a", "fake", "file", "object", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_pathlib.py#L497-L508
train
203,459
jmcgeheeiv/pyfakefs
pyfakefs/fake_pathlib.py
FakePath.touch
def touch(self, mode=0o666, exist_ok=True): """Create a fake file for the path with the given access mode, if it doesn't exist. Args: mode: the file mode for the file if it does not exist exist_ok: if the file already exists and this is True, nothing happens, otherwise FileExistError is raised Raises: OSError: (Python 2 only) if the file exists and exits_ok is False. FileExistsError: (Python 3 only) if the file exists and exits_ok is False. """ if self._closed: self._raise_closed() if self.exists(): if exist_ok: self.filesystem.utime(self._path(), None) else: self.filesystem.raise_os_error(errno.EEXIST, self._path()) else: fake_file = self.open('w') fake_file.close() self.chmod(mode)
python
def touch(self, mode=0o666, exist_ok=True): """Create a fake file for the path with the given access mode, if it doesn't exist. Args: mode: the file mode for the file if it does not exist exist_ok: if the file already exists and this is True, nothing happens, otherwise FileExistError is raised Raises: OSError: (Python 2 only) if the file exists and exits_ok is False. FileExistsError: (Python 3 only) if the file exists and exits_ok is False. """ if self._closed: self._raise_closed() if self.exists(): if exist_ok: self.filesystem.utime(self._path(), None) else: self.filesystem.raise_os_error(errno.EEXIST, self._path()) else: fake_file = self.open('w') fake_file.close() self.chmod(mode)
[ "def", "touch", "(", "self", ",", "mode", "=", "0o666", ",", "exist_ok", "=", "True", ")", ":", "if", "self", ".", "_closed", ":", "self", ".", "_raise_closed", "(", ")", "if", "self", ".", "exists", "(", ")", ":", "if", "exist_ok", ":", "self", ...
Create a fake file for the path with the given access mode, if it doesn't exist. Args: mode: the file mode for the file if it does not exist exist_ok: if the file already exists and this is True, nothing happens, otherwise FileExistError is raised Raises: OSError: (Python 2 only) if the file exists and exits_ok is False. FileExistsError: (Python 3 only) if the file exists and exits_ok is False.
[ "Create", "a", "fake", "file", "for", "the", "path", "with", "the", "given", "access", "mode", "if", "it", "doesn", "t", "exist", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_pathlib.py#L604-L628
train
203,460
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
_copy_module
def _copy_module(old): """Recompiles and creates new module object.""" saved = sys.modules.pop(old.__name__, None) new = __import__(old.__name__) sys.modules[old.__name__] = saved return new
python
def _copy_module(old): """Recompiles and creates new module object.""" saved = sys.modules.pop(old.__name__, None) new = __import__(old.__name__) sys.modules[old.__name__] = saved return new
[ "def", "_copy_module", "(", "old", ")", ":", "saved", "=", "sys", ".", "modules", ".", "pop", "(", "old", ".", "__name__", ",", "None", ")", "new", "=", "__import__", "(", "old", ".", "__name__", ")", "sys", ".", "modules", "[", "old", ".", "__name...
Recompiles and creates new module object.
[ "Recompiles", "and", "creates", "new", "module", "object", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L215-L220
train
203,461
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFile.contents
def contents(self): """Return the contents as string with the original encoding.""" if not IS_PY2 and isinstance(self.byte_contents, bytes): return self.byte_contents.decode( self.encoding or locale.getpreferredencoding(False), errors=self.errors) return self.byte_contents
python
def contents(self): """Return the contents as string with the original encoding.""" if not IS_PY2 and isinstance(self.byte_contents, bytes): return self.byte_contents.decode( self.encoding or locale.getpreferredencoding(False), errors=self.errors) return self.byte_contents
[ "def", "contents", "(", "self", ")", ":", "if", "not", "IS_PY2", "and", "isinstance", "(", "self", ".", "byte_contents", ",", "bytes", ")", ":", "return", "self", ".", "byte_contents", ".", "decode", "(", "self", ".", "encoding", "or", "locale", ".", "...
Return the contents as string with the original encoding.
[ "Return", "the", "contents", "as", "string", "with", "the", "original", "encoding", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L301-L307
train
203,462
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFile.set_large_file_size
def set_large_file_size(self, st_size): """Sets the self.st_size attribute and replaces self.content with None. Provided specifically to simulate very large files without regards to their content (which wouldn't fit in memory). Note that read/write operations with such a file raise :py:class:`FakeLargeFileIoException`. Args: st_size: (int) The desired file size Raises: IOError: if the st_size is not a non-negative integer, or if st_size exceeds the available file system space """ self._check_positive_int(st_size) if self.st_size: self.size = 0 if self.filesystem: self.filesystem.change_disk_usage(st_size, self.name, self.st_dev) self.st_size = st_size self._byte_contents = None
python
def set_large_file_size(self, st_size): """Sets the self.st_size attribute and replaces self.content with None. Provided specifically to simulate very large files without regards to their content (which wouldn't fit in memory). Note that read/write operations with such a file raise :py:class:`FakeLargeFileIoException`. Args: st_size: (int) The desired file size Raises: IOError: if the st_size is not a non-negative integer, or if st_size exceeds the available file system space """ self._check_positive_int(st_size) if self.st_size: self.size = 0 if self.filesystem: self.filesystem.change_disk_usage(st_size, self.name, self.st_dev) self.st_size = st_size self._byte_contents = None
[ "def", "set_large_file_size", "(", "self", ",", "st_size", ")", ":", "self", ".", "_check_positive_int", "(", "st_size", ")", "if", "self", ".", "st_size", ":", "self", ".", "size", "=", "0", "if", "self", ".", "filesystem", ":", "self", ".", "filesystem...
Sets the self.st_size attribute and replaces self.content with None. Provided specifically to simulate very large files without regards to their content (which wouldn't fit in memory). Note that read/write operations with such a file raise :py:class:`FakeLargeFileIoException`. Args: st_size: (int) The desired file size Raises: IOError: if the st_size is not a non-negative integer, or if st_size exceeds the available file system space
[ "Sets", "the", "self", ".", "st_size", "attribute", "and", "replaces", "self", ".", "content", "with", "None", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L339-L360
train
203,463
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFile._set_initial_contents
def _set_initial_contents(self, contents): """Sets the file contents and size. Called internally after initial file creation. Args: contents: string, new content of file. Returns: True if the contents have been changed. Raises: IOError: if the st_size is not a non-negative integer, or if st_size exceeds the available file system space """ contents = self._encode_contents(contents) changed = self._byte_contents != contents st_size = len(contents) if self._byte_contents: self.size = 0 current_size = self.st_size or 0 self.filesystem.change_disk_usage( st_size - current_size, self.name, self.st_dev) self._byte_contents = contents self.st_size = st_size self.epoch += 1 return changed
python
def _set_initial_contents(self, contents): """Sets the file contents and size. Called internally after initial file creation. Args: contents: string, new content of file. Returns: True if the contents have been changed. Raises: IOError: if the st_size is not a non-negative integer, or if st_size exceeds the available file system space """ contents = self._encode_contents(contents) changed = self._byte_contents != contents st_size = len(contents) if self._byte_contents: self.size = 0 current_size = self.st_size or 0 self.filesystem.change_disk_usage( st_size - current_size, self.name, self.st_dev) self._byte_contents = contents self.st_size = st_size self.epoch += 1 return changed
[ "def", "_set_initial_contents", "(", "self", ",", "contents", ")", ":", "contents", "=", "self", ".", "_encode_contents", "(", "contents", ")", "changed", "=", "self", ".", "_byte_contents", "!=", "contents", "st_size", "=", "len", "(", "contents", ")", "if"...
Sets the file contents and size. Called internally after initial file creation. Args: contents: string, new content of file. Returns: True if the contents have been changed. Raises: IOError: if the st_size is not a non-negative integer, or if st_size exceeds the available file system space
[ "Sets", "the", "file", "contents", "and", "size", ".", "Called", "internally", "after", "initial", "file", "creation", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L385-L411
train
203,464
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFile.set_contents
def set_contents(self, contents, encoding=None): """Sets the file contents and size and increases the modification time. Also executes the side_effects if available. Args: contents: (str, bytes, unicode) new content of file. encoding: (str) the encoding to be used for writing the contents if they are a unicode string. If not given, the locale preferred encoding is used. Raises: IOError: if `st_size` is not a non-negative integer, or if it exceeds the available file system space. """ self.encoding = encoding changed = self._set_initial_contents(contents) if self._side_effect is not None: self._side_effect(self) return changed
python
def set_contents(self, contents, encoding=None): """Sets the file contents and size and increases the modification time. Also executes the side_effects if available. Args: contents: (str, bytes, unicode) new content of file. encoding: (str) the encoding to be used for writing the contents if they are a unicode string. If not given, the locale preferred encoding is used. Raises: IOError: if `st_size` is not a non-negative integer, or if it exceeds the available file system space. """ self.encoding = encoding changed = self._set_initial_contents(contents) if self._side_effect is not None: self._side_effect(self) return changed
[ "def", "set_contents", "(", "self", ",", "contents", ",", "encoding", "=", "None", ")", ":", "self", ".", "encoding", "=", "encoding", "changed", "=", "self", ".", "_set_initial_contents", "(", "contents", ")", "if", "self", ".", "_side_effect", "is", "not...
Sets the file contents and size and increases the modification time. Also executes the side_effects if available. Args: contents: (str, bytes, unicode) new content of file. encoding: (str) the encoding to be used for writing the contents if they are a unicode string. If not given, the locale preferred encoding is used. Raises: IOError: if `st_size` is not a non-negative integer, or if it exceeds the available file system space.
[ "Sets", "the", "file", "contents", "and", "size", "and", "increases", "the", "modification", "time", ".", "Also", "executes", "the", "side_effects", "if", "available", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L413-L431
train
203,465
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFile.path
def path(self): """Return the full path of the current object.""" names = [] obj = self while obj: names.insert(0, obj.name) obj = obj.parent_dir sep = self.filesystem._path_separator(self.name) if names[0] == sep: names.pop(0) dir_path = sep.join(names) # Windows paths with drive have a root separator entry # which should be removed is_drive = names and len(names[0]) == 2 and names[0][1] == ':' if not is_drive: dir_path = sep + dir_path else: dir_path = sep.join(names) dir_path = self.filesystem.absnormpath(dir_path) return dir_path
python
def path(self): """Return the full path of the current object.""" names = [] obj = self while obj: names.insert(0, obj.name) obj = obj.parent_dir sep = self.filesystem._path_separator(self.name) if names[0] == sep: names.pop(0) dir_path = sep.join(names) # Windows paths with drive have a root separator entry # which should be removed is_drive = names and len(names[0]) == 2 and names[0][1] == ':' if not is_drive: dir_path = sep + dir_path else: dir_path = sep.join(names) dir_path = self.filesystem.absnormpath(dir_path) return dir_path
[ "def", "path", "(", "self", ")", ":", "names", "=", "[", "]", "obj", "=", "self", "while", "obj", ":", "names", ".", "insert", "(", "0", ",", "obj", ".", "name", ")", "obj", "=", "obj", ".", "parent_dir", "sep", "=", "self", ".", "filesystem", ...
Return the full path of the current object.
[ "Return", "the", "full", "path", "of", "the", "current", "object", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L440-L459
train
203,466
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFile.size
def size(self, st_size): """Resizes file content, padding with nulls if new size exceeds the old size. Args: st_size: The desired size for the file. Raises: IOError: if the st_size arg is not a non-negative integer or if st_size exceeds the available file system space """ self._check_positive_int(st_size) current_size = self.st_size or 0 self.filesystem.change_disk_usage( st_size - current_size, self.name, self.st_dev) if self._byte_contents: if st_size < current_size: self._byte_contents = self._byte_contents[:st_size] else: if IS_PY2: self._byte_contents = '%s%s' % ( self._byte_contents, '\0' * (st_size - current_size)) else: self._byte_contents += b'\0' * (st_size - current_size) self.st_size = st_size self.epoch += 1
python
def size(self, st_size): """Resizes file content, padding with nulls if new size exceeds the old size. Args: st_size: The desired size for the file. Raises: IOError: if the st_size arg is not a non-negative integer or if st_size exceeds the available file system space """ self._check_positive_int(st_size) current_size = self.st_size or 0 self.filesystem.change_disk_usage( st_size - current_size, self.name, self.st_dev) if self._byte_contents: if st_size < current_size: self._byte_contents = self._byte_contents[:st_size] else: if IS_PY2: self._byte_contents = '%s%s' % ( self._byte_contents, '\0' * (st_size - current_size)) else: self._byte_contents += b'\0' * (st_size - current_size) self.st_size = st_size self.epoch += 1
[ "def", "size", "(", "self", ",", "st_size", ")", ":", "self", ".", "_check_positive_int", "(", "st_size", ")", "current_size", "=", "self", ".", "st_size", "or", "0", "self", ".", "filesystem", ".", "change_disk_usage", "(", "st_size", "-", "current_size", ...
Resizes file content, padding with nulls if new size exceeds the old size. Args: st_size: The desired size for the file. Raises: IOError: if the st_size arg is not a non-negative integer or if st_size exceeds the available file system space
[ "Resizes", "file", "content", "padding", "with", "nulls", "if", "new", "size", "exceeds", "the", "old", "size", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L470-L496
train
203,467
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeDirectory.ordered_dirs
def ordered_dirs(self): """Return the list of contained directory entry names ordered by creation order. """ return [item[0] for item in sorted( self.byte_contents.items(), key=lambda entry: entry[1].st_ino)]
python
def ordered_dirs(self): """Return the list of contained directory entry names ordered by creation order. """ return [item[0] for item in sorted( self.byte_contents.items(), key=lambda entry: entry[1].st_ino)]
[ "def", "ordered_dirs", "(", "self", ")", ":", "return", "[", "item", "[", "0", "]", "for", "item", "in", "sorted", "(", "self", ".", "byte_contents", ".", "items", "(", ")", ",", "key", "=", "lambda", "entry", ":", "entry", "[", "1", "]", ".", "s...
Return the list of contained directory entry names ordered by creation order.
[ "Return", "the", "list", "of", "contained", "directory", "entry", "names", "ordered", "by", "creation", "order", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L646-L651
train
203,468
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeDirectory.add_entry
def add_entry(self, path_object): """Adds a child FakeFile to this directory. Args: path_object: FakeFile instance to add as a child of this directory. Raises: OSError: if the directory has no write permission (Posix only) OSError: if the file or directory to be added already exists """ if (not is_root() and not self.st_mode & PERM_WRITE and not self.filesystem.is_windows_fs): exception = IOError if IS_PY2 else OSError raise exception(errno.EACCES, 'Permission Denied', self.path) if path_object.name in self.contents: self.filesystem.raise_os_error(errno.EEXIST, self.path) self.contents[path_object.name] = path_object path_object.parent_dir = self self.st_nlink += 1 path_object.st_nlink += 1 path_object.st_dev = self.st_dev if path_object.st_nlink == 1: self.filesystem.change_disk_usage( path_object.size, path_object.name, self.st_dev)
python
def add_entry(self, path_object): """Adds a child FakeFile to this directory. Args: path_object: FakeFile instance to add as a child of this directory. Raises: OSError: if the directory has no write permission (Posix only) OSError: if the file or directory to be added already exists """ if (not is_root() and not self.st_mode & PERM_WRITE and not self.filesystem.is_windows_fs): exception = IOError if IS_PY2 else OSError raise exception(errno.EACCES, 'Permission Denied', self.path) if path_object.name in self.contents: self.filesystem.raise_os_error(errno.EEXIST, self.path) self.contents[path_object.name] = path_object path_object.parent_dir = self self.st_nlink += 1 path_object.st_nlink += 1 path_object.st_dev = self.st_dev if path_object.st_nlink == 1: self.filesystem.change_disk_usage( path_object.size, path_object.name, self.st_dev)
[ "def", "add_entry", "(", "self", ",", "path_object", ")", ":", "if", "(", "not", "is_root", "(", ")", "and", "not", "self", ".", "st_mode", "&", "PERM_WRITE", "and", "not", "self", ".", "filesystem", ".", "is_windows_fs", ")", ":", "exception", "=", "I...
Adds a child FakeFile to this directory. Args: path_object: FakeFile instance to add as a child of this directory. Raises: OSError: if the directory has no write permission (Posix only) OSError: if the file or directory to be added already exists
[ "Adds", "a", "child", "FakeFile", "to", "this", "directory", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L653-L678
train
203,469
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeDirectory.get_entry
def get_entry(self, pathname_name): """Retrieves the specified child file or directory entry. Args: pathname_name: The basename of the child object to retrieve. Returns: The fake file or directory object. Raises: KeyError: if no child exists by the specified name. """ pathname_name = self._normalized_entryname(pathname_name) return self.contents[pathname_name]
python
def get_entry(self, pathname_name): """Retrieves the specified child file or directory entry. Args: pathname_name: The basename of the child object to retrieve. Returns: The fake file or directory object. Raises: KeyError: if no child exists by the specified name. """ pathname_name = self._normalized_entryname(pathname_name) return self.contents[pathname_name]
[ "def", "get_entry", "(", "self", ",", "pathname_name", ")", ":", "pathname_name", "=", "self", ".", "_normalized_entryname", "(", "pathname_name", ")", "return", "self", ".", "contents", "[", "pathname_name", "]" ]
Retrieves the specified child file or directory entry. Args: pathname_name: The basename of the child object to retrieve. Returns: The fake file or directory object. Raises: KeyError: if no child exists by the specified name.
[ "Retrieves", "the", "specified", "child", "file", "or", "directory", "entry", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L680-L693
train
203,470
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeDirectory.remove_entry
def remove_entry(self, pathname_name, recursive=True): """Removes the specified child file or directory. Args: pathname_name: Basename of the child object to remove. recursive: If True (default), the entries in contained directories are deleted first. Used to propagate removal errors (e.g. permission problems) from contained entries. Raises: KeyError: if no child exists by the specified name. OSError: if user lacks permission to delete the file, or (Windows only) the file is open. """ pathname_name = self._normalized_entryname(pathname_name) entry = self.get_entry(pathname_name) if self.filesystem.is_windows_fs: if entry.st_mode & PERM_WRITE == 0: self.filesystem.raise_os_error(errno.EACCES, pathname_name) if self.filesystem.has_open_file(entry): self.filesystem.raise_os_error(errno.EACCES, pathname_name) else: if (not is_root() and (self.st_mode & (PERM_WRITE | PERM_EXE) != PERM_WRITE | PERM_EXE)): self.filesystem.raise_os_error(errno.EACCES, pathname_name) if recursive and isinstance(entry, FakeDirectory): while entry.contents: entry.remove_entry(list(entry.contents)[0]) elif entry.st_nlink == 1: self.filesystem.change_disk_usage( -entry.size, pathname_name, entry.st_dev) self.st_nlink -= 1 entry.st_nlink -= 1 assert entry.st_nlink >= 0 del self.contents[pathname_name]
python
def remove_entry(self, pathname_name, recursive=True): """Removes the specified child file or directory. Args: pathname_name: Basename of the child object to remove. recursive: If True (default), the entries in contained directories are deleted first. Used to propagate removal errors (e.g. permission problems) from contained entries. Raises: KeyError: if no child exists by the specified name. OSError: if user lacks permission to delete the file, or (Windows only) the file is open. """ pathname_name = self._normalized_entryname(pathname_name) entry = self.get_entry(pathname_name) if self.filesystem.is_windows_fs: if entry.st_mode & PERM_WRITE == 0: self.filesystem.raise_os_error(errno.EACCES, pathname_name) if self.filesystem.has_open_file(entry): self.filesystem.raise_os_error(errno.EACCES, pathname_name) else: if (not is_root() and (self.st_mode & (PERM_WRITE | PERM_EXE) != PERM_WRITE | PERM_EXE)): self.filesystem.raise_os_error(errno.EACCES, pathname_name) if recursive and isinstance(entry, FakeDirectory): while entry.contents: entry.remove_entry(list(entry.contents)[0]) elif entry.st_nlink == 1: self.filesystem.change_disk_usage( -entry.size, pathname_name, entry.st_dev) self.st_nlink -= 1 entry.st_nlink -= 1 assert entry.st_nlink >= 0 del self.contents[pathname_name]
[ "def", "remove_entry", "(", "self", ",", "pathname_name", ",", "recursive", "=", "True", ")", ":", "pathname_name", "=", "self", ".", "_normalized_entryname", "(", "pathname_name", ")", "entry", "=", "self", ".", "get_entry", "(", "pathname_name", ")", "if", ...
Removes the specified child file or directory. Args: pathname_name: Basename of the child object to remove. recursive: If True (default), the entries in contained directories are deleted first. Used to propagate removal errors (e.g. permission problems) from contained entries. Raises: KeyError: if no child exists by the specified name. OSError: if user lacks permission to delete the file, or (Windows only) the file is open.
[ "Removes", "the", "specified", "child", "file", "or", "directory", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L703-L740
train
203,471
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeDirectory.has_parent_object
def has_parent_object(self, dir_object): """Return `True` if dir_object is a direct or indirect parent directory, or if both are the same object.""" obj = self while obj: if obj == dir_object: return True obj = obj.parent_dir return False
python
def has_parent_object(self, dir_object): """Return `True` if dir_object is a direct or indirect parent directory, or if both are the same object.""" obj = self while obj: if obj == dir_object: return True obj = obj.parent_dir return False
[ "def", "has_parent_object", "(", "self", ",", "dir_object", ")", ":", "obj", "=", "self", "while", "obj", ":", "if", "obj", "==", "dir_object", ":", "return", "True", "obj", "=", "obj", ".", "parent_dir", "return", "False" ]
Return `True` if dir_object is a direct or indirect parent directory, or if both are the same object.
[ "Return", "True", "if", "dir_object", "is", "a", "direct", "or", "indirect", "parent", "directory", "or", "if", "both", "are", "the", "same", "object", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L752-L760
train
203,472
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeDirectoryFromRealDirectory.contents
def contents(self): """Return the list of contained directory entries, loading them if not already loaded.""" if not self.contents_read: self.contents_read = True base = self.path for entry in os.listdir(self.source_path): source_path = os.path.join(self.source_path, entry) target_path = os.path.join(base, entry) if os.path.isdir(source_path): self.filesystem.add_real_directory( source_path, self.read_only, target_path=target_path) else: self.filesystem.add_real_file( source_path, self.read_only, target_path=target_path) return self.byte_contents
python
def contents(self): """Return the list of contained directory entries, loading them if not already loaded.""" if not self.contents_read: self.contents_read = True base = self.path for entry in os.listdir(self.source_path): source_path = os.path.join(self.source_path, entry) target_path = os.path.join(base, entry) if os.path.isdir(source_path): self.filesystem.add_real_directory( source_path, self.read_only, target_path=target_path) else: self.filesystem.add_real_file( source_path, self.read_only, target_path=target_path) return self.byte_contents
[ "def", "contents", "(", "self", ")", ":", "if", "not", "self", ".", "contents_read", ":", "self", ".", "contents_read", "=", "True", "base", "=", "self", ".", "path", "for", "entry", "in", "os", ".", "listdir", "(", "self", ".", "source_path", ")", "...
Return the list of contained directory entries, loading them if not already loaded.
[ "Return", "the", "list", "of", "contained", "directory", "entries", "loading", "them", "if", "not", "already", "loaded", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L817-L832
train
203,473
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.reset
def reset(self, total_size=None): """Remove all file system contents and reset the root.""" self.root = FakeDirectory(self.path_separator, filesystem=self) self.cwd = self.root.name self.open_files = [] self._free_fd_heap = [] self._last_ino = 0 self._last_dev = 0 self.mount_points = {} self.add_mount_point(self.root.name, total_size) self._add_standard_streams()
python
def reset(self, total_size=None): """Remove all file system contents and reset the root.""" self.root = FakeDirectory(self.path_separator, filesystem=self) self.cwd = self.root.name self.open_files = [] self._free_fd_heap = [] self._last_ino = 0 self._last_dev = 0 self.mount_points = {} self.add_mount_point(self.root.name, total_size) self._add_standard_streams()
[ "def", "reset", "(", "self", ",", "total_size", "=", "None", ")", ":", "self", ".", "root", "=", "FakeDirectory", "(", "self", ".", "path_separator", ",", "filesystem", "=", "self", ")", "self", ".", "cwd", "=", "self", ".", "root", ".", "name", "sel...
Remove all file system contents and reset the root.
[ "Remove", "all", "file", "system", "contents", "and", "reset", "the", "root", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L915-L926
train
203,474
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.raise_io_error
def raise_io_error(self, errno, filename=None): """Raises IOError. The error message is constructed from the given error code and shall start with the error in the real system. Args: errno: A numeric error code from the C variable errno. filename: The name of the affected file, if any. """ raise IOError(errno, self._error_message(errno), filename)
python
def raise_io_error(self, errno, filename=None): """Raises IOError. The error message is constructed from the given error code and shall start with the error in the real system. Args: errno: A numeric error code from the C variable errno. filename: The name of the affected file, if any. """ raise IOError(errno, self._error_message(errno), filename)
[ "def", "raise_io_error", "(", "self", ",", "errno", ",", "filename", "=", "None", ")", ":", "raise", "IOError", "(", "errno", ",", "self", ".", "_error_message", "(", "errno", ")", ",", "filename", ")" ]
Raises IOError. The error message is constructed from the given error code and shall start with the error in the real system. Args: errno: A numeric error code from the C variable errno. filename: The name of the affected file, if any.
[ "Raises", "IOError", ".", "The", "error", "message", "is", "constructed", "from", "the", "given", "error", "code", "and", "shall", "start", "with", "the", "error", "in", "the", "real", "system", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L984-L993
train
203,475
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem._matching_string
def _matching_string(matched, string): """Return the string as byte or unicode depending on the type of matched, assuming string is an ASCII string. """ if string is None: return string if IS_PY2: # pylint: disable=undefined-variable if isinstance(matched, text_type): return text_type(string) else: if isinstance(matched, bytes) and isinstance(string, str): return string.encode(locale.getpreferredencoding(False)) return string
python
def _matching_string(matched, string): """Return the string as byte or unicode depending on the type of matched, assuming string is an ASCII string. """ if string is None: return string if IS_PY2: # pylint: disable=undefined-variable if isinstance(matched, text_type): return text_type(string) else: if isinstance(matched, bytes) and isinstance(string, str): return string.encode(locale.getpreferredencoding(False)) return string
[ "def", "_matching_string", "(", "matched", ",", "string", ")", ":", "if", "string", "is", "None", ":", "return", "string", "if", "IS_PY2", ":", "# pylint: disable=undefined-variable", "if", "isinstance", "(", "matched", ",", "text_type", ")", ":", "return", "t...
Return the string as byte or unicode depending on the type of matched, assuming string is an ASCII string.
[ "Return", "the", "string", "as", "byte", "or", "unicode", "depending", "on", "the", "type", "of", "matched", "assuming", "string", "is", "an", "ASCII", "string", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L996-L1009
train
203,476
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.add_mount_point
def add_mount_point(self, path, total_size=None): """Add a new mount point for a filesystem device. The mount point gets a new unique device number. Args: path: The root path for the new mount path. total_size: The new total size of the added filesystem device in bytes. Defaults to infinite size. Returns: The newly created mount point dict. Raises: OSError: if trying to mount an existing mount point again. """ path = self.absnormpath(path) if path in self.mount_points: self.raise_os_error(errno.EEXIST, path) self._last_dev += 1 self.mount_points[path] = { 'idev': self._last_dev, 'total_size': total_size, 'used_size': 0 } # special handling for root path: has been created before root_dir = (self.root if path == self.root.name else self.create_dir(path)) root_dir.st_dev = self._last_dev return self.mount_points[path]
python
def add_mount_point(self, path, total_size=None): """Add a new mount point for a filesystem device. The mount point gets a new unique device number. Args: path: The root path for the new mount path. total_size: The new total size of the added filesystem device in bytes. Defaults to infinite size. Returns: The newly created mount point dict. Raises: OSError: if trying to mount an existing mount point again. """ path = self.absnormpath(path) if path in self.mount_points: self.raise_os_error(errno.EEXIST, path) self._last_dev += 1 self.mount_points[path] = { 'idev': self._last_dev, 'total_size': total_size, 'used_size': 0 } # special handling for root path: has been created before root_dir = (self.root if path == self.root.name else self.create_dir(path)) root_dir.st_dev = self._last_dev return self.mount_points[path]
[ "def", "add_mount_point", "(", "self", ",", "path", ",", "total_size", "=", "None", ")", ":", "path", "=", "self", ".", "absnormpath", "(", "path", ")", "if", "path", "in", "self", ".", "mount_points", ":", "self", ".", "raise_os_error", "(", "errno", ...
Add a new mount point for a filesystem device. The mount point gets a new unique device number. Args: path: The root path for the new mount path. total_size: The new total size of the added filesystem device in bytes. Defaults to infinite size. Returns: The newly created mount point dict. Raises: OSError: if trying to mount an existing mount point again.
[ "Add", "a", "new", "mount", "point", "for", "a", "filesystem", "device", ".", "The", "mount", "point", "gets", "a", "new", "unique", "device", "number", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1023-L1050
train
203,477
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.get_disk_usage
def get_disk_usage(self, path=None): """Return the total, used and free disk space in bytes as named tuple, or placeholder values simulating unlimited space if not set. .. note:: This matches the return value of shutil.disk_usage(). Args: path: The disk space is returned for the file system device where `path` resides. Defaults to the root path (e.g. '/' on Unix systems). """ DiskUsage = namedtuple('usage', 'total, used, free') if path is None: mount_point = self.mount_points[self.root.name] else: mount_point = self._mount_point_for_path(path) if mount_point and mount_point['total_size'] is not None: return DiskUsage(mount_point['total_size'], mount_point['used_size'], mount_point['total_size'] - mount_point['used_size']) return DiskUsage( 1024 * 1024 * 1024 * 1024, 0, 1024 * 1024 * 1024 * 1024)
python
def get_disk_usage(self, path=None): """Return the total, used and free disk space in bytes as named tuple, or placeholder values simulating unlimited space if not set. .. note:: This matches the return value of shutil.disk_usage(). Args: path: The disk space is returned for the file system device where `path` resides. Defaults to the root path (e.g. '/' on Unix systems). """ DiskUsage = namedtuple('usage', 'total, used, free') if path is None: mount_point = self.mount_points[self.root.name] else: mount_point = self._mount_point_for_path(path) if mount_point and mount_point['total_size'] is not None: return DiskUsage(mount_point['total_size'], mount_point['used_size'], mount_point['total_size'] - mount_point['used_size']) return DiskUsage( 1024 * 1024 * 1024 * 1024, 0, 1024 * 1024 * 1024 * 1024)
[ "def", "get_disk_usage", "(", "self", ",", "path", "=", "None", ")", ":", "DiskUsage", "=", "namedtuple", "(", "'usage'", ",", "'total, used, free'", ")", "if", "path", "is", "None", ":", "mount_point", "=", "self", ".", "mount_points", "[", "self", ".", ...
Return the total, used and free disk space in bytes as named tuple, or placeholder values simulating unlimited space if not set. .. note:: This matches the return value of shutil.disk_usage(). Args: path: The disk space is returned for the file system device where `path` resides. Defaults to the root path (e.g. '/' on Unix systems).
[ "Return", "the", "total", "used", "and", "free", "disk", "space", "in", "bytes", "as", "named", "tuple", "or", "placeholder", "values", "simulating", "unlimited", "space", "if", "not", "set", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1092-L1114
train
203,478
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.change_disk_usage
def change_disk_usage(self, usage_change, file_path, st_dev): """Change the used disk space by the given amount. Args: usage_change: Number of bytes added to the used space. If negative, the used space will be decreased. file_path: The path of the object needing the disk space. st_dev: The device ID for the respective file system. Raises: IOError: if usage_change exceeds the free file system space """ mount_point = self._mount_point_for_device(st_dev) if mount_point: total_size = mount_point['total_size'] if total_size is not None: if total_size - mount_point['used_size'] < usage_change: self.raise_io_error(errno.ENOSPC, file_path) mount_point['used_size'] += usage_change
python
def change_disk_usage(self, usage_change, file_path, st_dev): """Change the used disk space by the given amount. Args: usage_change: Number of bytes added to the used space. If negative, the used space will be decreased. file_path: The path of the object needing the disk space. st_dev: The device ID for the respective file system. Raises: IOError: if usage_change exceeds the free file system space """ mount_point = self._mount_point_for_device(st_dev) if mount_point: total_size = mount_point['total_size'] if total_size is not None: if total_size - mount_point['used_size'] < usage_change: self.raise_io_error(errno.ENOSPC, file_path) mount_point['used_size'] += usage_change
[ "def", "change_disk_usage", "(", "self", ",", "usage_change", ",", "file_path", ",", "st_dev", ")", ":", "mount_point", "=", "self", ".", "_mount_point_for_device", "(", "st_dev", ")", "if", "mount_point", ":", "total_size", "=", "mount_point", "[", "'total_size...
Change the used disk space by the given amount. Args: usage_change: Number of bytes added to the used space. If negative, the used space will be decreased. file_path: The path of the object needing the disk space. st_dev: The device ID for the respective file system. Raises: IOError: if usage_change exceeds the free file system space
[ "Change", "the", "used", "disk", "space", "by", "the", "given", "amount", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1138-L1158
train
203,479
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem._add_open_file
def _add_open_file(self, file_obj): """Add file_obj to the list of open files on the filesystem. Used internally to manage open files. The position in the open_files array is the file descriptor number. Args: file_obj: File object to be added to open files list. Returns: File descriptor number for the file object. """ if self._free_fd_heap: open_fd = heapq.heappop(self._free_fd_heap) self.open_files[open_fd] = [file_obj] return open_fd self.open_files.append([file_obj]) return len(self.open_files) - 1
python
def _add_open_file(self, file_obj): """Add file_obj to the list of open files on the filesystem. Used internally to manage open files. The position in the open_files array is the file descriptor number. Args: file_obj: File object to be added to open files list. Returns: File descriptor number for the file object. """ if self._free_fd_heap: open_fd = heapq.heappop(self._free_fd_heap) self.open_files[open_fd] = [file_obj] return open_fd self.open_files.append([file_obj]) return len(self.open_files) - 1
[ "def", "_add_open_file", "(", "self", ",", "file_obj", ")", ":", "if", "self", ".", "_free_fd_heap", ":", "open_fd", "=", "heapq", ".", "heappop", "(", "self", ".", "_free_fd_heap", ")", "self", ".", "open_files", "[", "open_fd", "]", "=", "[", "file_obj...
Add file_obj to the list of open files on the filesystem. Used internally to manage open files. The position in the open_files array is the file descriptor number. Args: file_obj: File object to be added to open files list. Returns: File descriptor number for the file object.
[ "Add", "file_obj", "to", "the", "list", "of", "open", "files", "on", "the", "filesystem", ".", "Used", "internally", "to", "manage", "open", "files", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1311-L1329
train
203,480
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem._close_open_file
def _close_open_file(self, file_des): """Remove file object with given descriptor from the list of open files. Sets the entry in open_files to None. Args: file_des: Descriptor of file object to be removed from open files list. """ self.open_files[file_des] = None heapq.heappush(self._free_fd_heap, file_des)
python
def _close_open_file(self, file_des): """Remove file object with given descriptor from the list of open files. Sets the entry in open_files to None. Args: file_des: Descriptor of file object to be removed from open files list. """ self.open_files[file_des] = None heapq.heappush(self._free_fd_heap, file_des)
[ "def", "_close_open_file", "(", "self", ",", "file_des", ")", ":", "self", ".", "open_files", "[", "file_des", "]", "=", "None", "heapq", ".", "heappush", "(", "self", ".", "_free_fd_heap", ",", "file_des", ")" ]
Remove file object with given descriptor from the list of open files. Sets the entry in open_files to None. Args: file_des: Descriptor of file object to be removed from open files list.
[ "Remove", "file", "object", "with", "given", "descriptor", "from", "the", "list", "of", "open", "files", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1331-L1342
train
203,481
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.get_open_file
def get_open_file(self, file_des): """Return an open file. Args: file_des: File descriptor of the open file. Raises: OSError: an invalid file descriptor. TypeError: filedes is not an integer. Returns: Open file object. """ if not is_int_type(file_des): raise TypeError('an integer is required') if (file_des >= len(self.open_files) or self.open_files[file_des] is None): self.raise_os_error(errno.EBADF, str(file_des)) return self.open_files[file_des][0]
python
def get_open_file(self, file_des): """Return an open file. Args: file_des: File descriptor of the open file. Raises: OSError: an invalid file descriptor. TypeError: filedes is not an integer. Returns: Open file object. """ if not is_int_type(file_des): raise TypeError('an integer is required') if (file_des >= len(self.open_files) or self.open_files[file_des] is None): self.raise_os_error(errno.EBADF, str(file_des)) return self.open_files[file_des][0]
[ "def", "get_open_file", "(", "self", ",", "file_des", ")", ":", "if", "not", "is_int_type", "(", "file_des", ")", ":", "raise", "TypeError", "(", "'an integer is required'", ")", "if", "(", "file_des", ">=", "len", "(", "self", ".", "open_files", ")", "or"...
Return an open file. Args: file_des: File descriptor of the open file. Raises: OSError: an invalid file descriptor. TypeError: filedes is not an integer. Returns: Open file object.
[ "Return", "an", "open", "file", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1344-L1362
train
203,482
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.has_open_file
def has_open_file(self, file_object): """Return True if the given file object is in the list of open files. Args: file_object: The FakeFile object to be checked. Returns: `True` if the file is open. """ return (file_object in [wrappers[0].get_object() for wrappers in self.open_files if wrappers])
python
def has_open_file(self, file_object): """Return True if the given file object is in the list of open files. Args: file_object: The FakeFile object to be checked. Returns: `True` if the file is open. """ return (file_object in [wrappers[0].get_object() for wrappers in self.open_files if wrappers])
[ "def", "has_open_file", "(", "self", ",", "file_object", ")", ":", "return", "(", "file_object", "in", "[", "wrappers", "[", "0", "]", ".", "get_object", "(", ")", "for", "wrappers", "in", "self", ".", "open_files", "if", "wrappers", "]", ")" ]
Return True if the given file object is in the list of open files. Args: file_object: The FakeFile object to be checked. Returns: `True` if the file is open.
[ "Return", "True", "if", "the", "given", "file", "object", "is", "in", "the", "list", "of", "open", "files", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1364-L1374
train
203,483
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.normpath
def normpath(self, path): """Mimic os.path.normpath using the specified path_separator. Mimics os.path.normpath using the path_separator that was specified for this FakeFilesystem. Normalizes the path, but unlike the method absnormpath, does not make it absolute. Eliminates dot components (. and ..) and combines repeated path separators (//). Initial .. components are left in place for relative paths. If the result is an empty path, '.' is returned instead. This also replaces alternative path separator with path separator. That is, it behaves like the real os.path.normpath on Windows if initialized with '\\' as path separator and '/' as alternative separator. Args: path: (str) The path to normalize. Returns: (str) A copy of path with empty components and dot components removed. """ path = self.normcase(path) drive, path = self.splitdrive(path) sep = self._path_separator(path) is_absolute_path = path.startswith(sep) path_components = path.split(sep) collapsed_path_components = [] dot = self._matching_string(path, '.') dotdot = self._matching_string(path, '..') for component in path_components: if (not component) or (component == dot): continue if component == dotdot: if collapsed_path_components and ( collapsed_path_components[-1] != dotdot): # Remove an up-reference: directory/.. collapsed_path_components.pop() continue elif is_absolute_path: # Ignore leading .. components if starting from the # root directory. continue collapsed_path_components.append(component) collapsed_path = sep.join(collapsed_path_components) if is_absolute_path: collapsed_path = sep + collapsed_path return drive + collapsed_path or dot
python
def normpath(self, path): """Mimic os.path.normpath using the specified path_separator. Mimics os.path.normpath using the path_separator that was specified for this FakeFilesystem. Normalizes the path, but unlike the method absnormpath, does not make it absolute. Eliminates dot components (. and ..) and combines repeated path separators (//). Initial .. components are left in place for relative paths. If the result is an empty path, '.' is returned instead. This also replaces alternative path separator with path separator. That is, it behaves like the real os.path.normpath on Windows if initialized with '\\' as path separator and '/' as alternative separator. Args: path: (str) The path to normalize. Returns: (str) A copy of path with empty components and dot components removed. """ path = self.normcase(path) drive, path = self.splitdrive(path) sep = self._path_separator(path) is_absolute_path = path.startswith(sep) path_components = path.split(sep) collapsed_path_components = [] dot = self._matching_string(path, '.') dotdot = self._matching_string(path, '..') for component in path_components: if (not component) or (component == dot): continue if component == dotdot: if collapsed_path_components and ( collapsed_path_components[-1] != dotdot): # Remove an up-reference: directory/.. collapsed_path_components.pop() continue elif is_absolute_path: # Ignore leading .. components if starting from the # root directory. continue collapsed_path_components.append(component) collapsed_path = sep.join(collapsed_path_components) if is_absolute_path: collapsed_path = sep + collapsed_path return drive + collapsed_path or dot
[ "def", "normpath", "(", "self", ",", "path", ")", ":", "path", "=", "self", ".", "normcase", "(", "path", ")", "drive", ",", "path", "=", "self", ".", "splitdrive", "(", "path", ")", "sep", "=", "self", ".", "_path_separator", "(", "path", ")", "is...
Mimic os.path.normpath using the specified path_separator. Mimics os.path.normpath using the path_separator that was specified for this FakeFilesystem. Normalizes the path, but unlike the method absnormpath, does not make it absolute. Eliminates dot components (. and ..) and combines repeated path separators (//). Initial .. components are left in place for relative paths. If the result is an empty path, '.' is returned instead. This also replaces alternative path separator with path separator. That is, it behaves like the real os.path.normpath on Windows if initialized with '\\' as path separator and '/' as alternative separator. Args: path: (str) The path to normalize. Returns: (str) A copy of path with empty components and dot components removed.
[ "Mimic", "os", ".", "path", ".", "normpath", "using", "the", "specified", "path_separator", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1397-L1444
train
203,484
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem._original_path
def _original_path(self, path): """Return a normalized case version of the given path for case-insensitive file systems. For case-sensitive file systems, return path unchanged. Args: path: the file path to be transformed Returns: A version of path matching the case of existing path elements. """ def components_to_path(): if len(path_components) > len(normalized_components): normalized_components.extend( path_components[len(normalized_components):]) sep = self._path_separator(path) normalized_path = sep.join(normalized_components) if path.startswith(sep) and not normalized_path.startswith(sep): normalized_path = sep + normalized_path return normalized_path if self.is_case_sensitive or not path: return path path_components = self._path_components(path) normalized_components = [] current_dir = self.root for component in path_components: if not isinstance(current_dir, FakeDirectory): return components_to_path() dir_name, current_dir = self._directory_content( current_dir, component) if current_dir is None or ( isinstance(current_dir, FakeDirectory) and current_dir._byte_contents is None and current_dir.st_size == 0): return components_to_path() normalized_components.append(dir_name) return components_to_path()
python
def _original_path(self, path): """Return a normalized case version of the given path for case-insensitive file systems. For case-sensitive file systems, return path unchanged. Args: path: the file path to be transformed Returns: A version of path matching the case of existing path elements. """ def components_to_path(): if len(path_components) > len(normalized_components): normalized_components.extend( path_components[len(normalized_components):]) sep = self._path_separator(path) normalized_path = sep.join(normalized_components) if path.startswith(sep) and not normalized_path.startswith(sep): normalized_path = sep + normalized_path return normalized_path if self.is_case_sensitive or not path: return path path_components = self._path_components(path) normalized_components = [] current_dir = self.root for component in path_components: if not isinstance(current_dir, FakeDirectory): return components_to_path() dir_name, current_dir = self._directory_content( current_dir, component) if current_dir is None or ( isinstance(current_dir, FakeDirectory) and current_dir._byte_contents is None and current_dir.st_size == 0): return components_to_path() normalized_components.append(dir_name) return components_to_path()
[ "def", "_original_path", "(", "self", ",", "path", ")", ":", "def", "components_to_path", "(", ")", ":", "if", "len", "(", "path_components", ")", ">", "len", "(", "normalized_components", ")", ":", "normalized_components", ".", "extend", "(", "path_components...
Return a normalized case version of the given path for case-insensitive file systems. For case-sensitive file systems, return path unchanged. Args: path: the file path to be transformed Returns: A version of path matching the case of existing path elements.
[ "Return", "a", "normalized", "case", "version", "of", "the", "given", "path", "for", "case", "-", "insensitive", "file", "systems", ".", "For", "case", "-", "sensitive", "file", "systems", "return", "path", "unchanged", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1446-L1484
train
203,485
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.absnormpath
def absnormpath(self, path): """Absolutize and minimalize the given path. Forces all relative paths to be absolute, and normalizes the path to eliminate dot and empty components. Args: path: Path to normalize. Returns: The normalized path relative to the current working directory, or the root directory if path is empty. """ path = self.normcase(path) cwd = self._matching_string(path, self.cwd) if not path: path = self.path_separator elif not self._starts_with_root_path(path): # Prefix relative paths with cwd, if cwd is not root. root_name = self._matching_string(path, self.root.name) empty = self._matching_string(path, '') path = self._path_separator(path).join( (cwd != root_name and cwd or empty, path)) if path == self._matching_string(path, '.'): path = cwd return self.normpath(path)
python
def absnormpath(self, path): """Absolutize and minimalize the given path. Forces all relative paths to be absolute, and normalizes the path to eliminate dot and empty components. Args: path: Path to normalize. Returns: The normalized path relative to the current working directory, or the root directory if path is empty. """ path = self.normcase(path) cwd = self._matching_string(path, self.cwd) if not path: path = self.path_separator elif not self._starts_with_root_path(path): # Prefix relative paths with cwd, if cwd is not root. root_name = self._matching_string(path, self.root.name) empty = self._matching_string(path, '') path = self._path_separator(path).join( (cwd != root_name and cwd or empty, path)) if path == self._matching_string(path, '.'): path = cwd return self.normpath(path)
[ "def", "absnormpath", "(", "self", ",", "path", ")", ":", "path", "=", "self", ".", "normcase", "(", "path", ")", "cwd", "=", "self", ".", "_matching_string", "(", "path", ",", "self", ".", "cwd", ")", "if", "not", "path", ":", "path", "=", "self",...
Absolutize and minimalize the given path. Forces all relative paths to be absolute, and normalizes the path to eliminate dot and empty components. Args: path: Path to normalize. Returns: The normalized path relative to the current working directory, or the root directory if path is empty.
[ "Absolutize", "and", "minimalize", "the", "given", "path", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1486-L1511
train
203,486
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.splitpath
def splitpath(self, path): """Mimic os.path.splitpath using the specified path_separator. Mimics os.path.splitpath using the path_separator that was specified for this FakeFilesystem. Args: path: (str) The path to split. Returns: (str) A duple (pathname, basename) for which pathname does not end with a slash, and basename does not contain a slash. """ path = self.normcase(path) sep = self._path_separator(path) path_components = path.split(sep) if not path_components: return ('', '') starts_with_drive = self._starts_with_drive_letter(path) basename = path_components.pop() colon = self._matching_string(path, ':') if not path_components: if starts_with_drive: components = basename.split(colon) return (components[0] + colon, components[1]) return ('', basename) for component in path_components: if component: # The path is not the root; it contains a non-separator # component. Strip all trailing separators. while not path_components[-1]: path_components.pop() if starts_with_drive: if not path_components: components = basename.split(colon) return (components[0] + colon, components[1]) if (len(path_components) == 1 and path_components[0].endswith(colon)): return (path_components[0] + sep, basename) return (sep.join(path_components), basename) # Root path. Collapse all leading separators. return (sep, basename)
python
def splitpath(self, path): """Mimic os.path.splitpath using the specified path_separator. Mimics os.path.splitpath using the path_separator that was specified for this FakeFilesystem. Args: path: (str) The path to split. Returns: (str) A duple (pathname, basename) for which pathname does not end with a slash, and basename does not contain a slash. """ path = self.normcase(path) sep = self._path_separator(path) path_components = path.split(sep) if not path_components: return ('', '') starts_with_drive = self._starts_with_drive_letter(path) basename = path_components.pop() colon = self._matching_string(path, ':') if not path_components: if starts_with_drive: components = basename.split(colon) return (components[0] + colon, components[1]) return ('', basename) for component in path_components: if component: # The path is not the root; it contains a non-separator # component. Strip all trailing separators. while not path_components[-1]: path_components.pop() if starts_with_drive: if not path_components: components = basename.split(colon) return (components[0] + colon, components[1]) if (len(path_components) == 1 and path_components[0].endswith(colon)): return (path_components[0] + sep, basename) return (sep.join(path_components), basename) # Root path. Collapse all leading separators. return (sep, basename)
[ "def", "splitpath", "(", "self", ",", "path", ")", ":", "path", "=", "self", ".", "normcase", "(", "path", ")", "sep", "=", "self", ".", "_path_separator", "(", "path", ")", "path_components", "=", "path", ".", "split", "(", "sep", ")", "if", "not", ...
Mimic os.path.splitpath using the specified path_separator. Mimics os.path.splitpath using the path_separator that was specified for this FakeFilesystem. Args: path: (str) The path to split. Returns: (str) A duple (pathname, basename) for which pathname does not end with a slash, and basename does not contain a slash.
[ "Mimic", "os", ".", "path", ".", "splitpath", "using", "the", "specified", "path_separator", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1513-L1555
train
203,487
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.splitdrive
def splitdrive(self, path): """Splits the path into the drive part and the rest of the path. Taken from Windows specific implementation in Python 3.5 and slightly adapted. Args: path: the full path to be splitpath. Returns: A tuple of the drive part and the rest of the path, or of an empty string and the full path if drive letters are not supported or no drive is present. """ path = make_string_path(path) if self.is_windows_fs: if len(path) >= 2: path = self.normcase(path) sep = self._path_separator(path) # UNC path handling is here since Python 2.7.8, # back-ported from Python 3 if sys.version_info >= (2, 7, 8): if (path[0:2] == sep * 2) and ( path[2:3] != sep): # UNC path handling - splits off the mount point # instead of the drive sep_index = path.find(sep, 2) if sep_index == -1: return path[:0], path sep_index2 = path.find(sep, sep_index + 1) if sep_index2 == sep_index + 1: return path[:0], path if sep_index2 == -1: sep_index2 = len(path) return path[:sep_index2], path[sep_index2:] if path[1:2] == self._matching_string(path, ':'): return path[:2], path[2:] return path[:0], path
python
def splitdrive(self, path): """Splits the path into the drive part and the rest of the path. Taken from Windows specific implementation in Python 3.5 and slightly adapted. Args: path: the full path to be splitpath. Returns: A tuple of the drive part and the rest of the path, or of an empty string and the full path if drive letters are not supported or no drive is present. """ path = make_string_path(path) if self.is_windows_fs: if len(path) >= 2: path = self.normcase(path) sep = self._path_separator(path) # UNC path handling is here since Python 2.7.8, # back-ported from Python 3 if sys.version_info >= (2, 7, 8): if (path[0:2] == sep * 2) and ( path[2:3] != sep): # UNC path handling - splits off the mount point # instead of the drive sep_index = path.find(sep, 2) if sep_index == -1: return path[:0], path sep_index2 = path.find(sep, sep_index + 1) if sep_index2 == sep_index + 1: return path[:0], path if sep_index2 == -1: sep_index2 = len(path) return path[:sep_index2], path[sep_index2:] if path[1:2] == self._matching_string(path, ':'): return path[:2], path[2:] return path[:0], path
[ "def", "splitdrive", "(", "self", ",", "path", ")", ":", "path", "=", "make_string_path", "(", "path", ")", "if", "self", ".", "is_windows_fs", ":", "if", "len", "(", "path", ")", ">=", "2", ":", "path", "=", "self", ".", "normcase", "(", "path", "...
Splits the path into the drive part and the rest of the path. Taken from Windows specific implementation in Python 3.5 and slightly adapted. Args: path: the full path to be splitpath. Returns: A tuple of the drive part and the rest of the path, or of an empty string and the full path if drive letters are not supported or no drive is present.
[ "Splits", "the", "path", "into", "the", "drive", "part", "and", "the", "rest", "of", "the", "path", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1557-L1594
train
203,488
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.joinpaths
def joinpaths(self, *paths): """Mimic os.path.join using the specified path_separator. Args: *paths: (str) Zero or more paths to join. Returns: (str) The paths joined by the path separator, starting with the last absolute path in paths. """ if sys.version_info >= (3, 6): paths = [os.fspath(path) for path in paths] if len(paths) == 1: return paths[0] if self.is_windows_fs: return self._join_paths_with_drive_support(*paths) joined_path_segments = [] sep = self._path_separator(paths[0]) for path_segment in paths: if self._starts_with_root_path(path_segment): # An absolute path joined_path_segments = [path_segment] else: if (joined_path_segments and not joined_path_segments[-1].endswith(sep)): joined_path_segments.append(sep) if path_segment: joined_path_segments.append(path_segment) return self._matching_string(paths[0], '').join(joined_path_segments)
python
def joinpaths(self, *paths): """Mimic os.path.join using the specified path_separator. Args: *paths: (str) Zero or more paths to join. Returns: (str) The paths joined by the path separator, starting with the last absolute path in paths. """ if sys.version_info >= (3, 6): paths = [os.fspath(path) for path in paths] if len(paths) == 1: return paths[0] if self.is_windows_fs: return self._join_paths_with_drive_support(*paths) joined_path_segments = [] sep = self._path_separator(paths[0]) for path_segment in paths: if self._starts_with_root_path(path_segment): # An absolute path joined_path_segments = [path_segment] else: if (joined_path_segments and not joined_path_segments[-1].endswith(sep)): joined_path_segments.append(sep) if path_segment: joined_path_segments.append(path_segment) return self._matching_string(paths[0], '').join(joined_path_segments)
[ "def", "joinpaths", "(", "self", ",", "*", "paths", ")", ":", "if", "sys", ".", "version_info", ">=", "(", "3", ",", "6", ")", ":", "paths", "=", "[", "os", ".", "fspath", "(", "path", ")", "for", "path", "in", "paths", "]", "if", "len", "(", ...
Mimic os.path.join using the specified path_separator. Args: *paths: (str) Zero or more paths to join. Returns: (str) The paths joined by the path separator, starting with the last absolute path in paths.
[ "Mimic", "os", ".", "path", ".", "join", "using", "the", "specified", "path_separator", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1632-L1660
train
203,489
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem._path_components
def _path_components(self, path): """Breaks the path into a list of component names. Does not include the root directory as a component, as all paths are considered relative to the root directory for the FakeFilesystem. Callers should basically follow this pattern: .. code:: python file_path = self.absnormpath(file_path) path_components = self._path_components(file_path) current_dir = self.root for component in path_components: if component not in current_dir.contents: raise IOError _do_stuff_with_component(current_dir, component) current_dir = current_dir.get_entry(component) Args: path: Path to tokenize. Returns: The list of names split from path. """ if not path or path == self._path_separator(path): return [] drive, path = self.splitdrive(path) path_components = path.split(self._path_separator(path)) assert drive or path_components if not path_components[0]: if len(path_components) > 1 and not path_components[1]: path_components = [] else: # This is an absolute path. path_components = path_components[1:] if drive: path_components.insert(0, drive) return path_components
python
def _path_components(self, path): """Breaks the path into a list of component names. Does not include the root directory as a component, as all paths are considered relative to the root directory for the FakeFilesystem. Callers should basically follow this pattern: .. code:: python file_path = self.absnormpath(file_path) path_components = self._path_components(file_path) current_dir = self.root for component in path_components: if component not in current_dir.contents: raise IOError _do_stuff_with_component(current_dir, component) current_dir = current_dir.get_entry(component) Args: path: Path to tokenize. Returns: The list of names split from path. """ if not path or path == self._path_separator(path): return [] drive, path = self.splitdrive(path) path_components = path.split(self._path_separator(path)) assert drive or path_components if not path_components[0]: if len(path_components) > 1 and not path_components[1]: path_components = [] else: # This is an absolute path. path_components = path_components[1:] if drive: path_components.insert(0, drive) return path_components
[ "def", "_path_components", "(", "self", ",", "path", ")", ":", "if", "not", "path", "or", "path", "==", "self", ".", "_path_separator", "(", "path", ")", ":", "return", "[", "]", "drive", ",", "path", "=", "self", ".", "splitdrive", "(", "path", ")",...
Breaks the path into a list of component names. Does not include the root directory as a component, as all paths are considered relative to the root directory for the FakeFilesystem. Callers should basically follow this pattern: .. code:: python file_path = self.absnormpath(file_path) path_components = self._path_components(file_path) current_dir = self.root for component in path_components: if component not in current_dir.contents: raise IOError _do_stuff_with_component(current_dir, component) current_dir = current_dir.get_entry(component) Args: path: Path to tokenize. Returns: The list of names split from path.
[ "Breaks", "the", "path", "into", "a", "list", "of", "component", "names", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1662-L1699
train
203,490
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem._starts_with_drive_letter
def _starts_with_drive_letter(self, file_path): """Return True if file_path starts with a drive letter. Args: file_path: the full path to be examined. Returns: `True` if drive letter support is enabled in the filesystem and the path starts with a drive letter. """ colon = self._matching_string(file_path, ':') return (self.is_windows_fs and len(file_path) >= 2 and file_path[:1].isalpha and (file_path[1:2]) == colon)
python
def _starts_with_drive_letter(self, file_path): """Return True if file_path starts with a drive letter. Args: file_path: the full path to be examined. Returns: `True` if drive letter support is enabled in the filesystem and the path starts with a drive letter. """ colon = self._matching_string(file_path, ':') return (self.is_windows_fs and len(file_path) >= 2 and file_path[:1].isalpha and (file_path[1:2]) == colon)
[ "def", "_starts_with_drive_letter", "(", "self", ",", "file_path", ")", ":", "colon", "=", "self", ".", "_matching_string", "(", "file_path", ",", "':'", ")", "return", "(", "self", ".", "is_windows_fs", "and", "len", "(", "file_path", ")", ">=", "2", "and...
Return True if file_path starts with a drive letter. Args: file_path: the full path to be examined. Returns: `True` if drive letter support is enabled in the filesystem and the path starts with a drive letter.
[ "Return", "True", "if", "file_path", "starts", "with", "a", "drive", "letter", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1701-L1713
train
203,491
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.ends_with_path_separator
def ends_with_path_separator(self, file_path): """Return True if ``file_path`` ends with a valid path separator.""" if is_int_type(file_path): return False file_path = make_string_path(file_path) return (file_path and file_path not in (self.path_separator, self.alternative_path_separator) and (file_path.endswith(self._path_separator(file_path)) or self.alternative_path_separator is not None and file_path.endswith( self._alternative_path_separator(file_path))))
python
def ends_with_path_separator(self, file_path): """Return True if ``file_path`` ends with a valid path separator.""" if is_int_type(file_path): return False file_path = make_string_path(file_path) return (file_path and file_path not in (self.path_separator, self.alternative_path_separator) and (file_path.endswith(self._path_separator(file_path)) or self.alternative_path_separator is not None and file_path.endswith( self._alternative_path_separator(file_path))))
[ "def", "ends_with_path_separator", "(", "self", ",", "file_path", ")", ":", "if", "is_int_type", "(", "file_path", ")", ":", "return", "False", "file_path", "=", "make_string_path", "(", "file_path", ")", "return", "(", "file_path", "and", "file_path", "not", ...
Return True if ``file_path`` ends with a valid path separator.
[ "Return", "True", "if", "file_path", "ends", "with", "a", "valid", "path", "separator", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1730-L1741
train
203,492
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.exists
def exists(self, file_path, check_link=False): """Return true if a path points to an existing file system object. Args: file_path: The path to examine. Returns: (bool) True if the corresponding object exists. Raises: TypeError: if file_path is None. """ if check_link and self.islink(file_path): return True file_path = make_string_path(file_path) if file_path is None: raise TypeError if not file_path: return False if file_path == self.dev_null.name: return not self.is_windows_fs try: if self.is_filepath_ending_with_separator(file_path): return False file_path = self.resolve_path(file_path) except (IOError, OSError): return False if file_path == self.root.name: return True path_components = self._path_components(file_path) current_dir = self.root for component in path_components: current_dir = self._directory_content(current_dir, component)[1] if not current_dir: return False return True
python
def exists(self, file_path, check_link=False): """Return true if a path points to an existing file system object. Args: file_path: The path to examine. Returns: (bool) True if the corresponding object exists. Raises: TypeError: if file_path is None. """ if check_link and self.islink(file_path): return True file_path = make_string_path(file_path) if file_path is None: raise TypeError if not file_path: return False if file_path == self.dev_null.name: return not self.is_windows_fs try: if self.is_filepath_ending_with_separator(file_path): return False file_path = self.resolve_path(file_path) except (IOError, OSError): return False if file_path == self.root.name: return True path_components = self._path_components(file_path) current_dir = self.root for component in path_components: current_dir = self._directory_content(current_dir, component)[1] if not current_dir: return False return True
[ "def", "exists", "(", "self", ",", "file_path", ",", "check_link", "=", "False", ")", ":", "if", "check_link", "and", "self", ".", "islink", "(", "file_path", ")", ":", "return", "True", "file_path", "=", "make_string_path", "(", "file_path", ")", "if", ...
Return true if a path points to an existing file system object. Args: file_path: The path to examine. Returns: (bool) True if the corresponding object exists. Raises: TypeError: if file_path is None.
[ "Return", "true", "if", "a", "path", "points", "to", "an", "existing", "file", "system", "object", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1762-L1798
train
203,493
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.resolve_path
def resolve_path(self, file_path, allow_fd=False, raw_io=True): """Follow a path, resolving symlinks. ResolvePath traverses the filesystem along the specified file path, resolving file names and symbolic links until all elements of the path are exhausted, or we reach a file which does not exist. If all the elements are not consumed, they just get appended to the path resolved so far. This gives us the path which is as resolved as it can be, even if the file does not exist. This behavior mimics Unix semantics, and is best shown by example. Given a file system that looks like this: /a/b/ /a/b/c -> /a/b2 c is a symlink to /a/b2 /a/b2/x /a/c -> ../d /a/x -> y Then: /a/b/x => /a/b/x /a/c => /a/d /a/x => /a/y /a/b/c/d/e => /a/b2/d/e Args: file_path: The path to examine. allow_fd: If `True`, `file_path` may be open file descriptor. raw_io: `True` if called from low-level I/O functions. Returns: The resolved_path (string) or None. Raises: TypeError: if `file_path` is `None`. IOError: if `file_path` is '' or a part of the path doesn't exist. """ if (allow_fd and sys.version_info >= (3, 3) and isinstance(file_path, int)): return self.get_open_file(file_path).get_object().path file_path = make_string_path(file_path) if file_path is None: # file.open(None) raises TypeError, so mimic that. raise TypeError('Expected file system path string, received None') file_path = self._to_string(file_path) if not file_path or not self._valid_relative_path(file_path): # file.open('') raises IOError, so mimic that, and validate that # all parts of a relative path exist. self.raise_io_error(errno.ENOENT, file_path) file_path = self.absnormpath(self._original_path(file_path)) if self._is_root_path(file_path): return file_path if file_path == self.dev_null.name: return file_path path_components = self._path_components(file_path) resolved_components = self._resolve_components(path_components, raw_io) return self._components_to_path(resolved_components)
python
def resolve_path(self, file_path, allow_fd=False, raw_io=True): """Follow a path, resolving symlinks. ResolvePath traverses the filesystem along the specified file path, resolving file names and symbolic links until all elements of the path are exhausted, or we reach a file which does not exist. If all the elements are not consumed, they just get appended to the path resolved so far. This gives us the path which is as resolved as it can be, even if the file does not exist. This behavior mimics Unix semantics, and is best shown by example. Given a file system that looks like this: /a/b/ /a/b/c -> /a/b2 c is a symlink to /a/b2 /a/b2/x /a/c -> ../d /a/x -> y Then: /a/b/x => /a/b/x /a/c => /a/d /a/x => /a/y /a/b/c/d/e => /a/b2/d/e Args: file_path: The path to examine. allow_fd: If `True`, `file_path` may be open file descriptor. raw_io: `True` if called from low-level I/O functions. Returns: The resolved_path (string) or None. Raises: TypeError: if `file_path` is `None`. IOError: if `file_path` is '' or a part of the path doesn't exist. """ if (allow_fd and sys.version_info >= (3, 3) and isinstance(file_path, int)): return self.get_open_file(file_path).get_object().path file_path = make_string_path(file_path) if file_path is None: # file.open(None) raises TypeError, so mimic that. raise TypeError('Expected file system path string, received None') file_path = self._to_string(file_path) if not file_path or not self._valid_relative_path(file_path): # file.open('') raises IOError, so mimic that, and validate that # all parts of a relative path exist. self.raise_io_error(errno.ENOENT, file_path) file_path = self.absnormpath(self._original_path(file_path)) if self._is_root_path(file_path): return file_path if file_path == self.dev_null.name: return file_path path_components = self._path_components(file_path) resolved_components = self._resolve_components(path_components, raw_io) return self._components_to_path(resolved_components)
[ "def", "resolve_path", "(", "self", ",", "file_path", ",", "allow_fd", "=", "False", ",", "raw_io", "=", "True", ")", ":", "if", "(", "allow_fd", "and", "sys", ".", "version_info", ">=", "(", "3", ",", "3", ")", "and", "isinstance", "(", "file_path", ...
Follow a path, resolving symlinks. ResolvePath traverses the filesystem along the specified file path, resolving file names and symbolic links until all elements of the path are exhausted, or we reach a file which does not exist. If all the elements are not consumed, they just get appended to the path resolved so far. This gives us the path which is as resolved as it can be, even if the file does not exist. This behavior mimics Unix semantics, and is best shown by example. Given a file system that looks like this: /a/b/ /a/b/c -> /a/b2 c is a symlink to /a/b2 /a/b2/x /a/c -> ../d /a/x -> y Then: /a/b/x => /a/b/x /a/c => /a/d /a/x => /a/y /a/b/c/d/e => /a/b2/d/e Args: file_path: The path to examine. allow_fd: If `True`, `file_path` may be open file descriptor. raw_io: `True` if called from low-level I/O functions. Returns: The resolved_path (string) or None. Raises: TypeError: if `file_path` is `None`. IOError: if `file_path` is '' or a part of the path doesn't exist.
[ "Follow", "a", "path", "resolving", "symlinks", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1806-L1864
train
203,494
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem._follow_link
def _follow_link(self, link_path_components, link): """Follow a link w.r.t. a path resolved so far. The component is either a real file, which is a no-op, or a symlink. In the case of a symlink, we have to modify the path as built up so far /a/b => ../c should yield /a/../c (which will normalize to /a/c) /a/b => x should yield /a/x /a/b => /x/y/z should yield /x/y/z The modified path may land us in a new spot which is itself a link, so we may repeat the process. Args: link_path_components: The resolved path built up to the link so far. link: The link object itself. Returns: (string) The updated path resolved after following the link. Raises: IOError: if there are too many levels of symbolic link """ link_path = link.contents sep = self._path_separator(link_path) # For links to absolute paths, we want to throw out everything # in the path built so far and replace with the link. For relative # links, we have to append the link to what we have so far, if not self._starts_with_root_path(link_path): # Relative path. Append remainder of path to what we have # processed so far, excluding the name of the link itself. # /a/b => ../c should yield /a/../c # (which will normalize to /c) # /a/b => d should yield a/d components = link_path_components[:-1] components.append(link_path) link_path = sep.join(components) # Don't call self.NormalizePath(), as we don't want to prepend # self.cwd. return self.normpath(link_path)
python
def _follow_link(self, link_path_components, link): """Follow a link w.r.t. a path resolved so far. The component is either a real file, which is a no-op, or a symlink. In the case of a symlink, we have to modify the path as built up so far /a/b => ../c should yield /a/../c (which will normalize to /a/c) /a/b => x should yield /a/x /a/b => /x/y/z should yield /x/y/z The modified path may land us in a new spot which is itself a link, so we may repeat the process. Args: link_path_components: The resolved path built up to the link so far. link: The link object itself. Returns: (string) The updated path resolved after following the link. Raises: IOError: if there are too many levels of symbolic link """ link_path = link.contents sep = self._path_separator(link_path) # For links to absolute paths, we want to throw out everything # in the path built so far and replace with the link. For relative # links, we have to append the link to what we have so far, if not self._starts_with_root_path(link_path): # Relative path. Append remainder of path to what we have # processed so far, excluding the name of the link itself. # /a/b => ../c should yield /a/../c # (which will normalize to /c) # /a/b => d should yield a/d components = link_path_components[:-1] components.append(link_path) link_path = sep.join(components) # Don't call self.NormalizePath(), as we don't want to prepend # self.cwd. return self.normpath(link_path)
[ "def", "_follow_link", "(", "self", ",", "link_path_components", ",", "link", ")", ":", "link_path", "=", "link", ".", "contents", "sep", "=", "self", ".", "_path_separator", "(", "link_path", ")", "# For links to absolute paths, we want to throw out everything", "# i...
Follow a link w.r.t. a path resolved so far. The component is either a real file, which is a no-op, or a symlink. In the case of a symlink, we have to modify the path as built up so far /a/b => ../c should yield /a/../c (which will normalize to /a/c) /a/b => x should yield /a/x /a/b => /x/y/z should yield /x/y/z The modified path may land us in a new spot which is itself a link, so we may repeat the process. Args: link_path_components: The resolved path built up to the link so far. link: The link object itself. Returns: (string) The updated path resolved after following the link. Raises: IOError: if there are too many levels of symbolic link
[ "Follow", "a", "link", "w", ".", "r", ".", "t", ".", "a", "path", "resolved", "so", "far", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L1924-L1963
train
203,495
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.resolve
def resolve(self, file_path, follow_symlinks=True, allow_fd=False): """Search for the specified filesystem object, resolving all links. Args: file_path: Specifies the target FakeFile object to retrieve. follow_symlinks: If `False`, the link itself is resolved, otherwise the object linked to. allow_fd: If `True`, `file_path` may be an open file descriptor Returns: The FakeFile object corresponding to `file_path`. Raises: IOError: if the object is not found. """ if isinstance(file_path, int): if allow_fd and sys.version_info >= (3, 3): return self.get_open_file(file_path).get_object() raise TypeError('path should be string, bytes or ' 'os.PathLike (if supported), not int') if follow_symlinks: file_path = make_string_path(file_path) return self.get_object_from_normpath(self.resolve_path(file_path)) return self.lresolve(file_path)
python
def resolve(self, file_path, follow_symlinks=True, allow_fd=False): """Search for the specified filesystem object, resolving all links. Args: file_path: Specifies the target FakeFile object to retrieve. follow_symlinks: If `False`, the link itself is resolved, otherwise the object linked to. allow_fd: If `True`, `file_path` may be an open file descriptor Returns: The FakeFile object corresponding to `file_path`. Raises: IOError: if the object is not found. """ if isinstance(file_path, int): if allow_fd and sys.version_info >= (3, 3): return self.get_open_file(file_path).get_object() raise TypeError('path should be string, bytes or ' 'os.PathLike (if supported), not int') if follow_symlinks: file_path = make_string_path(file_path) return self.get_object_from_normpath(self.resolve_path(file_path)) return self.lresolve(file_path)
[ "def", "resolve", "(", "self", ",", "file_path", ",", "follow_symlinks", "=", "True", ",", "allow_fd", "=", "False", ")", ":", "if", "isinstance", "(", "file_path", ",", "int", ")", ":", "if", "allow_fd", "and", "sys", ".", "version_info", ">=", "(", "...
Search for the specified filesystem object, resolving all links. Args: file_path: Specifies the target FakeFile object to retrieve. follow_symlinks: If `False`, the link itself is resolved, otherwise the object linked to. allow_fd: If `True`, `file_path` may be an open file descriptor Returns: The FakeFile object corresponding to `file_path`. Raises: IOError: if the object is not found.
[ "Search", "for", "the", "specified", "filesystem", "object", "resolving", "all", "links", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L2018-L2042
train
203,496
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.lresolve
def lresolve(self, path): """Search for the specified object, resolving only parent links. This is analogous to the stat/lstat difference. This resolves links *to* the object but not of the final object itself. Args: path: Specifies target FakeFile object to retrieve. Returns: The FakeFile object corresponding to path. Raises: IOError: if the object is not found. """ path = make_string_path(path) if path == self.root.name: # The root directory will never be a link return self.root # remove trailing separator path = self._path_without_trailing_separators(path) path = self._original_path(path) parent_directory, child_name = self.splitpath(path) if not parent_directory: parent_directory = self.cwd try: parent_obj = self.resolve(parent_directory) assert parent_obj if not isinstance(parent_obj, FakeDirectory): if not self.is_windows_fs and isinstance(parent_obj, FakeFile): self.raise_io_error(errno.ENOTDIR, path) self.raise_io_error(errno.ENOENT, path) return parent_obj.get_entry(child_name) except KeyError: self.raise_io_error(errno.ENOENT, path)
python
def lresolve(self, path): """Search for the specified object, resolving only parent links. This is analogous to the stat/lstat difference. This resolves links *to* the object but not of the final object itself. Args: path: Specifies target FakeFile object to retrieve. Returns: The FakeFile object corresponding to path. Raises: IOError: if the object is not found. """ path = make_string_path(path) if path == self.root.name: # The root directory will never be a link return self.root # remove trailing separator path = self._path_without_trailing_separators(path) path = self._original_path(path) parent_directory, child_name = self.splitpath(path) if not parent_directory: parent_directory = self.cwd try: parent_obj = self.resolve(parent_directory) assert parent_obj if not isinstance(parent_obj, FakeDirectory): if not self.is_windows_fs and isinstance(parent_obj, FakeFile): self.raise_io_error(errno.ENOTDIR, path) self.raise_io_error(errno.ENOENT, path) return parent_obj.get_entry(child_name) except KeyError: self.raise_io_error(errno.ENOENT, path)
[ "def", "lresolve", "(", "self", ",", "path", ")", ":", "path", "=", "make_string_path", "(", "path", ")", "if", "path", "==", "self", ".", "root", ".", "name", ":", "# The root directory will never be a link", "return", "self", ".", "root", "# remove trailing ...
Search for the specified object, resolving only parent links. This is analogous to the stat/lstat difference. This resolves links *to* the object but not of the final object itself. Args: path: Specifies target FakeFile object to retrieve. Returns: The FakeFile object corresponding to path. Raises: IOError: if the object is not found.
[ "Search", "for", "the", "specified", "object", "resolving", "only", "parent", "links", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L2044-L2080
train
203,497
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.add_object
def add_object(self, file_path, file_object, error_fct=None): """Add a fake file or directory into the filesystem at file_path. Args: file_path: The path to the file to be added relative to self. file_object: File or directory to add. error_class: The error class to be thrown if file_path does not correspond to a directory (used internally( Raises: IOError or OSError: if file_path does not correspond to a directory. """ error_fct = error_fct or self.raise_os_error if not file_path: target_directory = self.root else: target_directory = self.resolve(file_path) if not S_ISDIR(target_directory.st_mode): error = errno.ENOENT if self.is_windows_fs else errno.ENOTDIR error_fct(error, file_path) target_directory.add_entry(file_object)
python
def add_object(self, file_path, file_object, error_fct=None): """Add a fake file or directory into the filesystem at file_path. Args: file_path: The path to the file to be added relative to self. file_object: File or directory to add. error_class: The error class to be thrown if file_path does not correspond to a directory (used internally( Raises: IOError or OSError: if file_path does not correspond to a directory. """ error_fct = error_fct or self.raise_os_error if not file_path: target_directory = self.root else: target_directory = self.resolve(file_path) if not S_ISDIR(target_directory.st_mode): error = errno.ENOENT if self.is_windows_fs else errno.ENOTDIR error_fct(error, file_path) target_directory.add_entry(file_object)
[ "def", "add_object", "(", "self", ",", "file_path", ",", "file_object", ",", "error_fct", "=", "None", ")", ":", "error_fct", "=", "error_fct", "or", "self", ".", "raise_os_error", "if", "not", "file_path", ":", "target_directory", "=", "self", ".", "root", ...
Add a fake file or directory into the filesystem at file_path. Args: file_path: The path to the file to be added relative to self. file_object: File or directory to add. error_class: The error class to be thrown if file_path does not correspond to a directory (used internally( Raises: IOError or OSError: if file_path does not correspond to a directory.
[ "Add", "a", "fake", "file", "or", "directory", "into", "the", "filesystem", "at", "file_path", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L2082-L2103
train
203,498
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeFilesystem.rename
def rename(self, old_file_path, new_file_path, force_replace=False): """Renames a FakeFile object at old_file_path to new_file_path, preserving all properties. Args: old_file_path: Path to filesystem object to rename. new_file_path: Path to where the filesystem object will live after this call. force_replace: If set and destination is an existing file, it will be replaced even under Windows if the user has permissions, otherwise replacement happens under Unix only. Raises: OSError: if old_file_path does not exist. OSError: if new_file_path is an existing directory (Windows, or Posix if old_file_path points to a regular file) OSError: if old_file_path is a directory and new_file_path a file OSError: if new_file_path is an existing file and force_replace not set (Windows only). OSError: if new_file_path is an existing file and could not be removed (Posix, or Windows with force_replace set). OSError: if dirname(new_file_path) does not exist. OSError: if the file would be moved to another filesystem (e.g. mount point). 
""" ends_with_sep = self.ends_with_path_separator(old_file_path) old_file_path = self.absnormpath(old_file_path) new_file_path = self.absnormpath(new_file_path) if not self.exists(old_file_path, check_link=True): self.raise_os_error(errno.ENOENT, old_file_path, 2) if ends_with_sep: self._handle_broken_link_with_trailing_sep(old_file_path) old_object = self.lresolve(old_file_path) if not self.is_windows_fs: self._handle_posix_dir_link_errors( new_file_path, old_file_path, ends_with_sep) if self.exists(new_file_path, check_link=True): new_file_path = self._rename_to_existing_path( force_replace, new_file_path, old_file_path, old_object, ends_with_sep) if not new_file_path: return old_dir, old_name = self.splitpath(old_file_path) new_dir, new_name = self.splitpath(new_file_path) if not self.exists(new_dir): self.raise_os_error(errno.ENOENT, new_dir) old_dir_object = self.resolve(old_dir) new_dir_object = self.resolve(new_dir) if old_dir_object.st_dev != new_dir_object.st_dev: self.raise_os_error(errno.EXDEV, old_file_path) if not S_ISDIR(new_dir_object.st_mode): self.raise_os_error( errno.EACCES if self.is_windows_fs else errno.ENOTDIR, new_file_path) if new_dir_object.has_parent_object(old_object): self.raise_os_error(errno.EINVAL, new_file_path) object_to_rename = old_dir_object.get_entry(old_name) old_dir_object.remove_entry(old_name, recursive=False) object_to_rename.name = new_name new_name = new_dir_object._normalized_entryname(new_name) if new_name in new_dir_object.contents: # in case of overwriting remove the old entry first new_dir_object.remove_entry(new_name) new_dir_object.add_entry(object_to_rename)
python
def rename(self, old_file_path, new_file_path, force_replace=False): """Renames a FakeFile object at old_file_path to new_file_path, preserving all properties. Args: old_file_path: Path to filesystem object to rename. new_file_path: Path to where the filesystem object will live after this call. force_replace: If set and destination is an existing file, it will be replaced even under Windows if the user has permissions, otherwise replacement happens under Unix only. Raises: OSError: if old_file_path does not exist. OSError: if new_file_path is an existing directory (Windows, or Posix if old_file_path points to a regular file) OSError: if old_file_path is a directory and new_file_path a file OSError: if new_file_path is an existing file and force_replace not set (Windows only). OSError: if new_file_path is an existing file and could not be removed (Posix, or Windows with force_replace set). OSError: if dirname(new_file_path) does not exist. OSError: if the file would be moved to another filesystem (e.g. mount point). 
""" ends_with_sep = self.ends_with_path_separator(old_file_path) old_file_path = self.absnormpath(old_file_path) new_file_path = self.absnormpath(new_file_path) if not self.exists(old_file_path, check_link=True): self.raise_os_error(errno.ENOENT, old_file_path, 2) if ends_with_sep: self._handle_broken_link_with_trailing_sep(old_file_path) old_object = self.lresolve(old_file_path) if not self.is_windows_fs: self._handle_posix_dir_link_errors( new_file_path, old_file_path, ends_with_sep) if self.exists(new_file_path, check_link=True): new_file_path = self._rename_to_existing_path( force_replace, new_file_path, old_file_path, old_object, ends_with_sep) if not new_file_path: return old_dir, old_name = self.splitpath(old_file_path) new_dir, new_name = self.splitpath(new_file_path) if not self.exists(new_dir): self.raise_os_error(errno.ENOENT, new_dir) old_dir_object = self.resolve(old_dir) new_dir_object = self.resolve(new_dir) if old_dir_object.st_dev != new_dir_object.st_dev: self.raise_os_error(errno.EXDEV, old_file_path) if not S_ISDIR(new_dir_object.st_mode): self.raise_os_error( errno.EACCES if self.is_windows_fs else errno.ENOTDIR, new_file_path) if new_dir_object.has_parent_object(old_object): self.raise_os_error(errno.EINVAL, new_file_path) object_to_rename = old_dir_object.get_entry(old_name) old_dir_object.remove_entry(old_name, recursive=False) object_to_rename.name = new_name new_name = new_dir_object._normalized_entryname(new_name) if new_name in new_dir_object.contents: # in case of overwriting remove the old entry first new_dir_object.remove_entry(new_name) new_dir_object.add_entry(object_to_rename)
[ "def", "rename", "(", "self", ",", "old_file_path", ",", "new_file_path", ",", "force_replace", "=", "False", ")", ":", "ends_with_sep", "=", "self", ".", "ends_with_path_separator", "(", "old_file_path", ")", "old_file_path", "=", "self", ".", "absnormpath", "(...
Renames a FakeFile object at old_file_path to new_file_path, preserving all properties. Args: old_file_path: Path to filesystem object to rename. new_file_path: Path to where the filesystem object will live after this call. force_replace: If set and destination is an existing file, it will be replaced even under Windows if the user has permissions, otherwise replacement happens under Unix only. Raises: OSError: if old_file_path does not exist. OSError: if new_file_path is an existing directory (Windows, or Posix if old_file_path points to a regular file) OSError: if old_file_path is a directory and new_file_path a file OSError: if new_file_path is an existing file and force_replace not set (Windows only). OSError: if new_file_path is an existing file and could not be removed (Posix, or Windows with force_replace set). OSError: if dirname(new_file_path) does not exist. OSError: if the file would be moved to another filesystem (e.g. mount point).
[ "Renames", "a", "FakeFile", "object", "at", "old_file_path", "to", "new_file_path", "preserving", "all", "properties", "." ]
6c36fb8987108107fc861fc3013620d46c7d2f9c
https://github.com/jmcgeheeiv/pyfakefs/blob/6c36fb8987108107fc861fc3013620d46c7d2f9c/pyfakefs/fake_filesystem.py#L2105-L2173
train
203,499