Columns:
_id: string (length 2 to 7)
title: string (length 1 to 88)
partition: string (3 classes)
text: string (length 75 to 19.8k)
language: string (1 class)
meta_information: dict
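The columns above describe one record of this corpus: an identifier, the name of the documented function, its partition, the raw Python source in text, a language tag, and a meta_information dict. As a minimal sketch of how such a record could be consumed, assuming the corpus ships as JSON Lines with exactly these field names (the file name below is hypothetical):

import json

# Hypothetical path; the field names come from the column summary above.
with open("corpus.jsonl", encoding="utf-8") as handle:
    for raw in handle:
        record = json.loads(raw)
        # "text" holds the Python source of one documented function.
        print(record["_id"], record["title"], record["partition"])
        print(record["text"][:80])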
q270400
Percentage.count
test
def count(self):
    """
    Count the number of domains for each status.
    """

    if self.status:
        # The status is parsed.

        # We increase the number of tested.
        PyFunceble.INTERN["counter"]["number"]["tested"] += 1

        if (
            self.status.lower() in PyFunceble.STATUS["list"]["up"]
            or self.status.lower() in PyFunceble.STATUS["list"]["valid"]
        ):
            # The status is in the list of up statuses.

            # We increase the number of up.
            PyFunceble.INTERN["counter"]["number"]["up"] += 1
        elif self.status.lower() in PyFunceble.STATUS["list"]["down"]:
            # The status is in the list of down statuses.

            # We increase the number of down.
            PyFunceble.INTERN["counter"]["number"]["down"] += 1
        else:
            # The status is neither in the list of up nor down statuses.

            # We increase the number of invalid.
            PyFunceble.INTERN["counter"]["number"]["invalid"] += 1
python
{ "resource": "" }
q270401
Percentage._calculate
test
def _calculate(cls):
    """
    Calculate the percentage of each status.
    """

    # We map the current state/counters of the different statuses.
    percentages = {
        "up": PyFunceble.INTERN["counter"]["number"]["up"],
        "down": PyFunceble.INTERN["counter"]["number"]["down"],
        "invalid": PyFunceble.INTERN["counter"]["number"]["invalid"],
    }

    for percentage in percentages:
        # We loop through our map indexes.

        # We calculate the percentage (integer division).
        calculation = (
            percentages[percentage]
            * 100
            // PyFunceble.INTERN["counter"]["number"]["tested"]
        )

        # And we update the percentage counter of the current status.
        PyFunceble.INTERN["counter"]["percentage"].update({percentage: calculation})
python
{ "resource": "" }
q270402
Percentage.log
test
def log(self):
    """
    Print on screen and on file the percentages for each status.
    """

    if (
        PyFunceble.CONFIGURATION["show_percentage"]
        and PyFunceble.INTERN["counter"]["number"]["tested"] > 0
    ):
        # * We are allowed to show the percentage on screen.
        # and
        # * The number of tested is greater than 0.

        # We initiate the output file.
        output = (
            PyFunceble.OUTPUT_DIRECTORY
            + PyFunceble.OUTPUTS["parent_directory"]
            + PyFunceble.OUTPUTS["logs"]["directories"]["parent"]
            + PyFunceble.OUTPUTS["logs"]["directories"]["percentage"]
            + PyFunceble.OUTPUTS["logs"]["filenames"]["percentage"]
        )

        # We delete the output file if it does exist.
        File(output).delete()

        # We calculate the percentage of each status.
        self._calculate()

        if not PyFunceble.CONFIGURATION["quiet"]:
            # The quiet mode is not activated.

            # We print a new line.
            print("\n")

        # We print the percentage header on file and screen.
        Prints(None, "Percentage", output).header()

        # We construct the different lines/data to print on screen and file.
        lines_to_print = [
            [
                PyFunceble.STATUS["official"]["up"],
                str(PyFunceble.INTERN["counter"]["percentage"]["up"]) + "%",
                PyFunceble.INTERN["counter"]["number"]["up"],
            ],
            [
                PyFunceble.STATUS["official"]["down"],
                str(PyFunceble.INTERN["counter"]["percentage"]["down"]) + "%",
                PyFunceble.INTERN["counter"]["number"]["down"],
            ],
            [
                PyFunceble.STATUS["official"]["invalid"],
                str(PyFunceble.INTERN["counter"]["percentage"]["invalid"]) + "%",
                PyFunceble.INTERN["counter"]["number"]["invalid"],
            ],
        ]

        if PyFunceble.CONFIGURATION["syntax"]:
            # We are checking for syntax.

            # We update the denomination of the UP status.
            lines_to_print[0][0] = PyFunceble.STATUS["official"]["valid"]

            # And we unset the INACTIVE line.
            del lines_to_print[1]

        for to_print in lines_to_print:
            # We loop through the different lines to print
            # (one line for each status).

            # And we print the current status line on file and screen.
            Prints(to_print, "Percentage", output).data()
    elif PyFunceble.INTERN["counter"]["number"]["tested"] > 0:
        # * We are not allowed to show the percentage on screen.
        # but
        # * The number of tested is greater than 0.

        # We run the calculation.
        # Note: The following is needed, because all counter calculations are
        # done by this class.
        self._calculate()
python
{ "resource": "" }
q270403
Check.is_url_valid
test
def is_url_valid(self, url=None, return_base=False, return_formatted=False):
    """
    Check if the given URL is valid.

    :param url: The url to validate.
    :type url: str

    :param return_base:
        Allow us to return the url base (if the URL is formatted correctly).
    :type return_base: bool

    :param return_formatted:
        Allow us to get the URL converted to IDNA if the conversion
        is activated.
    :type return_formatted: bool

    :return: The validity of the URL or its base.
    :rtype: bool|str
    """

    # We initiate a variable which will save the initial base in case
    # we have to convert the base to IDNA.
    initial_base = None

    if url:
        # The given url is not empty.

        # We initiate the element to test.
        to_test = url
    elif self.element:
        # The globally given url is not empty.

        # We initiate the element to test.
        to_test = self.element
    else:
        # The given url is empty.

        # We initiate the element to test from the globally given URL to test.
        to_test = PyFunceble.INTERN["to_test"]

    if to_test.startswith("http"):
        # The element to test starts with http.

        try:
            # We initiate a regex which will match the domain or the url base.
            regex = r"(^(http:\/\/|https:\/\/)(.+?(?=\/)|.+?$))"

            # We extract the url base with the help of the initiated regex.
            initial_base = base = Regex(
                to_test, regex, return_data=True, rematch=True
            ).match()[2]

            if PyFunceble.CONFIGURATION["idna_conversion"]:
                # We have to convert the domain to IDNA.

                # We convert the initial base to IDNA.
                base = domain2idna(base)

            # We check if the url base is a valid domain.
            domain_status = self.is_domain_valid(base)

            # We check if the url base is a valid IP.
            ip_status = self.is_ip_valid(base)

            if domain_status or ip_status:
                # * The url base is a valid domain.
                # or
                # * The url base is a valid IP.

                if PyFunceble.CONFIGURATION["idna_conversion"] and return_formatted:
                    # * We have to convert to IDNA.
                    # and
                    # * We have to return the converted full URL.

                    # We return the converted full URL.
                    return Regex(
                        to_test,
                        initial_base,
                        escape=True,
                        return_data=True,
                        replace_with=base,
                        occurences=1,
                    ).replace()

                if return_formatted:
                    # * We do not have to convert to IDNA.
                    # but
                    # * We have to return the full URL.

                    # We return the initially given URL.
                    return to_test

                if return_base:
                    # We have to return the base of the URL.

                    # We return the base of the URL.
                    return base

                # We return True.
                return True
        except TypeError:
            pass

    if return_formatted:
        # We have to return a URL.

        # We return the initially given URL.
        return to_test

    # We return False.
    return False
python
{ "resource": "" }
q270404
Check.is_domain_valid
test
def is_domain_valid( self, domain=None, subdomain_check=False ): # pylint:disable=too-many-return-statements, too-many-branches """ Check if the given domain is a valid. :param domain: The domain to validate. :type domain: str :param subdomain_check: Activate the subdomain checking. :type subdomain_check: bool :return: The validity of the sub-domain. :rtype: bool """ # We initate our regex which will match for valid domains. regex_valid_domains = r"^(?=.{0,253}$)(([a-z0-9][a-z0-9-]{0,61}[a-z0-9]|[a-z0-9])\.)+((?=.*[^0-9])([a-z0-9][a-z0-9-]{0,61}[a-z0-9](?:\.)?|[a-z0-9](?:\.)?))$" # pylint: disable=line-too-long # We initiate our regex which will match for valid subdomains. regex_valid_subdomains = r"^(?=.{0,253}$)(([a-z0-9_][a-z0-9-_]{0,61}[a-z0-9_-]|[a-z0-9])\.)+((?=.*[^0-9])([a-z0-9][a-z0-9-]{0,61}[a-z0-9]|[a-z0-9]))$" # pylint: disable=line-too-long if domain: # A domain is given. # We set the element to test as the parsed domain. to_test = domain elif self.element: # A domain is globally given. # We set the globally parsed domain. to_test = self.element else: # A domain is not given. # We set the element to test as the currently tested element. to_test = PyFunceble.INTERN["to_test"] try: # We get the position of the last point. last_point_index = to_test.rindex(".") # And with the help of the position of the last point, we get the domain extension. extension = to_test[last_point_index + 1 :] if not extension and to_test.endswith("."): try: extension = [x for x in to_test.split(".") if x][-1] except IndexError: pass if not extension or extension not in PyFunceble.INTERN["iana_db"]: # * The extension is not found. # or # * The extension is not into the IANA database. # We return false. return False if ( Regex(to_test, regex_valid_domains, return_data=False).match() and not subdomain_check ): # * The element pass the domain validation. # and # * We are not checking if it is a subdomain. # We return True. The domain is valid. return True # The element did not pass the domain validation. That means that # it has invalid character or the position of - or _ are not right. if extension in PyFunceble.INTERN["psl_db"]: # The extension is into the psl database. for suffix in PyFunceble.INTERN["psl_db"][extension]: # We loop through the element of the extension into the psl database. try: # We try to get the position of the currently read suffix # in the element ot test. suffix_index = to_test.rindex("." + suffix) # We get the element to check. # The idea here is to delete the suffix, then retest with our # subdomains regex. to_check = to_test[:suffix_index] if "." not in to_check and subdomain_check: # * There is no point into the new element to check. # and # * We are checking if it is a subdomain. # We return False, it is not a subdomain. return False if "." in to_check and subdomain_check: # * There is a point into the new element to check. # and # * We are checking if it is a subdomain. # We return True, it is a subdomain. return True # We are not checking if it is a subdomain. if "." in to_check: # There is a point into the new element to check. # We check if it passes our subdomain regex. # * True: It's a valid domain. # * False: It's an invalid domain. return Regex( to_check, regex_valid_subdomains, return_data=False ).match() except ValueError: # In case of a value error because the position is not found, # we continue to the next element. pass # * The extension is not into the psl database. # or # * there was no point into the suffix checking. # We get the element before the last point. 
to_check = to_test[:last_point_index] if "." in to_check and subdomain_check: # * There is a point in to_check. # and # * We are checking if it is a subdomain. # We return True, it is a subdomain. return True # We are not checking if it is a subdomain. if "." in to_check: # There is a point in to_check. # We check if it passes our subdomain regex. # * True: It's a valid domain. # * False: It's an invalid domain. return Regex( to_check, regex_valid_subdomains, return_data=False ).match() except (ValueError, AttributeError): # In case of a value or attribute error we ignore them. pass # And we return False, the domain is not valid. return False
python
{ "resource": "" }
q270405
Check.is_subdomain
test
def is_subdomain(self, domain=None): """ Check if the given subdomain is a subdomain. :param domain: The domain to validate. :type domain: str :return: The validity of the subdomain. :rtype: bool """ if domain: # A domain is given. # We set the element to test as the parsed domain. to_test = domain elif self.element: # A domain is globally given. # We set the globally parsed domain. to_test = self.element else: # A domain is not given. # We set the element to test as the currently tested element. to_test = PyFunceble.INTERN["to_test"] # We return the status of the check. return self.is_domain_valid(to_test, subdomain_check=True)
python
{ "resource": "" }
q270406
Syntax.get
test
def get(cls):
    """
    Execute the logic behind the Syntax handling.

    :return: The syntax status.
    :rtype: str
    """

    if PyFunceble.INTERN["to_test_type"] == "domain":
        # We are testing for a domain or an IP.

        if Check().is_domain_valid() or Check().is_ip_valid():
            # * The domain is valid.
            # or
            # * The IP is valid.

            # We handle and return the valid status.
            return SyntaxStatus(PyFunceble.STATUS["official"]["valid"]).handle()
    elif PyFunceble.INTERN["to_test_type"] == "url":
        # We are testing for a URL.

        if Check().is_url_valid():
            # The url is valid.

            # We handle and return the valid status.
            return SyntaxStatus(PyFunceble.STATUS["official"]["valid"]).handle()
    else:
        raise Exception("Unknown test type.")

    # We handle and return the invalid status.
    return SyntaxStatus(PyFunceble.STATUS["official"]["invalid"]).handle()
python
{ "resource": "" }
q270407
Inactive._reformat_historical_formating_error
test
def _reformat_historical_formating_error(self): # pragma: no cover """ Format the old format so it can be merged into the newer format. """ if PyFunceble.CONFIGURATION["inactive_database"]: # The database subsystem is activated. # We construct the possible path to an older version of the database. historical_formating_error = ( PyFunceble.CURRENT_DIRECTORY + "inactive-db.json" ) if PyFunceble.path.isfile(historical_formating_error): # The histortical file already exists. # We get its content. data = Dict().from_json(File(historical_formating_error).read()) # We initiate a variable which will save the data that is going # to be merged. data_to_parse = {} # We get the database keybase. top_keys = data.keys() for top_key in top_keys: # We loop through the list of upper keys. # We get the lowest keys. low_keys = data[top_key].keys() # We initiate the data to parse. data_to_parse[top_key] = {} for low_key in low_keys: # We loop through the list of lower keys. if low_key.isdigit(): # The current low key is a digit. # We parse its content (from the old) into the new format. # In between, we remove 30 days from the low_key so that # it become in the past. This way they will be retested # automatically. data_to_parse[top_key][ int(low_key) - (self.one_day_in_seconds * 30) ] = data[top_key][low_key] else: # The current low key is not a digit. # We parse its content (from the old) into the new format. # In between, we remove 30 days from the current time so that # it become in the past. This way they will be retested # automatically. data_to_parse[top_key][ int(PyFunceble.time()) - (self.one_day_in_seconds * 30) ] = data[top_key][low_key] if "inactive_db" in PyFunceble.INTERN: # The current (new) database is not empty. # We update add the content of the old into the current database. PyFunceble.INTERN["inactive_db"].update(data_to_parse) else: # The current (new) database is empty. # We replace the content with the data_to_parse as it is complient # with the new format. PyFunceble.INTERN["inactive_db"] = data_to_parse # We delete the old database file. File(historical_formating_error).delete()
python
{ "resource": "" }
q270408
Inactive._retrieve
test
def _retrieve(self):
    """
    Return the current content of the inactive-db.json file.
    """

    if PyFunceble.CONFIGURATION["inactive_database"]:
        # The database subsystem is activated.

        # We get, format and initiate the historical database file.
        self._reformat_historical_formating_error()

        if PyFunceble.path.isfile(self.inactive_db_path):
            # The database file exists.

            # We merge our current database into the already initiated one.
            self._merge()
python
{ "resource": "" }
q270409
Inactive._backup
test
def _backup(self):
    """
    Save the current database into the inactive-db.json file.
    """

    if PyFunceble.CONFIGURATION["inactive_database"]:
        # The database subsystem is activated.

        # We save the current database state into the database file.
        Dict(PyFunceble.INTERN["inactive_db"]).to_json(self.inactive_db_path)
python
{ "resource": "" }
q270410
Inactive._timestamp
test
def _timestamp(self):
    """
    Get the timestamp where we are going to save our current list.

    :return: The timestamp to append with the currently tested element.
    :rtype: int|str
    """

    if PyFunceble.CONFIGURATION["inactive_database"]:
        # The database subsystem is activated.

        if (
            "inactive_db" in PyFunceble.INTERN
            and PyFunceble.INTERN["file_to_test"] in PyFunceble.INTERN["inactive_db"]
            and PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]]
        ):
            # The file we are testing is in the database and its content
            # is not empty.

            # We get the indexes of the current file (in the database).
            database_keys = [
                x
                for x in PyFunceble.INTERN["inactive_db"][
                    PyFunceble.INTERN["file_to_test"]
                ].keys()
                if x.isdigit()
            ]

            if database_keys:
                # The list of keys is not empty.

                # We get the most recent date.
                recent_date = max(database_keys)
            else:  # pragma: no cover
                # The list of keys is empty.

                # We return the current time.
                return int(PyFunceble.time())

            if int(PyFunceble.time()) > int(recent_date) + self.one_day_in_seconds:
                # The most recent time was more than one day ago.

                # We return the current time.
                return int(PyFunceble.time())

            # The most recent time was less than one day ago.

            if int(PyFunceble.time()) < int(recent_date) + self.days_in_seconds:
                # The most recent time was less than the expected number of days
                # for retesting.

                # We return the most recent date.
                return int(recent_date)

    # The database subsystem is not activated.

    # We return the current time.
    return int(PyFunceble.time())
python
{ "resource": "" }
q270411
Inactive.content
test
def content(cls): """ Get the content of the database. :return: The content of the database. :rtype: list """ # We initiate a variable which will save what we are going to return. result = [] if ( PyFunceble.CONFIGURATION["inactive_database"] and PyFunceble.INTERN["inactive_db"] ): # * The database subsystem is activated. # and # * The database is not empty. for key in PyFunceble.INTERN["inactive_db"][ PyFunceble.INTERN["file_to_test"] ]: # We loop through the index of the current file database. if key == "to_test": # The current key is `to_test`. # We continue to the next element. continue # We extend the result with the content of the currently read index. result.extend( PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]][ key ] ) # We return the content of the database. return result
python
{ "resource": "" }
q270412
Inactive.is_present
test
def is_present(cls): """ Check if the currently tested element is into the database. """ if PyFunceble.CONFIGURATION["inactive_database"]: # The database subsystem is activated. if PyFunceble.INTERN["to_test"] in PyFunceble.INTERN[ "flatten_inactive_db" ] or ( PyFunceble.INTERN["file_to_test"] in PyFunceble.INTERN["inactive_db"] and PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]] and "to_test" in PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]] and PyFunceble.INTERN["to_test"] in PyFunceble.INTERN["inactive_db"][PyFunceble.INTERN["file_to_test"]][ "to_test" ] ): return True return False
python
{ "resource": "" }
q270413
Whois._retrieve
test
def _retrieve(self): """ Retrieve the data from the database. """ if self._authorization() and "whois_db" not in PyFunceble.INTERN: # The usage of the whois database is activated. if PyFunceble.path.isfile(self.whois_db_path): # The database file exist. # We merge our current database into already initiated one. PyFunceble.INTERN["whois_db"] = Dict().from_json( File(self.whois_db_path).read() ) else: # The database file does not exist. # We initiate an empty database. PyFunceble.INTERN["whois_db"] = {}
python
{ "resource": "" }
q270414
Whois._backup
test
def _backup(self):
    """
    Backup the database into its file.
    """

    if self._authorization():
        # We are authorized to work.

        # We backup the current state of the database.
        Dict(PyFunceble.INTERN["whois_db"]).to_json(self.whois_db_path)
python
{ "resource": "" }
q270415
Whois.is_in_database
test
def is_in_database(self): """ Check if the element is into the database. """ if ( self._authorization() and PyFunceble.INTERN["file_to_test"] in PyFunceble.INTERN["whois_db"] and PyFunceble.INTERN["to_test"] in PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]] ): # * We are authorized to work. # and # * The given file path exist in the database. # and # * The element we are testing is in the database related to the # given file path. # We return True, the element we are testing is into the database. return True # * We are not authorized to work. # or # * The given file path does not exist in the database. # or # * The element we are testing is not in the database related to the # given file path. # We return False,the element we are testing is not into the database. return False
python
{ "resource": "" }
q270416
Whois.is_time_older
test
def is_time_older(self): """ Check if the current time is older than the one in the database. """ if ( self._authorization() and self.is_in_database() and int( PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][ PyFunceble.INTERN["to_test"] ]["epoch"] ) < int(PyFunceble.time()) ): # * We are authorized to work. # and # * The element we are testing is in the database. # and # * The epoch of the expiration date is less than our current epoch. # The expiration date is in the past, we return True. return True # The expiration date is in the future, we return False. return False
python
{ "resource": "" }
q270417
Whois.get_expiration_date
test
def get_expiration_date(self): """ Get the expiration date from the database. :return: The expiration date from the database. :rtype: str|None """ if self._authorization() and self.is_in_database() and not self.is_time_older(): # * We are authorized to work. # and # * The element we are testing is in the database. # and # * The expiration date is in the future. # We get the expiration date from the database. result = PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][ PyFunceble.INTERN["to_test"] ]["expiration_date"] if result: # The expiration date from the database is not empty nor # equal to None. # We return it. return result # We return None, there is no data to work with. return None
python
{ "resource": "" }
q270418
Whois.add
test
def add(self): """ Add the currently tested element into the database. """ if self._authorization(): # We are authorized to work. if self.epoch < int(PyFunceble.time()): state = "past" else: state = "future" if self.is_in_database(): # The element we are working with is in the database. if ( str(self.epoch) != PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][ PyFunceble.INTERN["to_test"] ]["epoch"] ): # The given epoch is diffent from the one saved. # We update it. PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][ PyFunceble.INTERN["to_test"] ].update( { "epoch": str(self.epoch), "state": state, "expiration_date": self.expiration_date, } ) elif self.is_time_older(): # The expiration date from the database is in the past. if ( PyFunceble.INTERN["whois_db"][ PyFunceble.INTERN["file_to_test"] ][PyFunceble.INTERN["to_test"]]["state"] != "past" ): # pragma: no cover # The state of the element in the datbase is not # equal to `past`. # We update it to `past`. PyFunceble.INTERN["whois_db"][ PyFunceble.INTERN["file_to_test"] ][PyFunceble.INTERN["to_test"]].update({"state": "past"}) elif ( PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][ PyFunceble.INTERN["to_test"] ]["state"] != "future" ): # * The expiration date from the database is in the future. # and # * The state of the element in the database is not # equal to `future`. # We update it to `future`. PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]][ PyFunceble.INTERN["to_test"] ].update({"state": "future"}) else: # The element we are working with is not in the database. if ( not PyFunceble.INTERN["file_to_test"] in PyFunceble.INTERN["whois_db"] ): # The file path is not in the database. # We initiate it. PyFunceble.INTERN["whois_db"][ PyFunceble.INTERN["file_to_test"] ] = {} # We create the first dataset. PyFunceble.INTERN["whois_db"][PyFunceble.INTERN["file_to_test"]].update( { PyFunceble.INTERN["to_test"]: { "epoch": str(self.epoch), "state": state, "expiration_date": self.expiration_date, } } ) # We do a safety backup of our database. self._backup()
python
{ "resource": "" }
q270419
AutoSave.travis_permissions
test
def travis_permissions(cls):
    """
    Set permissions in order to avoid issues before committing.
    """

    if PyFunceble.CONFIGURATION["travis"]:
        try:
            build_dir = PyFunceble.environ["TRAVIS_BUILD_DIR"]
            commands = [
                "sudo chown -R travis:travis %s" % (build_dir),
                "sudo chgrp -R travis %s" % (build_dir),
                "sudo chmod -R g+rwX %s" % (build_dir),
                "sudo chmod 777 -Rf %s.git"
                % (build_dir + PyFunceble.directory_separator),
                r"sudo find %s -type d -exec chmod g+x '{}' \;" % (build_dir),
            ]

            for command in commands:
                Command(command).execute()

            if Command("git config core.sharedRepository").execute() == "":
                Command("git config core.sharedRepository group").execute()
        except KeyError:
            pass
python
{ "resource": "" }
q270420
AutoSave._travis
test
def _travis(self): """ Logic behind autosave under Travis CI. """ if PyFunceble.CONFIGURATION["travis"]: try: _ = PyFunceble.environ["TRAVIS_BUILD_DIR"] time_autorisation = False try: time_autorisation = int(PyFunceble.time()) >= int( PyFunceble.INTERN["start"] ) + (int(PyFunceble.CONFIGURATION["travis_autosave_minutes"]) * 60) except KeyError: if self.last and not self.bypass: raise Exception( "Please review the way `ExecutionTime()` is called." ) if self.last or time_autorisation or self.bypass: Percentage().log() self.travis_permissions() command = 'git add --all && git commit -a -m "%s"' if self.last or self.bypass: if PyFunceble.CONFIGURATION["command_before_end"]: for line in Command( PyFunceble.CONFIGURATION["command_before_end"] ).run(): sys_stdout.write("{}\n".format(line)) self.travis_permissions() message = ( PyFunceble.CONFIGURATION["travis_autosave_final_commit"] + " [ci skip]" ) Command(command % message).execute() else: if PyFunceble.CONFIGURATION["command"]: for line in Command( PyFunceble.CONFIGURATION["command"] ).run(): sys_stdout.write("{}\n".format(line)) self.travis_permissions() Command( command % PyFunceble.CONFIGURATION["travis_autosave_commit"] ).execute() print( Command( "git push origin %s" % PyFunceble.CONFIGURATION["travis_branch"] ).execute() ) exit(0) except KeyError: pass
python
{ "resource": "" }
q270421
Lookup.nslookup
test
def nslookup(cls): """ Implementation of UNIX nslookup. """ try: # We try to get the addresse information of the given domain or IP. if "current_test_data" in PyFunceble.INTERN: # pragma: no cover # The end-user want more information whith his test. if not Check().is_ip_valid(): # The element we are testing is not an IP. # We request the address informations. request = PyFunceble.socket.getaddrinfo( PyFunceble.INTERN["to_test"], 80, 0, 0, PyFunceble.socket.IPPROTO_TCP, ) for sequence in request: # We loop through the sequence returned by the request. # We append the NS informations into the nslookup index. PyFunceble.INTERN["current_test_data"]["nslookup"].append( sequence[-1][0] ) else: # The element we are testing is an IP. request = PyFunceble.socket.gethostbyaddr( PyFunceble.INTERN["to_test"] ) # We append the NS informations into the nslookup index. PyFunceble.INTERN["current_test_data"]["nslookup"][ "hostname" ] = request[0] PyFunceble.INTERN["current_test_data"]["nslookup"][ "aliases" ] = request[1] PyFunceble.INTERN["current_test_data"]["nslookup"]["ips"] = request[ 2 ] else: if not Check().is_ip_valid(): # The element we are testing is not an IP. PyFunceble.socket.getaddrinfo( PyFunceble.INTERN["to_test"], 80, 0, 0, PyFunceble.socket.IPPROTO_TCP, ) else: # The element we are testing is an IP. PyFunceble.socket.gethostbyaddr(PyFunceble.INTERN["to_test"]) # It was done successfuly, we return True. # Note: we don't need to read the addresses so we consider as successful # as long as there is no error. return True except (OSError, PyFunceble.socket.herror, PyFunceble.socket.gaierror): # One of the listed exception is matched. # It was done unsuccesfuly, we return False. return False
python
{ "resource": "" }
q270422
Lookup.whois
test
def whois(cls, whois_server, domain=None, timeout=None): # pragma: no cover """ Implementation of UNIX whois. :param whois_server: The WHOIS server to use to get the record. :type whois_server: str :param domain: The domain to get the whois record from. :type domain: str :param timeout: The timeout to apply to the request. :type timeout: int :return: The whois record from the given whois server, if exist. :rtype: str|None """ if domain is None: # The domain is not given (localy). # We consider the domain as the domain or IP we are currently testing. domain = PyFunceble.INTERN["to_test"] if timeout is None: # The time is not given (localy). # We consider the timeout from the configuration as the timeout to use. timeout = PyFunceble.CONFIGURATION["seconds_before_http_timeout"] if whois_server: # A whois server is given. # We initiate a PyFunceble.socket. req = PyFunceble.socket.socket( PyFunceble.socket.AF_INET, PyFunceble.socket.SOCK_STREAM ) if timeout % 3 == 0: # The timeout is modulo 3. # We report the timeout to our initiated PyFunceble.socket. req.settimeout(timeout) else: # The timeout is not modulo 3. # We report 3 seconds as the timeout to our initiated PyFunceble.socket. req.settimeout(3) try: # We try to connect to the whois server at the port 43. req.connect((whois_server, 43)) except PyFunceble.socket.error: # We got an error. # We return None. return None # We send end encode the domain we want the data from. req.send((domain + "\r\n").encode()) # We initiate a bytes variable which will save the response # from the server. response = b"" while True: # We loop infinitly. try: # We try to receive the data in a buffer of 4096 bytes. data = req.recv(4096) except (PyFunceble.socket.timeout, ConnectionResetError): # We got an error. # We close the connection. req.close() # And we return None. return None # Everything goes right. # We append data to the response we got. response += data if not data: # The data is empty. # We break the loop. break # We close the connection. req.close() try: # We finally decode and return the response we got from the # server. return response.decode() except UnicodeDecodeError: # We got an encoding error. # We decode the response. # Note: Because we don't want to deal with other issue, we # decided to use `replace` in order to automatically replace # all non utf-8 encoded characters. return response.decode("utf-8", "replace") # The whois server is not given. # We return None. return None
python
{ "resource": "" }
q270423
URL.get
test
def get(cls):  # pragma: no cover
    """
    Execute the logic behind the URL handling.

    :return: The status of the URL.
    :rtype: str
    """

    if Check().is_url_valid() or PyFunceble.CONFIGURATION["local"]:
        # * The url is valid.
        # or
        # * We are testing in/for a local or private network.

        if "current_test_data" in PyFunceble.INTERN:
            PyFunceble.INTERN["current_test_data"]["url_syntax_validation"] = True

        # We initiate the HTTP status code.
        PyFunceble.INTERN.update({"http_code": HTTPCode().get()})

        # We initiate the list of active status codes.
        active_list = []
        active_list.extend(PyFunceble.HTTP_CODE["list"]["potentially_up"])
        active_list.extend(PyFunceble.HTTP_CODE["list"]["up"])

        # We initiate the list of inactive status codes.
        inactive_list = []
        inactive_list.extend(PyFunceble.HTTP_CODE["list"]["potentially_down"])
        inactive_list.append("*" * 3)

        if PyFunceble.INTERN["http_code"] in active_list:
            # The extracted HTTP status code is in the active list.

            # We handle and return the up status.
            return URLStatus(PyFunceble.STATUS["official"]["up"]).handle()

        if PyFunceble.INTERN["http_code"] in inactive_list:
            # The extracted HTTP status code is in the inactive list.

            # We handle and return the down status.
            return URLStatus(PyFunceble.STATUS["official"]["down"]).handle()

    # The extracted HTTP status code is neither in the active nor the inactive list.

    if "current_test_data" in PyFunceble.INTERN:
        # The end-user wants more information with their test.

        # We update the url_syntax_validation index.
        PyFunceble.INTERN["current_test_data"]["url_syntax_validation"] = False

    # We handle and return the invalid status.
    return URLStatus(PyFunceble.STATUS["official"]["invalid"]).handle()
python
{ "resource": "" }
q270424
Referer.get
test
def get(self):
    """
    Return the referer, aka the WHOIS server of the current domain extension.
    """

    if not PyFunceble.CONFIGURATION["local"]:
        # We are not running a test in a local network.

        if self.domain_extension not in self.ignored_extension:
            # The extension of the domain we are testing is not in
            # the list of ignored extensions.

            # We set the referer to None as we do not have any.
            referer = None

            if self.domain_extension in PyFunceble.INTERN["iana_db"]:
                # The domain extension is in the iana database.

                if not PyFunceble.CONFIGURATION["no_whois"]:
                    # We are authorized to use WHOIS for the test result.

                    # We get the referer from the database.
                    referer = PyFunceble.INTERN["iana_db"][self.domain_extension]

                    if not referer:
                        # The referer is not filled.

                        # We log the case of the current extension.
                        Logs().referer_not_found(self.domain_extension)

                        # And we handle and return the None status.
                        return None

                    # The referer is in the database.

                    # We return the extracted referer.
                    return referer

                # We are not authorized to use WHOIS for the test result.

                # We return None.
                return None

            # The domain extension is not in the iana database.

            # We return False, it is an invalid domain.
            return False

        # The extension of the domain we are testing is in
        # the list of ignored extensions.

        # We return None, the domain does not have a whois server.
        return None

    # We are running a test in a local network.

    # We return None.
    return None
python
{ "resource": "" }
q270425
Proxy._get_current_object
test
def _get_current_object(self): """Get current object. This is useful if you want the real object behind the proxy at a time for performance reasons or because you want to pass the object into a different context. """ loc = object.__getattribute__(self, '_Proxy__local') if not hasattr(loc, '__release_local__'): return loc(*self.__args, **self.__kwargs) try: # pragma: no cover # not sure what this is about return getattr(loc, self.__name__) except AttributeError: # pragma: no cover raise RuntimeError('no object bound to {0.__name__}'.format(self))
python
{ "resource": "" }
q270426
standard_paths
test
def standard_paths(): """Yield paths to standard modules.""" for is_plat_spec in [True, False]: path = distutils.sysconfig.get_python_lib(standard_lib=True, plat_specific=is_plat_spec) for name in os.listdir(path): yield name try: for name in os.listdir(os.path.join(path, 'lib-dynload')): yield name except OSError: # pragma: no cover pass
python
{ "resource": "" }
q270427
standard_package_names
test
def standard_package_names(): """Yield standard module names.""" for name in standard_paths(): if name.startswith('_') or '-' in name: continue if '.' in name and name.rsplit('.')[-1] not in ['so', 'py', 'pyc']: continue yield name.split('.')[0]
python
{ "resource": "" }
q270428
unused_import_line_numbers
test
def unused_import_line_numbers(messages):
    """Yield line numbers of unused imports."""
    for message in messages:
        if isinstance(message, pyflakes.messages.UnusedImport):
            yield message.lineno
python
{ "resource": "" }
q270429
unused_import_module_name
test
def unused_import_module_name(messages): """Yield line number and module name of unused imports.""" pattern = r'\'(.+?)\'' for message in messages: if isinstance(message, pyflakes.messages.UnusedImport): module_name = re.search(pattern, str(message)) module_name = module_name.group()[1:-1] if module_name: yield (message.lineno, module_name)
python
{ "resource": "" }
q270430
star_import_used_line_numbers
test
def star_import_used_line_numbers(messages):
    """Yield line number of star import usage."""
    for message in messages:
        if isinstance(message, pyflakes.messages.ImportStarUsed):
            yield message.lineno
python
{ "resource": "" }
q270431
star_import_usage_undefined_name
test
def star_import_usage_undefined_name(messages): """Yield line number, undefined name, and its possible origin module.""" for message in messages: if isinstance(message, pyflakes.messages.ImportStarUsage): undefined_name = message.message_args[0] module_name = message.message_args[1] yield (message.lineno, undefined_name, module_name)
python
{ "resource": "" }
q270432
unused_variable_line_numbers
test
def unused_variable_line_numbers(messages):
    """Yield line numbers of unused variables."""
    for message in messages:
        if isinstance(message, pyflakes.messages.UnusedVariable):
            yield message.lineno
python
{ "resource": "" }
q270433
duplicate_key_line_numbers
test
def duplicate_key_line_numbers(messages, source): """Yield line numbers of duplicate keys.""" messages = [ message for message in messages if isinstance(message, pyflakes.messages.MultiValueRepeatedKeyLiteral)] if messages: # Filter out complex cases. We don't want to bother trying to parse # this stuff and get it right. We can do it on a key-by-key basis. key_to_messages = create_key_to_messages_dict(messages) lines = source.split('\n') for (key, messages) in key_to_messages.items(): good = True for message in messages: line = lines[message.lineno - 1] key = message.message_args[0] if not dict_entry_has_key(line, key): good = False if good: for message in messages: yield message.lineno
python
{ "resource": "" }
q270434
create_key_to_messages_dict
test
def create_key_to_messages_dict(messages):
    """Return dict mapping the key to list of messages."""
    dictionary = collections.defaultdict(lambda: [])
    for message in messages:
        dictionary[message.message_args[0]].append(message)
    return dictionary
python
{ "resource": "" }
q270435
check
test
def check(source): """Return messages from pyflakes.""" if sys.version_info[0] == 2 and isinstance(source, unicode): # Convert back to original byte string encoding, otherwise pyflakes # call to compile() will complain. See PEP 263. This only affects # Python 2. try: source = source.encode('utf-8') except UnicodeError: # pragma: no cover return [] reporter = ListReporter() try: pyflakes.api.check(source, filename='<string>', reporter=reporter) except (AttributeError, RecursionError, UnicodeDecodeError): pass return reporter.messages
python
{ "resource": "" }
q270436
extract_package_name
test
def extract_package_name(line): """Return package name in import statement.""" assert '\\' not in line assert '(' not in line assert ')' not in line assert ';' not in line if line.lstrip().startswith(('import', 'from')): word = line.split()[1] else: # Ignore doctests. return None package = word.split('.')[0] assert ' ' not in package return package
python
{ "resource": "" }
q270437
multiline_import
test
def multiline_import(line, previous_line=''):
    """Return True if the import spans multiple lines."""
    for symbol in '()':
        if symbol in line:
            return True

    # Ignore doctests.
    if line.lstrip().startswith('>'):
        return True

    return multiline_statement(line, previous_line)
python
{ "resource": "" }
q270438
multiline_statement
test
def multiline_statement(line, previous_line=''): """Return True if this is part of a multiline statement.""" for symbol in '\\:;': if symbol in line: return True sio = io.StringIO(line) try: list(tokenize.generate_tokens(sio.readline)) return previous_line.rstrip().endswith('\\') except (SyntaxError, tokenize.TokenError): return True
python
{ "resource": "" }
q270439
filter_from_import
test
def filter_from_import(line, unused_module): """Parse and filter ``from something import a, b, c``. Return line without unused import modules, or `pass` if all of the module in import is unused. """ (indentation, imports) = re.split(pattern=r'\bimport\b', string=line, maxsplit=1) base_module = re.search(pattern=r'\bfrom\s+([^ ]+)', string=indentation).group(1) # Create an imported module list with base module name # ex ``from a import b, c as d`` -> ``['a.b', 'a.c as d']`` imports = re.split(pattern=r',', string=imports.strip()) imports = [base_module + '.' + x.strip() for x in imports] # We compare full module name (``a.module`` not `module`) to # guarantee the exact same module as detected from pyflakes. filtered_imports = [x.replace(base_module + '.', '') for x in imports if x not in unused_module] # All of the import in this statement is unused if not filtered_imports: return get_indentation(line) + 'pass' + get_line_ending(line) indentation += 'import ' return ( indentation + ', '.join(sorted(filtered_imports)) + get_line_ending(line))
python
{ "resource": "" }
q270440
break_up_import
test
def break_up_import(line): """Return line with imports on separate lines.""" assert '\\' not in line assert '(' not in line assert ')' not in line assert ';' not in line assert '#' not in line assert not line.lstrip().startswith('from') newline = get_line_ending(line) if not newline: return line (indentation, imports) = re.split(pattern=r'\bimport\b', string=line, maxsplit=1) indentation += 'import ' assert newline return ''.join([indentation + i.strip() + newline for i in sorted(imports.split(','))])
python
{ "resource": "" }
q270441
filter_code
test
def filter_code(source, additional_imports=None, expand_star_imports=False, remove_all_unused_imports=False, remove_duplicate_keys=False, remove_unused_variables=False, ignore_init_module_imports=False, ): """Yield code with unused imports removed.""" imports = SAFE_IMPORTS if additional_imports: imports |= frozenset(additional_imports) del additional_imports messages = check(source) if ignore_init_module_imports: marked_import_line_numbers = frozenset() else: marked_import_line_numbers = frozenset( unused_import_line_numbers(messages)) marked_unused_module = collections.defaultdict(lambda: []) for line_number, module_name in unused_import_module_name(messages): marked_unused_module[line_number].append(module_name) if expand_star_imports and not ( # See explanations in #18. re.search(r'\b__all__\b', source) or re.search(r'\bdel\b', source) ): marked_star_import_line_numbers = frozenset( star_import_used_line_numbers(messages)) if len(marked_star_import_line_numbers) > 1: # Auto expanding only possible for single star import marked_star_import_line_numbers = frozenset() else: undefined_names = [] for line_number, undefined_name, _ \ in star_import_usage_undefined_name(messages): undefined_names.append(undefined_name) if not undefined_names: marked_star_import_line_numbers = frozenset() else: marked_star_import_line_numbers = frozenset() if remove_unused_variables: marked_variable_line_numbers = frozenset( unused_variable_line_numbers(messages)) else: marked_variable_line_numbers = frozenset() if remove_duplicate_keys: marked_key_line_numbers = frozenset( duplicate_key_line_numbers(messages, source)) else: marked_key_line_numbers = frozenset() line_messages = get_messages_by_line(messages) sio = io.StringIO(source) previous_line = '' for line_number, line in enumerate(sio.readlines(), start=1): if '#' in line: yield line elif line_number in marked_import_line_numbers: yield filter_unused_import( line, unused_module=marked_unused_module[line_number], remove_all_unused_imports=remove_all_unused_imports, imports=imports, previous_line=previous_line) elif line_number in marked_variable_line_numbers: yield filter_unused_variable(line) elif line_number in marked_key_line_numbers: yield filter_duplicate_key(line, line_messages[line_number], line_number, marked_key_line_numbers, source) elif line_number in marked_star_import_line_numbers: yield filter_star_import(line, undefined_names) else: yield line previous_line = line
python
{ "resource": "" }
q270442
get_messages_by_line
test
def get_messages_by_line(messages):
    """Return dictionary that maps line number to message."""
    line_messages = {}
    for message in messages:
        line_messages[message.lineno] = message
    return line_messages
python
{ "resource": "" }
q270443
filter_star_import
test
def filter_star_import(line, marked_star_import_undefined_name):
    """Return line with the star import expanded."""
    undefined_name = sorted(set(marked_star_import_undefined_name))
    return re.sub(r'\*', ', '.join(undefined_name), line)
python
{ "resource": "" }
q270444
filter_duplicate_key
test
def filter_duplicate_key(line, message, line_number, marked_line_numbers, source, previous_line=''): """Return '' if first occurrence of the key otherwise return `line`.""" if marked_line_numbers and line_number == sorted(marked_line_numbers)[0]: return '' return line
python
{ "resource": "" }
q270445
dict_entry_has_key
test
def dict_entry_has_key(line, key): """Return True if `line` is a dict entry that uses `key`. Return False for multiline cases where the line should not be removed by itself. """ if '#' in line: return False result = re.match(r'\s*(.*)\s*:\s*(.*),\s*$', line) if not result: return False try: candidate_key = ast.literal_eval(result.group(1)) except (SyntaxError, ValueError): return False if multiline_statement(result.group(2)): return False return candidate_key == key
python
{ "resource": "" }
q270446
is_literal_or_name
test
def is_literal_or_name(value): """Return True if value is a literal or a name.""" try: ast.literal_eval(value) return True except (SyntaxError, ValueError): pass if value.strip() in ['dict()', 'list()', 'set()']: return True # Support removal of variables on the right side. But make sure # there are no dots, which could mean an access of a property. return re.match(r'^\w+\s*$', value)
python
{ "resource": "" }
q270447
useless_pass_line_numbers
test
def useless_pass_line_numbers(source): """Yield line numbers of unneeded "pass" statements.""" sio = io.StringIO(source) previous_token_type = None last_pass_row = None last_pass_indentation = None previous_line = '' for token in tokenize.generate_tokens(sio.readline): token_type = token[0] start_row = token[2][0] line = token[4] is_pass = (token_type == tokenize.NAME and line.strip() == 'pass') # Leading "pass". if (start_row - 1 == last_pass_row and get_indentation(line) == last_pass_indentation and token_type in ATOMS and not is_pass): yield start_row - 1 if is_pass: last_pass_row = start_row last_pass_indentation = get_indentation(line) # Trailing "pass". if (is_pass and previous_token_type != tokenize.INDENT and not previous_line.rstrip().endswith('\\')): yield start_row previous_token_type = token_type previous_line = line
python
{ "resource": "" }
q270448
filter_useless_pass
test
def filter_useless_pass(source): """Yield code with useless "pass" lines removed.""" try: marked_lines = frozenset(useless_pass_line_numbers(source)) except (SyntaxError, tokenize.TokenError): marked_lines = frozenset() sio = io.StringIO(source) for line_number, line in enumerate(sio.readlines(), start=1): if line_number not in marked_lines: yield line
python
{ "resource": "" }
q270449
get_indentation
test
def get_indentation(line):
    """Return leading whitespace."""
    if line.strip():
        non_whitespace_index = len(line) - len(line.lstrip())
        return line[:non_whitespace_index]
    else:
        return ''
python
{ "resource": "" }
q270450
get_line_ending
test
def get_line_ending(line):
    """Return line ending."""
    non_whitespace_index = len(line.rstrip()) - len(line)
    if not non_whitespace_index:
        return ''
    else:
        return line[non_whitespace_index:]
python
{ "resource": "" }
q270451
fix_code
test
def fix_code(source, additional_imports=None, expand_star_imports=False, remove_all_unused_imports=False, remove_duplicate_keys=False, remove_unused_variables=False, ignore_init_module_imports=False): """Return code with all filtering run on it.""" if not source: return source # pyflakes does not handle "nonlocal" correctly. if 'nonlocal' in source: remove_unused_variables = False filtered_source = None while True: filtered_source = ''.join( filter_useless_pass(''.join( filter_code( source, additional_imports=additional_imports, expand_star_imports=expand_star_imports, remove_all_unused_imports=remove_all_unused_imports, remove_duplicate_keys=remove_duplicate_keys, remove_unused_variables=remove_unused_variables, ignore_init_module_imports=ignore_init_module_imports, )))) if filtered_source == source: break source = filtered_source return filtered_source
python
{ "resource": "" }
q270452
_split_comma_separated
test
def _split_comma_separated(string):
    """Return a set of strings."""
    return set(text.strip() for text in string.split(',') if text.strip())
python
{ "resource": "" }
q270453
is_python_file
test
def is_python_file(filename): """Return True if filename is Python file.""" if filename.endswith('.py'): return True try: with open_with_encoding( filename, None, limit_byte_check=MAX_PYTHON_FILE_DETECTION_BYTES) as f: text = f.read(MAX_PYTHON_FILE_DETECTION_BYTES) if not text: return False first_line = text.splitlines()[0] except (IOError, IndexError): return False if not PYTHON_SHEBANG_REGEX.match(first_line): return False return True
python
{ "resource": "" }
q270454
is_exclude_file
test
def is_exclude_file(filename, exclude): """Return True if file matches exclude pattern.""" base_name = os.path.basename(filename) if base_name.startswith('.'): return True for pattern in exclude: if fnmatch.fnmatch(base_name, pattern): return True if fnmatch.fnmatch(filename, pattern): return True return False
python
{ "resource": "" }
q270455
find_files
test
def find_files(filenames, recursive, exclude): """Yield filenames.""" while filenames: name = filenames.pop(0) if recursive and os.path.isdir(name): for root, directories, children in os.walk(name): filenames += [os.path.join(root, f) for f in children if match_file(os.path.join(root, f), exclude)] directories[:] = [d for d in directories if match_file(os.path.join(root, d), exclude)] else: if not is_exclude_file(name, exclude): yield name
python
{ "resource": "" }
q270456
_main
test
def _main(argv, standard_out, standard_error): """Return exit status. 0 means no error. """ import argparse parser = argparse.ArgumentParser(description=__doc__, prog='autoflake') parser.add_argument('-c', '--check', action='store_true', help='return error code if changes are needed') parser.add_argument('-i', '--in-place', action='store_true', help='make changes to files instead of printing diffs') parser.add_argument('-r', '--recursive', action='store_true', help='drill down directories recursively') parser.add_argument('--exclude', metavar='globs', help='exclude file/directory names that match these ' 'comma-separated globs') parser.add_argument('--imports', help='by default, only unused standard library ' 'imports are removed; specify a comma-separated ' 'list of additional modules/packages') parser.add_argument('--expand-star-imports', action='store_true', help='expand wildcard star imports with undefined ' 'names; this only triggers if there is only ' 'one star import in the file; this is skipped if ' 'there are any uses of `__all__` or `del` in the ' 'file') parser.add_argument('--remove-all-unused-imports', action='store_true', help='remove all unused imports (not just those from ' 'the standard library)') parser.add_argument('--ignore-init-module-imports', action='store_true', help='exclude __init__.py when removing unused ' 'imports') parser.add_argument('--remove-duplicate-keys', action='store_true', help='remove all duplicate keys in objects') parser.add_argument('--remove-unused-variables', action='store_true', help='remove unused variables') parser.add_argument('--version', action='version', version='%(prog)s ' + __version__) parser.add_argument('files', nargs='+', help='files to format') args = parser.parse_args(argv[1:]) if args.remove_all_unused_imports and args.imports: print('Using both --remove-all and --imports is redundant', file=standard_error) return 1 if args.exclude: args.exclude = _split_comma_separated(args.exclude) else: args.exclude = set([]) filenames = list(set(args.files)) failure = False for name in find_files(filenames, args.recursive, args.exclude): try: fix_file(name, args=args, standard_out=standard_out) except IOError as exception: print(unicode(exception), file=standard_error) failure = True return 1 if failure else 0
python
{ "resource": "" }
q270457
ObtainLeaseResponsePayload.read
test
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the data encoding the ObtainLease response payload and decode it into its constituent parts. Args: input_stream (stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. Raises: ValueError: Raised if the data attribute is missing from the encoded payload. """ super(ObtainLeaseResponsePayload, self).read( input_stream, kmip_version=kmip_version ) local_stream = utils.BytearrayStream(input_stream.read(self.length)) if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): self._unique_identifier = primitives.TextString( tag=enums.Tags.UNIQUE_IDENTIFIER ) self._unique_identifier.read( local_stream, kmip_version=kmip_version ) if self.is_tag_next(enums.Tags.LEASE_TIME, local_stream): self._lease_time = primitives.Interval( tag=enums.Tags.LEASE_TIME ) self._lease_time.read(local_stream, kmip_version=kmip_version) if self.is_tag_next(enums.Tags.LAST_CHANGE_DATE, local_stream): self._last_change_date = primitives.DateTime( tag=enums.Tags.LAST_CHANGE_DATE ) self._last_change_date.read( local_stream, kmip_version=kmip_version ) self.is_oversized(local_stream)
python
{ "resource": "" }
q270458
ObtainLeaseResponsePayload.write
test
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the ObtainLease response payload to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. Raises: ValueError: Raised if the data attribute is not defined. """ local_stream = utils.BytearrayStream() if self._unique_identifier: self._unique_identifier.write( local_stream, kmip_version=kmip_version ) if self._lease_time: self._lease_time.write( local_stream, kmip_version=kmip_version ) if self._last_change_date: self._last_change_date.write( local_stream, kmip_version=kmip_version ) self.length = local_stream.length() super(ObtainLeaseResponsePayload, self).write( output_stream, kmip_version=kmip_version ) output_stream.write(local_stream.buffer)
python
{ "resource": "" }
q270459
CancelRequestPayload.write
test
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the Cancel request payload to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. Raises: ValueError: Raised if the data attribute is not defined. """ local_stream = utils.BytearrayStream() if self._asynchronous_correlation_value: self._asynchronous_correlation_value.write( local_stream, kmip_version=kmip_version ) self.length = local_stream.length() super(CancelRequestPayload, self).write( output_stream, kmip_version=kmip_version ) output_stream.write(local_stream.buffer)
python
{ "resource": "" }
q270460
CancelResponsePayload.read
test
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the data encoding the Cancel response payload and decode it into its constituent parts. Args: input_stream (stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. Raises: ValueError: Raised if the data attribute is missing from the encoded payload. """ super(CancelResponsePayload, self).read( input_stream, kmip_version=kmip_version ) local_stream = utils.BytearrayStream(input_stream.read(self.length)) if self.is_tag_next( enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE, local_stream ): self._asynchronous_correlation_value = primitives.ByteString( tag=enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE ) self._asynchronous_correlation_value.read( local_stream, kmip_version=kmip_version ) if self.is_tag_next(enums.Tags.CANCELLATION_RESULT, local_stream): self._cancellation_result = primitives.Enumeration( enums.CancellationResult, tag=enums.Tags.CANCELLATION_RESULT ) self._cancellation_result.read( local_stream, kmip_version=kmip_version ) self.is_oversized(local_stream)
python
{ "resource": "" }
q270461
Name.create
test
def create(cls, name_value, name_type): ''' Returns a Name object, populated with the given value and type ''' if isinstance(name_value, Name.NameValue): value = name_value elif isinstance(name_value, str): value = cls.NameValue(name_value) else: name = 'Name' msg = exceptions.ErrorStrings.BAD_EXP_RECV member = 'name_value' raise TypeError(msg.format('{0}.{1}'.format(name, member), 'name_value', type(Name.NameValue), type(name_value))) if isinstance(name_type, Name.NameType): n_type = name_type elif isinstance(name_type, Enum): n_type = cls.NameType(name_type) else: name = 'Name' msg = exceptions.ErrorStrings.BAD_EXP_RECV member = 'name_type' raise TypeError(msg.format('{0}.{1}'.format(name, member), 'name_type', type(Name.NameType), type(name_type))) return Name(name_value=value, name_type=n_type)
python
{ "resource": "" }
q270462
Digest.read
test
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the data encoding the Digest object and decode it into its constituent parts. Args: istream (Stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. """ super(Digest, self).read(istream, kmip_version=kmip_version) tstream = BytearrayStream(istream.read(self.length)) self.hashing_algorithm.read(tstream, kmip_version=kmip_version) self.digest_value.read(tstream, kmip_version=kmip_version) self.key_format_type.read(tstream, kmip_version=kmip_version) self.is_oversized(tstream) self.validate()
python
{ "resource": "" }
q270463
Digest.write
test
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the Digest object to a stream. Args: ostream (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ tstream = BytearrayStream() self.hashing_algorithm.write(tstream, kmip_version=kmip_version) self.digest_value.write(tstream, kmip_version=kmip_version) self.key_format_type.write(tstream, kmip_version=kmip_version) self.length = tstream.length() super(Digest, self).write(ostream, kmip_version=kmip_version) ostream.write(tstream.buffer)
python
{ "resource": "" }
q270464
Digest.create
test
def create(cls, hashing_algorithm=HashingAlgorithmEnum.SHA_256, digest_value=b'', key_format_type=KeyFormatTypeEnum.RAW): """ Construct a Digest object from provided digest values. Args: hashing_algorithm (HashingAlgorithm): An enumeration representing the hash algorithm used to compute the digest. Optional, defaults to HashingAlgorithm.SHA_256. digest_value (byte string): The bytes of the digest hash. Optional, defaults to the empty byte string. key_format_type (KeyFormatType): An enumeration representing the format of the key corresponding to the digest. Optional, defaults to KeyFormatType.RAW. Returns: Digest: The newly created Digest. Example: >>> x = Digest.create(HashingAlgorithm.MD5, b'\x00', ... KeyFormatType.RAW) >>> x.hashing_algorithm HashingAlgorithm(value=HashingAlgorithm.MD5) >>> x.digest_value DigestValue(value=bytearray(b'\x00')) >>> x.key_format_type KeyFormatType(value=KeyFormatType.RAW) """ algorithm = HashingAlgorithm(hashing_algorithm) value = DigestValue(bytearray(digest_value)) format_type = KeyFormatType(key_format_type) return Digest(hashing_algorithm=algorithm, digest_value=value, key_format_type=format_type)
python
{ "resource": "" }
q270465
ApplicationSpecificInformation.read
test
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the data encoding the ApplicationSpecificInformation object and decode it into its constituent parts. Args: istream (Stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. """ super(ApplicationSpecificInformation, self).read( istream, kmip_version=kmip_version ) tstream = BytearrayStream(istream.read(self.length)) self.application_namespace.read(tstream, kmip_version=kmip_version) self.application_data.read(tstream, kmip_version=kmip_version) self.is_oversized(tstream) self.validate()
python
{ "resource": "" }
q270466
ApplicationSpecificInformation.write
test
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the ApplicationSpecificInformation object to a stream. Args: ostream (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ tstream = BytearrayStream() self.application_namespace.write(tstream, kmip_version=kmip_version) self.application_data.write(tstream, kmip_version=kmip_version) self.length = tstream.length() super(ApplicationSpecificInformation, self).write( ostream, kmip_version=kmip_version ) ostream.write(tstream.buffer)
python
{ "resource": "" }
q270467
ApplicationSpecificInformation.create
test
def create(cls, application_namespace, application_data): """ Construct an ApplicationSpecificInformation object from provided data and namespace values. Args: application_namespace (str): The name of the application namespace. application_data (str): Application data related to the namespace. Returns: ApplicationSpecificInformation: The newly created set of application information. Example: >>> x = ApplicationSpecificInformation.create('namespace', 'data') >>> x.application_namespace.value 'namespace' >>> x.application_data.value 'data' """ namespace = ApplicationNamespace(application_namespace) data = ApplicationData(application_data) return ApplicationSpecificInformation( application_namespace=namespace, application_data=data)
python
{ "resource": "" }
q270468
DerivationParameters.read
test
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the data encoding the DerivationParameters struct and decode it into its constituent parts. Args: input_stream (stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. """ super(DerivationParameters, self).read( input_stream, kmip_version=kmip_version ) local_stream = BytearrayStream(input_stream.read(self.length)) if self.is_tag_next( enums.Tags.CRYPTOGRAPHIC_PARAMETERS, local_stream ): self._cryptographic_parameters = CryptographicParameters() self._cryptographic_parameters.read( local_stream, kmip_version=kmip_version ) if self.is_tag_next(enums.Tags.INITIALIZATION_VECTOR, local_stream): self._initialization_vector = ByteString( tag=enums.Tags.INITIALIZATION_VECTOR ) self._initialization_vector.read( local_stream, kmip_version=kmip_version ) if self.is_tag_next(enums.Tags.DERIVATION_DATA, local_stream): self._derivation_data = ByteString(tag=enums.Tags.DERIVATION_DATA) self._derivation_data.read(local_stream, kmip_version=kmip_version) if self.is_tag_next(enums.Tags.SALT, local_stream): self._salt = ByteString(tag=enums.Tags.SALT) self._salt.read(local_stream, kmip_version=kmip_version) if self.is_tag_next(Tags.ITERATION_COUNT, local_stream): self._iteration_count = Integer(tag=Tags.ITERATION_COUNT) self._iteration_count.read(local_stream, kmip_version=kmip_version) self.is_oversized(local_stream)
python
{ "resource": "" }
q270469
DerivationParameters.write
test
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the DerivationParameters struct to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ local_stream = BytearrayStream() if self._cryptographic_parameters: self._cryptographic_parameters.write( local_stream, kmip_version=kmip_version ) if self._initialization_vector: self._initialization_vector.write( local_stream, kmip_version=kmip_version ) if self._derivation_data: self._derivation_data.write( local_stream, kmip_version=kmip_version ) if self._salt: self._salt.write( local_stream, kmip_version=kmip_version ) if self._iteration_count: self._iteration_count.write( local_stream, kmip_version=kmip_version ) self.length = local_stream.length() super(DerivationParameters, self).write( output_stream, kmip_version=kmip_version ) output_stream.write(local_stream.buffer)
python
{ "resource": "" }
q270470
GetRequestPayload.read
test
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the data encoding the Get request payload and decode it into its constituent parts. Args: input_stream (stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. """ super(GetRequestPayload, self).read( input_stream, kmip_version=kmip_version ) local_stream = utils.BytearrayStream(input_stream.read(self.length)) if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): self._unique_identifier = primitives.TextString( tag=enums.Tags.UNIQUE_IDENTIFIER ) self._unique_identifier.read( local_stream, kmip_version=kmip_version ) if self.is_tag_next(enums.Tags.KEY_FORMAT_TYPE, local_stream): self._key_format_type = primitives.Enumeration( enum=enums.KeyFormatType, tag=enums.Tags.KEY_FORMAT_TYPE ) self._key_format_type.read( local_stream, kmip_version=kmip_version ) if self.is_tag_next(enums.Tags.KEY_COMPRESSION_TYPE, local_stream): self._key_compression_type = primitives.Enumeration( enum=enums.KeyCompressionType, tag=enums.Tags.KEY_COMPRESSION_TYPE ) self._key_compression_type.read( local_stream, kmip_version=kmip_version ) if self.is_tag_next( enums.Tags.KEY_WRAPPING_SPECIFICATION, local_stream ): self._key_wrapping_specification = \ objects.KeyWrappingSpecification() self._key_wrapping_specification.read( local_stream, kmip_version=kmip_version ) self.is_oversized(local_stream)
python
{ "resource": "" }
q270471
GetRequestPayload.write
test
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the Get request payload to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ local_stream = utils.BytearrayStream() if self._unique_identifier is not None: self._unique_identifier.write( local_stream, kmip_version=kmip_version ) if self._key_format_type is not None: self._key_format_type.write( local_stream, kmip_version=kmip_version ) if self._key_compression_type is not None: self._key_compression_type.write( local_stream, kmip_version=kmip_version ) if self._key_wrapping_specification is not None: self._key_wrapping_specification.write( local_stream, kmip_version=kmip_version ) self.length = local_stream.length() super(GetRequestPayload, self).write( output_stream, kmip_version=kmip_version ) output_stream.write(local_stream.buffer)
python
{ "resource": "" }
q270472
GetResponsePayload.read
test
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the data encoding the Get response payload and decode it into its constituent parts. Args: input_stream (stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. Raises: ValueError: Raised if the object type, unique identifier, or secret attributes are missing from the encoded payload. """ super(GetResponsePayload, self).read( input_stream, kmip_version=kmip_version ) local_stream = utils.BytearrayStream(input_stream.read(self.length)) if self.is_tag_next(enums.Tags.OBJECT_TYPE, local_stream): self._object_type = primitives.Enumeration( enum=enums.ObjectType, tag=enums.Tags.OBJECT_TYPE ) self._object_type.read(local_stream, kmip_version=kmip_version) else: raise ValueError( "Parsed payload encoding is missing the object type field." ) if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream): self._unique_identifier = primitives.TextString( tag=enums.Tags.UNIQUE_IDENTIFIER ) self._unique_identifier.read( local_stream, kmip_version=kmip_version ) else: raise ValueError( "Parsed payload encoding is missing the unique identifier " "field." ) self.secret = self.secret_factory.create(self.object_type) if self.is_tag_next(self._secret.tag, local_stream): self._secret.read(local_stream, kmip_version=kmip_version) else: raise ValueError( "Parsed payload encoding is missing the secret field." ) self.is_oversized(local_stream)
python
{ "resource": "" }
q270473
GetResponsePayload.write
test
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the Get response payload to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. Raises: ValueError: Raised if the object type, unique identifier, or secret attributes are missing from the payload struct. """ local_stream = utils.BytearrayStream() if self.object_type: self._object_type.write(local_stream, kmip_version=kmip_version) else: raise ValueError("Payload is missing the object type field.") if self.unique_identifier: self._unique_identifier.write( local_stream, kmip_version=kmip_version ) else: raise ValueError( "Payload is missing the unique identifier field." ) if self.secret: self._secret.write(local_stream, kmip_version=kmip_version) else: raise ValueError("Payload is missing the secret field.") self.length = local_stream.length() super(GetResponsePayload, self).write( output_stream, kmip_version=kmip_version ) output_stream.write(local_stream.buffer)
python
{ "resource": "" }
q270474
SignatureVerifyRequestPayload.read
test
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
        """
        Read the data encoding the SignatureVerify request payload and decode
        it into its constituent parts. All fields are optional, so no
        exception is raised for missing fields.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
            kmip_version (KMIPVersion): An enumeration defining the KMIP
                version with which the object will be decoded. Optional,
                defaults to KMIP 1.0.
        """
        super(SignatureVerifyRequestPayload, self).read(
            input_stream,
            kmip_version=kmip_version
        )
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
            self._unique_identifier = primitives.TextString(
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
            self._unique_identifier.read(
                local_stream,
                kmip_version=kmip_version
            )
        if self.is_tag_next(enums.Tags.CRYPTOGRAPHIC_PARAMETERS, local_stream):
            self._cryptographic_parameters = \
                attributes.CryptographicParameters()
            self._cryptographic_parameters.read(
                local_stream,
                kmip_version=kmip_version
            )
        if self.is_tag_next(enums.Tags.DATA, local_stream):
            self._data = primitives.ByteString(tag=enums.Tags.DATA)
            self._data.read(local_stream, kmip_version=kmip_version)
        if self.is_tag_next(enums.Tags.DIGESTED_DATA, local_stream):
            self._digested_data = primitives.ByteString(
                tag=enums.Tags.DIGESTED_DATA
            )
            self._digested_data.read(local_stream, kmip_version=kmip_version)
        if self.is_tag_next(enums.Tags.SIGNATURE_DATA, local_stream):
            self._signature_data = primitives.ByteString(
                tag=enums.Tags.SIGNATURE_DATA
            )
            self._signature_data.read(local_stream, kmip_version=kmip_version)
        if self.is_tag_next(enums.Tags.CORRELATION_VALUE, local_stream):
            self._correlation_value = primitives.ByteString(
                tag=enums.Tags.CORRELATION_VALUE
            )
            self._correlation_value.read(
                local_stream,
                kmip_version=kmip_version
            )
        if self.is_tag_next(enums.Tags.INIT_INDICATOR, local_stream):
            self._init_indicator = primitives.Boolean(
                tag=enums.Tags.INIT_INDICATOR
            )
            self._init_indicator.read(local_stream, kmip_version=kmip_version)
        if self.is_tag_next(enums.Tags.FINAL_INDICATOR, local_stream):
            self._final_indicator = primitives.Boolean(
                tag=enums.Tags.FINAL_INDICATOR
            )
            self._final_indicator.read(local_stream, kmip_version=kmip_version)

        self.is_oversized(local_stream)
python
{ "resource": "" }
q270475
SignatureVerifyRequestPayload.write
test
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
        """
        Write the data encoding the SignatureVerify request payload to a
        stream.

        Args:
            output_stream (stream): A data stream in which to encode object
                data, supporting a write method; usually a BytearrayStream
                object.
            kmip_version (KMIPVersion): An enumeration defining the KMIP
                version with which the object will be encoded. Optional,
                defaults to KMIP 1.0.
        """
        local_stream = utils.BytearrayStream()

        if self._unique_identifier:
            self._unique_identifier.write(
                local_stream,
                kmip_version=kmip_version
            )
        if self._cryptographic_parameters:
            self._cryptographic_parameters.write(
                local_stream,
                kmip_version=kmip_version
            )
        if self._data:
            self._data.write(local_stream, kmip_version=kmip_version)
        if self._digested_data:
            self._digested_data.write(local_stream, kmip_version=kmip_version)
        if self._signature_data:
            self._signature_data.write(
                local_stream,
                kmip_version=kmip_version
            )
        if self._correlation_value:
            self._correlation_value.write(
                local_stream,
                kmip_version=kmip_version
            )
        if self._init_indicator:
            self._init_indicator.write(
                local_stream,
                kmip_version=kmip_version
            )
        if self._final_indicator:
            self._final_indicator.write(
                local_stream,
                kmip_version=kmip_version
            )

        self.length = local_stream.length()
        super(SignatureVerifyRequestPayload, self).write(
            output_stream,
            kmip_version=kmip_version
        )
        output_stream.write(local_stream.buffer)
python
{ "resource": "" }
q270476
SignatureVerifyResponsePayload.read
test
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
        """
        Read the data encoding the SignatureVerify response payload and decode
        it into its constituent parts.

        Args:
            input_stream (stream): A data stream containing encoded object
                data, supporting a read method; usually a BytearrayStream
                object.
            kmip_version (KMIPVersion): An enumeration defining the KMIP
                version with which the object will be decoded. Optional,
                defaults to KMIP 1.0.

        Raises:
            ValueError: Raised if the unique identifier or validity indicator
                attributes are missing from the encoded payload.
        """
        super(SignatureVerifyResponsePayload, self).read(
            input_stream,
            kmip_version=kmip_version
        )
        local_stream = utils.BytearrayStream(input_stream.read(self.length))

        if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, local_stream):
            self._unique_identifier = primitives.TextString(
                tag=enums.Tags.UNIQUE_IDENTIFIER
            )
            self._unique_identifier.read(
                local_stream,
                kmip_version=kmip_version
            )
        else:
            raise ValueError(
                "Parsed payload encoding is missing the unique identifier "
                "field."
            )
        if self.is_tag_next(enums.Tags.VALIDITY_INDICATOR, local_stream):
            self._validity_indicator = primitives.Enumeration(
                enums.ValidityIndicator,
                tag=enums.Tags.VALIDITY_INDICATOR
            )
            self._validity_indicator.read(
                local_stream,
                kmip_version=kmip_version
            )
        else:
            raise ValueError(
                "Parsed payload encoding is missing the validity indicator "
                "field."
            )
        if self.is_tag_next(enums.Tags.DATA, local_stream):
            self._data = primitives.ByteString(tag=enums.Tags.DATA)
            self._data.read(local_stream, kmip_version=kmip_version)
        if self.is_tag_next(enums.Tags.CORRELATION_VALUE, local_stream):
            self._correlation_value = primitives.ByteString(
                tag=enums.Tags.CORRELATION_VALUE
            )
            self._correlation_value.read(
                local_stream,
                kmip_version=kmip_version
            )

        self.is_oversized(local_stream)
python
{ "resource": "" }
q270477
KmipEngine.process_request
test
def process_request(self, request, credential=None): """ Process a KMIP request message. This routine is the main driver of the KmipEngine. It breaks apart and processes the request header, handles any message errors that may result, and then passes the set of request batch items on for processing. This routine is thread-safe, allowing multiple client connections to use the same KmipEngine. Args: request (RequestMessage): The request message containing the batch items to be processed. credential (string): Identifying information about the client obtained from the client certificate. Optional, defaults to None. Returns: ResponseMessage: The response containing all of the results from the request batch items. """ self._client_identity = [None, None] header = request.request_header # Process the protocol version self._set_protocol_version(header.protocol_version) # Process the maximum response size max_response_size = None if header.maximum_response_size: max_response_size = header.maximum_response_size.value # Process the time stamp now = int(time.time()) if header.time_stamp: then = header.time_stamp.value if (now >= then) and ((now - then) < 60): self._logger.info("Received request at time: {0}".format( time.strftime( "%Y-%m-%d %H:%M:%S", time.gmtime(then) ) )) else: if now < then: self._logger.warning( "Received request with future timestamp. Received " "timestamp: {0}, Current timestamp: {1}".format( then, now ) ) raise exceptions.InvalidMessage( "Future request rejected by server." ) else: self._logger.warning( "Received request with old timestamp. Possible " "replay attack. Received timestamp: {0}, Current " "timestamp: {1}".format(then, now) ) raise exceptions.InvalidMessage( "Stale request rejected by server." ) else: self._logger.info("Received request at time: {0}".format( time.strftime( "%Y-%m-%d %H:%M:%S", time.gmtime(now) ) )) # Process the asynchronous indicator self.is_asynchronous = False if header.asynchronous_indicator is not None: self.is_asynchronous = header.asynchronous_indicator.value if self.is_asynchronous: raise exceptions.InvalidMessage( "Asynchronous operations are not supported." ) # Process the authentication credentials if header.authentication: if header.authentication.credentials: auth_credentials = header.authentication.credentials[0] else: auth_credentials = None else: auth_credentials = None self._verify_credential(auth_credentials, credential) # Process the batch error continuation option batch_error_option = enums.BatchErrorContinuationOption.STOP if header.batch_error_cont_option is not None: batch_error_option = header.batch_error_cont_option.value if batch_error_option == enums.BatchErrorContinuationOption.UNDO: raise exceptions.InvalidMessage( "Undo option for batch handling is not supported." ) # Process the batch order option batch_order_option = False if header.batch_order_option: batch_order_option = header.batch_order_option.value response_batch = self._process_batch( request.batch_items, batch_error_option, batch_order_option ) response = self._build_response( header.protocol_version, response_batch ) return response, max_response_size, header.protocol_version
python
{ "resource": "" }
q270478
KmipEngine.build_error_response
test
def build_error_response(self, version, reason, message):
        """
        Build a simple ResponseMessage with a single error result.

        Args:
            version (ProtocolVersion): The protocol version the response
                should be addressed with.
            reason (ResultReason): An enumeration classifying the type of
                error that occurred.
            message (str): A string providing additional information about
                the error.

        Returns:
            ResponseMessage: The simple ResponseMessage containing a
                single error result.
        """
        batch_item = messages.ResponseBatchItem(
            result_status=contents.ResultStatus(
                enums.ResultStatus.OPERATION_FAILED
            ),
            result_reason=contents.ResultReason(reason),
            result_message=contents.ResultMessage(message)
        )
        return self._build_response(version, [batch_item])
python
{ "resource": "" }
q270479
KmipEngine._process_template_attribute
test
def _process_template_attribute(self, template_attribute): """ Given a kmip.core TemplateAttribute object, extract the attribute value data into a usable dictionary format. """ attributes = {} if len(template_attribute.names) > 0: raise exceptions.ItemNotFound( "Attribute templates are not supported." ) for attribute in template_attribute.attributes: name = attribute.attribute_name.value if not self._attribute_policy.is_attribute_supported(name): raise exceptions.InvalidField( "The {0} attribute is unsupported.".format(name) ) if self._attribute_policy.is_attribute_multivalued(name): values = attributes.get(name, list()) if (not attribute.attribute_index) and len(values) > 0: raise exceptions.InvalidField( "Attribute index missing from multivalued attribute." ) values.append(attribute.attribute_value) attributes.update([(name, values)]) else: if attribute.attribute_index: if attribute.attribute_index.value != 0: raise exceptions.InvalidField( "Non-zero attribute index found for " "single-valued attribute." ) value = attributes.get(name, None) if value: raise exceptions.IndexOutOfBounds( "Cannot set multiple instances of the " "{0} attribute.".format(name) ) else: attributes.update([(name, attribute.attribute_value)]) return attributes
python
{ "resource": "" }
q270480
KmipEngine._get_attributes_from_managed_object
test
def _get_attributes_from_managed_object(self, managed_object, attr_names): """ Given a kmip.pie object and a list of attribute names, attempt to get all of the existing attribute values from the object. """ attr_factory = attribute_factory.AttributeFactory() retrieved_attributes = list() if not attr_names: attr_names = self._attribute_policy.get_all_attribute_names() for attribute_name in attr_names: object_type = managed_object._object_type if not self._attribute_policy.is_attribute_supported( attribute_name ): continue if self._attribute_policy.is_attribute_applicable_to_object_type( attribute_name, object_type ): try: attribute_value = self._get_attribute_from_managed_object( managed_object, attribute_name ) except Exception: attribute_value = None if attribute_value is not None: if self._attribute_policy.is_attribute_multivalued( attribute_name ): for count, value in enumerate(attribute_value): attribute = attr_factory.create_attribute( enums.AttributeType(attribute_name), value, count ) retrieved_attributes.append(attribute) else: attribute = attr_factory.create_attribute( enums.AttributeType(attribute_name), attribute_value ) retrieved_attributes.append(attribute) return retrieved_attributes
python
{ "resource": "" }
q270481
KmipEngine._get_attribute_from_managed_object
test
def _get_attribute_from_managed_object(self, managed_object, attr_name): """ Get the attribute value from the kmip.pie managed object. """ if attr_name == 'Unique Identifier': return str(managed_object.unique_identifier) elif attr_name == 'Name': names = list() for name in managed_object.names: name = attributes.Name( attributes.Name.NameValue(name), attributes.Name.NameType( enums.NameType.UNINTERPRETED_TEXT_STRING ) ) names.append(name) return names elif attr_name == 'Object Type': return managed_object._object_type elif attr_name == 'Cryptographic Algorithm': return managed_object.cryptographic_algorithm elif attr_name == 'Cryptographic Length': return managed_object.cryptographic_length elif attr_name == 'Cryptographic Parameters': return None elif attr_name == 'Cryptographic Domain Parameters': return None elif attr_name == 'Certificate Type': return managed_object.certificate_type elif attr_name == 'Certificate Length': return None elif attr_name == 'X.509 Certificate Identifier': return None elif attr_name == 'X.509 Certificate Subject': return None elif attr_name == 'X.509 Certificate Issuer': return None elif attr_name == 'Certificate Identifier': return None elif attr_name == 'Certificate Subject': return None elif attr_name == 'Certificate Issuer': return None elif attr_name == 'Digital Signature Algorithm': return None elif attr_name == 'Digest': return None elif attr_name == 'Operation Policy Name': return managed_object.operation_policy_name elif attr_name == 'Cryptographic Usage Mask': return managed_object.cryptographic_usage_masks elif attr_name == 'Lease Time': return None elif attr_name == 'Usage Limits': return None elif attr_name == 'State': return managed_object.state elif attr_name == 'Initial Date': return managed_object.initial_date elif attr_name == 'Activation Date': return None elif attr_name == 'Process Start Date': return None elif attr_name == 'Protect Stop Date': return None elif attr_name == 'Deactivation Date': return None elif attr_name == 'Destroy Date': return None elif attr_name == 'Compromise Occurrence Date': return None elif attr_name == 'Compromise Date': return None elif attr_name == 'Revocation Reason': return None elif attr_name == 'Archive Date': return None elif attr_name == 'Object Group': return None elif attr_name == 'Fresh': return None elif attr_name == 'Link': return None elif attr_name == 'Application Specific Information': return None elif attr_name == 'Contact Information': return None elif attr_name == 'Last Change Date': return None else: # Since custom attribute names are possible, just return None # for unrecognized attributes. This satisfies the spec. return None
python
{ "resource": "" }
q270482
KmipEngine._set_attributes_on_managed_object
test
def _set_attributes_on_managed_object(self, managed_object, attributes): """ Given a kmip.pie object and a dictionary of attributes, attempt to set the attribute values on the object. """ for attribute_name, attribute_value in six.iteritems(attributes): object_type = managed_object._object_type if self._attribute_policy.is_attribute_applicable_to_object_type( attribute_name, object_type): self._set_attribute_on_managed_object( managed_object, (attribute_name, attribute_value) ) else: name = object_type.name raise exceptions.InvalidField( "Cannot set {0} attribute on {1} object.".format( attribute_name, ''.join([x.capitalize() for x in name.split('_')]) ) )
python
{ "resource": "" }
q270483
KmipEngine._set_attribute_on_managed_object
test
def _set_attribute_on_managed_object(self, managed_object, attribute): """ Set the attribute value on the kmip.pie managed object. """ attribute_name = attribute[0] attribute_value = attribute[1] if self._attribute_policy.is_attribute_multivalued(attribute_name): if attribute_name == 'Name': managed_object.names.extend( [x.name_value.value for x in attribute_value] ) for name in managed_object.names: if managed_object.names.count(name) > 1: raise exceptions.InvalidField( "Cannot set duplicate name values." ) else: # TODO (peterhamilton) Remove when all attributes are supported raise exceptions.InvalidField( "The {0} attribute is unsupported.".format(attribute_name) ) else: field = None value = attribute_value.value if attribute_name == 'Cryptographic Algorithm': field = 'cryptographic_algorithm' elif attribute_name == 'Cryptographic Length': field = 'cryptographic_length' elif attribute_name == 'Cryptographic Usage Mask': field = 'cryptographic_usage_masks' value = list() for e in enums.CryptographicUsageMask: if e.value & attribute_value.value: value.append(e) elif attribute_name == 'Operation Policy Name': field = 'operation_policy_name' if field: existing_value = getattr(managed_object, field) if existing_value: if existing_value != value: raise exceptions.InvalidField( "Cannot overwrite the {0} attribute.".format( attribute_name ) ) else: setattr(managed_object, field, value) else: # TODO (peterhamilton) Remove when all attributes are supported raise exceptions.InvalidField( "The {0} attribute is unsupported.".format(attribute_name) )
python
{ "resource": "" }
q270484
KmipEngine.is_allowed
test
def is_allowed( self, policy_name, session_user, session_group, object_owner, object_type, operation ): """ Determine if object access is allowed for the provided policy and session settings. """ policy_section = self.get_relevant_policy_section( policy_name, session_group ) if policy_section is None: return False object_policy = policy_section.get(object_type) if not object_policy: self._logger.warning( "The '{0}' policy does not apply to {1} objects.".format( policy_name, self._get_enum_string(object_type) ) ) return False operation_object_policy = object_policy.get(operation) if not operation_object_policy: self._logger.warning( "The '{0}' policy does not apply to {1} operations on {2} " "objects.".format( policy_name, self._get_enum_string(operation), self._get_enum_string(object_type) ) ) return False if operation_object_policy == enums.Policy.ALLOW_ALL: return True elif operation_object_policy == enums.Policy.ALLOW_OWNER: if session_user == object_owner: return True else: return False elif operation_object_policy == enums.Policy.DISALLOW_ALL: return False else: return False
python
{ "resource": "" }
q270485
DecryptRequestPayload.write
test
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the Decrypt request payload to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. Raises: ValueError: Raised if the data attribute is not defined. """ local_stream = utils.BytearrayStream() if self._unique_identifier: self._unique_identifier.write( local_stream, kmip_version=kmip_version ) if self._cryptographic_parameters: self._cryptographic_parameters.write( local_stream, kmip_version=kmip_version ) if self._data: self._data.write(local_stream, kmip_version=kmip_version) else: raise ValueError("invalid payload missing the data attribute") if self._iv_counter_nonce: self._iv_counter_nonce.write( local_stream, kmip_version=kmip_version ) self.length = local_stream.length() super(DecryptRequestPayload, self).write( output_stream, kmip_version=kmip_version ) output_stream.write(local_stream.buffer)
python
{ "resource": "" }
q270486
SecretFactory.create
test
def create(self, secret_type, value=None): """ Create a secret object of the specified type with the given value. Args: secret_type (ObjectType): An ObjectType enumeration specifying the type of secret to create. value (dict): A dictionary containing secret data. Optional, defaults to None. Returns: secret: The newly constructed secret object. Raises: TypeError: If the provided secret type is unrecognized. Example: >>> factory.create(ObjectType.SYMMETRIC_KEY) SymmetricKey(...) """ if secret_type is ObjectType.CERTIFICATE: return self._create_certificate(value) elif secret_type is ObjectType.SYMMETRIC_KEY: return self._create_symmetric_key(value) elif secret_type is ObjectType.PUBLIC_KEY: return self._create_public_key(value) elif secret_type is ObjectType.PRIVATE_KEY: return self._create_private_key(value) elif secret_type is ObjectType.SPLIT_KEY: return self._create_split_key(value) elif secret_type is ObjectType.TEMPLATE: return self._create_template(value) elif secret_type is ObjectType.SECRET_DATA: return self._create_secret_data(value) elif secret_type is ObjectType.OPAQUE_DATA: return self._create_opaque_data(value) else: raise TypeError("Unrecognized secret type: {0}".format( secret_type))
python
{ "resource": "" }
q270487
KmipServerConfig.set_setting
test
def set_setting(self, setting, value): """ Set a specific setting value. This will overwrite the current setting value for the specified setting. Args: setting (string): The name of the setting to set (e.g., 'certificate_path', 'hostname'). Required. value (misc): The value of the setting to set. Type varies based on setting. Required. Raises: ConfigurationError: Raised if the setting is not supported or if the setting value is invalid. """ if setting not in self._expected_settings + self._optional_settings: raise exceptions.ConfigurationError( "Setting '{0}' is not supported.".format(setting) ) if setting == 'hostname': self._set_hostname(value) elif setting == 'port': self._set_port(value) elif setting == 'certificate_path': self._set_certificate_path(value) elif setting == 'key_path': self._set_key_path(value) elif setting == 'ca_path': self._set_ca_path(value) elif setting == 'auth_suite': self._set_auth_suite(value) elif setting == 'policy_path': self._set_policy_path(value) elif setting == 'enable_tls_client_auth': self._set_enable_tls_client_auth(value) elif setting == 'tls_cipher_suites': self._set_tls_cipher_suites(value) elif setting == 'logging_level': self._set_logging_level(value) else: self._set_database_path(value)
python
{ "resource": "" }
q270488
KmipServerConfig.load_settings
test
def load_settings(self, path): """ Load configuration settings from the file pointed to by path. This will overwrite all current setting values. Args: path (string): The path to the configuration file containing the settings to load. Required. Raises: ConfigurationError: Raised if the path does not point to an existing file or if a setting value is invalid. """ if not os.path.exists(path): raise exceptions.ConfigurationError( "The server configuration file ('{0}') could not be " "located.".format(path) ) self._logger.info( "Loading server configuration settings from: {0}".format(path) ) parser = configparser.ConfigParser() parser.read(path) self._parse_settings(parser) self.parse_auth_settings(parser)
python
{ "resource": "" }
q270489
UsageMaskType.process_bind_param
test
def process_bind_param(self, value, dialect): """ Returns the integer value of the usage mask bitmask. This value is stored in the database. Args: value(list<enums.CryptographicUsageMask>): list of enums in the usage mask dialect(string): SQL dialect """ bitmask = 0x00 for e in value: bitmask = bitmask | e.value return bitmask
python
{ "resource": "" }
q270490
UsageMaskType.process_result_value
test
def process_result_value(self, value, dialect): """ Returns a new list of enums.CryptographicUsageMask Enums. This converts the integer value into the list of enums. Args: value(int): The integer value stored in the database that is used to create the list of enums.CryptographicUsageMask Enums. dialect(string): SQL dialect """ masks = list() if value: for e in enums.CryptographicUsageMask: if e.value & value: masks.append(e) return masks
python
{ "resource": "" }
q270491
LongInteger.read
test
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0):
        """
        Read the encoding of the LongInteger from the input stream.

        Args:
            istream (stream): A buffer containing the encoded bytes of a
                LongInteger. Usually a BytearrayStream object. Required.
            kmip_version (KMIPVersion): An enumeration defining the KMIP
                version with which the object will be decoded. Optional,
                defaults to KMIP 1.0.

        Raises:
            InvalidPrimitiveLength: if the long integer encoding read in has
                an invalid encoded length.
        """
        super(LongInteger, self).read(istream, kmip_version=kmip_version)

        if self.length != LongInteger.LENGTH:
            raise exceptions.InvalidPrimitiveLength(
                "invalid long integer length read; "
                "expected: {0}, observed: {1}".format(
                    LongInteger.LENGTH, self.length))

        self.value = unpack('!q', istream.read(self.length))[0]
        self.validate()
python
{ "resource": "" }
q270492
LongInteger.write
test
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the encoding of the LongInteger to the output stream. Args: ostream (stream): A buffer to contain the encoded bytes of a LongInteger. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ super(LongInteger, self).write(ostream, kmip_version=kmip_version) ostream.write(pack('!q', self.value))
python
{ "resource": "" }
q270493
LongInteger.validate
test
def validate(self): """ Verify that the value of the LongInteger is valid. Raises: TypeError: if the value is not of type int or long ValueError: if the value cannot be represented by a signed 64-bit integer """ if self.value is not None: if not isinstance(self.value, six.integer_types): raise TypeError('expected (one of): {0}, observed: {1}'.format( six.integer_types, type(self.value))) else: if self.value > LongInteger.MAX: raise ValueError( 'long integer value greater than accepted max') elif self.value < LongInteger.MIN: raise ValueError( 'long integer value less than accepted min')
python
{ "resource": "" }
q270494
BigInteger.read
test
def read(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the encoding of the BigInteger from the input stream. Args: istream (stream): A buffer containing the encoded bytes of the value of a BigInteger. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. Raises: InvalidPrimitiveLength: if the big integer encoding read in has an invalid encoded length. """ super(BigInteger, self).read(istream, kmip_version=kmip_version) # Check for a valid length before even trying to parse the value. if self.length % 8: raise exceptions.InvalidPrimitiveLength( "invalid big integer length read; " "expected: multiple of 8, observed: {0}".format(self.length)) sign = 1 binary = '' # Read the value byte by byte and convert it into binary, padding each # byte as needed. for _ in range(self.length): byte = struct.unpack('!B', istream.read(1))[0] bits = "{0:b}".format(byte) pad = len(bits) % 8 if pad: bits = ('0' * (8 - pad)) + bits binary += bits # If the value is negative, convert via two's complement. if binary[0] == '1': sign = -1 binary = binary.replace('1', 'i') binary = binary.replace('0', '1') binary = binary.replace('i', '0') pivot = binary.rfind('0') binary = binary[0:pivot] + '1' + ('0' * len(binary[pivot + 1:])) # Convert the value back to an integer and reapply the sign. self.value = int(binary, 2) * sign
python
{ "resource": "" }
q270495
BigInteger.write
test
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the encoding of the BigInteger to the output stream. Args: ostream (Stream): A buffer to contain the encoded bytes of a BigInteger object. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ # Convert the value to binary and pad it as needed. binary = "{0:b}".format(abs(self.value)) binary = ("0" * (64 - (len(binary) % 64))) + binary # If the value is negative, convert via two's complement. if self.value < 0: binary = binary.replace('1', 'i') binary = binary.replace('0', '1') binary = binary.replace('i', '0') pivot = binary.rfind('0') binary = binary[0:pivot] + '1' + ('0' * len(binary[pivot + 1:])) # Convert each byte to hex and build the hex string for the value. hexadecimal = b'' for i in range(0, len(binary), 8): byte = binary[i:i + 8] byte = int(byte, 2) hexadecimal += struct.pack('!B', byte) self.length = len(hexadecimal) super(BigInteger, self).write(ostream, kmip_version=kmip_version) ostream.write(hexadecimal)
python
{ "resource": "" }
q270496
BigInteger.validate
test
def validate(self): """ Verify that the value of the BigInteger is valid. Raises: TypeError: if the value is not of type int or long """ if self.value is not None: if not isinstance(self.value, six.integer_types): raise TypeError('expected (one of): {0}, observed: {1}'.format( six.integer_types, type(self.value)))
python
{ "resource": "" }
q270497
Enumeration.validate
test
def validate(self):
        """
        Verify that the value of the Enumeration is valid.

        Raises:
            TypeError: if the enum is not an Enum class, if the value is not
                of the expected Enum subtype, or if the value's underlying
                integer is not an int
            ValueError: if the value cannot be represented by an unsigned
                32-bit integer
        """
        if not isinstance(self.enum, enumeration.EnumMeta):
            raise TypeError(
                'enumeration type {0} must be of type EnumMeta'.format(
                    self.enum))
        if self.value is not None:
            if not isinstance(self.value, self.enum):
                raise TypeError(
                    'enumeration {0} must be of type {1}'.format(
                        self.value, self.enum))
            if type(self.value.value) not in six.integer_types:
                raise TypeError('enumeration value must be an int')
            else:
                if self.value.value > Enumeration.MAX:
                    raise ValueError(
                        'enumeration value greater than accepted max')
                elif self.value.value < Enumeration.MIN:
                    raise ValueError(
                        'enumeration value less than accepted min')
python
{ "resource": "" }
q270498
Boolean.read_value
test
def read_value(self, istream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Read the value of the Boolean object from the input stream. Args: istream (Stream): A buffer containing the encoded bytes of the value of a Boolean object. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be decoded. Optional, defaults to KMIP 1.0. Raises: ValueError: if the read boolean value is not a 0 or 1. """ try: value = unpack('!Q', istream.read(self.LENGTH))[0] except Exception: self.logger.error("Error reading boolean value from buffer") raise if value == 1: self.value = True elif value == 0: self.value = False else: raise ValueError("expected: 0 or 1, observed: {0}".format(value)) self.validate()
python
{ "resource": "" }
q270499
Boolean.write_value
test
def write_value(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the value of the Boolean object to the output stream. Args: ostream (Stream): A buffer to contain the encoded bytes of the value of a Boolean object. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ try: ostream.write(pack('!Q', self.value)) except Exception: self.logger.error("Error writing boolean value to buffer") raise
python
{ "resource": "" }