text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def printer(self): """Prints PDA states and their attributes"""
def printer(self):
    """Print every PDA state and its attributes.

    Iterates states 0..self.n (inclusive) and delegates the per-state
    output to each state object's own printer() method.
    """
    # Fix: the original used a bare Python-2 print statement. A single
    # parenthesised argument prints identically under Python 2 and 3.
    for i in range(self.n + 1):
        print("--------- State No --------" + repr(i))
        self.s[i].printer()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _ActionDatabase(self, cmd, args = None, commit = True, error = True): """ Do action on database. Parameters cmd : string SQL command. args : tuple [optional : default = None] Arguments to be passed along with the SQL command. e.g. cmd="SELECT Value FROM Config WHERE Name=?" args=(fieldName, ) commit : boolean [optional : default = True] If true commit database changes after command is executed. error : boolean [optional : default = True] If False then any sqlite3.OperationalError exceptions will cause this function to return None, otherwise the exception will be raised. Returns If a valid result is obtained from the database this will be returned. If an error occurs and the error argument is set to False then the return value will be None. """
def _ActionDatabase(self, cmd, args=None, commit=True, error=True):
    """Execute a SQL command against the database.

    Parameters
    ----------
    cmd : string
      SQL command.
    args : tuple [optional : default = None]
      Arguments passed along with the SQL command.
    commit : boolean [optional : default = True]
      If True, commit database changes after the command executes.
    error : boolean [optional : default = True]
      If False, sqlite3.OperationalError causes a None return instead
      of propagating.

    Returns
    ----------
    The fetched query result, or None when a suppressed error occurs.
    """
    goodlogging.Log.Info("DB", "Database Command: {0} {1}".format(cmd, args), verbosity=self.logVerbosity)
    with sqlite3.connect(self._dbPath) as db:
        try:
            # Execute with or without bound arguments.
            result = db.execute(cmd) if args is None else db.execute(cmd, args)
        except sqlite3.OperationalError:
            if error is True:
                raise
            return None
        else:
            if commit is True:
                db.commit()
            return result.fetchall()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _PurgeTable(self, tableName): """ Deletes all rows from given table without dropping table. Parameters tableName : string Name of table. """
def _PurgeTable(self, tableName):
    """Delete every row from the given table without dropping it.

    Parameters
    ----------
    tableName : string
      Name of table.
    """
    logMessage = "Deleting all entries from table {0}".format(tableName)
    goodlogging.Log.Info("DB", logMessage, verbosity=self.logVerbosity)
    # Table names cannot be bound parameters, hence the format call.
    self._ActionDatabase("DELETE FROM {0}".format(tableName))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def GetConfigValue(self, fieldName): """ Match given field name in Config table and return corresponding value. Parameters fieldName : string String matching Name column in Config table. Returns string or None If a match is found the corresponding entry in the Value column of the database table is returned, otherwise None is returned (or if multiple matches are found a fatal error is raised). """
def GetConfigValue(self, fieldName):
    """Look up fieldName in the Config table and return its Value.

    Parameters
    ----------
    fieldName : string
      String matched against the Name column of the Config table.

    Returns
    ----------
    string or None
      The matching Value entry, None when nothing matches; a fatal
      error is raised when multiple rows match.
    """
    result = self._ActionDatabase("SELECT Value FROM Config WHERE Name=?", (fieldName, ))
    # Treat "no result object" and "zero rows" identically.
    if result is None or len(result) == 0:
        return None
    if len(result) == 1:
        goodlogging.Log.Info("DB", "Found database match in config table {0}={1}".format(fieldName, result[0][0]), verbosity=self.logVerbosity)
        return result[0][0]
    goodlogging.Log.Fatal("DB", "Database corrupted - multiple matches found in config table {0}={1}".format(fieldName, result))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def SetConfigValue(self, fieldName, value): """ Set value in Config table. If a entry already exists this is updated with the new value, otherwise a new entry is added. Parameters fieldName : string String to be inserted or matched against Name column in Config table. value : string Entry to be inserted or updated in Value column of Config table. """
def SetConfigValue(self, fieldName, value):
    """Insert or update a Name/Value pair in the Config table.

    Parameters
    ----------
    fieldName : string
      Matched against (or inserted into) the Name column.
    value : string
      Entry written to the Value column.
    """
    existing = self.GetConfigValue(fieldName)
    if existing is not None:
        # Row already present - overwrite the value in place.
        goodlogging.Log.Info("DB", "Updating {0} in database config table from {1} to {2}".format(fieldName, existing, value), verbosity=self.logVerbosity)
        self._ActionDatabase("UPDATE Config SET Value=? WHERE Name=?", (value, fieldName))
    else:
        goodlogging.Log.Info("DB", "Adding {0}={1} to database config table".format(fieldName, value), verbosity=self.logVerbosity)
        self._ActionDatabase("INSERT INTO Config VALUES (?,?)", (fieldName, value))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _AddToSingleColumnTable(self, tableName, columnHeading, newValue): """ Add an entry to a table containing a single column. Checks existing table entries to avoid duplicate entries if the given value already exists in the table. Parameters tableName : string Name of table to add entry to. columnHeading : string Name of column heading. newValue : string New value to add to table. """
def _AddToSingleColumnTable(self, tableName, columnHeading, newValue):
    """Add an entry to a single-column table, skipping duplicates.

    Parameters
    ----------
    tableName : string
      Name of table to add entry to.
    columnHeading : string
      Name of column heading.
    newValue : string
      New value to add to the table.
    """
    currentTable = self._GetFromSingleColumnTable(tableName)
    # Membership test uses == on each row, same as an explicit scan.
    alreadyPresent = currentTable is not None and newValue in currentTable
    if alreadyPresent:
        goodlogging.Log.Info("DB", "{0} already exists in {1} table".format(newValue, tableName), verbosity=self.logVerbosity)
    else:
        goodlogging.Log.Info("DB", "Adding {0} to {1} table".format(newValue, tableName), verbosity=self.logVerbosity)
        self._ActionDatabase("INSERT INTO {0} VALUES (?)".format(tableName), (newValue, ))

############################################################################
# _GetFromSingleColumnTable
############################################################################
"""
Get all entries from a table containing a single column.

Parameters
----------
tableName : string
  Name of table to add entry to.

Returns
----------
list or None
  If either no table or no rows are found this returns None, otherwise
  a list of all table entries is returned.
"""
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def AddShowToTVLibrary(self, showName): """ Add show to TVLibrary table. If the show already exists in the table a fatal error is raised. Parameters showName : string Show name to add to TV library table. Returns int Unique show id generated for show when it is added to the table. Used across the database to reference this show. """
def AddShowToTVLibrary(self, showName):
    """Add a show to the TVLibrary table and return its generated id.

    A fatal error is raised if the show already exists in the table.

    Parameters
    ----------
    showName : string
      Show name to add to the TV library table.

    Returns
    ----------
    int
      Unique show id generated for the show on insertion.
    """
    goodlogging.Log.Info("DB", "Adding {0} to TV library".format(showName), verbosity=self.logVerbosity)
    if self.SearchTVLibrary(showName = showName) is not None:
        goodlogging.Log.Fatal("DB", "An entry for {0} already exists in the TV library".format(showName))
    else:
        self._ActionDatabase("INSERT INTO TVLibrary (ShowName) VALUES (?)", (showName, ))
        # Read back the auto-generated id for the row just inserted.
        rows = self._ActionDatabase("SELECT (ShowID) FROM TVLibrary WHERE ShowName=?", (showName, ))
        return rows[0][0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def UpdateShowDirInTVLibrary(self, showID, showDir): """ Update show directory entry for given show id in TVLibrary table. Parameters showID : int Show id value. showDir : string Show directory name. """
def UpdateShowDirInTVLibrary(self, showID, showDir):
    """Update the show directory entry for a show id in TVLibrary.

    Parameters
    ----------
    showID : int
      Show id value.
    showDir : string
      Show directory name.
    """
    # Fix: every other method passes verbosity=self.logVerbosity to
    # Log.Info; this call omitted it, so the message ignored the
    # configured verbosity level.
    goodlogging.Log.Info("DB", "Updating TV library for ShowID={0}: ShowDir={1}".format(showID, showDir), verbosity=self.logVerbosity)
    self._ActionDatabase("UPDATE TVLibrary SET ShowDir=? WHERE ShowID=?", (showDir, showID))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def SearchTVLibrary(self, showName = None, showID = None, showDir = None): """ Search TVLibrary table. If none of the optonal arguments are given it looks up all entries of the table, otherwise it will look up entries which match the given arguments. Note that it only looks up based on one argument - if show directory is given this will be used, otherwise show id will be used if it is given, otherwise show name will be used. Parameters showName : string [optional : default = None] Show name. showID : int [optional : default = None] Show id value. showDir : string [optional : default = None] Show directory name. Returns list or None If no result is found this returns None otherwise it will return a the result of the SQL query as a list. In the case that the result is expected to be unique and multiple entries are return a fatal error will be raised. """
def SearchTVLibrary(self, showName = None, showID = None, showDir = None):
    """Search the TVLibrary table.

    With no arguments every row is returned. Otherwise a single lookup
    key is used, preferring showDir, then showID, then showName.

    Parameters
    ----------
    showName : string [optional : default = None]
      Show name.
    showID : int [optional : default = None]
      Show id value.
    showDir : string [optional : default = None]
      Show directory name.

    Returns
    ----------
    list or None
      Query result as a list, or None when nothing matched. A fatal
      error is raised when a unique lookup returns multiple rows.
    """
    unique = True
    if showDir is not None:
        goodlogging.Log.Info("DB", "Looking up from TV library where ShowDir is {0}".format(showDir), verbosity=self.logVerbosity)
        query = "SELECT * FROM TVLibrary WHERE ShowDir=?"
        queryArgs = (showDir, )
    elif showID is not None:
        goodlogging.Log.Info("DB", "Looking up from TV library where ShowID is {0}".format(showID), verbosity=self.logVerbosity)
        query = "SELECT * FROM TVLibrary WHERE ShowID=?"
        queryArgs = (showID, )
    elif showName is not None:
        goodlogging.Log.Info("DB", "Looking up from TV library where ShowName is {0}".format(showName), verbosity=self.logVerbosity)
        query = "SELECT * FROM TVLibrary WHERE ShowName=?"
        queryArgs = (showName, )
    else:
        # No keys supplied: list the whole table, uniqueness not expected.
        goodlogging.Log.Info("DB", "Looking up all items in TV library", verbosity=self.logVerbosity)
        query = "SELECT * FROM TVLibrary"
        queryArgs = None
        unique = False

    result = self._ActionDatabase(query, queryArgs, error = False)

    # None or an empty list both mean "no match".
    if not result:
        return None
    if len(result) == 1:
        goodlogging.Log.Info("DB", "Found match in TVLibrary: {0}".format(result), verbosity=self.logVerbosity)
        return result
    if unique is True:
        goodlogging.Log.Fatal("DB", "Database corrupted - multiple matches found in TV Library: {0}".format(result))
    else:
        goodlogging.Log.Info("DB", "Found multiple matches in TVLibrary: {0}".format(result), verbosity=self.logVerbosity)
        return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def SearchFileNameTable(self, fileName): """ Search FileName table. Find the show id for a given file name. Parameters fileName : string File name to look up in table. Returns int or None If a match is found in the database table the show id for this entry is returned, otherwise this returns None. """
def SearchFileNameTable(self, fileName):
    """Find the show id for a given file name in the FileName table.

    Parameters
    ----------
    fileName : string
      File name to look up in the table.

    Returns
    ----------
    int or None
      The matching show id, or None when no match exists. A fatal
      error is raised on multiple matches.
    """
    goodlogging.Log.Info("DB", "Looking up filename string '{0}' in database".format(fileName), verbosity=self.logVerbosity)

    result = self._ActionDatabase("SELECT ShowID FROM FileName WHERE FileName=?", (fileName, ), error = False)

    if result is None:
        goodlogging.Log.Info("DB", "No match found in database for '{0}'".format(fileName), verbosity=self.logVerbosity)
        return None
    if len(result) == 0:
        return None
    if len(result) == 1:
        goodlogging.Log.Info("DB", "Found file name match: {0}".format(result), verbosity=self.logVerbosity)
        return result[0][0]
    goodlogging.Log.Fatal("DB", "Database corrupted - multiple matches found in database table for: {0}".format(result))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def AddToFileNameTable(self, fileName, showID): """ Add entry to FileName table. If the file name and show id combination already exists in the table a fatal error is raised. Parameters fileName : string File name. showID : int Show id. """
def AddToFileNameTable(self, fileName, showID):
    """Add a file-name-to-show-id mapping to the FileName table.

    A fatal error is raised if the file name already has an entry.

    Parameters
    ----------
    fileName : string
      File name.
    showID : int
      Show id.
    """
    goodlogging.Log.Info("DB", "Adding filename string match '{0}'={1} to database".format(fileName, showID), verbosity=self.logVerbosity)
    if self.SearchFileNameTable(fileName) is not None:
        goodlogging.Log.Fatal("DB", "An entry for '{0}' already exists in the FileName table".format(fileName))
    else:
        self._ActionDatabase("INSERT INTO FileName (FileName, ShowID) VALUES (?,?)", (fileName, showID))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def SearchSeasonDirTable(self, showID, seasonNum): """ Search SeasonDir table. Find the season directory for a given show id and season combination. Parameters showID : int Show id for given show. seasonNum : int Season number. Returns string or None If no match is found this returns None, if a single match is found then the season directory name value is returned. If multiple matches are found a fatal error is raised. """
def SearchSeasonDirTable(self, showID, seasonNum):
    """Find the season directory for a show id / season combination.

    Parameters
    ----------
    showID : int
      Show id for the given show.
    seasonNum : int
      Season number.

    Returns
    ----------
    string or None
      The season directory name, or None when no match exists. A fatal
      error is raised on multiple matches.
    """
    goodlogging.Log.Info("DB", "Looking up directory for ShowID={0} Season={1} in database".format(showID, seasonNum), verbosity=self.logVerbosity)

    result = self._ActionDatabase("SELECT SeasonDir FROM SeasonDir WHERE ShowID=? AND Season=?", (showID, seasonNum), error = False)

    if result is None:
        goodlogging.Log.Info("DB", "No match found in database", verbosity=self.logVerbosity)
        return None
    if len(result) == 0:
        return None
    if len(result) == 1:
        goodlogging.Log.Info("DB", "Found database match: {0}".format(result), verbosity=self.logVerbosity)
        return result[0][0]
    goodlogging.Log.Fatal("DB", "Database corrupted - multiple matches found in database table for: {0}".format(result))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def AddSeasonDirTable(self, showID, seasonNum, seasonDir): """ Add entry to SeasonDir table. If a different entry for season directory is found for the given show id and season number combination this raises a fatal error. Parameters showID : int Show id. seasonNum : int Season number. seasonDir : string Season directory name. """
def AddSeasonDirTable(self, showID, seasonNum, seasonDir):
    """Add an entry to the SeasonDir table.

    A fatal error is raised if a *different* directory is already
    stored for the same show id and season number.

    Parameters
    ----------
    showID : int
      Show id.
    seasonNum : int
      Season number.
    seasonDir : string
      Season directory name.
    """
    goodlogging.Log.Info("DB", "Adding season directory ({0}) to database for ShowID={1}, Season={2}".format(seasonDir, showID, seasonNum), verbosity=self.logVerbosity)

    existing = self.SearchSeasonDirTable(showID, seasonNum)
    if existing is None:
        self._ActionDatabase("INSERT INTO SeasonDir (ShowID, Season, SeasonDir) VALUES (?,?,?)", (showID, seasonNum, seasonDir))
    elif existing == seasonDir:
        goodlogging.Log.Info("DB", "A matching entry already exists in the SeasonDir table", verbosity=self.logVerbosity)
    else:
        # Conflicting directory for the same show/season is fatal.
        goodlogging.Log.Fatal("DB", "A different entry already exists in the SeasonDir table")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def PrintAllTables(self): """ Prints contents of every table. """
def PrintAllTables(self):
    """Print the contents of every table in the database."""
    goodlogging.Log.Info("DB", "Database contents:\n")
    # Iterating the dict yields its keys (the table names) directly.
    for tableName in self._tableDict:
        self._PrintDatabaseTable(tableName)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse(data, obj_name, attr_map): """parse xml data into a python map"""
def _parse(data, obj_name, attr_map):
    """Parse XML data into a list of python dicts.

    :param data: raw XML string/bytes
    :param obj_name: tag name of the elements to extract
    :param attr_map: mapping of python key -> XML tag name
    :returns: list of dicts, one per matching element
    """
    document = minidom.parseString(data)
    return [
        {py_name: _get_minidom_tag_value(node, xml_name)
         for py_name, xml_name in attr_map.items()}
        for node in document.getElementsByTagName(obj_name)
    ]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_all_stations(self, station_type=None): """Returns information of all stations. @param<optional> station_type: ['mainline', 'suburban', 'dart'] """
def get_all_stations(self, station_type=None):
    """Return information on all stations.

    @param<optional> station_type: ['mainline', 'suburban', 'dart']
    """
    known_type = bool(station_type) and station_type in STATION_TYPE_TO_CODE_DICT
    if known_type:
        url = self.api_base_url + 'getAllStationsXML_WithStationType'
        params = {'stationType': STATION_TYPE_TO_CODE_DICT[station_type]}
    else:
        # Unknown or absent type: fall back to the unfiltered endpoint.
        url = self.api_base_url + 'getAllStationsXML'
        params = None

    response = requests.get(url, params=params, timeout=10)
    if response.status_code != 200:
        return []
    return self._parse_station_list(response.content)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_all_current_trains(self, train_type=None, direction=None): """Returns all trains that are due to start in the next 10 minutes @param train_type: ['mainline', 'suburban', 'dart'] """
def get_all_current_trains(self, train_type=None, direction=None):
    """Return all trains due to start in the next 10 minutes.

    @param train_type: ['mainline', 'suburban', 'dart']
    @param direction: optional direction filter passed to _prune_trains
    """
    params = None
    # Fix: validate train_type membership like get_all_stations does;
    # the original indexed STATION_TYPE_TO_CODE_DICT unconditionally
    # and raised KeyError for any unrecognised type. Unknown types now
    # fall back to the unfiltered endpoint, consistent with
    # get_all_stations.
    if train_type and train_type in STATION_TYPE_TO_CODE_DICT:
        url = self.api_base_url + 'getCurrentTrainsXML_WithTrainType'
        params = {
            'TrainType': STATION_TYPE_TO_CODE_DICT[train_type]
        }
    else:
        url = self.api_base_url + 'getCurrentTrainsXML'
    response = requests.get(
        url, params=params, timeout=10)
    if response.status_code != 200:
        return []
    trains = self._parse_all_train_data(response.content)
    if direction is not None:
        return self._prune_trains(trains, direction=direction)
    return trains
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_station_by_name(self, station_name, num_minutes=None, direction=None, destination=None, stops_at=None): """Returns all trains due to serve station `station_name`. @param station_code @param num_minutes. Only trains within this time. Between 5 and 90 @param direction Filter by direction. Northbound or Southbound @param destination Filter by name of the destination stations @param stops_at Filber by name of one of the stops """
def get_station_by_name(self, station_name, num_minutes=None, direction=None, destination=None, stops_at=None):
    """Return all trains due to serve station `station_name`.

    @param station_name name of the station
    @param num_minutes Only trains within this time. Between 5 and 90
    @param direction Filter by direction. Northbound or Southbound
    @param destination Filter by name of the destination stations
    @param stops_at Filter by name of one of the stops
    """
    url = self.api_base_url + 'getStationDataByNameXML'
    params = {
        'StationDesc': station_name
    }
    if num_minutes:
        url = url + '_withNumMins'
        params['NumMins'] = num_minutes
    response = requests.get(
        url, params=params, timeout=10)
    if response.status_code != 200:
        return []
    trains = self._parse_station_data(response.content)
    # Fix: stops_at was accepted and forwarded to _prune_trains but was
    # missing from this condition, so a stops_at-only filter was
    # silently ignored and unfiltered results were returned.
    if direction is not None or destination is not None or stops_at is not None:
        return self._prune_trains(trains, direction=direction,
                                  destination=destination,
                                  stops_at=stops_at)
    return trains
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_train_stops(self, train_code, date=None): """Get details for a train. @param train_code code for the trian @param date Date in format "15 oct 2017". If none use today """
def get_train_stops(self, train_code, date=None):
    """Get movement details for a train.

    @param train_code code for the train
    @param date Date in format "15 oct 2017". If None, today is used.
    """
    if date is None:
        # Default to today's date, e.g. "15 October 2017".
        date = datetime.date.today().strftime("%d %B %Y")
    url = self.api_base_url + 'getTrainMovementsXML'
    query = {
        'TrainId': train_code,
        'TrainDate': date
    }
    response = requests.get(url, params=query, timeout=10)
    if response.status_code != 200:
        return []
    return self._parse_train_movement_data(response.content)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fill_fields(self, **kwargs): """Fills the fields referenced by kwargs keys and fill them with the value"""
def fill_fields(self, **kwargs):
    """Fill each field named by a keyword argument with its value.

    Every key is looked up as an attribute on self and receives its
    value via send_keys().
    """
    for field_name, text in kwargs.items():
        getattr(self, field_name).send_keys(text)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def authorize_url(client_id=None, redirect_uri=None, state=None, scopes=None, show_dialog=False, http_client=None): """ Trigger authorization dialog :param str client_id: Client ID :param str redirect_uri: Application Redirect URI :param str state: Application State :param List[str] scopes: Scopes to request :param bool show_dialog: Show the dialog :param http_client: HTTP Client for requests :return str Authorize URL :rtype str """
def authorize_url(client_id=None, redirect_uri=None, state=None, scopes=None,
                  show_dialog=False, http_client=None):
    """
    Trigger authorization dialog

    :param str client_id: Client ID
    :param str redirect_uri: Application Redirect URI
    :param str state: Application State
    :param List[str] scopes: Scopes to request
    :param bool show_dialog: Show the dialog
    :param http_client: HTTP Client for requests (unused here)
    :return str Authorize URL
    :rtype str
    """
    params = {
        'client_id': client_id or os.environ.get('SPOTIFY_CLIENT_ID'),
        'redirect_uri': redirect_uri or os.environ.get('SPOTIFY_REDIRECT_URI'),
        'state': state or str(uuid.uuid4()).replace('-', ''),
        'scope': ' '.join(scopes) if scopes else '',
        # Fix: render the boolean as the lowercase string the OAuth
        # endpoint expects, not Python's repr ("False").
        'show_dialog': 'true' if show_dialog else 'false',
        'response_type': 'code'
    }
    # Fix: the original joined raw "k=v" pairs without URL-encoding, so
    # spaces in the scope list (and any special characters in the
    # redirect URI) produced an invalid URL. urlencode percent-encodes
    # every value.
    return '{}?{}'.format('https://accounts.spotify.com/authorize',
                          urllib.parse.urlencode(params))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def refresh(self): """ Refresh the access token """
def refresh(self):
    """Refresh the access token using the stored refresh token."""
    payload = {
        'grant_type': 'refresh_token',
        'refresh_token': self._token.refresh_token
    }
    credentials = (self.client_id, self.client_secret)
    response = self.http_client.post(self.URL, data=payload, auth=credentials)
    # Surface any HTTP error before replacing the token.
    response.raise_for_status()
    self._token = Token.from_json(response.json())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def instance_of(cls): """ Create an invariant requiring the value is an instance of ``cls``. """
def instance_of(cls):
    """
    Create an invariant requiring the value is an instance of ``cls``.
    """
    def check(value):
        ok = isinstance(value, cls)
        # The message is built unconditionally, as in the original.
        message = u"{value!r} is instance of {actual!s}, required {required!s}".format(
            value=value,
            actual=fullyQualifiedName(type(value)),
            required=fullyQualifiedName(cls),
        )
        return (ok, message)
    return check
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def provider_of(iface): """ Create an invariant requiring the value provides the zope.interface ``iface``. """
def provider_of(iface):
    """
    Create an invariant requiring the value provides the zope.interface
    ``iface``.
    """
    def check(value):
        provided = iface.providedBy(value)
        message = u"{value!r} does not provide {interface!s}".format(
            value=value,
            interface=fullyQualifiedName(iface),
        )
        return (provided, message)
    return check
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def temp_dir(suffix='', prefix='tmp', parent_dir=None, make_cwd=False): """ Create a temporary directory and optionally change the current working directory to it. The directory is deleted when the context exits. The temporary directory is created when entering the context manager, and deleted when exiting it: This time let's make the temporary directory our working directory: The suffix, prefix, and parent_dir options are passed to the standard ``tempfile.mkdtemp()`` function: This function can also be used as a decorator, with the in_temp_dir alias: """
def temp_dir(suffix='', prefix='tmp', parent_dir=None, make_cwd=False):
    """Create a temporary directory; optionally chdir into it.

    The directory is created on entering the context and deleted on
    exit. suffix, prefix, and parent_dir are forwarded to
    tempfile.mkdtemp(). When make_cwd is True the working directory is
    changed to the new directory for the duration of the context and
    restored afterwards.
    """
    original_cwd = os.getcwd()
    if parent_dir is not None:
        parent_dir = str(parent_dir)
    created = tempfile.mkdtemp(suffix, prefix, parent_dir)
    created_path = pathlib.Path(created)
    try:
        if make_cwd:
            os.chdir(str(created))
        yield created_path.resolve()
    finally:
        if make_cwd:
            # Restore the caller's working directory first.
            os.chdir(original_cwd)
        with temporary.util.allow_missing_file():
            shutil.rmtree(str(created))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def openSafeReplace(filepath, mode='w+b'): """Context manager to open a temporary file and replace the original file on closing. """
def openSafeReplace(filepath, mode='w+b'):
    """Context manager: write to a temporary file and replace the
    original file with it on closing.

    :param filepath: target path that will be atomically replaced
    :param mode: mode the temporary file is opened with
    :raises IOError: if filepath is not writable before or after writing
    """
    tempfileName = None
    # Check that the filepath can be accessed and is writable before
    # creating the tempfile.
    # Fix: corrected the typo "writtable" -> "writable" in both
    # user-facing error messages.
    if not _isFileAccessible(filepath):
        raise IOError('File %s is not writable' % (filepath, ))
    with tempfile.NamedTemporaryFile(delete=False, mode=mode) as tmpf:
        tempfileName = tmpf.name
        yield tmpf
    # Re-check writability before moving the tempfile over the original.
    if not _isFileAccessible(filepath):
        raise IOError('File %s is not writable' % (filepath, ))
    # Note: unhandled exceptions may still occur here because of race
    # conditions, messing things up.
    shutil.move(tempfileName, filepath)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _isFileAccessible(filepath): """Returns True if the specified filepath is writable."""
def _isFileAccessible(filepath):
    """Returns True if the specified filepath is writable."""
    parentDir = os.path.dirname(filepath)
    if not os.access(parentDir, os.W_OK):
        # Directory does not exist or is not writable.
        return False
    if os.path.exists(filepath) and not os.access(filepath, os.W_OK):
        # Existing file is not writable.
        return False
    try:
        # Opening for write detects locked files.
        fd = os.open(filepath, os.O_WRONLY)
        os.close(fd)
    except IOError:
        return False
    return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeJsonZipfile(filelike, data, compress=True, mode='w', name='data'): """Serializes the objects contained in data to a JSON formated string and writes it to a zipfile. :param filelike: path to a file (str) or a file-like object :param data: object that should be converted to a JSON formated string. Objects and types in data must be supported by the json.JSONEncoder or have the method ``._reprJSON()`` defined. :param compress: bool, True to use zip file compression :param mode: 'w' to truncate and write a new file, or 'a' to append to an existing file :param name: the file name that will be given to the JSON output in the archive """
def writeJsonZipfile(filelike, data, compress=True, mode='w', name='data'):
    """Serialize ``data`` to a JSON string and store it in a zip archive.

    :param filelike: path to a file (str) or a file-like object
    :param data: object serializable by json.JSONEncoder or providing
        ``._reprJSON()`` (handled by MaspyJsonEncoder)
    :param compress: bool, True to use zip file compression
    :param mode: 'w' to write a new file, 'a' to append to an existing one
    :param name: archive member name for the JSON output
    """
    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    jsonString = json.dumps(data, cls=MaspyJsonEncoder)
    with zipfile.ZipFile(filelike, mode, allowZip64=True) as archive:
        archive.writestr(name, jsonString, compression)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeBinaryItemContainer(filelike, binaryItemContainer, compress=True): """Serializes the binaryItems contained in binaryItemContainer and writes them into a zipfile archive. Examples of binaryItem classes are :class:`maspy.core.Ci` and :class:`maspy.core.Sai`. A binaryItem class has to define the function ``_reprJSON()`` which returns a JSON formated string representation of the class instance. In addition it has to contain an attribute ``.arrays``, a dictionary which values are ``numpy.array``, that are serialized to bytes and written to the ``binarydata`` file of the zip archive. See :func:`_dumpArrayDictToFile()` The JSON formated string representation of the binaryItems, together with the metadata, necessary to restore serialized numpy arrays, is written to the ``metadata`` file of the archive in this form: Use the method :func:`loadBinaryItemContainer()` to restore a binaryItemContainer from a zipfile. :param filelike: path to a file (str) or a file-like object :param binaryItemContainer: a dictionary containing binaryItems :param compress: bool, True to use zip file compression """
def writeBinaryItemContainer(filelike, binaryItemContainer, compress=True):
    """Serialize binaryItems into a two-member zip archive.

    Each item's numpy arrays are dumped to a shared binary buffer and
    its JSON representation plus the array metadata is collected; the
    archive then gets a 'metadata' member (JSON) and a 'binarydata'
    member (raw array bytes). Restore with loadBinaryItemContainer().

    :param filelike: path to a file (str) or a file-like object
    :param binaryItemContainer: a dictionary containing binaryItems
    :param compress: bool, True to use zip file compression
    """
    metadataByIndex = dict()
    arrayBuffer = io.BytesIO()
    for index, item in enumerate(viewvalues(binaryItemContainer)):
        arrayMetadata = _dumpArrayDictToFile(arrayBuffer, item.arrays)
        metadataByIndex[index] = [item._reprJSON(), arrayMetadata]
    # Rewind before reading the buffer back out.
    arrayBuffer.seek(0)
    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    with zipfile.ZipFile(filelike, 'w', allowZip64=True) as archive:
        archive.writestr('metadata',
                         json.dumps(metadataByIndex, cls=MaspyJsonEncoder),
                         compression)
        archive.writestr('binarydata', arrayBuffer.getvalue(), compression)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _dumpArrayToFile(filelike, array): """Serializes a 1-dimensional ``numpy.array`` to bytes, writes the bytes to the filelike object and returns a dictionary with metadata, necessary to restore the ``numpy.array`` from the file. :param filelike: can be a file or a file-like object that provides the methods ``.write()`` and ``.tell()``. :param array: a 1-dimensional ``numpy.array`` :returns: a metadata dictionary :: {'start': start position in the file, 'end': end position in the file, 'size': size of the array, 'dtype': numpy data type of the array } """
def _dumpArrayToFile(filelike, array):
    """Write a 1-D numpy array to filelike as raw bytes.

    :param filelike: object providing ``.write()`` and ``.tell()``
    :param array: a 1-dimensional ``numpy.array``
    :returns: metadata dict with 'start', 'end', 'size' and 'dtype'
        needed to restore the array from the file
    """
    rawBytes = array.tobytes('C')
    startPos = filelike.tell()
    metadata = {
        'start': startPos,
        'end': startPos + len(rawBytes),
        'size': array.size,
        'dtype': array.dtype.name,
    }
    filelike.write(rawBytes)
    return metadata
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _dumpNdarrayToFile(filelike, ndarray): """Serializes an N-dimensional ``numpy.array`` to bytes, writes the bytes to the filelike object and returns a dictionary with metadata, necessary to restore the ``numpy.array`` from the file. :param filelike: can be a file or a file-like object that provides the methods ``.write()`` and ``.tell()``. :param ndarray: a N-dimensional ``numpy.array`` :returns: a metadata dictionary :: {'start': start position in the file, 'end': end position in the file, 'size': size of the array, 'dtype': numpy data type of the array, 'shape': description of the array shape } """
def _dumpNdarrayToFile(filelike, ndarray):
    """Write an N-D numpy array to filelike as raw bytes.

    :param filelike: object providing ``.write()`` and ``.tell()``
    :param ndarray: a N-dimensional ``numpy.array``
    :returns: metadata dict with 'start', 'end', 'size', 'dtype' and
        'shape' needed to restore the array from the file
    """
    rawBytes = ndarray.tobytes('C')
    startPos = filelike.tell()
    metadata = {
        'start': startPos,
        'end': startPos + len(rawBytes),
        'size': ndarray.size,
        'dtype': ndarray.dtype.name,
        # Shape is required to undo the flattening done by tobytes().
        'shape': ndarray.shape,
    }
    filelike.write(rawBytes)
    return metadata
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _arrayFromBytes(dataBytes, metadata): """Generates and returns a numpy array from raw data bytes. :param bytes: raw data bytes as generated by ``numpy.ndarray.tobytes()`` :param metadata: a dictionary containing the data type and optionally the shape parameter to reconstruct a ``numpy.array`` from the raw data bytes. ``{"dtype": "float64", "shape": (2, 3)}`` :returns: ``numpy.array`` """
def _arrayFromBytes(dataBytes, metadata):
    """Generate a numpy array from raw data bytes.

    :param dataBytes: raw bytes as produced by ``numpy.ndarray.tobytes()``
    :param metadata: dict with the dtype and optionally the shape needed
        to reconstruct the array, e.g.
        ``{"dtype": "float64", "shape": (2, 3)}``
    :returns: ``numpy.array``
    """
    # Fix: numpy.fromstring is deprecated and numpy.typeDict was removed
    # in modern numpy. frombuffer returns a read-only view of the bytes,
    # so copy() restores the writable-array behavior of fromstring.
    array = numpy.frombuffer(dataBytes, dtype=numpy.dtype(metadata['dtype'])).copy()
    if 'shape' in metadata:
        array = array.reshape(metadata['shape'])
    return array
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def searchFileLocation(targetFileName, targetFileExtension, rootDirectory, recursive=True): """Search for a filename with a specified file extension in all subfolders of specified rootDirectory, returns first matching instance. :param targetFileName: #TODO: docstring :type targetFileName: str :param rootDirectory: #TODO: docstring :type rootDirectory: str :param targetFileExtension: #TODO: docstring :type targetFileExtension: str :param recursive: bool, specify whether subdirectories should be searched :returns: a filepath (str) or None """
def searchFileLocation(targetFileName, targetFileExtension, rootDirectory, recursive=True):
    """Search rootDirectory for a file with the given name and extension
    and return the first matching path.

    :param targetFileName: file name; only the text before the first "."
        is used for matching
    :param targetFileExtension: required extension of the match
    :param rootDirectory: directory to search
    :param recursive: bool, whether subdirectories are searched
    :returns: a filepath (str) or None
    """
    expectedFileName = targetFileName.split('.')[0] + '.' + targetFileExtension
    if recursive:
        for dirpath, dirnames, filenames in os.walk(rootDirectory):
            if expectedFileName in filenames:
                return joinpath(dirpath, expectedFileName)
    else:
        # Non-recursive: only look at regular files directly inside
        # rootDirectory.
        for entry in os.listdir(rootDirectory):
            entryPath = joinpath(rootDirectory, entry)
            if os.path.isfile(entryPath) and entry == expectedFileName:
                return entryPath
    return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def matchingFilePaths(targetfilename, directory, targetFileExtension=None, selector=None): """Search for files in all subfolders of specified directory, return filepaths of all matching instances. :param targetfilename: filename to search for, only the string before the last "." is used for filename matching. Ignored if a selector function is specified. :param directory: search directory, including all subdirectories :param targetFileExtension: string after the last "." in the filename, has to be identical if specified. "." in targetFileExtension are ignored, thus ".txt" is treated equal to "txt". :param selector: a function which is called with the value of targetfilename and has to return True (include value) or False (discard value). If no selector is specified, equality to targetfilename is used. :returns: list of matching file paths (str) """
def matchingFilePaths(targetfilename, directory, targetFileExtension=None, selector=None):
    """Search all subfolders of directory and return every matching path.

    :param targetfilename: filename to search for; only the text before
        the last "." is used for matching. Ignored if a selector is given.
    :param directory: search directory, including all subdirectories
    :param targetFileExtension: text after the last "." in the filename;
        must match if specified. Leading "." is ignored, so ".txt" equals
        "txt".
    :param selector: function called with the candidate base name,
        returning True (keep) or False (discard). Defaults to equality
        with targetfilename.
    :returns: list of matching file paths (str)
    """
    targetFilePaths = list()
    targetfilename = os.path.splitext(targetfilename)[0]
    # Fix: the original called targetFileExtension.replace() before the
    # None check, raising AttributeError for the documented default of
    # None. Only normalize the extension when one was supplied.
    matchExtensions = targetFileExtension is not None
    if matchExtensions:
        targetFileExtension = targetFileExtension.replace('.', '')
    if selector is None:
        selector = functools.partial(operator.eq, targetfilename)
    for dirpath, dirnames, filenames in os.walk(directory):
        for filename in filenames:
            filenameNoextension = os.path.splitext(filename)[0]
            if selector(filenameNoextension):
                if matchExtensions:
                    if not filename.endswith('.' + targetFileExtension):
                        continue
                targetFilePaths.append(joinpath(dirpath, filename))
    return targetFilePaths
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def listFiletypes(targetfilename, directory): """Looks for all occurences of a specified filename in a directory and returns a list of all present file extensions of this filename. In this cas everything after the first dot is considered to be the file extension: ``"filename.txt" -> "txt"``, ``"filename.txt.zip" -> "txt.zip"`` :param targetfilename: a filename without any extensions :param directory: only files present in this directory are compared to the targetfilename :returns: a list of file extensions (str) """
def listFiletypes(targetfilename, directory):
    """List the extensions of every file in directory whose base name
    matches targetfilename.

    Everything after the first dot counts as the extension:
    ``"filename.txt" -> "txt"``, ``"filename.txt.zip" -> "txt.zip"``.

    :param targetfilename: a filename without any extensions
    :param directory: directory whose files are compared
    :returns: a list of file extensions (str)
    """
    extensions = []
    for entry in os.listdir(directory):
        if not os.path.isfile(joinpath(directory, entry)):
            continue
        # partition splits on the first dot, leaving the rest intact.
        basename, _, extension = entry.partition('.')
        if basename == targetfilename:
            extensions.append(extension)
    return extensions
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def findAllSubstrings(string, substring): """ Returns a list of all substring starting positions in string or an empty list if substring is not present in string. :param string: a template string :param substring: a string, which is looked for in the ``string`` parameter. :returns: a list of substring starting positions in the template string """
#TODO: solve with regex? what about '.': #return [m.start() for m in re.finditer('(?='+substring+')', string)] start = 0 positions = [] while True: start = string.find(substring, start) if start == -1: break positions.append(start) #+1 instead of +len(substring) to also find overlapping matches start += 1 return positions
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def toList(variable, types=(basestring, int, float, )): """Converts a variable of type string, int, float to a list, containing the variable as the only element. :param variable: any python object :type variable: (str, int, float, others) :returns: [variable] or variable """
# Wrap scalar-like values in a single-element list; anything else (already a
# list, tuple, generator, ...) is returned unchanged.
# NOTE(review): the default `types` tuple uses `basestring`, which exists only
# on Python 2; on Python 3 evaluating the signature raises NameError -- confirm
# the target interpreter.
if isinstance(variable, types):
    return [variable]
else:
    return variable
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def calcDeviationLimits(value, tolerance, mode): """Returns the upper and lower deviation limits for a value and a given tolerance, either as relative or a absolute difference. :param value: can be a single value or a list of values if a list of values is given, the minimal value will be used to calculate the lower limit and the maximum value to calculate the upper limit :param tolerance: a number used to calculate the limits :param mode: either ``absolute`` or ``relative``, specifies how the ``tolerance`` should be applied to the ``value``. """
values = toList(value) if mode == 'relative': lowerLimit = min(values) * (1 - tolerance) upperLimit = max(values) * (1 + tolerance) elif mode == 'absolute': lowerLimit = min(values) - tolerance upperLimit = max(values) + tolerance else: raise Exception('mode %s not specified' %(filepath, )) return lowerLimit, upperLimit
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def open(self, filepath, mode='w+b'): """Opens a file - will actually return a temporary file but replace the original file when the context is closed. """
#Check if the filepath can be accessed and is writable before creating #the tempfile if not _isFileAccessible(filepath): raise IOError('File %s is not writable' % (filepath,)) if filepath in self._files: with open(self._files[filepath], mode=mode) as tmpf: yield tmpf else: tempfilepath = None with tempfile.NamedTemporaryFile(delete=False, mode=mode) as tmpf: tempfilepath = tmpf.name yield tmpf self._files[filepath] = tempfilepath
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_state(self): """This function adds a new state"""
# New state's id is the current state count, so ids stay dense and 0-based.
sid = len(self.states)
self.states.append(SFAState(sid))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _initAddons(cls, recurse=True): """ Initializes the addons for this manager. """
# Import every module associated with this addon manager; addon registration
# happens as a side effect of the imports.
for addon_module in cls.addonModules(recurse):
    projex.importmodules(addon_module)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_field_label_css_class(self, bound_field): """ Returns 'form-check-label' if widget is CheckboxInput. For all other fields, no css class is added. """
# If we render CheckboxInputs, Bootstrap requires a different # field label css class for checkboxes. if isinstance(bound_field.field.widget, forms.CheckboxInput): return 'form-check-label' return super().get_field_label_css_class(bound_field)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_textfile_with_contents(filename, contents, encoding='utf-8'): """ Creates a textual file with the provided contents in the workdir. Overwrites an existing file. """
# Make sure the parent directory exists, then replace any existing file.
ensure_directory_exists(os.path.dirname(filename))
if os.path.exists(filename):
    os.remove(filename)
# BUG FIX: use a context manager so the handle is closed even if a write
# raises (the original left the file open on exceptions).
with codecs.open(filename, "w", encoding) as outstream:
    outstream.write(contents)
    # Guarantee the file ends with a newline (POSIX text-file convention).
    if contents and not contents.endswith("\n"):
        outstream.write("\n")
    outstream.flush()
assert os.path.exists(filename), "ENSURE file exists: %s" % filename
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ensure_directory_exists(dirname, context=None): """ Ensures that a directory exits. If it does not exist, it is automatically created. """
# Resolve the directory against the (optional) execution context first.
real_dirname = dirname
if context:
    real_dirname = realpath_with_context(dirname, context)
# NOTE(review): exists-then-makedirs is racy if another process creates the
# directory concurrently -- confirm whether concurrent callers are possible.
if not os.path.exists(real_dirname):
    os.makedirs(real_dirname)
assert os.path.exists(real_dirname), "ENSURE dir exists: %s" % dirname
assert os.path.isdir(real_dirname), "ENSURE isa dir: %s" % dirname
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def deserialize(self, msg):
    """Deserialize *msg*, a JSON document, into the corresponding Python object."""
    self.logger.debug('deserializing %s', msg)
    return json.loads(msg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def append_request_id(req, resp, resource, params): """Append request id which got from response header to resource.req_ids list. """
def get_headers(resp):
    # Support both a public `.headers` attribute and the private
    # `._headers` used by some response implementations.
    if hasattr(resp, 'headers'):
        return resp.headers
    if hasattr(resp, '_headers'):
        return resp._headers
    return None

if(isinstance(resp, Response) or
        (get_headers(resp) is not None)):
    # Extract 'x-request-id' from headers if
    # response is a Response object.
    request_id = get_headers(resp).get('x-request-id')
else:
    # If resp is of type string or None.
    request_id = resp
# Lazily initialize the accumulator list on the resource.
if resource.req_ids is None:
    resource.req_ids = []
# Only record each request id once.
if request_id not in resource.req_ids:
    resource.req_ids.append(request_id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _sanitizer(self, obj): """Sanitizer method that will be passed to json.dumps."""
# datetime objects are not JSON-serializable: emit ISO-8601 text instead.
if isinstance(obj, datetime.datetime):
    return obj.isoformat()
# Objects exposing to_dict() are serialized via their dict representation.
if hasattr(obj, "to_dict"):
    return obj.to_dict()
return obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_uniq_for_step(ctx, ukeys, step, stage, full_data, clean_missing_after_seconds, to_uniq): """initially just a copy from UNIQ_PULL"""
# TODO: # this still seems to work ok for Storage types json/bubble, # for DS we need to reload de dumped step to uniqify if not ukeys: return to_uniq else: uniq_data = bubble_lod_load(ctx, step, stage) ctx.say('Creating uniq identifiers for [' + step + '] information', 0) ctx.gbc.say('uniq_data:', stuff=uniq_data, verbosity=1000) # TODO:make: data->keyed.items uniq_step_res = make_uniq(ctx=ctx, ldict=to_uniq, keyed=uniq_data, uniqstr=ukeys, tag=step, full_data=full_data, remove_missing_after_seconds=clean_missing_after_seconds) ctx.gbc.say('uniq_step_res:', stuff=uniq_step_res, verbosity=1000) to_uniq_newest = get_newest_uniq(ctx.gbc, uniq_step_res) # TODO: selected pulled only from slice of uniq # PROBLEM: slice of pull is not equal to slice of newest uniq, # can only select keys from newest, from slice of pulled # need a uid list from to_transform # to_transform = get_gen_slice(gbc, to_transform_newest, amount, index) # for now not a big problem, as with 'pump' there should be no problem to_uniq = to_uniq_newest # todo make keyed.items->data uniq_res_list = get_uniq_list(ctx.gbc, uniq_step_res) reset = True pfr = bubble_lod_dump(ctx=ctx, step=step, stage=stage, full_data=full_data, reset=reset, data_gen=uniq_res_list) ctx.gbc.say('saved uniq ' + step + ' data res:', stuff=pfr, verbosity=700) return to_uniq
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_ip(self, instance_id): """Add all IPs"""
# describe_instances nests data as Reservations -> Instances; a single
# instance id was queried, so take the first entry of each.
output = self.client.describe_instances(InstanceIds=[instance_id])
output = output.get("Reservations")[0].get("Instances")[0]
ips = {}
# .get() returns None when the instance has no public address.
ips['PrivateIp'] = output.get("PrivateIpAddress")
ips['PublicIp'] = output.get("PublicIpAddress")
return ips
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(): """ Testing function for Flex Regular Expressions to FST DFA """
# Python 2 driver: parse a flex ruleset into an automaton, minimize it,
# print it, and optionally save it to a file.
if len(argv) < 2:
    print 'Usage: %s fst_file [optional: save_file]' % argv[0]
    return
flex_a = Flexparser()
mma = flex_a.yyparse(argv[1])
mma.minimize()
print mma
if len(argv) == 3:
    mma.save(argv[2])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def has_permission(self): """Permission checking for "normal" Django."""
# Resolve the object(s) the permission check applies to: an explicit
# get_perms_objects() hook wins, then get_object(), then the queryset.
objs = [None]
if hasattr(self, 'get_perms_objects'):
    objs = self.get_perms_objects()
else:
    if hasattr(self, 'get_object'):
        try:
            objs = [self.get_object()]
        except Http404:
            raise
        # NOTE(review): bare except silently swallows all other errors from
        # get_object() -- presumably deliberate fallback, but worth narrowing.
        except:
            pass
    if objs == [None]:
        objs = self.get_queryset()
# GET requests with permission_filter_queryset enabled are always allowed;
# the queryset itself gets filtered down to permitted objects instead.
if (hasattr(self, 'permission_filter_queryset') and
        self.permission_filter_queryset is not False and
        self.request.method == 'GET'):
    if objs != [None]:
        self.perms_filter_queryset(objs)
    return True
else:
    return check_perms(self.request.user,
                       self.get_permission_required(),
                       objs, self.request.method)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_permissions(self, request): """Permission checking for DRF."""
# Resolve target objects the same way has_permission does (hook, object,
# then queryset); [None] means "no specific object".
objs = [None]
if hasattr(self, 'get_perms_objects'):
    objs = self.get_perms_objects()
else:
    if hasattr(self, 'get_object'):
        try:
            objs = [self.get_object()]
        except Http404:
            raise
        # NOTE(review): bare except hides non-404 failures of get_object().
        except:
            pass
    if objs == [None]:
        objs = self.get_queryset()
    # An empty queryset degrades to the "no specific object" sentinel.
    if len(objs) == 0:
        objs = [None]
# GET + permission_filter_queryset: filter instead of denying outright.
if (hasattr(self, 'permission_filter_queryset') and
        self.permission_filter_queryset is not False and
        self.request.method == 'GET'):
    if objs != [None]:
        self.perms_filter_queryset(objs)
else:
    has_perm = check_perms(self.request.user,
                           self.get_permission_required(),
                           objs, self.request.method)
    if not has_perm:
        msg = self.get_permission_denied_message(
            default="Permission denied."
        )
        # A sequence of messages collapses to its first entry.
        if isinstance(msg, Sequence):
            msg = msg[0]
        self.permission_denied(request, message=msg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _hashed_key(self): """ Returns 16-digit numeric hash of the redis key """
# md5 of the key prefix folded to a fixed-width integer. `_size_mod`
# bounds the digit count (default 5).
# NOTE(review): the docstring promises a "16-digit" hash, but the modulus
# yields at most `_size_mod` digits -- confirm which is intended.
return abs(int(hashlib.md5(
    self.key_prefix.encode('utf8')
).hexdigest(), 16)) % (10 ** (
    self._size_mod if hasattr(self, '_size_mod') else 5))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update(self, data): """ Set given keys to their respective values @data: #dict or :class:RedisMap of |{key: value}| entries to set """
if not data: return _rk, _dumps = self.get_key, self._dumps data = self._client.mset({ _rk(key): _dumps(value) for key, value in data.items()})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def expire_at(self, key, _time): """ Sets the expiration time of @key to @_time @_time: absolute Unix timestamp (seconds since January 1, 1970) """
# Redis EXPIREAT takes an integer Unix timestamp; round fractional input.
return self._client.expireat(self.get_key(key), round(_time))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _bucket_key(self): """ Returns hash bucket key for the redis key """
return "{}.size.{}".format( self.prefix, (self._hashed_key//1000) if self._hashed_key > 1000 else self._hashed_key)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def reverse_iter(self, start=None, stop=None, count=2000): """ -> yields items of the list in reverse """
cursor = '0' count = 1000 start = start if start is not None else (-1 * count) stop = stop if stop is not None else -1 _loads = self._loads while cursor: cursor = self._client.lrange(self.key_prefix, start, stop) for x in reversed(cursor or []): yield _loads(x) start -= count stop -= count
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pop(self, index=None): """ Removes and returns the item at @index or from the end of the list -> item at @index """
if index is None:
    # Default: RPOP removes from the tail (list end).
    return self._loads(self._client.rpop(self.key_prefix))
elif index == 0:
    # LPOP removes from the head.
    return self._loads(self._client.lpop(self.key_prefix))
else:
    # Arbitrary index: overwrite the slot with a random sentinel, then
    # LREM the sentinel -- Redis has no direct "pop at index".
    _uuid = gen_rand_str(16, 24)
    r = self[index]
    self[index] = _uuid
    self.remove(_uuid)
    return r
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def count(self, value): """ Not recommended for use on large lists due to time complexity, but it works. Use with caution. -> #int number of occurrences of @value """
# O(n): iterates (and deserializes) the whole list to count matches.
cnt = 0
for x in self:
    if x == value:
        cnt += 1
return cnt
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def push(self, *items): """ Prepends the list with @items -> #int length of list after operation """
# Serialize items first when serialization is enabled; LPUSH prepends.
if self.serialized:
    items = list(map(self._dumps, items))
return self._client.lpush(self.key_prefix, *items)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def index(self, item): """ Not recommended for use on large lists due to time complexity, but it works -> #int list index of @item """
# Linear scan. Unlike list.index(), returns None (not ValueError) when
# @item is not present.
for i, x in enumerate(self.iter()):
    if x == item:
        return i
return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def intersection(self, *others): """ Calculates the intersection of all the given sets, that is, members which are present in all given sets. @others: one or several #str keynames or :class:RedisSet objects -> #set of resulting intersection between @others and this set """
# Normalize @others to key names, run SINTER server-side, and deserialize
# each resulting member.
others = self._typesafe_others(others)
return set(map(
    self._loads, self._client.sinter(self.key_prefix, *others)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def rank(self, member): """ Gets the ASC rank of @member from the sorted set, that is, lower scores have lower ranks """
# In reversed mode use ZREVRANK so rank 0 corresponds to the highest score.
if self.reversed:
    return self._client.zrevrank(self.key_prefix, self._dumps(member))
return self._client.zrank(self.key_prefix, self._dumps(member))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def recv_blocking(conn, msglen): """Receive data until msglen bytes have been received."""
msg = b''
while len(msg) < msglen:
    # Cap each recv() request at 4096 bytes.
    maxlen = msglen-len(msg)
    if maxlen > 4096:
        maxlen = 4096
    tmpmsg = conn.recv(maxlen)
    # recv() returning b'' means the peer closed the connection.
    if not tmpmsg:
        raise RuntimeError("socket connection broken")
    msg += tmpmsg
    # NOTE(review): logging full message contents at debug level may leak
    # sensitive payloads -- confirm this is acceptable.
    logging.debug("Msglen: %d of %d", len(msg), msglen)
    logging.debug("Message: %s", msg)
return msg
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compare_password(expected, actual): """Compare two 64byte encoded passwords."""
# NOTE(review): == is not a constant-time comparison; if this guards
# authentication, consider hmac.compare_digest.
if expected == actual:
    return True, "OK"
msg = []
# The last 8 bytes carry a version tag; everything before is the password.
ver_exp = expected[-8:].rstrip()
ver_act = actual[-8:].rstrip()
if expected[:-8] != actual[:-8]:
    msg.append("Password mismatch")
if ver_exp != ver_act:
    msg.append("asterisk_mbox version mismatch. Client: '" + ver_act +
               "', Server: '" + ver_exp + "'")
return False, ". ".join(msg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def encode_to_sha(msg): """coerce numeric list into sha-looking bytearray"""
# Hex-encode the message and right-pad with '00' pairs so the result is
# exactly 64 chars -- shaped like a sha256 hex digest.
if isinstance(msg, str):
    msg = msg.encode('utf-8')
return (codecs.encode(msg, "hex_codec") + (b'00' * 32))[:64]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decode_from_sha(sha): """convert coerced sha back into numeric list"""
# Strip the trailing '00' padding added by encode_to_sha, then hex-decode.
# NOTE(review): payloads that legitimately end in 0x00 bytes would lose
# them here -- confirm encoded data never ends in zero bytes.
if isinstance(sha, str):
    sha = sha.encode('utf-8')
return codecs.decode(re.sub(rb'(00)*$', b'', sha), "hex_codec")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _api_path(self, item): """Get the API path for the current cursor position."""
if self.base_url is None:
    raise NotImplementedError("base_url not set")
# Join the id of every node along the cursor's path beneath base_url.
path = "/".join([x.blob["id"] for x in item.path])
return "/".join([self.base_url, path])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def register_references(kb_app: kb, sphinx_app: Sphinx, sphinx_env: BuildEnvironment, docnames: List[str]): """ Walk the registry and add sphinx directives """
# Register an empty reference bucket for every resource class flagged as a
# reference in the app config.
references: ReferencesContainer = sphinx_app.env.references
for name, klass in kb_app.config.resources.items():
    # Name is the value in the decorator and directive, e.g.
    # @kb.resource('category') means name=category
    if getattr(klass, 'is_reference', False):
        references[name] = dict()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def register_handlers(self, handler_classes): """ Create handlers from discovered handler classes :param handler_classes: List of :class:`~responsebot.handlers.base.BaseTweetHandler`'s derived classes """
for handler_class in handler_classes:
    # Instantiate each handler with the shared client connection.
    self.handlers.append(handler_class(client=self.client))
    # Fall back to str() for callables without a __name__ attribute.
    logging.info('Successfully registered {handler_class}'.format(
        handler_class=getattr(handler_class, '__name__', str(handler_class)))
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_merged_filter(self): """ Return merged filter from list of handlers :return: merged filter :rtype: :class:`~responsebot.models.TweetFilter` """
# Union all handlers' track keywords and follow ids, deduplicated via sets.
track = set()
follow = set()
for handler in self.handlers:
    track.update(handler.filter.track)
    follow.update(handler.filter.follow)
return TweetFilter(track=list(track), follow=list(follow))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_domain(url): """ Get domain part of an url. For example: https://www.python.org/doc/ -> https://www.python.org """
# Keep only scheme + netloc; path, query and fragment are discarded.
parse_result = urlparse(url)
domain = "{schema}://{netloc}".format(
    schema=parse_result.scheme, netloc=parse_result.netloc)
return domain
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def join_all(domain, *parts): """ Join all url components. Example:: https://www.apple.com/iphone :param domain: Domain parts, example: https://www.python.org :param parts: Other parts, example: "/doc", "/py27" :return: url """
l = list() if domain.endswith("/"): domain = domain[:-1] l.append(domain) for part in parts: for i in part.split("/"): if i.strip(): l.append(i) url = "/".join(l) return url
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _list_remote(store, maildir, verbose=False): """List the a maildir. store is an abstract representation of the source maildir. maildir is the local maildir to which mail will be pulled. This is a generator for a reason. Because of the way ssh multi-mastering works a single open TCP connection allows multiple virtual ssh connections. So the encryption and tcp only has to be done once. If this command returned a list then the ssh list command would have finished and the ssh connection for each message would have to be made again. """
# This command produces a list of all files in the maildir like: # base-filename timestamp container-directory command = """echo {maildir}/{{cur,new}} | tr ' ' '\\n' | while read path ; do ls -1Ugo --time-style=+%s $path | sed -rne "s|[a-zA-Z-]+[ \t]+[0-9]+[ \t]+[0-9]+[ \t]+([0-9]+)[ \t]+([0-9]+\\.[A-Za-z0-9]+)(\\.([.A-Za-z0-9-]+))*(:[2],([PRSTDF]*))*|\\2 \\1 $path|p";done""" stdout = store.cmd(command, verbose) lines = stdout.split("\n") for line in lines: parts = line.split(" ") if len(parts) >= 3: yield parts[0:3]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sshpull(host, maildir, localmaildir, noop=False, verbose=False, filterfile=None): """Pull a remote maildir to the local one. """
# Wrap the remote maildir in an SSH-backed store and delegate to _pull.
store = _SSHStore(host, maildir)
_pull(store, localmaildir, noop, verbose, filterfile)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def filepull(maildir, localmaildir, noop=False, verbose=False, filterfile=None): """Pull one local maildir into another. The source need not be an md folder (it need not have a store). In this case filepull is kind of an import. """
# A plain local store: filepull effectively imports one maildir into another.
store = _Store(maildir)
_pull(store, localmaildir, noop, verbose, filterfile)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _filter(msgdata, mailparser, mdfolder, mailfilters): """Filter msgdata by mailfilters"""
if mailfilters:
    for f in mailfilters:
        # Each filter gets a freshly parsed message (StringIO re-read).
        msg = mailparser.parse(StringIO(msgdata))
        rule = f(msg, folder=mdfolder)
        # Only filters that matched (returned a truthy rule) are yielded.
        if rule:
            yield rule
return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cmd(self, cmd, verbose=False): """Executes the specified command on the remote host. The cmd must be format safe, this means { and } must be doubled, thusly: echo /var/local/maildir/{{cur,new}} the cmd can include the format word 'maildir' to be replaced by self.directory. eg: echo {maildir}/{{cur,new}} """
# Substitute the store's maildir into the (format-safe) command template.
command = cmd.format(maildir=self.directory)
if verbose:
    print(command)
p = Popen([
    "ssh",
    "-T",
    self.host,
    command
    ], stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout,stderr = p.communicate()
# stderr is captured but deliberately discarded; only stdout is returned.
return stdout
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fetch_result(self): """Return a list of urls for each search result."""
results = self.soup.find_all('div', {'class': 'container container-small'}) href = None is_match = False i = 0 while i < len(results) and not is_match: result = results[i] anchor = result.find('a', {'rel': 'bookmark'}) is_match = self._filter_results(result, anchor) href = anchor['href'] i += 1 try: page = get_soup(href) except (Exception): page = None # Return page if search is successful if href and page: return page else: raise PageNotFoundError(PAGE_ERROR)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _filter_results(self, result, anchor): """Filter search results by checking category titles and dates"""
valid = True
try:
    cat_tag = result.find('a', {'rel': 'category tag'}).string
    title = anchor.string.lower()
    date_tag = result.find('time').string
except (AttributeError, TypeError):
    # Missing tag/anchor/date structure: reject the result outright.
    return False
if cat_tag != "Daily Ratings":
    valid = False
# Accept dates within a 5-day window of the requested date.
if not date_in_range(self.date, date_tag, 5):
    valid = False
# 'cable' searches must mention cable in the title; others must not.
if self.category == 'cable' and 'cable' not in title:
    valid = False
elif self.category != 'cable' and 'cable' in title:
    valid = False
return valid
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _build_url(self): """Build url based on searching by date or by show."""
# SEARCH_URL is a positional format template; argument order matters:
# base url, query string, day, year, month.
url_params = [
    BASE_URL, self.category + ' ratings',
    self.day, self.year, self.month
]
return SEARCH_URL.format(*url_params)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _assert_category(self, category): """Validate category argument"""
# Case-insensitive membership check against the known category names.
category = category.lower()
valid_categories = ['cable', 'broadcast', 'final', 'tv']
assert_msg = "%s is not a valid category." % (category)
# NOTE(review): assert statements are stripped under `python -O`; raise
# ValueError instead if this validation must always run.
assert (category in valid_categories), assert_msg
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_data(self, path, **params): """ Giving a service path and optional specific arguments, returns the XML data from the API parsed as a dict structure. """
xml = self.get_response(path, **params)
try:
    return parse(xml)
except Exception as err:
    # Dump the request context before re-raising to aid debugging.
    print(path)
    print(params)
    print(err)
    raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(self, port): # pragma: no coverage """ Run on given port. Parse standard options and start the http server. """
tornado.options.parse_command_line()
http_server = tornado.httpserver.HTTPServer(self)
http_server.listen(port)
# Blocks here serving requests until the IOLoop is stopped.
tornado.ioloop.IOLoop.instance().start()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def log_request(self, handler): """ Override base method to log requests to JSON UDP collector and emit a metric. """
packet = {'method': handler.request.method, 'uri': handler.request.uri, 'remote_ip': handler.request.remote_ip, 'status': handler.get_status(), 'request_time_ms': handler.request.request_time() * 1000.0, 'service_id': self.service_id, 'request_id': handler.request.headers.get(REQUEST_ID_HEADER, 'undefined') } # handler can optionally define additional data to log if hasattr(handler, 'logvalues'): for key, value in handler.logvalues.iteritems(): packet[key] = value servicelog.log(packet) metric = "requests." + str(handler.get_status()) metrics.timing(metric, handler.request.request_time() * 1000.0) super(LoggingApplication, self).log_request(handler)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def logvalue(self, key, value): """Add log entry to request log info"""
# Lazily create the per-request logvalues dict on first use.
if not hasattr(self, 'logvalues'):
    self.logvalues = {}
self.logvalues[key] = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write_error(self, status_code, **kwargs): """Log halt_reason in service log and output error page"""
message = default_message = httplib.responses.get(status_code, '') # HTTPError exceptions may have a log_message attribute if 'exc_info' in kwargs: (_, exc, _) = kwargs['exc_info'] if hasattr(exc, 'log_message'): message = str(exc.log_message) or default_message self.logvalue('halt_reason', message) title = "{}: {}".format(status_code, default_message) body = "{}: {}".format(status_code, message) self.finish("<html><title>" + title + "</title>" "<body>" + body + "</body></html>")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def timeit(self, metric, func, *args, **kwargs): """Time execution of callable and emit metric then return result."""
# Thin delegation to the metrics helper; returns func's own result.
return metrics.timeit(metric, func, *args, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def require_content_type(self, content_type): """Raises a 400 if request content type is not as specified."""
# Exact string comparison of the content-type header; halts with 400 on
# mismatch (a missing header compares as '').
if self.request.headers.get('content-type', '') != content_type:
    self.halt(400, 'Content type must be ' + content_type)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _ensure_request_id_header(self):
    """Guarantee the request carries a request-ID header, minting one if absent."""
    headers = self.request.headers
    if REQUEST_ID_HEADER in headers:
        return
    # uuid1 is time-based, so generated IDs are unique per host/process.
    headers.add(REQUEST_ID_HEADER, uuid.uuid1().hex)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(): """Testing function for DFA _Brzozowski Operation"""
if len(argv) < 2: targetfile = 'target.y' else: targetfile = argv[1] print 'Parsing ruleset: ' + targetfile, flex_a = Flexparser() mma = flex_a.yyparse(targetfile) print 'OK' print 'Perform minimization on initial automaton:', mma.minimize() print 'OK' print 'Perform StateRemoval on minimal automaton:', state_removal = StateRemoval(mma) mma_regex = state_removal.get_regex() print mma_regex
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _state_removal_init(self): """State Removal Operation Initialization"""
# First, we remove all multi-edges: for state_i in self.mma.states: for state_j in self.mma.states: if state_i.stateid == state_j.stateid: self.l_transitions[ state_i.stateid, state_j.stateid] = self.epsilon else: self.l_transitions[ state_i.stateid, state_j.stateid] = self.empty for arc in state_i.arcs: if arc.nextstate == state_j.stateid: if self.l_transitions[state_i.stateid, state_j.stateid] != self.empty: self.l_transitions[state_i.stateid, state_j.stateid] \ += self.mma.isyms.find(arc.ilabel) else: self.l_transitions[state_i.stateid, state_j.stateid] = \ self.mma.isyms.find(arc.ilabel)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _state_removal_solve(self): """The State Removal Operation"""
initial = sorted( self.mma.states, key=attrgetter('initial'), reverse=True)[0].stateid for state_k in self.mma.states: if state_k.final: continue if state_k.stateid == initial: continue self._state_removal_remove(state_k.stateid) print self.l_transitions return self.l_transitions
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def request(self, url, method, body="", headers={}, retry=True): """Execute an HTTP request and return a dict containing the response and the response status code. Keyword arguments: url -- The path to execute the result against, not including the API version or project ID, with no leading /. Required. method -- The HTTP method to use. Required. body -- A string or file object to send as the body of the request. Defaults to an empty string. headers -- HTTP Headers to send with the request. Can overwrite the defaults. Defaults to {}. retry -- Whether exponential backoff should be employed. Defaults to True. """
def request(self, url, method, body="", headers={}, retry=True):
    """Execute an HTTP request and return a dict containing the response
    and the response status code.

    Keyword arguments:
    url -- The path to execute the result against, not including the API
           version or project ID, with no leading /. Required.
    method -- The HTTP method to use. Required.
    body -- A string or file object to send as the body of the
            request. Defaults to an empty string.
    headers -- HTTP Headers to send with the request. Can overwrite the
               defaults. Defaults to {}.
    retry -- Whether exponential backoff should be employed. Defaults
             to True.
    """
    if headers:
        # Defaults first so caller-supplied headers overwrite them, as
        # documented above. (Previously the merge order was reversed and
        # the defaults silently won over the caller's headers.)
        headers = dict(list(self.headers.items()) + list(headers.items()))
    else:
        headers = self.headers
    # Python 2: the HTTP layer needs byte-string header names/values.
    if not sys.version_info >= (3,) and headers:
        headers = dict((k.encode('ascii') if isinstance(k, unicode) else k,
                        v.encode('ascii') if isinstance(v, unicode) else v)
                       for k, v in headers.items())
    url = self.base_url + url
    if not sys.version_info >= (3,):
        if isinstance(url, unicode):
            url = url.encode('ascii')
    r = self._doRequest(url, method, body, headers)
    # Retry transient gateway errors with exponential backoff.
    retry_http_codes = [503, 504]
    if r.status_code in retry_http_codes and retry:
        tries = 5
        delay = .5
        backoff = 2
        while r.status_code in retry_http_codes and tries > 0:
            tries -= 1
            time.sleep(delay)
            delay *= backoff
            r = self._doRequest(url, method, body, headers)
    r.raise_for_status()
    result = {}
    contentType = r.headers["Content-Type"]
    if contentType is None:
        contentType = "text/plain"
    else:
        contentType = contentType.split(";")[0]
    if contentType.lower() == "application/json":
        try:
            result["body"] = json.loads(r.text)
        except ValueError:
            # Server claimed JSON but sent something else; keep raw text.
            result["body"] = r.text
    else:
        result["body"] = r.text
    result["status"] = r.status_code
    result["resp"] = r
    result["content-type"] = contentType
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, url, headers={}, retry=True): """Execute an HTTP GET request and return a dict containing the response and the response status code. Keyword arguments: url -- The path to execute the result against, not including the API version or project ID, with no leading /. Required. headers -- HTTP Headers to send with the request. Can overwrite the defaults. Defaults to {}. retry -- Whether exponential backoff should be employed. Defaults to True. """
def get(self, url, headers={}, retry=True):
    """Issue an HTTP GET and return the response dict from ``request``.

    Keyword arguments:
    url -- The path to execute the result against, not including the API
           version or project ID, with no leading /. Required.
    headers -- HTTP Headers to send with the request. Can overwrite the
               defaults. Defaults to {}.
    retry -- Whether exponential backoff should be employed. Defaults
             to True.
    """
    options = {'url': url, 'method': "GET",
               'headers': headers, 'retry': retry}
    return self.request(**options)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post(self, url, body="", headers={}, retry=True): """Execute an HTTP POST request and return a dict containing the response and the response status code. Keyword arguments: url -- The path to execute the result against, not including the API version or project ID, with no leading /. Required. body -- A string or file object to send as the body of the request. Defaults to an empty string. headers -- HTTP Headers to send with the request. Can overwrite the defaults. Defaults to {}. retry -- Whether exponential backoff should be employed. Defaults to True. """
def post(self, url, body="", headers={}, retry=True):
    """Execute an HTTP POST request and return a dict containing the
    response and the response status code.

    Keyword arguments:
    url -- The path to execute the result against, not including the API
           version or project ID, with no leading /. Required.
    body -- A string or file object to send as the body of the
            request. Defaults to an empty string.
    headers -- HTTP Headers to send with the request. Can overwrite the
               defaults. Defaults to {}.
    retry -- Whether exponential backoff should be employed. Defaults
             to True.
    """
    # Copy before mutating: the original wrote Content-Length straight
    # into the caller's dict and into the shared {} default argument,
    # leaking the header into every later call made without headers.
    headers = dict(headers)
    headers["Content-Length"] = str(len(body))
    return self.request(url=url, method="POST", body=body,
                        headers=headers, retry=retry)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def patch(self, url, body="", headers={}, retry=True): """Execute an HTTP PATCH request and return a dict containing the response and the response status code. Keyword arguments: url -- The path to execute the result against, not including the API version or project ID, with no leading /. Required. body -- A string or file object to send as the body of the request. Defaults to an empty string. headers -- HTTP Headers to send with the request. Can overwrite the defaults. Defaults to {}. retry -- Whether exponential backoff should be employed. Defaults to True. """
def patch(self, url, body="", headers={}, retry=True):
    """Issue an HTTP PATCH and return the response dict from ``request``.

    Keyword arguments:
    url -- The path to execute the result against, not including the API
           version or project ID, with no leading /. Required.
    body -- A string or file object to send as the body of the
            request. Defaults to an empty string.
    headers -- HTTP Headers to send with the request. Can overwrite the
               defaults. Defaults to {}.
    retry -- Whether exponential backoff should be employed. Defaults
             to True.
    """
    options = {'url': url, 'method': "PATCH", 'body': body,
               'headers': headers, 'retry': retry}
    return self.request(**options)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clone(cls, srcpath, destpath): """Copy a main repository to a new location."""
def clone(cls, srcpath, destpath):
    """Duplicate the repository at *srcpath* into *destpath*.

    Pipes an ``svnadmin dump`` of the source into a freshly created
    repository and returns the new repository object. Raises
    CalledProcessError if the dump process exits non-zero.
    """
    try:
        os.makedirs(destpath)
    except OSError as e:
        # An already-existing destination directory is acceptable.
        if e.errno != errno.EEXIST:
            raise
    cmd = [SVNADMIN, 'dump', '--quiet', '.']
    dumper = subprocess.Popen(
        cmd,
        cwd=srcpath,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    repo = cls.create(destpath)
    repo.load(dumper.stdout)
    stderr = dumper.stderr.read()
    dumper.stdout.close()
    dumper.stderr.close()
    dumper.wait()
    if dumper.returncode != 0:
        raise subprocess.CalledProcessError(dumper.returncode, cmd, stderr)
    return repo
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def proplist(self, rev, path=None): """List Subversion properties of the path"""
def proplist(self, rev, path=None):
    """Return the Subversion properties set on *path* at revision *rev*.

    With ``path=None`` the repository-level properties are listed.
    """
    rev, prefix = self._maprev(rev)
    if path is not None:
        path = type(self).cleanPath(_join(prefix, path))
    return self._proplist(str(rev), path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def propget(self, prop, rev, path=None): """Get Subversion property value of the path"""
def propget(self, prop, rev, path=None):
    """Return the value of Subversion property *prop* on *path* at *rev*.

    With ``path=None`` the property is read at the repository level.
    """
    rev, prefix = self._maprev(rev)
    if path is not None:
        path = type(self).cleanPath(_join(prefix, path))
    return self._propget(prop, str(rev), path)