text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_schema(ldif_file): """ Load a schema from the given file into the SamDB """
def load_schema(ldif_file):
    """Load a schema from the given LDIF file into the SamDB.

    The load runs inside a transaction; any failure cancels it and
    re-raises, so the database is never left half-updated.
    """
    samdb = samdb_connect()
    domain_dn = samdb.domain_dn()
    samdb.transaction_start()
    try:
        setup_add_ldif(samdb, ldif_file, {"DOMAINDN": domain_dn})
    except:
        samdb.transaction_cancel()
        raise
    samdb.transaction_commit()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_child(self, child): """ Add a child node """
def add_child(self, child):
    """Append *child* to this node's children.

    Raises TypeError when *child* is not a DependencyNode.
    """
    if isinstance(child, DependencyNode):
        self._children.append(child)
    else:
        raise TypeError('"child" must be a DependencyNode')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_children(self, children): """ Add multiple children """
def add_children(self, children):
    """Add every node in *children*; the argument must be a list.

    Raises TypeError when *children* is not a list; each element is
    validated by ``add_child``.
    """
    if not isinstance(children, list):
        raise TypeError('"children" must be a list')
    for node in children:
        self.add_child(node)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def head_values(self): """ Return set of the head values """
def head_values(self):
    """Return the set of ``value`` attributes of the head nodes."""
    return {node.value for node in self._heads}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_head(self, head): """ Add head Node """
def add_head(self, head):
    """Append *head* to this node's heads.

    Raises TypeError when *head* is not a DependencyNode.
    """
    if isinstance(head, DependencyNode):
        self._heads.append(head)
    else:
        raise TypeError('"head" must be a DependencyNode')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_parser(parser): """Update the parser object for the shell. Arguments: parser: An instance of argparse.ArgumentParser. """
def update_parser(parser):
    """Update the parser object for the shell.

    Arguments:
        parser: An instance of argparse.ArgumentParser.
    """
    def __stdin(s):
        # Map the optional FILE argument to a readable stream: None stays
        # None, '-' means stdin, anything else is opened as UTF-8 text.
        if s is None:
            return None
        if s == '-':
            return sys.stdin
        return open(s, 'r', encoding = 'utf8')

    parser.add_argument('--root-prompt', metavar = 'STR',
            default = 'PlayBoy',
            help = 'the root prompt string')
    parser.add_argument('--temp-dir', metavar = 'DIR',
            default = '/tmp/easyshell_demo',
            help = 'the directory to save history files')
    # Fix: help text said 'infomation'
    parser.add_argument('--debug', action = 'store_true',
            help = 'turn debug information on')
    parser.add_argument('file', metavar = 'FILE', nargs = '?',
            type = __stdin,
            help = "execute script in non-interactive mode. '-' = stdin")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _init(): """ Internal function to install the module finder. """
global finder if finder is None: finder = ModuleFinder() if finder not in sys.meta_path: sys.meta_path.insert(0, finder)  # lazily create the ModuleFinder and register it on sys.meta_path; NOTE(review): the collapsed formatting leaves the nesting of the second `if` ambiguous (guarded insert vs. insert-only-on-create) — confirm against the original source
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_module(self, name, path=None): """ Called when an import is made. If there are hooks waiting for this module to be imported then we stop the normal import process and manually load the module. @param name: The name of the module being imported. @param path The root path of the module (if a package). We ignore this. @return: If we want to hook this module, we return a C{loader} interface (which is this instance again). If not we return C{None} to allow the standard import process to continue. """
def find_module(self, name, path=None):
    """Intercept imports of modules that have pending post-load hooks.

    @param name: name of the module being imported.
    @param path: root path of the module (ignored).
    @return: this instance (acting as the loader) when hooks are waiting
        for *name*; None to let the standard import machinery proceed.
    """
    if name in self.loaded_modules:
        return None
    if self.post_load_hooks.get(name):
        return self
    return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_module(self, name): """ If we get this far, then there are hooks waiting to be called on import of this module. We manually load the module and then run the hooks. @param name: The name of the module to import. """
def load_module(self, name):
    """Manually import *name*, run its post-load hooks, return the module.

    The name is recorded in ``loaded_modules`` first so the recursive
    ``__import__`` below is not intercepted again; the record is rolled
    back if the import or any hook raises.
    """
    self.loaded_modules.append(name)
    try:
        __import__(name, {}, {}, [])
        module = sys.modules[name]
        self._run_hooks(name, module)
    except:
        self.loaded_modules.pop()
        raise
    return module
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _run_hooks(self, name, module): """ Run all hooks for a module. """
def _run_hooks(self, name, module):
    """Invoke and discard every hook registered for *name*."""
    for hook in self.post_load_hooks.pop(name, []):
        hook(module)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def index_repeatmasker_alignment_by_id(fh, out_fh, vebrose=False): """Build an index for a repeat-masker alignment file by repeat-masker ID."""
def index_repeatmasker_alignment_by_id(fh, out_fh, vebrose=False):
    """Build an index for a repeat-masker alignment file keyed by RM ID.

    (Parameter name ``vebrose`` kept as-is for interface compatibility.)
    """
    def _uid(rm_alignment):
        # Key each alignment on its repeat-masker ID metadata field.
        return rm_alignment.meta[multipleAlignment.RM_ID_KEY]

    IndexedFile(fh, repeat_masker_alignment_iterator, _uid).write_index(out_fh)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def index_genome_alignment_by_locus(fh, out_fh, verbose=False): """Build an index for a genome alig. using coords in ref genome as keys."""
def index_genome_alignment_by_locus(fh, out_fh, verbose=False):
    """Build an index for a genome alignment keyed by reference coordinates."""
    block_iterator = functools.partial(genome_alignment_iterator,
                                       reference_species="hg19",
                                       index_friendly=True)
    index = IndexedFile(fh, block_iterator,
                        JustInTimeGenomeAlignmentBlock.build_hash)
    index.write_index(out_fh, verbose=verbose)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lookup_genome_alignment_index(index_fh, indexed_fh, out_fh=sys.stdout, key=None, verbose=False): """Load a GA index and its indexed file and extract one or more blocks. :param index_fh: the index file to load. Can be a filename or a stream-like object. :param indexed_fh: the file that the index was built for, :param key: A single key, iterable of keys, or None. This key will be used for lookup. If None, user is prompted to enter keys interactively. """
def lookup_genome_alignment_index(index_fh, indexed_fh, out_fh=sys.stdout,
                                  key=None, verbose=False):
    """Load a genome-alignment index and extract one or more blocks.

    :param index_fh: the index file to load (filename or stream).
    :param indexed_fh: the file the index was built for.
    :param key: a single key or None; when None, keys are read
        interactively from stdin until an empty line is entered.
        (NOTE(review): the original docstring also mentions an iterable
        of keys, but that case was never handled — confirm.)
    """
    # Fix: raw_input() only exists on Python 2; fall back to input() on 3.
    try:
        read_line = raw_input
    except NameError:
        read_line = input

    # load the genome alignment as a JIT object
    bound_iter = functools.partial(genome_alignment_iterator,
                                   reference_species="hg19",
                                   index_friendly=True)
    hash_func = JustInTimeGenomeAlignmentBlock.build_hash
    idx = IndexedFile(record_iterator=bound_iter,
                      record_hash_function=hash_func)
    idx.read_index(index_fh, indexed_fh)

    if key is None:
        # interactive mode: keep prompting until an empty line
        while key is None or key.strip() != "":
            sys.stderr.write("[WAITING FOR KEY ENTRY ON STDIN; " +
                             "END WITH EMPTY LINE]\n")
            key = read_line()
            # we know keys for genome alignments have tabs as delims, so..
            key = '\t'.join(key.split()).strip()
            if key != "":
                out_fh.write(str(idx[key]) + "\n")
        sys.stderr.write("\n")
    else:
        # we know keys for genome alignments have tabs as delims, so..
        key = '\t'.join(key.split())
        out_fh.write(str(idx[key]) + "\n")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getUI_build_index(prog_name, args): """ build and return a UI object for the 'build' option. :param args: raw arguments to parse """
def getUI_build_index(prog_name, args):
    """Build and return a UI object for the 'build' option.

    :param prog_name: program name shown by the UI.
    :param args: raw arguments to parse.
    """
    long_description = "Build an index for one or more files."
    short_description = long_description
    ui = CLI(prog_name, short_description, long_description)
    ui.minArgs = 0
    ui.maxArgs = -1
    ui.addOption(Option(short="o", long="output", argName="filename",
                        description="output to given file, else stdout",
                        required=False, type=str))
    ui.addOption(Option(short="t", long="type", argName="filename",
                        description="the type of the file. If missing, " +
                        "the script will try to guess the file type. " +
                        "Supported file types are: " +
                        ", ".join([f.name for f in FileType]),
                        required=False, type=str))
    ui.addOption(Option(short="v", long="verbose",
                        description="output additional messages to stderr " +
                        "about run (default: " + str(DEFAULT_VERBOSITY) + ")",
                        default=DEFAULT_VERBOSITY, required=False))
    ui.addOption(Option(short="h", long="help",
                        description="show this help message ", special=True))
    ui.addOption(Option(short="u", long="test",
                        description="run unit tests ", special=True))
    ui.parseCommandLine(args)
    return ui
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_indexer(in_fns, selected_type=None): """Determine which indexer to use based on input files and type option."""
def __get_indexer(in_fns, selected_type=None):
    """Pick the indexer, from the explicit type or the files' extension.

    Raises IndexError when the type cannot be determined (no input files,
    or mixed extensions) and no explicit type was given.
    """
    if selected_type is not None:
        indexer = get_indexer_by_filetype(selected_type)
    else:
        if not in_fns:
            raise IndexError("reading from stdin, unable to guess input file " +
                             "type, use -t option to set manually.\n")
        extensions = set(os.path.splitext(f)[1] for f in in_fns)
        assert(len(extensions) >= 1)
        if len(extensions) > 1:
            raise IndexError("more than one file extension present, unable " +
                             "to get input type, use -t option to set manually.\n")
        indexer = get_indexer_by_file_extension(list(extensions)[0])
    assert(indexer is not None)
    return indexer
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_lookup(in_fn, selected_type=None): """Determine which lookup func to use based on input files and type option."""
def __get_lookup(in_fn, selected_type=None):
    """Pick the lookup function, from the explicit type or the file extension."""
    if selected_type is not None:
        lookup_func = get_lookup_by_filetype(selected_type)
    else:
        extension = os.path.splitext(in_fn)[1]
        lookup_func = get_lookup_by_file_extension(extension)
    assert(lookup_func is not None)
    return lookup_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_surveys(): # noqa: E501 """list the surveys available List the surveys available # noqa: E501 :rtype: List[Survey] """
def list_surveys():  # noqa: E501
    """List the surveys available (own and shared).

    :rtype: List[Survey]
    """
    pm = PrecisionMapper(login=_LOGIN, password=_PASSWORD)
    pm.sign_in()
    combined = pm.get_surveys() + pm.get_shared_surveys()
    return [
        Survey(
            date=s.date,
            image_nb=s.image_nb,
            location=s.location,
            name=s.name,
            sensors=s.sensor,
            size=s.size,
            survey_id=s.id,
        )
        for s in combined
    ]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def pg2df(res):
    '''Parse a getlog request result into a DataFrame.

    :param res: a requests response from the COG changelog search page.
    :return: DataFrame with columns depcom/date/obs/dep/mod; empty (same
        columns) when the search returned no rows.
    '''
    columns = [u'depcom', u'date', u'obs', u'dep', u'mod']
    soup = BeautifulSoup(res.text)
    if u'Pas de r\xe9ponse pour cette recherche.' in soup.text:
        # Fix: was `pass` (flagged "don't pass !" in the original), which
        # returned None and broke downstream pd.concat; return an empty
        # frame with the expected columns instead.
        return pd.DataFrame(columns=columns)
    params = urlparse.parse_qs(urlparse.urlsplit(res.url)[-2])
    tb = soup.find_all('table')[0]
    rows = [
        [col.text for col in row.find_all('td')] +
        [params['dep'][0], params['mod'][0]]
        for row in tb.find_all('tr')[1:]]  # skip header row
    records = [dict(zip(columns, lst)) for lst in rows]
    return pd.DataFrame(records)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def getlog(start, end, deplist=['00'], modlist=['M0'], xlsx=None):
    '''Batch-download COG changelogs and return them as one DataFrame.

    :param start, end: period to query.
    :param deplist: departments to query.
    :param modlist: modification types; ['M0'] expands to all types.
    :param xlsx: optional path; when given, also write the result to xlsx.
    '''
    # entry point url
    api = 'http://www.insee.fr/fr/methodes/nomenclatures/cog/recherche_historique.asp'
    # build payloads ('M0' is shorthand for every modification type)
    if modlist == ['M0']:
        modlist = ['MA', 'MB', 'MC', 'MD', 'ME', 'MF', 'MG']
    payloads = [{'debut': start, 'fin': end, 'dep': dep, 'mod': mod}
                for dep in deplist for mod in modlist]
    # send requests
    results = [pg2df(requests.get(api, params=payload)) for payload in payloads]
    # make a df and fine tune it (force dtypes)
    data = pd.concat(results)
    # Fix: reset_index() is not in-place — the result was discarded before.
    data = data.reset_index(drop=True)
    data['date'] = pd.to_datetime(data['date'], format='%d/%m/%Y')
    data[['dep', 'depcom', 'mod', 'obs']] = \
        data[['dep', 'depcom', 'mod', 'obs']].astype(object)
    # write xlsx
    if xlsx:
        pd.core.format.header_style = None
        data.to_excel(xlsx, index=False)
    # Fix: return the fine-tuned frame; the original returned a fresh
    # pd.concat(results), discarding the date conversion and dtype fixes.
    return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def currency_format(cents): """Format currency with symbol and decimal points. >> currency_format(-600) - $6.00 TODO: Add localization support. """
def currency_format(cents):
    """Format currency with symbol and decimal points.

    >> currency_format(-600)
    - $6.00

    Non-numeric input is returned unchanged.

    TODO: Add localization support.
    """
    try:
        cents = int(cents)
    except ValueError:
        return cents
    negative = (cents < 0)
    if negative:
        cents = -1 * cents
    # Fix: the original used `cents / 100`, which is float division on
    # Python 3 and produced e.g. '$6.0.0'; divmod keeps integers.
    dollars, cents = divmod(cents, 100)
    centstr = '%02d' % cents
    if negative:
        return "- $%s.%s" % (intcomma(dollars), centstr)
    return "$%s.%s" % (intcomma(dollars), centstr)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_website_affected(self, website): """ Tell if the website is affected by the domain change """
def is_website_affected(self, website):
    """Return True when *website* is affected by the domain change."""
    if self.domain is None:
        # no specific domain configured: everything is affected
        return True
    subdomains = website['subdomains']
    if not self.include_subdomains:
        return self.domain in subdomains
    suffix = "." + self.domain
    return any(s == self.domain or s.endswith(suffix) for s in subdomains)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_affected_domains(self): """ Return a list of all affected domain and subdomains """
def get_affected_domains(self):
    """Return all affected domains and subdomains, shortest first."""
    suffix = ("." + self.domain) if self.domain else None
    matched = set()
    for website in self.websites:
        for sub in website['subdomains']:
            if (self.domain is None or sub == self.domain or
                    (self.include_subdomains and sub.endswith(suffix))):
                matched.add(sub)
    # shortest domain first
    return sorted(matched, key=len)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def secured_apps_copy(self, apps): """ Given the http app list of a website, return what should be in the secure version """
def secured_apps_copy(self, apps):
    """Return the app list for the secured site: everything except the
    let's-encrypt verification app."""
    excluded = (self.LETSENCRYPT_VERIFY_APP_NAME,)
    return [[name, path] for name, path in apps if name not in excluded]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_le_verification_app(self): """ Create the let's encrypt app to verify the ownership of the domain """
def create_le_verification_app(self):
    """Return (creating if needed) the app let's encrypt uses to verify
    ownership of the domain."""
    app_name = self.LETSENCRYPT_VERIFY_APP_NAME
    if app_name in self._apps:
        logger.debug(
            "The LE verification APP already exists as %s" % app_name
        )
        verification_app = self._apps[app_name]
    else:
        logger.info("Creating the identity-verification app for let's encrypt")
        verification_app = self.api.create_app(app_name, 'static_php70')
        self._apps[app_name] = verification_app
    # LE uses the .well-known subfolder of the domain for its checks. The
    # app is mounted on the .well-known path, so symlink the app folder to
    # serve app-folder/.well-known from the app root.
    app_root = os.path.join('~/webapps', app_name)
    well_known_folder = os.path.join(app_root, '.well-known')
    if not is_link(well_known_folder):
        logger.info("Preparing static app for the verification")
        run('ln -s {app_root} {well_known_folder}'.format(**locals()))
    return verification_app
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sync_certificates(self, subdomains=None): """ Check all certificates available in acme in the host and sync them with the webfaction certificates """
def sync_certificates(self, subdomains=None):
    """Sync every acme.sh certificate on the host into Webfaction.

    For each certificate known to acme.sh (optionally restricted to
    *subdomains*), read the cert/key/CA files and create or update the
    matching Webfaction certificate when anything differs.
    """
    acme_listing = run(".acme.sh/acme.sh --list", quiet=True)
    logger.info("Syncing Webfaction certificates")
    # the first line of the acme.sh listing is a header row
    for listing_line in acme_listing.split('\n')[1:]:
        main_domain = listing_line.split()[0]
        if subdomains and main_domain not in subdomains:
            continue
        if not exists(os.path.join("~/.acme.sh/", main_domain)):
            continue
        certificate_cer = self.get_remote_content(
            os.path.join("~/.acme.sh/", main_domain, main_domain + ".cer")
        )
        certificate_key = self.get_remote_content(
            os.path.join("~/.acme.sh/", main_domain, main_domain + ".key")
        )
        certificate_ca = self.get_remote_content(
            os.path.join("~/.acme.sh/", main_domain, "ca.cer")
        )
        certificate_name = self.slugify(main_domain)
        existing = self._certificates.get(certificate_name)
        up_to_date = (existing is not None
                      and existing['certificate'] == certificate_cer
                      and existing['private_key'] == certificate_key
                      and existing['intermediates'] == certificate_ca)
        if up_to_date:
            continue
        new_certificate = dict(
            name=certificate_name,
            certificate=certificate_cer,
            private_key=certificate_key,
            intermediates=certificate_ca,
        )
        if existing is None:
            logger.info("Creating new certificate for %s" % main_domain)
            self.api.create_certificate(new_certificate)
        else:
            logger.info("Updating certificate for %s" % main_domain)
            self.api.update_certificate(new_certificate)
        self._certificates[certificate_name] = new_certificate
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_remote_content(filepath): """ A handy wrapper to get a remote file content """
def get_remote_content(filepath):
    """Fetch a remote file via fabric and return its stripped UTF-8 text."""
    with hide('running'):
        buf = BytesIO()
        get(filepath, buf)
        return buf.getvalue().decode('utf-8').strip()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_main_domain(self, website): """ Given a list of subdomains, return the main domain of them If the subdomain are across multiple domain, then we cannot have a single website it should be splitted """
def get_main_domain(self, website):
    """Return the single registered domain covering all the website's
    subdomains.

    Exits when the subdomains span several domains (the site should be
    split). When no domain matches, an error is logged and the final
    lookup raises IndexError (original behavior preserved).
    """
    main_domains = {d for sub in website['subdomains']
                    for d in self._domains
                    if sub == d or sub.endswith("." + d)}
    if len(main_domains) > 1:
        logger.error(
            "The secure site %s cover multiple domains, it should be splitted"
            % website['name']
        )
        exit(1)
    elif not main_domains:
        logger.error("We cannot find the main domain for %s" % website['name'])
    return list(main_domains)[0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_stroke_freq(Ax, Az, fs_a, nperseg, peak_thresh, stroke_ratio=None):
    '''Determine stroke frequency to use as a cutoff for filtering.

    Power spectra of the two accelerometer signals are computed with
    Welch's method over descent/ascent phases (steady swimming); each
    axis's dominant PSD peak gives its stroke frequency, the user is
    prompted for a cutoff frequency after inspecting the plotted PSD,
    and the per-axis values are averaged.

    Args
    ----
    Ax: numpy.ndarray, shape (n,)
        x-axis accelerometer data (longitudinal)
    Az: numpy.ndarray, shape (n,)
        z-axis accelerometer data (dorso-ventral)
    fs_a: float
        sampling frequency (samples per second)
    nperseg: int
        samples per segment in the PSD calculation (scipy default is 256)
    peak_thresh: float
        PSD power threshold; only peaks over it are returned

    Returns
    -------
    cutoff_frq: float
        cutoff frequency (Hz) for low/high-pass filtering
    stroke_frq: float
        frequency of the dominant wavelength in the signal
    stroke_ratio: float
    '''
    import numpy
    from . import dsp
    from . import utils
    from .plots import plotdsp

    # Axes to be used for determining `stroke_frq`.
    # NOTE(review): labels look swapped — index 0/'x' is tagged
    # 'dorsa-ventral' while the docstring calls Ax longitudinal; confirm.
    stroke_axes = [(0, 'x', 'dorsa-ventral', Ax),
                   (2, 'z', 'lateral', Az)]

    # Per-axis results, averaged at the end
    cutoff_frqs = list()
    stroke_frqs = list()
    stroke_ratios = list()

    for i, i_alph, name, data in stroke_axes:
        frqs, S, _, _ = dsp.calc_PSD_welch(data, fs_a, nperseg)

        # Index positions of local maxima and minima in the PSD
        delta = S.max() / 1000
        max_ind, min_ind = dsp.simple_peakfinder(range(len(S)), S, delta)

        max0 = max_ind[0]
        # TODO hack fix, improve later
        try:
            min0 = min_ind[0]
        except:
            min0 = None

        stroke_ratio = 0.4
        stroke_frq = frqs[max0]

        # Prompt user for `cutoff_frq` after inspecting the PSD plot
        title = 'PSD - {} axis (n={}), {}'.format(i_alph, i, name)
        plotdsp.plot_welch_peaks(frqs, S, max_ind, title=title)
        cutoff_frq = utils.recursive_input('cutoff frequency', float)

        cutoff_frqs.append(cutoff_frq)
        stroke_frqs.append(stroke_frq)
        stroke_ratios.append(stroke_ratio)

    # Average values across axes
    cutoff_frq = float(numpy.mean(cutoff_frqs))
    stroke_frq = float(numpy.mean(stroke_frqs))

    # `stroke_ratios` may hold None after manual selection; fall back to None
    try:
        stroke_ratio = float(numpy.mean(stroke_ratios))
    except:
        stroke_ratio = None

    return cutoff_frq, stroke_frq, stroke_ratio
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_stroke_glide_indices(A_g_hf, fs_a, J, t_max):
    '''Get stroke and glide indices from high-pass accelerometer data.

    Args
    ----
    A_g_hf: 1-D ndarray
        animal-frame high-pass accelerometer signal sampled at fs_a
    fs_a: int
        accelerometer samples per second
    J: float
        threshold (m/s^2) for detecting a fluke stroke
    t_max: int
        maximum allowable fluke-stroke duration in seconds

    Returns
    -------
    GL: 1-D ndarray
        start times (col 0) and end times (col 1) of glides, i.e.
        intervals with no zero crossings for `t_max` or more seconds.
    '''
    import numpy
    from . import dsp

    # Glide determination needs a 1-D acceleration array
    if A_g_hf.ndim > 1:
        raise IndexError('A_g_hf multidimensional: Glide index determination '
                         'requires 1-D acceleration array as input')

    # Maximum stroke duration expressed in samples
    n_max = t_max * fs_a

    # Zero-crossing start/stop pairs of the high-pass signal
    zc = dsp.findzc(A_g_hf, J, n_max / 2)

    # Glides: intervals between zero crossings longer than `n_max` samples
    ind = numpy.where(zc[1:, 0] - zc[0:-1, 1] > n_max)[0]
    gl_ind = numpy.vstack([zc[ind, 0] - 1, zc[ind + 1, 1] + 1]).T

    # Trim each glide around its midpoint to where |signal| stays below J
    gl_mean_idx = numpy.round(numpy.mean(gl_ind, 1)).astype(int)
    gl_ind = numpy.round(gl_ind).astype(int)
    # NOTE(review): assigning numpy.nan into these int arrays raises on
    # modern numpy — confirm whether the NaN branch is ever reached.
    for i in range(len(gl_mean_idx)):
        col = range(gl_mean_idx[i], gl_ind[i, 0], -1)
        nan_hits = numpy.where(numpy.isnan(A_g_hf[col]))[0]
        if nan_hits.size != 0:
            gl_mean_idx[i] = numpy.nan
            gl_ind[i, 0] = numpy.nan
            gl_ind[i, 1] = numpy.nan
        else:
            over_J1 = numpy.where(abs(A_g_hf[col]) >= J)[0][0]
            gl_ind[i, 0] = gl_mean_idx[i] - over_J1 + 1
            col = range(gl_mean_idx[i], gl_ind[i, 1])
            over_J2 = numpy.where(abs(A_g_hf[col]) >= J)[0][0]
            gl_ind[i, 1] = gl_mean_idx[i] + over_J2 - 1

    # Keep only glides longer than half the maximum stroke duration
    GL = gl_ind
    GL = GL[numpy.where(GL[:, 1] - GL[:, 0] > n_max / 2)[0], :]

    return GL
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_updates( self, display_all_distributions=False, verbose=False ): # pragma: no cover """ When called, get the environment updates and write updates to a CSV file and if a new config has been provided, write a new configuration file. Args: display_all_distributions (bool): Return distribution even if it is up-to-date. verbose (bool): If ``True``, log to terminal to terminal. """
def get_updates(
        self, display_all_distributions=False, verbose=False
):  # pragma: no cover
    """Collect environment updates, write them to the configured CSV file
    and/or new config file, and return them.

    Args:
        display_all_distributions (bool): Return distributions even when
            they are up-to-date.
        verbose (bool): When True, log progress to stdout.
    """
    if verbose:
        logging.basicConfig(
            stream=sys.stdout, level=logging.INFO, format='%(message)s',
        )

    logging.info('Checking installed packages for updates...')
    updates = self._get_environment_updates(
        display_all_distributions=display_all_distributions
    )

    for update in updates or []:
        logging.info(update)

    if updates:
        if self._csv_file_name:
            self.write_updates_to_csv(updates)
        if self._new_config:
            self.write_new_config(updates)

    return updates
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def csv_writer(csvfile): """ Get a CSV writer for the version of python that is being run. """
def csv_writer(csvfile):
    """Return a CSV writer suited to the running Python version.

    Python 2's csv module wants a byte-string delimiter; Python 3 wants str.
    """
    delimiter = ',' if sys.version_info >= (3,) else b','
    return csv.writer(csvfile, delimiter=delimiter, lineterminator='\n')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write_updates_to_csv(self, updates): """ Given a list of updates, write the updates out to the provided CSV file. Args: updates (list): List of Update objects. """
def write_updates_to_csv(self, updates):
    """Write the column headers then one row per update to the configured
    CSV file.

    Args:
        updates (list): List of Update objects.
    """
    with open(self._csv_file_name, 'w') as csvfile:
        writer = self.csv_writer(csvfile)
        writer.writerow(CSV_COLUMN_HEADERS)
        for update in updates:
            # attribute is spelled `prelease` (sic) on Update
            writer.writerow([
                update.name,
                update.current_version,
                update.new_version,
                update.prelease,
            ])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write_new_config(self, updates): """ Given a list of updates, write the updates out to the provided configuration file. Args: updates (list): List of Update objects. """
def write_new_config(self, updates):
    """Write a requirements-style file pinning each update's new version.

    Args:
        updates (list): List of Update objects.
    """
    with open(self._new_config, 'w') as config_file:
        config_file.writelines(
            '{0}=={1} # The installed version is: {2}\n'.format(
                update.name, update.new_version, update.current_version
            )
            for update in updates
        )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_environment_updates(self, display_all_distributions=False): """ Check all packages installed in the environment to see if there are any updates available. Args: display_all_distributions (bool): Return distribution even if it is up-to-date. Defaults to ``False``. Returns: list: A list of Update objects ordered based on ``instance.name``. """
def _get_environment_updates(self, display_all_distributions=False):
    """Check every installed distribution for available updates.

    Args:
        display_all_distributions (bool): Also report up-to-date and
            unknown distributions. Defaults to ``False``.

    Returns:
        list: Update objects sorted by name.
    """
    updates = []
    for distribution in self.pip.get_installed_distributions():
        versions = self.get_available_versions(distribution.project_name)
        max_version = max(versions.keys()) if versions else UNKNOW_NUM
        installed_version = self._parse_version(distribution.version)

        update = None
        if versions and max_version > installed_version:
            # newer release available; last tuple element is the
            # is_prerelease flag produced by _parse_version
            update = Update(
                distribution.project_name,
                distribution.version,
                versions[max_version],
                prelease=max_version[-1],
            )
        elif display_all_distributions and max_version == installed_version:
            update = Update(
                distribution.project_name,
                distribution.version,
                versions[max_version],
            )
        elif display_all_distributions:
            update = Update(
                distribution.project_name,
                distribution.version,
                UNKNOWN,
            )
        if update:
            updates.append(update)

    return sorted(updates, key=lambda u: u.name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_available_versions(self, project_name): """ Query PyPI to see if package has any available versions. Args: project_name (str): The name the project on PyPI. Returns: dict: Where keys are tuples of parsed versions and values are the versions returned by PyPI. """
def get_available_versions(self, project_name):
    """Query PyPI for the available releases of *project_name*.

    Falls back to the capitalized name when the first lookup is empty.

    Returns:
        dict: parsed version tuples mapped to the raw version strings.
    """
    releases = self.pypi_client.package_releases(project_name)
    if not releases:
        releases = self.pypi_client.package_releases(project_name.capitalize())
    # ``dict()`` call (not a dict comprehension) kept for Python 2.6.
    return dict((self._parse_version(v), v) for v in releases)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse_version(version): """ Parse a version string. Args: version (str): A string representing a version e.g. '1.9rc2' Returns: tuple: major, minor, patch parts cast as integer and whether or not it was a pre-release version. """
def _parse_version(version):
    """Parse '1.9rc2'-style strings.

    Returns:
        tuple: the dotted release parts as ints, followed by the
        is_prerelease flag.
    """
    parsed = parse_version(version)
    release_parts = [int(part) for part in parsed.base_version.split('.')]
    return tuple(release_parts) + (parsed.is_prerelease,)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def support(self, version): """ return `True` if current python version match version passed. raise a deprecation warning if only PY2 or PY3 is supported as you probably have a conditional that should be removed. """
if not self._known_version(version): warn("unknown feature: %s"%version) return True else: if not self._get_featureset_support(version): warn("You are not supporting %s anymore "%str(version), UserWarning, self.level) if self._alone_version(version): warn("%s is the last supported feature of this group, you can simplifiy this logic. "%str(version), UserWarning,self.level) return self.predicates.get(version, True) if (not self.PY3_supported) or (not self.PY2_supported): warn("You are only supporting 1 version of Python", UserWarning, self.level) if version == PY3: return sys.version_info.major == 3 elif version == PY2: return sys.version_info.major == 2  # NOTE(review): both branches of the first if/else return, so the trailing PY2/PY3 checks appear unreachable — confirm intended control flow against the original source
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _default_warner(self, message, stacklevel=1): """ default warner function use a pending deprecation warning, and correct for the correct stacklevel """
def _default_warner(self, message, stacklevel=1):
    """Emit *message* as a PendingDeprecationWarning.

    The stack level is offset by 4 to skip the wrapper frames between the
    caller and ``warnings.warn`` so the warning points at the right line.
    """
    adjusted_level = stacklevel + 4
    return warnings.warn(message, PendingDeprecationWarning,
                         stacklevel=adjusted_level)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_if_complete(self, url, json_response): """ Check if a request has been completed and return the redirect URL if it has @type url: str @type json_response: list or dict @rtype: str or bool """
def _check_if_complete(self, url, json_response):
    """Check if a request has been completed; return the redirect URL if so.

    @type url: str
    @type json_response: list or dict
    @rtype: str or bool
    """
    if '__done' in json_response and isinstance(json_response, list):
        # Re-issue the MultipleRedirect ajax call with the mr token
        # appended to the query string, then let the parent class decide.
        parts = list(urlparse(url))
        query = parse_qs(parts[4])
        query['mr'] = '"' + str(json_response[0]) + '"'
        parts[4] = urlencode(query, True)
        mr_link = urlunparse(parts)
        mr_j, mr_r = self._ajax(mr_link)
        self.log.debug('MultipleRedirect link: %s', mr_link)
        return super(Installer, self)._check_if_complete(url, mr_j)
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def entry_index(request, limit=0, template='djournal/entry_index.html'):
    '''Return a response of a fixed number of entries; all of them by
    default.'''
    entries = Entry.public.all()
    if limit > 0:
        entries = entries[:limit]
    return render_to_response(
        template,
        {'entries': entries},
        context_instance=RequestContext(request),
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def entry_detail(request, slug, template='djournal/entry_detail.html'):
    '''Returns a response of an individual entry, for the given slug.'''
    # 404 when no public entry carries this slug.
    found_entry = get_object_or_404(Entry.public, slug=slug)
    return render_to_response(
        template,
        {'entry': found_entry},
        context_instance=RequestContext(request),
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def tagged_entry_index(request, slug, template='djournal/tagged_entry_index.html'):
    '''Returns a response of all entries tagged with a given tag.'''
    # Resolve the tag first (404 on unknown slug), then list its entries.
    tag = get_object_or_404(Tag, slug=slug)
    context = {
        'tag': tag,
        'entries': Entry.public.filter(tags__in=[tag]),
    }
    return render_to_response(
        template,
        context,
        context_instance=RequestContext(request),
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getErrorComponent(result, tag): """get total error contribution for component with specific tag"""
return math.sqrt(sum( (error*2)**2 for (var, error) in result.error_components().items() if var.tag == tag ))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getEdges(npArr): """get np array of bin edges"""
edges = np.concatenate(([0], npArr[:,0] + npArr[:,2])) return np.array([Decimal(str(i)) for i in edges])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getMaskIndices(mask): """get lower and upper index of mask"""
return [ list(mask).index(True), len(mask) - 1 - list(mask)[::-1].index(True) ]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getCocktailSum(e0, e1, eCocktail, uCocktail): """get the cocktail sum for a given data bin range"""
# get mask and according indices mask = (eCocktail >= e0) & (eCocktail <= e1) # data bin range wider than single cocktail bin if np.any(mask): idx = getMaskIndices(mask) # determine coinciding flags eCl, eCu = eCocktail[idx[0]], eCocktail[idx[1]] not_coinc_low, not_coinc_upp = (eCl != e0), (eCu != e1) # get cocktail sum in data bin (always w/o last bin) uCocktailSum = fsum(uCocktail[mask[:-1]][:-1]) logging.debug(' sum: {}'.format(uCocktailSum)) # get correction for non-coinciding edges if not_coinc_low: eCl_bw = eCl - eCocktail[idx[0]-1] corr_low = (eCl - e0) / eCl_bw abs_corr_low = float(corr_low) * uCocktail[idx[0]-1] uCocktailSum += abs_corr_low logging.debug((' low: %g == %g -> %g (%g) -> %g -> {} -> {}' % ( e0, eCl, eCl - e0, eCl_bw, corr_low )).format(abs_corr_low, uCocktailSum)) if not_coinc_upp: if idx[1]+1 < len(eCocktail): eCu_bw = eCocktail[idx[1]+1] - eCu corr_upp = (e1 - eCu) / eCu_bw abs_corr_upp = float(corr_upp) * uCocktail[idx[1]] else:# catch last index (quick fix!) abs_corr_upp = eCu_bw = corr_upp = 0 uCocktailSum += abs_corr_upp logging.debug((' upp: %g == %g -> %g (%g) -> %g -> {} -> {}' % ( e1, eCu, e1 - eCu, eCu_bw, corr_upp )).format(abs_corr_upp, uCocktailSum)) else: mask = (eCocktail >= e0) idx = getMaskIndices(mask) # only use first index # catch if already at last index if idx[0] == idx[1] and idx[0] == len(eCocktail)-1: corr = (e1 - e0) / (eCocktail[idx[0]] - eCocktail[idx[0]-1]) uCocktailSum = float(corr) * uCocktail[idx[0]-1] else: # default case corr = (e1 - e0) / (eCocktail[idx[0]+1] - eCocktail[idx[0]]) uCocktailSum = float(corr) * uCocktail[idx[0]] logging.debug(' sum: {}'.format(uCocktailSum)) return uCocktailSum
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def break_sandbox(): """Patches sandbox to add match-all regex to sandbox whitelist """
# Context manager whose __exit__ swallows the sandbox's import veto and,
# by walking the raised traceback, reaches into the sandbox object to
# whitelist everything.
class EvilCM(object):
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc, tb):
        import re
        # Three tb_next hops land in the sandbox frame that raised; its
        # local 'self' is the sandbox instance. Fragile: depends on the
        # sandbox's exact call depth -- TODO confirm against the sandbox
        # implementation in use.
        tb.tb_next.tb_next.tb_next.tb_frame.f_locals[
            'self']._enabled_regexes.append(re.compile('.*'))
        # Returning True suppresses the sandbox's exception.
        return True
try:
    import sqlite3  # noqa
except ImportError:
    # Import was blocked: retry inside the patching context so the failing
    # import itself hands us a traceback into the sandbox internals.
    with EvilCM():
        __import__('sqlite3')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def execute(self, *args, **kwargs): """Executes the action."""
timeout = kwargs.pop("timeout", -1) run_async = kwargs.pop("run_async", False) self._is_running = True result = None if self._action_lock.acquire(False): self._state = ACTION_PENDING self._action_event = threading.Event() self.spine.send_command("kervi_action_" + self._action_id, *args, **kwargs) if not run_async: if self._action_event.wait(timeout): self._state = ACTION_FAILED raise TimeoutError("Timeout in call to action: " + self._action_id) self._action_event = None result = self._last_result else: self._action_lock.release() else: if not self._action_lock.acquire(True, timeout): return None self._action_lock.release() return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_interrupt(self, method=None, **kwargs): """ Decorator that turns a function or controller method into an action interrupt. """
def action_wrap(f): action_id = kwargs.get("action_id", f.__name__) name = kwargs.get("name", action_id) if inspect.ismethod(f): # not "." in f.__qualname__: self._interrupt = _ActionInterrupt(f) self._ui_parameters["interrupt_enabled"] = True return self._interrupt else: qual_name = getattr(f, "__qualname__", None) owner_class = kwargs.get("controller_class", None) if owner_class: qual_name = owner_class + "." + f.__name__ if qual_name: Actions.add_unbound_interrupt(qual_name, self) else: print("using upython? if yes you need to pass the name of the controller class via the controller_class parameter.") return f if method: return action_wrap(method) else: return action_wrap
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _freezer_lookup(freezer_string): """ Translate a string that may be a freezer name into the internal freezer constant :param freezer_string :return: """
sanitized = freezer_string.lower().strip() for freezer in FREEZER.ALL: freezer_instance = freezer() freezer_name = six.text_type(freezer_instance) if freezer_name == six.text_type(sanitized): return freezer else: if sanitized != freezer_string: raise ValueError(u"Unsupported freezer type \"{0}\". (Sanitized to \"{1}\")".format(freezer_string, sanitized)) else: raise ValueError(u"Unsupported freezer type \"{0}\".".format(freezer_string))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def resolve_freezer(freezer): """ Locate the appropriate freezer given FREEZER or string input from the programmer. :param freezer: FREEZER constant or string for the freezer that is requested. (None = FREEZER.DEFAULT) :return: """
# Set default freezer if there was none if not freezer: return _Default() # Allow character based lookups as well if isinstance(freezer, six.string_types): cls = _freezer_lookup(freezer) return cls() # Allow plain class definition lookups (we instantiate the class) if freezer.__class__ == type.__class__: return freezer() # Warn when a custom freezer implementation is used. if freezer not in FREEZER.ALL: warn(u"Using custom freezer implelmentation: {0}".format(freezer)) return freezer
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _split_packages(cls, include_packages): """ Split an iterable of packages into packages that need to be passed through, and those that need to have their disk location resolved. Some modules don't have a '__file__' attribute. AFAIK these aren't packages, so they can just be passed through to the includes as-is :return: 2-tuple of a list of the pass-through includes and the package_root_paths """
passthrough_includes = set([ six.text_type(package.__name__) for package in include_packages if not hasattr(package, '__file__') ]) package_file_paths = dict([ (six.text_type(os.path.abspath(package.__file__)), six.text_type(package.__name__)) for package in include_packages if hasattr(package, '__file__') ]) return passthrough_includes, package_file_paths
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_json_qry(qry_str): """ Parses a json query string into its parts args: qry_str: query string params: variables passed into the string """
def param_analyzer(param_list): rtn_list = [] for param in param_list: parts = param.strip().split("=") try: rtn_list.append(\ JsonQryProcessor[parts[0].strip().lower()](parts[1])) except IndexError: rtn_list.append(\ JsonQryProcessor[parts[0].strip().lower()]()) return rtn_list def part_analyzer(part, idx): nonlocal dallor, asterick, question_mark if part == "$": dallor = idx return part elif part == "*": asterick = idx return part elif part == "?": question_mark = idx return part elif part.startswith("="): return part return cssparse(part)[0] # pdb.set_trace() main_parts = qry_str.split("|") or_parts = main_parts.pop(0).strip() params = param_analyzer(main_parts) rtn_list = [] for or_part in [item.strip() for item in or_parts.split(",") if item.strip()]: dallor, asterick, question_mark = None, None, None dot_parts = or_part.split(".") rtn_list.append(([part_analyzer(part, i) \ for i, part in enumerate(dot_parts)], dallor, asterick, question_mark)) return {"qry_parts": rtn_list, "params": params}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def json_qry(dataset, qry_str, params={}): """ Takes a json query string and returns the results args: dataset: RdfDataset to query against qry_str: query string params: dictionary of params """
# if qry_str.startswith("$.bf_itemOf[rdf_type=bf_Print].='print',\n"): # pdb.set_trace() if not '$' in qry_str: qry_str = ".".join(['$', qry_str.strip()]) dallor_val = params.get("$", dataset) if isinstance(dallor_val, rdflib.URIRef): dallor_val = Uri(dallor_val) if qry_str.strip() == '$': return [dallor_val] parsed_qry = parse_json_qry(qry_str) qry_parts = parsed_qry['qry_parts'] post_actions = parsed_qry['params'] # print(qry_parts) rtn_list = UniqueList() if params.get('dataset'): dataset = params['dataset'] for or_part in qry_parts: if or_part[1] == 0: if isinstance(dallor_val, dict): result = dallor_val else: try: result = dataset[dallor_val] except KeyError: try: result = dataset[Uri(dallor_val)] except KeyError: try: result = dataset[BlankNode(dallor_val)] except KeyError: continue forward = True for part in or_part[0][1:]: if part == "*": forward = not forward else: if forward: result = get_json_qry_item(result, part) else: result = get_reverse_json_qry_item(result, part, False) else: result = dataset parts = or_part[0].copy() parts.reverse() forward = False for part in parts[1:]: if part == "*": forward = not forward else: if forward: result = get_json_qry_item(result, part) else: result = get_reverse_json_qry_item(result, part, False, dallor_val) rtn_list += result for action in post_actions: rtn_list = action(rtn_list) return rtn_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readMixedArray(self): """ Read mixed array. @rtype: L{pyamf.MixedArray} """
# TODO: something with the length/strict self.stream.read_ulong() # length obj = pyamf.MixedArray() self.context.addObject(obj) attrs = self.readObjectAttributes(obj) for key in attrs.keys(): try: key = int(key) except ValueError: pass obj[key] = attrs[key] return obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readTypedObject(self): """ Reads an aliased ActionScript object from the stream and attempts to 'cast' it into a python class. @see: L{pyamf.register_class} """
class_alias = self.readString() try: alias = self.context.getClassAlias(class_alias) except pyamf.UnknownClassAlias: if self.strict: raise alias = pyamf.TypedObjectClassAlias(class_alias) obj = alias.createInstance(codec=self) self.context.addObject(obj) attrs = self.readObjectAttributes(obj) alias.applyAttributes(obj, attrs, codec=self) return obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readObject(self): """ Reads an anonymous object from the data stream. @rtype: L{ASObject<pyamf.ASObject>} """
obj = pyamf.ASObject() self.context.addObject(obj) obj.update(self.readObjectAttributes(obj)) return obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readReference(self): """ Reads a reference from the data stream. @raise pyamf.ReferenceError: Unknown reference. """
idx = self.stream.read_ushort() o = self.context.getObject(idx) if o is None: raise pyamf.ReferenceError('Unknown reference %d' % (idx,)) return o
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readDate(self): """ Reads a UTC date from the data stream. Client and servers are responsible for applying their own timezones. Date: C{0x0B T7 T6} .. C{T0 Z1 Z2 T7} to C{T0} form a 64 bit Big Endian number that specifies the number of nanoseconds that have passed since 1/1/1970 0:00 to the specified time. This format is UTC 1970. C{Z1} and C{Z0} for a 16 bit Big Endian number indicating the indicated time's timezone in minutes. """
ms = self.stream.read_double() / 1000.0 self.stream.read_short() # tz # Timezones are ignored d = util.get_datetime(ms) if self.timezone_offset: d = d + self.timezone_offset self.context.addObject(d) return d
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readLongString(self): """ Read UTF8 string. """
l = self.stream.read_ulong() bytes = self.stream.read(l) return self.context.getStringForBytes(bytes)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readXML(self): """ Read XML. """
data = self.readLongString() root = xml.fromstring(data) self.context.addObject(root) return root
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeList(self, a): """ Write array to the stream. @param a: The array data to be encoded to the AMF0 data stream. """
if self.writeReference(a) != -1: return self.context.addObject(a) self.writeType(TYPE_ARRAY) self.stream.write_ulong(len(a)) for data in a: self.writeElement(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeNumber(self, n): """ Write number to the data stream . @param n: The number data to be encoded to the AMF0 data stream. """
self.writeType(TYPE_NUMBER) self.stream.write_double(float(n))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeBoolean(self, b): """ Write boolean to the data stream. @param b: The boolean data to be encoded to the AMF0 data stream. """
# A boolean is the type marker followed by a single byte: 1 true, 0 false.
self.writeType(TYPE_BOOL)
self.stream.write_uchar(1 if b else 0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeBytes(self, s): """ Write a string of bytes to the data stream. """
# Payloads longer than 0xffff bytes take the long-string marker with a
# 32-bit length prefix; shorter ones take a 16-bit prefix.
length = len(s)
if length > 0xffff:
    self.writeType(TYPE_LONGSTRING)
    self.stream.write_ulong(length)
else:
    self.writeType(TYPE_STRING)
    self.stream.write_ushort(length)
self.stream.write(s)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeString(self, u): """ Write a unicode to the data stream. """
s = self.context.getBytesForString(u) self.writeBytes(s)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeReference(self, o): """ Write reference to the data stream. @param o: The reference data to be encoded to the AMF0 datastream. """
idx = self.context.getObjectReference(o) if idx == -1 or idx > 65535: return -1 self.writeType(TYPE_REFERENCE) self.stream.write_ushort(idx) return idx
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeMixedArray(self, o): """ Write mixed array to the data stream. @type o: L{pyamf.MixedArray} """
if self.writeReference(o) != -1: return self.context.addObject(o) self.writeType(TYPE_MIXEDARRAY) # TODO: optimise this # work out the highest integer index try: # list comprehensions to save the day max_index = max([y[0] for y in o.items() if isinstance(y[0], (int, long))]) if max_index < 0: max_index = 0 except ValueError: max_index = 0 self.stream.write_ulong(max_index) self._writeDict(o) self._writeEndObject()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeObject(self, o): """ Write a Python object to the stream. @param o: The object data to be encoded to the AMF0 data stream. """
if self.writeReference(o) != -1: return self.context.addObject(o) alias = self.context.getClassAlias(o.__class__) alias.compile() if alias.amf3: self.writeAMF3(o) return if alias.anonymous: self.writeType(TYPE_OBJECT) else: self.writeType(TYPE_TYPEDOBJECT) self.serialiseString(alias.alias) attrs = alias.getEncodableAttributes(o, codec=self) if alias.static_attrs and attrs: for key in alias.static_attrs: value = attrs.pop(key) self.serialiseString(key) self.writeElement(value) if attrs: self._writeDict(attrs) self._writeEndObject()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeDate(self, d): """ Writes a date to the data stream. @type d: Instance of C{datetime.datetime} @param d: The date to be encoded to the AMF0 data stream. """
if isinstance(d, datetime.time): raise pyamf.EncodeError('A datetime.time instance was found but ' 'AMF0 has no way to encode time objects. Please use ' 'datetime.datetime instead (got:%r)' % (d,)) # According to the Red5 implementation of AMF0, dates references are # created, but not used. if self.timezone_offset is not None: d -= self.timezone_offset secs = util.get_timestamp(d) tz = 0 self.writeType(TYPE_DATE) self.stream.write_double(secs * 1000.0) self.stream.write_short(tz)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def writeXML(self, e): """ Writes an XML instance. """
self.writeType(TYPE_XML) data = xml.tostring(e) if isinstance(data, unicode): data = data.encode('utf-8') self.stream.write_ulong(len(data)) self.stream.write(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def des(clas, keyblob, valblob):
    "deserialize. translate publish message, basically"
    # Deliberately disabled: tuple-based messages break __eq__ downstream.
    raise NotImplementedError("don't use tuples, it breaks __eq__. this function probably isn't used in real life")
    # --- unreachable reference implementation below the raise ---
    decoded = msgpack.loads(keyblob)
    (namespace, version), keyvals = decoded[:2], decoded[2:]
    if namespace != clas.NAMESPACE or version != clas.VERSION:
        raise TypeError('des_mismatch got %s want %s' % ((namespace, version), (clas.NAMESPACE, clas.VERSION)))
    vals = tuple(msgpack.loads(valblob))
    # NOTE: zip(...)[1] is Python 2 only; zip returns an iterator on Python 3.
    clas.type_check(zip(*clas.KEY)[1], keyvals, 'KEY')
    clas.type_check(zip(*clas.VALUE)[1], vals, 'VALUE')
    return clas(tuple(keyvals), vals)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def wait(self): "wait for a message, respecting timeout" data=self.getcon().recv(256) # this can raise socket.timeout if not data: raise PubsubDisco if self.reset: self.reset=False # i.e. ack it. reset is used to tell the wait-thread there was a reconnect (though it's plausible that this never happens) raise PubsubDisco self.buf+=data msg,self.buf=complete_message(self.buf) return msg
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_access_token(self): """ get a valid access token """
# Return the cached token, refreshing it first when expired.
# NOTE(review): the bare get_access_token(...) call resolves to a
# module-level helper that shadows this method's name -- confirm.
if self.is_access_token_expired():
    if is_debug_enabled():
        debug('requesting new access_token')
    token = get_access_token(username=self.username,
                             password=self.password,
                             client_id=self.client_id,
                             client_secret=self.client_secret,
                             app_url=self.app_url)
    # lets make sure to refresh before we're halfway to expiring
    self.expires_at = time.time() + token['expires_in']/2
    self.access_token = token['access_token']
return self.access_token
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_lines(self, line): """Find all lines matching a given line."""
for other_line in self.lines: if other_line.match(line): yield other_line
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove(self, line): """Delete all lines matching the given line."""
nb = 0 for block in self.blocks: nb += block.remove(line) return nb
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add(self, key, value): """Add a new value for a key. This differs from __setitem__ in adding a new value instead of updating the list of values, thus avoiding the need to fetch the previous list of values. """
self.configfile.add(self.name, key, value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_section(self, name, create=True): """Retrieve a section by name. Create it on first access."""
try: return self.sections[name] except KeyError: if not create: raise section = Section(name) self.sections[name] = section return section
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_line(self, section, line): """Retrieve all lines compatible with a given line."""
try: section = self._get_section(section, create=False) except KeyError: return [] return section.find_lines(line)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def iter_lines(self, section): """Iterate over all lines in a section. This will skip 'header' lines. """
try: section = self._get_section(section, create=False) except KeyError: return for block in section: for line in block: yield line
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def enter_block(self, name): """Mark 'entering a block'."""
section = self._get_section(name) block = self.current_block = section.new_block() self.blocks.append(block) return block
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def insert_line(self, line): """Insert a new line"""
if self.current_block is not None: self.current_block.append(line) else: self.header.append(line)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def handle_line(self, line): """Read one line."""
if line.kind == ConfigLine.KIND_HEADER: self.enter_block(line.header) else: self.insert_line(line)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, fileobj, name_hint='', parser=None): """Fill from a file-like object."""
self.current_block = None # Reset current block parser = parser or Parser() for line in parser.parse(fileobj, name_hint=name_hint): self.handle_line(line)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_line(self, section, line): """Remove all instances of a line. Returns: int: the number of lines removed """
try: s = self._get_section(section, create=False) except KeyError: # No such section, skip. return 0 return s.remove(line)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_or_update(self, section, key, value): """Update the key or, if no previous value existed, add it. Returns: int: Number of updated lines. """
updates = self.update(section, key, value) if updates == 0: self.add(section, key, value) return updates
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cache_set(key, value, timeout=None, refreshed=False): """ Wrapper for ``cache.set``. Stores the cache entry packed with the desired cache expiry time. When the entry is retrieved from cache, the packed expiry time is also checked, and if past, the stale cache entry is stored again with an expiry that has ``CACHE_SET_DELAY_SECONDS`` added to it. In this case the entry is not returned, so that a cache miss occurs and the entry should be set by the caller, but all other callers will still get the stale entry, so no real cache misses ever occur. """
if timeout is None: timeout = settings.CACHE_MIDDLEWARE_SECONDS refresh_time = timeout + time() real_timeout = timeout + settings.CACHE_SET_DELAY_SECONDS packed = (value, refresh_time, refreshed) return cache.set(_hashed_key(key), packed, real_timeout)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cache_installed(): """ Returns ``True`` if a cache backend is configured, and the cache middleware classes or subclasses thereof are present. This will be evaluated once per run, and then cached. """
has_key = bool(getattr(settings, "NEVERCACHE_KEY", "")) def flatten(seqs): return (item for seq in seqs for item in seq) middleware_classes = map(import_string, get_middleware_setting()) middleware_ancestors = set(flatten(map(getmro, middleware_classes))) yacms_cache_middleware_classes = { import_string("yacms.core.middleware.UpdateCacheMiddleware"), import_string("yacms.core.middleware.FetchFromCacheMiddleware"), } return (has_key and settings.CACHES and not settings.TESTING and yacms_cache_middleware_classes.issubset(middleware_ancestors))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cache_key_prefix(request): """ Cache key for yacms's cache middleware. Adds the current device and site ID. """
cache_key = "%s.%s.%s." % ( settings.CACHE_MIDDLEWARE_KEY_PREFIX, current_site_id(), device_from_request(request) or "default", ) return _i18n_cache_key_suffix(request, cache_key)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_cache_bypass(url): """ Adds the current time to the querystring of the URL to force a cache reload. Used for when a form post redirects back to a page that should display updated content, such as new comments or ratings. """
if not cache_installed(): return url hash_str = "" if "#" in url: url, hash_str = url.split("#", 1) hash_str = "#" + hash_str url += "?" if "?" not in url else "&" return url + "t=" + str(time()).replace(".", "") + hash_str
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _dbus_get_object(bus_name, object_name): """ Fetches DBUS proxy object given the specified parameters. `bus_name` Name of the bus interface. `object_name` Object path related to the interface. Returns object or ``None``. """
try: bus = dbus.SessionBus() obj = bus.get_object(bus_name, object_name) return obj except (NameError, dbus.exceptions.DBusException): return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _dbus_get_interface(bus_name, object_name, interface_name): """ Fetches DBUS interface proxy object given the specified parameters. `bus_name` Name of the bus interface. `object_name` Object path related to the interface. `interface_name` Name of the interface. Returns object or ``None``. """
try: obj = _dbus_get_object(bus_name, object_name) if not obj: raise NameError return dbus.Interface(obj, interface_name) except (NameError, dbus.exceptions.DBusException): return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _pidgin_status(status, message): """ Updates status and message for Pidgin IM application. `status` Status type. `message` Status message. """
try: iface = _dbus_get_interface('im.pidgin.purple.PurpleService', '/im/pidgin/purple/PurpleObject', 'im.pidgin.purple.PurpleInterface') if iface: # create new transient status code = PIDGIN_CODE_MAP[status] saved_status = iface.PurpleSavedstatusNew('', code) # set the message, if provided iface.PurpleSavedstatusSetMessage(saved_status, message) # activate status iface.PurpleSavedstatusActivate(saved_status) except dbus.exceptions.DBusException: pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _empathy_status(status, message): """ Updates status and message for Empathy IM application. `status` Status type. `message` Status message. """
ACCT_IFACE = 'org.freedesktop.Telepathy.Account' DBUS_PROP_IFACE = 'org.freedesktop.DBus.Properties' ACCT_MAN_IFACE = 'org.freedesktop.Telepathy.AccountManager' ACCT_MAN_PATH = '/org/freedesktop/Telepathy/AccountManager' SP_IFACE = ('org.freedesktop.Telepathy.' 'Connection.Interface.SimplePresence') # fetch main account manager interface am_iface = _dbus_get_interface(ACCT_MAN_IFACE, ACCT_MAN_PATH, DBUS_PROP_IFACE) if am_iface: account_paths = am_iface.Get(ACCT_MAN_IFACE, 'ValidAccounts') for account_path in account_paths: try: # fetch account interface account = _dbus_get_object(ACCT_MAN_IFACE, account_path) # skip disconnected, disabled, etc. if account.Get(ACCT_IFACE, 'ConnectionStatus') != 0: continue # fetch simple presence interface for account connection conn_path = account.Get(ACCT_IFACE, 'Connection') conn_iface = conn_path.replace("/", ".")[1:] sp_iface = _dbus_get_interface(conn_iface, conn_path, SP_IFACE) except dbus.exceptions.DBusException: continue # set status and message for code in EMPATHY_CODE_MAP[status]: try: sp_iface.SetPresence(code, message) except dbus.exceptions.DBusException: pass else: break
def _linux_skype_status(status, message):
    """ Updates status and message for Skype IM application on Linux.

        `status`
            Status type (a key of ``SKYPE_CODE_MAP``).
        `message`
            Status (mood) message.

        Failures to reach Skype -- including the user denying the
        authorization prompt -- are silently ignored.
        """
    try:
        iface = _dbus_get_interface('com.Skype.API', '/com/Skype',
                                    'com.Skype.API')
        if iface:
            # authenticate; Skype prompts the user, who may refuse
            if iface.Invoke('NAME focus') != 'OK':
                msg = 'User denied authorization'
                # BUGFIX: was dbus.exceptions.DbusException (wrong
                # capitalization) -- that attribute does not exist, so the
                # raise produced an AttributeError which escaped the
                # DBusException handler below instead of being swallowed.
                raise dbus.exceptions.DBusException(msg)
            iface.Invoke('PROTOCOL 5')

            # set status
            iface.Invoke('SET USERSTATUS {0}'.format(SKYPE_CODE_MAP[status]))

            # set the message, if provided
            iface.Invoke('SET PROFILE MOOD_TEXT {0}'
                         .format(message))
    except dbus.exceptions.DBusException:
        pass
def _set_status(self, status, message=''):
    """ Updates the status and message on all supported IM apps.

        `status`
            Status type (See ``VALID_STATUSES``).
        `message`
            Status message, or ``:<id>`` to use a stored away message.
        """
    text = message.strip()

    # a leading ':' means "look up the stored message with this id"
    if text.startswith(':'):
        text = self.messages.get(text[1:], '')

    encoded = text.encode('utf-8', 'replace')

    # attempt to set status for each supported application
    for set_status in self.set_status_funcs:
        set_status(status, encoded)
def parse_option(self, option, block_name, *values):
    """ Parse status, end_status, timer_status and status_msg options.

        Raises ``ValueError`` for an unknown status value and
        ``TypeError`` for a wrong number of values; any other option
        is ignored.
        """
    if option.endswith('status'):
        # first value is the status type, optional second is a message
        status = values[0]
        if status not in self.VALID_STATUSES:
            raise ValueError(u'Invalid IM status "{0}"'.format(status))
        if len(values) > 2:
            raise TypeError
        # bare 'status' is shorthand for 'start_status'
        if option == 'status':
            option = 'start_' + option
        prefix = option.split('_', 1)[0]
        self.statuses[prefix] = values[:2]
    elif option == 'status_msg':
        if len(values) != 2:
            raise TypeError
        msg_name, msg_text = values
        self.messages[msg_name] = msg_text
def GetNumCoresOnHosts(hosts, private_key):
    """ Returns list of the number of cores for each host requested in
    hosts.

    Gathers Ansible facts from each contacted host; a host whose
    reported core count is not an integer contributes 0. Hosts that
    could not be contacted are omitted from the result.
    """
    facts = runner.Runner(host_list=hosts, private_key=private_key,
                          module_name='setup').run()
    cores_per_host = []
    for props in facts['contacted'].itervalues():
        reported = props['ansible_facts']['ansible_processor_cores']
        try:
            cores = int(reported)
        except ValueError:
            # fall back to 0 when the fact is malformed
            cores = 0
        cores_per_host.append(cores)
    return cores_per_host
def RunPlaybookOnHosts(playbook_path, hosts, private_key, extra_vars=None):
    """ Runs the playbook and returns True if it completes successfully
    on all hosts.

    Connects to every host in `hosts` as remote user 'ubuntu' using
    `private_key` and executes the Ansible playbook at `playbook_path`.
    `extra_vars`, when given, is passed through to the playbook.
    Contact failures are printed and task failures are logged; either
    causes a False return. Raises RuntimeError when `hosts` resolves to
    an empty inventory.
    """
    inventory = ansible_inventory.Inventory(hosts)
    if not inventory.list_hosts():
        raise RuntimeError("Host list is empty.")
    stats = callbacks.AggregateStats()
    verbose = 0
    playbook_cb = ansible.callbacks.PlaybookCallbacks(verbose=verbose)
    runner_cb = ansible.callbacks.PlaybookRunnerCallbacks(stats,
                                                          verbose=verbose)
    pb = playbook.PlayBook(playbook=playbook_path,
                           host_list=hosts,
                           remote_user='ubuntu',
                           private_key_file=None,
                           private_key=private_key,
                           stats=stats,
                           callbacks=playbook_cb,
                           runner_callbacks=runner_cb,
                           extra_vars=extra_vars)
    results = pb.run()
    # Check if all hosts completed playbook without error
    success = True
    # 'dark' collects hosts that could never be contacted at all.
    if 'dark' in results:
        if len(results['dark']) > 0:
            print "Contact failures:"
            for host, reason in results['dark'].iteritems():
                print " %s (%s)" % (host, reason['msg'])
            success = False
    # Remaining keys are per-host result summaries; skip the 'dark'
    # entry handled above and flag any host that reported failures.
    for host, status in results.iteritems():
        if host == 'dark':
            continue
        failures = status['failures']
        if failures:
            logging.info(
                '%s %s' % (host, status))
            success = False
    return success
def RunPlaybookOnHost(playbook_path, host, private_key, extra_vars=None):
    """ Runs the playbook and returns True if it completes successfully
    on a single host.

    Convenience wrapper around ``RunPlaybookOnHosts`` for one `host`.
    """
    single_host = [host]
    return RunPlaybookOnHosts(playbook_path, single_host, private_key,
                              extra_vars)