text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def status(self, status_id, raise_exception_on_failure=False): """Return the status of the generation job."""
def status(self, status_id, raise_exception_on_failure=False):
    """Return the status of the generation job.

    :param status_id: identifier of the generation job to query
    :param raise_exception_on_failure: raise DocumentStatusFailure on non-200
    :return: parsed status dict on HTTP 200 (with 'download_key' added when
             completed), otherwise the raw response object
    """
    params = {"output": "json", "user_credentials": self.api_key}
    resp = requests.get(
        "%sstatus/%s" % (self._url, status_id), params=params, timeout=self._timeout
    )
    if raise_exception_on_failure and resp.status_code != 200:
        raise DocumentStatusFailure(resp.content, resp.status_code)
    if resp.status_code != 200:
        return resp
    payload = json.loads(resp.content)
    if payload["status"] == "completed":
        # Expose the download key parsed out of the download URL.
        payload["download_key"] = _get_download_key(payload["download_url"])
    return payload
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def download(self, download_key, raise_exception_on_failure=False): """Download the file represented by the download_key."""
def download(self, download_key, raise_exception_on_failure=False):
    """Download the file represented by the download_key.

    :param download_key: key previously returned by a completed status call
    :param raise_exception_on_failure: raise DocumentDownloadFailure on non-200
    :return: the raw requests response
    """
    params = {"output": "json", "user_credentials": self.api_key}
    url = "%sdownload/%s" % (self._url, download_key)
    resp = requests.get(url, params=params, timeout=self._timeout)
    if raise_exception_on_failure and resp.status_code != 200:
        raise DocumentDownloadFailure(resp.content, resp.status_code)
    return resp
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_parsing_plan_for_multifile_children(self, obj_on_fs: PersistedObject, desired_type: Type[Any], logger: Logger) -> Dict[str, Any]: """ Simply inspects the required type to find the base type expected for items of the collection, and relies on the ParserFinder to find the parsing plan :param obj_on_fs: :param desired_type: :param logger: :return: """
# nb of file children n_children = len(obj_on_fs.get_multifile_children()) # first extract base collection type subtypes, key_type = _extract_collection_base_type(desired_type) if isinstance(subtypes, tuple): # -- check the tuple length if n_children != len(subtypes): raise FolderAndFilesStructureError.create_for_multifile_tuple(obj_on_fs, len(subtypes), len(obj_on_fs.get_multifile_children())) else: # -- repeat the subtype n times subtypes = [subtypes] * n_children # -- for each child create a plan with the appropriate parser children_plan = OrderedDict() # use sorting for reproducible results in case of multiple errors for (child_name, child_fileobject), child_typ in zip(sorted(obj_on_fs.get_multifile_children().items()), subtypes): # -- use the parserfinder to find the plan t, child_parser = self.parser_finder.build_parser_for_fileobject_and_desiredtype(child_fileobject, child_typ, logger) children_plan[child_name] = child_parser.create_parsing_plan(t, child_fileobject, logger, _main_call=False) return children_plan
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def plot_stat_summary(df, fig=None):
    '''
    Plot stats grouped by test capacitor load _and_ frequency.

    In other words, we calculate the mean of all samples in the data
    frame for each test capacitance and frequency pairing, plotting
    the following stats:

     - Root mean squared error
     - Coefficient of variation
     - Bias

    ## [Coefficient of variation][1] ##

    > In probability theory and statistics, the coefficient of
    > variation (CV) is a normalized measure of dispersion of a
    > probability distribution or frequency distribution. It is defined
    > as the ratio of the standard deviation to the mean.

    [1]: http://en.wikipedia.org/wiki/Coefficient_of_variation

    :param df: samples data frame to aggregate
    :param fig: optional matplotlib figure to draw into (a new one is
                created when omitted)
    '''
    if fig is None:
        fig = plt.figure(figsize=(8, 8))

    # Define a subplot layout, 3 rows, 2 columns
    grid = GridSpec(3, 2)
    stats = calculate_stats(df, groupby=['test_capacitor', 'frequency']).dropna()

    for i, stat in enumerate(['RMSE %', 'cv %', 'bias %']):
        axis = fig.add_subplot(grid[i, 0])
        axis.set_title(stat)
        # Plot a colormap to show how the statistical value changes
        # according to frequency/capacitance pairs.
        plot_colormap(stats, stat, axis=axis, fig=fig)

        axis = fig.add_subplot(grid[i, 1])
        axis.set_title(stat)
        # Plot a histogram to show the distribution of statistical
        # values across all frequency/capacitance pairs.
        try:
            axis.hist(stats[stat].values, bins=50)
        except AttributeError:
            # FIX: was a Python-2 `print` statement, a SyntaxError under Python 3.
            print(stats[stat].describe())

    fig.tight_layout()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_manifest(raw_manifest, namespace=None, **kwargs): """ wrapper method which generates the manifest from various sources """
def load_manifest(raw_manifest, namespace=None, **kwargs):
    """ wrapper method which generates the manifest from various sources

    :param raw_manifest: an already-built RawConfigParser, or any source
                         understood by _load_manifest_interpret_source
    :param namespace: optional namespace to attach to the Manifest
    :return: a Manifest instance
    """
    # An already-parsed configparser can be wrapped directly.
    if isinstance(raw_manifest, configparser.RawConfigParser):
        return Manifest(raw_manifest)
    parser = create_configparser()
    if not parser.has_section('config'):
        parser.add_section('config')
    _load_manifest_interpret_source(parser, raw_manifest, **kwargs)
    return Manifest(parser, namespace=namespace)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _load_manifest_from_url(manifest, url, verify_certificate=True, username=None, password=None): """ load a url body into a manifest """
def _load_manifest_from_url(manifest, url, verify_certificate=True, username=None, password=None):
    """ load a url body into a manifest

    :param manifest: configparser to populate
    :param url: url to fetch the manifest body from
    :param verify_certificate: verify TLS certificates on the request
    :param username: optional basic-auth username
    :param password: optional basic-auth password
    :raises ManifestException: when the request fails
    """
    try:
        if username and password:
            body = lib.authenticated_get(username, password, url,
                                         verify=verify_certificate).decode("utf-8")
        else:
            body = lib.cleaned_request('get', url, verify=verify_certificate).text
        # FIX: read_file replaces readfp, which was deprecated since Python 3.2
        # and removed in Python 3.12.
        manifest.read_file(StringIO(body))
    except requests.exceptions.RequestException:
        logger.debug("", exc_info=True)
        error_message = sys.exc_info()[1]
        raise ManifestException("There was an error retrieving {0}!\n {1}".format(url, str(error_message)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _load_manifest_from_file(manifest, path): """ load manifest from file """
def _load_manifest_from_file(manifest, path):
    """ load manifest from file

    :param manifest: configparser to populate
    :param path: filesystem path of the manifest (~ is expanded)
    :raises ManifestException: when the file does not exist
    """
    full_path = os.path.abspath(os.path.expanduser(path))
    if not os.path.exists(full_path):
        raise ManifestException("Manifest does not exist at {0}!".format(full_path))
    manifest.read(full_path)
    # Record where the manifest came from, unless already recorded.
    if not manifest.has_option('config', 'source'):
        manifest.set('config', 'source', str(full_path))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def formula_sections(self): """ Return all sections related to a formula, re-ordered according to the "depends" section. """
def formula_sections(self):
    """
    Return all sections related to a formula, re-ordered according to
    the "depends" section.
    """
    # When a dependency tree exists, its computed order wins.
    if self.dtree is not None:
        return self.dtree.order
    # Otherwise fall back to manifest order, minus the reserved section.
    return [section for section in self.manifest.sections() if section != "config"]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_affirmative(self, section, option): """ Return true if the section option combo exists and it is set to a truthy value. """
def is_affirmative(self, section, option):
    """ Return true if the section option combo exists and it is set
    to a truthy value. """
    if not self.has_option(section, option):
        return False
    return lib.is_affirmative(self.get(section, option))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write(self, file_handle): """ write the current state to a file manifest """
def write(self, file_handle):
    """ write the current state to a file manifest

    :param file_handle: open, writable file-like object
    """
    # Persist the current input values into the config section first.
    for option, value in self.inputs.write_values().items():
        self.set('config', option, value)
    self.set('config', 'namespace', self.namespace)
    self.manifest.write(file_handle)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_context_dict(self): """ return a context dict of the desired state """
def get_context_dict(self):
    """ return a context dict of the desired state

    Keys are "<section>:<option>"; input values override the config
    section; every entry also gets a regex-escaped "<key>|escaped" twin.
    """
    ctx = {}
    for section in self.sections():
        for option, value in self.manifest.items(section):
            ctx["%s:%s" % (section, option)] = value
    # Input values take precedence over raw config entries.
    for option, value in self.inputs.values().items():
        ctx["config:{0}".format(option)] = value
    ctx.update(self.additional_context_variables.items())
    escaped_pairs = [("%s|escaped" % k, re.escape(str(v) or "")) for k, v in ctx.items()]
    ctx.update(dict(escaped_pairs))
    return ctx
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, section, key, default=MANIFEST_NULL_KEY): """ Returns the value if it exist, or default if default is set """
def get(self, section, key, default=MANIFEST_NULL_KEY):
    """ Returns the value if it exist, or default if default is set """
    # Only fall back to the default when a real default was supplied;
    # otherwise let the underlying parser raise its usual error.
    exists = self.manifest.has_option(section, key)
    if not exists and default is not MANIFEST_NULL_KEY:
        return default
    return self.manifest.get(section, key)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __parse_namespace(self): """ Parse the namespace from various sources """
def __parse_namespace(self):
    """ Parse the namespace from various sources

    Order of precedence: explicit config option, then the namespace
    embedded in the config source URL/path, then None.
    """
    if self.manifest.has_option('config', 'namespace'):
        return self.manifest.get('config', 'namespace')
    if self.manifest.has_option('config', 'source'):
        # FIX: guard against a source the regex cannot parse; the original
        # called .groups() on None and raised AttributeError.
        match = NAMESPACE_REGEX.search(self.manifest.get('config', 'source'))
        if match:
            return match.groups()[0]
    logger.warn('Could not parse namespace implicitely')
    return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __generate_dependency_tree(self): """ Generate the dependency tree object """
def __generate_dependency_tree(self):
    """ Generate the dependency tree object

    :raises ManifestException: when the declared dependencies are invalid
                               (e.g. cyclic or missing sections)
    """
    dependency_dict = {}
    for section in self.manifest.sections():
        if section == "config":
            continue
        if self.manifest.has_option(section, 'depends'):
            # Dependencies may be separated by newlines or commas.
            raw = self.manifest.get(section, 'depends')
            dependency_dict[section] = [d.strip() for d in re.split('\n|,', raw)]
        else:
            dependency_dict[section] = []
    try:
        return DependencyTree(dependency_dict)
    except DependencyTreeException:
        dte = sys.exc_info()[1]
        raise ManifestException("Dependency tree for manifest is invalid! %s" % str(dte))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __substitute_objects(self, value, context_dict): """ recursively substitute value with the context_dict """
def __substitute_objects(self, value, context_dict):
    """ recursively substitute value with the context_dict

    Dicts are substituted value-by-value, strings are %-interpolated
    against context_dict, everything else passes through unchanged.
    """
    # FIX: isinstance is the idiomatic type check (type(x) == T rejects
    # subclasses).
    if isinstance(value, dict):
        return dict((k, self.__substitute_objects(v, context_dict)) for k, v in value.items())
    elif isinstance(value, str):
        try:
            return value % context_dict
        except KeyError:
            e = sys.exc_info()[1]
            # logger.warning replaces the deprecated logger.warn alias.
            logger.warning("Could not specialize %s! Error: %s" % (value, e))
            return value
    else:
        return value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __setup_inputs(self): """ Setup the inputs object """
def __setup_inputs(self):
    """ Setup the inputs object

    Collects input schemas from every section, then seeds their values
    from the config section.
    """
    input_object = Inputs()
    # populate input schemas
    for s in self.manifest.sections():
        if self.has_option(s, 'inputs'):
            input_object.add_inputs_from_inputstring(self.get(s, 'inputs'))
    # add in values
    for k, v in self.items('config'):
        # BUG FIX: the original tested is_input(s) — the stale section
        # variable from the loop above — instead of the config key k,
        # so config values were never matched against the input schema.
        if input_object.is_input(k):
            input_object.set_input(k, v)
    return input_object
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def should_run(self): """ Returns true if the feature should run """
def should_run(self):
    """ Returns true if the feature should run

    Runs unconditionally unless a 'systems' restriction is declared, in
    which case at least one listed system must match the host.
    """
    config = self.target or self.source
    if not config.has('systems'):
        return True
    valid_systems = [s.lower() for s in config.get('systems').split(",")]
    for system_type, param in [('is_osx', 'osx'), ('is_debian', 'debian')]:
        if param in valid_systems and getattr(system, system_type)():
            return True
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def resolve(self): """ Resolve differences between the target and the source configuration """
def resolve(self):
    """ Resolve differences between the target and the source configuration

    Carries source options over to the target when the target lacks them,
    except for options explicitly excluded from carry-over.
    """
    if not (self.source and self.target):
        return
    for key in self.source.keys():
        if key in self.dont_carry_over_options:
            continue
        if not self.target.has(key):
            self.target.set(key, self.source.get(key))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _log_error(self, message): """ Log an error for the feature """
def _log_error(self, message):
    """ Log an error for the feature

    :param message: human-readable error text (prefixed with "ERROR: ")
    """
    # Errors are keyed by (feature name, formula) so the environment can
    # aggregate them per feature instance.
    key = (self.feature_name, self.target.get('formula'))
    self.environment.log_feature_error(key, "ERROR: " + message)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def jinja_fragment_extension(tag, endtag=None, name=None, tag_only=False, allow_args=True, callblock_args=None): """Decorator to easily create a jinja extension which acts as a fragment. """
def jinja_fragment_extension(tag, endtag=None, name=None, tag_only=False, allow_args=True,
                             callblock_args=None):
    """Decorator to easily create a jinja extension which acts as a fragment.

    :param tag: tag name that opens the fragment
    :param endtag: closing tag name (defaults to "end" + tag)
    :param name: name of the generated Extension class (defaults to f.__name__)
    :param tag_only: when True the tag has no body and emits output directly
    :param allow_args: whether the tag accepts call arguments
    :param callblock_args: names exposed as block parameters inside the body
    :return: decorator producing a jinja2 Extension subclass
    """
    if endtag is None:
        endtag = "end" + tag

    def decorator(f):
        def parse(self, parser):
            # FIX: next(parser.stream) — TokenStream.next() is the old
            # Python-2-era API and is gone from modern Jinja2.
            lineno = next(parser.stream).lineno
            args = []
            kwargs = []
            if allow_args:
                args, kwargs = parse_block_signature(parser)
            call = self.call_method("support_method", args, kwargs, lineno=lineno)
            if tag_only:
                return nodes.Output([call], lineno=lineno)
            call_args = []
            if callblock_args is not None:
                for arg in callblock_args:
                    call_args.append(nodes.Name(arg, 'param', lineno=lineno))
            body = parser.parse_statements(['name:' + endtag], drop_needle=True)
            return nodes.CallBlock(call, call_args, [], body, lineno=lineno)

        def support_method(self, *args, **kwargs):
            return f(*args, **kwargs)

        attrs = {"tags": set([tag]), "parse": parse, "support_method": support_method}
        return type(name or f.__name__, (Extension,), attrs)

    return decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def jinja_block_as_fragment_extension(name, tagname=None, classname=None): """Creates a fragment extension which will just act as a replacement of the block statement. """
def jinja_block_as_fragment_extension(name, tagname=None, classname=None):
    """Creates a fragment extension which will just act as a replacement of the
    block statement.

    :param name: the jinja block name to wrap
    :param tagname: tag to expose (defaults to the block name)
    :param classname: generated class name (defaults to "<Name>BlockFragmentExtension")
    """
    if tagname is None:
        tagname = name
    if classname is None:
        classname = "%sBlockFragmentExtension" % name.capitalize()
    attrs = {
        "tags": set([tagname]),
        "end_tag": "end" + tagname,
        "block_name": name,
    }
    return type(classname, (BaseJinjaBlockAsFragmentExtension,), attrs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_copies(input_dir, exclude_list): """ find files that are not templates and not in the exclude_list for copying from template to image """
def find_copies(input_dir, exclude_list):
    """ find files that are not templates and not in the exclude_list
    for copying from template to image

    :param input_dir: directory tree to scan
    :param exclude_list: file names to skip
    :return: list of absolute-ish paths of plain files to copy
    """
    copies = []

    def copy_finder(accumulator, dirname):
        for entry in os.listdir(dirname):
            pathname = os.path.join(dirname, entry)
            # Skip directories, excluded names, and mustache templates.
            skip = (
                os.path.isdir(pathname)
                or entry in exclude_list
                or entry.endswith('.mustache')
            )
            if not skip:
                accumulator.append(pathname)

    dir_visitor(
        input_dir,
        functools.partial(copy_finder, copies)
    )
    return copies
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_devices(self, refresh=False, generic_type=None): """Get all devices from Lupusec."""
def get_devices(self, refresh=False, generic_type=None):
    """Get all devices from Lupusec.

    :param refresh: force a refresh of the device cache
    :param generic_type: optional tuple whose first element is a container
                         of accepted device types; when given, only matching
                         devices are returned
    :return: list of device objects
    """
    _LOGGER.info("Updating all devices...")
    if refresh or self._devices is None:
        if self._devices is None:
            self._devices = {}

        def _upsert(device_json):
            # Reuse an existing device when possible, otherwise create one.
            device = self._devices.get(device_json['name'])
            if device:
                device.update(device_json)
            else:
                device = newDevice(device_json, self)
                if not device:
                    _LOGGER.info('Device is unknown')
                    return
                self._devices[device.device_id] = device

        # FIX: removed the dead no-op branch
        # (`responseObject = responseObject`) and folded the two identical
        # sensor/switch merge loops into the _upsert helper above.
        for deviceJson in self.get_sensors():
            _upsert(deviceJson)

        # We will be treating the Lupusec panel itself as an armable device.
        panelJson = self.get_panel()
        _LOGGER.debug("Get the panel in get_devices: %s", panelJson)
        self._panel.update(panelJson)
        alarmDevice = self._devices.get('0')
        if alarmDevice:
            alarmDevice.update(panelJson)
        else:
            alarmDevice = ALARM.create_alarm(panelJson, self)
            self._devices['0'] = alarmDevice

        # Now we will handle the power switches
        switches = self.get_power_switches()
        _LOGGER.debug(
            'Get active the power switches in get_devices: %s', switches)
        for deviceJson in switches:
            _upsert(deviceJson)

    if generic_type:
        return [device for device in self._devices.values()
                if device.type is not None and device.type in generic_type[0]]
    return list(self._devices.values())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_from_dict(json_dict): """ Given a Unified Uploader message, parse the contents and return a MarketHistoryList instance. :param dict json_dict: A Unified Uploader message as a dict. :rtype: MarketOrderList :returns: An instance of MarketOrderList, containing the orders within. """
def parse_from_dict(json_dict):
    """
    Given a Unified Uploader message, parse the contents and return a
    MarketHistoryList instance.

    :param dict json_dict: A Unified Uploader message as a dict.
    :rtype: MarketOrderList
    :returns: An instance of MarketOrderList, containing the orders
        within.
    """
    history_columns = json_dict['columns']
    history_list = MarketHistoryList(
        upload_keys=json_dict['uploadKeys'],
        history_generator=json_dict['generator'],
    )
    for rowset in json_dict['rowsets']:
        generated_at = parse_datetime(rowset['generatedAt'])
        region_id = rowset['regionID']
        type_id = rowset['typeID']
        # Register the (region, type) pair even when it carries no rows.
        history_list.set_empty_region(region_id, type_id, generated_at)
        for row in rowset['rows']:
            # Map positional row values onto keyword arguments per the
            # message's declared column order.
            history_kwargs = _columns_to_kwargs(
                SPEC_TO_KWARG_CONVERSION, history_columns, row)
            historical_date = parse_datetime(history_kwargs['historical_date'])
            history_kwargs.update({
                'type_id': type_id,
                'region_id': region_id,
                'historical_date': historical_date,
                'generated_at': generated_at,
            })
            history_list.add_entry(MarketHistoryEntry(**history_kwargs))
    return history_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def encode_to_json(history_list): """ Encodes this MarketHistoryList instance to a JSON string. :param MarketHistoryList history_list: The history instance to serialize. :rtype: str """
def encode_to_json(history_list):
    """
    Encodes this MarketHistoryList instance to a JSON string.

    :param MarketHistoryList history_list: The history instance to serialize.
    :rtype: str
    """
    rowsets = []
    for region_entries in history_list._history.values():
        # The order in which these values are added is crucial. It must
        # match STANDARD_ENCODED_COLUMNS.
        rows = [
            [
                gen_iso_datetime_str(entry.historical_date),
                entry.num_orders,
                entry.total_quantity,
                entry.low_price,
                entry.high_price,
                entry.average_price,
            ]
            for entry in region_entries.entries
        ]
        rowsets.append({
            'generatedAt': gen_iso_datetime_str(region_entries.generated_at),
            'regionID': region_entries.region_id,
            'typeID': region_entries.type_id,
            'rows': rows,
        })
    json_dict = {
        'resultType': 'history',
        'version': '0.1',
        'uploadKeys': history_list.upload_keys,
        'generator': history_list.history_generator,
        'currentTime': gen_iso_datetime_str(now_dtime_in_utc()),
        # This must match the order of the values in the row assembling portion
        # above this.
        'columns': STANDARD_ENCODED_COLUMNS,
        'rowsets': rowsets,
    }
    return json.dumps(json_dict)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load(self, configuration): """ Load a YAML configuration file. :param configuration: Configuration filename or YAML string """
def load(self, configuration):
    """
    Load a YAML configuration file.

    :param configuration: Configuration filename or YAML string
    """
    try:
        # Treat the argument as a filename first; `with` closes the handle
        # (the original leaked the open file object).
        with open(configuration, "rb") as config_file:
            # SECURITY NOTE(review): yaml.load on untrusted input can execute
            # arbitrary code; prefer yaml.safe_load if custom tags are not needed.
            self.config = yaml.load(config_file)
    except IOError:
        # Not a readable file — assume it is a YAML document itself.
        try:
            self.config = yaml.load(configuration)
        except ParserError as e:
            # FIX: `except ParserError, e` was Python-2-only syntax and is a
            # SyntaxError under Python 3.
            raise ParserError('Error parsing config: %s' % e)
    # put customer data into self.customer
    if isinstance(self.config, dict):
        self.customer = self.config.get('customer', {})
        self.instances_dict = self.config.get('instances', {})
        self.web2py_dir = self.config.get('web2py', None)
        self.api_type = self.config.get('api_type', 'jsonrpc')
        self.valid = True
    else:
        self.customer = {}
        self.instances_dict = {}
        self.web2py_dir = None
        self.valid = False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def instances(self, test_type=".*"): """ Returns a dict of all instances defined using a regex :param test_type: Regular expression to match for self.instance['test_type'] value names """
def instances(self, test_type=".*"):
    """
    Returns a dict of all instances defined using a regex

    :param test_type: Regular expression to match for self.instance['test_type'] value names
    :return: dict keyed by instance id with name/start/end/url/hostfilter/test_type
    """
    import re
    data = {}
    # FIX: dict.iteritems() was Python-2-only; items() works everywhere.
    for key, inst in self.instances_dict.items():
        if re.match(test_type, inst.get('test_type'), re.IGNORECASE):
            if 'filter_type' in inst:
                hostfilter = {
                    'filtertype': inst['filter_type'],
                    'content': inst['filter_value']
                }
            else:
                hostfilter = {}
            data[key] = {
                'name': inst.get('name'),
                'start': inst.get('start'),
                'end': inst.get('end'),
                'url': inst.get('url'),
                'hostfilter': hostfilter,
                'test_type': inst.get('test_type')
            }
    return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def none_to_blank(s, exchange=''): """Replaces NoneType with '' '' '' u'something' [u'1', ''] :param s: String to replace :para exchange: Character to return for None, default is blank ('') :return: If s is None, returns exchange """
def none_to_blank(s, exchange=''):
    """Replaces NoneType with '' (or the supplied exchange value).

    Lists are processed element-wise; anything else is stringified.

    :param s: value (or list of values) to replace
    :param exchange: value to return for None, default is blank ('')
    :return: `exchange` if s is None, otherwise str(s) (list in, list out)
    """
    if isinstance(s, list):
        # FIX: dropped a pointless enumerate() — only the element is used.
        return [none_to_blank(item, exchange) for item in s]
    # FIX: `unicode` is Python-2-only; str is the text type on Python 3.
    return exchange if s is None else str(s)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_good_url(url=None, addition="/"): """Appends addition to url, ensuring the right number of slashes exist and the path doesn't get clobbered. 'http://www.server.com/anywhere/else' 'http://test.com/somewhere/over/the/rainbow/' 'None/' :param url: URL :param addition: Something to add to the URL :return: New URL with addition"""
def make_good_url(url=None, addition="/"):
    """Appends addition to url, ensuring the right number of slashes exist
    and the path doesn't get clobbered.

    :param url: URL
    :param addition: Something to add to the URL
    :return: New URL with addition, or None for None/non-string input
    """
    if url is None:
        return None
    # Only plain strings are joinable; anything else yields None.
    if not (isinstance(url, str) and isinstance(addition, str)):
        return None
    return "%s/%s" % (url.rstrip('/'), addition.lstrip('/'))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_kvasir_url( proto="https", server="localhost", port="8443", base="Kvasir", user="test", password="test", path=KVASIR_JSONRPC_PATH): """ Creates a full URL to reach Kvasir given specific data 'https://test@test/localhost:8443/Kvasir/api/call/jsonrpc' 'https://test@test/localhost:8443/Kvasir/api/call/jsonrpc' 'https://test@password/localhost:443/Kvasir/bad/path' :param proto: Protocol type - http or https :param server: Hostname or IP address of Web2py server :param port: Port to reach server :param base: Base application name :param user: Username for basic auth :param password: Password for basic auth :param path: Full path to JSONRPC (/api/call/jsonrpc) :return: A full URL that can reach Kvasir's JSONRPC interface """
def build_kvasir_url(
        proto="https", server="localhost", port="8443",
        base="Kvasir",
        user="test", password="test",
        path=KVASIR_JSONRPC_PATH):
    """
    Creates a full URL to reach Kvasir given specific data

    'https://test:test@localhost:8443/Kvasir/api/call/jsonrpc'

    :param proto: Protocol type - http or https
    :param server: Hostname or IP address of Web2py server
    :param port: Port to reach server
    :param base: Base application name
    :param user: Username for basic auth
    :param password: Password for basic auth
    :param path: Full path to JSONRPC (/api/call/jsonrpc)
    :return: A full URL that can reach Kvasir's JSONRPC interface
    """
    # BUG FIX: basic-auth URLs take the form scheme://user:password@host:port/…;
    # the original built scheme://user@password/server:port/… which no HTTP
    # client parses as credentials.
    uri = proto + '://' + user + ':' + password + '@' + server + ':' + port + '/' + base
    return make_good_url(uri, path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_default(parser, section, option, default): """helper to get config settings with a default if not present"""
def get_default(parser, section, option, default):
    """helper to get config settings with a default if not present"""
    try:
        result = parser.get(section, option)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        # A missing section or option falls back to the caller's default.
        # NOTE(review): `ConfigParser` is the Python-2 module name; under
        # Python 3 this requires `import configparser as ConfigParser`
        # at the top of the file — confirm how the module is imported.
        result = default
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_db_application_prefix(prefix, sep=None): """Set the global app prefix and separator."""
def set_db_application_prefix(prefix, sep=None):
    """Set the global app prefix and separator.

    :param prefix: new application prefix
    :param sep: optional separator; left unchanged when None
    """
    global _APPLICATION_PREFIX, _APPLICATION_SEP
    _APPLICATION_PREFIX = prefix
    if sep is None:
        return
    _APPLICATION_SEP = sep
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_by_index(self, cls, index_name, value): """Find records matching index query - defer to backend."""
def find_by_index(self, cls, index_name, value):
    """Find records matching index query - defer to backend.

    :param cls: model class whose index is queried
    :param index_name: name of the index to search
    :param value: value to match in the index
    :return: whatever the backend returns for this index query
    """
    # Pure delegation: the storage backend implements the actual lookup.
    return self.backend.find_by_index(cls, index_name, value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def humanTime(seconds):
    '''
    Convert seconds to something more human-friendly
    '''
    delta = deltaTime(seconds=seconds)
    parts = []
    # Only include units with a non-zero amount, largest first.
    for unit in ('days', 'hours', 'minutes', 'seconds'):
        amount = getattr(delta, unit)
        if amount:
            parts.append('{} {}'.format(amount, unit))
    return ' '.join(parts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def humanTimeConverter():
    '''
    Cope whether we're passed a time in seconds on the command line or via stdin
    '''
    # FIX: Python-2 `print` statements converted to print() calls.
    if len(sys.argv) == 2:
        print(humanFriendlyTime(seconds=int(sys.argv[1])))
    else:
        for line in sys.stdin:
            # NOTE(review): humanFriendlyTime is not defined in this file's
            # visible portion — confirm it exists (humanTime above looks related).
            print(humanFriendlyTime(int(line)))
    sys.exit(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def train(self, data, **kwargs): """ Calculate the standard deviations and means in the training data """
def train(self, data, **kwargs):
    """
    Calculate the standard deviations and means in the training data

    :param data: DataFrame of numeric training columns
    """
    self.data = data
    # FIX: xrange is Python-2-only; DataFrame.icol was removed from pandas —
    # iloc[:, i] is the positional replacement.
    for i in range(data.shape[1]):
        column_mean = np.mean(data.iloc[:, i])
        column_stdev = np.std(data.iloc[:, i])

        # Have to do += or "list" type will fail (ie with append)
        self.column_means += [column_mean]
        self.column_stdevs += [column_stdev]

    self.data = self.predict(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def predict(self, test_data, **kwargs): """ Adjust new input by the values in the training data """
def predict(self, test_data, **kwargs):
    """
    Adjust new input by the values in the training data

    :param test_data: DataFrame with the same column count as the training data
    :return: the standardized DataFrame (modified in place and returned)
    :raises Exception: when the column counts differ
    """
    if test_data.shape[1] != self.data.shape[1]:
        raise Exception("Test data has different number of columns than training data.")
    # FIX: xrange is Python-2-only; DataFrame.icol was removed from pandas —
    # iloc[:, i] is the positional replacement.
    for i in range(test_data.shape[1]):
        # Center on the training mean.
        test_data.loc[:, i] = test_data.iloc[:, i] - self.column_means[i]
        # Only scale when the (truncated) stdev is non-zero, avoiding
        # division by zero — matches the original int() check.
        if int(self.column_stdevs[i]) != 0:
            test_data.loc[:, i] = test_data.iloc[:, i] / self.column_stdevs[i]
    return test_data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def action_decorator(name): """Decorator to register an action decorator """
def action_decorator(name):
    """Decorator to register an action decorator

    :param name: key under which the decorated class is registered
    """
    def register(cls):
        # Record the (name, class) pair in the module-level registry and
        # hand the class back unchanged.
        action_decorators.append((name, cls))
        return cls
    return register
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_global_config(config_path): """ Load a global configuration object, and query for any required variables along the way """
def load_global_config(config_path):
    """ Load a global configuration object, and query for any required
    variables along the way

    :param config_path: path of the global configuration file
    :return: the populated RawConfigParser
    """
    config = configparser.RawConfigParser()
    if not os.path.exists(config_path):
        # First run: no config on disk yet.
        _initial_run()
        logger.info("Unable to find a global sprinter configuration!")
        logger.info("Creating one now. Please answer some questions" +
                    " about what you would like sprinter to do.")
        logger.info("")
    else:
        logger.debug("Checking and setting global parameters...")
        config.read(config_path)
    # checks and sets sections
    if not config.has_section('global'):
        config.add_section('global')
    configure_config(config)
    write_config(config, config_path)
    return config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_global_config(global_config): """ print the global configuration """
def print_global_config(global_config):
    """ print the global configuration

    :param global_config: RawConfigParser holding the global settings
    """
    if global_config.has_section('shell'):
        print("\nShell configurations:")
        for name, enabled in global_config.items('shell'):
            print("{0}: {1}".format(name, enabled))

    if global_config.has_option('global', 'env_source_rc'):
        print("\nHave sprinter env source rc: {0}".format(
            global_config.get('global', 'env_source_rc')))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_default_config(): """ Create a default configuration object, with all parameters filled """
def create_default_config():
    """ Create a default configuration object, with all parameters filled

    :return: RawConfigParser with 'global' and 'shell' sections populated
    """
    config = configparser.RawConfigParser()
    config.add_section('global')
    # FIX: Python 3's configparser requires string option values; storing the
    # boolean False raised TypeError. getboolean('global', 'env_source_rc')
    # still reads this as False.
    config.set('global', 'env_source_rc', 'false')
    config.add_section('shell')
    config.set('shell', 'bash', "true")
    config.set('shell', 'zsh', "true")
    config.set('shell', 'gui', "true")
    return config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _initial_run(): """ Check things during the initial setting of sprinter's global config """
def _initial_run():
    """ Check things during the initial setting of sprinter's global config """
    # Warn loudly (but do not abort) when running on an unsupported platform.
    if not system.is_officially_supported():
        logger.warn(warning_template
                    + "===========================================================\n"
                    + "Sprinter is not officially supported on {0}! Please use at your own risk.\n\n".format(system.operating_system())
                    + "You can find the supported platforms here:\n"
                    + "(http://sprinter.readthedocs.org/en/latest/index.html#compatible-systems)\n\n"
                    + "Conversely, please help us support your system by reporting on issues\n"
                    + "(http://sprinter.readthedocs.org/en/latest/faq.html#i-need-help-who-do-i-talk-to)\n"
                    + "===========================================================")
    else:
        # Supported platform: show the friendly banner instead.
        logger.info(
            "\nThanks for using \n" +
            "=" * 60 +
            sprinter_template +
            "=" * 60
        )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _configure_shell(config): """ Checks and queries values for the shell """
def _configure_shell(config):
    """ Checks and queries values for the shell

    Prompts for the shells/environments sprinter should inject into and
    records a 'true'/'false' flag per shell in the config.
    """
    config.has_section('shell') or config.add_section('shell')
    logger.info(
        "What shells or environments would you like sprinter to work with?\n"
        "(Sprinter will not try to inject into environments not specified here.)\n"
        "If you specify 'gui', sprinter will attempt to inject it's state into graphical programs as well.\n"
        "i.e. environment variables sprinter set will affect programs as well, not just shells\n"
        "WARNING: injecting into the GUI can be very dangerous. it usually requires a restart\n"
        " to modify any environmental configuration."
    )
    environments = list(enumerate(sorted(SHELL_CONFIG), start=1))
    logger.info("[0]: All, " + ", ".join(["[%d]: %s" % (index, val) for index, val in environments]))
    desired_environments = lib.prompt("type the environment, comma-separated", default="0")
    # BUG FIX: compare against the exact comma-separated tokens. The original
    # substring test (`str(index) in desired_environments`) would let "1"
    # select environments 10, 11, ... as well.
    chosen = [token.strip() for token in desired_environments.split(",")]
    for index, val in environments:
        selected = "0" in chosen or str(index) in chosen
        config.set('shell', val, 'true' if selected else 'false')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _configure_env_source_rc(config): """ Configures wether to have .env source .rc """
def _configure_env_source_rc(config):
    """ Configures whether to have .env source .rc

    On OSX, asks the user whether sprinter's env file should also source
    the shell rc file; records the answer in config.
    """
    # FIX: Python 3's configparser requires string option values; the
    # original stored raw booleans, which raises TypeError.
    config.set('global', 'env_source_rc', 'false')
    if system.is_osx():
        logger.info("On OSX, login shells are default, which only source sprinter's 'env' configuration.")
        logger.info("I.E. environment variables would be sourced, but not shell functions " +
                    "or terminal status lines.")
        logger.info("The typical solution to get around this is to source your rc file (.bashrc, .zshrc) " +
                    "from your login shell.")
        env_source_rc = lib.prompt("would you like sprinter to source the rc file too?",
                                   default="yes", boolean=True)
        config.set('global', 'env_source_rc', 'true' if env_source_rc else 'false')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_members(self): """Return all members in the group as CSHMember objects"""
def get_members(self):
    """Return all members in the group as CSHMember objects"""
    # LDAP subtree search for entries whose memberof contains this group's
    # DN; only the uid attribute is requested.
    res = self.__con__.search_s(
        self.__ldap_base_dn__,
        ldap.SCOPE_SUBTREE,
        "(memberof=%s)" % self.__dn__,
        ['uid'])
    ret = []
    for val in res:
        # Each result is a (dn, attrs) tuple; take the first uid value.
        val = val[1]['uid'][0]
        try:
            ret.append(val.decode('utf-8'))
        except UnicodeDecodeError:
            # Keep the raw bytes when they are not valid UTF-8.
            ret.append(val)
        except KeyError:
            # NOTE(review): by this point the indexing has already happened,
            # so KeyError cannot be raised here — dead branch, kept as-is.
            continue
    return [CSHMember(self.__lib__, result, uid=True) for result in ret]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_member(self, member, dn=False): """Check if a Member is in the bound group. Arguments: member -- the CSHMember object (or distinguished name) of the member to check against Keyword arguments: dn -- whether or not member is a distinguished name """
def check_member(self, member, dn=False):
    """Check if a Member is in the bound group.

    Arguments:
    member -- the CSHMember object (or distinguished name) of the member to
              check against

    Keyword arguments:
    dn -- whether or not member is a distinguished name

    :return: True when the member is in the group
    """
    if dn:
        # BUG FIX: the filter previously interpolated the boolean `dn` flag
        # ("(member=True)") instead of the distinguished name in `member`.
        res = self.__con__.search_s(
            self.__dn__,
            ldap.SCOPE_BASE,
            "(member=%s)" % member,
            ['ipaUniqueID'])
    else:
        res = self.__con__.search_s(
            self.__dn__,
            ldap.SCOPE_BASE,
            "(member=%s)" % member.get_dn(),
            ['ipaUniqueID'])
    return len(res) > 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_member(self, member, dn=False): """Add a member to the bound group Arguments: member -- the CSHMember object (or distinguished name) of the member Keyword arguments: dn -- whether or not member is a distinguished name """
def add_member(self, member, dn=False):
    """Add a member to the bound group

    Arguments:
    member -- the CSHMember object (or distinguished name) of the member

    Keyword arguments:
    dn -- whether or not member is a distinguished name
    """
    if dn:
        # Already a member: nothing to do.
        if self.check_member(member, dn=True):
            return
        mod = (ldap.MOD_ADD, 'member', member.encode('ascii'))
    else:
        if self.check_member(member):
            return
        mod = (ldap.MOD_ADD, 'member', member.get_dn().encode('ascii'))
    if self.__lib__.__batch_mods__:
        # Batch mode: queue the modification for a later flush.
        self.__lib__.enqueue_mod(self.__dn__, mod)
    elif not self.__lib__.__ro__:
        # Read-write mode: apply the modification immediately.
        mod_attrs = [mod]
        self.__con__.modify_s(self.__dn__, mod_attrs)
    else:
        # Read-only mode: only report what would have been done.
        print("ADD VALUE member = {} FOR {}".format(mod[2], self.__dn__))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_object_from_yaml(desired_type: Type[Any], file_object: TextIOBase, logger: Logger, fix_imports: bool = True, errors: str = 'strict', *args, **kwargs) -> Any: """ Parses a yaml file. :param desired_type: :param file_object: :param logger: :param fix_imports: :param errors: :param args: :param kwargs: :return: """
def read_object_from_yaml(desired_type: Type[Any], file_object: TextIOBase,
                          logger: Logger, fix_imports: bool = True, errors: str = 'strict',
                          *args, **kwargs) -> Any:
    """
    Parses a yaml file.

    :param desired_type: target type (not used here; the raw YAML structure is returned)
    :param file_object: text stream containing the YAML document
    :param logger: logger (unused)
    :param fix_imports: unused; kept for signature compatibility
    :param errors: unused; kept for signature compatibility
    :param args:
    :param kwargs:
    :return: the object deserialized from the YAML stream
    """
    # SECURITY NOTE(review): yaml.load without an explicit Loader can execute
    # arbitrary code when fed untrusted input; prefer yaml.safe_load unless
    # custom YAML tags are genuinely required here.
    return yaml.load(file_object)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_collection_from_yaml(desired_type: Type[Any], file_object: TextIOBase, logger: Logger, conversion_finder: ConversionFinder, fix_imports: bool = True, errors: str = 'strict', **kwargs) -> Any: """ Parses a collection from a yaml file. :param desired_type: :param file_object: :param logger: :param fix_imports: :param errors: :param args: :param kwargs: :return: """
res = yaml.load(file_object) # convert if required return ConversionFinder.convert_collection_values_according_to_pep(res, desired_type, conversion_finder, logger, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pass_feature(*feature_names): """Injects a feature instance into the kwargs """
def decorator(f):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        # Inject a lazy proxy for each requested feature into kwargs.
        # NOTE(review): this silently overwrites any caller-supplied
        # keyword argument with the same name — confirm intended.
        for name in feature_names:
            kwargs[name] = feature_proxy(name)
        return f(*args, **kwargs)
    return wrapper
return decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_tar(url, target_dir, additional_compression="", remove_common_prefix=False, overwrite=False): """ extract a targz and install to the target directory """
# NOTE(review): the 'additional_compression' parameter is accepted but
# never used here — confirm whether it should select a tarfile mode.
try:
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    tf = tarfile.TarFile.open(fileobj=download_to_bytesio(url))
    # Strip the archive's common leading directory when requested, so the
    # contents land directly inside target_dir.
    common_prefix = os.path.commonprefix(tf.getnames())
    if not common_prefix.endswith('/'):
        common_prefix += "/"
    for tfile in tf.getmembers():
        if remove_common_prefix:
            tfile.name = tfile.name.replace(common_prefix, "", 1)
        if tfile.name != "":
            target_path = os.path.join(target_dir, tfile.name)
            if target_path != target_dir and os.path.exists(target_path):
                if overwrite:
                    remove_path(target_path)
                else:
                    # Keep the existing file/dir untouched.
                    continue
            tf.extract(tfile, target_dir)
except (OSError, IOError):
    # Merged the two identical except blocks (IOError is an alias of
    # OSError on Python 3 anyway); also removed a duplicated
    # os.path.exists/makedirs check from the original.
    e = sys.exc_info()[1]
    raise ExtractException(str(e))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_path(target_path): """ Delete the target path """
if os.path.isdir(target_path): shutil.rmtree(target_path) else: os.unlink(target_path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self, obj, id_code): """ Save an object, and use id_code in the filename obj - any object id_code - unique identifier """
# BUG FIX: the original opened the file in text mode ('w+'); pickle
# writes bytes, which fails/corrupts on Python 3. Binary mode plus a
# context manager also guarantees the handle is closed on error.
with open('{0}/{1}'.format(self.data_path, id_code), 'wb') as filestream:
    pickle.dump(obj, filestream)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load(self, id_code): """ Loads a workflow identified by id_code id_code - unique identifier, previously must have called save with same id_code """
filestream = open('{0}/{1}'.format(self.data_path, id_code), 'rb') workflow = pickle.load(filestream) return workflow
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_object_from_pickle(desired_type: Type[T], file_path: str, encoding: str, fix_imports: bool = True, errors: str = 'strict', *args, **kwargs) -> Any: """ Parses a pickle file. :param desired_type: :param file_path: :param encoding: :param fix_imports: :param errors: :param args: :param kwargs: :return: """
import pickle file_object = open(file_path, mode='rb') try: return pickle.load(file_object, fix_imports=fix_imports, encoding=encoding, errors=errors) finally: file_object.close()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def should_display_warnings_for(to_type): """ Central method where we control whether warnings should be displayed """
if not hasattr(to_type, '__module__'): return True elif to_type.__module__ in {'builtins'} or to_type.__module__.startswith('parsyfiles') \ or to_type.__name__ in {'DataFrame'}: return False elif issubclass(to_type, int) or issubclass(to_type, str) \ or issubclass(to_type, float) or issubclass(to_type, bool): return False else: return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_dict(dict_name, dict_value, logger: Logger = None): """ Utility method to print a named dictionary :param dict_name: :param dict_value: :return: """
# Pretty-print when possible, falling back to plain output. Narrowed the
# original bare 'except:' clauses to 'except Exception' so that
# KeyboardInterrupt/SystemExit are not swallowed.
if logger is None:
    print(dict_name + ' = ')
    try:
        from pprint import pprint
        pprint(dict_value)
    except Exception:
        print(dict_value)
else:
    logger.info(dict_name + ' = ')
    try:
        from pprint import pformat
        logger.info(pformat(dict_value))
    except Exception:
        logger.info(dict_value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_able_to_parse_detailed(self, desired_type: Type[Any], desired_ext: str, strict: bool): """ Explicitly declare that we are not able to parse collections :param desired_type: :param desired_ext: :param strict: :return: """
# JOKER is a sentinel meaning "any type": pass None to the validity
# check in that case, otherwise the concrete desired type.
if not _is_valid_for_dict_to_object_conversion(strict, None,
                                               None if desired_type is JOKER else desired_type):
    # This parser explicitly refuses collections / invalid targets.
    return False, None
else:
    # Defer to the generic multifile-parser capability check.
    return super(MultifileObjectParser, self).is_able_to_parse_detailed(desired_type,
                                                                        desired_ext, strict)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parsyfiles_global_config(multiple_errors_tb_limit: int = None, full_paths_in_logs: bool = None, dict_to_object_subclass_limit: int = None): """ This is the method you should use to configure the parsyfiles library :param multiple_errors_tb_limit: the traceback size (default is 3) of individual parsers exceptions displayed when parsyfiles tries several parsing chains and all of them fail. :param full_paths_in_logs: if True, full file paths will be displayed in logs. Otherwise only the parent path will be displayed and children paths will be indented (default is False) :param dict_to_object_subclass_limit: the number of subclasses that the <dict_to_object> converter will try, when instantiating an object from a dictionary. Default is 50 :return: """
# Apply every explicitly-provided option onto the global configuration;
# None means "leave the current setting unchanged".
overrides = {
    'multiple_errors_tb_limit': multiple_errors_tb_limit,
    'full_paths_in_logs': full_paths_in_logs,
    'dict_to_object_subclass_limit': dict_to_object_subclass_limit,
}
for option_name, option_value in overrides.items():
    if option_value is not None:
        setattr(GLOBAL_CONFIG, option_name, option_value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_valid(self, context): """Checks through the previous_actions iterable if required actions have been executed """
if self.requires: for r in self.requires: if not r in context.executed_actions: raise RequirementMissingError("Action '%s' requires '%s'" % (self.name, r)) return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_file_contents(file_path): """Get the context of the file using full path name"""
full_path = os.path.join(package_dir, file_path) return open(full_path, 'r').read()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def refresh(self): """Refresh a device"""
# new_device = {} if self.type in CONST.BINARY_SENSOR_TYPES: response = self._lupusec.get_sensors() for device in response: if device['device_id'] == self._device_id: self.update(device) return device elif self.type == CONST.ALARM_TYPE: response = self._lupusec.get_panel() self.update(response) return response elif self.type == CONST.TYPE_POWER_SWITCH: response = self._lupusec.get_power_switches() for pss in response: if pss['device_id'] == self._device_id: self.update(pss) return pss
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def desc(self): """Get a short description of the device."""
return '{0} (ID: {1}) - {2} - {3}'.format( self.name, self.device_id, self.type, self.status)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list(declared, undeclared): """List configured queues."""
# Optionally narrow the queue set to those that do (or don't) exist on
# the broker, then print their routing keys in sorted order.
queues = current_queues.queues.values()
if declared:
    queues = [q for q in queues if q.exists]
elif undeclared:
    queues = [q for q in queues if not q.exists]
for routing_key in sorted(q.routing_key for q in queues):
    click.secho(routing_key)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def declare(queues): """Initialize the given queues."""
current_queues.declare(queues=queues)
# Report which queues were touched; empty selection means "all".
affected = queues or current_queues.queues.keys()
click.secho('Queues {} have been declared.'.format(affected), fg='green')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def purge_queues(queues=None): """Purge the given queues."""
current_queues.purge(queues=queues)
# Report which queues were touched; None/empty selection means "all".
affected = queues or current_queues.queues.keys()
click.secho('Queues {} have been purged.'.format(affected), fg='green')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_queue(queues): """Delete the given queues."""
current_queues.delete(queues=queues)
# Report which queues were touched; empty selection means "all".
affected = queues or current_queues.queues.keys()
click.secho('Queues {} have been deleted.'.format(affected), fg='green')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_needed_formatter(input_format, output_format): """ Find a data formatter given an input and output format input_format - needed input format. see utils.input.dataformats output_format - needed output format. see utils.input.dataformats """
#Only take the formatters in the registry selected_registry = [re.cls for re in registry if re.category==RegistryCategories.formatters] needed_formatters = [] for formatter in selected_registry: #Initialize the formatter (needed so it can discover its formats) formatter_inst = formatter() if input_format in formatter_inst.input_formats and output_format in formatter_inst.output_formats: needed_formatters.append(formatter) if len(needed_formatters)>0: return needed_formatters[0] return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_needed_input(input_format): """ Find a needed input class input_format - needed input format, see utils.input.dataformats """
needed_inputs = [re.cls for re in registry if re.category==RegistryCategories.inputs and re.cls.input_format == input_format] if len(needed_inputs)>0: return needed_inputs[0] return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def exists_in_registry(category, namespace, name): """ See if a given category, namespace, name combination exists in the registry category - See registrycategories. Type of module namespace - Namespace of the module, defined in settings name - the lowercase name of the module """
selected_registry = [re for re in registry if re.category==category and re.namespace==namespace and re.name == name] if len(selected_registry)>0: return True return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def register(cls): """ Register a given model in the registry """
registry_entry = RegistryEntry(category=cls.category, namespace=cls.namespace,
                               name=cls.name, cls=cls)
# Guard against double registration (by entry equality or by key lookup).
if registry_entry not in registry and not exists_in_registry(cls.category, cls.namespace, cls.name):
    registry.append(registry_entry)
else:
    # logging.Logger.warn() is a deprecated alias of warning().
    # NOTE(review): assumes 'log' is a stdlib logger — confirm.
    log.warning("Class {0} already in registry".format(cls))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _set_fields(self): """ Initialize the fields for data caching. """
self.fields = []
self.required_input = []
# Collect every non-dunder data descriptor declared on the class; these
# are the cacheable fields. Descriptors flagged required_input are also
# recorded as mandatory inputs.
for member_name, member_object in inspect.getmembers(self.__class__):
    if inspect.isdatadescriptor(member_object) and not member_name.startswith("__"):
        self.fields.append(member_name)
        if member_object.required_input:
            self.required_input.append(member_name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def subscriber(address,topics,callback,message_type): """ Creates a subscriber binding to the given address and subscribe the given topics. The callback is invoked for every message received. Args: - address: the address to bind the PUB socket to. - topics: the topics to subscribe - callback: the callback to invoke for every message. Must accept 2 variables - topic and message - message_type: the type of message to receive """
# Thin factory wrapper around the Subscriber class.
return Subscriber(address,topics,callback,message_type)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def start(self): """ Start a thread that consumes the messages and invokes the callback """
# NOTE(review): the consumer thread is non-daemon, so it keeps the
# process alive until _consume returns — confirm that is intended.
t=threading.Thread(target=self._consume)
t.start()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_forecast_api(self, longitude: str, latitude: str) -> {}: """gets data from API"""
api_url = APIURL_TEMPLATE.format(longitude, latitude) response = urlopen(api_url) data = response.read().decode('utf-8') json_data = json.loads(data) return json_data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def async_get_forecast_api(self, longitude: str, latitude: str) -> {}: """gets data from API asyncronious"""
api_url = APIURL_TEMPLATE.format(longitude, latitude)
# Lazily create one shared aiohttp session and reuse it across calls.
# NOTE(review): the session is never closed here — presumably the owner
# is responsible for cleanup; confirm.
if self.session is None:
    self.session = aiohttp.ClientSession()
async with self.session.get(api_url) as response:
    # Anything but 200 is treated as a hard failure.
    if response.status != 200:
        raise SmhiForecastException(
            "Failed to access weather API with status code {}".format(
                response.status)
        )
    data = await response.text()
    return json.loads(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def all(iterable = None, *, name = None, metric = call_default): """Measure total time and item count for consuming an iterable :arg iterable: any iterable :arg function metric: f(name, count, total_time) :arg str name: name for the metric """
if iterable is None: return _iter_decorator(name, metric) else: return _do_all(iterable, name, metric)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def each(iterable = None, *, name = None, metric = call_default): """Measure time elapsed to produce each item of an iterable :arg iterable: any iterable :arg function metric: f(name, 1, time) :arg str name: name for the metric """
if iterable is None: return _each_decorator(name, metric) else: return _do_each(iterable, name, metric)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def first(iterable = None, *, name = None, metric = call_default): """Measure time elapsed to produce first item of an iterable :arg iterable: any iterable :arg function metric: f(name, 1, time) :arg str name: name for the metric """
if iterable is None: return _first_decorator(name, metric) else: return _do_first(iterable, name, metric)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def reducer(*, name = None, metric = call_default): """Decorator to measure a function that consumes many items. The wrapped ``func`` should take either a single ``iterable`` argument or ``*args`` (plus keyword arguments). :arg function metric: f(name, count, total_time) :arg str name: name for the metric """
class instrument_reducer_decorator(object):
    def __init__(self, func):
        self.orig_func = func
        self.wrapping = wraps(func)
        # Default metric name: "<module>.<funcname>".
        self.metric_name = name if name is not None else func.__module__ + '.' +func.__name__
        # NOTE(review): inspect.getargspec is deprecated (removed in
        # Python 3.11); getfullargspec is the 3.x replacement, but that
        # would drop Python 2 support — confirm target versions.
        self.varargs = inspect.getargspec(func).varargs is not None
        if self.varargs:
            # Adapt a *args-style consumer so it can be fed one iterable.
            self.method = _varargs_to_iterable_method(func)
            self.func = _varargs_to_iterable_func(func)
            self.callme = _iterable_to_varargs_func(self._call)
        else:
            self.method = func
            self.func = func
            self.callme = self._call

    # we need _call/callme b/c CPython short-circurits CALL_FUNCTION to
    # directly access __call__, bypassing our varargs decorator
    def __call__(self, *args, **kwargs):
        return self.callme(*args, **kwargs)

    def _call(self, iterable, **kwargs):
        # Count items as they are consumed, then report count + elapsed
        # time even if the wrapped function raises.
        it = counted_iterable(iterable)
        t = time.time()
        try:
            return self.func(it, **kwargs)
        finally:
            metric(self.metric_name, it.count, time.time() - t)

    def __get__(self, instance, class_):
        # Descriptor protocol: when accessed as a method, rebuild the
        # metric name as "<module>.<Class>.<funcname>".
        metric_name = name if name is not None else\
            ".".join((class_.__module__, class_.__name__, self.orig_func.__name__))

        def wrapped_method(iterable, **kwargs):
            it = counted_iterable(iterable)
            t = time.time()
            try:
                return self.method(instance, it, **kwargs)
            finally:
                metric(metric_name, it.count, time.time() - t)

        # wrap in func version b/c self is handled for us by descriptor (ie, `instance`)
        if self.varargs:
            wrapped_method = _iterable_to_varargs_func(wrapped_method)
        wrapped_method = self.wrapping(wrapped_method)
        return wrapped_method
return instrument_reducer_decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def producer(*, name = None, metric = call_default): """Decorator to measure a function that produces many items. The function should return an object that supports ``__len__`` (ie, a list). If the function returns an iterator, use :func:`all` instead. :arg function metric: f(name, count, total_time) :arg str name: name for the metric """
def wrapper(func):
    def instrumenter(name_, *args, **kwargs):
        # Time the call; report len(result) on success, 0 on exception.
        t = time.time()
        try:
            ret = func(*args, **kwargs)
        except Exception:
            # record a metric for other exceptions, than raise
            metric(name_, 0, time.time() - t)
            raise
        else:
            # normal path, record metric & return
            metric(name_, len(ret), time.time() - t)
            return ret

    # Default metric name: "<module>.<funcname>".
    name_ = name if name is not None else func.__module__ + '.' +func.__name__

    class instrument_decorator(object):  # must be a class for descriptor magic to work
        @wraps(func)
        def __call__(self, *args, **kwargs):
            return instrumenter(name_, *args, **kwargs)

        def __get__(self, instance, class_):
            # Method access: rebuild name as "<module>.<Class>.<funcname>"
            # and bind 'instance' as the first positional argument.
            name_ = name if name is not None else\
                ".".join((class_.__module__, class_.__name__, func.__name__))

            @wraps(func)
            def wrapped_method(*args, **kwargs):
                return instrumenter(name_, instance, *args, **kwargs)
            return wrapped_method
    return instrument_decorator()
return wrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def block(*, name = None, metric = call_default, count = 1): """Context manager to measure execution time of a block :arg function metric: f(name, 1, time) :arg str name: name for the metric :arg int count: user-supplied number of items, defaults to 1 """
t = time.time()
try:
    # Generator-based context manager: the guarded block executes at the
    # yield point (presumably wrapped by @contextmanager outside this
    # view — TODO confirm).
    yield
finally:
    # Record the metric even if the block raised.
    metric(name, count, time.time() - t)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __get_package_manager(self): """ Installs and verifies package manager """
package_manager = "" args = "" sudo_required = True if system.is_osx(): package_manager = "brew" sudo_required = False args = " install" elif system.is_debian(): package_manager = "apt-get" args = " -y install" elif system.is_fedora(): package_manager = "yum" args = " install" elif system.is_arch(): package_manager = "pacman" args = " --noconfirm -S" if lib.which(package_manager) is None: self.logger.warn("Package manager %s not installed! Packages will not be installed." % package_manager) self.package_manager = None self.package_manager = package_manager self.sudo_required = sudo_required self.args = args
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def parse(self, data, doctype):
    '''Parse an input string, and return an AST.

    doctype must have WCADocument as a baseclass.
    Returns a (ast, errors, warnings) tuple; ast is None on failure.
    '''
    self.doctype = doctype
    # Reset all per-parse state so the parser instance can be reused.
    self.lexer.lineno = 0
    del self.errors[:]
    del self.warnings[:]
    self.lexer.lexerror = False
    ast = self.parser.parse(data, lexer=self.lexer)
    # A lexing error invalidates whatever partial AST was built.
    if self.lexer.lexerror:
        ast = None
    if ast is None:
        self.errors.append("Couldn't build AST.")
    else:
        # Run every semantic check registered for this document type,
        # accumulating their errors and warnings.
        for check in self.sema[self.doctype]:
            visitor = check()
            if not visitor.visit(ast):
                self.errors.append("Couldn't visit AST.")
            self.errors.extend(visitor.errors)
            self.warnings.extend(visitor.warnings)
    # Return copies so later parses can't mutate the caller's lists.
    return (ast, list(self.errors), list(self.warnings))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def p_error(self, elem):
    '''Handle syntax error'''
    # Record the error (rather than raising) so parsing can continue and
    # all errors are reported at the end.
    message = ("Syntax error on line " + str(self.lexer.lineno)
               + ". Got unexpected token " + elem.type)
    self.errors.append(message)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_progress_brackets(self, start, end): """Set brackets to set around a progress bar."""
# Remember the characters drawn immediately before and after the bar.
self.sep_start, self.sep_end = start, end
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_progress(self, width): """Create the formatted string that displays the progress."""
# Ask the layout helper how wide each chunk may be, then let every chunk
# render itself at its allotted width.
chunk_widths = self._get_chunk_sizes(width)
progress_chunks = [chunk.format_chunk(chunk_width)
                   for (chunk, chunk_width)
                   in zip(self._progress_chunks, chunk_widths)]
# Surround the concatenated chunks with the configured brackets.
return "{sep_start}{progress}{sep_end}".format(
    sep_start=self.sep_start,
    progress="".join(progress_chunks),
    sep_end=self.sep_end
)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def summary_width(self): """Calculate how long a string is needed to show a summary string. This is not simply the length of the formatted summary string since that string might contain ANSI codes. """
chunk_counts = [chunk.count for chunk in self._progress_chunks]
# Each count occupies ceil(log10(count + 1)) digits; the max(1, ...)
# guard makes a count of zero still take one column.
numbers_width = sum(max(1, ceil(log10(count + 1))) for count in chunk_counts)
# One "/" separator between each pair of adjacent counts.
separators_with = len(chunk_counts) - 1
return numbers_width + separators_with
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_summary(self): """Generate a summary string for the progress bar."""
chunks = [chunk.format_chunk_summary() for chunk in self._progress_chunks] return "/".join(chunks)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_status(self, width=None, label_width=None, progress_width=None, summary_width=None): """Generate the formatted status bar string."""
# Fill in any widths the caller did not specify; the progress bar gets
# whatever remains after the label, summary, and two spacer columns.
if width is None:  # pragma: no cover
    width = shutil.get_terminal_size()[0]
if label_width is None:
    label_width = len(self.label)
if summary_width is None:
    summary_width = self.summary_width()
if progress_width is None:
    progress_width = width - label_width - summary_width - 2
if len(self.label) > label_width:
    # Truncate over-long labels with a "..." suffix.
    # FIXME: This actually *will* break if we ever have fewer than
    # three characters assigned to format the label, but that would
    # be an extreme situation so I won't fix it just yet.
    label = self.label[:label_width - 3] + "..."
else:
    # Left-align and pad the label with the configured fill character.
    label_format = "{{label:{fill_char}<{width}}}".format(
        width=label_width, fill_char=self.fill_char)
    label = label_format.format(label=self.label)
# Right-align the summary in its field.
summary_format = "{{:>{width}}}".format(width=summary_width)
summary = summary_format.format(self._progress.format_summary())
progress = self._progress.format_progress(width=progress_width)
return "{label} {progress} {summary}".format(
    label=label,
    progress=progress,
    summary=summary
)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_status_line(self, label): """Add a status bar line to the table. This function returns the status bar and it can be modified from this return value. """
status_line = StatusBar(label, self._sep_start, self._sep_end, self._fill_char) self._lines.append(status_line) return status_line
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def calculate_field_widths(self, width=None, min_label_width=10, min_progress_width=10): """Calculate how wide each field should be so we can align them. We always find room for the summaries since these are short and packed with information. If possible, we will also find room for labels, but if this would make the progress bar width shorter than the specified minium then we will shorten the labels, though never below the minium there. If this mean we have bars that are too wide for the terminal, then your terminal needs to be wider. """
if width is None:  # pragma: no cover
    width = shutil.get_terminal_size()[0]
# The summary always gets its full width; labels and the progress bar
# compete for what remains (2 columns are reserved for spacers).
summary_width = self.summary_width()
label_width = self.label_width()
remaining = width - summary_width - label_width - 2
if remaining >= min_progress_width:
    progress_width = remaining
else:
    # Not enough room: pin the progress bar at its minimum and shrink
    # the label instead (never below its own minimum — the result may
    # then exceed the terminal width).
    progress_width = min_progress_width
    remaining = width - summary_width - progress_width - 2
    if remaining >= min_label_width:
        label_width = remaining
    else:
        label_width = min_label_width
return (label_width, progress_width, summary_width)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_table(self, width=None, min_label_width=10, min_progress_width=10): """Format the entire table of progress bars. The function first computes the widths of the fields so they can be aligned across lines and then returns formatted lines as a list of strings. """
# handle the special case of an empty table. if len(self._lines) == 0: return [] if width is None: # pragma: no cover width = shutil.get_terminal_size()[0] labelw, progw, summaryw = self.calculate_field_widths( width=width, min_label_width=min_label_width, min_progress_width=min_progress_width ) output = [ sb.format_status( label_width=labelw, progress_width=progw, summary_width=summaryw ) for sb in self._lines ] return output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_log_dict(request, response): """ Create a dictionary with logging data. """
remote_addr = request.META.get('REMOTE_ADDR')
# Behind a trusted proxy (request arrived from an internal IP), prefer
# the forwarded client address when present.
if remote_addr in getattr(settings, 'INTERNAL_IPS', []):
    remote_addr = request.META.get(
        'HTTP_X_FORWARDED_FOR') or remote_addr
# "-" is the conventional NA marker for missing/anonymous users.
user_email = "-"
if hasattr(request, 'user'):
    user_email = getattr(request.user, 'email', '-')
# Streaming responses have no materialized body to measure.
if response.streaming:
    content_length = 'streaming'
else:
    content_length = len(response.content)
return {
    # 'event' makes event-based filtering possible in logging backends
    # like logstash
    'event': settings.LOGUTILS_LOGGING_MIDDLEWARE_EVENT,
    'remote_address': remote_addr,
    'user_email': user_email,
    'method': request.method,
    'url': request.get_full_path(),
    'status': response.status_code,
    'content_length': content_length,
    'request_time': -1,  # NA value: real value added by LoggingMiddleware
}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_log_message(log_dict, use_sql_info=False, fmt=True): """ Create the logging message string. """
log_msg = (
    "%(remote_address)s %(user_email)s %(method)s %(url)s %(status)d "
    "%(content_length)d (%(request_time).2f seconds)"
)
if use_sql_info:
    # Total SQL time, converted from seconds to milliseconds.
    sql_time = sum(
        float(q['time']) for q in connection.queries) * 1000
    extra_log = {
        'nr_queries': len(connection.queries),
        'sql_time': sql_time}
    log_msg += " (%(nr_queries)d SQL queries, %(sql_time)f ms)"
    log_dict.update(extra_log)
# fmt=False returns the raw %-template (for lazy logging); fmt=True
# interpolates it immediately.
return log_msg % log_dict if fmt else log_msg
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def process_response(self, request, response): """ Create the logging message.. """
try:
    log_dict = create_log_dict(request, response)

    # add the request time to the log_dict; if no start time is
    # available, use -1 as NA value
    request_time = (
        time.time() - self.start_time
        if hasattr(self, 'start_time') and self.start_time
        else -1)
    log_dict.update({'request_time': request_time})

    # Slow requests are logged at WARNING and include SQL statistics.
    is_request_time_too_high = (
        request_time > float(settings.LOGUTILS_REQUEST_TIME_THRESHOLD))
    use_sql_info = settings.DEBUG or is_request_time_too_high

    log_msg = create_log_message(log_dict, use_sql_info, fmt=False)

    if is_request_time_too_high:
        logger.warning(log_msg, log_dict, extra=log_dict)
    else:
        logger.info(log_msg, log_dict, extra=log_dict)
except Exception as e:
    # Logging must never break the response cycle.
    logger.exception(e)
return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def as_completed(jobs):
    ''' Generator function that yields the jobs in order of their
    completion. Attaches a new listener to each job. '''
    jobs = tuple(jobs)
    event = threading.Event()
    # Any job finishing (success OR error) wakes the waiter below.
    callback = lambda f, ev: event.set()
    [job.add_listener(Job.SUCCESS, callback, once=True) for job in jobs]
    [job.add_listener(Job.ERROR, callback, once=True) for job in jobs]
    while jobs:
        event.wait()
        # Clear before partitioning; a job finishing right after this
        # re-sets the event, so the next wait() will not block forever.
        event.clear()
        # Split the remaining (unfinished) jobs from those just done.
        jobs, finished = split_list_by(jobs, lambda x: x.finished)
        for job in finished:
            yield job
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def reraise(tpe, value, tb=None): " Reraise an exception from an exception info tuple. " Py3 = (sys.version_info[0] == 3) if value is None: value = tpe() if Py3: if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value else: exec('raise tpe, value, tb')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def finished(self): """ True if the job run and finished. There is no difference if the job finished successfully or errored. """
return self.__state in (Job.ERROR, Job.SUCCESS, Job.CANCELLED)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _trigger_event(self, event): """ Private. Triggers and event and removes all one-off listeners for that event. """
if event is None or event not in self.__listeners: raise ValueError('invalid event type: {0!r}'.format(event)) # Check the event has not already been triggered, then mark # the event as triggered. if event in self.__event_set: raise RuntimeError('event already triggered: {0!r}'.format(event)) self.__event_set.add(event) listeners = self.__listeners[event] + self.__listeners[None] # Remove one-off listeners. self.__listeners[event][:] = (l for l in self.__listeners[event] if not l.once) self.__listeners[None][:] = (l for l in self.__listeners[None] if not l.once) for listener in listeners: # XXX: What to do on exceptions? Catch and make sure all listeners # run through? What to do with the exception(s) then? listener.callback(self, event)