text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
def process_ioc(args):
    """Process actions related to the IOC switch.

    Depending on the parsed CLI arguments: list indicators from the remote
    node (--get), submit a single indicator (--single), or submit a file of
    newline-separated indicators (--file).

    :param args: argparse Namespace with get/single/file/private/tags attrs
    :returns: API response from the indicator client
    :raises Exception: if --file points to a path that is not a file
    """
    client = IndicatorClient.from_config()
    client.set_debug(True)
    if args.get:
        response = client.get_indicators()
    elif args.single:
        response = client.add_indicators(indicators=[args.single],
                                         private=args.private,
                                         tags=args.tags)
    else:
        if not os.path.isfile(args.file):
            raise Exception("File path isn't valid!")
        # One indicator per line; blank lines are skipped.
        with open(args.file, 'r') as handle:
            indicators = [line.strip() for line in handle if line.strip()]
        response = client.add_indicators(indicators=indicators,
                                         private=args.private,
                                         tags=args.tags)
    return response
def process_events(args):
    """Process actions related to events switch.

    :param args: argparse Namespace with boolean get/flush attributes
    :returns: API response from the events client
    :raises ValueError: if neither --get nor --flush was requested
    """
    client = EventsClient.from_config()
    client.set_debug(True)
    if args.get:
        response = client.get_events()
    elif args.flush:
        response = client.flush_events()
    else:
        # The original left `response` unbound on this path, producing an
        # UnboundLocalError. Raise ValueError instead: main() catches it
        # and prints usage.
        raise ValueError("Either --get or --flush must be supplied")
    return response
def main():
    """Run the code."""
    # Command-line interface with two subcommands: `ioc` and `events`.
    parser = ArgumentParser(description="Blockade Analyst Bench")
    subs = parser.add_subparsers(dest='cmd')
    ioc = subs.add_parser('ioc', help="Perform actions with IOCs")
    ioc.add_argument('--single', '-s', help="Send a single IOC")
    ioc.add_argument('--file', '-f', help="Parse a file of IOCs")
    ioc.add_argument('--private', '-p', action="store_true",
                     help="Submit the IOCs to the node hashed, \
                     instead of in clear")
    ioc.add_argument('--tags', '-t',
                     help="Add a comma-separated list of tags to store \
                     with the indicators")
    ioc.add_argument('--get', '-g', action="store_true",
                     help="List indicators on the remote node")
    events = subs.add_parser('events', help="Perform actions with Events")
    events.add_argument('--get', '-g', action='store_true',
                        help="Get recent events")
    events.add_argument('--flush', '-f', action='store_true',
                        help="Flush all events from cloud node")
    # parse_known_args: unrecognized options are ignored rather than fatal.
    args, unknown = parser.parse_known_args()
    try:
        if args.cmd == 'ioc':
            # --single and --file are mutually exclusive.
            # NOTE(review): this raises a bare Exception, which is NOT caught
            # by the ValueError handler below, so it surfaces as a traceback.
            if (args.single and args.file):
                raise Exception("Can't use single and file together!")
            if (not args.single and not args.file and not args.get):
                ioc.print_help()
                sys.exit(1)
            response = process_ioc(args)
        elif args.cmd == 'events':
            if (not args.get and not args.flush):
                events.print_help()
                sys.exit(1)
            response = process_events(args)
        else:
            # No subcommand given.
            parser.print_usage()
            sys.exit(1)
    except ValueError as e:
        # Only ValueError is treated as a usage error; everything else
        # propagates.
        parser.print_usage()
        sys.stderr.write('{}\n'.format(str(e)))
        sys.exit(1)
    # Print the server's message (empty string when absent).
    print(response.get('message', ''))
def window(iterable, size=2):
    """Yield successive overlapping tuples ("windows") of length `size`.

    An iterable shorter than `size` yields nothing.
    """
    source = iter(iterable)
    # Prime the buffer with the first size-1 items; maxlen makes each
    # subsequent append drop the oldest element automatically.
    buffer = deque(islice(source, size - 1), maxlen=size)
    for item in source:
        buffer.append(item)
        yield tuple(buffer)
def payment_mode(self, payment_mode):
    """Sets the payment_mode of this CreditCardPayment.

    :param payment_mode: The payment_mode of this CreditCardPayment.
    :type: str
    :raises ValueError: if a non-None value outside the allowed set is given
    """
    allowed_values = ["authorize", "capture"]
    # None is always accepted (it clears the field); otherwise the value
    # must be in the allowed set.
    acceptable = payment_mode is None or payment_mode in allowed_values
    if not acceptable:
        raise ValueError(
            "Invalid value for `payment_mode` ({0}), must be one of {1}"
            .format(payment_mode, allowed_values)
        )
    self._payment_mode = payment_mode
def match_via_correlation_coefficient(image, template, raw_tolerance=1, normed_tolerance=0.9):
    """ Matching algorithm based on 2-dimensional version of Pearson
    product-moment correlation coefficient.

    This is more robust in the case where the match might be scaled
    or slightly rotated.

    From experimentation, this method is less prone to false positives
    than the correlation method.
    """
    h, w = image.shape
    th, tw = template.shape
    # Subtracting the template mean makes the convolution below compute the
    # (un-normalised) numerator of the correlation coefficient.
    temp_mean = np.mean(template)
    temp_minus_mean = template - temp_mean
    # Flipping the template in both axes turns fftconvolve into a
    # cross-correlation of image and template.
    convolution = fftconvolve(image, temp_minus_mean[::-1,::-1])
    # Keep only positions where the template lies fully inside the image
    # (drop the 'full' convolution borders).
    convolution = convolution[th-1:h, tw-1:w]
    match_position_dict = get_tiles_at_potential_match_regions(image, template, convolution, method='correlation coefficient', raw_tolerance=raw_tolerance)
    # this is empty, so think condition is wrong
    results = normalise_correlation_coefficient(match_position_dict, convolution, template, normed_tolerance=normed_tolerance)
    return results
def match_positions(shape, list_of_coords):
    """
    In cases where we have multiple matches, each highlighted by a region of
    coordinates, we need to separate matches, and find mean of each to
    return as match position.

    :param shape: (rows, cols) shape of the image the coords index into
    :param list_of_coords: Nx2 array of (row, col) match coordinates
    :returns: list of (x, y) mean positions, one per connected match region
    """
    match_array = np.zeros(shape)
    try:
        # IndexError is raised here when list_of_coords is empty, i.e. no
        # matches were found.
        match_array[list_of_coords[:, 0], list_of_coords[:, 1]] = 1
        # Group touching coordinates into connected regions and take the
        # midpoint of each region's bounding box.
        labelled = label(match_array)
        objects = find_objects(labelled[0])
        coords = [{'x': (slice_x.start, slice_x.stop),
                   'y': (slice_y.start, slice_y.stop)}
                  for (slice_y, slice_x) in objects]
        final_positions = [(int(np.mean(c['x'])), int(np.mean(c['y'])))
                           for c in coords]
        return final_positions
    except IndexError:
        # Python 2 `print` statement replaced with the call form, which is
        # valid on both Python 2 and 3.
        print('no matches found')
        return []
def is_empty(self):
    """Return ``True`` when the form validates and no lookup of any kind
    (simple, complex, or extra) was supplied."""
    if not self.is_valid():
        return False
    has_any_condition = bool(self.simple_lookups
                             or self.complex_conditions
                             or self.extra_conditions)
    return not has_any_condition
def load_inventory(hosts_file=HOSTS_FILE):
    '''Loads Ansible inventory from file.

    Parameters
    ----------
    hosts_file: str, optional
        path to Ansible hosts file

    Returns
    -------
    ConfigParser.SafeConfigParser
        content of `hosts_file`; empty parser if the file does not exist
    '''
    # allow_no_value lets bare hostnames (lines without "= value") appear
    # in the inventory file.
    inventory = SafeConfigParser(allow_no_value=True)
    if os.path.exists(hosts_file):
        inventory.read(hosts_file)
    else:
        # logger.warn() is a deprecated alias for logger.warning().
        logger.warning('inventory file doesn\'t exist: %s', hosts_file)
    return inventory
def save_inventory(inventory, hosts_file=HOSTS_FILE):
    '''Write the given Ansible inventory out to `hosts_file`.

    Parameters
    ----------
    inventory: ConfigParser.SafeConfigParser
        content of the `hosts_file`
    hosts_file: str, optional
        path to Ansible hosts file
    '''
    # Truncate and rewrite the whole file; the context manager guarantees
    # the handle is closed.
    with open(hosts_file, 'w') as handle:
        inventory.write(handle)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _init_config(self, width, height, spi=None, spiMosi= None, spiDC=None, spiCS=None, spiReset=None, spiClk=None): """! SPI hardware and display width, height initialization. """
self._spi = spi self._spi_mosi = spiMosi self._spi_dc = spiDC self._spi_cs = spiCS self._spi_reset = spiReset self._spi_clk = spiClk self.width = width self.height = height
def _init_io(self):
    """! GPIO initialization.

    Set GPIO into BCM mode and init other IOs mode
    """
    # Suppress "channel already in use" warnings when re-initializing.
    GPIO.setwarnings(False)
    # BCM numbering: pins are addressed by Broadcom SoC channel number.
    GPIO.setmode( GPIO.BCM )
    # Only the data/command (DC) pin is configured as a plain output here.
    # NOTE(review): presumably the remaining SPI lines are driven by the SPI
    # peripheral itself — confirm against the display driver.
    pins = [ self._spi_dc ]
    for pin in pins:
        GPIO.setup( pin, GPIO.OUT )
def clear(self, fill = 0x00):
    """! Reset the frame buffer, filling every byte with ``fill``.

    @param fill byte value written across the whole buffer (default 0x00)
    """
    # One list entry per pixel; only the in-memory buffer is cleared,
    # nothing is pushed to the display here.
    buffer_length = self.width * self.height
    self._buffer = [fill] * buffer_length
def connect(self):
    """This method connects to RabbitMQ using a SelectConnection object,
    returning the connection handle.

    When the connection is established, the on_connection_open method
    will be invoked by pika.

    :rtype: pika.SelectConnection
    """
    count = 1
    no_of_servers = len(self._rabbit_urls)
    # Retry forever, rotating through the configured URLs.
    while True:
        # Round-robin index derived from the attempt count.
        server_choice = (count % no_of_servers) - 1
        self._url = self._rabbit_urls[server_choice]
        try:
            logger.info('Connecting', attempt=count)
            return pika.SelectConnection(pika.URLParameters(self._url),
                                         self.on_connection_open,
                                         stop_ioloop_on_close=False)
        except pika.exceptions.AMQPConnectionError:
            logger.exception("Connection error")
            count += 1
            # Linear back-off: the sleep grows with the attempt count.
            logger.error("Connection sleep", no_of_seconds=count)
            time.sleep(count)
            continue
def nack_message(self, delivery_tag, **kwargs):
    """Negative acknowledge a message

    :param int delivery_tag: The deliver tag from the Basic.Deliver frame
    """
    # Extra kwargs are forwarded to the structured logger only, not to pika.
    logger.info('Nacking message', delivery_tag=delivery_tag, **kwargs)
    # No requeue argument is passed; pika's basic_nack default applies
    # (requeue=True per the pika API docs).
    self._channel.basic_nack(delivery_tag)
def tx_id(properties):
    """
    Gets the tx_id for a message from a rabbit queue, using the
    message properties.

    Will raise KeyError if tx_id is missing from message headers.

    : param properties: Message properties
    : returns: tx_id of survey response
    : rtype: str
    """
    # KeyError propagates when the header is absent; TypeError when
    # headers itself is None.
    message_tx_id = properties.headers['tx_id']
    logger.info("Retrieved tx_id from message properties: tx_id={}".format(message_tx_id))
    return message_tx_id
def on_message(self, unused_channel, basic_deliver, properties, body):
    """Called on receipt of a message from a queue.

    Processes the message using the self._process method or function and
    positively acknowledges the queue if successful. If processing is not
    succesful, the message can either be rejected, quarantined or negatively
    acknowledged, depending on the failure mode.

    : param basic_deliver: AMQP basic.deliver method
    : param properties: Message properties
    : param body: Message body
    : returns: None
    """
    if self.check_tx_id:
        try:
            tx_id = self.tx_id(properties)
            logger.info('Received message', queue=self._queue,
                        delivery_tag=basic_deliver.delivery_tag,
                        app_id=properties.app_id, tx_id=tx_id)
        except KeyError as e:
            # Headers exist but carry no tx_id: unrecoverable, reject.
            self.reject_message(basic_deliver.delivery_tag)
            logger.error("Bad message properties - no tx_id",
                         action="rejected", exception=str(e))
            return None
        except TypeError as e:
            # properties.headers is None (no headers at all): reject.
            self.reject_message(basic_deliver.delivery_tag)
            logger.error("Bad message properties - no headers",
                         action="rejected", exception=str(e))
            return None
    else:
        logger.debug("check_tx_id is False. Not checking tx_id for message.",
                     delivery_tag=basic_deliver.delivery_tag)
        tx_id = None
    try:
        try:
            self.process(body.decode("utf-8"), tx_id)
        except TypeError:
            # TypeError here means the process callable was invoked with the
            # wrong signature — treated as quarantinable, not retryable.
            logger.error('Incorrect call to process method')
            raise QuarantinableError
        # Success path: positively acknowledge.
        self.acknowledge_message(basic_deliver.delivery_tag, tx_id=tx_id)
    except (QuarantinableError, BadMessageError) as e:
        # Throw it into the quarantine queue to be dealt with
        try:
            self.quarantine_publisher.publish_message(body,
                                                      headers={'tx_id': tx_id})
            self.reject_message(basic_deliver.delivery_tag, tx_id=tx_id)
            logger.error("Quarantinable error occured", action="quarantined",
                         exception=str(e), tx_id=tx_id)
        except PublishMessageError:
            # Could not quarantine: reject with requeue so it is retried.
            logger.error("Unable to publish message to quarantine queue. Rejecting message and requeuing.")
            self.reject_message(basic_deliver.delivery_tag, requeue=True,
                                tx_id=tx_id)
    except RetryableError as e:
        # Transient failure: negative-ack so the message can be redelivered.
        self.nack_message(basic_deliver.delivery_tag, tx_id=tx_id)
        logger.error("Failed to process", action="nack", exception=str(e),
                     tx_id=tx_id)
    except Exception as e:
        # Unknown failure: nack and log the full traceback.
        self.nack_message(basic_deliver.delivery_tag, tx_id=tx_id)
        logger.exception("Unexpected exception occurred")
        logger.error("Failed to process", action="nack", exception=str(e),
                     tx_id=tx_id)
def authenticate(self, username, password):
    """Authenticate against the ObjectRocket API.

    :param str username: The username to perform basic authentication
        against the API with.
    :param str password: The password to perform basic authentication
        against the API with.
    :returns: A token used for authentication against token protected
        resources.
    :rtype: str
    :raises errors.AuthFailure: on 401, unexpected status codes, or any
        error while parsing the response.
    """
    # Update the username and password bound to this instance for re-authentication needs.
    self._username = username
    self._password = password
    # Attempt to authenticate.
    resp = requests.get(
        self._url,
        auth=(username, password),
        **self._default_request_kwargs
    )
    # Attempt to extract authentication data.
    try:
        if resp.status_code == 200:
            json_data = resp.json()
            token = json_data['data']['token']
        elif resp.status_code == 401:
            raise errors.AuthFailure(resp.json().get('message', 'Authentication Failure.'))
        else:
            raise errors.AuthFailure(
                "Unknown exception while authenticating: '{}'".format(resp.text)
            )
    except errors.AuthFailure:
        # Our own failures pass through untouched.
        raise
    except Exception as ex:
        # Anything else (malformed JSON, missing keys, ...) is wrapped as
        # an AuthFailure after logging the original traceback.
        logging.exception(ex)
        raise errors.AuthFailure('{}: {}'.format(ex.__class__.__name__, ex))
    # Update the token bound to this instance for use by other client operations layers.
    self._token = token
    logger.info('New API token received: "{}".'.format(token))
    return token
def _refresh(self):
    """Re-authenticate with the stored credentials and return the new token.

    Convenience wrapper invoked automatically when authentication fails
    during normal client use.
    """
    # authenticate() also binds and logs the token itself; the assignment
    # and log below are kept for parity with the original behaviour.
    refreshed_token = self.authenticate(self._username, self._password)
    self._token = refreshed_token
    logger.info('New API token received: "{}".'.format(refreshed_token))
    return self._token
def _verify(self, token):
    """Verify that the given token is valid.

    :param str token: The API token to verify.
    :returns: The token's corresponding user model as a dict, or None if
        invalid.
    :rtype: dict
    """
    verify_url = '{}{}/'.format(self._url, 'verify')
    response = requests.post(
        verify_url,
        json={'token': token},
        **self._default_request_kwargs
    )
    # Anything other than HTTP 200 is treated as an invalid token.
    if response.status_code != 200:
        return None
    return response.json().get('data', None)
def preprocess(net, image):
    '''Convert an (H, W, C) RGB image into Caffe's (C, H, W) BGR layout and
    subtract the network's mean image.'''
    # Move the channel axis to the front, then reverse it (RGB -> BGR).
    channels_first = np.rollaxis(image, 2)
    bgr = channels_first[::-1]
    return np.float32(bgr) - net.transformer.mean["data"]
def make_step( net, step_size = 1.5, end = "inception_4c/output", jitter = 32, clip = True, objective = objective_L2 ):
    ''' basic gradient ascent step

    Performs one jittered gradient-ascent update of the network's input
    image so as to increase `objective` at layer `end`.
    '''
    src = net.blobs["data"]  # network input image blob
    dst = net.blobs[end]     # target layer whose activations are maximised
    # Random shift in [-jitter, jitter] for both spatial axes.
    ox, oy = np.random.randint(- jitter, jitter + 1, 2)
    # Apply jitter shift.
    src.data[0] = np.roll(np.roll(src.data[0], ox, -1), oy, -2)
    # Specify the optimisation objective.
    net.forward(end = end)
    objective(dst)
    net.backward(start = end)
    g = src.diff[0]
    # Apply normalised ascent step to the input image.
    src.data[:] += step_size / np.abs(g).mean() * g
    # Unshift the image.
    src.data[0] = np.roll(np.roll(src.data[0], - ox, - 1), -oy, -2)
    if clip:
        # Clamp pixels so that mean-added values stay within [0, 255].
        bias = net.transformer.mean["data"]
        src.data[:] = np.clip(src.data, -bias, 255-bias)
def deepdream( net, base_image, iter_n = 10, octave_n = 4, octave_scale = 1.4, end = "inception_4c/output", clip = True, **step_params ):
    ''' an ascent through different scales called "octaves"

    NOTE: Python 2 code (uses xrange).
    '''
    # Prepare base images for all octaves.
    octaves = [preprocess(net, base_image)]
    for i in xrange(octave_n-1):
        # Each successive octave is a downscaled copy of the previous one.
        octaves.append(
            nd.zoom(
                octaves[-1],
                (1, 1.0 / octave_scale, 1.0 / octave_scale),
                order = 1
            )
        )
    src = net.blobs["data"]
    # Allocate image for network-produced details.
    detail = np.zeros_like(octaves[-1])
    # Iterate from the smallest octave up to full resolution.
    for octave, octave_base in enumerate(octaves[::-1]):
        h, w = octave_base.shape[-2:]
        if octave > 0:
            # Upscale details from the previous octave.
            h1, w1 = detail.shape[-2:]
            detail = nd.zoom(
                detail,
                (1, 1.0 * h / h1, 1.0 * w/w1),
                order = 1
            )
        # Resize the network input image size.
        src.reshape(1, 3, h, w)
        src.data[0] = octave_base+detail
        for i in xrange(iter_n):
            make_step(net, end = end, clip = clip, **step_params)
            # visualisation
            vis = deprocess(net, src.data[0])
            # If clipping is disabled, adjust image contrast.
            if not clip:
                vis = vis*(255.0 / np.percentile(vis, 99.98))
            log.info("octave: {octave}, index: {index}, blob/layer: {end}, dimensions: {shape}".format(
                octave = octave,
                index = i,
                end = end,
                shape = vis.shape,
            ))
        # Extract details produced on the current octave.
        detail = src.data[0] - octave_base
    # Return the resulting image.
    return deprocess(net, src.data[0])
def main(param_path='parameters.txt'):
    """
    Entry point function for analysis based on parameter files.

    Parameters
    ----------
    param_path : str
        Path to user-generated parameter file
    """
    # Confirm parameters file is present
    # NOTE: Python 2 raise syntax — this module is Python 2 code.
    if not os.path.isfile(param_path):
        raise IOError, "Parameter file not found at %s" % param_path

    # Get raw params and base options (non-run-dependent options)
    params, base_options = _get_params_base_options(param_path)

    # Configure and start logging
    # Done here instead of in function so will affect all subsequent calls
    log_path = os.path.join(base_options['results_dir'], '_log.txt')
    if os.path.isfile(log_path):
        os.remove(log_path)
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    fileh = logging.FileHandler(log_path)
    fileh.setLevel(logging.DEBUG)
    filefmt = logging.Formatter(
        time.strftime("%Y/%m/%d %H:%M:%S %p", time.localtime()) +
        ' - %(name)s - %(levelname)s - %(message)s')
    fileh.setFormatter(filefmt)
    logging.getLogger('').addHandler(fileh)

    # Route uncaught exceptions into the log as CRITICAL entries.
    def log_uncaught(type1, value1, traceback1):
        tb_list = traceback.format_exception(type1, value1, traceback1)
        tb_str = ''.join(tb_list)
        logging.critical('\n\n'+tb_str)
    sys.excepthook = log_uncaught

    logging.info('Running macroeco')  # v%s' % __version__)
    logging.info('Parameters file at %s' % os.path.abspath(param_path))

    # Preliminary check for errors in parameters file
    bad_params = misc.check_parameter_file(param_path)
    if len(bad_params[0]) > 0:
        logging.warning("Possible formatting error(s) in" +
                        " %s: parameters %s on lines %s"
                        % (param_path, bad_params[0], bad_params[1]))

    logging.info('Starting analysis')

    # Do analysis for each run
    for run_name in base_options['run_names']:
        logging.info('Starting run %s' % run_name)
        options = dict(params[run_name])  # All parameters from this run
        options.update(base_options)      # Add base parameters
        options['run_dir'] = os.path.join(base_options['results_dir'], run_name)
        if 'format' in options['analysis']:
            _do_format(options)
        else:
            _do_analysis(options)
        logging.info('Finished run %s' % run_name)
    logging.info('Finished analysis successfully')
    # NOTE(review): `options` here is the *last* run's dict, so this logs
    # the last run's param_dir — confirm that is intended.
    logging.info('Results available at %s' % options['param_dir'])
    # Close logging - releases log file lock in Windows GUI
    logging.shutdown()
def _do_analysis(options):
    """
    Do analysis for a single run, as specified by options.

    Parameters
    ----------
    options : dict
        Option names and values for analysis
    """
    module = _function_location(options)
    core_results = _call_analysis_function(options, module)
    # Model fitting only applies to empirical analyses with models listed.
    fit_results = None
    if module == 'emp' and 'models' in options:
        fit_results = _fit_models(options, core_results)
    _save_results(options, module, core_results, fit_results)
def _call_analysis_function(options, module):
    """
    Call function from module and get result, using inputs from options

    Parameters
    ----------
    options : dict
        Option names and values for analysis
    module : str
        Short name of module within macroeco containing analysis function

    Returns
    -------
    dataframe, array, value, list of tuples
        Functions from emp module return a list of tuples in which first
        element of the tuple gives a string describing the result and the
        second element giving the result of the analysis as a dataframe.
        Functions in other modules return dataframe, array, or value.
    """
    args, kwargs = _get_args_kwargs(options, module)
    # NOTE(review): the call is assembled via eval() from strings taken from
    # the parameter file — safe only while parameter files are trusted input.
    return eval("%s.%s(*args, **kwargs)" % (module, options['analysis']))
def _emp_extra_options(options):
    """
    Get special options patch, cols, and splits if analysis in emp module

    Validates the metadata path, builds the `patch` object, and guarantees
    that 'cols' and 'splits' keys exist (defaulting to empty strings).
    """
    # Check that metadata is valid
    metadata_path = os.path.normpath(os.path.join(options['param_dir'],
                                                  options['metadata']))
    if not os.path.isfile(metadata_path):
        # Parenthesized raise form is valid on both Python 2 and 3
        # (the original `raise IOError, msg` is Python 2 only).
        raise IOError("Path to metadata file %s is invalid." % metadata_path)
    options['metadata_path'] = metadata_path

    # Using subset if given, create and store patch
    subset = options.get('subset', '')
    options['patch'] = emp.Patch(metadata_path, subset)

    # If cols or splits not given in options, make empty strings
    for key in ('cols', 'splits'):
        if key not in options:
            options[key] = ''

    return options
def _fit_models(options, core_results):
    """
    Fit models to empirical result from a function in emp module

    Parameters
    ----------
    options : dict
        Option names and values for analysis
    core_results : list of tuples
        Output of function in emp

    Returns
    -------
    list of dicts
        One dict per subset, keyed by model name; each value is a list of
        fitted parameters (tuple), values (array), comparison statistic
        names (list), and comparison statistic values (list).

    Notes
    -----
    To determine if the empirical result refers to a curve or a
    distribution, the result dataframe is inspected for a column 'x', which
    indicates a curve.
    """
    logging.info("Fitting models")
    # Model names arrive as a ';'-separated string, spaces ignored.
    model_names = options['models'].replace(' ', '').split(';')
    # TODO: Make work for 2D results, i.e., curves, comm_sep, o_ring
    # TODO: Make work for curves in general (check if 'x' present in core_res)
    all_results = []
    for subset_result in core_results:  # Each subset
        per_model = {}
        for model_name in model_names:
            fits = _get_fits(subset_result, model_name, options)
            values = _get_values(subset_result, model_name, fits)
            stat_names, stats = _get_comparison_stat(subset_result, values,
                                                     model_name, fits)
            per_model[model_name] = [fits, values, stat_names, stats]
        all_results.append(per_model)
    return all_results
def _save_results(options, module, core_results, fit_results):
    """
    Save results of analysis as tables and figures

    Parameters
    ----------
    options : dict
        Option names and values for analysis
    module : str
        Module that contained function used to generate core_results
    core_results : dataframe, array, value, list of tuples
        Results of main analysis
    fit_results : list or None
        Results of comparing emp analysis to models, None if not applicable
    """
    logging.info("Saving all results")

    # Use custom plot format
    mpl.rcParams.update(misc.rcparams.ggplot_rc)

    # Make run directory
    # NOTE(review): os.makedirs raises if the directory already exists.
    os.makedirs(options['run_dir'])

    # Write core results
    _write_core_tables(options, module, core_results)

    # Write additional results if analysis from emp
    if module == 'emp':
        _write_subset_index_file(options, core_results)

    # Write model/data comparison if models were given
    if fit_results:
        models = options['models'].replace(' ','').split(';')
        for i, core_result in enumerate(core_results):
            _write_fitted_params(i, models, options, fit_results)
            _write_test_statistics(i, models, options, fit_results)
            _write_comparison_plot_table(i, models, options,
                                         core_results, fit_results)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _write_subset_index_file(options, core_results): """ Write table giving index of subsets, giving number and subset string """
f_path = os.path.join(options['run_dir'], '_subset_index.csv') subset_strs = zip(*core_results)[0] index = np.arange(len(subset_strs)) + 1 df = pd.DataFrame({'subsets': subset_strs}, index=index) df.to_csv(f_path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _pad_plot_frame(ax, pad=0.01): """ Provides padding on sides of frame equal to pad fraction of plot """
xmin, xmax = ax.get_xlim() ymin, ymax = ax.get_ylim() xr = xmax - xmin yr = ymax - ymin ax.set_xlim(xmin - xr*pad, xmax + xr*pad) ax.set_ylim(ymin - yr*pad, ymax + yr*pad) return ax
def _output_cdf_plot(core_result, spid, models, options, fit_results):
    """Function for plotting cdf"""
    # CDF
    x = core_result['y'].values
    df = emp.empirical_cdf(x)
    df.columns = ['x', 'empirical']

    # Evaluates each fitted model's CDF at the empirical x values; `shapes`
    # are the fitted parameters for that model.
    def calc_func(model, df, shapes):
        return eval("mod.%s.cdf(df['x'], *shapes)" % model)

    # Step plot of the empirical CDF, with the y axis capped at 1.
    plot_exec_str = "ax.step(df['x'], emp, color='k', lw=3);ax.set_ylim(top=1)"

    _save_table_and_plot(spid, models, options, fit_results, 'data_pred_cdf',
                         df, calc_func, plot_exec_str)
def openOnlyAccel(self, cycleFreq = 0x00):
    """! Turn the device into Accelerometer Only Low Power Mode.

    @param cycleFreq wake-up frequency; one of:
        @see VAL_PWR_MGMT_2_LP_WAKE_CTRL_1_25HZ (default)
        @see VAL_PWR_MGMT_2_LP_WAKE_CTRL_5HZ
        @see VAL_PWR_MGMT_2_LP_WAKE_CTRL_20HZ
        @see VAL_PWR_MGMT_2_LP_WAKE_CTRL_40HZ
    """
    # Accelerometer stays on; gyroscope and temperature sensor are powered
    # down and duty-cycled sampling is enabled at the requested frequency.
    self.openWith(
        accel=True,
        gyro=False,
        temp=False,
        cycle=True,
        cycleFreq=cycleFreq,
    )
def setMotionInt(self, motDHPF = 0x01, motTHR = 0x14, motDUR = 0x30, motDeteDec = 0x15 ):
    """! Set to enable Motion Detection Interrupt

    @param motDHPF Set the Digital High Pass Filter. Default is 0x01 (5Hz)
    @param motTHR Desired motion threshold. Default is 20 (0x14)
    @param motDUR Desired motion duration. Default is 48ms (0x30)
    @param motDeteDec Motion detection decrement. Default is 21 (0x15)

    @note <b>motDHPF</b> should be one of the following values:<br>
        0x00: RESET,<br>
        0x01: 5Hz,<br>
        0x02: 2.5Hz,<br>
        0x03: 1.25Hz,<br>
        0x04: 0.63Hz,<br>
        0x07: HOLD<br>
    """
    #After power on (0x00 to register (decimal) 107), the Motion Detection Interrupt can be enabled as follows:
    #self._sendCmd( self.REG_PWR_MGMT_1, 0x00 )
    #(optionally?) Reset all internal signal paths in the MPU-6050 by writing 0x07 to register 0x68;
    self._sendCmd( self.REG_SIGNAL_PATH_RESET, 0x07 )
    #write register 0x37 to select how to use the interrupt pin.
    #For an active high, push-pull signal that stays until register
    #(decimal) 58 is read, write 0x20 (need read to clear INT state) or 0x00 (auto clear INT state).
    self._sendCmd( self.REG_INT_PIN_CFG, 0x00 )
    # (x | 0xE7) ^ 0xE7 zeroes every bit covered by mask 0xE7, keeping only
    # bits 4:3 of the current config, then ORs in the requested HPF setting.
    orgAccelConf = self._readByte(self.REG_ACCEL_CONFIG)
    newAccelConf = ( (orgAccelConf | 0xE7) ^ 0xE7 ) | motDHPF
    # Write register 28 (==0x1C) to set the Digital High Pass Filter,
    # bits 3:0. For example set it to 0x01 for 5Hz.
    # (These 3 bits are grey in the data sheet, but they are used!
    # Leaving them 0 means the filter always outputs 0.)
    #
    # 0x00: RESET,
    # 0x01: 5Hz,
    # 0x02: 2.5Hz,
    # 0x03: 1.25Hz,
    # 0x04: 0.63Hz,
    # 0x07: hold
    #
    # High-pass filter sensitivity adjustment  (translated from Chinese)
    self._sendCmd( self.REG_ACCEL_CONFIG, newAccelConf )
    #Write the desired Motion threshold to register 0x1F (For example, write decimal 20).
    self._sendCmd( self.REG_MOTION_DET, motTHR )
    #To register 0x20 (hex), write the desired motion duration, for example 40ms (0x28).
    # Sensitivity adjustment  (translated from Chinese)
    self._sendCmd( self.REG_MOTION_DET_DUR, motDUR )
    #to register 0x69, write the motion detection decrement and
    #a few other settings (for example write 0x15 to set both
    #free-fall and motion decrements to 1 and accelerome0x00ter
    #start-up delay to 5ms total by adding 1ms. )
    self._sendCmd( self.REG_MOTION_DET_CTRL, motDeteDec )
    #write register 0x38, bit 6 (0x40), to enable motion detection interrupt.
    self._sendCmd( self.REG_INT_ENABLE, self.VAL_INT_ENABLE_MOTION )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readAccelRange( self ): """! Reads the range of accelerometer setup. @return an int value. It should be one of the following values: @see ACCEL_RANGE_2G @see ACCEL_RANGE_4G @see ACCEL_RANGE_8G @see ACCEL_RANGE_16G """
raw_data = self._readByte(self.REG_ACCEL_CONFIG) raw_data = (raw_data | 0xE7) ^ 0xE7 return raw_data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getAccelData( self, raw = False ): """! Gets and returns the X, Y and Z values from the accelerometer. @param raw If raw is True, it will return the data in m/s^2,<br> If raw is False, it will return the data in g @return a dictionary with the measurement results or Boolean. @retval False means 'Unkown accel range', that you need to check the "accel range" configuration @note Result data format: {"x":0.45634,"y":0.2124,"z":1.334} """
x = self._readWord(self.REG_ACCEL_XOUT_H) y = self._readWord(self.REG_ACCEL_YOUT_H) z = self._readWord(self.REG_ACCEL_ZOUT_H) accel_scale_modifier = None accel_range = self.readAccelRange() if accel_range == self.ACCEL_RANGE_2G: accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_2G elif accel_range == self.ACCEL_RANGE_4G: accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_4G elif accel_range == self.ACCEL_RANGE_8G: accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_8G elif accel_range == self.ACCEL_RANGE_16G: accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_16G else: print( "ERROR: Unkown accel range!" ) return False #accel_scale_modifier = self.ACCEL_SCALE_MODIFIER_2G x = x / accel_scale_modifier y = y / accel_scale_modifier z = z / accel_scale_modifier if raw == True: return { 'x': x, 'y': y, 'z': z } elif raw == False: return { 'x': x * self._gravityFactor, 'y': y * self._gravityFactor, 'z': z * self._gravityFactor }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def readGyroRange( self ): """! Read range of gyroscope. @return an int value. It should be one of the following values (GYRO_RANGE_250DEG) @see GYRO_RANGE_250DEG @see GYRO_RANGE_500DEG @see GYRO_RANGE_1KDEG @see GYRO_RANGE_2KDEG """
raw_data = self._readByte( self.REG_GYRO_CONFIG ) raw_data = (raw_data | 0xE7) ^ 0xE7 return raw_data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getGyroData(self): """! Gets and returns the X, Y and Z values from the gyroscope @return a dictionary with the measurement results or Boolean. @retval False means 'Unkown gyroscope range', that you need to check the "gyroscope range" configuration @note Result data format: {"x":0.45634,"y":0.2124,"z":1.334} """
x = self._readWord(self.REG_GYRO_XOUT_H) y = self._readWord(self.REG_GYRO_YOUT_H) z = self._readWord(self.REG_GYRO_ZOUT_H) gyro_scale_modifier = None gyro_range = self.readGyroRange() if gyro_range == self.GYRO_RANGE_250DEG: gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_250DEG elif gyro_range == self.GYRO_RANGE_500DEG: gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_500DEG elif gyro_range == self.GYRO_RANGE_1KDEG: gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_1KDEG elif gyro_range == self.GYRO_RANGE_2KDEG: gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_2KDEG else: print("ERROR: Unkown gyroscope range!") return False #gyro_scale_modifier = self.GYRO_SCALE_MODIFIER_250DEG x = x / gyro_scale_modifier y = y / gyro_scale_modifier z = z / gyro_scale_modifier return {'x': x, 'y': y, 'z': z}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getAllData(self, temp = True, accel = True, gyro = True): """! Get all the available data. @param temp: True - Allow to return Temperature data @param accel: True - Allow to return Accelerometer data @param gyro: True - Allow to return Gyroscope data @return a dictionary data @retval {} Did not read any data @retval {"temp":32.3,"accel":{"x":0.45634,"y":0.2124,"z":1.334},"gyro":{"x":0.45634,"y":0.2124,"z":1.334}} Returned all data """
allData = {} if temp: allData["temp"] = self.getTemp() if accel: allData["accel"] = self.getAccelData( raw = False ) if gyro: allData["gyro"] = self.getGyroData() return allData
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def repeater(pipe, how_many=2): ''' this function repeats each value in the pipeline however many times you need ''' r = range(how_many) for i in pipe: for _ in r: yield i
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def kld(p1, p2): """Compute Kullback-Leibler divergence between p1 and p2. It assumes that p1 and p2 are already normalized that each of them sums to 1. """
return np.sum(np.where(p1 != 0, p1 * np.log(p1 / p2), 0))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def jsd(p1, p2): """Compute Jensen-Shannon divergence between p1 and p2. It assumes that p1 and p2 are already normalized that each of them sums to 1. """
m = (p1 + p2) / 2 return (kld(p1, m) + kld(p2, m)) / 2
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def njsd(network, ref_gene_expression_dict, query_gene_expression_dict, gene_set): """Calculate Jensen-Shannon divergence between query and reference gene expression profile. """
gene_jsd_dict = dict() reference_genes = ref_gene_expression_dict.keys() assert len(reference_genes) != 'Reference gene expression profile should have > 0 genes.' for gene in gene_set: if gene not in network.nodes: continue neighbors = find_neighbors(network, gene) query_expression_vec = get_neighbor_expression_vector(neighbors, query_gene_expression_dict) ref_expression_vec = get_neighbor_expression_vector(neighbors, ref_gene_expression_dict) assert len(query_expression_vec) == len(ref_expression_vec), 'Topology of reference network and query network differs. Please check.' # A gene which has non-expressed neighbors is ignored. if np.sum(query_expression_vec) == 0 and np.sum(ref_expression_vec) == 0: continue query_p_vec = exp2prob(query_expression_vec) ref_p_vec = exp2prob(ref_expression_vec) gene_jsd_dict[gene] = jsd(query_p_vec, ref_p_vec) return np.mean(list(gene_jsd_dict.values()))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lookupProcessor(name): """Lookup processor class object by its name"""
if name in _proc_lookup: return _proc_lookup[name] else: error_string = 'If you are creating a new processor, please read the\ documentation on creating a new processor' raise LookupError("Unknown processor %s\n%s" % (name, error_string))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def serialize(self, value, entity=None, request=None): """ Validate and serialize the value. This is the default implementation """
ret = self.from_python(value) self.validate(ret) self.run_validators(value) return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def side_task(pipe, *side_jobs): ''' allows you to run a function in a pipeline without affecting the data ''' # validate the input assert iterable(pipe), 'side_task needs the first argument to be iterable' for sj in side_jobs: assert callable(sj), 'all side_jobs need to be functions, not {}'.format(sj) # add a pass through function to side_jobs side_jobs = (lambda i:i ,) + side_jobs # run the pipeline for i in map(pipe, *side_jobs): yield i[0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _connect(self): """Connects to the ec2 cloud provider :return: :py:class:`boto.ec2.connection.EC2Connection` :raises: Generic exception on error """
# check for existing connection if self._ec2_connection: return self._ec2_connection if not self._vpc: vpc_connection = None try: log.debug("Connecting to ec2 host %s", self._ec2host) region = ec2.regioninfo.RegionInfo(name=self._region_name, endpoint=self._ec2host) # connect to webservice ec2_connection = boto.connect_ec2( aws_access_key_id=self._access_key, aws_secret_access_key=self._secret_key, is_secure=self._secure, host=self._ec2host, port=self._ec2port, path=self._ec2path, region=region) log.debug("EC2 connection has been successful.") if self._vpc: vpc_connection = boto.connect_vpc( aws_access_key_id=self._access_key, aws_secret_access_key=self._secret_key, is_secure=self._secure, host=self._ec2host, port=self._ec2port, path=self._ec2path, region=region) log.debug("VPC connection has been successful.") for vpc in vpc_connection.get_all_vpcs(): log.debug("Checking whether %s matches %s/%s" % (self._vpc, vpc.tags['Name'], vpc.id)) if self._vpc in [vpc.tags['Name'], vpc.id]: self._vpc_id = vpc.id if self._vpc != self._vpc_id: log.debug("VPC %s matches %s" % (self._vpc, self._vpc_id)) break else: raise VpcError('VPC %s does not exist.' % self._vpc) # list images to see if the connection works # images = self._ec2_connection.get_all_images() # log.debug("%d images found on cloud %s", # len(images), self._ec2host) except Exception as e: log.error("connection to ec2 could not be " "established: message=`%s`", str(e)) raise self._ec2_connection, self._vpc_connection = ( ec2_connection, vpc_connection) return self._ec2_connection
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def split(pipe, splitter, skip_empty=False): ''' this function works a lot like groupby but splits on given patterns, the same behavior as str.split provides. if skip_empty is True, split only yields pieces that have contents Example: splitting 1011101010101 by 10 returns ,11,,,,1 Or if skip_empty is True splitting 1011101010101 by 10 returns 11,1 ''' splitter = tuple(splitter) len_splitter = len(splitter) pipe=iter(pipe) current = deque() tmp = [] windowed = window(pipe, len(splitter)) for i in windowed: if i == splitter: skip(windowed, len(splitter)-1) yield list(current) current.clear() tmp = [] else: current.append(i[0]) tmp = i if len(current) or len(tmp): yield list(chain(current,tmp))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def query_tracking_code(tracking_code, year=None): """ Given a tracking_code return a list of events related the tracking code """
payload = { 'Anio': year or datetime.now().year, 'Tracking': tracking_code, } response = _make_request(TRACKING_URL, payload) if not response['d']: return [] data = response['d'][0] destination = data['RetornoCadena6'] payload.update({ 'Destino': destination, }) response = _make_request(TRACKING_DETAIL_URL, payload) return _process_detail(response['d'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def comments_nb_counts(): """Get number of comments for the record `recid`."""
recid = request.view_args.get('recid') if recid is None: return elif recid == 0: return 0 else: return CmtRECORDCOMMENT.count(*[ CmtRECORDCOMMENT.id_bibrec == recid, CmtRECORDCOMMENT.star_score == 0, CmtRECORDCOMMENT.status.notin_(['dm', 'da']) ])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decide_k(airport_code): """A function to decide if a leading 'K' is throwing off an airport match and return the correct code."""
if airport_code[:1].upper() == 'K': try: # if there's a match without the K that's likely what it is. return Airport.objects.get(location_identifier__iexact=airport_code[1:]).location_identifier except Airport.DoesNotExist: return airport_code else: return airport_code
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_date(datestring): """Attepmts to parse an ISO8601 formatted ``datestring``. Returns a ``datetime.datetime`` object. """
datestring = str(datestring).strip() if not datestring[0].isdigit(): raise ParseError() if 'W' in datestring.upper(): try: datestring = datestring[:-1] + str(int(datestring[-1:]) -1) except: pass for regex, pattern in DATE_FORMATS: if regex.match(datestring): found = regex.search(datestring).groupdict() dt = datetime.utcnow().strptime(found['matched'], pattern) if 'fraction' in found and found['fraction'] is not None: dt = dt.replace(microsecond=int(found['fraction'][1:])) if 'timezone' in found and found['timezone'] is not None: dt = dt.replace(tzinfo=Timezone(found.get('timezone', ''))) return dt return parse_time(datestring)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_time(timestring): """Attepmts to parse an ISO8601 formatted ``timestring``. Returns a ``datetime.datetime`` object. """
timestring = str(timestring).strip() for regex, pattern in TIME_FORMATS: if regex.match(timestring): found = regex.search(timestring).groupdict() dt = datetime.utcnow().strptime(found['matched'], pattern) dt = datetime.combine(date.today(), dt.time()) if 'fraction' in found and found['fraction'] is not None: dt = dt.replace(microsecond=int(found['fraction'][1:])) if 'timezone' in found and found['timezone'] is not None: dt = dt.replace(tzinfo=Timezone(found.get('timezone', ''))) return dt raise ParseError()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def connect(self): """connect to the database **Return:** - ``dbConn`` -- the database connection See the class docstring for usage """
self.log.debug('starting the ``get`` method') dbSettings = self.dbSettings port = False if "tunnel" in dbSettings and dbSettings["tunnel"]: port = self._setup_tunnel( tunnelParameters=dbSettings["tunnel"] ) # SETUP A DATABASE CONNECTION host = dbSettings["host"] user = dbSettings["user"] passwd = dbSettings["password"] dbName = dbSettings["db"] dbConn = ms.connect( host=host, user=user, passwd=passwd, db=dbName, port=port, use_unicode=True, charset='utf8', local_infile=1, client_flag=ms.constants.CLIENT.MULTI_STATEMENTS, connect_timeout=36000, max_allowed_packet=51200000 ) if self.autocommit: dbConn.autocommit(True) self.log.debug('completed the ``get`` method') return dbConn
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def map_(cache: Mapping[Domain, Range]) -> Operator[Map[Domain, Range]]: """ Returns decorator that calls wrapped function if nothing was found in cache for its argument. Wrapped function arguments should be hashable. """
def wrapper(function: Map[Domain, Range]) -> Map[Domain, Range]: @wraps(function) def wrapped(argument: Domain) -> Range: try: return cache[argument] except KeyError: return function(argument) return wrapped return wrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def updatable_map(cache: MutableMapping[Domain, Range]) -> Operator[Map]: """ Returns decorator that calls wrapped function if nothing was found in cache for its argument and reuses result afterwards. Wrapped function arguments should be hashable. """
def wrapper(function: Map[Domain, Range]) -> Map[Domain, Range]: @wraps(function) def wrapped(argument: Domain) -> Range: try: return cache[argument] except KeyError: result = function(argument) cache[argument] = result return result return wrapped return wrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def property_(getter: Map[Domain, Range]) -> property: """ Returns property that calls given getter on the first access and reuses result afterwards. Class instances should be hashable and weak referenceable. """
return property(map_(WeakKeyDictionary())(getter))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_context_data(self, **kwargs): """Tests cookies. """
self.request.session.set_test_cookie() if not self.request.session.test_cookie_worked(): messages.add_message( self.request, messages.ERROR, "Please enable cookies.") self.request.session.delete_test_cookie() return super().get_context_data(**kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print(root): # type: (Union[Nonterminal,Terminal,Rule])-> str """ Transform the parsed tree to the string. Expects tree like structure. You can see example output below. (R)SplitRules26 |--(N)Iterate | `--(R)SplitRules30 | `--(N)Symb | `--(R)SplitRules4 | `--(T)e `--(N)Concat `--(R)SplitRules27 `--(N)Iterate `--(R)SplitRules30 `--(N)Symb `--(R)SplitRules5 `--(T)f :param root: Root node of the parsed tree. :return: String representing the parsed tree (ends with newline). """
# print the part before the element def print_before(previous=0, defined=None, is_last=False): defined = defined or {} ret = '' if previous != 0: for i in range(previous - 1): # if the column is still active write | if i in defined: ret += '| ' # otherwise just print space else: ret += ' ' # if is current element last child, don't print |-- but `-- instead ret += '`--' if is_last else '|--' return ret # print the terminal def terminal_traverse(term, callback, previous=0, defined=None, is_last=False): before = print_before(previous, defined, is_last) yield before + '(T)' + str(term.s) + '\n' # print the nonterminal def nonterminal_traverse(nonterm, callback, previous=0, defined=None, is_last=False): before = print_before(previous, defined, is_last) yield before + '(N)' + nonterm.__class__.__name__ + '\n' yield callback(nonterm.to_rule, previous + 1, defined, True) # print the rule def rule_traverse(rule, callback, previous=0, defined=None, is_last=False): # print the rule name before = print_before(previous, defined, is_last) yield before + '(R)' + rule.__class__.__name__ + '\n' # register new column defined = defined or set() defined.add(previous) # print all childs except the last one for i in range(len(rule.to_symbols) - 1): yield callback(rule.to_symbols[i], previous + 1, defined, False) # unregister the column as last child print it automatically defined.remove(previous) yield callback(rule.to_symbols[-1], previous + 1, defined, True) res = Traversing.traverse_separated(root, rule_traverse, nonterminal_traverse, terminal_traverse) return str.join("", res)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_filter(self): """ Get FilterForm instance. """
return self.filter_form_cls(self.request.GET, runtime_context=self.get_runtime_context(), use_filter_chaining=self.use_filter_chaining)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_context_data(self, **kwargs): """ Add filter form to the context. TODO: Currently we construct the filter form object twice - in get_queryset and here, in get_context_data. Will need to figure out a good way to eliminate extra initialization. """
context = super(FilterFormMixin, self).get_context_data(**kwargs) context[self.context_filterform_name] = self.get_filter() return context
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def compile_to_python(exp, env, done=None): '''assemble steps from dao expression to python code''' original_exp = exp compiler = Compiler() if done is None: done = il.Done(compiler.new_var(il.ConstLocalVar('v'))) compiler.exit_block_cont_map = {} compiler.continue_block_cont_map = {} compiler.protect_cont = done if env is None: env = Environment() exp = element(exp) exp = exp.alpha(env, compiler) exp = exp.cps(compiler, done) exp.analyse(compiler) env = Environment() exp = exp.optimize(env, compiler) #exp = exp.tail_recursive_convert() function = compiler.new_var(il.ConstLocalVar('compiled_dao_function')) exp = il.Function(function, (), exp) exp = il.begin(*exp.pythonize(env, compiler)[0]) if isinstance(exp, il.Begin): exp = exp.statements[0] exp.body = exp.body.replace_return_with_yield() compiler = Compiler() result = exp.to_code(compiler) return prelude + result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def last(pipe, items=1): ''' this function simply returns the last item in an iterable ''' if items == 1: tmp=None for i in pipe: tmp=i return tmp else: return tuple(deque(pipe, maxlen=items))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_help(filename, table, dest=sys.stdout): """ Print help to the given destination file object. """
cmds = '|'.join(sorted(table.keys())) print >> dest, "Syntax: %s %s [args]" % (path.basename(filename), cmds)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dispatch(table, args): """ Dispatches to a function based on the contents of `args`. """
# No arguments: print help. if len(args) == 1: print_help(args[0], table) sys.exit(0) # Bad command or incorrect number of arguments: print help to stderr. if args[1] not in table or len(args) != len(table[args[1]]) + 1: print_help(args[0], table, dest=sys.stderr) sys.exit(1) # Cast all the arguments to fit their function's signature to ensure # they're correct and to make them safe for consumption. sig = table[args[1]] try: fixed_args = [type_(arg) for arg, type_ in zip(args[2:], sig[1:])] except TypeError: # If any are wrong, complain to stderr. print_help(args[0], table, dest=sys.stderr) sys.exit(1) # Dispatch the call to the correct function. sig[0](*fixed_args)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_all(s, sub, start=0, end=0, limit=-1, reverse=False): """ Find all indexes of sub in s. :param s: the string to search :param sub: the string to search for :param start: the index in s at which to begin the search (same as in ''.find) :param end: the index in s at which to stop searching (same as in ''.find) :param limit: the maximum number of matches to find :param reverse: if False search s forwards; otherwise search backwards :return: all occurrences of substring sub in string s """
indexes = [] if not bool(s and sub): return indexes lstr = len(s) if lstr <= start: return indexes lsub = len(sub) if lstr < lsub: return indexes if limit == 0: return indexes elif limit < 0: limit = lstr end = min(end, lstr) or lstr idx = s.rfind(sub, start, end) if reverse else s.find(sub, start, end) while idx != -1: indexes.append(idx) if reverse: idx = s.rfind(sub, start, idx - lstr) else: idx = s.find(sub, idx + lsub, end) if len(indexes) >= limit: break return indexes
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_substructure(data, path): """ Tries to retrieve a sub-structure within some data. If the path does not match any sub-structure, returns None. [1, 2, [{'f': [57]}], 4] [1, 2, [{'f': [57]}], 4] 57 None @param data: a container @type data: str|dict|list|(an indexable container) @param path: location of the data @type path: list|str @rtype: * """
if not len(path): return data try: return get_substructure(data[path[0]], path[1:]) except (TypeError, IndexError, KeyError): return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def iterable(target): ''' returns true if the given argument is iterable ''' if any(i in ('next', '__next__', '__iter__') for i in dir(target)): return True else: try: iter(target) return True except: return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _thread_worker(self): """Process callbacks from the queue populated by &listen."""
while self._running: # Retrieve next cmd, or block packet = self._queue.get(True) if isinstance(packet, dict) and QS_CMD in packet: try: self._callback_listen(packet) except Exception as err: # pylint: disable=broad-except _LOGGER.error("Exception in callback\nType: %s: %s", type(err), err) self._queue.task_done()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _thread_listen(self): """The main &listen loop."""
while self._running: try: rest = requests.get(URL_LISTEN.format(self._url), timeout=self._timeout) if rest.status_code == 200: self._queue.put(rest.json()) else: _LOGGER.error('QSUSB response code %s', rest.status_code) sleep(30) # Received for "Read timed out" and "Connection refused" except requests.exceptions.ConnectionError as err: if str(err).find('timed') > 0: # "Read timedout" update self._queue.put({QS_CMD: CMD_UPDATE}) else: # "Connection refused" QSUSB down _LOGGER.error(str(err)) sleep(60) except Exception as err: # pylint: disable=broad-except _LOGGER.error("%s - %s", str(type(err)), str(err)) sleep(5) self._queue.put({})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def hsla_to_rgba(h, s, l, a): """ 0 <= H < 360, 0 <= s,l,a < 1 """
h = h % 360 s = max(0, min(1, s)) l = max(0, min(1, l)) a = max(0, min(1, a)) c = (1 - abs(2*l - 1)) * s x = c * (1 - abs(h/60%2 - 1)) m = l - c/2 if h<60: r, g, b = c, x, 0 elif h<120: r, g, b = x, c, 0 elif h<180: r, g, b = 0, c, x elif h<240: r, g, b = 0, x, c elif h<300: r, g, b = x, 0, c else: r, g, b = c, 0, x return (int((r+m)*255), int((g+m)*255), int((b+m)*255), int(a*255))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def dir_list(directory): '''Returns the list of all files in the directory.''' try: content = listdir(directory) return content except WindowsError as winErr: print("Directory error: " + str((winErr)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def read_dir(directory): '''Returns the text of all files in a directory.''' content = dir_list(directory) text = '' for filename in content: text += read_file(directory + '/' + filename) text += ' ' return text
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def colorize(occurence,maxoccurence,minoccurence): '''A formula for determining colors.''' if occurence == maxoccurence: color = (255,0,0) elif occurence == minoccurence: color = (0,0,255) else: color = (int((float(occurence)/maxoccurence*255)),0,int(float(minoccurence)/occurence*255)) return color
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def fontsize(count, maxsize, minsize, maxcount):
    '''A formula for determining font sizes.'''
    # Linear scaling of the font size with the word count, clamped below
    # by the minimum readable size.
    scaled = int(maxsize - (maxsize) * (float(maxcount - count) / maxcount))
    return max(scaled, minsize)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _init_display(self): """! \~english Initialize the SSD1306 display chip \~chinese 初始化SSD1306显示芯片 """
# Full SSD1306 power-on configuration, issued as a single command burst.
# The panel is configured while the display is off; the caller is expected
# to turn the display on afterwards.  Order of commands is significant.
self._command([
    # 0xAE: display off during (re)configuration
    self.CMD_SSD1306_DISPLAY_OFF,
    # Stop any scroll that may still be active
    self.CMD_SSD1306_SET_SCROLL_DEACTIVE,
    # 0xA8: set multiplex ratio to 0x3F
    self.CMD_SSD1306_SET_MULTIPLEX_RATIO, 0x3F,
    # 0xD3: no vertical display offset
    self.CMD_SSD1306_SET_DISPLAY_OFFSET, 0x00,
    # 0x40: map RAM display start line to 0 (range 0x00~0x3F)
    self.CMD_SSD1306_SET_DISPLAY_START_LINE,
    # 0xDA: COM pins hardware configuration (0x00/0x01/0x02 variants)
    self.CMD_SSD1306_SET_COM_PINS, (0x02 | 0x10),
    # Contrast: 0x7F is mid-range
    self.CMD_SSD1306_SET_CONTRAST, 0x7F,
    # 0xA4: resume display from RAM content ("entire display on" disabled)
    self.CMD_SSD1306_ENTIRE_DISPLAY_ON_0,
    # 0xA6: normal (non-inverted) display
    self.CMD_SSD1306_NORMAL_DISPLAY,
    # 0xA7: inverse display -- intentionally left disabled
    # CMD_SSD1306_INVERSE_DISPLAY,
    # 0xD5: oscillator frequency / clock divide ratio
    self.CMD_SSD1306_SET_CLOCK_DIVIDE_RATIO, 0x80,
    # 0x8D: enable the internal DC/DC charge pump regulator (0x14)
    self.CMD_SSD1306_CHARGE_PUMP, 0x14,
    # 0x20: memory addressing mode; 0x01 selects one of (0x00/0x01/0x02)
    self.CMD_SSD1306_SET_MEM_ADDR_MODE, 0x01,
    # 0xC0/0xC8: COM output scan direction implements vertical mirroring
    self.CMD_SSD1306_SCAN_DIRECTION_INC if self._mirror_v else self.CMD_SSD1306_SCAN_DIRECTION_DEC,
    # 0xA0/0xA1: segment re-map implements horizontal mirroring
    # (0xA0 = left to right, 0xA1 = right to left)
    self.CMD_SSD1306_SET_SEGMENT_REMAP_0 if self._mirror_h else self.CMD_SSD1306_SET_SEGMENT_REMAP_1,
])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def display(self, buffer = None): """! \~english Write buffer to physical display. @param buffer: Data to display,If <b>None</b> mean will use self._buffer data to display \~chinese 将缓冲区写入物理显示屏。 @param buffer: 要显示的数据,如果是 <b>None</b>(默认) 将把 self._buffer 数据写入物理显示屏 """
def display(self, buffer = None):
    """Write a frame buffer to the physical display.

    @param buffer: data to show; if ``None`` (default) the internal
        ``self._buffer`` is written instead.
    """
    # BUG FIX: use an identity check.  ``buffer != None`` invokes the
    # buffer's ``__ne__``, which for array-like buffers (e.g. numpy
    # arrays) is element-wise and makes the ``if`` raise; ``is not None``
    # is both correct and PEP 8-idiomatic.
    if buffer is not None:
        self._display_buffer(buffer)
    else:
        self._display_buffer(self._buffer)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scrollWith(self, hStart = 0x00, hEnd=0x00, vOffset = 0x00, vStart=0x00, vEnd=0x00, int = 0x00, dire = "left" ): """! \~english Scroll screen @param hStart: Set horizontal scroll PAGE start address, value can be chosen between 0 and 7 @param hEnd: Set horizontal scroll PAGE end address, value can be chose between 0 and 7 @param vOffset: Vertical scroll offset row, if set to 0x00(0) means off vertical scroll @param vStart: Vertical scroll start line, value can be chose between 0x00 and screen.height @param vEnd: Vertical scroll end line, value can be chose between 0x00 and screen.height @param int: Set time interval between each scroll step @param dire: Set scroll direction, value can be "left" or "right" \~chinese 屏幕滚动 @param hStart: 设置水平滚动PAGE起始地址,数值可以在0到7之间选择 @param hEnd: 设置水平滚动PAGE结束地址,值可以在0和7之间选择 @param vOffset: 垂直滚动偏移行,如果设置为0x00(0),表示关闭垂直滚动 @param vStart: 垂直滚动起始行,值可以在0x00和screen.height之间选择 @param vEnd: 垂直滚动结束行,值可以在0x00和screen.height之间选择 @param int: 设置每个滚动步骤之间的时间间隔 @param dire: 设置滚动方向,数值可以是 "left" 或 "right" """
# Scrolling must be deactivated before scroll parameters may be changed.
self._command( [self.CMD_SSD1306_SET_SCROLL_DEACTIVE] )
# A non-zero vertical offset enables combined horizontal+vertical scroll,
# restricted to the [vStart, vEnd] row window.
if vOffset != 0:
    self._command( [ self.CMD_SSD1306_SET_SCROLL_VERTICAL_AREA, vStart, vEnd, 0x00 ])
# NOTE(review): the ``int`` parameter (time interval between scroll steps)
# shadows the builtin ``int``; renaming it would break keyword callers, so
# it is only flagged here.
self._command( [
    # Direction command: left vs right horizontal(+vertical) scroll.
    self.CMD_SSD1306_SET_SCROLL_HORIZONTAL_VERTICAL_LEFT if dire.upper()=="LEFT" else self.CMD_SSD1306_SET_SCROLL_HORIZONTAL_VERTICAL_RIGHT,
    0x00,       # dummy byte
    hStart,     # start PAGE address (0-7)
    int,        # interval between scroll steps
    hEnd,       # end PAGE address (0-7)
    vOffset,    # vertical scrolling offset (0 = horizontal only)
    0x00,
    # Finally (re)activate scrolling with the new parameters.
    self.CMD_SSD1306_SET_SCROLL_ACTIVE
    ])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(self, schedule_type, lookup_id, **kwargs): """ Loads Schedule linked to provided lookup """
log = self.get_logger(**kwargs)
log.info("Queuing <%s> <%s>" % (schedule_type, lookup_id))
# Record this run; fall back to a fresh UUID when there is no celery
# request id (e.g. when the task is called synchronously).
task_run = QueueTaskRun()
task_run.task_id = self.request.id or uuid4()
task_run.started_at = now()
tr_qs = QueueTaskRun.objects
# Load the schedule active items
schedules = Schedule.objects.filter(enabled=True)
# Narrow both the schedules and the task-run history to the scheduler
# definition (crontab or interval) this invocation belongs to.
if schedule_type == "crontab":
    schedules = schedules.filter(celery_cron_definition=lookup_id)
    tr_qs = tr_qs.filter(celery_cron_definition=lookup_id)
    scheduler_type = CrontabSchedule
    task_run.celery_cron_definition_id = lookup_id
elif schedule_type == "interval":
    schedules = schedules.filter(celery_interval_definition=lookup_id)
    tr_qs = tr_qs.filter(celery_interval_definition=lookup_id)
    scheduler_type = IntervalSchedule
    task_run.celery_interval_definition_id = lookup_id
# Confirm that this task should run now based on last run time.
try:
    last_task_run = tr_qs.latest("started_at")
except QueueTaskRun.DoesNotExist:
    # No previous run so it is safe to continue.
    pass
else:
    # This basically replicates what celery beat is meant to do, but
    # we can't trust celery beat and django-celery to always accurately
    # update their own last run time.
    sched = scheduler_type.objects.get(id=lookup_id)
    due, due_next = sched.schedule.is_due(last_task_run.started_at)
    # Abort (duplicate-beat protection) if not yet due and the next due
    # time is beyond the configured clock-skew tolerance.
    if not due and due_next >= settings.DEFAULT_CLOCK_SKEW_SECONDS:
        return (
            "Aborted Queuing <%s> <%s> due to last task run (%s) "
            "at %s" % (
                schedule_type,
                lookup_id,
                last_task_run.id,
                last_task_run.started_at,
            )
        )
task_run.save()
# create tasks for each active schedule
queued = 0
schedules = schedules.values("id", "auth_token", "endpoint", "payload")
for schedule in schedules.iterator():
    # DeliverTask expects a ``schedule_id`` kwarg rather than ``id``.
    schedule["schedule_id"] = str(schedule.pop("id"))
    DeliverTask.apply_async(kwargs=schedule)
    queued += 1
task_run.completed_at = now()
task_run.save()
return "Queued <%s> Tasks" % (queued,)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def bind(renderer, to): """ Bind a renderer to the given callable by constructing a new rendering view. """
def bind(renderer, to):
    """Bind *renderer* to the callable *to*, producing a rendering view.

    Exceptions raised by the wrapped callable are delegated to the
    renderer's optional ``view_error`` hook; exceptions raised while
    rendering go to the optional ``render_error`` hook.  When a hook is
    absent, the original exception propagates unchanged.
    """
    @wraps(to)
    def view(request, **kwargs):
        try:
            returned = to(request, **kwargs)
        except Exception as error:
            handler = getattr(renderer, "view_error", None)
            if handler is None:
                raise
            return handler(request, error)

        try:
            return renderer.render(request, returned)
        except Exception as error:
            handler = getattr(renderer, "render_error", None)
            if handler is None:
                raise
            return handler(request, returned, error)

    return view
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_perm_model(): """ Returns the Perm model that is active in this project. """
try:
    # require_ready=False permits lookup while the app registry is still
    # being populated (mirrors django.contrib.auth.get_user_model).
    return django_apps.get_model(settings.PERM_MODEL, require_ready=False)
except ValueError:
    # Setting was not a dotted "app_label.model_name" string.
    raise ImproperlyConfigured("PERM_MODEL must be of the form 'app_label.model_name'")
except LookupError:
    # Dotted path was well-formed but no such model is registered.
    raise ImproperlyConfigured(
        "PERM_MODEL refers to model '{}' that has not been installed".format(settings.PERM_MODEL)
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _load_yaml_config(cls, config_data, filename="(unknown)"): """Load a yaml config file."""
try:
    # safe_load refuses arbitrary Python object construction, so the
    # config file cannot execute code.
    config = yaml.safe_load(config_data)
except yaml.YAMLError as err:
    # problem_mark, when present, pinpoints the offending line/column
    # (0-based, hence the +1 for the human-readable message).
    if hasattr(err, 'problem_mark'):
        mark = err.problem_mark
        errmsg = ("Invalid YAML syntax in Configuration file "
                  "%(file)s at line: %(line)s, column: %(column)s."
                  % dict(file=filename,
                         line=mark.line + 1,
                         column=mark.column + 1))
    else:
        errmsg = ("YAML error reading Configuration file "
                  "%(file)s" % dict(file=filename))
    # Log the formatted message, then re-raise the original YAMLError.
    logger.error(errmsg)
    raise
logger.info("Configuration: %s", config)
return config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sround(x, precision=0): """ Round a single number using default non-deterministic generator. @param x: to round. @param precision: decimal places to round. """
# A fresh StochasticRound (with its own non-deterministic generator) is
# created on every call, so repeated calls may round the same value
# differently -- this is the documented "default non-deterministic" mode.
sr = StochasticRound(precision=precision)
return sr.round(x)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _parse_chord_line(line):
    '''
    Parse a chord line into a `ChordLineData` object.
    '''
    # Chord.extract_chordpos yields (chord, position) pairs; wrap each one
    # in a TabChord so the column position of every chord is preserved.
    chords = [
        TabChord(position=position, chord=chord)
        for chord, position in Chord.extract_chordpos(line)
    ]
    return ChordLineData(chords=chords)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _get_line_type(line):
    '''
    Decide the line type in function of its contents.

    Returns one of 'empty', 'chord' or 'lyric'.
    '''
    stripped = line.strip()
    if not stripped:
        return 'empty'
    # Strip everything matching a chord, collapse whitespace, and compare
    # lengths: if chords made up more than half of the (collapsed) content,
    # classify the line as a chord line, otherwise as lyrics.
    remainder = re.sub(r"\s+", " ", re.sub(CHORD_RE, "", stripped))
    if len(remainder) * 2 < len(re.sub(r"\s+", " ", stripped)):
        return 'chord'
    return 'lyric'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def parse_line(line):
    '''
    Parse a line into a `TabLine` object.
    '''
    # Only strip the right side: trailing whitespace is irrelevant, but
    # leading whitespace matters for chord column positions.
    line = line.rstrip()
    line_type = _get_line_type(line)
    # _DATA_PARSERS maps each line type to its dedicated parser callable.
    return TabLine(
        type=line_type,
        data=_DATA_PARSERS[line_type](line),
        original=line,
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def parse_tablature(lines):
    '''
    Parse a list of lines into a `Tablature`.
    '''
    # Every raw line becomes a parsed TabLine; the Tablature wraps them all.
    return Tablature(lines=[parse_line(raw) for raw in lines])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def preview(df,preview_rows = 20): #,preview_max_cols = 0): """ Returns a preview of a dataframe, which contains both header rows and tail rows. """
def preview(df, preview_rows = 20):
    """ Returns a preview of a dataframe, which contains both header
    rows and tail rows, plus a random sample from the middle.
    """
    # At least 4 rows are needed (one head, two sampled, one tail), and we
    # can never show more rows than the frame has.
    rows = max(preview_rows, 4)
    rows = min(rows, df.shape[0])
    # A quarter of the preview comes from each end; the remaining half is
    # sampled at random from the interior.
    edge = math.floor(rows / 4)
    middle = df[edge:-edge].sample(rows - 2 * edge)
    return pd.concat([df.head(edge), middle, df.tail(edge)])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def title_line(text): """Returns a string that represents the text as a title blurb """
def title_line(text):
    """Returns a string that represents the text as a title blurb
    """
    # Centre the text within the current terminal width, framed above and
    # below by full-width '=' rules with one blank line of padding each.
    width = shutil.get_terminal_size()[0]
    bar = '=' * width
    pad = ' ' * (width // 2 - len(text) // 2)
    return '\n'.join([bar, '', pad + str(text), '', bar]) + '\n'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def RadiusGrid(gridSize): """ Return a square grid with values of the distance from the centre of the grid to each gridpoint """
def RadiusGrid(gridSize):
    """ Return a square grid with values of the distance from the
    centre of the grid to each gridpoint
    """
    # Centred coordinates along one axis; broadcasting the row and column
    # copies against each other reproduces np.mgrid without the 2-D arrays.
    coords = np.arange(gridSize) - (gridSize - 1.0) / 2.0
    # |x + iy| gives the Euclidean distance from the centre.
    return np.abs(coords[:, np.newaxis] + 1j * coords[np.newaxis, :])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def CircularMaskGrid(gridSize, diameter=None): """ Return a square grid with ones inside and zeros outside a given diameter circle """
def CircularMaskGrid(gridSize, diameter=None):
    """ Return a square grid with ones inside and zeros outside a given
    diameter circle
    """
    # Default to a circle inscribed in the whole grid.
    radius = (gridSize if diameter is None else diameter) / 2.0
    return np.less_equal(RadiusGrid(gridSize), radius)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def AdaptiveOpticsCorrect(pupils,diameter,maxRadial,numRemove=None): """ Correct a wavefront using Zernike rejection up to some maximal order. Can operate on multiple telescopes in parallel. Note that this version removes the piston mode as well """
gridSize=pupils.shape[-1]
# Flatten each pupil screen to a vector so every telescope is corrected in
# one vectorised pass.
pupilsVector=np.reshape(pupils,(-1,gridSize**2))
zernikes=np.reshape(ZernikeGrid(gridSize,maxRadial,diameter),(-1,gridSize**2))
if numRemove is None:
    numRemove=zernikes.shape[0]
numScreen=pupilsVector.shape[0]
# Normalise inner products by the pupil area (the sum of the piston mode,
# zernikes[0]) so each inner product yields a mode amplitude.
normalisation=1.0/np.sum(zernikes[0])
# Note extra iteration to remove residual piston
for i in list(range(numRemove))+[0,]:
    # Project each screen onto mode i, then subtract that component.
    amplitudes=np.inner(zernikes[i],pupilsVector)*normalisation
    pupilsVector=pupilsVector-zernikes[i]*amplitudes[:,np.newaxis]
return np.reshape(pupilsVector,pupils.shape)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def FibreCouple(pupils,modeDiameter): """ Return the complex amplitudes coupled into a set of fibers """
def FibreCouple(pupils, modeDiameter):
    """ Return the complex amplitudes coupled into a set of fibers """
    gridSize = pupils.shape[-1]
    # One flattened row per pupil, one flattened fibre mode.
    flattened = np.reshape(pupils, (-1, gridSize ** 2))
    mode = np.reshape(FibreMode(gridSize, modeDiameter), (gridSize ** 2,))
    # Overlap integral of each pupil with the fibre mode.
    return np.inner(flattened, mode)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def SingleModeCombine(pupils,modeDiameter=None): """ Return the instantaneous coherent fluxes and photometric fluxes for a multiway single-mode fibre combiner """
def SingleModeCombine(pupils, modeDiameter=None):
    """ Return the instantaneous coherent fluxes and photometric fluxes
    for a multiway single-mode fibre combiner """
    if modeDiameter is None:
        modeDiameter = 0.9 * pupils.shape[-1]
    amplitudes = FibreCouple(pupils, modeDiameter)
    conjugates = np.conj(amplitudes)
    # Photometric flux of each beam is |a|^2.
    fluxes = (amplitudes * conjugates).real
    # Coherent flux for every unordered telescope pair (i > j).
    coherentFluxes = [
        amplitudes[i] * conjugates[j]
        for i in range(1, len(amplitudes))
        for j in range(i)
    ]
    return fluxes, coherentFluxes
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_unicode(s): """ Convert to unicode, raise exception with instructive error message if s is not unicode, ascii, or utf-8. """
# TEXT is a project-level text-type alias (presumably ``str`` on Python 3
# and ``unicode`` on Python 2 -- TODO confirm at its definition site).
if not isinstance(s, TEXT):
    # Anything that is neither text nor bytes is rejected outright.
    if not isinstance(s, bytes):
        raise TypeError('You are required to pass either unicode or '
                        'bytes here, not: %r (%s)' % (type(s), s))
    try:
        s = s.decode('utf-8')
    except UnicodeDecodeError as le:
        # Re-raise as TypeError with an instructive message, embedding the
        # original decode error for diagnosis.
        raise TypeError('You are required to pass either a unicode '
                        'object or a utf-8-encoded bytes string here. '
                        'You passed a bytes object which contained '
                        'non-utf-8: %r. The UnicodeDecodeError that '
                        'resulted from attempting to interpret it as '
                        'utf-8 was: %s' % (s, le,))
return s
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_postdata(self): """Serialize as post data for a POST request."""
items = []
for k, v in sorted(self.items()):  # predictable for testing
    items.append((k.encode('utf-8'), to_utf8_optional_iterator(v)))
# tell urlencode to deal with sequence values and map them correctly
# to resulting querystring. for example self["k"] = ["v1", "v2"] will
# result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
# '+' (form-style space) is replaced with '%20' percent encoding, and the
# result is returned as ASCII bytes suitable for a POST body.
return urlencode(items, True).replace('+', '%20').encode('ascii')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fetch_request_token(self, oauth_request): """Processes a request_token request and returns the request token on success. """
try:
    # Get the request token for authorization: if the request already
    # carries a valid request token, just return it.
    token = self._get_token(oauth_request, 'request')
except Error:
    # No token required for the initial token request.
    version = self._get_version(oauth_request)
    consumer = self._get_consumer(oauth_request)
    try:
        callback = self.get_callback(oauth_request)
    except Error:
        callback = None  # 1.0, no callback specified.
    # Initial request has no token yet, so the signature is checked
    # against the consumer only.
    self._check_signature(oauth_request, consumer, None)
    # Fetch a new token.
    token = self.data_store.fetch_request_token(consumer, callback)
return token
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fetch_access_token(self, oauth_request): """Processes an access_token request and returns the access token on success. """
version = self._get_version(oauth_request)
consumer = self._get_consumer(oauth_request)
try:
    verifier = self._get_verifier(oauth_request)
except Error:
    # Pre-1.0a (OAuth 1.0) requests carry no verifier.
    verifier = None
# Get the request token.
token = self._get_token(oauth_request, 'request')
self._check_signature(oauth_request, consumer, token)
# Exchange the authorised request token for an access token.
new_token = self.data_store.fetch_access_token(consumer, token, verifier)
return new_token
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_token(self, oauth_request, token_type='access'): """Try to find the token for the provided request token key."""
token_field = oauth_request.get_parameter('oauth_token') token = self.data_store.lookup_token(token_type, token_field) if not token: raise OAuthError('Invalid %s token: %s' % (token_type, token_field)) return token
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def mete_upscale_iterative_alt(S, N, doublings): """ This function is used to upscale from the anchor area. Parameters S : int or float Number of species at anchor scale N : int or float Number of individuals at anchor scale doublings : int Number of doublings of A. Result vector will be length doublings + 1. Returns ------- result : ndarray 1D array of number of species at each doubling """
def mete_upscale_iterative_alt(S, N, doublings):
    """Upscale species richness from the anchor area by repeated doubling.

    Parameters
    ----------
    S : int or float
        Number of species at anchor scale
    N : int or float
        Number of individuals at anchor scale
    doublings : int
        Number of doublings of A. Result vector will be length doublings + 1.

    Returns
    -------
    result : ndarray
        1D array of number of species at each doubling
    """
    # Arrays to store N and S at all doublings
    n_arr = np.empty(doublings + 1)
    s_arr = np.empty(doublings + 1)

    # BUG FIX: ``xrange`` exists only in Python 2; ``range`` is correct on
    # Python 3 (and behaves equivalently here).
    for i in range(doublings + 1):
        if i == 0:
            # Doubling 0 is simply the anchor-scale values.
            n_arr[i] = N
            s_arr[i] = S
        else:
            # S at previous (half) area; N doubles with the area.
            SA = s_arr[i - 1]
            n_arr[i] = 2 * n_arr[i - 1]
            N2A = n_arr[i]

            # Eq 8 from Harte 2009: returns S2A given x = exp(-lam_phi, 2A).
            def S2A_calc(x, SA, N2A):
                return ((SA + N2A * (1 - x) / (x - x ** (N2A + 1)) *
                         (1 - (x ** N2A) / (N2A + 1))) / x ** -1)

            # Eq 9 from Harte 2009, arranged to equal zero and used to
            # solve for x.  Two summations are replaced by closed forms for
            # the geometric and logarithmic series.  The 1e-23 offset is
            # needed so f(a) and f(b) have opposite signs for the
            # bracketing solver; it introduces an error of at most 1e-23
            # in x, which is negligible.
            def x_calc(x, SA, N2A):
                return (S2A_calc(x, SA, N2A) / N2A * x * (x ** N2A - 1) / (x - 1)
                        - (x ** N2A * (-lerchphi(x, 1, N2A + 1)) - np.log(1 - x))
                        ) - 1e-23

            # Solve for x (undoing the offset), then compute S at 2A.
            x = (optimize.brentq(x_calc, 1e-24, 1 - 1e-16, args=(SA, N2A),
                                 xtol=1e-16, maxiter=1000, disp=True) + 1e-23)
            s_arr[i] = S2A_calc(x, SA, N2A)

    return s_arr