text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
def paypaltime2datetime(s):
    """Convert a PayPal time string to a DateTime."""
    parsed = datetime.datetime.strptime(s, PayPalNVP.TIMESTAMP_FORMAT)
    if settings.USE_TZ:
        # TIMESTAMP_FORMAT timestamps are expressed in UTC.
        return timezone.make_aware(parsed, timezone.utc)
    return parsed
def express_endpoint_for_token(token, commit=False):
    """
    Returns the PayPal Express Checkout endpoint for a token.
    Pass 'commit=True' if you will not prompt for confirmation when the user
    returns to your site.
    """
    query = {'token': token}
    if commit:
        # 'commit' makes PayPal show a "Pay Now" button instead of "Continue".
        query['useraction'] = 'commit'
    return express_endpoint() % urlencode(query)
def strip_ip_port(ip_address):
    """
    Strip the port from an IPv4 or IPv6 address, returning a unicode object.

    Handles "1.2.3.4", "1.2.3.4:8080", "[::1]:8080", "::1" and IPv4-mapped
    IPv6 forms such as "[::ffff:127.0.0.1]:8080".
    """
    if ']:' in ip_address:
        # Bracketed IPv6 with port, e.g. "[::1]:8080".  This must be checked
        # BEFORE the IPv4 test: IPv4-mapped IPv6 addresses like
        # "[::ffff:127.0.0.1]:8080" contain '.' and were previously mangled
        # by the IPv4 branch.  Drop everything after the last ':', then
        # strip the surrounding [].
        cleaned_ip = ip_address.rpartition(':')[0][1:-1]
    elif '.' in ip_address and ip_address.count(':') <= 1:
        # IPv4, with or without a port (at most one ':' can appear).
        cleaned_ip = ip_address.split(':')[0]
    else:
        # IPv6 without a port (including unbracketed IPv4-mapped forms
        # like "::ffff:1.2.3.4", which contain '.' but several ':').
        cleaned_ip = ip_address
    return cleaned_ip
def doDirectPayment(self, params):
    """Call PayPal DoDirectPayment method.

    Raises PayPalFailure when PayPal flags the transaction; otherwise
    returns the stored NVP response object.
    """
    defaults = {"method": "DoDirectPayment", "paymentaction": "Sale"}
    # Parameters PayPal requires for a direct (credit-card) sale.
    required = ["creditcardtype", "acct", "expdate", "cvv2", "ipaddress",
                "firstname", "lastname", "street", "city", "state",
                "countrycode", "zip", "amt", ]
    nvp_obj = self._fetch(params, required, defaults)
    if nvp_obj.flag:
        raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
    # @@@ Could check cvv2match / avscode are both 'X' or '0'
    # qd = django.http.QueryDict(nvp_obj.response)
    # if qd.get('cvv2match') not in ['X', '0']:
    #   nvp_obj.set_flag("Invalid cvv2match: %s" % qd.get('cvv2match')
    # if qd.get('avscode') not in ['X', '0']:
    #   nvp_obj.set_flag("Invalid avscode: %s" % qd.get('avscode')
    return nvp_obj
def setExpressCheckout(self, params):
    """
    Initiates an Express Checkout transaction.
    Optionally, the SetExpressCheckout API operation can set up billing
    agreements for reference transactions and recurring payments.
    Returns a NVP instance - check for token and payerid to continue!
    """
    # Recurring-payment calls use a slightly different parameter set; adapt.
    if self._is_recurring(params):
        params = self._recurring_setExpressCheckout_adapter(params)

    nvp_obj = self._fetch(
        params,
        ["returnurl", "cancelurl", "paymentrequest_0_amt"],
        {"method": "SetExpressCheckout", "noshipping": 1},
    )
    if nvp_obj.flag:
        raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
    return nvp_obj
def createRecurringPaymentsProfile(self, params, direct=False):
    """
    Set direct to True to indicate that this is being called as a
    directPayment.

    Returns the NVP response object; raises PayPalFailure if PayPal does
    not successfully create the profile.
    """
    defaults = {"method": "CreateRecurringPaymentsProfile"}
    required = ["profilestartdate", "billingperiod", "billingfrequency", "amt"]
    # BUG FIX: the original used `required + [...]`, which builds a new
    # list and throws it away, so the extra required parameters were never
    # actually enforced.  Use += to extend the list in place.
    if direct:
        # Direct payments require CC data
        required += ["creditcardtype", "acct", "expdate", "firstname", "lastname"]
    else:
        required += ["token", "payerid"]
    nvp_obj = self._fetch(params, required, defaults)
    # Flag if profile_type != ActiveProfile
    if nvp_obj.flag:
        raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
    return nvp_obj
def manangeRecurringPaymentsProfileStatus(self, params, fail_silently=False):
    """
    Requires `profileid` and `action` params.
    Action must be either "Cancel", "Suspend", or "Reactivate".
    """
    nvp_obj = self._fetch(params, ["profileid", "action"],
                          {"method": "ManageRecurringPaymentsProfileStatus"})
    # TODO: This fail silently check should be using the error code, but its not easy to access
    flag_info_test_string = ('Invalid profile status for cancel action; '
                             'profile should be active or suspended')
    if nvp_obj.flag:
        already_inactive = (fail_silently and
                            nvp_obj.flag_info == flag_info_test_string)
        if not already_inactive:
            raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj)
    return nvp_obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _recurring_setExpressCheckout_adapter(self, params): """ The recurring payment interface to SEC is different than the recurring payment interface to ECP. This adapts a normal call to look like a SEC call. """
params['l_billingtype0'] = "RecurringPayments" params['l_billingagreementdescription0'] = params['desc'] REMOVE = ["billingfrequency", "billingperiod", "profilestartdate", "desc"] for k in params.keys(): if k in REMOVE: del params[k] return params
def _fetch(self, params, required, defaults):
    """Make the NVP request and store the response.

    `defaults` are overridden by caller-supplied `params`; the merged dict
    is validated against `required`, sent to PayPal, and both the request
    and the response values are persisted on a new PayPalNVP row.
    """
    defaults.update(params)
    pp_params = self._check_and_update_params(required, defaults)
    pp_string = self.signature + urlencode(pp_params)
    response = self._request(pp_string)
    response_params = self._parse_response(response)
    log.debug('PayPal Request:\n%s\n', pprint.pformat(defaults))
    log.debug('PayPal Response:\n%s\n', pprint.pformat(response_params))
    # Gather all NVP parameters to pass to a new instance.
    nvp_params = {}
    tmpd = defaults.copy()
    # Response values win over request values on key collisions.
    tmpd.update(response_params)
    for k, v in tmpd.items():
        if k in self.NVP_FIELDS:
            nvp_params[str(k)] = v
    # PayPal timestamp has to be formatted.
    if 'timestamp' in nvp_params:
        nvp_params['timestamp'] = paypaltime2datetime(nvp_params['timestamp'])
    nvp_obj = PayPalNVP(**nvp_params)
    nvp_obj.init(self.request, params, response_params)
    nvp_obj.save()
    return nvp_obj
def _request(self, data):
    """Moved out to make testing easier."""
    # PayPal NVP payloads are plain ASCII key=value strings.
    payload = data.encode("ascii")
    return requests.post(self.endpoint, data=payload).content
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_and_update_params(self, required, params): """ Ensure all required parameters were passed to the API call and format them correctly. """
for r in required: if r not in params: raise PayPalError("Missing required param: %s" % r) # Upper case all the parameters for PayPal. return (dict((k.upper(), v) for k, v in params.items()))
def _parse_response(self, response):
    """Turn the PayPal response into a dict"""
    # PayPal replies with an urlencoded NVP string; lower-case the keys
    # for consistent lookups.
    parsed = QueryDict(response, encoding='UTF-8').dict()
    return dict((key.lower(), value) for key, value in parsed.items())
def _postback(self):
    """
    Perform PayPal PDT Postback validation.
    Sends the transaction ID and business token to PayPal which responses
    with SUCCESS or FAILED.
    """
    postback_data = {
        "cmd": "_notify-synch",
        "at": IDENTITY_TOKEN,
        "tx": self.tx,
    }
    return requests.post(self.get_endpoint(), data=postback_data).content
def duplicate_txn_id(ipn_obj):
    """
    Returns True if a record with this transaction id exists and its
    payment_status has not changed.
    This function has been completely changed from its previous
    implementation where it used to specifically only check for a
    Pending->Completed transition.
    """
    # Latest unflagged IPN sharing this txn_id, excluding this record.
    manager = ipn_obj.__class__._default_manager
    similars = (manager
                .filter(txn_id=ipn_obj.txn_id)
                .exclude(id=ipn_obj.id)
                .exclude(flag=True)
                .order_by('-created_at')[:1])
    if len(similars) > 0:
        # we have a similar transaction, has the payment_status changed?
        return similars[0].payment_status == ipn_obj.payment_status
    return False
def make_secret(form_instance, secret_fields=None):
    """
    Returns a secret for use in a EWP form or an IPN verification based on a
    selection of variables in params. Should only be used with SSL.
    """
    warn_untested()
    # @@@ Moved here as temporary fix to avoid dependancy on auth.models.
    # @@@ amount is mc_gross on the IPN - where should mapping logic go?
    # @@@ amount / mc_gross is not nessecarily returned as it was sent - how to use it? 10.00 vs. 10.0
    # @@@ the secret should be based on the invoice or custom fields as well - otherwise its always the same.
    # Build the secret with fields availible in both PaymentForm and the IPN. Order matters.
    if secret_fields is None:
        secret_fields = ['business', 'item_name']
    data = ""
    for name in secret_fields:
        if hasattr(form_instance, 'cleaned_data'):
            if name in form_instance.cleaned_data:
                # BUG FIX: str() instead of the Python 2-only unicode(),
                # which raises NameError under Python 3.
                data += str(form_instance.cleaned_data[name])
        else:
            # Initial data passed into the constructor overrides defaults.
            if name in form_instance.initial:
                data += str(form_instance.initial[name])
            elif name in form_instance.fields and form_instance.fields[name].initial is not None:
                data += str(form_instance.fields[name].initial)
    return get_sha1_hexdigest(settings.SECRET_KEY, data)
def is_number(self):
    """True if there is at least one digit in number."""
    # Strip every non-digit character; what remains must be pure digits.
    # isdigit() is False for the empty string, so an all-junk value fails.
    self.number = re.sub(r'\D', '', self.number)
    return self.number.isdigit()
def is_mod10(self):
    """Returns True if number is valid according to mod10 (Luhn)."""
    checksum = 0
    parity = 0
    # Walk the digits right-to-left; every second digit is doubled and the
    # decimal digits of the product are summed into the checksum.
    for idx in range(len(self.number) - 1, -1, -1):
        multiplied = (parity + 1) * int(self.number[idx])
        checksum += sum(int(ch) for ch in str(multiplied))
        parity = (parity + 1) % 2
    return checksum % 10 == 0
def get_type(self):
    """Return the type if it matches one of the cards."""
    # First card whose pattern matches wins; None when nothing matches.
    matches = (card for card, pattern in CARDS.items()
               if pattern.match(self.number))
    return next(matches, None)
def verify(self):
    """Returns the card type if valid else None."""
    # Guard clauses: bail out as soon as any validity check fails.
    if not self.is_number():
        return None
    if self.is_test():
        return None
    if not self.is_mod10():
        return None
    return self.get_type()
def clean(self, value):
    """Raises a ValidationError if the card is not valid and stashes card type."""
    if not value:
        return value
    # Normalise common separators before validation.
    normalized = value.replace('-', '').replace(' ', '')
    self.card_type = verify_credit_card(normalized)
    if self.card_type is None:
        raise forms.ValidationError("Invalid credit card number.")
    return normalized
def process(self, request, item):
    """Process a PayPal direct payment.

    Returns True on success, False when PayPal reports a failure.
    """
    warn_untested()
    from paypal.pro.helpers import PayPalWPP
    wpp = PayPalWPP(request)
    params = self.cleaned_data
    params['creditcardtype'] = self.fields['acct'].card_type
    params['expdate'] = self.cleaned_data['expdate'].strftime("%m%Y")
    params['ipaddress'] = request.META.get("REMOTE_ADDR", "")
    # Merge in item details (amount, description, ...); item values win.
    params.update(item)
    try:
        # Create single payment:
        if 'billingperiod' not in params:
            wpp.doDirectPayment(params)
        # Create recurring payment:
        else:
            wpp.createRecurringPaymentsProfile(params, direct=True)
    except PayPalFailure:
        return False
    return True
def _postback(self):
    """Perform PayPal Postback validation."""
    # Echo the original IPN query back to PayPal prefixed with the
    # validation command; PayPal answers VERIFIED or INVALID.
    body = b"cmd=_notify-validate&" + self.query.encode("ascii")
    return requests.post(self.get_endpoint(), data=body).content
def send_signals(self):
    """Shout for the world to hear whether a txn was successful."""
    # Flagged transactions are invalid; everything else is valid.
    signal = invalid_ipn_received if self.flag else valid_ipn_received
    signal.send(sender=self)
def get_columns(self, connection, table_name, schema=None, **kw):
    """
    Return information about columns in `table_name`.

    Overrides interface
    :meth:`~sqlalchemy.engine.interfaces.Dialect.get_columns`.
    """
    cols = self._get_redshift_columns(connection, table_name, schema, **kw)
    if not self._domains:
        # Domain (user-defined type) definitions are loaded lazily once
        # and cached on the dialect instance.
        self._domains = self._load_domains(connection)
    domains = self._domains
    columns = []
    for col in cols:
        column_info = self._get_column_info(
            name=col.name, format_type=col.format_type,
            default=col.default, notnull=col.notnull, domains=domains,
            enums=[], schema=col.schema, encode=col.encode)
        columns.append(column_info)
    return columns
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
    """
    Return information about the primary key constraint on `table_name`.

    Overrides interface
    :meth:`~sqlalchemy.engine.interfaces.Dialect.get_pk_constraint`.
    """
    constraints = self._get_redshift_constraints(connection, table_name,
                                                 schema, **kw)
    pk_constraints = [c for c in constraints if c.contype == 'p']
    if not pk_constraints:
        return {'constrained_columns': [], 'name': ''}
    pk_constraint = pk_constraints[0]
    # NOTE(review): assumes condef always matches PRIMARY_KEY_RE; if it
    # did not, m would be None and m.group() would raise AttributeError.
    # Confirm the system catalog guarantees this format.
    m = PRIMARY_KEY_RE.match(pk_constraint.condef)
    colstring = m.group('columns')
    constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
    return {
        'constrained_columns': constrained_columns,
        'name': pk_constraint.conname,
    }
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
    """
    Return information about foreign keys in `table_name`.

    Overrides interface
    :meth:`~sqlalchemy.engine.interfaces.Dialect.get_foreign_keys`.
    """
    constraints = self._get_redshift_constraints(connection, table_name,
                                                 schema, **kw)
    fk_constraints = [c for c in constraints if c.contype == 'f']
    uniques = defaultdict(lambda: defaultdict(dict))
    # Group the per-column constraint rows by constraint name.
    for con in fk_constraints:
        uniques[con.conname]["key"] = con.conkey
        uniques[con.conname]["condef"] = con.condef
    fkeys = []
    for conname, attrs in uniques.items():
        # Parse the constraint definition SQL to recover referencing and
        # referenced columns, table and schema.
        m = FOREIGN_KEY_RE.match(attrs['condef'])
        colstring = m.group('referred_columns')
        referred_columns = SQL_IDENTIFIER_RE.findall(colstring)
        referred_table = m.group('referred_table')
        referred_schema = m.group('referred_schema')
        colstring = m.group('columns')
        constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
        fkey_d = {
            'name': conname,
            'constrained_columns': constrained_columns,
            'referred_schema': referred_schema,
            'referred_table': referred_table,
            'referred_columns': referred_columns,
        }
        fkeys.append(fkey_d)
    return fkeys
def get_table_names(self, connection, schema=None, **kw):
    """
    Return a list of table names for `schema`.

    Overrides interface
    :meth:`~sqlalchemy.engine.interfaces.Dialect.get_table_names`.
    """
    # 'r' = ordinary relation (table) in pg_class.relkind.
    return self._get_table_or_view_names('r', connection, schema, **kw)
def get_view_names(self, connection, schema=None, **kw):
    """
    Return a list of view names for `schema`.

    Overrides interface
    :meth:`~sqlalchemy.engine.interfaces.Dialect.get_view_names`.
    """
    # 'v' = view in pg_class.relkind.
    return self._get_table_or_view_names('v', connection, schema, **kw)
def get_unique_constraints(self, connection, table_name, schema=None, **kw):
    """
    Return information about unique constraints in `table_name`.

    Overrides interface
    :meth:`~sqlalchemy.engine.interfaces.Dialect.get_unique_constraints`.
    """
    constraints = self._get_redshift_constraints(connection, table_name,
                                                 schema, **kw)
    constraints = [c for c in constraints if c.contype == 'u']
    uniques = defaultdict(lambda: defaultdict(dict))
    # Group per-column rows by constraint name: "key" holds the ordered
    # attribute numbers, "cols" maps attribute number -> column name.
    for con in constraints:
        uniques[con.conname]["key"] = con.conkey
        uniques[con.conname]["cols"][con.attnum] = con.attname

    return [
        {'name': None,
         'column_names': [uc["cols"][i] for i in uc["key"]]}
        for name, uc in uniques.items()
    ]
def get_table_options(self, connection, table_name, schema, **kw):
    """
    Return a dictionary of options specified when the table of the
    given name was created.

    Overrides interface
    :meth:`~sqlalchemy.engine.Inspector.get_table_options`.
    """
    def keyfunc(column):
        num = int(column.sortkey)
        # If sortkey is interleaved, column numbers alternate
        # negative values, so take abs.
        return abs(num)
    table = self._get_redshift_relation(connection, table_name,
                                        schema, **kw)
    columns = self._get_redshift_columns(connection, table_name,
                                         schema, **kw)
    # Sort-key columns ordered by their (absolute) position in the key.
    sortkey_cols = sorted([col for col in columns if col.sortkey],
                          key=keyfunc)
    # Any negative sortkey number marks the whole key as interleaved.
    interleaved = any([int(col.sortkey) < 0 for col in sortkey_cols])
    sortkey = [col.name for col in sortkey_cols]
    interleaved_sortkey = None
    if interleaved:
        interleaved_sortkey = sortkey
        sortkey = None
    distkeys = [col.name for col in columns if col.distkey]
    distkey = distkeys[0] if distkeys else None
    return {
        'redshift_diststyle': table.diststyle,
        'redshift_distkey': distkey,
        'redshift_sortkey': sortkey,
        'redshift_interleaved_sortkey': interleaved_sortkey,
    }
def create_connect_args(self, *args, **kwargs):
    """
    Build DB-API compatible connection arguments.

    Overrides interface
    :meth:`~sqlalchemy.engine.interfaces.Dialect.create_connect_args`.
    """
    # Secure-by-default SSL settings, verified against the CA bundle
    # shipped with this package.
    default_args = {
        'sslmode': 'verify-full',
        'sslrootcert': pkg_resources.resource_filename(
            __name__,
            'redshift-ca-bundle.crt'
        ),
    }
    cargs, cparams = super(RedshiftDialect, self).create_connect_args(
        *args, **kwargs
    )
    # Caller-supplied connection parameters take precedence over defaults.
    default_args.update(cparams)
    return cargs, default_args
def visit_unload_from_select(element, compiler, **kw):
    """Returns the actual sql query for the UnloadFromSelect class.

    Optional clauses are rendered as empty strings when unset; scalar
    option values are attached as bound parameters.
    """
    template = """
       UNLOAD (:select) TO :unload_location
       CREDENTIALS :credentials
       {manifest}
       {header}
       {delimiter}
       {encrypted}
       {fixed_width}
       {gzip}
       {add_quotes}
       {null}
       {escape}
       {allow_overwrite}
       {parallel}
       {region}
       {max_file_size}
    """
    el = element

    qs = template.format(
        manifest='MANIFEST' if el.manifest else '',
        header='HEADER' if el.header else '',
        delimiter=(
            'DELIMITER AS :delimiter' if el.delimiter is not None else ''
        ),
        encrypted='ENCRYPTED' if el.encrypted else '',
        fixed_width='FIXEDWIDTH AS :fixed_width' if el.fixed_width else '',
        gzip='GZIP' if el.gzip else '',
        add_quotes='ADDQUOTES' if el.add_quotes else '',
        escape='ESCAPE' if el.escape else '',
        null='NULL AS :null_as' if el.null is not None else '',
        allow_overwrite='ALLOWOVERWRITE' if el.allow_overwrite else '',
        parallel='PARALLEL OFF' if not el.parallel else '',
        region='REGION :region' if el.region is not None else '',
        max_file_size=(
            'MAXFILESIZE :max_file_size MB'
            if el.max_file_size is not None else ''
        ),
    )

    query = sa.text(qs)

    if el.delimiter is not None:
        query = query.bindparams(sa.bindparam(
            'delimiter', value=element.delimiter, type_=sa.String,
        ))

    if el.fixed_width:
        query = query.bindparams(sa.bindparam(
            'fixed_width',
            value=_process_fixed_width(el.fixed_width),
            type_=sa.String,
        ))

    if el.null is not None:
        query = query.bindparams(sa.bindparam(
            'null_as', value=el.null, type_=sa.String
        ))

    if el.region is not None:
        query = query.bindparams(sa.bindparam(
            'region', value=el.region, type_=sa.String
        ))

    if el.max_file_size is not None:
        # MAXFILESIZE is specified in megabytes on the Redshift side.
        max_file_size_mib = float(el.max_file_size) / 1024 / 1024
        query = query.bindparams(sa.bindparam(
            'max_file_size', value=max_file_size_mib, type_=sa.Float
        ))

    return compiler.process(
        query.bindparams(
            sa.bindparam('credentials', value=el.credentials, type_=sa.String),
            sa.bindparam(
                'unload_location', value=el.unload_location, type_=sa.String,
            ),
            # The inner SELECT is compiled with literal binds so it can be
            # embedded as a single string parameter.
            sa.bindparam(
                'select',
                value=compiler.process(
                    el.select,
                    literal_binds=True,
                ),
                type_=sa.String,
            ),
        ),
        **kw
    )
def visit_create_library_command(element, compiler, **kw):
    """
    Returns the actual sql query for the CreateLibraryCommand class.
    """
    query = """
        CREATE {or_replace} LIBRARY {name}
        LANGUAGE pythonplu
        FROM :location
        WITH CREDENTIALS AS :credentials
        {region}
    """
    bindparams = [
        sa.bindparam(
            'location',
            value=element.location,
            type_=sa.String,
        ),
        sa.bindparam(
            'credentials',
            value=element.credentials,
            type_=sa.String,
        ),
    ]

    if element.region is not None:
        bindparams.append(sa.bindparam(
            'region',
            value=element.region,
            type_=sa.String,
        ))

    # Identifiers cannot be bound parameters, so the (quoted) library name
    # is substituted into the SQL text directly.
    quoted_lib_name = compiler.preparer.quote_identifier(element.library_name)
    query = query.format(name=quoted_lib_name,
                         or_replace='OR REPLACE' if element.replace else '',
                         region='REGION :region' if element.region else '')
    return compiler.process(sa.text(query).bindparams(*bindparams), **kw)
def find_proxy(url, host=None):
    """
    Finds proxy string for the given url and host. If host is not defined,
    it's extracted from the url.

    Raises URLError when the url cannot be parsed into a host.
    """
    if host is None:
        m = _URL_REGEX.match(url)
        if not m:
            raise URLError(url)
        # BUG FIX: use == for value comparison.  `is 1` compares object
        # identity and only happens to work because CPython interns small
        # integers; it is not guaranteed behaviour.
        if len(m.groups()) == 1:
            host = m.groups()[0]
        else:
            raise URLError(url)
    return _pacparser.find_proxy(url, host)
def just_find_proxy(pacfile, url, host=None):
    """
    This function is a wrapper around init, parse_pac, find_proxy and
    cleanup. This is the function to call if you want to find proxy just
    for one url.
    """
    if not os.path.isfile(pacfile):
        raise IOError('Pac file does not exist: {}'.format(pacfile))
    # One-shot lifecycle: initialise, parse, query, tear down.
    init()
    parse_pac(pacfile)
    result = find_proxy(url, host)
    cleanup()
    return result
def split_data(iterable, pred):
    """
    Split data from ``iterable`` into two lists.
    Each element is passed to function ``pred``; elements for which ``pred``
    returns True are put into ``yes`` list, other elements are put into
    ``no`` list.
    """
    yes, no = [], []
    for item in iterable:
        # Route each element to the matching bucket.
        target = yes if pred(item) else no
        target.append(item)
    return yes, no
def match_url(self, url, options=None):
    """
    Return if this rule matches the URL.

    What to do if rule is matched is up to developer. Most likely
    ``.is_exception`` attribute should be taken in account.

    Raises ValueError when the rule uses an option the caller did not
    supply in `options`.
    """
    options = options or {}
    for optname in self.options:
        if optname == 'match-case':
            # TODO: case-sensitive matching is not implemented yet.
            continue

        if optname not in options:
            raise ValueError("Rule requires option %s" % optname)

        if optname == 'domain':
            # Domain options carry their own matching semantics
            # (subdomains, negations), delegated to _domain_matches.
            if not self._domain_matches(options['domain']):
                return False
            continue

        if options[optname] != self.options[optname]:
            return False

    return self._url_matches(url)
def matching_supported(self, options=None):
    """
    Return whether this rule can return meaningful result, given the
    `options` dict. If some options are missing, then rule shouldn't be
    matched against, and this function returns False.
    """
    # Comments never match; HTML rules are not supported yet.
    if self.is_comment or self.is_html_rule:
        return False
    provided = set() if not options else set(options.keys())
    # Every option the rule uses must be available at matching time.
    return provided.issuperset(self._options_keys)
def rule_to_regex(cls, rule):
    """
    Convert AdBlock rule to a regular expression.

    Raises AdblockParsingError for the invalid bare "/" rule.
    """
    if not rule:
        return rule

    # Check if the rule isn't already regexp
    if rule.startswith('/') and rule.endswith('/'):
        if len(rule) > 1:
            rule = rule[1:-1]
        else:
            raise AdblockParsingError('Invalid rule')
        return rule

    # escape special regex characters (raw strings fix the invalid
    # escape-sequence warnings the non-raw originals produced)
    rule = re.sub(r"([.$+?{}()\[\]\\])", r"\\\1", rule)

    # XXX: the resulting regex must use non-capturing groups (?:
    # for performance reasons; also, there is a limit on number
    # of capturing groups, no using them would prevent building
    # a single regex out of several rules.

    # Separator character ^ matches anything but a letter, a digit, or
    # one of the following: _ - . %. The end of the address is also
    # accepted as separator.
    rule = rule.replace("^", r"(?:[^\w\d_\-.%]|$)")

    # * symbol
    rule = rule.replace("*", ".*")

    # | in the end means the end of the address
    if rule[-1] == '|':
        rule = rule[:-1] + '$'

    # || in the beginning means beginning of the domain name
    if rule[:2] == '||':
        # XXX: it is better to use urlparse for such things,
        # but urlparse doesn't give us a single regex.
        # Regex is based on http://tools.ietf.org/html/rfc3986#appendix-B
        if len(rule) > 2:
            #          |            | complete part     |
            #          |  scheme    | of the domain     |
            rule = r"^(?:[^:/?#]+:)?(?://(?:[^/?#]*\.)?)?" + rule[2:]
    elif rule[0] == '|':
        # | in the beginning means start of the address
        rule = '^' + rule[1:]

    # Other | symbols should be escaped; the lookahead keeps the "|$"
    # produced above intact.  BUG FIX: the original pattern "(\|)[^$]"
    # also consumed -- and silently dropped -- the character following
    # each escaped "|".
    rule = re.sub(r"\|(?!\$)", r"\\|", rule)

    return rule
def tokenize(self, data_source, callback=None):
    """
    Read data from `data_source`, one frame a time, and process the read
    frames in order to detect sequences of frames that make up valid tokens.

    :Parameters:
       `data_source` : instance of the :class:`DataSource` class that
           implements a `read` method. 'read' should return a slice of
           signal, i.e. frame (of whatever type as long as it can be
           processed by validator) and None if there is no more signal.

       `callback` : an optional 3-argument function.
           If a `callback` function is given, it will be called each time
           a valid token is found.

    :Returns:
       A list of tokens if `callback` is None. Each token is tuple with
       the following elements::

            (data, start, end)

       where `data` is a list of read frames, `start`: index of the first
       frame in the original data and `end` : index of the last frame.
    """
    self._reinitialize()
    if callback is not None:
        self._deliver = callback
    # Consume the source frame by frame until it is exhausted.
    while True:
        frame = data_source.read()
        if frame is None:
            break
        self._current_frame += 1
        self._process(frame)
    self._post_process()
    if callback is None:
        # Hand over the collected tokens and drop the internal reference
        # so a later call starts from a clean slate.
        _ret = self._tokens
        self._tokens = None
        return _ret
def read(self):
    """
    Read one character from buffer.

    :Returns:
       Current character or None if end of buffer is reached
    """
    try:
        char = self._data[self._current]
    except IndexError:
        # Past the end of the buffer.
        return None
    self._current += 1
    return char
def set_data(self, data):
    """
    Set a new data buffer.

    :Parameters:
       `data` : a string object
           New data buffer.

    :Raises:
       ValueError if `data` is not a string.
    """
    # BUG FIX: `basestring` only exists on Python 2, so this method raised
    # NameError for every call under Python 3.  Fall back to `str` there.
    try:
        string_types = basestring
    except NameError:
        string_types = str
    if not isinstance(data, string_types):
        raise ValueError("data must an instance of basestring")
    self._data = data
    self._current = 0
def set_data(self, data_buffer):
    """
    Set new data for this audio stream.

    :Parameters:
       `data_buffer` : str, basestring, Bytes
           a string buffer with a length multiple of
           (sample_width * channels)
    """
    # The buffer must hold a whole number of audio frames.
    frame_size = self.sample_width * self.channels
    if len(data_buffer) % frame_size != 0:
        raise ValueError("length of data_buffer must be a multiple of (sample_width * channels)")
    self._buffer = data_buffer
    self._index = 0
    self._left = 0 if self._buffer is None else len(self._buffer)
def append_data(self, data_buffer):
    """
    Append data to this audio stream

    :Parameters:
       `data_buffer` : str, basestring, Bytes
           a buffer with a length multiple of (sample_width * channels)
    """
    # Appended data must also be a whole number of audio frames.
    frame_size = self.sample_width * self.channels
    if len(data_buffer) % frame_size != 0:
        raise ValueError("length of data_buffer must be a multiple of (sample_width * channels)")
    self._buffer += data_buffer
    self._left += len(data_buffer)
def user_post_save(sender, **kwargs):
    """
    After User.save is called we check to see if it was a created user. If
    so, we check if the User object wants account creation. If all passes
    we create an Account object.

    We only run on user creation to avoid having to check for existence on
    each call to User.save.
    """
    # Disable post_save during manage.py loaddata
    if kwargs.get("raw", False):
        return False
    user = kwargs["instance"]
    created = kwargs["created"]
    # Per-user opt-out wins; otherwise fall back to the global setting.
    disabled = getattr(user, "_disable_account_creation",
                       not settings.ACCOUNT_CREATE_ON_SAVE)
    if created and not disabled:
        Account.create(user=user)
def check_password_expired(user):
    """
    Return True if password is expired and system is using
    password expiration, False otherwise.
    """
    if not settings.ACCOUNT_PASSWORD_USE_HISTORY:
        return False

    if hasattr(user, "password_expiry"):
        # user-specific value
        expiry = user.password_expiry.expiry
    else:
        # use global value
        expiry = settings.ACCOUNT_PASSWORD_EXPIRY

    if expiry == 0:
        # zero indicates no expiration
        return False

    try:
        # get latest password info
        latest = user.password_history.latest("timestamp")
    except PasswordHistory.DoesNotExist:
        return False

    expiration = latest.timestamp + datetime.timedelta(seconds=expiry)
    return expiration < datetime.datetime.now(tz=pytz.UTC)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None): """ Decorator for views that checks that the user is logged in, redirecting to the log in page if necessary. """
def decorator(view_func):
    # Wrap the view, preserving its metadata for introspection/debugging.
    @functools.wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if is_authenticated(request.user):
            return view_func(request, *args, **kwargs)
        # Not authenticated: redirect to login, preserving the "next" URL.
        return handle_redirect_to_login(
            request,
            redirect_field_name=redirect_field_name,
            login_url=login_url
        )
    return _wrapped_view

# Support both bare usage (@login_required) and parameterized usage
# (@login_required(login_url=...)).
if func:
    return decorator(func)
return decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_next(self, url, context): """ With both `redirect_field_name` and `redirect_field_value` available in the context, add on a querystring to handle "next" redirecting. """
# Only append a querystring when both redirect keys are present and the
# value is non-empty (e.g. produces "?next=/some/path").
if all([key in context for key in ["redirect_field_name", "redirect_field_value"]]):
    if context["redirect_field_value"]:
        url += "?" + urlencode({
            context["redirect_field_name"]: context["redirect_field_value"],
        })
return url
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _verify( self, request, return_payload=False, verify=True, raise_missing=False, request_args=None, request_kwargs=None, *args, **kwargs ): """ If there is a "permakey", then we will verify the token by checking the database. Otherwise, just do the normal verification. Typically, any method that begins with an underscore in sanic-jwt should not be touched. In this case, we are trying to break the rules a bit to handle a unique use case: handle both expirable and non-expirable tokens. """
if "permakey" in request.headers:
    # Extract the permakey from the headers
    permakey = request.headers.get("permakey")

    # In production, probably should have some exception handling here
    # in case the permakey is an empty string or some other bad value
    payload = self._decode(permakey, verify=verify)

    # Sometimes, the application will call _verify(...return_payload=True)
    # So, let's make sure to handle this scenario.
    if return_payload:
        return payload

    # Retrieve the user from the database
    user_id = payload.get("user_id", None)
    user = userid_table.get(user_id)

    # If we cannot find a user, then this method should return
    # is_valid == False
    # reason == some text for why
    # status == some status code, probably a 401
    if not user_id or not user:
        is_valid = False
        reason = "No user found"
        status = 401
    else:
        # After finding a user, make sure the permakey matches,
        # or else return a bad status or some other error.
        # In production, both this scenario, and the above "No user found"
        # scenario should return an identical message and status code.
        # This is to prevent your application accidentally
        # leaking information about the existence or non-existence of users.
        is_valid = user.permakey == permakey
        reason = None if is_valid else "Permakey mismatch"
        status = 200 if is_valid else 401

    return is_valid, status, reason
else:
    # No permakey header: fall back to the normal (expirable) JWT check.
    return super()._verify(
        request=request,
        return_payload=return_payload,
        verify=verify,
        raise_missing=raise_missing,
        request_args=request_args,
        request_kwargs=request_kwargs,
        *args,
        **kwargs
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, item): """Helper method to avoid calling getattr """
# Resolve a config attribute by name and call it to get its value.
# NOTE(review): when `item` is not a member, `item()` is invoked on the
# raw argument itself — presumably callers only pass valid keys; confirm.
if item in self:  # noqa
    item = getattr(self, item)
return item()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_presets(app_config): """ Pull the application's configurations for Sanic JWT """
# Keep only keys starting with "SANIC_JWT" and strip that 10-char prefix,
# lower-casing the remainder to form the preset name.
return {
    x.lower()[10:]: app_config.get(x)
    for x in filter(lambda x: x.startswith("SANIC_JWT"), app_config)
}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def initialize(*args, **kwargs): """ Functional approach to initializing Sanic JWT. This was the original method, but was replaced by the Initialize class. It is recommended to use the class because it is more flexible. There is no current plan to remove this method, but it may be deprecated in the future. """
# A second positional argument, if given, is the authenticate handler.
if len(args) > 1:
    kwargs.update({"authenticate": args[1]})
# Delegate to the class-based initializer; args[0] is the app/blueprint.
return Initialize(args[0], **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __check_deprecated(self): """ Checks for deprecated configuration keys """
# Depracation notices if "SANIC_JWT_HANDLER_PAYLOAD_SCOPES" in self.app.config: raise exceptions.InvalidConfiguration( "SANIC_JWT_HANDLER_PAYLOAD_SCOPES has been deprecated. " "Instead, pass your handler method (not an import path) as " "initialize(add_scopes_to_payload=my_scope_extender)" ) if "SANIC_JWT_PAYLOAD_HANDLER" in self.app.config: raise exceptions.InvalidConfiguration( "SANIC_JWT_PAYLOAD_HANDLER has been deprecated. " "Instead, you will need to subclass Authentication. " ) if "SANIC_JWT_HANDLER_PAYLOAD_EXTEND" in self.app.config: raise exceptions.InvalidConfiguration( "SANIC_JWT_HANDLER_PAYLOAD_EXTEND has been deprecated. " "Instead, you will need to subclass Authentication. " "Check out the documentation for more information." )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __add_endpoints(self): """ Initialize the Sanic JWT Blueprint and add to the instance initialized """
# Register an endpoint for each mapping whose config keys are all truthy.
for mapping in endpoint_mappings:
    if all(map(self.config.get, mapping.keys)):
        self.__add_single_endpoint(
            mapping.cls, mapping.endpoint, mapping.is_protected
        )

# Route Sanic JWT exceptions through the shared exception response.
self.bp.exception(exceptions.SanicJWTException)(
    self.responses.exception_response
)
# When initialized on an app (not a blueprint), attach the blueprint now.
if not self.instance_is_blueprint:
    url_prefix = self._get_url_prefix()
    self.instance.blueprint(self.bp, url_prefix=url_prefix)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __add_class_views(self): """ Include any custom class views on the Sanic JWT Blueprint """
config = self.config
if "class_views" in self.kwargs:
    class_views = self.kwargs.pop("class_views")

    for route, view in class_views:
        # Each entry must be a (url_route: str, BaseEndpoint subclass) pair.
        if issubclass(view, endpoints.BaseEndpoint) and isinstance(
            route, str
        ):
            self.bp.add_route(
                view.as_view(
                    self.responses,
                    config=self.config,
                    instance=self.instance,
                ),
                route,
                strict_slashes=config.strict_slashes(),
            )

        else:
            raise exceptions.InvalidClassViewsFormat()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def _get_user_id(self, user, *, asdict=False): """ Get a user_id from a user object. If `asdict` is True, will return it as a dict with `config.user_id` as key. The `asdict` keyword defaults to `False`. """
uid = self.config.user_id()
if isinstance(user, dict):
    user_id = user.get(uid)
elif hasattr(user, "to_dict"):
    # to_dict may be sync or async; utils.call supports both.
    _to_dict = await utils.call(user.to_dict)
    user_id = _to_dict.get(uid)
else:
    # User object is neither a dict nor dict-convertible.
    raise exceptions.InvalidRetrieveUserObject()

if asdict:
    return {uid: user_id}

return user_id
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_authentication(self, request, request_args, request_kwargs): """ Checks a request object to determine if that request contains a valid, and authenticated JWT. It returns a tuple: 1. Boolean whether the request is authenticated with a valid JWT 2. HTTP status code 3. Reasons (if any) for a potential authentication failure """
try:
    is_valid, status, reasons = self._verify(
        request,
        request_args=request_args,
        request_kwargs=request_kwargs,
    )
except Exception as e:
    logger.debug(e.args)
    # In debug mode surface the raw exception; otherwise normalize
    # everything to an Unauthorized response.
    if self.config.debug():
        raise e

    # Only propagate messages from known Sanic JWT exceptions.
    args = e.args if isinstance(e, SanicJWTException) else []
    raise exceptions.Unauthorized(*args)

return is_valid, status, reasons
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _decode(self, token, verify=True): """ Take a JWT and return a decoded payload. Optionally, will verify the claims on the token. """
secret = self._get_secret()
algorithm = self._get_algorithm()
kwargs = {}

# Map configured claim values onto the keyword names jwt.decode expects;
# "exp" is handled separately via the verify_exp option below.
for claim in self.claims:
    if claim != "exp":
        setting = "claim_{}".format(claim.lower())
        if setting in self.config:  # noqa
            value = self.config.get(setting)
            kwargs.update({claim_label[claim]: value})

kwargs["leeway"] = int(self.config.leeway())
if "claim_aud" in self.config:  # noqa
    kwargs["audience"] = self.config.claim_aud()
if "claim_iss" in self.config:  # noqa
    kwargs["issuer"] = self.config.claim_iss()

decoded = jwt.decode(
    token,
    secret,
    algorithms=[algorithm],
    verify=verify,
    options={"verify_exp": self.config.verify_exp()},
    **kwargs
)
return decoded
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def _get_payload(self, user): """ Given a user object, create a payload and extend it as configured. """
payload = await utils.call(self.build_payload, user)

# The payload must be a dict containing the configured user-id key.
if (
    not isinstance(payload, dict)
    or self.config.user_id() not in payload
):
    raise exceptions.InvalidPayload

payload = await utils.call(self.add_claims, payload, user)

# extend_payload may optionally accept the user object; inspect its
# signature so both variants work.
extend_payload_args = inspect.getfullargspec(self.extend_payload)
args = [payload]
if "user" in extend_payload_args.args:
    args.append(user)
payload = await utils.call(self.extend_payload, *args)

if self.config.scopes_enabled():
    scopes = await utils.call(self.add_scopes_to_payload, user)
    # Normalize a scalar scope into a single-item list.
    if not isinstance(scopes, (tuple, list)):
        scopes = [scopes]
    payload[self.config.scopes_name()] = scopes

# Every registered claim (built-in plus custom) must appear in the payload.
claims = self.claims + [x.get_key() for x in self._custom_claims]
missing = [x for x in claims if x not in payload]
if missing:
    logger.debug("")
    raise exceptions.MissingRegisteredClaim(missing=missing)

return payload
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_token_from_cookies(self, request, refresh_token): """ Extract the token if present inside the request cookies. """
# Choose the configured cookie name for the requested token type.
if refresh_token:
    cookie_token_name_key = "cookie_refresh_token_name"
else:
    cookie_token_name_key = "cookie_access_token_name"
cookie_token_name = getattr(self.config, cookie_token_name_key)
return request.cookies.get(cookie_token_name(), None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_token_from_headers(self, request, refresh_token): """ Extract the token if present inside the headers of a request. """
header = request.headers.get(self.config.authorization_header(), None)
if header is None:
    return None

else:
    header_prefix_key = "authorization_header_prefix"
    header_prefix = getattr(self.config, header_prefix_key)
    if header_prefix():
        try:
            # Expect exactly "<prefix> <token>", e.g. "Bearer abc...".
            prefix, token = header.split(" ")
            if prefix != header_prefix():
                raise Exception
        except Exception:
            raise exceptions.InvalidAuthorizationHeader()
    else:
        token = header

    if refresh_token:
        # Refresh tokens travel in the JSON body rather than the header.
        token = request.json.get(self.config.refresh_token_name())

    return token
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_token_from_query_string(self, request, refresh_token): """ Extract the token if present from the request args. """
# Choose the configured query-string arg name for the requested token type.
if refresh_token:
    query_string_token_name_key = "query_string_refresh_token_name"
else:
    query_string_token_name_key = "query_string_access_token_name"
query_string_token_name = getattr(
    self.config, query_string_token_name_key
)
return request.args.get(query_string_token_name(), None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_token(self, request, refresh_token=False): """ Extract a token from a request object. """
# Lookup order: cookie, then query string, then Authorization header.
# "strict" mode for a source raises immediately instead of falling through.
if self.config.cookie_set():
    token = self._get_token_from_cookies(request, refresh_token)
    if token:
        return token

    else:
        if self.config.cookie_strict():
            raise exceptions.MissingAuthorizationCookie()

if self.config.query_string_set():
    token = self._get_token_from_query_string(request, refresh_token)
    if token:
        return token

    else:
        if self.config.query_string_strict():
            raise exceptions.MissingAuthorizationQueryArg()

token = self._get_token_from_headers(request, refresh_token)
if token:
    return token

raise exceptions.MissingAuthorizationHeader()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _verify( self, request, return_payload=False, verify=True, raise_missing=False, request_args=None, request_kwargs=None, *args, **kwargs ): """ Verify that a request object is authenticated. """
try:
    token = self._get_token(request)
    is_valid = True
    reason = None
except (
    exceptions.MissingAuthorizationCookie,
    exceptions.MissingAuthorizationQueryArg,
    exceptions.MissingAuthorizationHeader,
) as e:
    token = None
    is_valid = False
    reason = list(e.args)
    status = e.status_code if self.config.debug() else 401
    if raise_missing:
        # Outside debug mode, mask the specific status as a generic 401.
        if not self.config.debug():
            e.status_code = 401

        raise e

if token:
    try:
        payload = self._decode(token, verify=verify)

        if verify:
            if self._extra_verifications:
                self._verify_extras(payload)
            if self._custom_claims:
                self._verify_custom_claims(payload)
    except (
        jwt.exceptions.ExpiredSignatureError,
        jwt.exceptions.InvalidIssuerError,
        jwt.exceptions.ImmatureSignatureError,
        jwt.exceptions.InvalidIssuedAtError,
        jwt.exceptions.InvalidAudienceError,
        InvalidVerificationError,
        InvalidCustomClaimError,
    ) as e:
        # Make sure that the reasons all end with '.' for consistency
        reason = [
            x if x.endswith(".") else "{}.".format(x) for x in list(e.args)
        ]
        payload = None
        status = 401
        is_valid = False
    except jwt.exceptions.DecodeError as e:
        self._reasons = e.args

        # Make sure that the reasons all end with '.' for consistency
        reason = (
            [
                x if x.endswith(".") else "{}.".format(x)
                for x in list(e.args)
            ]
            if self.config.debug()
            else "Auth required."
        )
        logger.debug(e.args)
        is_valid = False
        payload = None
        status = 400 if self.config.debug() else 401
else:
    payload = None

if return_payload:
    return payload

status = 200 if is_valid else status

return is_valid, status, reason
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_payload(self, request, verify=True, *args, **kwargs): """ Extract a payload from a request object. """
# Delegate to _verify with return_payload=True to get the decoded claims.
payload = self._verify(
    request, return_payload=True, verify=verify, *args, **kwargs
)
return payload
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_scopes(self, request): """ Extract scopes from a request object. """
payload = self.extract_payload(request)
if not payload:
    # No payload means no scopes to report.
    return None

scopes_attribute = self.config.scopes_name()
return payload.get(scopes_attribute, None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extract_user_id(self, request): """ Extract a user id from a request object. """
# Pull the configured user-id claim out of the decoded payload.
payload = self.extract_payload(request)
user_id_attribute = self.config.user_id()
return payload.get(user_id_attribute, None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def generate_access_token(self, user): """ Generate an access token for a given user. """
payload = await self._get_payload(user)
secret = self._get_secret(True)
algorithm = self._get_algorithm()

# jwt.encode is expected to return bytes here, hence the decode.
# NOTE(review): PyJWT 2.x returns str from encode — confirm the pinned
# PyJWT version is 1.x.
return jwt.encode(payload, secret, algorithm=algorithm).decode("utf-8")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def generate_refresh_token(self, request, user): """ Generate a refresh token for a given user. """
# Generate the opaque token via the configured generator, then persist it
# through the (possibly async) store_refresh_token hook.
refresh_token = await utils.call(self.config.generate_refresh_token())
user_id = await self._get_user_id(user)
await utils.call(
    self.store_refresh_token,
    user_id=user_id,
    refresh_token=refresh_token,
    request=request,
)
return refresh_token
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tsplit(df, shape): """Split array into two parts."""
# pandas objects require positional slicing via iloc; numpy slices directly.
if isinstance(df, (pd.DataFrame, pd.Series)):
    return df.iloc[0:shape], df.iloc[shape:]
else:
    return df[0:shape], df[shape:]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def concat(x, y, axis=0): """Concatenate a sequence of pandas or numpy objects into one entity."""
# Use pandas concat only when BOTH inputs are pandas; otherwise fall back
# to numpy (rows via concatenate, columns via column_stack).
if all([isinstance(df, (pd.DataFrame, pd.Series)) for df in [x, y]]):
    return pd.concat([x, y], axis=axis)
else:
    if axis == 0:
        return np.concatenate([x, y])
    else:
        return np.column_stack([x, y])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def reshape_1d(df): """If parameter is 1D row vector then convert it into 2D matrix."""
# Promote a 1-D vector of length n to an (n, 1) column matrix.
# NOTE(review): relies on a .reshape method; modern pandas Series has no
# reshape — presumably numpy input only here; confirm callers.
shape = df.shape
if len(shape) == 1:
    return df.reshape(shape[0], 1)
else:
    return df
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def idx(df, index): """Universal indexing for numpy and pandas objects."""
if isinstance(df, (pd.DataFrame, pd.Series)):
    return df.iloc[index]
else:
    # NOTE(review): assumes a 2-D numpy array; 1-D input would fail on the
    # second axis — confirm callers.
    return df[index, :]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def xgb_progressbar(rounds=1000): """Progressbar for xgboost using tqdm library. Examples -------- """
pbar = tqdm(total=rounds)

def callback(_, ):
    # Invoked once per boosting round; advances the bar by one step.
    pbar.update(1)

return callback
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add(self, model): """Adds a single model. Parameters model : `Estimator` """
# Only wrapped estimators (Regressor/Classifier) are accepted.
if isinstance(model, (Regressor, Classifier)):
    self.models.append(model)
else:
    raise ValueError('Unrecognized estimator.')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stack(self, k=5, stratify=False, shuffle=True, seed=100, full_test=True, add_diff=False): """Stacks sequence of models. Parameters k : int, default 5 Number of folds. stratify : bool, default False shuffle : bool, default True seed : int, default 100 full_test : bool, default True If True then evaluate test dataset on the full data otherwise take the mean of every fold. add_diff : bool, default False Returns ------- `DataFrame` Examples -------- """
result_train = []
result_test = []
y = None

for model in self.models:
    # Each model produces out-of-fold train predictions and test predictions.
    result = model.stack(k=k, stratify=stratify, shuffle=shuffle,
                         seed=seed, full_test=full_test)
    train_df = pd.DataFrame(result.X_train, columns=generate_columns(result.X_train, model.name))
    test_df = pd.DataFrame(result.X_test, columns=generate_columns(result.X_test, model.name))
    result_train.append(train_df)
    result_test.append(test_df)
    if y is None:
        # Target is the same across models; capture it once.
        y = result.y_train

# Stack per-model prediction columns side by side.
result_train = pd.concat(result_train, axis=1)
result_test = pd.concat(result_test, axis=1)

if add_diff:
    result_train = feature_combiner(result_train)
    result_test = feature_combiner(result_test)

ds = Dataset(X_train=result_train, y_train=y, X_test=result_test)
return ds
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def blend(self, proportion=0.2, stratify=False, seed=100, indices=None, add_diff=False): """Blends sequence of models. Parameters proportion : float, default 0.2 stratify : bool, default False seed : int, default 100 indices : list(np.ndarray,np.ndarray), default None Two numpy arrays that contain indices for train/test slicing. add_diff : bool, default False Returns ------- `DataFrame` Examples -------- """
result_train = []
result_test = []
y = None

for model in self.models:
    # Holdout-based predictions for each model.
    result = model.blend(proportion=proportion, stratify=stratify,
                         seed=seed, indices=indices)
    train_df = pd.DataFrame(result.X_train, columns=generate_columns(result.X_train, model.name))
    test_df = pd.DataFrame(result.X_test, columns=generate_columns(result.X_test, model.name))
    result_train.append(train_df)
    result_test.append(test_df)
    if y is None:
        # Target is the same across models; capture it once.
        y = result.y_train

# NOTE(review): ignore_index=True discards the generated column names,
# unlike stack() which keeps them — confirm this asymmetry is intended.
result_train = pd.concat(result_train, axis=1, ignore_index=True)
result_test = pd.concat(result_test, axis=1, ignore_index=True)

if add_diff:
    result_train = feature_combiner(result_train)
    result_test = feature_combiner(result_test)

return Dataset(X_train=result_train, y_train=y, X_test=result_test)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_weights(self, scorer, test_size=0.2, method='SLSQP'): """Finds optimal weights for weighted average of models. Parameters scorer : function Scikit-learn like metric. test_size : float, default 0.2 method : str Type of solver. Should be one of: - 'Nelder-Mead' - 'Powell' - 'CG' - 'BFGS' - 'Newton-CG' - 'L-BFGS-B' - 'TNC' - 'COBYLA' - 'SLSQP' - 'dogleg' - 'trust-ncg' Returns ------- list """
# Delegate weight search to the Optimizer using the requested solver.
p = Optimizer(self.models, test_size=test_size, scorer=scorer)
return p.minimize(method)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def weight(self, weights): """Applies weighted mean to models. Parameters weights : list Returns ------- np.ndarray Examples """
# Weighted average across models (axis 0 stacks per-model predictions).
return self.apply(lambda x: np.average(x, axis=0, weights=weights))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def onehot_features(train, test, features, full=False, sparse=False, dummy_na=True): """Encode categorical features using a one-hot scheme. Parameters train : pd.DataFrame test : pd.DataFrame features : list Column names in the DataFrame to be encoded. full : bool, default False Whether use all columns from train/test or only from train. sparse : bool, default False Whether the dummy columns should be sparse or not. dummy_na : bool, default True Add a column to indicate NaNs, if False NaNs are ignored. Returns ------- train : pd.DataFrame test : pd.DataFrame """
# Keep only the requested columns actually present in train.
features = [f for f in features if f in train.columns]

for column in features:
    if full:
        # Category universe drawn from train + test combined.
        categories = pd.concat([train[column], test[column]]).dropna().unique()
    else:
        categories = train[column].dropna().unique()
    # NOTE(review): astype('category', categories=...) was removed in modern
    # pandas (use pd.Categorical / CategoricalDtype) — confirm the pinned
    # pandas version supports this call.
    train[column] = train[column].astype('category', categories=categories)
    test[column] = test[column].astype('category', categories=categories)

train = pd.get_dummies(train, columns=features, dummy_na=dummy_na, sparse=sparse)
test = pd.get_dummies(test, columns=features, dummy_na=dummy_na, sparse=sparse)

# d_cols = train.columns[(train == 0).all()]
# train.drop(d_cols, 1, inplace=True)
# test.drop(d_cols, 1, inplace=True)

return train, test
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def factorize(train, test, features, na_value=-9999, full=False, sort=True): """Factorize categorical features. Parameters train : pd.DataFrame test : pd.DataFrame features : list Column names in the DataFrame to be encoded. na_value : int, default -9999 full : bool, default False Whether use all columns from train/test or only from train. sort : bool, default True Sort by values. Returns ------- train : pd.DataFrame test : pd.DataFrame """
for column in features:
    if full:
        # Build the category index from train + test combined.
        vs = pd.concat([train[column], test[column]])
        labels, indexer = pd.factorize(vs, sort=sort)
    else:
        labels, indexer = pd.factorize(train[column], sort=sort)

    train[column] = indexer.get_indexer(train[column])
    test[column] = indexer.get_indexer(test[column])

    # get_indexer marks unseen/NaN values as -1; remap them to na_value.
    if na_value != -1:
        train[column] = train[column].replace(-1, na_value)
        test[column] = test[column].replace(-1, na_value)

return train, test
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def woe(df, feature_name, target_name): """Calculate weight of evidence. Parameters df: Dataframe feature_name: str Column name to encode. target_name: str Target column name. Returns ------- Series """
def group_woe(group): event = float(group.sum()) non_event = group.shape[0] - event rel_event = event / event_total rel_non_event = non_event / non_event_total return np.log(rel_non_event / rel_event) * 100 if df[target_name].nunique() > 2: raise ValueError('Target column should be binary (1/0).') event_total = float(df[df[target_name] == 1.0].shape[0]) non_event_total = float(df.shape[0] - event_total) woe_vals = df.groupby(feature_name)[target_name].transform(group_woe) return woe_vals
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def kfold(self, k=5, stratify=False, shuffle=True, seed=33): """K-Folds cross validation iterator. Parameters k : int, default 5 stratify : bool, default False shuffle : bool, default True seed : int, default 33 Yields ------- X_train, y_train, X_test, y_test, train_index, test_index """
if stratify: kf = StratifiedKFold(n_splits=k, random_state=seed, shuffle=shuffle) else: kf = KFold(n_splits=k, random_state=seed, shuffle=shuffle) for train_index, test_index in kf.split(self.X_train, self.y_train): X_train, y_train = idx(self.X_train, train_index), self.y_train[train_index] X_test, y_test = idx(self.X_train, test_index), self.y_train[test_index] yield X_train, y_train, X_test, y_test, train_index, test_index
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def hash(self): """Return md5 hash for current dataset."""
# Lazily compute and memoize an md5 digest identifying this dataset.
if self._hash is None:
    m = hashlib.new('md5')
    if self._preprocessor is None:
        # generate hash from numpy array
        # NOTE: the update order (X_train, y_train, X_test, y_test) is part
        # of the digest definition - do not reorder these calls.
        m.update(numpy_buffer(self._X_train))
        m.update(numpy_buffer(self._y_train))
        if self._X_test is not None:
            m.update(numpy_buffer(self._X_test))
        if self._y_test is not None:
            m.update(numpy_buffer(self._y_test))
    elif callable(self._preprocessor):
        # generate hash from user defined object (source code)
        # Hashing the preprocessor's source text makes the digest change
        # whenever the preprocessing code changes, even before data loads.
        m.update(inspect.getsource(self._preprocessor).encode('utf-8'))
    self._hash = m.hexdigest()
return self._hash
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def merge(self, ds, inplace=False, axis=1): """Merge two datasets. Parameters axis : {0,1} ds : `Dataset` inplace : bool, default False Returns ------- `Dataset` """
if not isinstance(ds, Dataset): raise ValueError('Expected `Dataset`, got %s.' % ds) X_train = concat(ds.X_train, self.X_train, axis=axis) y_train = concat(ds.y_train, self.y_train, axis=axis) if ds.X_test is not None: X_test = concat(ds.X_test, self.X_test, axis=axis) else: X_test = None if ds.y_test is not None: y_test = concat(ds.y_test, self.y_test, axis=axis) else: y_test = None if inplace: self._X_train = X_train self._y_train = y_train if X_test is not None: self._X_test = X_test if y_test is not None: self._y_test = y_test return None return Dataset(X_train, y_train, X_test, y_test)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_csc(self): """Convert Dataset to scipy's Compressed Sparse Column matrix."""
# Convert the stored matrices to CSC in place.
# X_test may be absent (train-only Dataset - see merge(), which allows
# X_test=None), so guard it: the original crashed on csc_matrix(None).
self._X_train = csc_matrix(self._X_train)
if self._X_test is not None:
    self._X_test = csc_matrix(self._X_test)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_csr(self): """Convert Dataset to scipy's Compressed Sparse Row matrix."""
# Convert the stored matrices to CSR in place.
# X_test may be absent (train-only Dataset - see merge(), which allows
# X_test=None), so guard it: the original crashed on csr_matrix(None).
self._X_train = csr_matrix(self._X_train)
if self._X_test is not None:
    self._X_test = csr_matrix(self._X_test)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_dense(self): """Convert sparse Dataset to dense matrix."""
# Densify each matrix independently. The original gated the X_test
# conversion on X_train being sparse and crashed when X_test was None
# (merge() allows X_test=None); hasattr(None, 'todense') is simply False.
if hasattr(self._X_train, 'todense'):
    self._X_train = self._X_train.todense()
if hasattr(self._X_test, 'todense'):
    self._X_test = self._X_test.todense()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _dhash(self, params): """Generate hash of the dictionary object."""
m = hashlib.new('md5') m.update(self.hash.encode('utf-8')) for key in sorted(params.keys()): h_string = ('%s-%s' % (key, params[key])).encode('utf-8') m.update(h_string) return m.hexdigest()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate(self, scorer=None, k=1, test_size=0.1, stratify=False, shuffle=True, seed=100, indices=None): """Evaluate score by cross-validation. Parameters scorer : function(y_true,y_pred), default None Scikit-learn like metric that returns a score. k : int, default 1 The number of folds for validation. If k=1 then randomly split X_train into two parts otherwise use K-fold approach. test_size : float, default 0.1 Size of the test holdout if k=1. stratify : bool, default False shuffle : bool, default True seed : int, default 100 indices : list(np.array,np.array), default None Two numpy arrays that contain indices for train/test slicing. (train_index,test_index) Returns ------- y_true: list Actual labels. y_pred: list Predicted labels. Examples -------- """
# When caching is enabled, derive a deterministic cache key from every
# parameter that influences the fold layout / predictions.
if self.use_cache:
    pdict = {'k': k, 'stratify': stratify, 'shuffle': shuffle, 'seed': seed, 'test_size': test_size}
    if indices is not None:
        # Arrays are not hashable; fold content hashes into the key instead.
        pdict['train_index'] = np_hash(indices[0])
        pdict['test_index'] = np_hash(indices[1])
    dhash = self._dhash(pdict)
    c = Cache(dhash, prefix='v')
    if c.available:
        logger.info('Loading %s\'s validation results from cache.' % self._name)
elif (self.dataset.X_train is None) and (self.dataset.y_train is None):
    # NOTE(review): the dataset is only loaded here when use_cache is
    # False; with use_cache=True and a cache miss it is presumably loaded
    # lazily elsewhere (e.g. inside dataset.split/kfold) - verify.
    self.dataset.load()
scores = []
y_true = []   # actual labels, one entry per fold
y_pred = []   # predictions, one entry per fold
if k == 1:
    # Single random holdout split instead of k-fold.
    X_train, y_train, X_test, y_test = self.dataset.split(test_size=test_size, stratify=stratify, seed=seed, indices=indices)
    if self.use_cache and c.available:
        prediction = c.retrieve('0')
    else:
        prediction = self._predict(X_train, y_train, X_test, y_test)
        if self.use_cache:
            c.store('0', prediction)
    if scorer is not None:
        scores.append(scorer(y_test, prediction))
    y_true.append(y_test)
    y_pred.append(prediction)
else:
    for i, fold in enumerate(self.dataset.kfold(k, stratify=stratify, seed=seed, shuffle=shuffle)):
        X_train, y_train, X_test, y_test, train_index, test_index = fold
        if self.use_cache and c.available:
            prediction = c.retrieve(str(i))
        else:
            prediction = None
        # Compute the fold if the cache had no entry (or caching is off).
        if prediction is None:
            logger.info('Calculating %s\'s fold #%s' % (self._name, i + 1))
            prediction = self._predict(X_train, y_train, X_test, y_test)
            if self.use_cache:
                c.store(str(i), prediction)
        if scorer is not None:
            scores.append(scorer(y_test, prediction))
        y_true.append(y_test)
        y_pred.append(prediction)
if scorer is not None:
    report_score(scores, scorer)
return y_true, y_pred
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stack(self, k=5, stratify=False, shuffle=True, seed=100, full_test=True): """Stack a single model. You should rarely be using this method. Use `ModelsPipeline.stack` instead. Parameters k : int, default 5 stratify : bool, default False shuffle : bool, default True seed : int, default 100 full_test : bool, default True If `True` then evaluate test dataset on the full data otherwise take the mean of every fold. Returns ------- `Dataset` with out of fold predictions. """
train = None  # out-of-fold predictions, filled row-by-row per fold
test = []     # per-fold test predictions (only used when full_test=False)
if self.use_cache:
    pdict = {'k': k, 'stratify': stratify, 'shuffle': shuffle, 'seed': seed, 'full_test': full_test}
    dhash = self._dhash(pdict)
    c = Cache(dhash, prefix='s')
    if c.available:
        logger.info('Loading %s\'s stack results from cache.' % self._name)
        train = c.retrieve('train')
        test = c.retrieve('test')
        y_train = c.retrieve('y_train')
        return Dataset(X_train=train, y_train=y_train, X_test=test)
elif not self.dataset.loaded:
    # NOTE(review): reached only when use_cache is False; with caching on
    # and a cache miss the dataset is presumably loaded lazily elsewhere.
    self.dataset.load()
for i, fold in enumerate(self.dataset.kfold(k, stratify=stratify, seed=seed, shuffle=shuffle)):
    X_train, y_train, X_test, y_test, train_index, test_index = fold
    logger.info('Calculating %s\'s fold #%s' % (self._name, i + 1))
    if full_test:
        prediction = reshape_1d(self._predict(X_train, y_train, X_test, y_test))
    else:
        # Predict the fold holdout and the real test set in one call, then
        # split the stacked result back apart at the holdout boundary.
        xt_shape = X_test.shape[0]
        x_t = concat(X_test, self.dataset.X_test)
        prediction_concat = reshape_1d(self._predict(X_train, y_train, x_t))
        prediction, prediction_test = tsplit(prediction_concat, xt_shape)
        test.append(prediction_test)
    if train is None:
        # Allocate once the prediction's column count is known.
        train = np.zeros((self.dataset.X_train.shape[0], prediction.shape[1]))
    train[test_index] = prediction
if full_test:
    # Refit on the full training data for the final test predictions.
    logger.info('Calculating %s\'s test data' % self._name)
    test = self._predict(self.dataset.X_train, self.dataset.y_train, self.dataset.X_test)
else:
    # Average the per-fold test predictions.
    test = np.mean(test, axis=0)
test = reshape_1d(test)
if self.use_cache:
    c.store('train', train)
    c.store('test', test)
    c.store('y_train', self.dataset.y_train)
return Dataset(X_train=train, y_train=self.dataset.y_train, X_test=test)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def blend(self, proportion=0.2, stratify=False, seed=100, indices=None): """Blend a single model. You should rarely be using this method. Use `ModelsPipeline.blend` instead. Parameters proportion : float, default 0.2 Test size holdout. stratify : bool, default False seed : int, default 100 indices : list(np.ndarray,np.ndarray), default None Two numpy arrays that contain indices for train/test slicing. (train_index,test_index) Returns ------- `Dataset` """
if self.use_cache:
    # Cache key covers everything that changes the split or predictions.
    pdict = {'proportion': proportion, 'stratify': stratify, 'seed': seed, 'indices': indices}
    if indices is not None:
        # Arrays are not hashable; add content hashes for a stable key.
        pdict['train_index'] = np_hash(indices[0])
        pdict['test_index'] = np_hash(indices[1])
    dhash = self._dhash(pdict)
    c = Cache(dhash, prefix='b')
    if c.available:
        logger.info('Loading %s\'s blend results from cache.' % self._name)
        train = c.retrieve('train')
        test = c.retrieve('test')
        y_train = c.retrieve('y_train')
        return Dataset(X_train=train, y_train=y_train, X_test=test)
elif not self.dataset.loaded:
    # NOTE(review): only runs when use_cache is False; with caching on and
    # a cache miss the dataset is presumably loaded lazily elsewhere.
    self.dataset.load()
X_train, y_train, X_test, y_test = self.dataset.split(test_size=proportion, stratify=stratify, seed=seed, indices=indices)
# Predict the holdout and the real test set in a single call, then split
# the result back at the holdout boundary.
xt_shape = X_test.shape[0]
x_t = concat(X_test, self.dataset.X_test)
prediction_concat = reshape_1d(self._predict(X_train, y_train, x_t))
new_train, new_test = tsplit(prediction_concat, xt_shape)
if self.use_cache:
    c.store('train', new_train)
    c.store('test', new_test)
    c.store('y_train', y_test)
return Dataset(new_train, y_test, new_test)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def numpy_buffer(ndarray): """Creates a buffer from c_contiguous numpy ndarray."""
# Credits to: https://github.com/joblib/joblib/blob/04b001861e1dd03a857b7b419c336de64e05714c/joblib/hashing.py if isinstance(ndarray, (pd.Series, pd.DataFrame)): ndarray = ndarray.values if ndarray.flags.c_contiguous: obj_c_contiguous = ndarray elif ndarray.flags.f_contiguous: obj_c_contiguous = ndarray.T else: obj_c_contiguous = ndarray.flatten() obj_c_contiguous = obj_c_contiguous.view(np.uint8) if hasattr(np, 'getbuffer'): return np.getbuffer(obj_c_contiguous) else: return memoryview(obj_c_contiguous)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def store(self, key, data): """Takes an array and stores it in the cache."""
# Create the cache directory lazily on first store.
if not os.path.exists(self._hash_dir):
    os.makedirs(self._hash_dir)
if isinstance(data, pd.DataFrame):
    # A DataFrame is split into a raw value array (.npy) plus a JSON
    # sidecar with the column names, so retrieve() can rebuild it.
    columns = data.columns.tolist()
    np.save(os.path.join(self._hash_dir, key), data.values)
    # Use a context manager so the file handle is closed deterministically
    # (the original left the handle to the garbage collector).
    with open(os.path.join(self._hash_dir, '%s.json' % key), 'w') as json_file:
        json.dump(columns, json_file)
else:
    np.save(os.path.join(self._hash_dir, key), data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def retrieve(self, key): """Retrieves a cached array if possible."""
column_file = os.path.join(self._hash_dir, '%s.json' % key) cache_file = os.path.join(self._hash_dir, '%s.npy' % key) if os.path.exists(cache_file): data = np.load(cache_file) if os.path.exists(column_file): with open(column_file, 'r') as json_file: columns = json.load(json_file) data = pd.DataFrame(data, columns=columns) else: return None return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_coords(cls, x, y): """ Creates an ECPoint object from the X and Y integer coordinates of the point :param x: The X coordinate, as an integer :param y: The Y coordinate, as an integer :return: An ECPoint object """
x_bytes = int(math.ceil(math.log(x, 2) / 8.0)) y_bytes = int(math.ceil(math.log(y, 2) / 8.0)) num_bytes = max(x_bytes, y_bytes) byte_string = b'\x04' byte_string += int_to_bytes(x, width=num_bytes) byte_string += int_to_bytes(y, width=num_bytes) return cls(byte_string)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_coords(self): """ Returns the X and Y coordinates for this EC point, as native Python integers :return: A 2-element tuple containing integers (X, Y) """
data = self.native
first_byte = data[0:1]
# Uncompressed
# 0x04 marks the SEC1 uncompressed encoding: the remaining bytes are the
# big-endian X and Y coordinates, each exactly half the payload.
if first_byte == b'\x04':
    remaining = data[1:]
    field_len = len(remaining) // 2
    x = int_from_bytes(remaining[0:field_len])
    y = int_from_bytes(remaining[field_len:])
    return (x, y)
# 0x02 / 0x03 are the two compressed encodings (Y parity only); anything
# else is not a valid point prefix.
if first_byte not in set([b'\x02', b'\x03']):
    raise ValueError(unwrap(
        '''
        Invalid EC public key - first byte is incorrect
        '''
    ))
raise ValueError(unwrap(
    '''
    Compressed representations of EC public keys are not supported due to patent US6252960
    '''
))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unwrap(self): """ Unwraps the private key into an RSAPrivateKey, DSAPrivateKey or ECPrivateKey object :return: An RSAPrivateKey, DSAPrivateKey or ECPrivateKey object """
if self.algorithm == 'rsa': return self['private_key'].parsed if self.algorithm == 'dsa': params = self['private_key_algorithm']['parameters'] return DSAPrivateKey({ 'version': 0, 'p': params['p'], 'q': params['q'], 'g': params['g'], 'public_key': self.public_key, 'private_key': self['private_key'].parsed, }) if self.algorithm == 'ec': output = self['private_key'].parsed output['parameters'] = self['private_key_algorithm']['parameters'] output['public_key'] = self.public_key return output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fingerprint(self): """ Creates a fingerprint that can be compared with a public key to see if the two form a pair. This fingerprint is not compatible with fingerprints generated by any other software. :return: A byte string that is a sha256 hash of selected components (based on the key type) """
# Memoize: the fingerprint is computed once and reused.
if self._fingerprint is None:
    params = self['private_key_algorithm']['parameters']
    key = self['private_key'].parsed
    if self.algorithm == 'rsa':
        # RSA pairs are identified by modulus + public exponent.
        to_hash = '%d:%d' % (
            key['modulus'].native,
            key['public_exponent'].native,
        )
    elif self.algorithm == 'dsa':
        # DSA: domain parameters (p, q, g) plus the public key value.
        public_key = self.public_key
        to_hash = '%d:%d:%d:%d' % (
            params['p'].native,
            params['q'].native,
            params['g'].native,
            public_key.native,
        )
    elif self.algorithm == 'ec':
        public_key = key['public_key'].native
        if public_key is None:
            # The encoding omitted the public key; derive it instead.
            public_key = self.public_key.native
        if params.name == 'named':
            # Named curve: the curve identifier disambiguates the point.
            to_hash = '%s:' % params.chosen.native
            to_hash = to_hash.encode('utf-8')
            to_hash += public_key
        elif params.name == 'implicit_ca':
            to_hash = public_key
        elif params.name == 'specified':
            # Explicit parameters: fold field and curve constants in.
            # NOTE: the exact concatenation order defines the fingerprint;
            # do not reorder these updates.
            to_hash = '%s:' % params.chosen['field_id']['parameters'].native
            to_hash = to_hash.encode('utf-8')
            to_hash += b':' + params.chosen['curve']['a'].native
            to_hash += b':' + params.chosen['curve']['b'].native
            to_hash += public_key
    # Normalize text input to bytes before hashing.
    if isinstance(to_hash, str_cls):
        to_hash = to_hash.encode('utf-8')
    self._fingerprint = hashlib.sha256(to_hash).digest()
return self._fingerprint
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(ci=False): """ Runs the tests while measuring coverage :param ci: If coverage is being run in a CI environment - this triggers trying to run the tests for the rest of modularcrypto and uploading coverage data :return: A bool - if the tests ran successfully """
# Remove any stale XML report so a failed run can't submit old data.
xml_report_path = os.path.join(package_root, 'coverage.xml')
if os.path.exists(xml_report_path):
    os.unlink(xml_report_path)
# Start measuring before the tests module is imported so import-time code
# in the package is counted too.
cov = coverage.Coverage(include='%s/*.py' % package_name)
cov.start()
from .tests import run as run_tests
result = run_tests()
print()
if ci:
    # In CI, also run the test suites of the other modularcrypto packages
    # against this checkout.
    suite = unittest.TestSuite()
    loader = unittest.TestLoader()
    for other_package in other_packages:
        for test_class in _load_package_tests(other_package):
            suite.addTest(loader.loadTestsFromTestCase(test_class))
    if suite.countTestCases() > 0:
        print('Running tests from other modularcrypto packages')
        sys.stdout.flush()
        runner_result = unittest.TextTestRunner(stream=sys.stdout, verbosity=1).run(suite)
        # Overall success requires this package's and the others' tests.
        result = runner_result.wasSuccessful() and result
        print()
        sys.stdout.flush()
cov.stop()
cov.save()
cov.report(show_missing=False)
print()
sys.stdout.flush()
if ci:
    cov.xml_report()
# Only submit coverage when everything passed and a report was produced.
if ci and result and os.path.exists(xml_report_path):
    _codecov_submit()
    print()
return result