text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def resolve_pattern(pattern):
    """Resolve a glob pattern into a filelist.

    A pattern that names an existing directory is expanded to the
    recursive benchmark-file glob inside it before resolution.
    """
    if os.path.isdir(pattern):
        # os.path.isdir already implies existence.
        pattern = os.path.join(pattern, '**/*.bench.py')
    return recursive_glob(pattern)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def cli(patterns, times, json, csv, rst, md, ref, unit, precision, debug):
    '''Execute minibench benchmarks'''
    # Optionally load a reference result set to compare against.
    if ref:
        ref = JSON.load(ref)

    reporters = [CliReporter(ref=ref, debug=debug, unit=unit, precision=precision)]

    # Expand every pattern (default: all *.bench.py files) into filenames.
    filenames = []
    for pattern in (patterns or ['**/*.bench.py']):
        filenames.extend(resolve_pattern(pattern))

    # Each optional output flag enables the matching reporter.
    optional_reporters = (
        (json, JsonReporter),
        (csv, CsvReporter),
        (rst, RstReporter),
        (md, MarkdownReporter),
    )
    for target, reporter_class in optional_reporters:
        if target:
            reporters.append(reporter_class(target, precision=precision))

    kwargs = {'times': times} if times else {}
    runner = BenchmarkRunner(*filenames, reporters=reporters, debug=debug)
    runner.run(**kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_json(self): """Load JSON from the request body and store them in self.request.arguments, like Tornado does by default for POSTed form parameters. If JSON cannot be decoded :raises ValueError: JSON Could not be decoded """
def load_json(self):
    """Load JSON from the request body and store it in
    self.request.arguments, mirroring what Tornado does for POSTed
    form parameters.

    :raises ValueError: if the body cannot be decoded as JSON
    """
    body = self.request.body
    try:
        parsed = json.loads(body)
    except ValueError:
        msg = "Could not decode JSON: %s" % body
        self.logger.debug(msg)
        self.raise_error(400, msg)
    else:
        self.request.arguments = parsed
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_dict_of_all_args(self): """Generates a dictionary from a handler paths query string and returns it :returns: Dictionary of all key/values in arguments list :rtype: dict """
def get_dict_of_all_args(self):
    """Generate a dictionary from this handler's query string.

    Reserved query-string parameters (from settings) are excluded, as
    are arguments whose resolved value is falsy.

    :returns: Dictionary of all key/values in the arguments list
    :rtype: dict
    """
    reserved = self.settings.get("reserved_query_string_params", [])
    result = {}
    for name in self.request.arguments:
        if name in reserved:
            continue
        value = self.get_argument(name, default=None)
        if value:
            result[name] = value
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_arg_value_as_type(self, key, default=None, convert_int=False): """Allow users to pass through truthy type values like true, yes, no and get to a typed variable in your code :param str val: The string reprensentation of the value you want to convert :returns: adapted value :rtype: dynamic """
def get_arg_value_as_type(self, key, default=None, convert_int=False):
    """Coerce truthy-looking query-argument strings to typed values.

    ``true``/``yes`` (any case) become ``True``; ``false``/``no``
    become ``False``; ints pass through unchanged; anything else is
    returned as-is.

    :param str key: name of the query argument to read
    :param default: value returned when the argument is absent
    :param bool convert_int: accepted for interface compatibility;
        currently unused (the original implementation ignored it too)
    :returns: adapted value
    :rtype: dynamic
    """
    val = self.get_query_argument(key, default)
    if isinstance(val, int):
        return val
    # Bug fix: a missing argument with default=None used to reach
    # val.lower() and crash with AttributeError.
    if val is None:
        return val
    lowered = val.lower()
    if lowered in ('true', 'yes'):
        return True
    if lowered in ('false', 'no'):
        return False
    return val
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_mongo_query_from_arguments(self, reserved_attributes=[]): """Generate a mongo query from the given URL query parameters, handles OR query via multiples :param list reserved_attributes: A list of attributes you want to exclude from this particular query :return: dict """
def get_mongo_query_from_arguments(self, reserved_attributes=[]):
    """Generate a mongo query from the URL query parameters.

    A parameter given multiple times becomes an ``$or`` clause.

    NOTE(review): if several parameters are multi-valued, each one
    overwrites the previous ``$or`` list — confirm this is intended.

    :param list reserved_attributes: attribute names to exclude
    :return: dict
    """
    query = {}
    for name in self.request.arguments:
        if name in reserved_attributes:
            continue
        values = self.request.arguments.get(name)
        if len(values) > 1:
            query["$or"] = [{name: self.get_arg_value_as_type(v)} for v in values]
        else:
            query[name] = self.get_arg_value_as_type(values[0])
    return query
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_meta_data(self): """Creates the meta data dictionary for a revision"""
return { "comment": self.request.headers.get("comment", ""), "author": self.get_current_user() or self.settings.get('annonymous_user') }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def arg_as_array(self, arg, split_char="|"): """Turns an argument into an array, split by the splitChar :param str arg: The name of the query param you want to turn into an array based on the value :param str split_char: The character the value should be split on. :returns: A list of values :rtype: list """
def arg_as_array(self, arg, split_char="|"):
    """Turn a query argument into a list by splitting on split_char.

    :param str arg: name of the query param to split
    :param str split_char: character the value is split on
    :returns: list of values, or None when the argument is absent/empty
    :rtype: list
    """
    raw = self.get_argument(arg, default=None)
    if not raw:
        return None
    return raw.split(split_char)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: """ Sets an error status and returns a message to the user in JSON format :param int status: The status code to use :param str message: The message to return in the JSON response """
# Record the failure on the response status line, then emit a JSON
# error envelope so API clients always get a machine-readable body.
self.set_status(status)
# NOTE(review): this does not call finish() or raise, so callers must
# stop handler execution themselves after invoking this.
self.write({"message" : message, "status" : status})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def return_resource(self, resource, status=200, statusMessage="OK"): """Return a resource response :param str resource: The JSON String representation of a resource response :param int status: Status code to use :param str statusMessage: The message to use in the error response """
def return_resource(self, resource, status=200, statusMessage="OK"):
    """Return a resource response.

    :param str resource: the resource to serialize into the response
    :param int status: status code to use
    :param str statusMessage: reason phrase for the status line
    """
    self.set_status(status, statusMessage)
    # Round-trip through json_util so BSON-specific types become
    # plain JSON-safe structures before writing.
    serialized = json_util.dumps(resource)
    self.write(json.loads(serialized))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def group_objects_by(self, list, attr, valueLabel="value", childrenLabel="children"): """ Generates a group object based on the attribute value on of the given attr value that is passed in. :param list list: A list of dictionary objects :param str attr: The attribute that the dictionaries should be sorted upon :param str valueLabel: What to call the key of the field we're sorting upon :param str childrenLabel: What to call the list of child objects on the group object :returns: list of grouped objects by a given attribute :rtype: list """
def group_objects_by(self, list, attr, valueLabel="value", childrenLabel="children"):
    """Group a list of dicts by the value of one attribute.

    :param list list: a list of dictionary objects
    :param str attr: the attribute the dictionaries are grouped on
    :param str valueLabel: key name for the grouped-on value
    :param str childrenLabel: key name for the list of child objects
    :returns: list of group objects, in first-seen order
    :rtype: list
    """
    groups = []
    for obj in list:
        val = obj.get(attr)
        if not val:
            # Bug fix: this was `pass`, so objects without the attribute
            # fell through and created a bogus group with a falsy value.
            continue
        found = False
        for group in groups:
            if group.get(valueLabel) == val:
                group[childrenLabel].append(obj)
                found = True
                break  # bug fix: no need to keep scanning once matched
        if not found:
            groups.append({"attribute": attr, valueLabel: val, childrenLabel: [obj]})
    return groups
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write_hyper_response(self, links=[], meta={}, entity_name=None, entity=None, notifications=[], actions=[]): """Writes a hyper media response object :param list links: A list of links to the resources :param dict meta: The meta data for this response :param str entity_name: The entity name :param object entity: The Entity itself :param list notifications: List of notifications :param list actions: List of actions """
def write_hyper_response(self, links=None, meta=None, entity_name=None, entity=None, notifications=[], actions=[]):
    """Write a hypermedia response object.

    :param list links: a list of links to the resources
    :param dict meta: the meta data for this response
    :param str entity_name: the entity name (required)
    :param object entity: the entity itself (required)
    :param list notifications: list of notifications
    :param list actions: list of actions
    """
    assert entity_name is not None
    assert entity is not None
    # Bug fix: links/meta previously defaulted to shared mutable [] / {}
    # objects; meta.update() mutated the shared default across calls.
    if links is None:
        links = []
    if meta is None:
        meta = {}
    meta.update({
        "status": self.get_status()
    })
    self.write({
        "links": links,
        "meta": meta,
        entity_name: entity,
        "notifications": notifications,
        "actions": actions
    })
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, id): """ Get an object by unique identifier :id string id: the bson id of an object :rtype: JSON """
# Coroutine-style handler body: `yield` awaits async Motor queries.
try:
    # An "Id" request header names an alternate lookup field;
    # otherwise the document is fetched by its BSON _id.
    if self.request.headers.get("Id"):
        object_ = yield self.client.find_one({self.request.headers.get("Id"): id})
    else:
        object_ = yield self.client.find_one_by_id(id)
    if object_:
        self.write(object_)
        return
    # Fall through: nothing matched.
    self.raise_error(404, "%s/%s not found" % (self.object_name, id))
except InvalidId as ex:
    # Malformed BSON id in the URL.
    self.raise_error(400, message="Your ID is malformed: %s" % id)
except Exception as ex:
    # Unexpected failure: log and return the generic error response.
    self.logger.error(ex)
    self.raise_error()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put(self, id): """ Update a resource by bson ObjectId :returns: json string representation :rtype: JSON """
def put(self, id):
    """Update a resource by bson ObjectId.

    With a Caesium-TOA header the update is scheduled as a revision;
    otherwise the document is updated in place.

    :returns: json string representation
    :rtype: JSON
    """
    try:
        # Async update flow
        object_ = json_util.loads(self.request.body)
        toa = self.request.headers.get("Caesium-TOA", None)

        obj_check = yield self.client.find_one_by_id(id)
        if not obj_check:
            self.raise_error(404, "Resource not found: %s" % id)
            self.finish()
            return

        if toa:
            stack = AsyncSchedulableDocumentRevisionStack(self.client.collection_name, self.settings, master_id=id)
            revision_id = yield stack.push(object_, int(toa), meta=self._get_meta_data())
            if isinstance(revision_id, str):
                self.set_header("Caesium-TOA", toa)
                # We add the id of the original request, because we don't
                # want to infer this on the client side, as the state of
                # the client code could change easily. We want this
                # request to return with the originating ID as well.
                object_["id"] = id
                self.return_resource(object_)
            else:
                self.raise_error(404, "Revision not scheduled for object: %s" % id)
        else:
            # Direct update: never overwrite the immutable _id field.
            if object_.get("_id"):
                del object_["_id"]
            response = yield self.client.update(id, object_)
            if response.get("updatedExisting"):
                object_ = yield self.client.find_one_by_id(id)
                self.return_resource(object_)
            else:
                self.raise_error(404, "Resource not found: %s" % id)
    except ValidationError as vex:
        self.logger.error("%s validation error" % self.object_name, vex)
        self.raise_error(400, "Your %s cannot be updated because it is missing required fields, see docs" % self.object_name)
    except ValueError as ex:
        # Bug fix: ex[0] is Python-2-only; exceptions are not
        # subscriptable on Python 3.
        self.raise_error(400, "Invalid JSON Body, check formatting. %s" % ex.args[0])
    except InvalidId as ex:
        # Bug fix: the status code was missing; 400 matches the GET
        # handler's InvalidId path.
        self.raise_error(400, message="Your ID is malformed: %s" % id)
    except Exception as ex:
        self.logger.error(ex)
        self.raise_error()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post(self, id=None): """ Create a new object resource :json: Object to create :returns: json string representation :rtype: JSON """
def post(self, id=None):
    """Create a new object resource.

    With a Caesium-TOA header the creation is scheduled as a revision;
    otherwise the document is inserted immediately.

    :json: object to create
    :returns: json string representation
    :rtype: JSON
    """
    try:
        try:
            base_object = json_util.loads(self.request.body)
        except TypeError:
            # Python 3: request.body is bytes and must be decoded first.
            base_object = json_util.loads(self.request.body.decode())

        toa = self.request.headers.get("Caesium-TOA", None)

        if toa:
            # Async create flow
            stack = AsyncSchedulableDocumentRevisionStack(self.client.collection_name, self.settings)
            revision_id = yield stack.push(base_object, toa=int(toa), meta=self._get_meta_data())
            resource = yield stack.preview(revision_id)
            if isinstance(revision_id, str):
                self.set_header("Caesium-TOA", toa)
                self.return_resource(resource.get("snapshot"))
            else:
                self.raise_error(404, "Revision not scheduled for object: %s" % id)
        else:
            id = yield self.client.insert(base_object)
            base_object = yield self.client.find_one_by_id(id)
            self.return_resource(base_object)
    except ValidationError as vex:
        self.logger.error("%s validation error" % self.object_name, vex)
        self.raise_error(400, "Your %s cannot be created because it is missing required fields, see docs" % self.object_name)
    except ValueError as ex:
        # Bug fix: ex[0] is Python-2-only; exceptions are not
        # subscriptable on Python 3.
        self.raise_error(400, "Invalid JSON Body, check formatting. %s" % ex.args[0])
    except Exception as ex:
        self.logger.error(ex)
        self.raise_error()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def initialize(self): """Initializer for the Search Handler"""
def initialize(self):
    """Initializer for the Search Handler."""
    # No database client until a request binds one.
    self.client = None
    self.logger = logging.getLogger(type(self).__name__)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def __lazy_migration(self, master_id): """ Creates a revision for a master id that didn't previously have a revision, this allows you to easily turn on revisioning for a collection that didn't previously allow for it. :param master_id: :returns: list of objects """
# Target collection comes from the request header.
collection_name = self.request.headers.get("collection")
if collection_name:
    stack = AsyncSchedulableDocumentRevisionStack(collection_name,
                                                  self.settings,
                                                  master_id=master_id, )
    # Create the first revision for a document that predates revisioning.
    objects = yield stack._lazy_migration(meta=self._get_meta_data())
    # Tornado-gen style: Return delivers the coroutine's result.
    raise Return(objects)

# NOTE(review): this path is only reached when the "collection" header
# is missing, so collection_name is None in the message below.
self.raise_error(500, "This object %s/%s didn't exist as a revision, "
                      "we tried to create it but we failed... Sorry. "
                      "Please check this object" % (collection_name, master_id))
raise Return(None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, master_id): """ Get a list of revisions by master ID :param master_id: :return: """
# Revisions live in a sibling "<collection>_revisions" collection.
collection_name = self.request.headers.get("collection")
self.client = BaseAsyncMotorDocument("%s_revisions" % collection_name)

limit = self.get_query_argument("limit", 2)
add_current_revision = self.get_arg_value_as_type("addCurrent", "false")
show_history = self.get_arg_value_as_type("showHistory", "false")

objects_processed = []

# NOTE(review): `unicode` is Python-2-only; this breaks on Python 3.
if isinstance(limit, unicode):
    limit = int(limit)

# Pending (unprocessed) revisions, oldest first.
# NOTE(review): the "limit" query arg is parsed above but this call
# hard-codes limit=20 — confirm whether that is intended.
objects = yield self.client.find({"master_id": master_id, "processed": False},
                                 orderby="toa",
                                 order_by_direction=1,
                                 page=0,
                                 limit=20)

# If this is a document that should have a revision and doesn't we
# orchestrate creation of the first one
if len(objects) == 0:
    new_revision = yield self.__lazy_migration(master_id)
    if not new_revision:
        return

if show_history:
    # Full processed history, newest first (reversed below).
    objects_processed = yield self.client.find({"master_id": master_id, "processed": True},
                                               orderby="toa",
                                               order_by_direction=-1,
                                               page=0,
                                               limit=limit)
elif add_current_revision:
    # Only the single most recent processed revision.
    objects_processed = yield self.client.find({"master_id": master_id, "processed": True},
                                               orderby="toa",
                                               order_by_direction=-1,
                                               page=0,
                                               limit=1)

if len(objects_processed) > 0:
    # Re-order oldest-first and flag the latest processed revision
    # as the current one before prepending to the pending list.
    objects_processed = objects_processed[::-1]
    objects_processed[-1]["current"] = True
    objects = objects_processed + objects

self.write({
    "count": len(objects),
    "results": objects
})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put(self, id): """ Update a revision by ID :param id: BSON id :return: """
# Revisions live in a sibling "<collection>_revisions" collection.
collection_name = self.request.headers.get("collection")

if not collection_name:
    # NOTE(review): raise_error does not stop execution; there is no
    # return here, so the handler continues with collection_name=None.
    self.raise_error(400, "Missing a collection name header")

self.client = BaseAsyncMotorDocument("%s_revisions" % collection_name)

# NOTE(review): super(self.__class__, self) recurses infinitely if this
# class is ever subclassed — prefer naming the class explicitly.
super(self.__class__, self).put(id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, id): """ Get revision based on the stack preview algorithm :param id: BSON id :return: JSON """
# Target collection comes from the request header.
collection_name = self.request.headers.get("collection")

if not collection_name:
    # NOTE(review): raise_error does not stop execution; there is no
    # return here, so preview still runs with collection_name=None.
    self.raise_error(400, "Missing a collection name for stack")

self.stack = AsyncSchedulableDocumentRevisionStack(collection_name, self.settings)
# Render the revision via the stack's preview algorithm.
revision = yield self.stack.preview(id)
self.write(revision)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self): """ Standard search end point for a resource of any type, override this get method as necessary in any specific sub class. This is mostly here as a convenience for basic querying functionality on attribute example URL:: foo?attr1=foo&attr2=true will create a query of:: { "attr1": "foo", "attr2": true } """
def get(self):
    """Standard search endpoint for a resource of any type; override
    in subclasses as needed.

    Builds a mongo query straight from the URL query parameters, e.g.
    ``foo?attr1=foo&attr2=true`` queries
    ``{"attr1": "foo", "attr2": true}``.
    """
    query = self.get_mongo_query_from_arguments()
    objects = yield self.client.find(query)
    self.write({
        "count": len(objects),
        "results": objects
    })
    self.finish()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put(self, id=None): """Update many objects with a single PUT. Example Request:: { "ids": ["52b0ede98ac752b358b1bd69", "52b0ede98ac752b358b1bd70"], "patch": { "foo": "bar" } } """
def put(self, id=None):
    """Update many objects with a single PUT.

    Example request::

        {
            "ids": ["52b0ede98ac752b358b1bd69", "52b0ede98ac752b358b1bd70"],
            "patch": {"foo": "bar"}
        }
    """
    toa = self.request.headers.get("Caesium-TOA")

    if not toa:
        self.raise_error(400, "Caesium-TOA header is required, none found")
        self.finish(self.request.headers.get("Caesium-TOA"))
        # Bug fix: without this return the handler kept executing the
        # bulk update after reporting the error.
        return

    meta = self._get_meta_data()
    # NOTE(review): uuid4().get_hex() is the Python 2 API; on Python 3
    # this is the .hex attribute — confirm the target runtime.
    meta["bulk_id"] = uuid.uuid4().get_hex()

    ids = self.get_json_argument("ids")
    patch = self.get_json_argument("patch")

    # One scheduled revision per target document.
    # NOTE(review): stack.push looks like a coroutine elsewhere in this
    # file but is not yielded here — confirm fire-and-forget is intended.
    for id in ids:
        stack = AsyncSchedulableDocumentRevisionStack(self.client.collection_name, self.settings, master_id=id)
        stack.push(patch, toa=toa, meta=meta)

    self.write({
        "count": len(ids),
        "result": {
            "ids": ids,
            "toa": toa,
            "patch": patch,
        }
    })
    self.finish()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete(self, bulk_id): """Update many objects with a single toa :param str bulk_id: The bulk id for the job you want to delete """
# Revisions live in a sibling "<collection>_revisions" collection.
collection_name = self.request.headers.get("collection")

if not collection_name:
    # NOTE(review): raise_error does not stop execution; there is no
    # return here, so the delete still runs with collection_name=None.
    self.raise_error(400, "Missing a collection name header")

self.revisions = BaseAsyncMotorDocument("%s_revisions" % collection_name)

self.logger.info("Deleting revisions with bulk_id %s" % (bulk_id))
# Remove every revision tagged with this bulk job id.
result = yield self.revisions.collection.remove({"meta.bulk_id": bulk_id})

self.write(result)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def author_from_git(self): """ Get the author name from git information. """
def author_from_git(self):
    """Get the author name from git configuration.

    Returns None (and leaves self.author as None) when git is absent,
    fails, or has no user.name configured.
    """
    self.author = None
    try:
        encoding = locale.getdefaultlocale()[1]
        # Launch git and capture its answer.
        cmd = Popen(["git", "config", "--get", "user.name"], stdout=PIPE)
        # Bug fix: communicate() returns an (stdout, stderr) tuple; the
        # tuple itself was being .decode()d. Also removed a leftover
        # `import ipdb; ipdb.set_trace()` debug breakpoint.
        stdout, _ = cmd.communicate()
        if stdout:
            self.author = stdout.decode(encoding).rstrip(os.linesep)
    except ImportError:
        pass
    except CalledProcessError:
        pass
    except OSError:
        # git binary not available.
        pass
    return self.author
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _getframe(level=0):
    '''
    A reimplementation of `sys._getframe`.

    `sys._getframe` is a private function, and isn't guaranteed to exist
    in all versions and implementations of Python. This function is
    about 2 times slower than the native implementation. It relies on
    the assumption that the traceback objects have `tb_frame` attributes
    holding proper frame objects.

    NOTE(review): `sys.exc_clear` exists only on Python 2; this function
    breaks on Python 3 as written.

    :param level: The number of levels deep in the stack to return the
        frame from. Defaults to `0`.
    :returns: A frame object `levels` deep from the top of the stack.
    '''
    if level < 0:
        level = 0
    try:
        # A bare `raise` with no active exception raises an exception
        # whose traceback points at the current frame.
        raise
    except:
        # `sys.exc_info` returns `(type, value, traceback)`.
        _, _, traceback = sys.exc_info()
        frame = traceback.tb_frame
        # Account for our exception, this will stop at `-1`
        # (~level is falsy exactly when level == -1).
        while ~level:
            frame = frame.f_back
            if frame is None:
                break
            level -= 1
    finally:
        sys.exc_clear()
    # Act as close to `sys._getframe` as possible.
    if frame is None:
        raise ValueError('call stack is not deep enough')
    return frame
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def graft(func=None, *, namespace=None): """Decorator for marking a function as a graft. Parameters: namespace (str): namespace of data, same format as targeting. Returns: Graft For example, these grafts:: @graft def foo_data: return {'foo', True} @graft(namespace='bar') def bar_data: return False will be redered has:: { 'foo': True, 'bar': False } """
def graft(func=None, *, namespace=None):
    """Decorator marking a function as a graft.

    Parameters:
        namespace (str): namespace of data, same format as targeting.

    Returns:
        Graft

    Usable bare (``@graft``) or with arguments
    (``@graft(namespace='bar')``).
    """
    if not func:
        # Called with arguments only: return a decorator awaiting func.
        return functools.partial(graft, namespace=namespace)
    # Idempotent: an already-wrapped Graft passes through untouched.
    return func if isinstance(func, Graft) else Graft(func, namespace=namespace)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load(force=False): """Magical loading of all grafted functions. Parameters: force (bool): force reload """
# Module-level cache: only rescan when empty or explicitly forced.
if GRAFTS and not force:
    return GRAFTS

# insert missing paths
# this could be a configurable item
userpath = settings.userpath
if os.path.isdir(userpath) and userpath not in __path__:
    __path__.append(userpath)

def notify_error(name):
    # Shared failure reporting for both discovery mechanisms below.
    logging.error('unable to load %s package' % name)

# autoload decorated functions from every module on the package path
walker = walk_packages(__path__, '%s.' % __name__, onerror=notify_error)
for module_finder, name, ispkg in walker:
    loader = module_finder.find_module(name)
    mod = loader.load_module(name)
    for func in mod.__dict__.values():
        if is_graft(func):
            GRAFTS.append(func)

# append setuptools entry-point modules
for entry_point in iter_entry_points(group=settings.entry_point):
    try:
        func = entry_point.load()
        if is_graft(func):
            GRAFTS.append(func)
        else:
            notify_error(entry_point.name)
    except Exception as error:
        # A broken plugin must not abort loading of the others.
        logging.exception(error)
        notify_error(entry_point.name)
return GRAFTS
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _write_buildproc_yaml(build_data, env, user, cmd, volumes, app_folder): """ Write a proc.yaml for the container and return the container path """
def _write_buildproc_yaml(build_data, env, user, cmd, volumes, app_folder):
    """Write a proc.yaml for the container and return the container path."""
    proc_settings = {
        'app_folder': str(app_folder),
        'app_name': build_data.app_name,
        'app_repo_url': '',
        'app_repo_type': '',
        'buildpack_url': '',
        'buildpack_version': '',
        'config_name': 'build',
        'env': env,
        'host': '',
        'port': 0,
        'version': build_data.version,
        'release_hash': '',
        'settings': {},
        'user': user,
        'cmd': cmd,
        'volumes': volumes,
        'proc_name': 'build',
        'image_name': build_data.image_name,
        'image_url': build_data.image_url,
        'image_md5': build_data.image_md5,
    }
    buildproc = ProcData(proc_settings)

    # Persist a proc.yaml for the container.
    with open('buildproc.yaml', 'w') as f:
        f.write(buildproc.as_yaml())

    return get_container_path(buildproc)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def assert_compile_finished(app_folder): """ Once builder.sh has invoked the compile script, it should return and we should set a flag to the script returned. If that flag is missing, then it is an indication that the container crashed, and we generate an error. This function will clean up the flag after the check is performed, so only call this function once. See issue #141. """
def assert_compile_finished(app_folder):
    """Verify builder.sh's compile step completed, then clear its flag.

    The compile script sets ``.postbuild.flag`` on success; a missing
    flag indicates the container crashed mid-build. The flag is removed
    after the check, so call this only once per build. See issue #141.
    """
    flag_path = os.path.join(app_folder, '.postbuild.flag')
    if not os.path.isfile(flag_path):
        raise AssertionError(
            'No postbuild flag set, LXC container may have crashed while '
            'building. Check compile logs for build.')
    # Best-effort cleanup; the check above already succeeded.
    try:
        os.remove(flag_path)
    except OSError:
        pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def recover_release_data(app_folder): """ Given the path to an app folder where an app was just built, return a dictionary containing the data emitted from running the buildpack's release script. Relies on the builder.sh script storing the release data in ./.release.yaml inside the app folder. """
def recover_release_data(app_folder):
    """Return the release data emitted by the buildpack's release script.

    Relies on builder.sh storing the data in ``.release.yaml`` inside
    the freshly-built app folder.
    """
    release_path = os.path.join(app_folder, '.release.yaml')
    with open(release_path, 'rb') as f:
        return yaml.safe_load(f)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def recover_buildpack(app_folder): """ Given the path to an app folder where an app was just built, return a BuildPack object pointing to the dir for the buildpack used during the build. Relies on the builder.sh script storing the buildpack location in /.buildpack inside the container. """
def recover_buildpack(app_folder):
    """Return a BuildPack for the buildpack used in the last build.

    Relies on builder.sh storing the buildpack location in
    ``.buildpack`` inside the container.
    """
    with open(os.path.join(app_folder, '.buildpack')) as f:
        recorded = f.read()
    # Stored as an absolute container path with a trailing newline;
    # re-root it under the current working directory.
    relative = recorded.lstrip('/').rstrip('\n')
    return BuildPack(os.path.join(os.getcwd(), relative))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pull_buildpack(url): """ Update a buildpack in its shared location, then make a copy into the current directory, using an md5 of the url. """
# Key the shared copy (and its lock) on the fragment-free URL.
defrag = _defrag(urllib.parse.urldefrag(url))
# Serialize concurrent pulls of the same buildpack.
with lock_or_wait(defrag.url):
    bp = update_buildpack(url)
    # Local copy name embeds a hash of the URL for uniqueness.
    dest = bp.basename + '-' + hash_text(defrag.url)
    shutil.copytree(bp.folder, dest)
    # Make the buildpack dir writable, per
    # https://bitbucket.org/yougov/velociraptor/issues/178
    path.Path(dest).chmod('a+wx')
return dest
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_tarball(self, app_folder, build_data): """ Following a successful build, create a tarball and build result. """
def make_tarball(self, app_folder, build_data):
    """Following a successful build, create a tarball and build result."""
    # Honor .slugignore before archiving.
    clean_slug_dir(app_folder)

    # Tar up the result, rooting the archive at the folder contents.
    with tarfile.open('build.tar.gz', 'w:gz') as tar:
        tar.add(app_folder, arcname='')
    build_data.build_md5 = file_md5('build.tar.gz')
    shutil.move('build.tar.gz', os.path.join(self.outfolder, 'build.tar.gz'))

    # Record the build metadata next to the tarball.
    build_data_path = os.path.join(self.outfolder, 'build_result.yaml')
    print("Writing", build_data_path)
    with open(build_data_path, 'w') as f:
        f.write(build_data.as_yaml())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def process_request(self, request):
    '''
    Checks whether the host domain matches one of the Site objects and
    sets request.site_id accordingly (falling back to settings.SITE_ID,
    or 0 when that is unset).
    '''
    domain = request.get_host().lower()
    # Default when no Site row matches the request's host.
    fallback = getattr(settings, 'SITE_ID', 0)
    try:
        matched = Site.objects.get(domain__iexact=domain)
        request.site_id = matched.id
    except Site.DoesNotExist:
        request.site_id = fallback
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sign(self, request, authheaders, secret): """Returns the v2 signature appropriate for the request. The request is not changed by this function. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. secret -- The base64-encoded secret key for the HMAC authorization. """
# Required auth header components; fail fast when any is missing/empty.
if "id" not in authheaders or authheaders["id"] == '':
    raise KeyError("id required in authorization headers.")
if "nonce" not in authheaders or authheaders["nonce"] == '':
    raise KeyError("nonce required in authorization headers.")
if "realm" not in authheaders or authheaders["realm"] == '':
    raise KeyError("realm required in authorization headers.")
if request.get_header('x-authorization-timestamp') == '':
    raise KeyError("X-Authorization-Timestamp is required.")
# Hash the body (when present) so the signature covers the payload;
# signable() receives None when there is no body.
bodyhash = None
if request.body is not None and request.body != b'':
    sha256 = hashlib.sha256()
    sha256.update(request.body)
    bodyhash = base64.b64encode(sha256.digest()).decode('utf-8')
# Decode the base64 secret.  On interpreters whose b64decode() lacks the
# ``validate`` kwarg (TypeError), fall back to a manual alphabet check —
# presumably a Python 2 compatibility path; confirm.
try:
    mac = hmac.HMAC(base64.b64decode(secret.encode('utf-8'), validate=True),
                    digestmod=self.digest)
except TypeError:
    s = secret.encode('utf-8')
    if not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s):
        raise binascii.Error('Non-base64 digit found')
    mac = hmac.HMAC(base64.b64decode(s), digestmod=self.digest)
# HMAC over the canonical signable string, base64-encoded for transport.
mac.update(self.signable(request, authheaders, bodyhash).encode('utf-8'))
digest = mac.digest()
return base64.b64encode(digest).decode('utf-8')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_response_signer(self): """Returns the response signer for this version of the signature. """
def get_response_signer(self):
    """Returns the response signer for this version of the signature.

    The signer is built lazily on first access and cached on the
    instance for subsequent calls.
    """
    try:
        return self.response_signer
    except AttributeError:
        self.response_signer = V2ResponseSigner(self.digest, orig=self)
        return self.response_signer
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check(self, request, secret): """Verifies whether or not the request bears an authorization appropriate and valid for this version of the signature. This verifies every element of the signature, including the timestamp's value. Does not alter the request. Keyword arguments: request -- A request object which can be consumed by this API. secret -- The base64-encoded secret key for the HMAC authorization. """
# A missing Authorization header can never validate.
if request.get_header("Authorization") == "":
    return False
ah = self.parse_auth_headers(request.get_header("Authorization"))
if "signature" not in ah:
    return False
if request.get_header('x-authorization-timestamp') == '':
    raise KeyError("X-Authorization-Timestamp is required.")
timestamp = int(float(request.get_header('x-authorization-timestamp')))
if timestamp == 0:
    raise ValueError("X-Authorization-Timestamp must be a valid, non-zero timestamp.")
# preset_time lets tests pin "now"; otherwise use the wall clock.
if self.preset_time is None:
    curr_time = time.time()
else:
    curr_time = self.preset_time
# Reject requests outside a +/- 15 minute (900 second) window.
if timestamp > curr_time + 900:
    raise ValueError("X-Authorization-Timestamp is too far in the future.")
if timestamp < curr_time - 900:
    raise ValueError("X-Authorization-Timestamp is too far in the past.")
# When a body is present, the declared body hash must exist and match.
if request.body is not None and request.body != b'':
    content_hash = request.get_header("x-authorization-content-sha256")
    if content_hash == '':
        raise KeyError("X-Authorization-Content-SHA256 is required for requests with a request body.")
    sha256 = hashlib.sha256()
    sha256.update(request.body)
    if content_hash != base64.b64encode(sha256.digest()).decode('utf-8'):
        raise ValueError("X-Authorization-Content-SHA256 must match the SHA-256 hash of the request body.")
# Recompute the signature and compare with the client-supplied one.
return ah["signature"] == self.sign(request, ah, secret)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unroll_auth_headers(self, authheaders, exclude_signature=False, sep=",", quote=True): """Converts an authorization header dict-like object into a string representing the authorization. Keyword arguments: authheaders -- A string-indexable object which contains the headers appropriate for this signature version. """
def unroll_auth_headers(self, authheaders, exclude_signature=False, sep=",", quote=True):
    """Converts an authorization header dict-like object into a string
    representing the authorization.

    Keys are emitted in sorted order.  All values are URL-encoded except
    the signature, which is either emitted verbatim or (with
    ``exclude_signature=True``) omitted entirely.

    Keyword arguments:
    authheaders -- A string-indexable object which contains the headers
    appropriate for this signature version.
    """
    template = '{0}="{1}"' if quote else '{0}={1}'
    parts = []
    for key, value in sorted(authheaders.items()):
        if key == 'signature':
            if exclude_signature:
                continue
            # The signature is never URL-encoded.
            parts.append(template.format(key, str(value)))
        else:
            parts.append(template.format(key, urlquote(str(value), safe='')))
    return sep.join(parts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sign_direct(self, request, authheaders, secret): """Signs a request directly with a v2 signature. The request's Authorization header will change. This function may also add the required X-Authorization-Timestamp and X-Authorization-Content-SHA256 headers. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. secret -- The base64-encoded secret key for the HMAC authorization. """
def sign_direct(self, request, authheaders, secret):
    """Signs a request directly with a v2 signature.

    The request's Authorization header will change.  This function may
    also add the required X-Authorization-Timestamp and
    X-Authorization-Content-SHA256 headers when they are absent.

    Keyword arguments:
    request -- A request object which can be consumed by this API.
    authheaders -- A string-indexable object which contains the headers
    appropriate for this signature version.
    secret -- The base64-encoded secret key for the HMAC authorization.
    """
    # Ensure a timestamp exists before computing the signature.
    if request.get_header('x-authorization-timestamp') == '':
        request.with_header("X-Authorization-Timestamp", str(time.time()))
    body = request.body
    if body is not None and body != b'':
        # A body requires a content hash header; add it when missing.
        if request.get_header("x-authorization-content-sha256") == '':
            digest = hashlib.sha256(body).digest()
            request.with_header("X-Authorization-Content-SHA256",
                                base64.b64encode(digest).decode('utf-8'))
    authheaders["signature"] = self.sign(request, authheaders, secret)
    header_value = "acquia-http-hmac {0}".format(self.unroll_auth_headers(authheaders))
    return request.with_header("Authorization", header_value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check(self, request, response, secret): """Checks the response for the appropriate signature. Returns True if the signature matches the expected value. Keyword arguments: request -- A request object which can be consumed by this API. response -- A requests response object or compatible signed response object. secret -- The base64-encoded secret key for the HMAC authorization. """
def check(self, request, response, secret):
    """Checks the response for the appropriate signature.

    Returns True if the signature matches the expected value.

    Keyword arguments:
    request -- A request object which can be consumed by this API.
    response -- A requests response object or compatible signed response object.
    secret -- The base64-encoded secret key for the HMAC authorization.
    """
    auth = request.get_header('Authorization')
    if auth == '':
        raise KeyError('Authorization header is required for the request.')
    parsed = self.orig.parse_auth_headers(auth)
    expected = response.headers['X-Server-Authorization-HMAC-SHA256']
    if expected == '':
        raise KeyError('Response is missing the signature header X-Server-Authorization-HMAC-SHA256.')
    # Recompute from the response body and compare.
    return self.sign(request, parsed, response.text, secret) == expected
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def signable(self, request, authheaders, response_body): """Creates the signable string for a response and returns it. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response. """
def signable(self, request, authheaders, response_body):
    """Creates the signable string for a response and returns it.

    The result is ``"<nonce>\\n<timestamp>\\n<body>"``.

    Keyword arguments:
    request -- A request object which can be consumed by this API.
    authheaders -- A string-indexable object which contains the headers
    appropriate for this signature version.
    response_body -- A string or bytes-like object which represents the
    body of the response.
    """
    nonce = authheaders["nonce"]
    timestamp = request.get_header("x-authorization-timestamp")
    # Accept either bytes or str.  The original used a bare ``except``
    # here which also hid unrelated errors; only a failed UTF-8 decode
    # of a bytes body should fall back to using the value as-is.
    if isinstance(response_body, bytes):
        try:
            body_str = response_body.decode('utf-8')
        except UnicodeDecodeError:
            body_str = response_body
    else:
        body_str = response_body
    return '{0}\n{1}\n{2}'.format(nonce, timestamp, body_str)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sign(self, request, authheaders, response_body, secret): """Returns the response signature for the response to the request. Keyword arguments: request -- A request object which can be consumed by this API. authheaders -- A string-indexable object which contains the headers appropriate for this signature version. response_body -- A string or bytes-like object which represents the body of the response. secret -- The base64-encoded secret key for the HMAC authorization. """
# Both the nonce and the timestamp feed into the response signature.
if "nonce" not in authheaders or authheaders["nonce"] == '':
    raise KeyError("nonce required in authorization headers.")
if request.get_header('x-authorization-timestamp') == '':
    raise KeyError("X-Authorization-Timestamp is required.")
# Decode the base64 secret.  On interpreters whose b64decode() lacks the
# ``validate`` kwarg (TypeError), fall back to a manual alphabet check —
# presumably a Python 2 compatibility path; confirm.
try:
    mac = hmac.HMAC(base64.b64decode(secret.encode('utf-8'), validate=True),
                    digestmod=self.digest)
except TypeError:
    s = secret.encode('utf-8')
    if not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s):
        raise binascii.Error('Non-base64 digit found')
    mac = hmac.HMAC(base64.b64decode(s), digestmod=self.digest)
# HMAC over "<nonce>\n<timestamp>\n<body>", base64-encoded for transport.
mac.update(self.signable(request, authheaders, response_body).encode('utf-8'))
digest = mac.digest()
return base64.b64encode(digest).decode('utf-8')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_task(self, id, client=None): """Deletes a task from the current task queue. If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to delete. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current taskqueue. :raises: :class:`gcloud.exceptions.NotFound` """
def delete_task(self, id, client=None):
    """Deletes a task from the current task queue.

    If the task isn't found (backend 404), raises a
    :class:`gcloud.exceptions.NotFound`.

    :type id: string
    :param id: A task name to delete.
    :param client: Optional. The client to use. If not passed, falls
        back to the ``client`` stored on the current taskqueue.
    :raises: :class:`gcloud.exceptions.NotFound`
    """
    client = self._require_client(client)
    # Build a Task purely to derive its REST path.
    target = Task(taskqueue=self, id=id)
    # We intentionally pass `_target_object=None` since a DELETE
    # request has no response value (whether in a standard request or
    # in a batch request).
    client.connection.api_request(
        method='DELETE', path=target.path, _target_object=None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_task(self, id, client=None): """Gets a named task from taskqueue If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to get :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """
def get_task(self, id, client=None):
    """Gets a named task from the taskqueue.

    :type id: string
    :param id: A task name to get.
    :param client: Optional. The client to use. If not passed, falls
        back to the ``client`` stored on the current taskqueue.
    :rtype: :class:`_Task` or ``NoneType``
    :returns: the task, or ``None`` when the backend responds 404.
    """
    client = self._require_client(client)
    task = Task(taskqueue=self, id=id)
    try:
        payload = client.connection.api_request(
            method='GET', path=task.path, _target_object=task)
    except NotFound:
        return None
    task._set_properties(payload)
    return task
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lease(self, lease_time, num_tasks, group_by_tag=False, tag=None, client=None): """ Acquires a lease on the topmost N unowned tasks in the specified queue. :type lease_time: int :param lease_time: How long to lease this task, in seconds. :type num_tasks: int :param num_tasks: The number of tasks to lease. :type group_by_tag: bool :param group_by_tag: Optional. When True, returns tasks of the same tag. Specify which tag by using the tag parameter. If tag is not specified, returns tasks of the same tag as the oldest task in the queue. :type tag: string :param tag: Optional. Only specify tag if groupByTag is true. If groupByTag is true and tag is not specified, the tag is assumed to be that of the oldest task by ETA. I.e., the first available tag. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_TaskIterator`. :returns: An iterator of tasks. """
def lease(self, lease_time, num_tasks, group_by_tag=False, tag=None, client=None):
    """Acquires a lease on the topmost N unowned tasks in the queue.

    :param lease_time: How long to lease this task, in seconds.
    :param num_tasks: The number of tasks to lease.
    :param group_by_tag: Optional. When True, returns tasks of the same
        tag; which tag is controlled by ``tag``.
    :param tag: Optional. Only meaningful when ``group_by_tag`` is True;
        when omitted the backend uses the tag of the oldest task by ETA.
    :param client: Optional. The client to use. If not passed, falls
        back to the ``client`` stored on the task's taskqueue.
    :returns: An iterator of tasks.
    """
    client = self._require_client(client)
    query_params = {"leaseSecs": lease_time, "numTasks": num_tasks}
    if group_by_tag:
        query_params["groupByTag"] = group_by_tag
        query_params["tag"] = tag
    response = client.connection.api_request(
        method='POST', path=self.path + "/tasks/lease",
        query_params=query_params)
    for item in response.get('items', []):
        leased = Task(item.get('id'), taskqueue=self)
        leased._set_properties(item)
        yield leased
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_task(self, id, new_lease_time, client=None): """ Updates the duration of a task lease If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to update :type new_lease_time: int :param new_lease_time: New lease time, in seconds. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """
def update_task(self, id, new_lease_time, client=None):
    """Updates the duration of a task lease.

    :type id: string
    :param id: A task name to update.
    :type new_lease_time: int
    :param new_lease_time: New lease time, in seconds.
    :param client: Optional. The client to use. If not passed, falls
        back to the ``client`` stored on the task's taskqueue.
    :returns: the updated task, or ``None`` when the backend responds 404.
    """
    client = self._require_client(client)
    task = Task(taskqueue=self, id=id)
    try:
        payload = client.connection.api_request(
            method='POST',
            path=self.path + "/tasks/" + id,
            query_params={"newLeaseSeconds": new_lease_time},
            _target_object=task)
    except NotFound:
        return None
    task._set_properties(payload)
    return task
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def insert_task(self, description, tag=None, client=None): """ Insert task in task queue. If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type description: string :param description: Description of task to perform :type tag: string :param tag: Optional. The tag for this task, allows leasing tasks with a specific tag :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`_Task`. :returns: a task :raises: :class:`gcloud.exceptions.NotFound` """
def insert_task(self, description, tag=None, client=None):
    """Insert a task into the task queue.

    :type description: string or bytes
    :param description: Description (payload) of the task to perform.
    :type tag: string
    :param tag: Optional. The tag for this task; allows leasing tasks
        with a specific tag.
    :param client: Optional. The client to use. If not passed, falls
        back to the ``client`` stored on the task's taskqueue.
    :rtype: :class:`_Task`
    :returns: the inserted task.
    """
    client = self._require_client(client)
    # base64.b64encode requires bytes; the docstring promises str input,
    # which would raise TypeError on Python 3 — encode it first.
    payload = description.encode('utf-8') if isinstance(description, str) else description
    new_task = {
        "queueName": self.full_name,
        "payloadBase64": base64.b64encode(payload).decode('ascii'),
        "tag": tag,
    }
    response = client.connection.api_request(
        method='POST', path=self.path + "/tasks/", data=new_task)
    task = Task(taskqueue=self, id=response.get('id'))
    task._set_properties(response)
    return task
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def add_to(self, container):
    '''
    Add the class to @container, detaching it from any container it is
    currently in first.
    '''
    current = self.container
    if current:
        self.remove_from(current)
    container.add(self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def draw(self, surf):
    '''
    Draw all widgets and sub-containers to @surf.  Does nothing when the
    container is hidden.
    '''
    if not self.shown:
        return
    for widget in self.widgets:
        # Widget rects are container-relative; convert before blitting.
        surf.blit(widget.image, self.convert_rect(widget.rect))
    for child in self.containers:
        child.draw(surf)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def kill(self):
    '''
    Remove the class from its container, contained items and
    sub-widgets.  Runs automatically when the class is garbage
    collected.
    '''
    Base.kill(self)
    # Detach ourselves from every child we hold a reference to.
    for child in self.containers:
        child.remove_internal(self)
    for widget in self.widgets:
        widget.remove_internal(self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def bspace(self):
    '''
    Remove the character before the cursor; a no-op when the cursor is
    at the start of the text.
    '''
    # The original relied on list.pop raising IndexError, but with the
    # cursor at 0 it called pop(-1), which silently deletes the *last*
    # character of a non-empty buffer.  Guard explicitly instead.
    if self.cursor_loc > 0:
        self.text.pop(self.cursor_loc - 1)
        self.cursor_loc -= 1
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def paste(self):
    '''
    Insert text from the clipboard at the cursor.  Returns True when
    text was inserted, False when clipboard access failed.
    '''
    try:
        clip = pygame.scrap.get(SCRAP_TEXT)
        if clip:
            self.insert(clip)
            return True
    except:
        # pygame.scrap is experimental, allow for changes
        return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def animate_cli(animation_, step, event): """Print out the animation cycle to stdout. This function is for use with synchronous functions and must be run in a thread. Args: animation_ (generator): A generator that produces strings for the animation. Should be endless. step (float): Seconds between each animation frame. """
def animate_cli(animation_, step, event):
    """Print out the animation cycle to stdout.

    This function is for use with synchronous functions and must be run
    in a thread.

    Args:
        animation_ (generator): A generator that produces strings for the
            animation. Should be endless.
        step (float): Seconds between each animation frame.
        event: An object with ``is_set()``; the loop stops after the
            first frame drawn once the event is set.
    """
    while True:
        # Runs at least once — important for tests!
        time.sleep(step)
        sys.stdout.write(next(animation_))
        sys.stdout.flush()
        if event.is_set():
            break
    # Clear the final frame and rewind the animation for reuse.
    sys.stdout.write(animation_.get_erase_frame())
    sys.stdout.flush()
    animation_.reset()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def energy_ES(q, v): """Compute the kinetic and potential energy of the earth sun system"""
def energy_ES(q, v):
    """Compute the total, kinetic and potential energy of the earth-sun
    system.

    Returns (H, T, U) where H = T + U is the total energy, T the kinetic
    energy and U the potential energy (one array entry per time step).
    """
    # Body 0 is the sun, Body 1 is the earth
    m0 = mass[0]
    m1 = mass[1]
    # Positions and velocities of sun and earth
    q0 = q[:, slices[0]]
    q1 = q[:, slices[1]]
    v0 = v[:, slices[0]]
    v1 = v[:, slices[1]]
    # Kinetic energy is 1/2 m v^2, summed over both bodies
    T = 0.5 * m0 * np.sum(v0 * v0, axis=1) + 0.5 * m1 * np.sum(v1 * v1, axis=1)
    # Potential energy is -G m0 m1 / r for the single sun-earth pair
    r_01 = np.linalg.norm(q1 - q0, axis=1)
    U = -G * m0 * m1 / r_01
    # Total energy (the original assigned H = T + U twice; once suffices)
    H = T + U
    return H, T, U
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_force_ES(q_vars, mass): """Fluxion with the potential energy of the earth-sun sytem"""
# Build the potential energy fluxion; just one pair of bodies U = U_ij(q_vars, mass, 0, 1) # Varname arrays for both the coordinate system and U vn_q = np.array([q.var_name for q in q_vars]) vn_fl = np.array(sorted(U.var_names)) # Permutation array for putting variables in q in the order expected by U (alphabetical) q2fl = np.array([np.argmax((vn_q == v)) for v in vn_fl]) # Permutation array for putting results of U.diff() in order of q_vars fl2q = np.array([np.argmax((vn_fl == v)) for v in vn_q]) # Return a force function from this potential force_func = lambda q: -U.diff(q[q2fl]).squeeze()[fl2q] return force_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate(self, meta, val): """Validate an account_id"""
def validate(self, meta, val):
    """Validate an account_id: normalise it to a string, then check it
    against the amazon_account_id regex."""
    normalised = string_or_int_as_string_spec().normalise(meta, val)
    pattern = regexes['amazon_account_id']
    if pattern.match(normalised):
        return normalised
    raise BadOption("Account id must match a particular regex",
                    got=normalised, should_match=pattern.pattern)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def aws_syncr_spec(self): """Spec for aws_syncr options"""
# Strings are run through MergedOptionStringFormatter so option values
# may reference other configuration values.
formatted_string = formatted(string_spec(), MergedOptionStringFormatter, expected_type=string_types)
return create_spec(AwsSyncr
    # Optional string extras, empty by default
    , extra = defaulted(formatted_string, "")
    , stage = defaulted(formatted_string, "")
    # Behaviour flags, off by default
    , debug = defaulted(boolean(), False)
    , dry_run = defaulted(boolean(), False)
    # AWS region, defaulting to Sydney
    , location = defaulted(formatted_string, "ap-southeast-2")
    # Required values
    , artifact = formatted_string
    , environment = formatted_string
    , config_folder = directory_spec()
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def accounts_spec(self): """Spec for accounts options"""
# Mapping of friendly account name -> amazon account id; each id is
# formatted and then validated against the account-id regex.
formatted_account_id = formatted(valid_account_id(), MergedOptionStringFormatter, expected_type=string_types)
return dictof(string_spec(), formatted_account_id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _gen_s3_path(self, model, props): """ Return the part of the S3 path based on inputs The path will be passed to the s3_upload method & will ultimately be merged with the standard AWS S3 URL. An example model type of 'users' with a resource ID of 99 & an API endpoint ending with 'photos' will have a path generated in the following way: users/99/photos/<timestamp>.<extension> The timestamp is a high precision timestamp & the extension is typically 3 characters & derived in the form-data deserializer. """
now = '%.5f' % time.time() return '%s/%s/%s/%s.%s' % (model.rtype, model.rid_value, self._s3_rtype, now, props['file-ext'])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def on_post(self, req, resp, rid): """ Deserialize the file upload & save it to S3 File uploads are associated with a model of some kind. Ensure the associating model exists first & foremost. """
signals.pre_req.send(self.model)
signals.pre_req_upload.send(self.model)
# Parse the upload body using the handler's accepted mimetypes.
props = req.deserialize(self.mimetypes)
# Uploads hang off an existing model; find() resolves `rid` first.
model = find(self.model, rid)
signals.pre_upload.send(self.model, model=model)
try:
    conn = s3_connect(self.key, self.secret)
    path = self._gen_s3_path(model, props)
    s3_url = s3_upload(self.acl, self.bucket, conn, props['content'],
                       props['content-type'], path)
except IOError:
    # S3 connectivity failures surface to the client as a 503.
    abort(ServiceUnavailable(**{
        'detail': 'The upload attempt failed unexpectedly',
    }))
else:
    signals.post_upload.send(self.model, model=model, url=s3_url)
    resp.location = s3_url
    resp.status = falcon.HTTP_201
    resp.serialize({'data': {'url': s3_url}})
signals.post_req.send(self.model)
signals.post_req_upload.send(self.model)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def before(f, chain=False): """Runs f before the decorated function."""
def before(f, chain=False):
    """Runs f before the decorated function.

    With ``chain=True`` the decorated function receives ``f``'s return
    value as its single argument; otherwise both functions are called
    with the original arguments and ``f``'s result is discarded.
    """
    def decorator(g):
        @wraps(g)
        def wrapper(*args, **kargs):
            if not chain:
                f(*args, **kargs)
                return g(*args, **kargs)
            return g(f(*args, **kargs))
        return wrapper
    return decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def after(f, chain=False): """Runs f with the result of the decorated function."""
def after(f, chain=False):
    """Runs f after the decorated function.

    With ``chain=True``, ``f`` receives the decorated function's result
    and ``f``'s return value is returned; otherwise ``f`` is called
    with the original arguments and the decorated function's result is
    returned unchanged.
    """
    def decorator(g):
        @wraps(g)
        def wrapper(*args, **kargs):
            if chain:
                return f(g(*args, **kargs))
            result = g(*args, **kargs)
            f(*args, **kargs)
            return result
        return wrapper
    return decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def during(f): """Runs f during the decorated function's execution in a separate thread."""
def during(f):
    """Runs f during the decorated function's execution, in a separate
    thread started just before the decorated function and joined after
    it returns."""
    def decorator(g):
        @wraps(g)
        def wrapper(*args, **kargs):
            side = Thread(target=f, args=args, kwargs=kargs)
            side.start()
            result = g(*args, **kargs)
            side.join()
            return result
        return wrapper
    return decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def current_frame(raw=False): ''' Gives the current execution frame. :returns: The current execution frame that is actually executing this. ''' # `import sys` is important here, because the `sys` module is special # and we will end up with the class frame instead of the `current` one. if NATIVE: import sys frame = sys._getframe() else: frame = _getframe() frame = frame.f_back if not raw: frame = Frame(frame) return frame
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def locate(callback, root_frame=None, include_root=False, raw=False):
    '''
    Locates a frame by criteria.

    :param callback: One argument function to check the frame against. The
        frame we are currently on, is given as that argument.
    :param root_frame: The root frame to start the search from. Can be a
        callback taking no arguments.
    :param include_root: `True` if the search should start from the
        `root_frame` or the one beneath it. Defaults to `False`.
    :param raw: whether to use raw frames or wrap them in our own object.
        Defaults to `False`.
    :raises RuntimeError: When no matching frame is found.
    :returns: The first frame which responds to the `callback`.
    '''
    def get_from(maybe_callable):
        # `root_frame` may be either a frame or a zero-argument factory.
        if callable(maybe_callable):
            return maybe_callable()
        return maybe_callable
    # Creates new frames, whether raw or not.
    new = lambda frame: frame if raw else Frame(frame)
    current_frame = get_from(root_frame or Frame.current_frame(raw=True))
    current_frame = new(current_frame)
    if not include_root:
        current_frame = new(current_frame.f_back)
    # The search will stop, because at some point the frame will be falsy.
    while current_frame:
        found = callback(current_frame)
        if found:
            return current_frame
        current_frame = new(current_frame.f_back)
    # NOTE(review): docstring advertises RuntimeError; presumably
    # Frame.NotFound subclasses it — confirm.
    raise Frame.NotFound('No matching frame found')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_call(self, func, *args, **kwargs): """ Sets the function & its arguments to be called when the task is processed. Ex:: task.to_call(my_function, 1, 'c', another=True) :param func: The callable with business logic to execute :type func: callable :param args: Positional arguments to pass to the callable task :type args: list :param kwargs: Keyword arguments to pass to the callable task :type kwargs: dict """
def to_call(self, func, *args, **kwargs):
    """
    Sets the function & its arguments to be called when the task is
    processed.

    Ex::

        task.to_call(my_function, 1, 'c', another=True)

    :param func: The callable with business logic to execute
    :param args: Positional arguments to pass to the callable task
    :param kwargs: Keyword arguments to pass to the callable task
    """
    self.func, self.func_args, self.func_kwargs = func, args, kwargs
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def serialize(self): """ Serializes the ``Task`` data for storing in the queue. All data must be JSON-serializable in order to be stored properly. :returns: A JSON string of the task data. """
def serialize(self):
    """
    Serializes the ``Task`` data for storing in the queue.

    All data must be JSON-serializable in order to be stored properly.
    ``async`` became a reserved keyword in Python 3.7, so the attribute
    is read via ``getattr`` — ``self.async`` is a SyntaxError there.

    :returns: A JSON string of the task data.
    """
    data = {
        'task_id': self.task_id,
        'retries': self.retries,
        'async': getattr(self, 'async'),
        'module': determine_module(self.func),
        'callable': determine_name(self.func),
        'args': self.func_args,
        'kwargs': self.func_kwargs,
        'options': {},
    }
    # Hook callables can't be JSON-serialized directly; record their
    # module + name so they can be re-imported on deserialize.
    for hook in ('on_start', 'on_success', 'on_error'):
        hook_func = getattr(self, hook, None)
        if hook_func:
            data['options'][hook] = {
                'module': determine_module(hook_func),
                'callable': determine_name(hook_func),
            }
    return json.dumps(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def deserialize(cls, data): """ Given some data from the queue, deserializes it into a ``Task`` instance. The data must be similar in format to what comes from ``Task.serialize`` (a JSON-serialized dictionary). Required keys are ``task_id``, ``retries`` & ``async``. :param data: A JSON-serialized string of the task data :type data: string :returns: A populated task :rtype: A ``Task`` instance """
def deserialize(cls, data):
    """
    Given some data from the queue, deserializes it into a ``Task``
    instance.

    The data must be similar in format to what comes from
    ``Task.serialize`` (a JSON-serialized dictionary). Required keys
    are ``task_id``, ``retries`` & ``async``.

    :param data: A JSON-serialized string of the task data
    :type data: string
    :returns: A populated task
    :rtype: A ``Task`` instance
    """
    payload = json.loads(data)
    options = payload.get('options', {})
    # ``async`` is a reserved keyword from Python 3.7 on, so it cannot
    # appear as a literal keyword argument; expand a dict instead.
    task = cls(**{
        'task_id': payload['task_id'],
        'retries': payload['retries'],
        'async': payload['async'],
    })
    func = import_attr(payload['module'], payload['callable'])
    task.to_call(func, *payload.get('args', []), **payload.get('kwargs', {}))
    # Re-import any recorded hook callables.
    for hook in ('on_start', 'on_success', 'on_error'):
        if options.get(hook):
            setattr(task, hook, import_attr(
                options[hook]['module'],
                options[hook]['callable'],
            ))
    return task
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(self): """ Runs the task. This fires the ``on_start`` hook function first (if present), passing the task itself. Then it runs the target function supplied via ``Task.to_call`` with its arguments & stores the result. If the target function succeeded, the ``on_success`` hook function is called, passing both the task & the result to it. If the target function failed (threw an exception), the ``on_error`` hook function is called, passing both the task & the exception to it. Then the exception is re-raised. Finally, the result is returned. """
def run(self):
    """
    Runs the task.

    Fires ``on_start`` (if present) with the task, then calls the target
    function supplied via ``Task.to_call``.  On success, marks the task
    successful, fires ``on_success`` with the task & result, and returns
    the result.  On failure, marks the task failed, fires ``on_error``
    with the task & the exception, then re-raises.
    """
    if self.on_start:
        self.on_start(self)
    try:
        result = self.func(*self.func_args, **self.func_kwargs)
    except Exception as err:
        self.to_failed()
        if self.on_error:
            self.on_error(self, err)
        raise
    else:
        self.to_success()
        if self.on_success:
            self.on_success(self, result)
        return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _check_typecode_list(ofwhat, tcname):
    '''Check a list of typecodes for compliance with Struct requirements:
    every non-callable entry must be a named TypeCode (AnyElement being
    the one nameless exception).'''
    for item in ofwhat:
        # Callables are _Mirage placeholders; skip them.
        if callable(item):
            continue
        if not isinstance(item, TypeCode):
            raise TypeError(
                tcname + ' ofwhat outside the TypeCode hierarchy, ' +
                str(item.__class__))
        if item.pname is None and not isinstance(item, AnyElement):
            raise TypeError(tcname + ' element ' + str(item) + ' has no name')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _get_type_or_substitute(typecode, pyobj, sw, elt):
    '''return typecode or substitute type for wildcard or derived type.
    For serialization only.

    Parameters:
        typecode -- declared TypeCode for this element
        pyobj -- python object being serialized; may carry its own
            ``typecode`` attribute naming a substitute type
        sw -- writer object (used only for error backtraces)
        elt -- current element (used only for error backtraces)
    '''
    sub = getattr(pyobj, 'typecode', typecode)
    if sub is typecode or sub is None:
        # No substitution requested: serialize with the declared typecode.
        return typecode

    # Element WildCard
    if isinstance(typecode, AnyElement):
        return sub

    # Global Element Declaration
    if isinstance(sub, ElementDeclaration):
        if (typecode.nspname,typecode.pname) == (sub.nspname,sub.pname):
            # Substituting a GED for itself is a usage error.
            raise TypeError(\
                'bad usage, failed to serialize element reference (%s, %s), in: %s' %
                (typecode.nspname, typecode.pname, sw.Backtrace(elt),))

        # check substitutionGroup
        if _is_substitute_element(typecode, sub):
            return sub

        raise TypeError(\
            'failed to serialize (%s, %s) illegal sub GED (%s,%s): %s' %
            (typecode.nspname, typecode.pname, sub.nspname, sub.pname,
            sw.Backtrace(elt),))

    # Local Element: the substitute must derive from the declared class
    # (unless the declared type is the AnyType wildcard).
    if not isinstance(typecode, AnyType) and not isinstance(sub, typecode.__class__):
        raise TypeError(\
            'failed to serialize substitute %s for %s, not derivation: %s' %
            (sub, typecode, sw.Backtrace(elt),))

    # Make our substitution type match the elements facets,
    # since typecode is created for a single existing pyobj
    # some facets are irrelevant.
    sub = _copy(sub)
    sub.nspname = typecode.nspname
    sub.pname = typecode.pname
    sub.aname = typecode.aname
    sub.minOccurs = sub.maxOccurs = 1
    return sub
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setDerivedTypeContents(self, extensions=None, restrictions=None): """For derived types, set the appropriate contents: extensions are appended to the inherited content, while restrictions replace it. """
def setDerivedTypeContents(self, extensions=None, restrictions=None):
    """Update self.ofwhat for a derived type.

    extensions -- typecode or sequence of typecodes appended to the
        inherited content.
    restrictions -- typecode or sequence of typecodes that replace the
        inherited content.
    Does nothing when neither argument is given.
    """
    if extensions:
        contents = list(self.ofwhat)
        if type(extensions) in _seqtypes:
            contents += list(extensions)
        else:
            contents.append(extensions)
    elif restrictions:
        if type(restrictions) in _seqtypes:
            contents = restrictions
        else:
            contents = (restrictions,)
    else:
        return
    self.ofwhat = tuple(contents)
    self.lenofwhat = len(self.ofwhat)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def contains_non_repeat_actions(self):
    '''Return True if any queued action is neither an int nor a
    RepeatCommand -- because repeating repeat actions can get ugly
    real fast.
    '''
    return any(
        not isinstance(action, (int, dynamic.RepeatCommand))
        for action in self.actions
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _all_correct_list(array): """ Make sure that all items in `array` have the correct type and size. Args: array (list): Array of python types. Returns: True/False """
def _all_correct_list(array):
    """Return True when `array` is an iterable of two-item iterables.

    Args:
        array (list): Array of python types.

    Returns:
        True/False
    """
    if type(array) not in _ITERABLE_TYPES:
        return False
    # Short-circuiting keeps len() from running on items of a wrong type.
    return all(
        type(item) in _ITERABLE_TYPES and len(item) == 2
        for item in array
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _convert_to_dict(data): """ Convert `data` to dictionary. Tries to get sense in multidimensional arrays. Args: data: List/dict/tuple of variable dimension. Returns: dict: If the data can be converted to dictionary. Raises: MetaParsingException: When the data are unconvertible to dict. """
if isinstance(data, dict): return data if isinstance(data, list) or isinstance(data, tuple): if _all_correct_list(data): return dict(data) else: data = zip(data[::2], data[1::2]) return dict(data) else: raise MetaParsingException( "Can't decode provided metadata - unknown structure." )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_structure(data): """ Check whether the structure is flat dictionary. If not, try to convert it to dictionary. Args: data: Whatever data you have (dict/tuple/list). Returns: dict: When the conversion was successful or `data` was already `good`. Raises: MetaParsingException: When the data couldn't be converted or had `bad` structure. """
def check_structure(data):
    """Check whether `data` is a flat dictionary; convert it if possible.

    Args:
        data: Whatever data you have (dict/tuple/list).

    Returns:
        dict: When the conversion was successful or `data` was already `good`.

    Raises:
        MetaParsingException: When the data couldn't be converted or had
            `bad` structure.
    """
    if not isinstance(data, dict):
        try:
            data = _convert_to_dict(data)
        except MetaParsingException:
            raise
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are not swallowed.  (Also fixed the "strucure"
            # typo in the user-facing message.)
            raise MetaParsingException(
                "Metadata format has invalid structure (dict is expected)."
            )

    # .items() instead of .iteritems(): works on both Python 2 and 3.
    for key, val in data.items():
        if type(key) not in _ALLOWED_TYPES:
            raise MetaParsingException(
                "Can't decode the meta file - invalid type of keyword '" +
                str(key) +
                "'!"
            )
        # Bug fix: this branch used to repeat the "keyword" message,
        # hiding which half of the pair was actually bad.
        if type(val) not in _ALLOWED_TYPES:
            raise MetaParsingException(
                "Can't decode the meta file - invalid type of value for keyword '" +
                str(key) +
                "'!"
            )

    return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _remove_accents(self, input_str): """ Convert unicode string to ASCII. Credit: http://stackoverflow.com/a/517974 """
nkfd_form = unicodedata.normalize('NFKD', input_str) return u"".join([c for c in nkfd_form if not unicodedata.combining(c)])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def process(self, key, val): """ Try to look for `key` in all required and optional fields. If found, set the `val`. """
def process(self, key, val):
    """Offer `key`/`val` to the first field that accepts it.

    Required fields are consulted before optional ones; the search
    stops at the first field whose check() succeeds.
    """
    for group in (self.fields, self.optional):
        for candidate in group:
            if candidate.check(key, val):
                return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_size(cls): """ Total byte size of fields in this structure => total byte size of the structure on the file """
def get_size(cls):
    """Return the total byte size of all fields in this structure.

    This equals the byte size the structure occupies in the file.
    """
    total = 0
    for field_name in cls.get_fields_names():
        total += getattr(cls, field_name).length
    return total
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def format_image(path, options):
    '''Formats an image.

    Args:
        path (str): Path to the image file.
        options (dict): Options to apply to the image.

    Returns:
        (list) A list of PIL images.  The list will always be of length 1
        unless resolutions for resizing are provided in the options.
    '''
    source = Image.open(path)
    return __pipeline_image(source, options)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_instance(key, expire=None): """Return an instance of RedisSet."""
def get_instance(key, expire=None):
    """Return a (cached) RedisSet instance for `key`.

    Instances are memoized in the module-level `_instances` registry so
    repeated lookups for the same key share one object.
    """
    global _instances
    if key not in _instances:
        _instances[key] = RedisSet(key, _redis, expire=expire)
    return _instances[key]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add(self, value): """Add value to set."""
def add(self, value):
    """Add `value` to the set, arming the TTL when the set is new.

    The expiry is only (re)set while the set holds fewer than two
    members, i.e. right after it is first populated.
    """
    result = self.redis.sadd(self.key, value)
    if self.redis.scard(self.key) < 2:
        # First member: start the expiry countdown for this key.
        self.redis.expire(self.key, self.expire)
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write(name, keyword, domain, citation, author, description, species, version, contact, licenses, values, functions, output, value_prefix): """Build a namespace from items."""
def write(name, keyword, domain, citation, author, description, species,
          version, contact, licenses, values, functions, output, value_prefix):
    """Build a namespace document from the given items and write it out."""
    # Thin CLI shim: everything is forwarded to write_namespace.
    write_namespace(
        name,
        keyword,
        domain,
        author,
        citation,
        values,
        namespace_description=description,
        namespace_species=species,
        namespace_version=version,
        author_contact=contact,
        author_copyright=licenses,
        functions=functions,
        file=output,
        value_prefix=value_prefix,
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def history(namespace_module): """Hash all versions on Artifactory."""
def history(namespace_module):
    """Print the path and content hash of every archived namespace version."""
    for version_path in get_namespace_history(namespace_module):
        digest = get_bel_resource_hash(version_path.as_posix())
        click.echo('{}\t{}'.format(version_path, digest))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def convert_to_annotation(file, output): """Convert a namespace file to an annotation file."""
def convert_to_annotation(file, output):
    """Convert a namespace file to an annotation file."""
    resource = parse_bel_resource(file)
    # Annotation values carry no labels, so map every value to ''.
    labels = {value: '' for value in resource['Values']}
    write_annotation(
        keyword=resource['Namespace']['Keyword'],
        values=labels,
        citation_name=resource['Citation']['NameString'],
        description=resource['Namespace']['DescriptionString'],
        file=output,
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def history(annotation_module): """Output the hashes for the annotation resources' versions."""
def history(annotation_module):
    """Print the path and content hash of every archived annotation version."""
    for version_path in get_annotation_history(annotation_module):
        digest = get_bel_resource_hash(version_path.as_posix())
        click.echo('{}\t{}'.format(version_path, digest))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def convert_to_namespace(file, output, keyword): """Convert an annotation file to a namespace file."""
def convert_to_namespace(file, output, keyword):
    """Convert an annotation file to a namespace file."""
    resource = parse_bel_resource(file)
    definition = resource['AnnotationDefinition']
    write_namespace(
        # Fall back to the annotation's own keyword when none is given.
        namespace_keyword=(keyword or definition['Keyword']),
        namespace_name=definition['Keyword'],
        namespace_description=definition['DescriptionString'],
        author_name='Charles Tapley Hoyt',
        namespace_domain=NAMESPACE_DOMAIN_OTHER,
        values=resource['Values'],
        citation_name=resource['Citation']['NameString'],
        file=output,
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_proxy(): """Return a random proxy from proxy config."""
def get_proxy():
    """Return a random proxy from the proxy config, or None if none exist."""
    proxies = _config['proxies']
    if not proxies:
        return None
    return proxies[random.randint(0, len(proxies) - 1)]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_instance(): """Return an instance of Client."""
def get_instance():
    """Return a (cached) Client instance for a randomly chosen user agent."""
    global _instances
    agents = _config['user-agents']
    if agents:
        agent = agents[random.randint(0, len(agents) - 1)]
    else:
        agent = DEFAULT_UA
    # One cached client per user-agent string.
    if agent not in _instances:
        _instances[agent] = Client(agent, get_proxy)
    return _instances[agent]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, uri, disable_proxy=False, stream=False): """Return Requests response to GET request."""
def get(self, uri, disable_proxy=False, stream=False):
    """Return Requests response to GET request.

    Args:
        uri: URL to fetch.
        disable_proxy: when True, bypass the configured proxy.
        stream: when True, return an iterator over response chunks
            instead of the full body.

    Returns:
        The response body (bytes), or an iterator over chunks when
        ``stream`` is set.

    Raises:
        requests.exceptions.HTTPError: when the response status code is
            not in ``_PERMITTED_STATUS_CODES``.
    """
    response = requests.get(
        uri,
        headers=self.headers,
        allow_redirects=True,
        cookies={},
        stream=stream,
        # NOTE(review): requests documents `proxies` as a dict or None;
        # passing False appears to rely on falsy == "no proxies" --
        # confirm against the requests version in use.
        proxies=self.proxy if not disable_proxy else False
    )
    if response.status_code in _PERMITTED_STATUS_CODES:
        # Expose the headers of the last successful response to callers.
        self.response_headers = response.headers
        return response.content if not stream else response.iter_content()
    else:
        raise requests.exceptions.HTTPError(
            "HTTP response did not have a permitted status code."
        )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_content(self, uri, disable_proxy=False): """Return content from URI if Response status is good."""
def get_content(self, uri, disable_proxy=False):
    """Return the full response body for `uri` (see ``get``)."""
    # Non-streaming variant of get().
    return self.get(uri, disable_proxy=disable_proxy)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_iter_content(self, uri, disable_proxy=False): """Return iterable content from URI if Response status is good."""
def get_iter_content(self, uri, disable_proxy=False):
    """Return an iterator over the response body for `uri` (see ``get``)."""
    # Streaming variant of get().
    return self.get(uri, disable_proxy=disable_proxy, stream=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dict_diff(first, second): """ Return a dict of keys that differ with another config object. If a value is not found in one fo the configs, it will be represented by KEYNOTFOUND. @param first: Fist dictionary to diff. @param second: Second dicationary to diff. @return diff: Dict of Key => (first.val, second.val) """
def dict_diff(first, second):
    """Return a dict mapping each differing key to (first_val, second_val).

    A key missing from one of the inputs is represented by None on that
    side of the pair.
    """
    diff = {}
    for key, value in first.items():
        if key not in second:
            diff[key] = (value, None)
        elif value != second[key]:
            diff[key] = (value, second[key])
    # Keys present only in the second dict.
    for key, value in second.items():
        if key not in first:
            diff[key] = (None, value)
    return diff
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def process_file(self, path, dryrun): """ Concat files and return filename. """
def process_file(self, path, dryrun):
    """Append the file at `path` to the output file and return the path.

    Returns None for the output file itself so it is never concatenated
    into its own result.  In dry-run mode the file is returned untouched.
    """
    # Special case: never include the output file in the concatenation.
    if path == self._output_path:
        return None
    if dryrun:
        return path
    with open(path, "rb") as source:
        self._output_file.write(source.read())
    return path
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _chain_forks(elements): """Detect whether a sequence of elements leads to a fork of streams"""
# we are only interested in the result, so unwind from the end for element in reversed(elements): if element.chain_fork: return True elif element.chain_join: return False return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(filename): """ parse a scene release string and return a dictionary of parsed values."""
def parse(filename):
    """Parse a scene release string and return a dict of parsed values.

    Each recognised attribute (resolution, source, year, ...) is matched
    against `filename`; matches are recorded (with surrounding dots
    stripped) and removed from the working title.  For episodes, `season`
    and `episode` are split out of the SxxEyy tag and `series` is set to
    True.  Whatever text remains becomes the cleaned `title`.
    """
    screensize = re.compile(r'720p|1080p', re.I)
    source = re.compile(
        r'\.(AHDTV|MBluRay|MDVDR|CAM|TS|TELESYNC|DVDSCR|DVD9|BDSCR|DDC|R5LINE|R5|DVDRip|HDRip|BRRip|BDRip|WEBRip|WEB-?HD|HDtv|PDTV|WEBDL|BluRay)', re.I)
    year = re.compile(r'(1|2)\d{3}')
    series = re.compile(r's\d{1,3}e\d{1,3}', re.I)
    group = re.compile(r'[A-Za-z0-9]+$', re.I)
    video = re.compile(r'DVDR|Xvid|MP4|NTSC|PAL|[xh][\.\s]?264', re.I)
    audio = re.compile(r'AAC2[\.\s]0|AAC|AC3|DTS|DD5', re.I)
    edition = re.compile(
        r'\.(UNRATED|DC|(Directors|EXTENDED)[\.\s](CUT|EDITION)|EXTENDED|3D|2D|\bNF\b)', re.I)
    tags = re.compile(
        r'\.(COMPLETE|LiMiTED|DL|DUAL|iNTERNAL|UNCUT|FS|FESTIVAL|DOKU|DOCU|DUBBED|SUBBED|WS)', re.I)
    release = re.compile(
        r'REAL[\.\s]PROPER|REMASTERED|PROPER|REPACK|READNFO|READ[\.\s]NFO|DiRFiX|NFOFiX', re.I)
    subtitles = re.compile(
        r'\.(MULTi(SUBS)?|FiNNiSH|NORDiC|DANiSH|SWEDiSH|NORWEGiAN|iTALiAN|SPANiSH|SWESUB)', re.I)
    language = re.compile(r'\.(German|ITALIAN|Chinese|CZECH|RUSSIAN|FRENCH|TRUEFRENCH)', re.I)

    title = filename
    # Insertion order matters: matched text is removed from the title in
    # this order, so keep it stable.
    attrs = {'screenSize': screensize,
             'source': source,
             'year': year,
             'series': series,
             'release_group': group,
             'video': video,
             'audio': audio,
             'edition': edition,
             'tags': tags,
             'release': release,
             'subtitles': subtitles,
             'language': language
             }
    data = {}
    for attr, pattern in attrs.items():
        match = pattern.search(filename)
        if match:
            matched = match.group()
            data[attr] = matched.strip('.')
            # Bug fix: the matched text is data, not a regex.  Escape it
            # so metacharacters (e.g. "AAC2.0") cannot corrupt the
            # removal or raise re.error.
            title = re.sub(re.escape(matched), '', title)
    if 'series' in data:
        season_part, episode_part = re.split('e|E', data['series'])
        # lstrip removes leading zeros ("s01" -> season "1").
        data['season'] = season_part[1:].lstrip('0')
        data['episode'] = episode_part.lstrip('0')
        data['series'] = True
    cleaned = title.replace('.', ' ').strip('-').strip()
    # Collapse runs of whitespace left behind by the removals.
    data['title'] = re.sub(r'\s{2,}', ' ', cleaned)
    return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_ecdsap256_server_certificate(server_id, server_pub_key, expiry, root_id, root_priv_key): """ Creates a new server certificate signed by the provided root. :param Identity server_id: the identity for the certificate :param ECDSAP256PublicKey server_pub_key: the public key for the certificate :param CertificateExpiry expiry: the expiry date for the certificate :param CertificateRootId root_id: the root identity to sign this certificate :param ECDSAP256PrivateKey root_priv_key: the root private key to sign this certificate """
# Allocate an empty certificate; the native library fills it in place.
cert = ECDSAP256ServerCertificate()
rc = _lib.xtt_generate_server_certificate_ecdsap256(cert.native,
                                                    server_id.native,
                                                    server_pub_key.native,
                                                    expiry.native,
                                                    root_id.native,
                                                    root_priv_key.native)
# The native call returns a status code; anything other than SUCCESS is
# translated into the matching Python exception.
if rc == RC.SUCCESS:
    return cert
else:
    raise error_from_code(rc)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def project(self, win_width, win_height, fov, viewer_distance): """ Transforms this 3D point to 2D using a perspective projection. """
def project(self, win_width, win_height, fov, viewer_distance):
    """Transforms this 3D point to 2D using a perspective projection."""
    # Scale shrinks with distance from the viewer; y is flipped because
    # screen coordinates grow downward.
    scale = fov / (viewer_distance + self.z)
    screen_x = self.x * scale + win_width // 2
    screen_y = -self.y * scale + win_height // 2
    return Point3D(screen_x, screen_y, 1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_default_parser(parser): """ Set default parser instance Parameters parser : instance or string An instance or registered name of parser class. The specified parser instance will be used when the user did not specify :attr:`parser` in :func:`maidenhair.functions.load` function. See also -------- :func:`maidenhair.utils.plugins.Registry.register` : Register new class """
def set_default_parser(parser):
    """Set the default parser instance.

    `parser` may be a parser instance, a parser class/factory, or the
    registered name of one; names are resolved through the plugin
    registry and non-instances are called to produce an instance.  The
    result is stored in the module-level `_parser`.
    """
    global _parser
    if isinstance(parser, basestring):
        # A registered name: look the class up and instantiate it.
        parser = registry.find(parser)()
    if not isinstance(parser, BaseParser):
        # A class (or factory): call it to get an instance.
        parser = parser()
    _parser = parser
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_default_loader(loader): """ Set default loader instance Parameters loader : instance or string An instance or registered name of loader class. The specified loader instance will be used when the user did not specify :attr:`loader` in :func:`maidenhair.functions.load` function. See also -------- :func:`maidenhair.utils.plugins.Registry.register` : Register new class """
def set_default_loader(loader):
    """Set the default loader instance.

    `loader` may be a loader instance, a loader class/factory, or the
    registered name of one; names are resolved through the plugin
    registry and non-instances are called to produce an instance.  The
    result is stored in the module-level `_loader`.
    """
    global _loader
    if isinstance(loader, basestring):
        # A registered name: look the class up and instantiate it.
        loader = registry.find(loader)()
    if not isinstance(loader, BaseLoader):
        # A class (or factory): call it to get an instance.
        loader = loader()
    _loader = loader