text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _create_endpoints(self): """Create all api endpoints using self.endpoint and partial from functools"""
for k, v in self.endpoints.items(): _repr = '%s.%s' % (self.__class__.__name__, k) self.__dict__[k] = EndPointPartial(self._make_request, v, _repr)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(binary, **params): """Turns a ZIP file into a frozen sample."""
binary = io.BytesIO(binary) collection = list() with zipfile.ZipFile(binary, 'r') as zip_: for zip_info in zip_.infolist(): content_type, encoding = mimetypes.guess_type(zip_info.filename) content = zip_.read(zip_info) content = content_encodings.get(encoding).decode(content) content = content_types.get(content_type).parse(content, **params) collection.apppend((zip_info.filename, content)) return collection
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format(collection, **params): """Truns a python object into a ZIP file."""
binary = io.BytesIO() with zipfile.ZipFile(binary, 'w') as zip_: now = datetime.datetime.utcnow().timetuple() for filename, content in collection: content_type, encoding = mimetypes.guess_type(filename) content = content_types.get(content_type).parse(content, **params) content = content_encodings.get(encoding).decode(content) zip_info = zipfile.ZipInfo(filename, now) zip_info.file_size = len(content) zip_.writestr(zip_info, content) binary.seek(0) return binary.read()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def keywords_for(*args): """ Return a list of ``Keyword`` objects for the given model instance or a model class. In the case of a model class, retrieve all keywords for all instances of the model and apply a ``weight`` attribute that can be used to create a tag cloud. """
# Handle a model instance. if isinstance(args[0], Model): obj = args[0] if getattr(obj, "content_model", None): obj = obj.get_content_model() keywords_name = obj.get_keywordsfield_name() keywords_queryset = getattr(obj, keywords_name).all() # Keywords may have been prefetched already. If not, we # need select_related for the actual keywords. prefetched = getattr(obj, "_prefetched_objects_cache", {}) if keywords_name not in prefetched: keywords_queryset = keywords_queryset.select_related("keyword") return [assigned.keyword for assigned in keywords_queryset] # Handle a model class. try: app_label, model = args[0].split(".", 1) except ValueError: return [] content_type = ContentType.objects.get(app_label=app_label, model=model) assigned = AssignedKeyword.objects.filter(content_type=content_type) keywords = Keyword.objects.filter(assignments__in=assigned) keywords = keywords.annotate(item_count=Count("assignments")) if not keywords: return [] counts = [keyword.item_count for keyword in keywords] min_count, max_count = min(counts), max(counts) factor = (settings.TAG_CLOUD_SIZES - 1.) if min_count != max_count: factor /= (max_count - min_count) for kywd in keywords: kywd.weight = int(round((kywd.item_count - min_count) * factor)) + 1 return keywords
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def flag(request, comment_id, next=None): """ Flags a comment. Confirmation on GET, action on POST. Templates: :template:`comments/flag.html`, Context: comment the flagged `comments.comment` object """
comment = get_object_or_404(comments.get_model(), pk=comment_id, site__pk=settings.SITE_ID) # Flag on POST if request.method == 'POST': perform_flag(request, comment) return next_redirect(request, fallback=next or 'comments-flag-done', c=comment.pk) # Render a form on GET else: return render_to_response('comments/flag.html', {'comment': comment, "next": next}, template.RequestContext(request) )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def perform_flag(request, comment): """ Actually perform the flagging of a comment from a request. """
flag, created = comments.models.CommentFlag.objects.get_or_create( comment = comment, user = request.user, flag = comments.models.CommentFlag.SUGGEST_REMOVAL ) signals.comment_was_flagged.send( sender = comment.__class__, comment = comment, flag = flag, created = created, request = request, )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def onConnect(self, request): """ Called when a client opens a websocket connection """
logger.debug("Connection opened ({peer})".format(peer=self.peer)) self.storage = {} self._client_id = str(uuid1())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def onOpen(self): """ Called when a client has opened a websocket connection """
self.factory.add_client(self) # Publish ON_OPEN message self.factory.mease.publisher.publish( message_type=ON_OPEN, client_id=self._client_id, client_storage=self.storage)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def onClose(self, was_clean, code, reason): """ Called when a client closes a websocket connection """
logger.debug("Connection closed ({peer})".format(peer=self.peer)) # Publish ON_CLOSE message self.factory.mease.publisher.publish( message_type=ON_CLOSE, client_id=self._client_id, client_storage=self.storage) self.factory.remove_client(self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def onMessage(self, payload, is_binary): """ Called when a client sends a message """
if not is_binary: payload = payload.decode('utf-8') logger.debug("Incoming message ({peer}) : {message}".format( peer=self.peer, message=payload)) # Publish ON_RECEIVE message self.factory.mease.publisher.publish( message_type=ON_RECEIVE, client_id=self._client_id, client_storage=self.storage, message=payload)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send(self, payload, *args, **kwargs): """ Alias for WebSocketServerProtocol `sendMessage` method """
# Serialize container payloads to JSON, then send as encoded bytes.
data = json.dumps(payload) if isinstance(payload, (list, dict)) else payload
self.sendMessage(data.encode(), *args, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_server(self): """ Runs the WebSocket server """
self.protocol = MeaseWebSocketServerProtocol reactor.listenTCP(port=self.port, factory=self, interface=self.host) logger.info("Websocket server listening on {address}".format( address=self.address)) reactor.run()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def open(self, name, *args, **kwargs): """Open file, possibly relative to a base directory."""
if self.basedir is not None: name = os.path.join(self.basedir, name) return em.Subsystem.open(self, name, *args, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_live_weather(lat, lon, writer): """Gets the live weather via lat and long"""
requrl = FORECAST_BASE_URL+forecast_api_token+'/'+str(lat)+','+str(lon) req = requests.get(requrl) if req.status_code == requests.codes.ok: weather = req.json() if not weather['currently']: click.secho("No live weather currently", fg="red", bold=True) return writer.live_weather(weather) else: click.secho("There was problem getting live weather", fg="red", bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def convert(string, sanitize=False): """ Swap characters from script to transliterated version and vice versa. Optionally sanitize string by using preprocess function. :param sanitize: :param string: :return: """
return r.convert(string, (preprocess if sanitize else False))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_yaml(filename): """ Loads a YAML-formatted file. """
with open(filename) as f: ydoc = yaml.safe_load(f.read()) return (ydoc, serialize_tojson(ydoc))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def serialize_yaml_tofile(filename, resource): """ Serializes a K8S resource to YAML-formatted file. """
# BUG FIX: the Python-2-only builtin file() no longer exists in Python 3,
# and the original never closed the handle.  Use open() in a context
# manager so the stream is flushed and closed deterministically.
with open(filename, "w") as stream:
    yaml.dump(resource, stream, default_flow_style=False)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(binary, **params): """Turns a JSON structure into a python object."""
encoding = params.get('charset', 'UTF-8') return json.loads(binary, encoding=encoding)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format(item, **params): """Truns a python object into a JSON structure."""
encoding = params.get('charset', 'UTF-8') return json.dumps(item, encoding=encoding)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save_results(vcs, signature, result_path, patterns): """Save results matching `patterns` at `result_path`. Args: vcs (easyci.vcs.base.Vcs) - the VCS object for the actual project (not the disposable copy) signature (str) - the project state signature result_path (str) - the path containing the result, usually a disposable copy of the project patterns (str) - `rsync`-compatible patterns matching test results to save. """
# Persist the patterns and rsync the matching files into the results store.
results_directory = _get_results_directory(vcs, signature)
if not os.path.exists(results_directory):
    os.makedirs(results_directory)
# Record the patterns so sync_results can replay the same filter later.
with open(os.path.join(results_directory, 'patterns'), 'w') as f:
    f.write('\n'.join(patterns))
results_subdir = os.path.join(results_directory, 'results')
if not os.path.exists(results_subdir):
    os.mkdir(results_subdir)
includes = ['--include={}'.format(x) for x in patterns]
# The trailing '' makes os.path.join emit the trailing slash rsync expects.
cmd = (['rsync', '-r'] + includes +
       ['--exclude=*', os.path.join(result_path, ''),
        os.path.join(results_subdir, '')])
subprocess.check_call(cmd)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sync_results(vcs, signature): """Sync the saved results for `signature` back to the project. Args: vcs (easyci.vcs.base.Vcs) signature (str) Raises: ResultsNotFoundError """
# Replay the saved rsync filter to copy stored results back into the project.
results_directory = _get_results_directory(vcs, signature)
if not os.path.exists(results_directory):
    raise ResultsNotFoundError
with open(os.path.join(results_directory, 'patterns'), 'r') as f:
    patterns = f.read().strip().split()
includes = ['--include={}'.format(x) for x in patterns]
source = os.path.join(results_directory, 'results', '')
destination = os.path.join(vcs.path, '')
subprocess.check_call(['rsync', '-r'] + includes +
                      ['--exclude=*', source, destination])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_results(vcs, signature): """Removed saved results for this signature Args: vcs (easyci.vcs.base.Vcs) signature (str) Raises: ResultsNotFoundError """
# Remove the saved-results tree for this signature, if it exists.
target = _get_results_directory(vcs, signature)
if not os.path.exists(target):
    raise ResultsNotFoundError
shutil.rmtree(target)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_signatures_with_results(vcs): """Returns the list of signatures for which test results are saved. Args: vcs (easyci.vcs.base.Vcs) Returns: List[str] """
results_dir = os.path.join(vcs.private_dir(), 'results') if not os.path.exists(results_dir): return [] rel_paths = os.listdir(results_dir) return [p for p in rel_paths if os.path.isdir(os.path.join(results_dir, p))]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def nub(it):
    '''Dedup an iterable in arbitrary order, keeping first occurrences.

    Uses memory proportional to the number of unique items in ``it``.
    Items must be hashable.
    '''
    seen = set()
    for v in it:
        # BUG FIX: the original tracked hash(v) only, so two distinct items
        # with colliding hashes would be wrongly deduped.  Store the values
        # themselves; set membership handles collisions correctly.
        if v in seen:
            continue
        seen.add(v)
        yield v
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def folders(self, ann_id=None): '''Yields an unordered generator for all available folders. By default (with ``ann_id=None``), folders are shown for all anonymous users. Optionally, ``ann_id`` can be set to a username, which restricts the list to only folders owned by that user. :param str ann_id: Username :rtype: generator of folder_id ''' ann_id = self._annotator(ann_id) if len(self.prefix) > 0: prefix = '|'.join([urllib.quote(self.prefix, safe='~'), 'topic', ann_id, '']) else: prefix = '|'.join(['topic', ann_id, '']) logger.info('Scanning for folders with prefix %r', prefix) return imap(lambda id: self.unwrap_folder_content_id(id)['folder_id'], self.store.scan_prefix_ids(prefix))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def subfolders(self, folder_id, ann_id=None): '''Yields an unodered generator of subfolders in a folder. By default (with ``ann_id=None``), subfolders are shown for all anonymous users. Optionally, ``ann_id`` can be set to a username, which restricts the list to only subfolders owned by that user. :param str folder_id: Folder id :param str ann_id: Username :rtype: generator of subfolder_id ''' self.assert_valid_folder_id(folder_id) ann_id = self._annotator(ann_id) folder_cid = self.wrap_folder_content_id(ann_id, folder_id) if self.store.get(folder_cid) is None: raise KeyError(folder_id) all_labels = self.label_store.directly_connected(folder_cid) return nub(la.subtopic_for(folder_cid) for la in all_labels)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def parent_subfolders(self, ident, ann_id=None): '''An unordered generator of parent subfolders for ``ident``. ``ident`` can either be a ``content_id`` or a tuple of ``(content_id, subtopic_id)``. Parent subfolders are limited to the annotator id given. :param ident: identifier :type ident: ``str`` or ``(str, str)`` :param str ann_id: Username :rtype: generator of ``(folder_id, subfolder_id)`` ''' ann_id = self._annotator(ann_id) cid, _ = normalize_ident(ident) for lab in self.label_store.directly_connected(ident): folder_cid = lab.other(cid) subfolder_sid = lab.subtopic_for(folder_cid) if not folder_cid.startswith('topic|'): continue folder = self.unwrap_folder_content_id(folder_cid) subfolder = self.unwrap_subfolder_subtopic_id(subfolder_sid) if folder['annotator_id'] != ann_id: continue yield (folder['folder_id'], subfolder)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def items(self, folder_id, subfolder_id, ann_id=None): '''Yields an unodered generator of items in a subfolder. The generator yields items, which are represented by a tuple of ``content_id`` and ``subtopic_id``. The format of these identifiers is unspecified. By default (with ``ann_id=None``), subfolders are shown for all anonymous users. Optionally, ``ann_id`` can be set to a username, which restricts the list to only subfolders owned by that user. :param str folder_id: Folder id :param str subfolder_id: Subfolder id :param str ann_id: Username :rtype: generator of ``(content_id, subtopic_id)`` ''' self.assert_valid_folder_id(folder_id) self.assert_valid_folder_id(subfolder_id) ann_id = self._annotator(ann_id) folder_cid = self.wrap_folder_content_id(ann_id, folder_id) subfolder_sid = self.wrap_subfolder_subtopic_id(subfolder_id) ident = (folder_cid, subfolder_sid) if self.store.get(folder_cid) is None: raise KeyError(folder_id) for lab in self.label_store.directly_connected(ident): cid = lab.other(folder_cid) subid = lab.subtopic_for(cid) yield (cid, subid)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def grouped_items(self, folder_id, subfolder_id, ann_id=None):
    '''Group the items of a subfolder by content id.

    Returns a mapping of ``content_id |--> list of subtopic_id``.

    By default (with ``ann_id=None``), items are shown for all
    anonymous users.  Optionally, ``ann_id`` can be set to a username,
    which restricts the list to only items owned by that user.

    :param str folder_id: Folder id
    :param str subfolder_id: Subfolder id
    :param str ann_id: Username
    :rtype: ``dict`` of ``content_id |--> [subtopic_id]``
    '''
    grouped = defaultdict(list)
    subfolder_items = self.items(folder_id, subfolder_id, ann_id=ann_id)
    for content_id, subtopic_id in subfolder_items:
        grouped[content_id].append(subtopic_id)
    return grouped
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def add_folder(self, folder_id, ann_id=None): '''Add a folder. If ``ann_id`` is set, then the folder is owned by the given user. Otherwise, the folder is owned and viewable by all anonymous users. :param str folder_id: Folder id :param str ann_id: Username ''' self.assert_valid_folder_id(folder_id) ann_id = self._annotator(ann_id) cid = self.wrap_folder_content_id(ann_id, folder_id) self.store.put([(cid, FeatureCollection())]) logger.info('Added folder %r with content id %r', folder_id, cid)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def add_item(self, folder_id, subfolder_id, content_id, subtopic_id=None, ann_id=None): '''Add an item to a subfolder. The format of ``content_id`` and ``subtopic_id`` is unspecified. It is application specific. If ``ann_id`` is set, then the item is owned by the given user. Otherwise, the item is owned and viewable by all anonymous users. :param str folder_id: Folder id :param str subfolder_id: Folder id :param str content_id: content identifier :param str subtopic_id: subtopic identifier :param str ann_id: Username ''' self.assert_valid_folder_id(folder_id) self.assert_valid_folder_id(subfolder_id) ann_id = self._annotator(ann_id) folder_cid = self.wrap_folder_content_id(ann_id, folder_id) subfolder_sid = self.wrap_subfolder_subtopic_id(subfolder_id) if self.store.get(folder_cid) is None: raise KeyError(folder_id) lab = Label(folder_cid, content_id, ann_id, CorefValue.Positive, subtopic_id1=subfolder_sid, subtopic_id2=subtopic_id) self.label_store.put(lab) logger.info('Added subfolder item: %r', lab)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_models(args): """Generates models from the script input."""
data_table = get_data_table(args.filename) tables = to_tables(data_table.rows_to_dicts()) attr_indent = "\n" + args.indent * 2 attr_sep = "," + attr_indent for tname, cols in tables.items(): model_name = table_to_model_name(tname, list(cols.values())[0]["table_schema"]) pk_cols, oth_cols = split_pks(cols) timestamps = get_timestamps(cols, args.created_at_col_name, args.updated_at_col_name) is_auto = len(pk_cols) == 1 and cols[pk_cols[0]]["is_auto"] == "t" attrs = OrderedDict() for cname in oth_cols: if cname not in timestamps: attrs[cname] = None print(_MODEL_SOURCE.format( class_name=model_name, base_class_name="ModelBase", indent=args.indent, table_name=repr(tname), pk_name=repr(pk_cols[0] if len(pk_cols) == 1 else pk_cols), pk_is_auto=is_auto, timestamps=timestamps, attrs="dict(" + attr_indent + attr_sep.join("{0}={1}".format(k, v) for k, v in attrs.items()) + ")")) print()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_data_table(filename): """Returns a DataTable instance built from either the filename, or STDIN if filename is None."""
with get_file_object(filename, "r") as rf: return DataTable(list(csv.reader(rf)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_tables(cols): """Builds and returns a Dictionary whose keys are table names and values are OrderedDicts whose keys are column names and values are the col objects from which the definition is derived. """
tables = OrderedDict() for col in cols: tname = col["table_name"] if tname not in tables: tables[tname] = OrderedDict() tables[tname][col["column_name"]] = col return tables
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def snake_to_pascal(name, singularize=False): """Converts snake_case to PascalCase. If singularize is True, an attempt is made at singularizing each part of the resulting name. """
parts = name.split("_") if singularize: return "".join(p.upper() if p in _ALL_CAPS else to_singular(p.title()) for p in parts) else: return "".join(p.upper() if p in _ALL_CAPS else p.title() for p in parts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_singular(word): """Attempts to singularize a word."""
if word[-1] != "s": return word elif word.endswith("ies"): return word[:-3] + "y" elif word.endswith("ses"): return word[:-2] else: return word[:-1]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_timestamps(cols, created_name, updated_name): """Returns a 2-tuple of the timestamp columns that were found on the table definition."""
has_created = created_name in cols has_updated = updated_name in cols return (created_name if has_created else None, updated_name if has_updated else None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pull_from_origin(repo_path): """Execute 'git pull' at the provided repo_path."""
LOG.info("Pulling from origin at %s.", repo_path)
command = GIT_PULL_CMD.format(repo_path)
resp = envoy.run(command)
if resp.status_code != 0:
    # BUG FIX: LOG.exception() is only meaningful inside an except block
    # (it logs the active exception; here it logged "NoneType: None").
    # Log the error with the command's stderr instead, then raise.
    LOG.error("Pull failed: %s", resp.std_err)
    raise GitException(resp.std_err)
LOG.info("Pull successful.")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_remote_origin(repo_dir): """Read the remote origin URL from the given git repo, or None if unset."""
conf = ConfigParser() conf.read(os.path.join(repo_dir, '.git/config')) return conf.get('remote "origin"', 'url')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clone_from(repo_url, repo_dir): """Clone a remote git repo into a local directory."""
repo_url = _fix_repo_url(repo_url)
LOG.info("Cloning %s into %s.", repo_url, repo_dir)
cmd = GIT_CLONE_CMD.format(repo_url, repo_dir)
resp = envoy.run(cmd)
if resp.status_code != 0:
    # BUG FIX: log message read "Cloned failed"; also use lazy %-args
    # instead of eager string interpolation.
    LOG.error("Clone failed: %s", resp.std_err)
    raise GitException(resp.std_err)
LOG.info("Clone successful.")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def route(method, pattern, handler=None): """register a routing rule Example: route('GET', '/path/<param>', handler) """
if handler is None: return partial(route, method, pattern) return routes.append(method, pattern, handler)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def spawn(self, owner, *args, **kwargs): """Spawns a new subordinate actor of `owner` and stores it in this container. jobs = Container() jobs.spawn(self, Job) jobs.spawn(self, Job, some_param=123) jobs = Container(Job) jobs.spawn(self) jobs.spawn(self, some_param=123) jobs = Container(Job.using('abc', some_kwarg=321)) jobs.spawn(self, extra_kwarg=123) jobs.spawn(self, some_kwarg=123, extra_kwarg=123) jobs.spawn(self, 'xyz', some_kwarg=345, extra_kwarg=567) """
return (self._spawn(owner, self.factory, *args, **kwargs) if self.factory else self._spawn(owner, *args, **kwargs))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def watch(ctx): """Watch the directory for changes. Automatically run tests. """
vcs = ctx.obj['vcs']
event_handler = TestsEventHandler(vcs)
observer = Observer()
observer.schedule(event_handler, vcs.path, recursive=True)
observer.start()
click.echo('Watching directory `{path}`. Use ctrl-c to stop.'.format(path=vcs.path))
# BUG FIX: Thread.isAlive() was removed in Python 3.9; use is_alive().
while observer.is_alive():
    observer.join(timeout=1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def settings(request=None): """ Add the settings object to the template context. """
from yacms.conf import settings allowed_settings = settings.TEMPLATE_ACCESSIBLE_SETTINGS template_settings = TemplateSettings(settings, allowed_settings) template_settings.update(DEPRECATED) # This is basically the same as the old ADMIN_MEDIA_PREFIX setting, # we just use it in a few spots in the admin to optionally load a # file from either grappelli or Django admin if grappelli isn't # installed. We don't call it ADMIN_MEDIA_PREFIX in order to avoid # any confusion. admin_prefix = "grappelli/" if settings.GRAPPELLI_INSTALLED else "admin/" template_settings["YACMS_ADMIN_PREFIX"] = admin_prefix return {"settings": template_settings}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list(options): """ list programs that belong to the authenticated user """
configuration = config.get_default()
app_url = configuration['app_url']
# PEP 8 fix: compare against None with 'is not', not '!=' (E711).
if options.deployment is not None:
    deployment_name = options.deployment
else:
    deployment_name = configuration['deployment_name']
client_id = configuration['client_id']
client_secret = configuration['client_secret']
token_manager = auth.TokenManager(client_id=client_id,
                                  client_secret=client_secret,
                                  app_url=app_url)
# PEP 8 fix: test the flag's truthiness instead of '== True' (E712).
if options.all:
    account_id = None
else:
    account_id = accounts.get_logged_in_account_id(token_manager=token_manager,
                                                   app_url=app_url)
programs_details = programs.get_programs(deployment_name,
                                         token_manager=token_manager,
                                         created_by=account_id,
                                         app_url=app_url)
# Collect the distinct creators so account details are fetched once each.
account_ids = set()
for program in programs_details:
    account_ids.add(program['createdBy'])
accounts_details = accounts.get_accounts(account_ids,
                                         token_manager=token_manager,
                                         app_url=app_url)
account_lookup = {}
for account in accounts_details['accounts']:
    account_lookup[account['id']] = account
headers = ['Name', 'Last Saved', 'Created By']
table = []
for program in programs_details:
    username = account_lookup[program['createdBy']]['username']
    program_name = program['name']
    last_edited = program['lastEdited']
    table.append([program_name, last_edited, username])
if options.format == 'table':
    info(tabulate.tabulate(table, headers, tablefmt='orgtbl'))
elif options.format == 'text':
    info(tabulate.tabulate(table, headers, tablefmt='orgtbl', stralign='center'))
else:
    raise JutException('Unsupported format "%s"' % options.format)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def m(self, msg, state=False, more=None, cmdd=None, critical=True, verbose=None): ''' Mysterious mega method managing multiple meshed modules magically .. note:: If this function is used, the code contains facepalms: ``m(`` * It is possible to just show a message, \ or to run a command with message. * But it is not possible to run a command without a message, \ use the `verbose`-flag to hide your debug message. :param msg: Add a message. Shown depending on `verbose` (see below) :param state: Pass `state` down to :func:`util.system.shell_notify` :param more: Pass `more` down to :func:`util.system.shell_notify` :param dict cmdd: If given, :func:`util.system.shell_run` is launched with it's values :param critical: If set to ``True``: |appteardown| on failure of `cmdd` contents. * Similar to :func:`util.system.shell_run` `critical`-flag :param verbose: Overrules parent's class `verbose`-flag. * If left to ``None``, the verbose value Photon \ was started with is used * Messages are shown/hidden if explicitly set to ``True``/``False`` :returns: A dictionary specified the following: * 'more': `more` if it is not a dictionary otherwise \ it gets merged in if `more` is specified * The output of :func:`util.system.shell_run` gets merged in \ if `cmdd` is specified * 'failed': ``True`` if command failed :func:`util.system.shell_notify` is used with this dictionary to pipe it's output into :func:`meta.Meta.log` before returning. 
''' if verbose is None: verbose = self.__verbose res = dict() if more: res.update(more if isinstance(more, dict) else dict(more=more)) if cmdd and isinstance(cmdd, dict) and cmdd.get('cmd'): res.update(shell_run( cmdd.get('cmd'), cin=cmdd.get('cin'), cwd=cmdd.get('cwd'), timeout=cmdd.get('timeout', 120), critical=False, verbose=cmdd.get('verbose', verbose) )) if res.get('returncode', -1) != 0: res.update(dict(failed=True)) if state or critical and res.get('failed'): self.meta.log = dict(message=msg, more=res, verbose=verbose) shell_notify(msg, more=res, state=True) self.meta.log = shell_notify(msg, more=res, state=state, verbose=verbose) return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def s2m(self):
    '''Imports settings to meta.'''
    label = '%s settings' % (IDENT)
    self.meta.load(label, 'import %s' % (label), mdict=self.settings.get)
def file_to_base64(path_or_obj, max_mb=None):
    """Convert the contents of a file to base64 encoding.

    :param str_or_object path_or_obj: full pathname string for a file, or a
        file-like object that supports ``read``
    :param int max_mb: maximum size in megabytes to accept (optional)
    :returns: the base64-encoded file contents
    :raises ErrorFileTooBig: if file contents > max_mb
        (see :class:`ErrorFileTooBig`)
    :raises IOError: if the file path can't be found (other exceptions are
        possible depending on the file object)
    """
    if not hasattr(path_or_obj, 'read'):
        rt = read_file(path_or_obj)
    else:
        rt = path_or_obj.read()
    if max_mb:
        # BUG FIX: was `10024.0 * 1000` — one megabyte is 1024 * 1024 bytes.
        len_mb = len(rt) / (1024.0 * 1024)
        if len_mb > max_mb:
            # BUG FIX: the original used an invalid spec "{.2f}" and called
            # the string itself instead of .format(), raising TypeError.
            raise ErrorFileTooBig(
                "File is too big ({:.2f} MBytes)".format(len_mb))
    return b64encode(rt)
def dict_clip(a_dict, inlude_keys_lst=()):
    """Return a new dict with keys not included in *inlude_keys_lst* clipped off.

    :param dict a_dict: source dictionary (not modified)
    :param inlude_keys_lst: keys to keep; everything else is discarded.
        The default was a mutable list literal; replaced with an empty tuple
        to avoid the shared-mutable-default pitfall (behavior unchanged).
    :returns: a new dict containing only the kept key/value pairs
    """
    return {key: value for key, value in a_dict.items()
            if key in inlude_keys_lst}
def list_pp(ll, separator='|', header_line=True, autonumber=True):
    """pretty print list of lists ll

    NOTE(review): mutates ``ll`` in place when ``autonumber`` is set (a
    counter cell is inserted at the front of every row).

    :param ll: list of lists (rows of cells); row 0 is treated as the
        header row when ``header_line`` is true
    :param separator: column separator character
    :param header_line: when true, print a dashed rule around the header row
    :param autonumber: when true, prepend a row-counter column ('#' in the
        header row)
    :returns: the list of computed column widths
    """
    if autonumber:
        # Insert a leading counter cell; the header row (row 0) gets '#'.
        for cnt, i in enumerate(ll):
            i.insert(0, cnt if cnt > 0 or not header_line else '#')
    def lenlst(l):
        # Display widths of the cells in one row.
        return [len(str(i)) for i in l]
    lst_len = [lenlst(i) for i in ll]
    # Transpose the width matrix, then take the max width per column.
    lst_rot = zip(*lst_len[::-1])
    lst_len = [max(i) for i in lst_rot]
    # Build a row format like "|{!s:3}|{!s:10}|" from the column widths.
    frmt = separator + separator.join(["{!s:"+str(i)+"}" for i in lst_len]) + separator
    if header_line:
        # Rebind the flag to the actual rule string (same truthiness).
        header_line = '-' * len(frmt.format(*ll[0]))
    for cnt, l in enumerate(ll):
        # Print the rule above the header (row 0) and below it (row 1).
        if cnt < 2 and header_line:
            print(header_line)
        print(frmt.format(*l))
    if header_line:
        print(header_line)
    return lst_len
def signal_terminate(on_terminate):
    """Register *on_terminate* as the handler for common program
    termination signals (INT, QUIT, USR1, USR2, TERM)."""
    termination_signals = (
        signal.SIGINT,
        signal.SIGQUIT,
        signal.SIGUSR1,
        signal.SIGUSR2,
        signal.SIGTERM,
    )
    for sig in termination_signals:
        signal.signal(sig, on_terminate)
def _safe_mkdir(directory):
    """Create a directory, ignoring errors if it already exists."""
    try:
        os.makedirs(directory)
    except OSError as error:
        # Only swallow "already exists"; anything else is a real failure.
        if error.errno == errno.EEXIST:
            return
        raise error
def _stamp_and_update_hook(method,  # suppress(too-many-arguments)
                           dependencies,
                           stampfile,
                           func,
                           *args,
                           **kwargs):
    """Write stamp and call update_stampfile_hook on method."""
    stamp_result = _stamp(stampfile, func, *args, **kwargs)
    # Refresh the stored dependency hashes after a successful stamp.
    method.update_stampfile_hook(dependencies)
    return stamp_result
def _sha1_for_file(filename):
    """Return sha1 for contents of filename."""
    digest = hashlib.sha1()
    with open(filename, "rb") as fileobj:
        digest.update(fileobj.read())
    return digest.hexdigest()
def check_dependency(self, dependency_path):
    """Check if mtime of dependency_path is greater than stored mtime."""
    known_hash = self._stamp_file_hashes.get(dependency_path)
    if not known_hash:
        # Newly added file, or no stored hashes yet: assume out of date.
        return False
    # Up to date only when the stored hash matches the current contents.
    return known_hash == _sha1_for_file(dependency_path)
def update_stampfile_hook(self, dependencies):  # suppress(no-self-use)
    """Loop over all dependencies and store hash for each of them."""
    current_hashes = {}
    for dependency in dependencies:
        # Skip dependencies that vanished from disk.
        if os.path.exists(dependency):
            current_hashes[dependency] = _sha1_for_file(dependency)
    serialized = json.dumps(current_hashes).encode("utf-8")
    with open(self._stamp_file_hashes_path, "wb") as hashes_file:
        hashes_file.write(serialized)
def unicode_value(self, string):
    """ String argument must be in unicode format. """
    # Reject any string that contains digits outright.
    if self.regex_has_numbers.search(string):
        raise AbnumException(error_msg % string)
    # Replace each known letter with its numeric value plus a separator.
    num_str = self.regex_values.sub(
        lambda match: '%s ' % self.values[match.group()], string)
    # Letters that were not converted to numbers make int() fail below.
    try:
        return sum(int(token) for token in num_str.split())
    except Exception as e:
        raise AbnumException(error_msg % string)
def import_from_string(value):
    """Copy of rest_framework.settings.import_from_string"""
    # Dashes are not valid in module paths; normalize them first.
    dotted = value.replace('-', '_')
    try:
        module_path, class_name = dotted.rsplit('.', 1)
        return getattr(import_module(module_path), class_name)
    except (ImportError, AttributeError) as ex:
        raise ImportError("Could not import '{}'. {}: {}.".format(
            dotted, ex.__class__.__name__, ex))
def _parse_args(args):
    """
    Interpret command line arguments.

    :param args: `sys.argv`
    :return: The populated argparse namespace.
    """
    description = ('Speed, distance and time calculations around '
                   'quantities of digital information.')
    parser = argparse.ArgumentParser(prog='nibble', description=description)
    parser.add_argument('-V', '--version',
                        action='version',
                        version='%(prog)s ' + nibble.__version__)
    parser.add_argument('-v', '--verbosity',
                        help='increase output verbosity',
                        action='count',
                        default=0)
    parser.add_argument('expression',
                        type=util.decode_cli_arg,
                        nargs='+',
                        help='the calculation to execute')
    # Position 0 is the program name; parse everything after it.
    return parser.parse_args(args[1:])
def main(args):
    """
    Nibble's entry point.

    :param args: Command-line arguments, with the program in position 0.
    :returns: process exit status — 0 on success, 1 on a lexing/parsing
        error in the expression.
    """
    args = _parse_args(args)

    # sort out logging output and level
    # NOTE(review): 'log_level_from_vebosity' looks like a typo for
    # 'verbosity', but it is the helper's actual name in `util` — do not
    # "fix" the call here without renaming the helper.
    level = util.log_level_from_vebosity(args.verbosity)
    root = logging.getLogger()
    root.setLevel(level)
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter('%(levelname)s %(message)s'))
    root.addHandler(handler)
    logger.debug(args)

    # Multiple positional arguments form a single expression string.
    expression = ' '.join(args.expression)

    try:
        print(Parser().parse(expression))
    except (LexingError, ParsingError) as e:
        util.print_error(e)
        return 1

    return 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def run(self, conn, tmp, module_name, module_args, inject): ''' transfer & execute a module that is not 'copy' or 'template' ''' # shell and command are the same module if module_name == 'shell': module_name = 'command' module_args += " #USE_SHELL" vv("REMOTE_MODULE %s %s" % (module_name, module_args), host=conn.host) return self.runner._execute_module(conn, tmp, module_name, module_args, inject=inject)
def get_models(args):
    """
    Parse a list of ModelName, appname or appname.ModelName list, and return
    the list of model classes in the IndexRegistry. If the list if falsy,
    return all the models in the registry.
    """
    if not args:
        return set(registry.get_models())
    models = []
    for arg in args:
        matched = False
        for model in registry.get_models():
            app_label = model._meta.app_label
            dotted = '%s.%s' % (app_label, model._meta.model_name)
            # An arg may name a whole app or a single app.model.
            if app_label == arg or dotted == arg:
                models.append(model)
                matched = True
        if not matched:
            raise ValueError("No model or app named %s" % arg)
    return set(models)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def plot_dives(dv0, dv1, p, dp, t_on, t_off):
    '''Plots depths and delta depths with dive start stop markers

    Args
    ----
    dv0: int
        Index position of dive start in cue array
    dv1: int
        Index position of dive stop in cue array
    p: ndarray
        Depth values
    dp: ndarray
        Delta depths
    t_on: ndarray
        Cue array with start index position of dives
    t_off: ndarray
        Cue array with stop index position of dives
    '''
    fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)

    # Dive start/stop positions relative to the first plotted dive.
    x0 = t_on[dv0:dv1] - t_on[dv0]
    x1 = t_off[dv0:dv1] - t_on[dv0]

    # Extract start end depths
    y0_p = p[t_on[dv0:dv1]]
    y1_p = p[t_off[dv0:dv1]]

    # Extract start end delta depths
    y0_dp = dp[t_on[dv0:dv1]]
    y1_dp = dp[t_off[dv0:dv1]]

    start = t_on[dv0]
    stop = t_off[dv1]

    ax1.title.set_text('Dives depths')
    ax1.plot(range(len(p[start:stop])), p[start:stop])
    ax1.scatter(x0, y0_p, label='start')
    ax1.scatter(x1, y1_p, label='stop')
    ax1.set_ylabel('depth (m)')

    # BUG FIX: this title was set on ax1, silently overwriting the first
    # subplot's title and leaving the second subplot untitled.
    ax2.title.set_text('Depth rate of change')
    ax2.plot(range(len(dp[start:stop])), dp[start:stop])
    ax2.scatter(x0, y0_dp, label='start')
    ax2.scatter(x1, y1_dp, label='stop')
    ax2.set_ylabel('depth (dm/t)')
    ax2.set_xlabel('sample')

    for ax in [ax1, ax2]:
        ax.legend(loc='upper right')
        ax.set_xlim([-50, len(dp[start:stop])+50])

    plt.show()

    return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def plot_dives_pitch(depths, dive_mask, des, asc, pitch, pitch_lf):
    '''Plot dives with phase and associated pitch angle with HF signal

    Args
    ----
    depths: ndarray
        Depth values at each sensor sampling
    dive_mask: ndarray
        Boolean mask slicing dives from the tag data
    des: ndarray
        boolean mask for slicing descent phases of dives from tag data
    asc: ndarray
        boolean mask for slicing ascent phases of dives from tag data
    pitch: ndarray
        Pitch angle derived from accelerometer data
    pitch_lf: ndarray
        Low-pass filtered derived pitch angle data
    '''
    # NOTE: the unused `import copy` was removed.
    import numpy

    from . import plotutils

    fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)

    # Sample indices belonging to dive descents / ascents.
    des_ind = numpy.where(dive_mask & des)[0]
    asc_ind = numpy.where(dive_mask & asc)[0]

    ax1.title.set_text('Dive descents and ascents')
    ax1 = plotutils.plot_noncontiguous(ax1, depths, des_ind, _colors[0],
                                       'descents')
    ax1 = plotutils.plot_noncontiguous(ax1, depths, asc_ind, _colors[1],
                                       'ascents')
    ax1.legend(loc='upper right')
    # Depth increases downward.
    ax1.invert_yaxis()
    ax1.yaxis.label.set_text('depth (m)')
    ax1.xaxis.label.set_text('samples')

    ax2.title.set_text('Pitch and Low-pass filtered pitch')
    ax2.plot(range(len(pitch)), pitch, color=_colors[2],
             linewidth=_linewidth, label='pitch')
    ax2.plot(range(len(pitch_lf)), pitch_lf, color=_colors[3],
             linewidth=_linewidth, label='pitch filtered')
    ax2.legend(loc='upper right')
    ax2.yaxis.label.set_text('Radians')
    # BUG FIX: this label was assigned to ax2.yaxis, clobbering the
    # 'Radians' label set just above and leaving the x-axis unlabeled.
    ax2.xaxis.label.set_text('Samples')

    plt.show()

    return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def plot_depth_descent_ascent(depths, dive_mask, des, asc):
    '''Plot depth data for whole deployment, descents, and ascents

    Args
    ----
    depths: ndarray
        Depth values at each sensor sampling
    dive_mask: ndarray
        Boolean mask slicing dives from the tag data
    des: ndarray
        boolean mask for slicing descent phases of dives from tag data
    asc: ndarray
        boolean mask for slicing ascent phases of dives from tag data
    '''
    import numpy
    from . import plotutils

    # Indices where depths are descents or ascents
    des_ind = numpy.where(dive_mask & des)[0]
    asc_ind = numpy.where(dive_mask & asc)[0]

    fig, ax1 = plt.subplots()

    ax1.title.set_text('Dive descents and ascents')
    # Descent and ascent samples are non-contiguous runs, so each phase is
    # drawn via the project helper rather than a single ax.plot call.
    ax1 = plotutils.plot_noncontiguous(ax1, depths, des_ind, _colors[0],
                                       'descents')
    ax1 = plotutils.plot_noncontiguous(ax1, depths, asc_ind, _colors[1],
                                       'ascents')
    ax1.legend(loc='upper right')
    # Depth increases downward.
    ax1.invert_yaxis()
    ax1.yaxis.label.set_text('depth (m)')
    ax1.xaxis.label.set_text('samples')

    plt.show()

    return None
def _update_docstrings(self):
    """
    Runs through the operation methods & updates their docstrings if
    necessary.

    If the method has the default placeholder docstring, this will
    replace it with the docstring from the underlying connection.
    """
    ops = self._details.resource_data['operations']

    for method_name in ops.keys():
        meth = getattr(self.__class__, method_name, None)

        if not meth:
            # Operation has no matching method on this class; skip it.
            continue

        if meth.__doc__ != DEFAULT_DOCSTRING:
            # It already has a custom docstring. Leave it alone.
            continue

        # Needs updating. So there's at least *something* vaguely useful
        # there, use the docstring from the underlying ``Connection``
        # method.
        # FIXME: We need to figure out a way to make this more useful, if
        #        possible.
        api_name = ops[method_name]['api_name']
        conn_meth = getattr(self._connection, to_snake_case(api_name))

        # We need to do detection here, because Py2 treats ``.__doc__``
        # as a special read-only attribute. :/
        if six.PY3:
            meth.__doc__ = conn_meth.__doc__
        else:
            meth.__func__.__doc__ = conn_meth.__doc__
def build_relation(self, name, klass=None):
    """
    Constructs a related ``Resource`` or ``Collection``.

    This allows for construction of classes with information prepopulated
    from what the current instance has. This enables syntax like::

        bucket = Bucket(bucket='some-bucket-name')

        for obj in bucket.objects.each():
            print(obj.key)

    :param name: The name of the relation from the ResourceJSON
    :type name: string

    :param klass: (Optional) An overridable class to construct. Typically
        only useful if you need a custom subclass used in place of what
        kotocore provides.
    :type klass: class

    :returns: An instantiated related object
    :raises NoRelation: if ``name`` is not a known relation, or if the
        relation's ``class_type`` is neither 'collection' nor 'resource'
    """
    try:
        rel_data = self._details.relations[name]
    except KeyError:
        msg = "No such relation named '{0}'.".format(name)
        raise NoRelation(msg)

    if klass is None:
        # This is the typical case, where we're not explicitly given a
        # class to build with. Hit the session & look up what we should
        # be loading.
        if rel_data['class_type'] == 'collection':
            klass = self._details.session.get_collection(
                self._details.service_name,
                rel_data['class']
            )
        elif rel_data['class_type'] == 'resource':
            klass = self._details.session.get_resource(
                self._details.service_name,
                rel_data['class']
            )
        else:
            msg = "Unknown class '{0}' for '{1}'.".format(
                rel_data['class_type'],
                name
            )
            raise NoRelation(msg)

    # Instantiate & return it.
    kwargs = {}
    # Just populating identifiers is enough for the 1-M case.
    kwargs.update(self.get_identifiers())

    if rel_data.get('rel_type', '1-M') == '1-1':
        # FIXME: If it's not a collection, we might have some instance data
        #        (i.e. ``bucket``) in ``self._data`` to populate as well.
        #        This seems like a can of worms, so ignore for the moment.
        pass

    return klass(connection=self._connection, **kwargs)
def post_process_get(self, result):
    """
    Given an object with identifiers, fetches the data for that object
    from the service.

    This alters the data on the object itself & simply passes through what
    was received.

    :param result: The response data
    :type result: dict

    :returns: The unmodified response data
    """
    if not hasattr(result, 'items'):
        # If it's not a dict, give up & just return whatever you get.
        return result

    # We need to possibly drill into the response & get out the data here.
    # Check for a result key.
    result_key = self._details.result_key_for('get')
    # No result_key means the top-level data is what we want.
    data = result if not result_key else result[result_key]

    for key, value in data.items():
        self._data[to_snake_case(key)] = value

    return result
def construct_for(self, service_name, resource_name, base_class=None):
    """
    Builds a new, specialized ``Resource`` subclass as part of a given
    service.

    This will load the ``ResourceJSON``, determine the correct
    mappings/methods & constructs a brand new class with those methods on
    it.

    :param service_name: The name of the service to construct a resource
        for. Ex. ``sqs``, ``sns``, ``dynamodb``, etc.
    :type service_name: string

    :param resource_name: The name of the ``Resource``. Ex. ``Queue``,
        ``Notification``, ``Table``, etc.
    :type resource_name: string

    :param base_class: (Optional) the class to use as the base of the new
        type; defaults to ``self.base_resource_class``

    :returns: A new resource class for that service
    """
    details = self.details_class(
        self.session,
        service_name,
        resource_name,
        loader=self.loader
    )
    attrs = {
        '_details': details,
    }

    # Determine what we should call it.
    klass_name = self._build_class_name(resource_name)

    # Construct what the class ought to have on it.
    attrs.update(self._build_methods(details))

    if base_class is None:
        base_class = self.base_resource_class

    # Create the class.
    return type(
        klass_name,
        (base_class,),
        attrs
    )
def filter_headers(criterion):
    """Filter already loaded headers against some criterion.

    The criterion function must accept a single argument, which is an
    instance of sastool.classes2.header.Header, or one of its subclasses.
    The function must return True if the header is to be kept or False if
    it needs to be discarded. All manipulations on the header (including
    sample name changes, etc.) carried out by this function are preserved.
    """
    ip = get_ipython()
    for headerkind in ['processed', 'raw']:
        # Collect rejects first, then remove them, so we never mutate the
        # list while deciding.
        rejected = [h for h in ip.user_ns['_headers'][headerkind]
                    if not criterion(h)]
        for h in rejected:
            ip.user_ns['_headers'][headerkind].remove(h)
    ip.user_ns['allsamplenames'] = {
        h.title for h in ip.user_ns['_headers']['processed']}
def load_headers(fsns:List[int]):
    """Load header files.

    For each FSN, the first loader (matching the raw/processed kind) that
    finds the file wins. Results are stored in the IPython user namespace
    under ``_headers['raw']`` / ``_headers['processed']``, and the set of
    processed sample names under ``allsamplenames``.

    :param fsns: file sequence numbers of the headers to load
    """
    ip = get_ipython()
    ip.user_ns['_headers'] = {}
    for type_ in ['raw', 'processed']:
        print("Loading %d headers (%s)" % (len(fsns), type_), flush=True)
        processed = type_ == 'processed'
        headers = []
        for f in fsns:
            # Try each loader of the matching kind until one succeeds.
            for l in [l_ for l_ in ip.user_ns['_loaders'] if l_.processed == processed]:
                try:
                    headers.append(l.loadheader(f))
                    break
                except FileNotFoundError:
                    continue
        allsamplenames = {h.title for h in headers}
        if not headers:
            print('NO HEADERS READ FOR TYPE "%s"' % type_)
        else:
            print("%d headers (%s) out of %d have been loaded successfully." % (len(headers), type_, len(fsns)))
            print('Read FSN range:', min([h.fsn for h in headers]), 'to', max([h.fsn for h in headers]))
            print("Samples covered by these headers:")
            print(" " + "\n ".join(sorted(allsamplenames)), flush=True)
        if processed:
            ip.user_ns['allsamplenames'] = allsamplenames
        ip.user_ns['_headers'][type_] = headers
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_size(path):
    '''Return the size of path in bytes if it exists and can be determined.'''
    # Start with the directory entry itself, then add every file beneath it.
    total = os.path.getsize(path)
    for dirpath, _dirnames, filenames in os.walk(path):
        total += sum(os.path.getsize(os.path.join(dirpath, name))
                     for name in filenames)
    return total
def local_data(path):
    """Return tuples of names, directories, total sizes and files.
    Each directory represents a single film and the files are the files
    contained in the directory, such as video, audio and subtitle files."""
    dirs = [os.path.join(path, item) for item in os.listdir(path)]
    names = []
    sizes = []
    files = []
    for directory in dirs:
        # Final path component is the film's name.
        names.append(directory.split('/')[-1])
        sizes.append(str(get_size(directory)))
        # File names are packed into one string with a '##' separator.
        files.append('##'.join([entry for entry in os.listdir(directory)]))
    return zip(names, dirs, sizes, files)
def create():
    """Create a new database with information about the films in the
    specified directory or directories.

    Reads the global ``ARGS`` namespace (``ARGS.directory``,
    ``ARGS.database``). Any existing ``Movies`` table is dropped and
    rebuilt from scratch.
    """
    if not all(map(os.path.isdir, ARGS.directory)):
        exit('Error: One or more of the specified directories does not exist.')
    with sqlite3.connect(ARGS.database) as connection:
        connection.text_factory = str
        cursor = connection.cursor()
        # Rebuild the table on every run.
        cursor.execute('DROP TABLE IF EXISTS Movies')
        cursor.execute('''CREATE TABLE Movies(name TEXT, path TEXT, size TEXT, files BLOB)''')
        for dir in ARGS.directory:
            cursor.executemany('INSERT INTO Movies VALUES(?, ?, ?, ?)', local_data(dir))
def ls():
    """List all items in the database in a predefined format.

    Reads the global ``ARGS`` namespace; exactly one of ``ARGS.name`` /
    ``ARGS.location`` / ``ARGS.size`` / ``ARGS.files`` selects a
    single-column listing, otherwise full records are printed.
    NOTE(review): Python 2 print statements — this module is Python 2 only.
    """
    if not os.path.exists(ARGS.database):
        exit('Error: The database does not exist; you must create it first.')
    with sqlite3.connect(ARGS.database) as connection:
        connection.text_factory = str
        cursor = connection.cursor()
        if ARGS.pattern:
            if not ARGS.strict:
                # Substring match unless strict matching was requested.
                ARGS.pattern = '%{0}%'.format(ARGS.pattern)
            cursor.execute('SELECT * FROM Movies WHERE Name LIKE (?)', [ARGS.pattern])
        else:
            cursor.execute('SELECT * FROM Movies')
        movies = sorted([row for row in cursor])
        if ARGS.name:
            print '\n'.join([movie[0] for movie in movies])
        elif ARGS.location:
            print '\n'.join([movie[1] for movie in movies])
        elif ARGS.size:
            print '\n'.join([prefix_size(int(movie[2])) for movie in movies])
        elif ARGS.files:
            for movie in movies:
                print ', '.join(movie[3].split('##'))
        else:
            # Full record: name, location, human-readable size, file list.
            for i, movie in enumerate(movies):
                print 'Name:\t\t{0}'.format(movie[0])
                print 'Location:\t{0}'.format(movie[1])
                print 'Size:\t\t{0}'.format(prefix_size(int(movie[2])))
                print 'Files:\t\t{0}'.format(', '.join(movie[3].split('##')))
                # Blank line between records, but not after the last one.
                if not i == len(movies) - 1:
                    print
def play():
    """Open the matched movie with a media player.

    Looks up ``ARGS.pattern`` in the database and launches ``ARGS.player``
    on the first match (alphabetically first row). NOTE(review): Python 2
    only (``iteritems``); the path is shell-escaped by hand before being
    passed to ``os.system``.
    """
    with sqlite3.connect(ARGS.database) as connection:
        connection.text_factory = str
        cursor = connection.cursor()
        if ARGS.pattern:
            if not ARGS.strict:
                # Substring match unless strict matching was requested.
                ARGS.pattern = '%{0}%'.format(ARGS.pattern)
            cursor.execute('SELECT * FROM Movies WHERE Name LIKE (?)', [ARGS.pattern])
        try:
            path = sorted([row for row in cursor])[0][1]
            # Escape spaces and quotes for the shell command below.
            replace_map = {' ': '\\ ', '"': '\\"', "'": "\\'"}
            for key, val in replace_map.iteritems():
                path = path.replace(key, val)
            # Launch the player detached ('&') via the shell.
            os.system('{0} {1} &'.format(ARGS.player, path))
        except IndexError:
            exit('Error: Movie not found.')
def random_string(length, numeric_only=False):
    """ Generates a random string of length equal to the length parameter """
    if numeric_only:
        alphabet = string.digits
    else:
        alphabet = string.ascii_uppercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
def random_date(start_year=2000, end_year=2020):
    """ Generates a random "sensible" date for use in things like issue dates
    and maturities """
    year = random.randint(start_year, end_year)
    month = random.randint(1, 12)
    # Day capped at 28 so the date is valid in every month.
    day = random.randint(1, 28)
    return date(year, month, day)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _finite_well_energy(P, n=1, atol=1e-6):
    '''
    Returns the nth bound-state energy for a finite-potential quantum
    well with the given well-strength parameter, `P`.  The result is in
    units of hbar**2 / (m * L**2).
    '''
    # NOTE(review): pi, arcsin, and sqrt are assumed to come from numpy
    # (imported elsewhere in this module) -- confirm.
    # The requested state must exist for this well strength.
    assert n > 0 and n <= _finite_well_states(P)
    pi_2 = pi / 2.
    # Initial guess for r (bound states require 0 < r <= 1).
    r = (1 / (P + pi_2)) * (n * pi_2)
    # Residual of the transcendental bound-state equation; iterate until
    # eta is within atol of zero.
    eta = n * pi_2 - arcsin(r) - r * P
    w = 1  # relaxation parameter (for successive relaxation)
    while True:
        assert r <= 1
        if abs(eta) < atol:
            break
        # First-order correction term for r derived from the residual.
        r2 = r ** 2.
        sqrt_1mr2 = sqrt(1. - r2)
        denom = (1. + P * sqrt_1mr2)
        t1 = P * sqrt_1mr2 / denom * eta
        # t2 = -r * P / (2 * (1. + P * sqrt_1mr2) ** 3) * eta ** 2
        while True:
            # Under-relaxed update: w = 1 is a full step.
            next_r = (1 - w) * r + w * (r + t1)
            # next_r = (1 - w) * r + w * (r + t1 + t2)
            next_eta = n * pi_2 - arcsin(next_r) - next_r * P
            # decrease w until eta is converging
            if abs(next_eta / eta) < 1:
                r = next_r
                eta = next_eta
                break
            else:
                w *= 0.5
    # alpha is the dimensionless wavevector; the energy follows from it.
    alpha = P * r
    E = 2 * (alpha) ** 2  # hbar**2 / (m * L**2)
    return E
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def top(self, n=10, cache=None, prefetch=False): """Find the most popular torrents. Return an array of Torrent objects representing the top n torrents. If the cache option is non-None, override the Scraper's default caching settings. Use the prefetch option to hit each Torrent's info page up front (instead of lazy fetching the info on-demand later). """
def top(self, n=10, cache=None, prefetch=False):
    """Return the *n* most popular torrents as Torrent objects.

    A non-None *cache* overrides this scraper's default caching policy.
    When *prefetch* is true, each torrent's info page is fetched up
    front instead of lazily on demand.
    """
    caching = self._use_cache(cache)
    # Serve from the cached top list when it already holds enough entries.
    if caching and len(self._top_cache) >= n:
        return self._top_cache[:n]
    page = get(TOP).soup
    torrents = []
    for anchor in page.find_all("a", class_="detLink")[:n]:
        full_url = urlparse.urljoin(TOP, anchor.get('href'))
        torrents.append(self.torrent_from_url(full_url, caching, prefetch))
    if caching:
        self._top_cache = torrents
        self._add_to_torrent_cache(torrents)
    return torrents
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def torrent_from_url(self, url, cache=True, prefetch=False): """Create a Torrent object from a given URL. If the cache option is set, check to see if we already have a Torrent object representing it. If prefetch is set, automatically query the torrent's info page to fill in the torrent object. (If prefetch is false, then the torrent page will be queried lazily on-demand.) """
def torrent_from_url(self, url, cache=True, prefetch=False):
    """Return a Torrent object for *url*.

    When *cache* is set and a Torrent for this URL was built before, the
    cached object is reused.  When *prefetch* is true the torrent's info
    page is queried immediately; otherwise it is fetched lazily.
    """
    known = self._torrent_cache
    if self._use_cache(cache) and url in known:
        return known[url]
    result = Torrent(url, cache, prefetch)
    if cache:
        known[url] = result
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _send_command(self, command, immediate=False, timeout=1.0, check_echo=None): """ Send a single command to the drive after sanitizing it. Takes a single given `command`, sanitizes it (strips out comments, extra whitespace, and newlines), sends the command to the drive, and returns the sanitized command. The validity of the command is **NOT** checked. Parameters command : str The command to send to the Gemini drive. immediate : bool, optional Whether to make it so the command is executed immediately or not. timeout : number, optional Optional timeout in seconds to use to get the command right when we are doing echo checking. A negative value or ``None`` indicates that the an infinite timeout should be used. check_echo : bool or None, optional Whether the echoing of the command as it is being written to the drive should be used to correct mistakes in what the drive is seeing, or whether the default set when the instance of this class was created should be used (``None``). Returns ------- sanitized_command : str The sanitized command that was sent to the drive. """
def _send_command(self, command, immediate=False, timeout=1.0,
                  check_echo=None):
    """Sanitize *command* (strip comments/whitespace/newlines), write it
    to the drive character by character, and return the sanitized form.
    The command's validity is NOT checked."""
    # Use the default echo checking if None was given.
    if check_echo is None:
        check_echo = self._check_echo
    # Convert to bytes and then strip comments, whitespace, and
    # newlines.
    if sys.hexversion >= 0x03000000:
        c = bytes(command, encoding='ASCII')
    else:
        c = command
    c = c.split(b';')[0].strip()
    # If the command is supposed to be immediate, insure that it
    # starts with an '!'.
    if immediate and not c.startswith(b'!'):
        c = b'!' + c
    # Read out any junk on the serial port before we start.
    self._ser.read(self._ser.inWaiting())
    # The command needs to be written a character at a time with
    # pauses between them to make sure nothing gets lost or
    # corrupted. This is a simple loop if we are not checking the
    # echo. If we are, it is more complicated.
    if not check_echo:
        for i in range(0, len(c)):
            self._ser.write(bytes([c[i]]))
            time.sleep(0.01)
    else:
        # Infinite timeouts need to be converted to None. Finite
        # ones need to be checked to make sure they are not too big,
        # which is threading.TIMEOUT_MAX on Python 3.x and not
        # specified on Python 2.x (lets use a week).
        if timeout is None or timeout <= 0:
            timeout = None
        else:
            if sys.hexversion >= 0x03000000:
                maxtimeout = threading.TIMEOUT_MAX
            else:
                maxtimeout = 7*24*3600
            timeout = min(timeout, maxtimeout)
        # A timer will be made that takes timeout to finish. Then,
        # it is a matter of checking whether it is alive or not to
        # know whether the timeout was exceeded or not. Then, the
        # timer is started.
        tm = threading.Timer(timeout, lambda : None)
        tm.start()
        # Each character needs to be written one by one while the
        # echo is collected. If any mistakes occur, they need to be
        # corrected with backspaces b'\x08'. The echo starts out
        # empty. We go until either the echo is identical to the
        # command or the timeout is exceeded.
        echo = b''
        while c != echo and tm.is_alive():
            # If there are no mistakes, then echo will be the
            # beginning of c meaning the next character can be
            # written. Otherwise, there is a mistake and a backspace
            # needs to be written.
            if c.startswith(echo):
                self._ser.write(bytes([c[len(echo)]]))
            else:
                self._ser.write(b'\x08')
            # Pause for a bit to make sure nothing gets lost. Then
            # read the drive's output and add it to the echo.
            time.sleep(0.01)
            echo += self._ser.read(self._ser.inWaiting())
            # All backspaces in echo need to be processed. Each
            # backspace deletes itself and the character before it
            # (if any).
            while b'\x08' in echo:
                index = echo.index(b'\x08')
                if index == 0:
                    echo = echo[1:]
                else:
                    echo = echo[0:(index-1)] + echo[(index+1):]
        # Turn off the timer in the case that it is still running
        # (command completely written before timeout).
        tm.cancel()
    # Write the carriage return to enter the command and then return
    # the sanitized command.
    self._ser.write(b'\r')
    if sys.hexversion >= 0x03000000:
        return c.decode(errors='replace')
    else:
        return c
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_response(self, timeout=1.0, eor=('\n', '\n- ')): """ Reads a response from the drive. Reads the response returned by the drive with an optional timeout. All carriage returns and linefeeds are kept. Parameters timeout : number, optional Optional timeout in seconds to use when reading the response. A negative value or ``None`` indicates that the an infinite timeout should be used. eor : str or iterable of str, optional ``str`` or iterable of ``str`` that denote the allowed End Of Response. For most commands, it should be ``('\\n', '\\n- ')``, but for running a program, it should be ``'*END\\n'``. The default is ``('\\n', '\\n- ')``. Returns ------- response : str The response obtained from the drive. Carriage returns and linefeeds are preserved. """
def _get_response(self, timeout=1.0, eor=('\n', '\n- ')):
    """Read one response from the drive, preserving carriage returns and
    linefeeds.  *eor* is the allowed End Of Response string(s)."""
    # If no timeout is given or it is invalid and we are using '\n'
    # as the eor, use the wrapper to read a line with an infinite
    # timeout. Otherwise, the reading and timeout must be
    # implemented manually.
    if (timeout is None or timeout < 0) and eor == '\n':
        return self._sio.readline()
    else:
        # A timer will be made that takes timeout to finish. Then,
        # it is a matter of checking whether it is alive or not to
        # know whether the timeout was exceeded or not. They need to
        # be checked to make sure they are not too big, which is
        # threading.TIMEOUT_MAX on Python 3.x and not specified on
        # Python 2.x (lets use a week). Then, the timer is started.
        # NOTE(review): if timeout is None and eor is not '\n' we reach
        # min(None, maxtimeout), which raises TypeError on Python 3 --
        # confirm whether that combination can occur in practice.
        if sys.hexversion >= 0x03000000:
            maxtimeout = threading.TIMEOUT_MAX
        else:
            maxtimeout = 7*24*3600
        timeout = min(timeout, maxtimeout)
        tm = threading.Timer(timeout, lambda : None)
        tm.start()
        # eor needs to be converted to bytes. If it is just an str,
        # it needs to be wrapped in a tuple.
        if isinstance(eor, str):
            eor = tuple([eor])
        if sys.hexversion >= 0x03000000:
            eor = [s.encode(encoding='ASCII') for s in eor]
        # Read from the serial port into buf until the EOR is found
        # or the timer has stopped. A small pause is done each time
        # so that this thread doesn't hog the CPU.
        buf = b''
        while not any([(x in buf) for x in eor]) and tm.is_alive():
            time.sleep(0.001)
            buf += self._ser.read(self._ser.inWaiting())
        # Just in case the timer has not stopped (EOR was found),
        # stop it.
        tm.cancel()
        # Remove anything after the EOR if there is one. Build (index,
        # eor_str) matches for every EOR candidate; sorting by index
        # pushes the not-found ones (index -1) to the front. The bytes
        # trick finds the first successful match (True -> b'\x01'), and
        # everything after that EOR string is truncated.
        matches = [(buf.find(x), x) for x in eor]
        matches.sort(key=lambda x: x[0])
        index = bytes([x[0] != -1 for x in matches]).find(b'\x01')
        if index != -1:
            buf = buf[:(matches[index][0] + len(matches[index][1]))]
        # Convert to an str before returning.
        if sys.hexversion >= 0x03000000:
            return buf.decode(errors='replace')
        else:
            return buf
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _process_response(self, response): """ Processes a response from the drive. Processes the response returned from the drive. It is broken down into the echoed command (drive echoes it back), any error returned by the drive (leading '*' is stripped), and the different lines of the response. Parameters response : str The response returned by the drive. Returns ------- processed_response : list A 4-element ``list``. The elements, in order, are `response` (``str``), the echoed command (``str``), any error response (``None`` if none, or the ``str`` of the error), and the lines of the response that are not the echo or error line (``list`` of ``str`` with newlines stripped). """
# Strip the trailing newline and split the response into lines # by carriage returns. rsp_lines = response.rstrip('\r\n').split('\r') # If we have at least one line, the first one is the echoed # command. If available, it needs to be grabbed and that line # removed from rsp_lines since it is just the echoing, not the # actual response to the command. None will be used to denote a # non-existent echo. if len(rsp_lines) > 0: echoed_command = rsp_lines[0] del rsp_lines[0] else: echoed_command = None # If the next line is one of the different possible error # strings, then there was an error that must be grabbed (leading # '*' is stripped). If there was an error, remove that line from # the response. None will be used to denote the lack of an error. if len(rsp_lines) > 0 and \ rsp_lines[0] in ('*INVALID_ADDRESS', '*INVALID_DATA', \ '*INVALID_DATA_HIGH', '*INVALID_DATA_LOW', \ '*UNDEFINED_LABEL'): err = rsp_lines[0][1:] del rsp_lines[0] else: err = None return [response, echoed_command, err, rsp_lines]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send_command(self, command, immediate=False, timeout=1.0, max_retries=0, eor=('\n', '\n- ')): """ Sends a single command to the drive and returns output. Takes a single given `command`, sanitizes it, sends it to the drive, reads the response, and returns the processed response. The command is first sanitized by removing comments, extra whitespace, and newline characters. If `immediate` is set, the command is made to be an immediate command. Note, the command is **NOT** checked for validity. If the drive returns an error, the command is re-executed up to `max_tries` more times. The response from the final execution is processed and returned. The response from the drive is broken down into the echoed command (drive echoes it back), any error returned by the drive (leading '*' is stripped), and the different lines of the response; which are all returned. Parameters command : str The command to send to the Gemini drive. immediate : bool, optional Whether to make it so the command is executed immediately or not. timeout : float or None, optional Optional timeout in seconds to use when reading the response. A negative value or ``None`` indicates that the an infinite timeout should be used. max_retries : int, optional Maximum number of retries to do per command in the case of errors. eor : str or iterable of str, optional ``str`` or an iterable of ``str`` that denote the allowed End Of Response. For most commands, it should be ``('\\n', '\\n- ')``, but for running a program, it should be ``'*END\\n'``. The default is ``('\\n', '\\n- ')``. Returns ------- output : list A 5-element ``list``. 
The elements, in order, are the sanitized command (``str``), the full response (``str``), the echoed command (``str``), any error response (``None`` if none, or the ``str`` of the error), and the lines of the response that are not the echo or error line (``list`` of ``str`` with newlines stripped). See Also -------- send_commands : Send multiple commands. Examples -------- Simple command energizing the motor with no response and no errors. ['DRIVE1', 'DRIVE1\\r\\r\\n', 'DRIVE1', None, []] Same command but made immediate. ['!DRIVE1', '!DRIVE1\\r\\r\\n', '!DRIVE1', None, []] Same command with a typo. ['DRIV1', 'DRIV1\\r*UNDEFINED_LABEL\\r\\r\\n', 'DRIV1', 'UNDEFINED_LABEL', []] Simple command asking whether the motor is energized or not. ['DRIVE', 'DRIVE\\r*DRIVE1\\r\\r\\n', 'DRIVE', None, ['*DRIVE1']] """
# Execute the command till it either doesn't have an error or # the maximum number of retries is exceeded. for i in range(0, max_retries+1): # Send the command and stuff the sanitized version in a # list. Then process the response and add it to the list. response = [self._send_command(command, immediate=immediate)] output = self._get_response(timeout=timeout, eor=eor) # If echo checking was done, the echo was already grabbed, # is identical to the command, and needs to be placed back # in front of the output so that it can be processed # properly. if self._check_echo: output = response[0] + output response.extend(self._process_response(output)) # We are done if there is no error. if not self.command_error(response): break # Put in a slight pause so the drive has a bit of breathing # time between retries. time.sleep(0.25) return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send_commands(self, commands, timeout=1.0, max_retries=1, eor=('\n', '\n- ')): """ Send a sequence of commands to the drive and collect output. Takes a sequence of many commands and executes them one by one till either all are executed or one runs out of retries (`max_retries`). Retries are optionally performed if a command's repsonse indicates that there was an error. Remaining commands are not executed. The processed output of the final execution (last try or retry) of each command that was actually executed is returned. This function basically feeds commands one by one to ``send_command`` and collates the outputs. Parameters commands : iterable of str Iterable of commands to send to the drive. Each command must be an ``str``. timeout : float or None, optional Optional timeout in seconds to use when reading the response. A negative value or ``None`` indicates that the an infinite timeout should be used. max_retries : int, optional Maximum number of retries to do per command in the case of errors. eor : str or iterable of str, optional End Of Resonse. An EOR is either a ``str`` or an iterable of ``str`` that denote the possible endings of a response. 'eor' can be a single EOR, in which case it is used for all commands, or it can be an iterable of EOR to use for each individual command. For most commands, it should be ``('\\n', '\\n- ')``, but for running a program, it should be ``'*END\\n'``. The default is ``('\\n', '\\n- ')``. Returns ------- outputs : list of lists ``list`` composed of the processed responses of each command in the order that they were done up to and including the last command executed. See ``send_command`` for the format of processed responses. See Also -------- send_command : Send a single command. 
Examples -------- A sequence of commands to energize the motor, move it a bit away from the starting position, and then do 4 forward/reverse cycles, and de-energize the motor. **DO NOT** try these specific movement distances without checking that the motion won't damage something (very motor and application specific). [['DRIVE1', 'DRIVE1\\r', 'DRIVE1', None, []], ['D-10000', 'D-10000\\r', 'D-10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['D-10000', 'D-10000\\r', 'D-10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['D10000', 'D10000\\r', 'D10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['D-10000', 'D-10000\\r', 'D-10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['D10000', 'D10000\\r', 'D10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['D-10000', 'D-10000\\r', 'D-10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['D10000', 'D10000\\r', 'D10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['D-10000', 'D-10000\\r', 'D-10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['D10000', 'D10000\\r', 'D10000', None, []], ['GO', 'GO\\r', 'GO', None, []], ['DRIVE0', 'DRIVE0\\r', 'DRIVE0', None, []]] """
# If eor is not a list, make a list of it replicated enough for # every command. if not isinstance(eor, list): eor = [eor]*len(commands) # Do every command one by one, collecting the responses and # stuffing them in a list. Commands that failed are retried, and # we stop if the last retry is exhausted. responses = [] for i, command in enumerate(commands): rsp = self.send_command(command, timeout=timeout, max_retries=max_retries, eor=eor[i]) responses.append(rsp) if self.command_error(rsp): break # Put in a slight pause so the drive has a bit of breathing # time between commands. time.sleep(0.25) return responses
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def low_level_scan(self, verification_resource, scan_profile_resource, path_list, notification_resource_list): """ Low level implementation of the scan launch which allows you to start a new scan when you already know the ids for the required resources. :param verification_resource: The verification associated with the domain resource to scan :param scan_profile_resource: The scan profile resource :param path_list: A list with the paths :param notification_resource_list: The notifications to use All the *_resource* parameters are obtained by calling the respective getters such as: - get_email_notification - get_scan_profile And are expected to be of Resource type This method's last step is to send a POST request to /1.0/scans/ using a post-data similar to: {"verification_href": "/1.0/verifications/6", "profile_href": "/1.0/profiles/2", "start_time": "now", "email_notifications_href": [], "path_list": ["/"]}' :return: The newly generated scan id """
def low_level_scan(self, verification_resource, scan_profile_resource,
                   path_list, notification_resource_list):
    """Start a scan immediately from already-resolved resource objects.

    Builds the POST payload from the hrefs of the verification, scan
    profile, and notification resources plus *path_list*, then creates
    the scan via the API and returns the newly generated scan id.
    """
    notification_hrefs = [notification.href
                          for notification in notification_resource_list]
    payload = {"verification_href": verification_resource.href,
               "profile_href": scan_profile_resource.href,
               "start_time": "now",
               "email_notifications_href": notification_hrefs,
               "path_list": path_list}
    scans_url = self.build_full_url('/scans/')
    return self.create_resource(scans_url, payload)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setAsApplication(myappid): """ Tells Windows this is an independent application with an unique icon on task bar. id is an unique string to identify this application, like: 'mycompany.myproduct.subproduct.version' """
def setAsApplication(myappid):
    """Register this process as an independent application on the Windows
    task bar so it gets its own icon.

    *myappid* is a unique identifier string such as
    'mycompany.myproduct.subproduct.version'.  On non-Windows platforms
    this is a no-op.
    """
    if os.name != 'nt':
        return
    import ctypes
    ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getBestTranslation(basedir, lang=None): """ Find inside basedir the best translation available. lang, if defined, should be a list of prefered languages. It will look for file in the form: - en-US.qm - en_US.qm - en.qm """
def getBestTranslation(basedir, lang=None):
    """
    Find inside basedir the best translation available.
    lang, if defined, should be a list of preferred languages.
    It will look for file in the form:
      - en-US.qm
      - en_US.qm
      - en.qm
    Returns a loaded QTranslator, or None if no file matched.
    """
    if not lang:
        # Fall back to the languages preferred by the system locale.
        lang = QtCore.QLocale.system().uiLanguages()
    for l in lang:
        # Try the hyphenated form first (e.g. 'en-US.qm').
        l = l.translate({ord('_'): '-'})
        f = os.path.join(basedir, l+'.qm')
        if os.path.isfile(f):
            break
        # Then the underscored form (e.g. 'en_US.qm').
        l = l.translate({ord('-'): '_'})
        f = os.path.join(basedir, l+'.qm')
        if os.path.isfile(f):
            break
        # Finally the bare language code (e.g. 'en.qm').
        l = l.split('_')[0]
        f = os.path.join(basedir, l+'.qm')
        if os.path.isfile(f):
            break
    else:
        # Loop ended without break: no candidate file matched.
        return None
    translator = QtCore.QTranslator()
    translator.load(f)
    return translator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def normalize(cls, name): """Return string in all lower case with spaces and question marks removed"""
def normalize(cls, name):
    """Return *name* lower-cased with spaces, hyphens, parentheses, and
    question marks removed."""
    # A single translate() pass deletes every unwanted character.
    return name.lower().translate({ord(ch): None for ch in ' -()?'})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def nein(x):
    """Three-valued logical negation ('nein' because 'not' is a keyword).

    Accepts a bool or a ThreeVL; raises TypeError for anything else.
    """
    if not isinstance(x, (bool, ThreeVL)):
        raise TypeError(type(x))
    if isinstance(x, bool):
        return not x
    flipped = {'t': 'f', 'f': 't', 'u': 'u'}
    return ThreeVL(flipped[x.value])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def compare(operator, a, b):
    """Three-valued comparison of *a* and *b* with the given operator.

    Returns a plain bool when both operands are known, or ThreeVL('u')
    when either operand is None.  This could be replaced by operator
    overloading, but == must return a bool for use with 'in'.
    """
    operations = {
        '=': lambda x, y: x == y,
        '!=': lambda x, y: x != y,
        '>': lambda x, y: x > y,
        '<': lambda x, y: x < y,
    }
    # Look the operator up first so an unsupported one raises KeyError
    # even when an operand is None (matching the original ordering).
    op = operations[operator]
    if None in (a, b):
        return ThreeVL('u')
    return op(a, b)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def login(request, signature): """ Automatically logs in a user based on a signed PK of a user object. The signature should be generated with the `login` management command. The signature will only work for 60 seconds. """
def login(request, signature):
    """
    Automatically logs in a user based on a signed PK of a user object.
    The signature should be generated with the `login` management command.
    The signature will only work for 60 seconds.
    """
    signer = TimestampSigner()
    try:
        # unsign() both validates the signature and enforces its max age.
        pk = signer.unsign(signature, max_age=MAX_AGE_OF_SIGNATURE_IN_SECONDS)
    except (BadSignature, SignatureExpired) as e:
        return HttpResponseForbidden("Can't log you in")
    user = get_object_or_404(get_user_model(), pk=pk)
    # we *have* to set the backend for this user, so we just use the first one
    user.backend = settings.AUTHENTICATION_BACKENDS[0]
    django_login(request, user)
    return HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cloak(request, pk=None): """ Masquerade as a particular user and redirect based on the REDIRECT_FIELD_NAME parameter, or the LOGIN_REDIRECT_URL. Callers can either pass the pk of the user in the URL itself, or as a POST param. """
def cloak(request, pk=None):
    """
    Masquerade as a particular user and redirect based on the
    REDIRECT_FIELD_NAME parameter, or the LOGIN_REDIRECT_URL.
    Callers can either pass the pk of the user in the URL itself,
    or as a POST param.
    """
    # The pk may come from a POST parameter or from the URL.
    pk = request.POST.get('pk', pk)
    if pk is None:
        # NOTE(review): this error path returns HTTP 200 -- a 400 status
        # would be more appropriate; confirm before changing callers.
        return HttpResponse("You need to pass a pk POST parameter, or include it in the URL")
    user = get_object_or_404(get_user_model(), pk=pk)
    if not can_cloak_as(request.user, user):
        return HttpResponseForbidden("You are not allowed to cloak as this user")
    # Record who we are masquerading as in the session.
    request.session[SESSION_USER_KEY] = user.pk
    # save the referer information so when uncloaking, we can redirect the user
    # back to where they were
    request.session[SESSION_REDIRECT_KEY] = request.META.get("HTTP_REFERER", settings.LOGIN_REDIRECT_URL)
    # redirect the cloaked user to the URL specified in the "next" parameter,
    # or to the default redirect URL
    return HttpResponseRedirect(request.POST.get(REDIRECT_FIELD_NAME, settings.LOGIN_REDIRECT_URL))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def select_template_from_string(arg): """ Select a template from a string, which can include multiple template paths separated by commas. """
def select_template_from_string(arg):
    """Resolve *arg* into a template.

    *arg* is either a single template path, or several paths separated
    by commas, in which case the first one that can be loaded is used.
    """
    if ',' not in arg:
        return loader.get_template(arg)
    candidates = [name.strip() for name in arg.split(',')]
    return loader.select_template(candidates)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_package_path(self): """Gets the path of a Python package"""
if not self.package: return [] if not hasattr(self, 'package_path'): m = __import__(self.package) parts = self.package.split('.')[1:] self.package_path = os.path.join(os.path.dirname(m.__file__), *parts) return [self.package_path]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_paths(self): """Return a list of paths to search for plugins in The list is searched in order."""
def _get_paths(self):
    """Return a list of paths to search for plugins in.

    The list is searched in order.
    """
    ret = []
    # The bundled library directory ships with this package.
    ret += ['%s/library/' % os.path.dirname(os.path.dirname(__file__))]
    # Directories registered at runtime come next.
    ret += self._extra_dirs
    # Then each base directory's plugin subdirectory (deduplicated
    # against what has been collected so far).
    for basedir in _basedirs:
        fullpath = os.path.join(basedir, self.subdir)
        if fullpath not in ret:
            ret.append(fullpath)
    # Configured search path (an os.pathsep-separated string) and the
    # Python package path come last.
    ret += self.config.split(os.pathsep)
    ret += self._get_package_path()
    return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_paths(self): """Returns a string suitable for printing of the search path"""
def print_paths(self):
    """Return the plugin search path as a single printable string,
    de-duplicated and joined with os.pathsep, preserving search order."""
    ordered = []
    seen = set()
    for path in self._get_paths():
        if path in seen:
            continue
        seen.add(path)
        ordered.append(path)
    return os.pathsep.join(ordered)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_plugin(self, name): """Find a plugin named name"""
def find_plugin(self, name):
    """Return the full path of the plugin called *name*, or None.

    Searches every directory from _get_paths() in order.  A '.py'
    suffix is appended unless this loader has no class_name (in which
    case the plugin is a bare directory/file name).
    """
    suffix = ".py" if self.class_name else ""
    filename = "%s%s" % (name, suffix)
    for directory in self._get_paths():
        candidate = os.path.join(directory, filename)
        if os.path.exists(candidate):
            return candidate
    return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_fieldsets(self, *args, **kwargs): """Re-order fields"""
def get_fieldsets(self, *args, **kwargs):
    """Return the admin fieldsets with selected event fields moved to
    the end of the first fieldset, in a fixed order."""
    result = super(EventAdmin, self).get_fieldsets(*args, **kwargs)
    result = list(result)
    fields = list(result[0][1]['fields'])
    # Move each named field to the end by removing and re-appending it.
    # NOTE(review): list.remove() raises ValueError if a field is absent
    # from the first fieldset -- assumes all listed fields are present.
    for name in ('content', 'start', 'end', 'repeat', 'repeat_until', \
            'external_link', 'calendars'):
        fields.remove(name)
        fields.append(name)
    result[0][1]['fields'] = tuple(fields)
    return tuple(result)