text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _txtinfo_to_python(self, data): """ Converts txtinfo format to python """
self._format = 'txtinfo' # find interesting section lines = data.split('\n') try: start = lines.index('Table: Topology') + 2 except ValueError: raise ParserError('Unrecognized format') topology_lines = [line for line in lines[start:] if line] # convert to python list parsed_lines = [] for line in topology_lines: values = line.split(' ') parsed_lines.append({ 'source': values[0], 'target': values[1], 'cost': float(values[4]) }) return parsed_lines
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_primary_address(self, mac_address, node_list): """ Uses the _get_aggregated_node_list structure to find the primary mac address associated to a secondary one, if none is found returns itself. """
for local_addresses in node_list: if mac_address in local_addresses: return local_addresses[0] return mac_address
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_aggregated_node_list(self, data): """ Returns list of main and secondary mac addresses. """
node_list = [] for node in data: local_addresses = [node['primary']] if 'secondary' in node: local_addresses += node['secondary'] node_list.append(local_addresses) return node_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse_alfred_vis(self, data): """ Converts a alfred-vis JSON object to a NetworkX Graph object which is then returned. Additionally checks for "source_vesion" to determine the batman-adv version. """
def _parse_alfred_vis(self, data):
    """Convert an alfred-vis JSON object to a NetworkX graph.

    Also reads the optional "source_version" key to detect the
    batman-adv version.

    :raises ParserError: when the "vis" key is missing
    """
    graph = self._init_graph()
    if 'source_version' in data:
        self.version = data['source_version']
    if 'vis' not in data:
        raise ParserError('Parse error, "vis" key not found')
    node_list = self._get_aggregated_node_list(data['vis'])
    # loop over topology section and create networkx graph
    for node in data['vis']:
        # add the node once, before iterating neighbors: the original
        # called add_node inside the neighbor loop, so nodes without
        # neighbors were silently dropped from the graph
        graph.add_node(node['primary'], **{
            'local_addresses': node.get('secondary', []),
            'clients': node.get('clients', [])
        })
        for neigh in node['neighbors']:
            primary_neigh = self._get_primary_address(neigh['neighbor'],
                                                      node_list)
            # networkx automatically ignores duplicated edges
            graph.add_edge(node['primary'], primary_neigh,
                           weight=float(neigh['metric']))
    return graph
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def json(self, dict=False, **kwargs): """ Outputs NetJSON format """
def json(self, dict=False, **kwargs):
    """Output NetJSON format."""
    # a parser that never produced a graph cannot serialize
    if not hasattr(self, 'graph'):
        raise NotImplementedError()
    graph = self.graph
    return _netjson_networkgraph(self.protocol, self.version,
                                 self.revision, self.metric,
                                 graph.nodes(data=True),
                                 graph.edges(data=True),
                                 dict, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def diff(old, new): """ Returns differences of two network topologies old and new in NetJSON NetworkGraph compatible format """
def diff(old, new):
    """Return differences of two network topologies (``old`` vs ``new``)
    in NetJSON NetworkGraph compatible format."""
    protocol, version = new.protocol, new.version
    revision, metric = new.revision, new.metric
    # calculate differences
    in_both = _find_unchanged(old.graph, new.graph)
    added_nodes, added_edges = _make_diff(old.graph, new.graph, in_both)
    removed_nodes, removed_edges = _make_diff(new.graph, old.graph, in_both)
    changed_edges = _find_changed(old.graph, new.graph, in_both)
    # build the three netjson sections, using None when nothing changed
    added = None
    if added_nodes.nodes() or added_edges.edges():
        added = _netjson_networkgraph(protocol, version, revision, metric,
                                      added_nodes.nodes(data=True),
                                      added_edges.edges(data=True),
                                      dict=True)
    removed = None
    if removed_nodes.nodes() or removed_edges.edges():
        removed = _netjson_networkgraph(protocol, version, revision, metric,
                                        removed_nodes.nodes(data=True),
                                        removed_edges.edges(data=True),
                                        dict=True)
    changed = None
    if changed_edges:
        changed = _netjson_networkgraph(protocol, version, revision, metric,
                                        [], changed_edges, dict=True)
    return OrderedDict((
        ('added', added),
        ('removed', removed),
        ('changed', changed),
    ))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_diff(old, new, both): """ calculates differences between topologies 'old' and 'new' returns a tuple with two network graph objects the first graph contains the added nodes, the secnod contains the added links """
def _make_diff(old, new, both):
    """Return what ``new`` adds with respect to ``old``.

    :param both: edges present in both topologies
    :return: tuple of graphs (added nodes, added edges)
    """
    # work on copies of new so the inputs are never tampered with
    diff_edges = new.copy()
    diff_edges.remove_edges_from([tuple(edge) for edge in both])
    # same operation with nodes
    diff_nodes = new.copy()
    shared_nodes = [node for node in new.nodes() if node in old.nodes()]
    diff_nodes.remove_nodes_from(shared_nodes)
    return diff_nodes, diff_edges
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _find_unchanged(old, new): """ returns edges that are in both old and new """
edges = [] old_edges = [set(edge) for edge in old.edges()] new_edges = [set(edge) for edge in new.edges()] for old_edge in old_edges: if old_edge in new_edges: edges.append(set(old_edge)) return edges
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _find_changed(old, new, both): """ returns links that have changed cost """
# create two list of sets of old and new edges including cost old_edges = [] for edge in old.edges(data=True): # skip links that are not in both if set((edge[0], edge[1])) not in both: continue # wrap cost in tuple so it will be recognizable cost = (edge[2]['weight'],) old_edges.append(set((edge[0], edge[1], cost))) new_edges = [] for edge in new.edges(data=True): # skip links that are not in both if set((edge[0], edge[1])) not in both: continue # wrap cost in tuple so it will be recognizable cost = (edge[2]['weight'],) new_edges.append(set((edge[0], edge[1], cost))) # find out which edge changed changed = [] for new_edge in new_edges: if new_edge not in old_edges: # new_edge is a set, convert it to list new_edge = list(new_edge) for item in new_edge: if isinstance(item, tuple): # unwrap cost from tuple and put it in a dict cost = {'weight': item[0]} new_edge.remove(item) changed.append((new_edge[0], new_edge[1], cost)) return changed
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, data): """ Converts a BMX6 b6m JSON to a NetworkX Graph object which is then returned. """
def parse(self, data):
    """Convert a BMX6 b6m JSON structure to a NetworkX graph."""
    graph = self._init_graph()
    if len(data) != 0:
        if 'links' not in data[0]:
            raise ParserError('Parse error, "links" key not found')
    # loop over the topology; the mean of tx/rx rates is the edge cost
    for node in data:
        for link in node['links']:
            mean_rate = (link['txRate'] + link['rxRate']) / 2.0
            graph.add_edge(node['name'], link['name'],
                           weight=mean_rate,
                           tx_rate=link['txRate'],
                           rx_rate=link['rxRate'])
    return graph
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, data): """ Converts a CNML structure to a NetworkX Graph object which is then returned. """
def parse(self, data):
    """Convert a CNML structure to a NetworkX graph.

    Only working nodes with working links are added.
    """
    graph = self._init_graph()
    for link in data.get_inner_links():
        # skip anything that is not an operational link
        if link.status != libcnml.libcnml.Status.WORKING:
            continue
        side_a, side_b = link.getLinkedInterfaces()
        graph.add_edge(side_a.ipv4, side_b.ipv4, weight=1)
    return graph
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, data): """ Converts a dict representing an OLSR 0.6.x topology to a NetworkX Graph object, which is then returned. Additionally checks for "config" data in order to determine version and revision. """
def parse(self, data):
    """Convert an OLSR 0.6.x topology dict to a NetworkX graph.

    Also inspects the optional "config" section in order to determine
    version and revision.
    """
    graph = self._init_graph()
    if 'topology' not in data:
        raise ParserError('Parse error, "topology" key not found')
    elif 'mid' not in data:
        raise ParserError('Parse error, "mid" key not found')
    # determine version and revision
    if 'config' in data:
        version_info = data['config']['olsrdVersion'].replace(' ', '').split('-')
        self.version = version_info[1]
        # keep only the git hash when present
        if 'hash_' in version_info[-1]:
            version_info[-1] = version_info[-1].split('hash_')[-1]
        self.revision = version_info[-1]
    # map each main address to its alias list
    alias_dict = {
        node['ipAddress']: [alias['ipAddress'] for alias in node['aliases']]
        for node in data['mid']
    }
    # loop over topology section and create networkx graph
    for link in data['topology']:
        try:
            source = link['lastHopIP']
            target = link['destinationIP']
            cost = link['tcEdgeCost']
            properties = {
                'link_quality': link['linkQuality'],
                'neighbor_link_quality': link['neighborLinkQuality'],
            }
        except KeyError as e:
            raise ParserError('Parse error, "%s" key not found' % e)
        # attach local_addresses to nodes that have aliases
        for node in (source, target):
            if node in alias_dict:
                graph.add_node(node, local_addresses=alias_dict[node])
        # skip links with infinite cost
        if cost == float('inf'):
            continue
        # original olsrd cost (jsoninfo multiplies by 1024)
        graph.add_edge(source, target,
                       weight=float(cost) / 1024.0, **properties)
    return graph
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _txtinfo_to_jsoninfo(self, data): """ converts olsr 1 txtinfo format to jsoninfo """
# replace INFINITE with inf, which is convertible to float data = data.replace('INFINITE', 'inf') # find interesting section lines = data.split('\n') # process links in topology section try: start = lines.index('Table: Topology') + 2 end = lines[start:].index('') + start except ValueError: raise ParserError('Unrecognized format') topology_lines = lines[start:end] # convert topology section to jsoninfo format topology = [] for line in topology_lines: values = line.split('\t') topology.append({ 'destinationIP': values[0], 'lastHopIP': values[1], 'linkQuality': float(values[2]), 'neighborLinkQuality': float(values[3]), 'tcEdgeCost': float(values[4]) * 1024.0 }) # process alias (MID) section try: start = lines.index('Table: MID') + 2 end = lines[start:].index('') + start except ValueError: raise ParserError('Unrecognized format') mid_lines = lines[start:end] # convert mid section to jsoninfo format mid = [] for line in mid_lines: values = line.split('\t') node = values[0] aliases = values[1].split(';') mid.append({ 'ipAddress': node, 'aliases': [{'ipAddress': alias} for alias in aliases] }) return { 'topology': topology, 'mid': mid }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_for_launchpad(old_vendor, name, urls): """Check if the project is hosted on launchpad. :param name: str, name of the project :param urls: set, urls to check. :return: the name of the project on launchpad, or an empty string. """
def check_for_launchpad(old_vendor, name, urls):
    """Check if the project is hosted on launchpad.

    :param old_vendor: str, vendor the project currently uses
    :param name: str, name of the project
    :param urls: set, urls to check.
    :return: the name of the project on launchpad, or an empty string.
    """
    # XXX This might work for other starting vendors
    # XXX but I didn't check. For now only allow
    # XXX pypi -> launchpad.
    if old_vendor != "pypi":
        return ''
    pattern = re.compile(r"https?://launchpad.net/([\w.\-]+)")
    for url in urls:
        match = pattern.match(url)
        if match:
            return match.groups()[0]
    return ''
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_switch_vendor(old_vendor, name, urls, _depth=0): """Check if the project should switch vendors. E.g project pushed on pypi, but changelog on launchpad. :param name: str, name of the project :param urls: set, urls to check. :return: tuple, (str(new vendor name), str(new project name)) """
def check_switch_vendor(old_vendor, name, urls, _depth=0):
    """Check if the project should switch vendors.

    E.g project pushed on pypi, but changelog on launchpad.

    :param old_vendor: str, vendor the project currently uses
    :param name: str, name of the project
    :param urls: set, urls to check.
    :return: tuple, (str(new vendor name), str(new project name))
    """
    if _depth > 3:
        # Protect against recursive vendor switches. Return the same
        # two-element tuple as every other exit path: the original
        # returned a bare "" here, which broke callers that unpack
        # the documented (vendor, name) tuple.
        return "", ""
    new_name = check_for_launchpad(old_vendor, name, urls)
    if new_name:
        return "launchpad", new_name
    return "", ""
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_pil(func): """ PIL module checking decorator. """
def check_pil(func):
    """Decorator that makes ``func`` require the PIL module.

    When PIL is unavailable the wrapped call returns 'none' and,
    when the 'warn' option is set, emits a warning.
    """
    def __wrapper(*args, **kwargs):
        if not Image:
            root = kwargs.get('root')
            if root and root.get_opt('warn'):
                warn("Images manipulation require PIL")
            return 'none'
        return func(*args, **kwargs)
    return __wrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _mix(color1, color2, weight=0.5, **kwargs): """ Mixes two colors together. """
def _mix(color1, color2, weight=0.5, **kwargs):
    """Mix two colors, ``weight`` being the part taken from color1."""
    c1, c2 = color1.value, color2.value
    # clamp weight into [0, 1]
    weight = float(weight)
    p = 0.0 if weight < 0 else (1.0 if weight > 1 else weight)
    w = p * 2 - 1
    a = c1[3] - c2[3]
    # alpha-aware weighting of the rgb channels
    if w * a == -1:
        scaled = w
    else:
        scaled = (w + a) / (1 + w * a)
    w1 = (scaled + 1) / 2.0
    w2 = 1 - w1
    q = (w1, w1, w1, p)
    r = (w2, w2, w2, 1 - p)
    return ColorValue([c1[i] * q[i] + c2[i] * r[i] for i in range(4)])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _hsla(h, s, l, a, **kwargs): """ HSL with alpha channel color value. """
def _hsla(h, s, l, a, **kwargs):
    """Build a color value from HSL components plus alpha channel."""
    rgb = colorsys.hls_to_rgb(float(h), float(l), float(s))
    channels = [component * 255.0 for component in rgb]
    channels.append(float(a))
    return ColorValue(channels)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _hue(color, **kwargs): """ Get hue value of HSL color. """
def _hue(color, **kwargs):
    """Return the hue of an HSL color, in degrees."""
    normalized = [channel / 255.0 for channel in color.value[:3]]
    hue = colorsys.rgb_to_hls(*normalized)[0]
    return NumberValue(hue * 360.0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _lightness(color, **kwargs): """ Get lightness value of HSL color. """
def _lightness(color, **kwargs):
    """Return the HSL lightness of ``color`` as a percentage."""
    normalized = [channel / 255.0 for channel in color.value[:3]]
    lightness = colorsys.rgb_to_hls(*normalized)[1]
    return NumberValue((lightness * 100, '%'))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _saturation(color, **kwargs): """ Get saturation value of HSL color. """
def _saturation(color, **kwargs):
    """Return the HSL saturation of ``color`` as a percentage."""
    normalized = [channel / 255.0 for channel in color.value[:3]]
    saturation = colorsys.rgb_to_hls(*normalized)[2]
    return NumberValue((saturation * 100, '%'))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load(path, cache=None, precache=False): """ Parse from file. """
def load(path, cache=None, precache=False):
    """Parse a scss stylesheet from a file path."""
    return Stylesheet(cache).load(path, precache=precache)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, target): """ Parse nested rulesets and save it in cache. """
def parse(self, target):
    """Parse nested rulesets and register them in the cache."""
    if isinstance(target, ContentNode):
        if target.name:
            # nested ruleset: prefix our selector with the parent's
            self.parent = target
            self.name.parse(self)
            self.name += target.name
        target.ruleset.append(self)
    # index the ruleset by the first token of its selector
    self.root.cache['rset'][str(self.name).split()[0]].add(self)
    super(Ruleset, self).parse(target)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, target): """ Parse nested declaration. """
def parse(self, target):
    """Parse a nested declaration."""
    if not isinstance(target, Node):
        # wrap naked targets so we always parse against a ContentNode
        parent = ContentNode(None, None, [])
        parent.parse(target)
        target = parent
    super(Declaration, self).parse(target)
    self.name = str(self.data[0])
    # fold nested declarations into one dash-separated property name
    while isinstance(target, Declaration):
        self.name = '-'.join((str(target.data[0]), self.name))
        target = target.parent
    self.expr = ' '.join(
        str(node) for node in self.data[2:]
        if not isinstance(node, Declaration))
    if self.expr:
        target.declareset.append(self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(self, target): """ Update root and parent context. """
def parse(self, target):
    """Update root and parent context with this variable definition."""
    super(VarDefinition, self).parse(target)
    if isinstance(self.parent, ParseNode):
        self.parent.ctx.update({self.name: self.expression.value})
    self.root.set_var(self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_var(self, vardef): """ Set variable to global stylesheet context. """
def set_var(self, vardef):
    """Set a variable in the global stylesheet context.

    A ``!default`` definition never overrides an existing value.
    """
    ctx = self.cache['ctx']
    if vardef.default and ctx.get(vardef.name):
        return
    ctx[vardef.name] = vardef.expression.value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_opt(self, name, value): """ Set option. """
def set_opt(self, name, value):
    """Set option ``name``; 'compress' also toggles output delimiters."""
    self.cache['opts'][name] = value
    if name == 'compress':
        if value:
            self.cache['delims'] = ('', '', '')
        else:
            self.cache['delims'] = self.def_delims
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update(self, cache): """ Update self cache from other. """
def update(self, cache):
    """Update self cache from another cache.

    Context variables are copied with an explicit loop: the original
    ``map(self.set_var, ...)`` builds a lazy iterator on Python 3 that
    is never consumed, so no variable was actually transferred.
    """
    self.cache['delims'] = cache.get('delims')
    self.cache['opts'].update(cache.get('opts'))
    self.cache['rset'].update(cache.get('rset'))
    self.cache['mix'].update(cache.get('mix'))
    for vardef in cache['ctx'].values():
        self.set_var(vardef)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scan(src): """ Scan scss from string and return nodes. """
def scan(src):
    """Scan scss source from a string and return parsed nodes.

    On a parse error, prints a caret diagnostic to stderr and exits.
    """
    assert isinstance(src, (unicode_, bytes_))
    try:
        return STYLESHEET.parseString(src, parseAll=True)
    except ParseBaseException:
        err = sys.exc_info()[1]
        # point at the offending column in the failing line
        print(err.line, file=sys.stderr)
        print(" " * (err.column - 1) + "^", file=sys.stderr)
        print(err, file=sys.stderr)
        sys.exit(1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def loads(self, src): """ Compile css from scss string. """
def loads(self, src):
    """Compile css from a scss string."""
    assert isinstance(src, (unicode_, bytes_))
    nodes = self.scan(src.strip())
    self.parse(nodes)
    return ''.join(str(node) for node in nodes)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load(self, f, precache=None): """ Compile scss from file. File is string path of file object. """
def load(self, f, precache=None):
    """Compile scss from a file.

    :param f: string path or an open file object
    :param precache: when true, pickle the parse tree next to the
        source file and reuse it while it is newer than the source

    All file handles opened here are now closed (the original leaked
    the source file and both cache file handles).
    """
    precache = precache or self.get_opt('cache') or False
    nodes = None
    if isinstance(f, file_):
        path = os.path.abspath(f.name)
        opened_here = False
    else:
        path = os.path.abspath(f)
        f = open(f)
        opened_here = True
    try:
        cache_path = os.path.splitext(path)[0] + '.ccss'
        # reuse the pickled parse tree when it is newer than the source
        if precache and os.path.exists(cache_path):
            if os.path.getmtime(cache_path) > os.path.getmtime(path):
                with open(cache_path, 'rb') as cached:
                    nodes = pickle.loads(cached.read())
        if not nodes:
            src = f.read()
            nodes = self.scan(src.strip())
            if precache:
                with open(cache_path, 'wb') as out:
                    pickle.dump(nodes, out, protocol=1)
    finally:
        # only close handles we opened; a caller's file object is theirs
        if opened_here:
            f.close()
    self.parse(nodes)
    return ''.join(map(str, nodes))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_config(filename, filepath=''): """ Loads config file Parameters filename: str Filename of config file (incl. file extension filepath: str Absolute path to directory of desired config file """
def load_config(filename, filepath=''):
    """ Loads config file

    Parameters
        filename: str
            Filename of config file (incl. file extension)
        filepath: str
            Absolute path to directory of desired config file
    """
    global _loaded
    FILE = path.join(filepath, filename)
    # ConfigParser.read() returns the list of files successfully parsed
    # and does NOT raise for a missing file, so the original bare
    # try/except never detected that case and always marked the config
    # as loaded. A malformed file (which does raise) is still treated
    # as "not found" to preserve the best-effort behavior.
    try:
        parsed = cfg.read(FILE)
    except Exception:
        parsed = []
    if parsed:
        _loaded = True
    else:
        print("configfile not found.")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def emoji(string):
    '''emot.emoji is use to detect emoji from text

        >>> text = "I love python 👨 :-)"
        >>> emot.emoji(text)
        >>> {'value': ['👨'], 'mean': [':man:'], 'location': [[14, 14]], 'flag': True}
    '''
    values = []
    means = []
    locations = []
    try:
        for pos, char in enumerate(str(string)):
            if char in emo_unicode.UNICODE_EMO:
                values.append(char)
                means.append(emo_unicode.UNICODE_EMO[char])
                locations.append([pos, pos])
    except Exception:
        # report failure instead of crashing: the original initialized
        # __entities as a dict and then called .append() on it here,
        # raising AttributeError on every error path
        return {'flag': False}
    return {
        'value': values,
        'mean': means,
        'location': locations,
        'flag': bool(values),
    }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def emoticons(string):
    '''emot.emoticons is use to detect emoticons from text

        >>> text = "I love python 👨 :-)"
        >>> emot.emoticons(text)
        >>> {'value': [':-)'], 'location': [[16, 19]], 'mean': ['Happy face smiley'], 'flag': True}
    '''
    try:
        # re.escape each emoticon: most contain regex metacharacters
        # (")", "(", "*", ...); the original joined the raw keys, which
        # produced an invalid/incorrect pattern
        pattern = u'(' + u'|'.join(
            re.escape(k) for k in emo_unicode.EMOTICONS) + u')'
        values = []
        locations = []
        for match in re.finditer(pattern, str(string)):
            values.append(match.group().strip())
            locations.append([match.start(), match.end()])
        means = [emo_unicode.EMOTICONS_EMO[value] for value in values]
        return {
            'value': values,
            'location': locations,
            'mean': means,
            'flag': bool(values),
        }
    except Exception:
        # preserve the original failure contract: a one-element list
        return [{'flag': False}]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_app(self, app, add_context_processor=True): """ Initialize with app configuration """
def init_app(self, app, add_context_processor=True):
    """Initialize the extension with app configuration."""
    # only hook in when no login manager has been registered yet
    if not hasattr(app, 'login_manager'):
        self.login_manager.init_app(
            app, add_context_processor=add_context_processor)
    # clear flashed messages since we redirect to auth immediately
    self.login_manager.login_message = None
    self.login_manager.needs_refresh_message = None
    # set default unauthorized callback
    self.login_manager.unauthorized_handler(self.unauthorized_callback)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def login_url(self, params=None, **kwargs): """ Return login url with params encoded in state Available Google auth server params: response_type: code, token prompt: none, select_account, consent approval_prompt: force, auto access_type: online, offline scopes: string (separated with commas) or list redirect_uri: string login_hint: string """
def login_url(self, params=None, **kwargs):
    """Return the Google login url with ``params`` encoded in state.

    Available Google auth server params:
        response_type: code, token
        prompt: none, select_account, consent
        approval_prompt: force, auto
        access_type: online, offline
        scopes: string (separated with commas) or list
        redirect_uri: string
        login_hint: string
    """
    kwargs.setdefault('response_type', 'code')
    kwargs.setdefault('access_type', 'online')
    if 'prompt' not in kwargs:
        kwargs.setdefault('approval_prompt', 'auto')
    scopes = kwargs.pop('scopes', self.scopes.split(','))
    # the userinfo profile scope is always required
    if USERINFO_PROFILE_SCOPE not in scopes:
        scopes.append(USERINFO_PROFILE_SCOPE)
    redirect_uri = kwargs.pop('redirect_uri', self.redirect_uri)
    state = self.sign_params(params or {})
    query = dict(client_id=self.client_id,
                 scope=' '.join(scopes),
                 redirect_uri=redirect_uri,
                 state=state,
                 **kwargs)
    return GOOGLE_OAUTH2_AUTH_URL + '?' + urlencode(query)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unauthorized_callback(self): """ Redirect to login url with next param set as request.url """
def unauthorized_callback(self):
    """Redirect to the login url, carrying request.url as `next`."""
    target = self.login_url(params=dict(next=request.url))
    return redirect(target)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_access_token(self, refresh_token): """ Use a refresh token to obtain a new access token """
def get_access_token(self, refresh_token):
    """Exchange a refresh token for a new access token.

    Returns None when Google reports an error or an empty response.
    """
    payload = dict(
        refresh_token=refresh_token,
        grant_type='refresh_token',
        client_id=self.client_id,
        client_secret=self.client_secret,
    )
    token = requests.post(GOOGLE_OAUTH2_TOKEN_URL, data=payload).json()
    if not token or token.get('error'):
        return None
    return token
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def oauth2callback(self, view_func): """ Decorator for OAuth2 callback. Calls `GoogleLogin.login` then passes results to `view_func`. """
def oauth2callback(self, view_func):
    """Decorator for the OAuth2 callback endpoint.

    Verifies the signed state, performs the code/token exchange and
    passes the results on to ``view_func``.
    """
    @wraps(view_func)
    def decorated(*args, **kwargs):
        params = {}
        # check state signature
        if 'state' in request.args:
            params.update(**self.parse_state(request.args.get('state')))
            if params.pop('sig', None) != make_secure_token(**params):
                return self.login_manager.unauthorized()
        code = request.args.get('code')
        if code:
            # web server flow
            token = self.exchange_code(
                code,
                url_for(
                    request.endpoint,
                    _external=True,
                    _scheme=self.redirect_scheme,
                ),
            )
            userinfo = self.get_userinfo(token['access_token'])
            params.update(token=token, userinfo=userinfo)
        elif params:
            # browser flow with a valid state: forward query args
            params.update(dict(request.args.items()))
        else:
            # browser flow: move the fragment into the query string
            return '''
            <script>
                window.onload = function() {
                    location.href = '?' + window.location.hash.substr(1);
                };
            </script>
            '''
        return view_func(**params)
    return decorated
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def complete(text, state): """ Auto complete scss constructions in interactive mode. """
def complete(text, state):
    """Readline completer for scss constructions in interactive mode.

    Returns the ``state``-th command starting with ``text``, or None.
    """
    matches = (cmd for cmd in COMMANDS if cmd.startswith(text))
    for index, cmd in enumerate(matches):
        if index == state:
            return cmd
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_args(self): """ Validates arguments. """
def validate_args(self):
    """Validate constructor arguments, raising on invalid values."""
    from ..mixins import ModelMixin

    required = ("instance", "decider", "identifier", "fields",
                "default_language")
    for arg in required:
        if getattr(self, arg) is None:
            raise AttributeError("%s must not be None" % arg)
    if not isinstance(self.instance, (ModelMixin,)):
        raise ImproperlyConfigured('"instance" argument must be a Linguist model')
    if not issubclass(self.decider, (models.Model,)):
        raise ImproperlyConfigured(
            '"decider" argument must be a valid Django model'
        )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def active_language(self): """ Returns active language. """
def active_language(self):
    """Return the currently active language code."""
    # a language explicitly activated on this instance wins
    if self._language is not None:
        return self._language
    # fall back to the current site language when it is supported
    current = utils.get_language()
    if current in self.supported_languages:
        return current
    # otherwise use the default language descriptor
    return self.default_language
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def translation_instances(self): """ Returns translation instances. """
def translation_instances(self):
    """Return all cached translation instances."""
    instances = []
    for _field, translations in six.iteritems(self.instance._linguist_translations):
        instances.extend(translations.values())
    return instances
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_cache( self, instance, translation=None, language=None, field_name=None, field_value=None, ): """ Returns translation from cache. """
def get_cache(self, instance, translation=None, language=None,
              field_name=None, field_value=None):
    """Return a translation from the cache, building one on a miss."""
    is_new = bool(instance.pk is None)
    # look for an already cached object and backfill missing metadata
    try:
        cached_obj = instance._linguist_translations[field_name][language]
        if not cached_obj.field_name:
            cached_obj.field_name = field_name
        if not cached_obj.language:
            cached_obj.language = language
        if not cached_obj.identifier:
            cached_obj.identifier = self.instance.linguist_identifier
    except KeyError:
        cached_obj = None
    # persisted instances may have a translation stored in the db
    if not is_new and translation is None:
        try:
            translation = self.decider.objects.get(
                identifier=self.instance.linguist_identifier,
                object_id=self.instance.pk,
                language=language,
                field_name=field_name,
            )
        except self.decider.DoesNotExist:
            pass
    if cached_obj is None:
        if translation is not None:
            cached_obj = CachedTranslation.from_object(translation)
        else:
            cached_obj = CachedTranslation(
                instance=instance,
                language=language,
                field_name=field_name,
                field_value=field_value,
            )
        instance._linguist_translations[cached_obj.field_name][
            cached_obj.language
        ] = cached_obj
    return cached_obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_cache( self, instance=None, translation=None, language=None, field_name=None, field_value=None, ): """ Add a new translation into the cache. """
def set_cache(self, instance=None, translation=None, language=None,
              field_name=None, field_value=None):
    """Add a new translation into the cache."""
    # fast path: cache a known translation object directly
    if instance is not None and translation is not None:
        cached_obj = CachedTranslation.from_object(translation)
        instance._linguist_translations[translation.field_name][
            translation.language
        ] = cached_obj
        return cached_obj
    if instance is None:
        instance = self.instance
    cached_obj = self.get_cache(
        instance,
        translation=translation,
        field_value=field_value,
        language=language,
        field_name=field_name,
    )
    # clearing a previously set value marks the translation as deleted
    if field_value is None and cached_obj.field_value:
        cached_obj.deleted = True
    if field_value != cached_obj.field_value:
        cached_obj.has_changed = True
        cached_obj.field_value = field_value
    return cached_obj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _filter_or_exclude(self, negate, *args, **kwargs): """ Overrides default behavior to handle linguist fields. """
from .models import Translation new_args = self.get_cleaned_args(args) new_kwargs = self.get_cleaned_kwargs(kwargs) translation_args = self.get_translation_args(args) translation_kwargs = self.get_translation_kwargs(kwargs) has_linguist_args = self.has_linguist_args(args) has_linguist_kwargs = self.has_linguist_kwargs(kwargs) if translation_args or translation_kwargs: ids = list( set( Translation.objects.filter( *translation_args, **translation_kwargs ).values_list("object_id", flat=True) ) ) if ids: new_kwargs["id__in"] = ids has_kwargs = has_linguist_kwargs and not (new_kwargs or new_args) has_args = has_linguist_args and not (new_args or new_kwargs) # No translations but we looked for translations? # Returns empty queryset. if has_kwargs or has_args: return self._clone().none() return super(QuerySetMixin, self)._filter_or_exclude( negate, *new_args, **new_kwargs )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def has_linguist_kwargs(self, kwargs): """ Parses the given kwargs and returns True if they contain linguist lookups. """
for k in kwargs: if self.is_linguist_lookup(k): return True return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def has_linguist_args(self, args): """ Parses the given args and returns True if they contain linguist lookups. """
linguist_args = [] for arg in args: condition = self._get_linguist_condition(arg) if condition: linguist_args.append(condition) return bool(linguist_args)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_translation_args(self, args): """ Returns linguist args from model args. """
translation_args = [] for arg in args: condition = self._get_linguist_condition(arg, transform=True) if condition: translation_args.append(condition) return translation_args
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_linguist_lookup(self, lookup): """ Returns true if the given lookup is a valid linguist lookup. """
field = utils.get_field_name_from_lookup(lookup) # To keep default behavior with "FieldError: Cannot resolve keyword". if ( field not in self.concrete_field_names and field in self.linguist_field_names ): return True return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_linguist_condition(self, condition, reverse=False, transform=False): """ Parses Q tree and returns linguist lookups or model lookups if reverse is True. """
# We deal with a node if isinstance(condition, Q): children = [] for child in condition.children: parsed = self._get_linguist_condition( condition=child, reverse=reverse, transform=transform ) if parsed is not None: if (isinstance(parsed, Q) and parsed.children) or isinstance( parsed, tuple ): children.append(parsed) new_condition = copy.deepcopy(condition) new_condition.children = children return new_condition # We are dealing with a lookup ('field', 'value'). lookup, value = condition is_linguist = self.is_linguist_lookup(lookup) if transform and is_linguist: return Q( **utils.get_translation_lookup( self.model._linguist.identifier, lookup, value ) ) if (reverse and not is_linguist) or (not reverse and is_linguist): return condition
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_cleaned_args(self, args): """ Returns positional arguments for related model query. """
if not args: return args cleaned_args = [] for arg in args: condition = self._get_linguist_condition(arg, True) if condition: cleaned_args.append(condition) return cleaned_args
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_cleaned_kwargs(self, kwargs): """ Returns concrete field lookups. """
cleaned_kwargs = kwargs.copy() if kwargs is not None: for k in kwargs: if self.is_linguist_lookup(k): del cleaned_kwargs[k] return cleaned_kwargs
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def with_translations(self, **kwargs): """ Prefetches translations. Takes three optional keyword arguments: * ``field_names``: ``field_name`` values for SELECT IN * ``languages``: ``language`` values for SELECT IN * ``chunks_length``: fetches IDs by chunk """
force = kwargs.pop("force", False) if self._prefetch_translations_done and force is False: return self self._prefetched_translations_cache = utils.get_grouped_translations( self, **kwargs ) self._prefetch_translations_done = True return self._clone()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def available_languages(self): """ Returns available languages. """
from .models import Translation return ( Translation.objects.filter( identifier=self.linguist_identifier, object_id=self.pk ) .values_list("language", flat=True) .distinct() .order_by("language") )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_translations(self, language=None): """ Deletes related translations. """
from .models import Translation return Translation.objects.delete_translations(obj=self, language=language)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def override_language(self, language): """ Context manager to override the instance language. """
previous_language = self._linguist.language self._linguist.language = language yield self._linguist.language = previous_language
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate_meta(meta): """ Validates Linguist Meta attribute. """
if not isinstance(meta, (dict,)): raise TypeError('Model Meta "linguist" must be a dict') required_keys = ("identifier", "fields") for key in required_keys: if key not in meta: raise KeyError('Model Meta "linguist" dict requires %s to be defined', key) if not isinstance(meta["fields"], (list, tuple)): raise ImproperlyConfigured( "Linguist Meta's fields attribute must be a list or tuple" )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def default_value_setter(field): """ When setting to the name of the field itself, the value in the current language will be set. """
def default_value_func_setter(self, value): localized_field = utils.build_localized_field_name( field, self._linguist.active_language ) setattr(self, localized_field, value) return default_value_func_setter
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def field_factory(base_class): """ Takes a field base class and wrap it with ``TranslationField`` class. """
from .fields import TranslationField class TranslationFieldField(TranslationField, base_class): pass TranslationFieldField.__name__ = "Translation%s" % base_class.__name__ return TranslationFieldField
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_translation_field(translated_field, language): """ Takes the original field, a given language, a decider model and return a Field class for model. """
cls_name = translated_field.__class__.__name__ if not isinstance(translated_field, tuple(SUPPORTED_FIELDS.keys())): raise ImproperlyConfigured("%s is not supported by Linguist." % cls_name) translation_class = field_factory(translated_field.__class__) kwargs = get_translation_class_kwargs(translated_field.__class__) return translation_class( translated_field=translated_field, language=language, **kwargs )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def connect(self):
        '''
        Connect to the drone.

        :raises RuntimeError: if the drone is connected or closed already.
        '''
        name = self.__class__.__name__
        if self.connected:
            raise RuntimeError('{} is connected already'.format(name))
        if self.closed:
            raise RuntimeError('{} is closed already'.format(name))
        self.connected = True
        self._connect()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def close(self):
        '''
        Exit all threads and disconnect the drone.

        Calling this on a drone that has never been connected, or that has
        already been closed, is a no-op.
        '''
        if not self.connected or self.closed:
            return
        self.closed = True
        self._close()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def _set_flags(self, **flags): ''' Set the flags of this argument. Example: ``int_param._set_flags(a=1, b=2, c=4, d=8)`` ''' self._flags = enum.IntEnum('_flags', flags) self.__dict__.update(self._flags.__members__) self._patch_flag_doc()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_translations(sender, instance, **kwargs): """ Deletes related instance's translations when instance is deleted. """
if issubclass(sender, (ModelMixin,)): instance._linguist.decider.objects.filter( identifier=instance.linguist_identifier, object_id=instance.pk ).delete()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def draw_tree(node, child_iter=lambda n: n.children, text_str=str): """Support asciitree 0.2 API. This function solely exist to not break old code (using asciitree 0.2). Its use is deprecated."""
return LeftAligned(traverse=Traversal(get_text=text_str, get_children=child_iter), draw=LegacyStyle())(node)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_language(): """ Returns an active language code that is guaranteed to be in settings.SUPPORTED_LANGUAGES. """
lang = _get_language() if not lang: return get_fallback_language() langs = [l[0] for l in settings.SUPPORTED_LANGUAGES] if lang not in langs and "-" in lang: lang = lang.split("-")[0] if lang in langs: return lang return settings.DEFAULT_LANGUAGE
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def activate_language(instances, language): """ Activates the given language for the given instances. """
language = ( language if language in get_supported_languages() else get_fallback_language() ) for instance in instances: instance.activate_language(language)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_class(class_path, setting_name=None): """ Loads a class given a class_path. The setting value may be a string or a tuple. The setting_name parameter is only there for pretty error output, and therefore is optional. """
if not isinstance(class_path, six.string_types): try: class_path, app_label = class_path except: if setting_name: raise exceptions.ImproperlyConfigured( CLASS_PATH_ERROR % (setting_name, setting_name) ) else: raise exceptions.ImproperlyConfigured( CLASS_PATH_ERROR % ("this setting", "It") ) try: class_module, class_name = class_path.rsplit(".", 1) except ValueError: if setting_name: txt = "%s isn't a valid module. Check your %s setting" % ( class_path, setting_name, ) else: txt = "%s isn't a valid module." % class_path raise exceptions.ImproperlyConfigured(txt) try: mod = import_module(class_module) except ImportError as e: if setting_name: txt = 'Error importing backend %s: "%s". Check your %s setting' % ( class_module, e, setting_name, ) else: txt = 'Error importing backend %s: "%s".' % (class_module, e) raise exceptions.ImproperlyConfigured(txt) try: clazz = getattr(mod, class_name) except AttributeError: if setting_name: txt = ( 'Backend module "%s" does not define a "%s" class. Check' " your %s setting" % (class_module, class_name, setting_name) ) else: txt = 'Backend module "%s" does not define a "%s" class.' % ( class_module, class_name, ) raise exceptions.ImproperlyConfigured(txt) return clazz
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_translation_lookup(identifier, field, value): """ Mapper that takes a language field, its value and returns the related lookup for Translation model. """
# Split by transformers parts = field.split("__") # Store transformers transformers = parts[1:] if len(parts) > 1 else None # defaults to "title" and default language field_name = parts[0] language = get_fallback_language() name_parts = parts[0].split("_") if len(name_parts) > 1: supported_languages = get_supported_languages() last_part = name_parts[-1] if last_part in supported_languages: # title_with_underscore_fr? field_name = "_".join(name_parts[:-1]) language = last_part else: # title_with_underscore? # Let's use default language field_name = "_".join(name_parts) value_lookup = ( "field_value" if transformers is None else "field_value__%s" % "__".join(transformers) ) lookup = {"field_name": field_name, "identifier": identifier, "language": language} lookup[value_lookup] = value return lookup
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_grouped_translations(instances, **kwargs): """ Takes instances and returns grouped translations ready to be set in cache. """
grouped_translations = collections.defaultdict(list) if not instances: return grouped_translations if not isinstance(instances, collections.Iterable): instances = [instances] if isinstance(instances, QuerySet): model = instances.model else: model = instances[0]._meta.model instances_ids = [] for instance in instances: instances_ids.append(instance.pk) if instance._meta.model != model: raise Exception( "You cannot use different model instances, only one authorized." ) from .models import Translation from .mixins import ModelMixin decider = model._meta.linguist.get("decider", Translation) identifier = model._meta.linguist.get("identifier", None) chunks_length = kwargs.get("chunks_length", None) populate_missing = kwargs.get("populate_missing", True) if identifier is None: raise Exception('You must define Linguist "identifier" meta option') lookup = dict(identifier=identifier) for kwarg in ("field_names", "languages"): value = kwargs.get(kwarg, None) if value is not None: if not isinstance(value, (list, tuple)): value = [value] lookup["%s__in" % kwarg[:-1]] = value if chunks_length is not None: translations_qs = [] for ids in utils.chunks(instances_ids, chunks_length): ids_lookup = copy.copy(lookup) ids_lookup["object_id__in"] = ids translations_qs.append(decider.objects.filter(**ids_lookup)) translations = itertools.chain.from_iterable(translations_qs) else: lookup["object_id__in"] = instances_ids translations = decider.objects.filter(**lookup) for translation in translations: grouped_translations[translation.object_id].append(translation) return grouped_translations
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_free_udp_port():
    ''' Get a free UDP port.

        Note this is vulnerable to race conditions: the port may be taken
        by someone else between the socket being closed here and the caller
        binding it.
    '''
    import socket
    # Bug fix: use SOCK_DGRAM for UDP.  The previous SOCK_STREAM reserved a
    # TCP port, so the returned port could still be busy for UDP.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # Port 0 asks the OS to pick any free port.
    sock.bind(('localhost', 0))
    addr = sock.getsockname()
    sock.close()
    return addr[1]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_available_languages(self, obj): """ Returns available languages for current object. """
return obj.available_languages if obj is not None else self.model.objects.none()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def languages_column(self, obj): """ Adds languages columns. """
languages = self.get_available_languages(obj) return '<span class="available-languages">{0}</span>'.format( " ".join(languages) )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prefetch_translations(instances, **kwargs): """ Prefetches translations for the given instances. Can be useful for a list of instances. """
from .mixins import ModelMixin if not isinstance(instances, collections.Iterable): instances = [instances] populate_missing = kwargs.get("populate_missing", True) grouped_translations = utils.get_grouped_translations(instances, **kwargs) # In the case of no translations objects if not grouped_translations and populate_missing: for instance in instances: instance.populate_missing_translations() for instance in instances: if ( issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations ): for translation in grouped_translations[instance.pk]: instance._linguist.set_cache(instance=instance, translation=translation) if populate_missing: instance.populate_missing_translations()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
        def get_translations(self, obj, language=None):
        """ Shortcut method to retrieve translations for a given object. """
lookup = {"identifier": obj.linguist_identifier, "object_id": obj.pk} if language is not None: lookup["language"] = language return self.get_queryset().filter(**lookup)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def takeoff(self):
        '''
        Sends the takeoff command.
        '''
        # REF command with the "start" input flag set — presumably this is
        # what requests takeoff in the drone's AT*REF protocol.
        # NOTE(review): confirm flag semantics against the protocol docs.
        self.send(at.REF(at.REF.input.start))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def emergency(self):
        '''
        Sends the emergency command.
        '''
        # REF command with the "select" input flag set — presumably this
        # toggles the emergency state in the drone's AT*REF protocol.
        # NOTE(review): confirm flag semantics against the protocol docs.
        self.send(at.REF(at.REF.input.select))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def move(
            self, *,
            forward=0, backward=0, left=0, right=0, up=0, down=0, cw=0, ccw=0):
        '''
        Move the drone.

        Each keyword argument is a speed factor; opposite directions cancel
        each other out.

        To move the drone forward at 0.8x speed:

        >>> drone.move(forward=0.8)

        To move the drone right at 0.5x speed and upward at full speed:

        >>> drone.move(right=0.5, up=1)

        To rotate clockwise at 0.7x speed:

        >>> drone.move(cw=0.7)

        :param forward: speed for moving forward
        :param backward: speed for moving backward
        :param left: speed for moving left
        :param right: speed for moving right
        :param up: speed for moving up
        :param down: speed for moving down
        :param cw: speed for rotating clockwise
        :param ccw: speed for rotating counter-clockwise
        '''
        # Collapse each direction pair into a single signed axis value.
        roll = right - left
        pitch = backward - forward
        gaz = up - down
        yaw = cw - ccw
        self._move(roll=roll, pitch=pitch, gaz=gaz, yaw=yaw)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def encode(number, checksum=False, split=0): """Encode an integer into a symbol string. A ValueError is raised on invalid input. If checksum is set to True, a check symbol will be calculated and appended to the string. If split is specified, the string will be divided into clusters of that size separated by hyphens. The encoded string is returned. """
number = int(number) if number < 0: raise ValueError("number '%d' is not a positive integer" % number) split = int(split) if split < 0: raise ValueError("split '%d' is not a positive integer" % split) check_symbol = '' if checksum: check_symbol = encode_symbols[number % check_base] if number == 0: return '0' + check_symbol symbol_string = '' while number > 0: remainder = number % base number //= base symbol_string = encode_symbols[remainder] + symbol_string symbol_string = symbol_string + check_symbol if split: chunks = [] for pos in range(0, len(symbol_string), split): chunks.append(symbol_string[pos:pos + split]) symbol_string = '-'.join(chunks) return symbol_string
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decode(symbol_string, checksum=False, strict=False): """Decode an encoded symbol string. If checksum is set to True, the string is assumed to have a trailing check symbol which will be validated. If the checksum validation fails, a ValueError is raised. If strict is set to True, a ValueError is raised if the normalization step requires changes to the string. The decoded string is returned. """
symbol_string = normalize(symbol_string, strict=strict) if checksum: symbol_string, check_symbol = symbol_string[:-1], symbol_string[-1] number = 0 for symbol in symbol_string: number = number * base + decode_symbols[symbol] if checksum: check_value = decode_symbols[check_symbol] modulo = number % check_base if check_value != modulo: raise ValueError("invalid check symbol '%s' for string '%s'" % (check_symbol, symbol_string)) return number
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def normalize(symbol_string, strict=False): """Normalize an encoded symbol string. Normalization provides error correction and prepares the string for decoding. These transformations are applied: 1. Hyphens are removed 2. 'I', 'i', 'L' or 'l' are converted to '1' 3. 'O' or 'o' are converted to '0' 4. All characters are converted to uppercase A TypeError is raised if an invalid string type is provided. A ValueError is raised if the normalized string contains invalid characters. If the strict parameter is set to True, a ValueError is raised if any of the above transformations are applied. The normalized string is returned. """
if isinstance(symbol_string, string_types): if not PY3: try: symbol_string = symbol_string.encode('ascii') except UnicodeEncodeError: raise ValueError("string should only contain ASCII characters") else: raise TypeError("string is of invalid type %s" % symbol_string.__class__.__name__) norm_string = symbol_string.replace('-', '').translate(normalize_symbols).upper() if not valid_symbols.match(norm_string): raise ValueError("string '%s' contains invalid characters" % norm_string) if strict and norm_string != symbol_string: raise ValueError("string '%s' requires normalization" % symbol_string) return norm_string
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_requires(): """ Return required packages Plus any version tests and warnings """
from pkg_resources import parse_version required = ['cython>=0.24.0'] numpy_requirement = 'numpy>=1.7.1' try: import numpy except Exception: required.append(numpy_requirement) else: if parse_version(numpy.__version__) < parse_version('1.7.1'): required.append(numpy_requirement) return required
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _build_block_context(template, context): """Populate the block context with BlockNodes from parent templates."""
# Ensure there's a BlockContext before rendering. This allows blocks in # ExtendsNodes to be found by sub-templates (allowing {{ block.super }} and # overriding sub-blocks to work). if BLOCK_CONTEXT_KEY not in context.render_context: context.render_context[BLOCK_CONTEXT_KEY] = BlockContext() block_context = context.render_context[BLOCK_CONTEXT_KEY] for node in template.nodelist: if isinstance(node, ExtendsNode): compiled_parent = node.get_parent(context) # Add the parent node's blocks to the context. (This ends up being # similar logic to ExtendsNode.render(), where we're adding the # parent's blocks to the context so a child can find them.) block_context.add_blocks( {n.name: n for n in compiled_parent.nodelist.get_nodes_by_type(BlockNode)}) _build_block_context(compiled_parent, context) return compiled_parent # The ExtendsNode has to be the first non-text node. if not isinstance(node, TextNode): break
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _render_template_block_nodelist(nodelist, block_name, context): """Recursively iterate over a node to find the wanted block."""
# Attempt to find the wanted block in the current template. for node in nodelist: # If the wanted block was found, return it. if isinstance(node, BlockNode): # No matter what, add this block to the rendering context. context.render_context[BLOCK_CONTEXT_KEY].push(node.name, node) # If the name matches, you're all set and we found the block! if node.name == block_name: return node.render(context) # If a node has children, recurse into them. Based on # django.template.base.Node.get_nodes_by_type. for attr in node.child_nodelists: try: new_nodelist = getattr(node, attr) except AttributeError: continue # Try to find the block recursively. try: return _render_template_block_nodelist(new_nodelist, block_name, context) except BlockNotFound: continue # The wanted block_name was not found. raise BlockNotFound("block with name '%s' does not exist" % block_name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def render_block_to_string(template_name, block_name, context=None): """ Loads the given template_name and renders the given block with the given dictionary as context. Returns a string. template_name The name of the template to load and render. If it's a list of template names, Django uses select_template() instead of get_template() to find the template. """
# Like render_to_string, template_name can be a string or a list/tuple. if isinstance(template_name, (tuple, list)): t = loader.select_template(template_name) else: t = loader.get_template(template_name) # Create the context instance. context = context or {} # The Django backend. if isinstance(t, DjangoTemplate): return django_render_block(t, block_name, context) elif isinstance(t, Jinja2Template): from render_block.jinja2 import jinja2_render_block return jinja2_render_block(t, block_name, context) else: raise UnsupportedEngine( 'Can only render blocks from the Django template backend.')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_host_path(root, path, instance=None): """ Generates the host path for a container volume. If the given path is a dictionary, uses the entry of the instance name. :param root: Root path to prepend, if ``path`` does not already describe an absolute path. :type root: unicode | str | AbstractLazyObject :param path: Path string or dictionary of per-instance paths. :type path: unicode | str | dict | AbstractLazyObject :param instance: Optional instance name. :type instance: unicode | str :return: Path on the host that is mapped to the container volume. :rtype: unicode | str """
r_val = resolve_value(path) if isinstance(r_val, dict): r_instance = instance or 'default' r_path = resolve_value(r_val.get(r_instance)) if not r_path: raise ValueError("No path defined for instance {0}.".format(r_instance)) else: r_path = r_val r_root = resolve_value(root) if r_path and r_root and (r_path[0] != posixpath.sep): return posixpath.join(r_root, r_path) return r_path
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_actions(self, actions): """ Runs the given lists of attached actions and instance actions on the client. :param actions: Actions to apply. :type actions: list[dockermap.map.action.ItemAction] :return: Where the result is not ``None``, returns the output from the client. Note that this is a generator and needs to be consumed in order for all actions to be performed. :rtype: collections.Iterable[dict] """
policy = self._policy for action in actions: config_id = action.config_id config_type = config_id.config_type client_config = policy.clients[action.client_name] client = client_config.get_client() c_map = policy.container_maps[config_id.map_name] if config_type == ItemType.CONTAINER: config = c_map.get_existing(config_id.config_name) item_name = policy.cname(config_id.map_name, config_id.config_name, config_id.instance_name) elif config_type == ItemType.VOLUME: a_parent_name = config_id.config_name if c_map.use_attached_parent_name else None item_name = policy.aname(config_id.map_name, config_id.instance_name, parent_name=a_parent_name) if client_config.features['volumes']: config = c_map.get_existing_volume(config_id.config_name) else: config = c_map.get_existing(config_id.config_name) elif config_type == ItemType.NETWORK: config = c_map.get_existing_network(config_id.config_name) item_name = policy.nname(config_id.map_name, config_id.config_name) elif config_type == ItemType.IMAGE: config = None item_name = format_image_tag(config_id.config_name, config_id.instance_name) else: raise ValueError("Invalid configuration type.", config_id.config_type) for action_type in action.action_types: try: a_method = self.action_methods[(config_type, action_type)] except KeyError: raise ActionTypeException(config_id, action_type) action_config = ActionConfig(action.client_name, action.config_id, client_config, client, c_map, config) try: res = a_method(action_config, item_name, **action.extra_data) except Exception: exc_info = sys.exc_info() raise ActionException(exc_info, action.client_name, config_id, action_type) if res is not None: yield ActionOutput(action.client_name, config_id, action_type, res)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_client(cls, client): """ Constructs a configuration object from an existing client instance. If the client has already been created with a configuration object, returns that instance. :param client: Client object to derive the configuration from. :type client: docker.client.Client :return: ClientConfiguration """
if hasattr(client, 'client_configuration'): return client.client_configuration kwargs = {'client': client} for attr in cls.init_kwargs: if hasattr(client, attr): kwargs[attr] = getattr(client, attr) if hasattr(client, 'api_version'): kwargs['version'] = client.api_version return cls(**kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_init_kwargs(self): """ Generates keyword arguments for creating a new Docker client instance. :return: Keyword arguments as defined through this configuration. :rtype: dict """
init_kwargs = {} for k in self.init_kwargs: if k in self.core_property_set: init_kwargs[k] = getattr(self, k) elif k in self: init_kwargs[k] = self[k] return init_kwargs
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_client(self): """ Retrieves or creates a client instance from this configuration object. If instantiated from this configuration, the resulting object is also cached in the property ``client`` and a reference to this configuration is stored on the client object. :return: Client object instance. :rtype: docker.client.Client """
client = self._client if not client: self._client = client = self.client_constructor(**self.get_init_kwargs()) client.client_configuration = self # Client might update the version number after construction. updated_version = getattr(client, 'api_version', None) if updated_version: self.version = updated_version return client
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def exec_commands(self, action, c_name, run_cmds, **kwargs): """ Runs a single command inside a container. :param action: Action configuration. :type action: dockermap.map.runner.ActionConfig :param c_name: Container name. :type c_name: unicode | str :param run_cmds: Commands to run. :type run_cmds: list[dockermap.map.input.ExecCommand] :return: List of exec command return values (e.g. containing the command id), if applicable, or ``None`` if either no commands have been run or no values have been returned from the API. :rtype: list[dict] | NoneType """
client = action.client exec_results = [] for run_cmd in run_cmds: cmd = run_cmd.cmd cmd_user = run_cmd.user log.debug("Creating exec command in container %s with user %s: %s.", c_name, cmd_user, cmd) ec_kwargs = self.get_exec_create_kwargs(action, c_name, cmd, cmd_user) create_result = client.exec_create(**ec_kwargs) if create_result: e_id = create_result['Id'] log.debug("Starting exec command with id %s.", e_id) es_kwargs = self.get_exec_start_kwargs(action, c_name, e_id) client.exec_start(**es_kwargs) exec_results.append(create_result) else: log.debug("Exec command was created, but did not return an id. Assuming that it has been started.") if exec_results: return exec_results return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def exec_container_commands(self, action, c_name, **kwargs): """ Runs all configured commands of a container configuration inside the container instance. :param action: Action configuration. :type action: dockermap.map.runner.ActionConfig :param c_name: Container name. :type c_name: unicode | str :return: List of exec command return values (e.g. containing the command id), if applicable, or ``None`` if either no commands have been run or no values have been returned from the API. :rtype: list[dict] | NoneType """
config_cmds = action.config.exec_commands if not config_cmds: return None return self.exec_commands(action, c_name, run_cmds=config_cmds)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prepare_path(path, replace_space, replace_sep, expandvars, expanduser): """ Performs `os.path` replacement operations on a path string. :param path: Path string :type path: unicode | str :param replace_space: Mask spaces with backslash. :param replace_sep: Replace potentially different path separators with POSIX path notation (use :const:`posixpath.sep`). :type replace_sep: bool :param expandvars: Expand environment variables (:func:`~os.path.expandvars`). :type expandvars: bool :param expanduser: Expand user variables (:func:`~os.path.expanduser`). :type expanduser: bool :return: Path string from `path` with aforementioned replacements. :rtype: unicode | str """
r_path = path if expandvars: r_path = os.path.expandvars(r_path) if expanduser: r_path = os.path.expanduser(r_path) if replace_sep and os.sep != posixpath.sep: r_path = r_path.replace(os.path.sep, posixpath.sep) if replace_space: r_path = r_path.replace(' ', '\\ ') return r_path
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_command(cmd, shell=False): """ Converts a command line to the notation as used in a Dockerfile ``CMD`` and ``ENTRYPOINT`` command. In shell notation, this returns a simple string, whereas by default it returns a JSON-list format with the command and arguments. :param cmd: Command line as a string or tuple. :type cmd: unicode | str | tuple | list :param shell: Use the notation so that Docker runs the command in a shell. Default is ``False``. :type shell: bool :return: The command string. :rtype: unicode | str """
def _split_cmd(): line = None for part in cmd.split(' '): line = part if line is None else '{0} {1}'.format(line, part) if part[-1] != '\\': yield line line = None if line is not None: yield line if cmd in ([], ''): return '[]' if shell: if isinstance(cmd, (list, tuple)): return ' '.join(cmd) elif isinstance(cmd, six.string_types): return cmd else: if isinstance(cmd, (list, tuple)): return json.dumps(map(six.text_type, cmd)) elif isinstance(cmd, six.string_types): return json.dumps(list(_split_cmd())) raise ValueError("Invalid type of command string or sequence: {0}".format(cmd))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_expose(expose): """ Converts a port number or multiple port numbers, as used in the Dockerfile ``EXPOSE`` command, to a tuple. :param expose: Port numbers, can be an integer, string, or a list/tuple of those. :type expose: int | unicode | str | list | tuple :return: A tuple, to be separated by spaces before inserting in a Dockerfile. :rtype: tuple """
if isinstance(expose, six.string_types): return expose, elif isinstance(expose, collections.Iterable): return map(six.text_type, expose) return six.text_type(expose),
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_file(self, src_path, dst_path=None, ctx_path=None, replace_space=True, expandvars=False, expanduser=False, remove_final=False): """ Adds a file to the Docker build. An ``ADD`` command is inserted, and the path is stored for later packaging of the context tarball. :param src_path: Path to the file or directory. :type src_path: unicode | str :param dst_path: Destination path during the Docker build. By default uses the last element of `src_path`. :type dst_path: unicode | str :param ctx_path: Path inside the context tarball. Can be set in order to avoid name clashes. By default identical to the destination path. :type ctx_path: unicode | str :param replace_space: Mask spaces in path names with a backslash. Default is ``True``. :type replace_space: bool :param expandvars: Expand local environment variables. Default is ``False``. :type expandvars: bool :param expanduser: Expand local user variables. Default is ``False``. :type expanduser: bool :param remove_final: Remove the file after the build operation has completed. Can be useful e.g. for source code archives, which are no longer needed after building the binaries. Note that this will not reduce the size of the resulting image (actually may increase instead) unless the image is squashed. :type remove_final: bool :return: The path of the file in the Dockerfile context. :rtype: unicode | str """
if dst_path is None: head, tail = os.path.split(src_path) if not tail: # On trailing backslashes. tail = os.path.split(head)[1] if not tail: ValueError("Could not generate target path from input '{0}'; needs to be specified explicitly.") target_path = tail else: target_path = dst_path source_path = prepare_path(src_path, False, False, expandvars, expanduser) target_path = prepare_path(target_path, replace_space, True, expandvars, expanduser) if ctx_path: context_path = prepare_path(ctx_path, replace_space, True, expandvars, expanduser) else: context_path = target_path self.prefix('ADD', context_path, target_path) self._files.append((source_path, context_path)) if remove_final: self._remove_files.add(target_path) return context_path
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def write(self, input_str): """ Adds content to the Dockerfile. :param input_str: Content. :type input_str: unicode | str """
# Writing is only allowed while the Dockerfile has not been finalized.
self.check_not_finalized()
# Text input is encoded to UTF-8; binary input is passed through unchanged.
data = input_str if isinstance(input_str, six.binary_type) else input_str.encode('utf-8')
self.fileobj.write(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def merge_dependency(self, item, resolve_parent, parents): """ Merge dependencies of current configuration with further dependencies; in this instance, it means that in case of container configuration first parent dependencies are checked, and then immediate dependencies of the current configuration should be added to the list, but without duplicating any entries. :param item: Configuration item. :type item: (unicode | str, unicode | str, unicode | str, unicode | str) :param resolve_parent: Function to resolve parent dependencies. :type resolve_parent: function :type parents: collections.Iterable[(unicode | str, unicode | str, unicode | str, unicode | str)] :return: List of recursively resolved dependencies of this container. :rtype: list[(unicode | str, unicode | str, unicode | str, unicode | str)] :raise CircularDependency: If the current element depends on one found deeper in the hierarchy. """
# Accumulates the resolved dependency list, deepest dependencies first.
dep = []
for parent_key in parents:
    # Direct self-dependency is reported as such (second arg flags it).
    if item == parent_key:
        raise CircularDependency(item, True)
    # Only container configurations have transitive dependencies to resolve.
    if parent_key.config_type == ItemType.CONTAINER:
        parent_dep = resolve_parent(parent_key)
        # The item appearing among a parent's dependencies means a cycle
        # deeper in the hierarchy.
        if item in parent_dep:
            raise CircularDependency(item)
        # merge_list appends without duplicating existing entries.
        merge_list(dep, parent_dep)
# Finally add the immediate parents themselves, again without duplicates.
merge_list(dep, parents)
return dep
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_map(stream, name=None, check_integrity=True, check_duplicates=True): """ Loads a ContainerMap configuration from a YAML document stream. :param stream: YAML stream. :type stream: file :param name: Name of the ContainerMap. If not provided, will be attempted to read from a ``name`` attribute on the document root level. :type name: unicode | str :param check_integrity: Performs a brief integrity check; default is ``True``. :type check_integrity: bool :param check_duplicates: Check for duplicate attached volumes during integrity check. :type check_duplicates: bool :return: A ContainerMap object. :rtype: ContainerMap """
map_dict = yaml.safe_load(stream) if isinstance(map_dict, dict): map_name = name or map_dict.pop('name', None) if not map_name: raise ValueError("No map name provided, and none found in YAML stream.") return ContainerMap(map_name, map_dict, check_integrity=check_integrity, check_duplicates=check_duplicates) raise ValueError("Valid map could not be decoded.")