text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_shapes(shapes): """ Generate the shapes for the topology :param dict shapes: A dict of converted shapes from the old topology :return: dict containing two lists (ellipse, rectangle) :rtype: dict """
def generate_shapes(shapes):
    """
    Sort the converted shapes into per-type lists.

    :param dict shapes: A dict of converted shapes from the old topology
    :return: dict containing two lists (ellipse, rectangle)
    :rtype: dict
    """
    sorted_shapes = {'ellipse': [], 'rectangle': []}
    for shape_data in shapes.values():
        # Everything except the discriminating 'type' key is carried over.
        stripped = {key: value for key, value in shape_data.items()
                    if key != 'type'}
        sorted_shapes[shape_data['type']].append(stripped)
    return sorted_shapes
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_notes(notes): """ Generate the notes list :param dict notes: A dict of converted notes from the old topology :return: List of notes for the the topology :rtype: list """
def generate_notes(notes):
    """
    Flatten the converted notes mapping into a list.

    :param dict notes: A dict of converted notes from the old topology
    :return: List of notes for the topology
    :rtype: list
    """
    # Each note dict is shallow-copied so the result does not alias the input.
    return [dict(note_data) for note_data in notes.values()]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate_images(self, pixmaps): """ Generate the images list and store the images to copy :param dict pixmaps: A dict of converted pixmaps from the old topology :return: A list of images :rtype: list """
def generate_images(self, pixmaps):
    """
    Generate the images list and store the images to copy.

    :param dict pixmaps: A dict of converted pixmaps from the old topology
    :return: A list of images
    :rtype: list
    """
    converted = []
    for pixmap in pixmaps.values():
        entry = {}
        for key, value in pixmap.items():
            if key == 'path':
                # Rebase the image into the project 'images' directory and
                # remember the original file so it can be copied later.
                rel_path = os.path.join('images', os.path.basename(value))
                entry['path'] = fix_path(rel_path)
                self.images.append(value)
            else:
                entry[key] = value
        converted.append(entry)
    return converted
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_qemu_path(self, instance): """ Add the qemu path to the hypervisor conf data :param instance: Hypervisor instance """
def add_qemu_path(self, instance):
    """
    Add the qemu path to the hypervisor conf data.

    :param instance: Hypervisor instance
    """
    qemu_conf = {'qemu_path': self.old_top[instance]['qemupath']}
    conf_list = self.topology['conf']
    if not conf_list:
        conf_list.append(qemu_conf)
    else:
        # Merge into the hypervisor entry currently being built.
        conf_list[self.hv_id].update(qemu_conf)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_conf_item(self, instance, item): """ Add a hypervisor configuration item :param instance: Hypervisor instance :param item: Item to add """
def add_conf_item(self, instance, item):
    """
    Add a hypervisor configuration item.

    :param instance: Hypervisor instance
    :param item: Item to add
    """
    conf = {}
    if item not in EXTRA_CONF:
        conf['model'] = MODEL_TRANSFORM[item]
    source = self.old_top[instance][item]
    for key in sorted(source):
        # None values in the old topology are dropped.
        if source[key] is not None:
            conf[key] = source[key]

    if item in EXTRA_CONF:
        # Extra conf entries are nested under the item name and merged into
        # the current hypervisor's conf entry when one already exists.
        conf = {item: conf}
        if not self.topology['conf']:
            self.topology['conf'].append(conf)
        else:
            self.topology['conf'][self.hv_id].update(conf)
    else:
        self.topology['conf'].append(conf)
        self.hv_id = len(self.topology['conf']) - 1
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def device_typename(item): """ Convert the old names to new-style names and types :param str item: A device in the form of 'TYPE NAME' :return: tuple containing device name and type details """
def device_typename(item):
    """
    Convert the old names to new-style names and types.

    :param str item: A device in the form of 'TYPE NAME'
    :return: tuple containing device name and type details
    """
    dev_type = {
        'ROUTER': {'from': 'ROUTER', 'desc': 'Router',
                   'type': 'Router', 'label_x': 19.5},
        'QEMU': {'from': 'QEMU', 'desc': 'QEMU VM', 'type': 'QemuVM',
                 'ext_conf': 'QemuDevice', 'label_x': -12},
        'ASA': {'from': 'ASA', 'desc': 'QEMU VM', 'type': 'QemuVM',
                'ext_conf': '5520', 'label_x': 2.5},
        'PIX': {'from': 'PIX', 'desc': 'QEMU VM', 'type': 'QemuVM',
                'ext_conf': '525', 'label_x': -12},
        'JUNOS': {'from': 'JUNOS', 'desc': 'QEMU VM', 'type': 'QemuVM',
                  'ext_conf': 'O-series', 'label_x': -12},
        'IDS': {'from': 'IDS', 'desc': 'QEMU VM', 'type': 'QemuVM',
                'ext_conf': 'IDS-4215', 'label_x': -12},
        'VBOX': {'from': 'VBOX', 'desc': 'VirtualBox VM',
                 'type': 'VirtualBoxVM', 'ext_conf': 'VBoxDevice',
                 'label_x': -4.5},
        'FRSW': {'from': 'FRSW', 'desc': 'Frame Relay switch',
                 'type': 'FrameRelaySwitch', 'label_x': 7.5},
        'ETHSW': {'from': 'ETHSW', 'desc': 'Ethernet switch',
                  'type': 'EthernetSwitch', 'label_x': 15.5},
        'Hub': {'from': 'Hub', 'desc': 'Ethernet hub',
                'type': 'EthernetHub', 'label_x': 12.0},
        'ATMSW': {'from': 'ATMSW', 'desc': 'ATM switch',
                  'type': 'ATMSwitch', 'label_x': 2.0},
        'ATMBR': {'from': 'ATMBR',  # TODO: Investigate ATM Bridge
                  'desc': 'ATMBR', 'type': 'ATMBR'},
        'Cloud': {'from': 'Cloud', 'desc': 'Cloud',
                  'type': 'Cloud', 'label_x': 47.5},
    }
    # BUGFIX: the previous implementation used str.replace to drop the
    # 'TYPE ' prefix, which removed *every* occurrence of the type token in
    # the string (e.g. 'ETHSW ETHSW 1' became '1').  Split once on the
    # first space instead so only the prefix is stripped.
    item_type, sep, name = item.partition(' ')
    if not sep:
        # Degenerate input with no name part: keep the old behaviour of
        # returning the whole string as the name.
        name = item
    return name, dev_type[item_type]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_topology(self): """ Get the converted topology ready for JSON encoding :return: converted topology assembled into a single dict :rtype: dict """
def get_topology(self):
    """
    Get the converted topology ready for JSON encoding.

    :return: converted topology assembled into a single dict
    :rtype: dict
    """
    topology = {'name': self._name,
                'resources_type': 'local',
                'topology': {},
                'type': 'topology',
                'version': '1.0'}
    inner = topology['topology']
    # Only non-empty sections are emitted.
    optional_sections = (('links', self._links),
                         ('nodes', self._nodes),
                         ('servers', self._servers),
                         ('notes', self._notes),
                         ('ellipses', self._shapes['ellipse']),
                         ('rectangles', self._shapes['rectangle']),
                         ('images', self._images))
    for section, content in optional_sections:
        if content:
            inner[section] = content
    return topology
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_vboxes(self): """ Get the maximum ID of the VBoxes :return: Maximum VBox ID :rtype: int """
def get_vboxes(self):
    """
    Get the maximum ID of the VBoxes.

    :return: Maximum VBox ID, or None when no VirtualBox VM exists
    :rtype: int
    """
    ids = [node['vbox_id'] for node in self.nodes
           if node['type'] == 'VirtualBoxVM']
    return max(ids) if ids else None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_qemus(self): """ Get the maximum ID of the Qemu VMs :return: Maximum Qemu VM ID :rtype: int """
def get_qemus(self):
    """
    Get the maximum ID of the Qemu VMs.

    :return: Maximum Qemu VM ID, or None when no Qemu VM exists
    :rtype: int
    """
    ids = [node['qemu_id'] for node in self.nodes
           if node['type'] == 'QemuVM']
    return max(ids) if ids else None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def getElements(self, name=''):
    """Return child elements, optionally filtered by tag name.

    With no name, the actual children list is returned (not a copy).
    """
    if not name:
        return self.children
    return [child for child in self.children if child.name == name]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def StartElement(self, name, attributes):
    """SAX start-element event handler."""
    # Tag names are stored as byte strings in this tree.
    element = Element(name.encode(), attributes)
    if self.nodeStack:
        # Attach to the element currently being parsed.
        self.nodeStack[-1].AddChild(element)
    else:
        # The first element seen becomes the document root.
        self.root = element
    self.nodeStack.append(element)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def CharacterData(self, data):
    """SAX character-data event handler.

    Appends the UTF-8 encoded text to the current element's cdata,
    preserving newlines as delivered by the parser.
    """
    self.nodeStack[-1].cdata += data.encode("utf-8")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doShow(self, xml=0): """Shows the contents of our resultset."""
def doShow(self, xml=0):
    """Shows the contents of our resultset."""
    # NOTE: legacy Python 2 code (print statements, `string` module).
    # Plain-text dump by default; pass non-zero ``xml`` to print raw XML.
    if xml == 0:
        print 'Errorcode:', self.errorcode
        print
        print 'Product information:'
        for key in self.product.keys():
            print '    ', key.encode('UTF-8'),
            print '->', self.product[key].encode('UTF-8')
        print
        print 'Database information:'
        for key in self.database.keys():
            print '    ', key.encode('UTF-8'),
            print'->', self.database[key].encode('UTF-8')
        print
        print 'Metadata:'
        for field in self.metadata.keys():
            print
            print '    ', field.encode('UTF-8')
            for property in self.metadata[field]:
                print '    ', property.encode('UTF-8'),
                print '->', self.metadata[field][property].encode('UTF-8')
        print
        print 'Records:'
        for record in self.resultset:
            print
            for column in record:
                print '    ', column.encode('UTF-8'),
                # Unicode values need an explicit encode before printing.
                if type(record[column]) == UnicodeType:
                    print '->', record[column].encode('UTF-8')
                else:
                    print '->', record[column]
    else:
        # Re-insert newlines before/after known tags so the raw XML from
        # the server is minimally readable.
        tags = [ 'FMPXMLRESULT', 'ERRORCODE', 'PRODUCT', 'DATABASE',
                 'METADATA', 'FIELD', 'RESULTSET', 'ROW', 'COL', 'DATA' ]
        xml = self.data
        for tag in tags:
            xml = string.replace(xml, '></' + tag, '>\n</' + tag)
            xml = string.replace(xml, '><' + tag, '>\n<' + tag)
        print xml
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _setSkipRecords(self, skipRec): """Specifies how many records to skip in the found set"""
def _setSkipRecords(self, skipRec):
    """Specifies how many records to skip in the found set"""
    # Accepts ints or purely numeric strings; anything else is rejected.
    # NOTE: legacy Python 2 raise syntax.
    if type(skipRec) == int or (type(skipRec) == str and skipRec.isdigit()):
        self._skipRecords = skipRec
    else:
        raise FMError, 'Unsupported -skip value (not a number).'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _setLogicalOperator(self, lop): """Sets the way the find fields should be combined together."""
def _setLogicalOperator(self, lop):
    """Sets the way the find fields should be combined together."""
    # Case-insensitive: only 'and' / 'or' are valid, stored lowercased.
    # NOTE: legacy Python 2 raise syntax.
    if not lop.lower() in ['and', 'or']:
        raise FMError, 'Unsupported logical operator (not one of "and" or "or").'
    self._lop = lop.lower()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _setComparasionOperator(self, field, oper): """Sets correct operator for given string representation"""
def _setComparasionOperator(self, field, oper):
    """Sets correct operator for given string representation"""
    # Empty operator means "use FileMaker's default"; nothing is appended.
    if oper != '':
        # Map the many accepted spellings/symbols onto FileMaker's
        # canonical operator codes (eq, cn, bw, ew, gt, gte, lt, lte, neq).
        validOperators = {
            'eq':'eq', 'equals':'eq', '=':'eq', '==':'eq',
            'cn':'cn', 'contains':'cn', '%%':'cn', '%':'cn', '*':'cn',
            'bw':'bw', 'begins with':'bw', '^':'bw',
            'ew':'ew', 'ends with':'ew', '$':'ew',
            'gt':'gt', 'greater than':'gt', '>':'gt',
            'gte':'gte', 'greater than or equals':'gte', '>=':'gte',
            'lt':'lt', 'less than':'lt', '<':'lt',
            'lte':'lte', 'less than or equals':'lte', '<=':'lte',
            'neq':'neq', 'not equals':'neq', '!=':'neq', '<>':'neq'
        }
        # Case-insensitive lookup (legacy Python 2 `string` module idiom).
        if not string.lower(oper) in validOperators.keys():
            raise FMError, 'Invalid operator "'+ oper + '" for "' + field + '"'
        oper = validOperators[oper.lower()]
        # Stored as "<field>.op" so the query string carries the operator.
        self._dbParams.append( ["%s.op" % field, oper] )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _addDBParam(self, name, value): """Adds a database parameter"""
if name[-4:] == '__OP': return self._setComparasionOperator(name[:-4], value) if name[-3:] == '.op': return self._setComparasionOperator(name[:-3], value) if name.find('__') != -1: import re name = name.replace('__','::') elif name.find('.') != -1: name = name.replace('.','::') self._dbParams.append( [name, value] )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getFile(self, file_xml_uri): """ This will execute cmd to fetch file data from FMServer """
def getFile(self, file_xml_uri):
    """Fetch container-field file data from the FM server.

    :return: tuple of (file_name, file_extension, file_binary)
    """
    match = re.match(r'/fmi/xml/cnt/([\w\d.-]+)\.([\w]+)?-*', file_xml_uri)
    name = match.group(1)
    extension = match.group(2)
    binary = self._doRequest(is_file=True, file_xml_uri=file_xml_uri)
    return (name, extension, binary)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doScript(self, script_name, params=None, return_all=False): """This function executes the script for given layout for the current db."""
def doScript(self, script_name, params=None, return_all=False):
    """Execute ``script_name`` on the current db/layout via -findall.

    Returns the whole resultset when ``return_all`` is true, otherwise the
    first record, or None when the script produced no records.
    """
    request = [
        uu({'-db': self._db }),
        uu({'-lay': self._layout }),
        uu({'-script': script_name}),
    ]
    if params:
        request.append(uu({'-script.param': params }))
    request.append(uu({'-findall': '' }))

    resultset = FMResultset.FMResultset(self._doRequest(request)).resultset
    if return_all:
        return resultset
    try:
        return resultset[0]
    except IndexError:
        # Script produced no records.
        return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doScriptAfter(self, func, func_kwargs={}, script_name='', params=None): """ This function will execute extra script after passed function """
def doScriptAfter(self, func, func_kwargs=None, script_name='', params=None):
    """Register a script to run after the request issued by ``func``.

    :param func: callable that performs the actual FM request
    :param func_kwargs: keyword arguments forwarded to ``func``
    :param script_name: name of the FM script to run afterwards
    :param params: optional script parameter
    :return: whatever ``func`` returns
    """
    # BUGFIX: func_kwargs previously defaulted to a mutable {} shared
    # across calls; use the None-sentinel idiom instead.
    if func_kwargs is None:
        func_kwargs = {}
    request = [ uu({'-script': script_name}) ]
    if params:
        request.append(uu({'-script.param': params }))
    # Stored on the instance; picked up by the next request builder.
    self._extra_script = request
    return func(**func_kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getDbNames(self): """This function returns the list of open databases"""
def getDbNames(self):
    """This function returns the list of open databases"""
    # NOTE: legacy Python 2 code (`string.lower`).
    request = []
    request.append(uu({'-dbnames': '' }))
    result = self._doRequest(request)
    result = FMResultset.FMResultset(result)
    dbNames = []
    for dbName in result.resultset:
        # Database names are normalized to lowercase.
        dbNames.append(string.lower(dbName['DATABASE_NAME']))
    return dbNames
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doFind(self, WHAT={}, SORT=[], SKIP=None, MAX=None, LOP='AND', **params): """This function will perform the command -find."""
def doFind(self, WHAT={}, SORT=[], SKIP=None, MAX=None, LOP='AND', **params):
    """This function will perform the command -find.

    Returns [] when FileMaker reports "no records match" (errors 401/8).
    Other server errors propagate to the caller.
    """
    # NOTE(review): the mutable WHAT={}/SORT=[] defaults are kept for
    # interface compatibility; they are only read here, never mutated.
    self._preFind(WHAT, SORT, SKIP, MAX, LOP)
    for key in params:
        self._addDBParam(key, params[key])
    try:
        return self._doAction('-find')
    except FMServerError as e:
        # 401 / 8: no records matched the criteria -> empty result.
        if e.args[0] in [401, 8]:
            return []
        # BUGFIX: other server errors used to be silently swallowed
        # (the function fell through and returned None); re-raise them.
        raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doFindAll(self, WHAT={}, SORT=[], SKIP=None, MAX=None): """This function will perform the command -findall."""
def doFindAll(self, WHAT={}, SORT=[], SKIP=None, MAX=None):
    """Perform the -findall command (return every record)."""
    # Delegate criteria/sort/paging setup, then fire the action.
    self._preFind(WHAT, SORT, SKIP, MAX)
    return self._doAction('-findall')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doFindAny(self, WHAT={}, SORT=[], SKIP=None, MAX=None, LOP='AND', **params): """This function will perform the command -findany."""
def doFindAny(self, WHAT={}, SORT=[], SKIP=None, MAX=None, LOP='AND', **params):
    """Perform the -findany command (return one random record)."""
    self._preFind(WHAT, SORT, SKIP, MAX, LOP)
    # Extra keyword criteria are forwarded as database parameters.
    for key, value in params.items():
        self._addDBParam(key, value)
    return self._doAction('-findany')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doDelete(self, WHAT={}): """This function will perform the command -delete."""
def doDelete(self, WHAT={}):
    """This function will perform the command -delete."""
    # NOTE: legacy Python 2 code (has_key, raise syntax).
    # WHAT may be a record object (has _modified) or a plain dict
    # carrying at least RECORDID.
    if hasattr(WHAT, '_modified'):
        self._addDBParam('RECORDID', WHAT.RECORDID)
        self._addDBParam('MODID', WHAT.MODID)
    elif type(WHAT) == dict and WHAT.has_key('RECORDID'):
        self._addDBParam('RECORDID', WHAT['RECORDID'])
    else:
        raise FMError, 'Python Runtime: Object type (%s) given to function doDelete as argument WHAT cannot be used.' % type(WHAT)
    if self._layout == '':
        raise FMError, 'No layout was selected'
    # A delete without a record id would be destructive/ambiguous.
    if self._checkRecordID() == 0:
        raise FMError, 'RecordID is missing'
    return self._doAction('-delete')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doNew(self, WHAT={}, **params): """This function will perform the command -new."""
def doNew(self, WHAT={}, **params):
    """This function will perform the command -new."""
    # NOTE: legacy Python 2 code (has_key, raise syntax).
    # WHAT may be a record object (has _modified) or a plain dict of
    # field -> value pairs; RECORDID/MODID never belong in a new record.
    if hasattr(WHAT, '_modified'):
        for key in WHAT:
            if key not in ['RECORDID','MODID']:
                # __new2old__ maps sanitized attribute names back to the
                # original FM field names.
                if WHAT.__new2old__.has_key(key):
                    self._addDBParam(WHAT.__new2old__[key].encode('utf-8'), WHAT[key])
                else:
                    self._addDBParam(key, WHAT[key])
    elif type(WHAT)==dict:
        for key in WHAT:
            self._addDBParam(key, WHAT[key])
    else:
        raise FMError, 'Python Runtime: Object type (%s) given to function doNew as argument WHAT cannot be used.' % type(WHAT)
    if self._layout == '':
        raise FMError, 'No layout was selected'
    for key in params:
        self._addDBParam(key, params[key])
    if len(self._dbParams) == 0:
        raise FMError, 'No data to be added'
    return self._doAction('-new')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doDup(self, WHAT={}, **params): """This function will perform the command -dup."""
def doDup(self, WHAT={}, **params):
    """This function will perform the command -dup."""
    # NOTE: legacy Python 2 code (has_key, raise syntax).
    if hasattr(WHAT, '_modified'):
        # Only fields changed on the record object are sent along with
        # the duplicate request.
        for key, value in WHAT._modified():
            if WHAT.__new2old__.has_key(key):
                self._addDBParam(WHAT.__new2old__[key].encode('utf-8'), value)
            else:
                self._addDBParam(key, value)
        self._addDBParam('RECORDID', WHAT.RECORDID)
        self._addDBParam('MODID', WHAT.MODID)
    elif type(WHAT) == dict:
        for key in WHAT:
            self._addDBParam(key, WHAT[key])
    else:
        raise FMError, 'Python Runtime: Object type (%s) given to function doDup as argument WHAT cannot be used.' % type(WHAT)
    if self._layout == '':
        raise FMError, 'No layout was selected'
    for key in params:
        self._addDBParam(key, params[key])
    # Duplication targets an existing record, so a record id is required.
    if self._checkRecordID() == 0:
        raise FMError, 'RecordID is missing'
    return self._doAction('-dup')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _buildUrl(self): """Builds url for normal FM requests."""
return '%(protocol)s://%(host)s:%(port)s%(address)s'%{ 'protocol': self._protocol, 'host': self._host, 'port': self._port, 'address': self._address, }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _buildFileUrl(self, xml_req): """Builds url for fetching the files from FM."""
return '%(protocol)s://%(host)s:%(port)s%(xml_req)s'%{ 'protocol': self._protocol, 'host': self._host, 'port': self._port, 'xml_req': xml_req, }
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _doRequest(self, request=None, is_file=False, file_xml_uri=''): """This function will perform the specified request on the FileMaker server, and it will return the raw result from FileMaker."""
def _doRequest(self, request=None, is_file=False, file_xml_uri=''):
    """This function will perform the specified request on the FileMaker
    server, and it will return the raw result from FileMaker."""
    # NOTE: legacy Python 2 code (print statement).
    if request is None:
        request = []
    if is_file and file_xml_uri:
        # Container-field download: the URI returned by FM is used as-is.
        url = self._buildFileUrl(file_xml_uri)
    else:
        # Regular request: join the pre-encoded key=value parts.
        request = '&'.join(request)
        url = "%s?%s" % (self._buildUrl(), request)
    if self._debug:
        print '[PyFileMaker DEBUG] ', url
    resp = requests.get(
        url = url,
        auth = (self._login, self._password)
    )
    # Raise on HTTP-level failures; FM-level errors are in the body.
    resp.raise_for_status()
    return resp.content
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def FMErrorByNum( num ): """This function raises an error based on the specified error code."""
def FMErrorByNum( num ):
    """This function raises an error based on the specified error code."""
    # NOTE: legacy Python 2 raise syntax.
    # Unknown numbers fall back to the generic message stored under key -1.
    if not num in FMErrorNum.keys():
        raise FMServerError, (num, FMErrorNum[-1])
    # 102 (missing field) gets its own exception class so callers can
    # catch field problems separately.
    elif num == 102:
        raise FMFieldError, (num, FMErrorNum[num])
    else:
        raise FMServerError, (num, FMErrorNum[num])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doParseXMLData( self ): """This function parses the XML output of FileMaker."""
parser = xml2obj.Xml2Obj() # Not valid document comming from FMServer if self.data[-6:] == '</COL>': self.data += '</ROW></RESULTSET></FMPXMLRESULT>' xobj = parser.ParseString( self.data ) try: el = xobj.getElements( 'ERRORCODE') if el: self.errorcode = int( el[0].getData() ) else: self.errorcode = int( xobj.getElements('error')[0].getAttribute('code') ) except: FMErrorByNum( 954 ) if self.errorcode != 0: FMErrorByNum( self.errorcode ) return xobj
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fill(metrics_headers=()): """Add the metrics headers known to GAX. Return an OrderedDict with all of the metrics headers provided to this function, as well as the metrics known to GAX (such as its own version, the GRPC version, etc.). """
def fill(metrics_headers=()):
    """Add the metrics headers known to GAX.

    Return an OrderedDict with all of the metrics headers provided to
    this function, as well as the metrics known to GAX (such as its own
    version, the GRPC version, etc.).
    """
    # Order matters: Python version first, then caller-supplied headers
    # (generally the GAPIC library), then the GAX and GRPC versions.
    answer = collections.OrderedDict()
    answer['gl-python'] = platform.python_version()
    answer.update(collections.OrderedDict(metrics_headers))
    answer['gax'] = gax.__version__  # pylint: disable=no-member
    answer['grpc'] = pkg_resources.get_distribution('grpcio').version
    return answer
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stringify(metrics_headers=()): """Convert the provided metrics headers to a string. Iterate over the metrics headers (a dictionary, usually ordered) and return a properly-formatted space-separated string (e.g. foo/1.2.3 bar/3.14.159). """
def stringify(metrics_headers=()):
    """Convert the provided metrics headers to a string.

    Iterate over the metrics headers (a dictionary, usually ordered) and
    return a properly-formatted space-separated string
    (e.g. foo/1.2.3 bar/3.14.159).
    """
    ordered = collections.OrderedDict(metrics_headers)
    parts = ['%s/%s' % pair for pair in ordered.items()]
    return ' '.join(parts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _str_dotted_getattr(obj, name): """Expands extends getattr to allow dots in x to indicate nested objects. Args: obj (object): an object. name (str): a name for a field in the object. Returns: Any: the value of named attribute. Raises: AttributeError: if the named attribute does not exist. """
for part in name.split('.'): obj = getattr(obj, part) return str(obj) if obj else None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def request_bytesize(self): """The size of in bytes of the bundled field elements."""
def request_bytesize(self):
    """The size of in bytes of the bundled field elements."""
    # Sizes are measured on the str() form of each queued element.
    total = 0
    for batch in self._in_deque:
        for element in batch:
            total += len(str(element))
    return total
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(self): """Call the task's func. The task's func will be called with the bundling requests func """
def run(self):
    """Call the task's func.

    The task's func will be called with the bundling requests func
    """
    # Nothing queued: bundle was fully cancelled or already sent.
    if not self._in_deque:
        return
    req = self._bundling_request
    # Rewrite the repeated field in place: clear it, then extend with all
    # queued element batches flattened in arrival order.
    del getattr(req, self.bundled_field)[:]
    getattr(req, self.bundled_field).extend(
        [e for elts in self._in_deque for e in elts])
    subresponse_field = self.subresponse_field
    if subresponse_field:
        # Split the single bundled response back out per caller.
        self._run_with_subresponses(req, subresponse_field, self._kwargs)
    else:
        self._run_with_no_subresponse(req, self._kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def extend(self, elts): """Adds elts to the tasks. Args: elts (Sequence): a iterable of elements that can be appended to the task's bundle_field. Returns: Event: an event that can be used to wait on the response. """
def extend(self, elts):
    """Adds elts to the tasks.

    Args:
        elts (Sequence): a iterable of elements that can be appended to the
            task's bundle_field.

    Returns:
        Event: an event that can be used to wait on the response.
    """
    # A copy, not a reference: the proto field the elements came from is
    # cleared later when the bundled request is assembled.
    batch = elts[:]
    self._in_deque.append(batch)
    pending_event = self._event_for(batch)
    self._event_deque.append(pending_event)
    return pending_event
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _event_for(self, elts): """Creates an Event that is set when the bundle with elts is sent."""
def _event_for(self, elts):
    """Creates an Event that is set when the bundle with elts is sent."""
    bundle_event = Event()
    # Attach a canceller so the caller can withdraw elts before sending.
    bundle_event.canceller = self._canceller_for(elts, bundle_event)
    return bundle_event
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _canceller_for(self, elts, event): """Obtains a cancellation function that removes elts. The returned cancellation function returns ``True`` if all elements was removed successfully from the _in_deque, and false if it was not. """
def canceller(): """Cancels submission of ``elts`` as part of this bundle. Returns: bool: ``False`` if any of elements had already been sent, otherwise ``True``. """ try: self._event_deque.remove(event) self._in_deque.remove(elts) return True except ValueError: return False return canceller
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def schedule(self, api_call, bundle_id, bundle_desc, bundling_request, kwargs=None): """Schedules bundle_desc of bundling_request as part of bundle_id. The returned value an :class:`Event` that * has a ``result`` attribute that will eventually be set to the result the api call * will be used to wait for the response * holds the canceller function for canceling this part of the bundle Args: api_call (callable[[object], object]): the scheduled API call. bundle_id (str): identifies the bundle on which the API call should be made. bundle_desc (gax.BundleDescriptor): describes the structure of the bundled call. bundling_request (object): the request instance to use in the API call. kwargs (dict): optional, the keyword arguments passed to the API call. Returns: Event: the scheduled event. """
def schedule(self, api_call, bundle_id, bundle_desc, bundling_request, kwargs=None):
    """Schedules bundle_desc of bundling_request as part of bundle_id.

    The returned value an :class:`Event` that

    * has a ``result`` attribute that will eventually be set to the result
      the api call
    * will be used to wait for the response
    * holds the canceller function for canceling this part of the bundle

    Args:
        api_call (callable[[object], object]): the scheduled API call.
        bundle_id (str): identifies the bundle on which the API call should be
            made.
        bundle_desc (gax.BundleDescriptor): describes the structure of the
            bundled call.
        bundling_request (object): the request instance to use in the API
            call.
        kwargs (dict): optional, the keyword arguments passed to the API call.

    Returns:
        Event: the scheduled event.
    """
    kwargs = kwargs or dict()
    # Look up (or lazily create) the bundle this request belongs to.
    bundle = self._bundle_for(api_call, bundle_id, bundle_desc,
                              bundling_request, kwargs)
    elts = getattr(bundling_request, bundle_desc.bundled_field)
    event = bundle.extend(elts)

    # Run the bundle if the count threshold was reached.
    count_threshold = self._options.element_count_threshold
    if count_threshold > 0 and bundle.element_count >= count_threshold:
        self._run_now(bundle.bundle_id)

    # Run the bundle if the size threshold was reached.
    size_threshold = self._options.request_byte_threshold
    if size_threshold > 0 and bundle.request_bytesize >= size_threshold:
        self._run_now(bundle.bundle_id)

    return event
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_stub(generated_create_stub, channel=None, service_path=None, service_port=None, credentials=None, scopes=None, ssl_credentials=None): """Creates a gRPC client stub. Args: generated_create_stub (Callable): The generated gRPC method to create a stub. channel (grpc.Channel): A Channel object through which to make calls. If None, a secure channel is constructed. If specified, all remaining arguments are ignored. service_path (str): The domain name of the API remote host. service_port (int): The port on which to connect to the remote host. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify your application to the service. scopes (Sequence[str]): The OAuth scopes for this service. This parameter is ignored if a credentials is specified. ssl_credentials (grpc.ChannelCredentials): gRPC channel credentials used to create a secure gRPC channel. If not specified, SSL credentials will be created using default certificates. Returns: grpc.Client: A gRPC client stub. """
def create_stub(generated_create_stub, channel=None, service_path=None,
                service_port=None, credentials=None, scopes=None,
                ssl_credentials=None):
    """Creates a gRPC client stub.

    Args:
        generated_create_stub (Callable): The generated gRPC method to create a
            stub.
        channel (grpc.Channel): A Channel object through which to make calls.
            If None, a secure channel is constructed; if specified, all
            remaining arguments are ignored.
        service_path (str): The domain name of the API remote host.
        service_port (int): The port on which to connect to the remote host.
        credentials (google.auth.credentials.Credentials): The authorization
            credentials to attach to requests.
        scopes (Sequence[str]): The OAuth scopes for this service; ignored
            when credentials is given.
        ssl_credentials (grpc.ChannelCredentials): gRPC channel credentials
            used to create a secure gRPC channel; defaults to SSL credentials
            from default certificates.

    Returns:
        grpc.Client: A gRPC client stub.
    """
    # An explicit channel wins; everything else is ignored.
    if channel is not None:
        return generated_create_stub(channel)
    if credentials is None:
        credentials = _grpc_google_auth.get_default_credentials(scopes)
    target = '{}:{}'.format(service_path, service_port)
    secure_channel = _grpc_google_auth.secure_authorized_channel(
        credentials, target, ssl_credentials=ssl_credentials)
    return generated_create_stub(secure_channel)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_default_credentials(scopes): """Gets the Application Default Credentials."""
def get_default_credentials(scopes):
    """Gets the Application Default Credentials."""
    # google.auth.default returns (credentials, project_id); the project
    # id is not needed here.
    return google.auth.default(scopes=scopes)[0]
def add_timeout_arg(a_func, timeout, **kwargs):
    """Updates a_func so that it gets called with the timeout as its final arg.

    This converts a callable, a_func, into another callable with an
    additional positional arg.

    Args:
        a_func (callable): a callable to be updated
        timeout (int): to be added to the original callable as it final
            positional arg.
        kwargs: Additional arguments passed through to the callable.

    Returns:
        callable: the original callable updated to the timeout arg
    """
    def inner(*args):
        """Calls ``a_func`` with ``timeout`` appended to ``args``."""
        return a_func(*(args + (timeout,)), **kwargs)

    return inner
def retryable(a_func, retry_options, **kwargs):
    """Creates a function equivalent to a_func, but that retries on certain
    exceptions.

    Args:
        a_func (callable): A callable.
        retry_options (RetryOptions): Configures the exceptions upon which
            the callable should retry, and the parameters to the exponential
            backoff retry algorithm.
        kwargs: Additional arguments passed through to the callable.

    Returns:
        Callable: A function that will retry on exception.
    """
    delay_mult = retry_options.backoff_settings.retry_delay_multiplier
    max_delay_millis = retry_options.backoff_settings.max_retry_delay_millis
    has_timeout_settings = _has_timeout_settings(retry_options.backoff_settings)

    if has_timeout_settings:
        timeout_mult = retry_options.backoff_settings.rpc_timeout_multiplier
        max_timeout = (retry_options.backoff_settings.max_rpc_timeout_millis /
                       _MILLIS_PER_SECOND)
        total_timeout = (retry_options.backoff_settings.total_timeout_millis /
                         _MILLIS_PER_SECOND)

    def inner(*args):
        """Equivalent to ``a_func``, but retries upon transient failure.

        Retrying is done through an exponential backoff algorithm
        configured by the options in ``retry``.
        """
        delay = retry_options.backoff_settings.initial_retry_delay_millis
        # Fixed: the two adjacent string literals previously concatenated
        # without a separating space, producing "anyresponse".
        exc = errors.RetryError('Retry total timeout exceeded before any '
                                'response was received')
        if has_timeout_settings:
            timeout = (
                retry_options.backoff_settings.initial_rpc_timeout_millis /
                _MILLIS_PER_SECOND)

            now = time.time()
            deadline = now + total_timeout
        else:
            timeout = None
            deadline = None

        # When no timeout settings exist, deadline is None and the loop only
        # exits via return or the non-transient raise below.
        while deadline is None or now < deadline:
            try:
                to_call = add_timeout_arg(a_func, timeout, **kwargs)
                return to_call(*args)
            except Exception as exception:  # pylint: disable=broad-except
                code = config.exc_to_code(exception)
                if code not in retry_options.retry_codes:
                    raise errors.RetryError(
                        'Exception occurred in retry method that was not'
                        ' classified as transient', exception)

                exc = errors.RetryError(
                    'Retry total timeout exceeded with exception', exception)

                # Sleep a random number which will, on average, equal the
                # expected delay.
                to_sleep = random.uniform(0, delay * 2)
                time.sleep(to_sleep / _MILLIS_PER_SECOND)
                delay = min(delay * delay_mult, max_delay_millis)

                if has_timeout_settings:
                    now = time.time()
                    timeout = min(
                        timeout * timeout_mult, max_timeout, deadline - now)

        raise exc

    return inner
def create_error(msg, cause=None):
    """Creates a ``GaxError`` or subclass.

    Attributes:
        msg (string): describes the error that occurred.
        cause (Exception, optional): the exception raised by a lower
            layer of the RPC stack (for example, gRPC) that caused this
            exception, or None if this exception originated in GAX.

    Returns:
        .GaxError: The exception that wraps ``cause``.
    """
    # Map the cause's gRPC status code to its canonical name, then pick the
    # matching exception class.
    status_name = config.NAME_STATUS_CODES.get(config.exc_to_code(cause))
    error_class = (
        InvalidArgumentError if status_name == 'INVALID_ARGUMENT'
        else GaxError)
    return error_class(msg, cause=cause)
def get_operation(self, name, options=None):
    """
    Gets the latest state of a long-running operation.  Clients can use
    this method to poll the operation result at intervals as recommended
    by the API service.

    Args:
        name (string): The name of the operation resource.
        options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g, timeout, retries etc.

    Returns:
        A :class:`google.longrunning.operations_pb2.Operation` instance.

    Raises:
        :exc:`google.gax.errors.GaxError` if the RPC is aborted.
        :exc:`ValueError` if the parameters are invalid.
    """
    # Build the request inline and delegate to the wrapped API call.
    return self._get_operation(
        operations_pb2.GetOperationRequest(name=name), options)
def cancel_operation(self, name, options=None):
    """
    Starts asynchronous cancellation on a long-running operation. The
    server makes a best effort to cancel the operation, but success is not
    guaranteed. If the server doesn't support this method, it returns
    ``google.rpc.Code.UNIMPLEMENTED``. Clients can use
    ``Operations.GetOperation`` or other methods to check whether the
    cancellation succeeded or whether the operation completed despite
    cancellation. On successful cancellation, the operation is not deleted;
    instead, it becomes an operation with an ``Operation.error`` value with
    a ``google.rpc.Status.code`` of 1, corresponding to ``Code.CANCELLED``.

    Args:
        name (string): The name of the operation resource to be cancelled.
        options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g, timeout, retries etc.

    Raises:
        :exc:`google.gax.errors.GaxError` if the RPC is aborted.
        :exc:`ValueError` if the parameters are invalid.
    """
    # Build the request inline; this RPC has no meaningful return value.
    self._cancel_operation(
        operations_pb2.CancelOperationRequest(name=name), options)
def delete_operation(self, name, options=None):
    """
    Deletes a long-running operation. This method indicates that the
    client is no longer interested in the operation result. It does not
    cancel the operation. If the server doesn't support this method, it
    returns ``google.rpc.Code.UNIMPLEMENTED``.

    Args:
        name (string): The name of the operation resource to be deleted.
        options (:class:`google.gax.CallOptions`): Overrides the default
            settings for this call, e.g, timeout, retries etc.

    Raises:
        :exc:`google.gax.errors.GaxError` if the RPC is aborted.
        :exc:`ValueError` if the parameters are invalid.
    """
    # Build the request inline; this RPC has no meaningful return value.
    self._delete_operation(
        operations_pb2.DeleteOperationRequest(name=name), options)
def check_oneof(**kwargs):
    """Raise ValueError if more than one keyword argument is not none.

    Args:
        kwargs (dict): The keyword arguments sent to the function.

    Returns:
        None

    Raises:
        ValueError: If more than one entry in kwargs is not none.
    """
    # No keyword arguments at all is trivially valid.
    if not kwargs:
        return None

    provided = [name for name, val in kwargs.items() if val is not None]
    if len(provided) > 1:
        # The message deliberately lists every candidate field, not just
        # the ones that were set.
        raise ValueError('Only one of {fields} should be set.'.format(
            fields=', '.join(sorted(kwargs.keys())),
        ))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _bundleable(desc): """Creates a function that transforms an API call into a bundling call. It transform a_func from an API call that receives the requests and returns the response into a callable that receives the same request, and returns a :class:`bundling.Event`. The returned Event object can be used to obtain the eventual result of the bundled call. Args: desc (gax.BundleDescriptor): describes the bundling that a_func supports. Returns: Callable: takes the API call's request and keyword args and returns a bundling.Event object. """
def inner(a_func, settings, request, **kwargs): """Schedules execution of a bundling task.""" if not settings.bundler: return a_func(request, **kwargs) the_id = bundling.compute_bundle_id( request, desc.request_discriminator_fields) return settings.bundler.schedule(a_func, the_id, desc, request, kwargs) return inner
def _page_streamable(page_descriptor):
    """Creates a function that yields an iterable to performs page-streaming.

    Args:
        page_descriptor (:class:`PageDescriptor`): indicates the structure
            of page streaming to be performed.

    Returns:
        Callable: A function that returns an iterator.
    """
    def inner(a_func, settings, request, **kwargs):
        """Actual page-streaming based on the settings."""
        pages = gax.PageIterator(
            a_func, page_descriptor, settings.page_token, request, **kwargs)
        # flatten_pages yields individual resources instead of whole pages.
        if not settings.flatten_pages:
            return pages
        return gax.ResourceIterator(pages)

    return inner
def construct_settings(
        service_name, client_config, config_override,
        retry_names, bundle_descriptors=None, page_descriptors=None,
        metrics_headers=(), kwargs=None):
    """Constructs a dictionary mapping method names to _CallSettings.

    The ``client_config`` parameter is parsed from a standard API client
    configuration JSON file whose ``interfaces.<service_name>`` entry
    declares per-method ``retry_codes_name``, ``retry_params_name``,
    ``timeout_millis`` and optional ``bundling`` options.

    Args:
        service_name (str): The fully-qualified name of this service, used
            as a key into the client config file (e.g.
            ``google.fake.v1.ServiceName``).
        client_config (dict): A dictionary parsed from the standard API
            client config file.
        config_override (dict): A dictionary in the same structure of
            client_config to override the settings. Usually client_config is
            supplied from the default config and config_override will be
            specified by users.
        retry_names (Mapping[str, object]): A dictionary mapping the strings
            referring to response status codes to the Python objects
            representing those codes.
        bundle_descriptors (Mapping[str, BundleDescriptor]): A dictionary of
            method names to BundleDescriptor objects for methods that are
            bundling-enabled.
        page_descriptors (Mapping[str, PageDescriptor]): A dictionary of
            method names to PageDescriptor objects for methods that are
            page streaming-enabled.
        metrics_headers (Mapping[str, str]): Dictionary of headers to be
            passed for analytics. Sent as a dictionary; eventually becomes a
            space-separated string (e.g. 'foo/1.0.0 bar/3.14.1').
        kwargs (dict): The keyword arguments to be passed to the API calls.

    Returns:
        dict: A dictionary mapping method names to _CallSettings.

    Raises:
        KeyError: If the configuration for the service in question cannot be
            located in the provided ``client_config``.
    """
    # pylint: disable=too-many-locals
    # pylint: disable=protected-access
    defaults = {}
    bundle_descriptors = bundle_descriptors or {}
    page_descriptors = page_descriptors or {}
    kwargs = kwargs or {}

    # Remove any x-goog-api-client header that may have been present
    # in the caller-supplied metadata list; a fresh one is appended below.
    if 'metadata' in kwargs:
        kwargs['metadata'] = [value for value in kwargs['metadata']
                              if value[0].lower() != 'x-goog-api-client']

    # Fill out the metrics headers with GAX and GRPC info, and convert
    # to a string in the format that the GRPC layer expects.
    kwargs.setdefault('metadata', [])
    kwargs['metadata'].append(
        ('x-goog-api-client', metrics.stringify(metrics.fill(metrics_headers)))
    )

    try:
        service_config = client_config['interfaces'][service_name]
    except KeyError:
        raise KeyError('Client configuration not found for service: {}'
                       .format(service_name))

    overrides = config_override.get('interfaces', {}).get(service_name, {})

    # Fixed: ``service_config.get('methods')`` could return None and crash
    # the iteration when the config declares no methods; default to {}.
    for method in service_config.get('methods', {}):
        method_config = service_config['methods'][method]
        overriding_method = overrides.get('methods', {}).get(method, {})
        snake_name = _upper_camel_to_lower_under(method)

        # Per-method timeout: the override wins only when it is set and
        # truthy; stored internally in seconds.
        if overriding_method and overriding_method.get('timeout_millis'):
            timeout = overriding_method['timeout_millis']
        else:
            timeout = method_config['timeout_millis']
        timeout /= _MILLIS_PER_SECOND

        bundle_descriptor = bundle_descriptors.get(snake_name)
        bundling_config = method_config.get('bundling', None)
        if overriding_method and 'bundling' in overriding_method:
            bundling_config = overriding_method['bundling']
        bundler = _construct_bundling(bundling_config, bundle_descriptor)

        # Base retry settings merged with any user overrides.
        retry_options = _merge_retry_options(
            _construct_retry(method_config, service_config['retry_codes'],
                             service_config['retry_params'], retry_names),
            _construct_retry(overriding_method, overrides.get('retry_codes'),
                             overrides.get('retry_params'), retry_names))

        defaults[snake_name] = gax._CallSettings(
            timeout=timeout, retry=retry_options,
            page_descriptor=page_descriptors.get(snake_name), bundler=bundler,
            bundle_descriptor=bundle_descriptor, kwargs=kwargs)
    return defaults
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _catch_errors(a_func, to_catch): """Updates a_func to wrap exceptions with GaxError Args: a_func (callable): A callable. to_catch (list[Exception]): Configures the exceptions to wrap. Returns: Callable: A function that will wrap certain exceptions with GaxError """
def inner(*args, **kwargs): """Wraps specified exceptions""" try: return a_func(*args, **kwargs) # pylint: disable=catching-non-exception except tuple(to_catch) as exception: utils.raise_with_traceback( gax.errors.create_error('RPC failed', cause=exception)) return inner
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_api_call(func, settings): """Converts an rpc call into an API call governed by the settings. In typical usage, ``func`` will be a callable used to make an rpc request. This will mostly likely be a bound method from a request stub used to make an rpc call. The result is created by applying a series of function decorators defined in this module to ``func``. ``settings`` is used to determine which function decorators to apply. The result is another callable which for most values of ``settings`` has has the same signature as the original. Only when ``settings`` configures bundling does the signature change. Args: func (Callable[Sequence[object], object]): is used to make a bare rpc call. settings (_CallSettings): provides the settings for this call Returns: Callable[Sequence[object], object]: a bound method on a request stub used to make an rpc call Raises: ValueError: if ``settings`` has incompatible values, e.g, if bundling and page_streaming are both configured """
def base_caller(api_call, _, *args):
    """Simply call api_call and ignore settings."""
    return api_call(*args)

def inner(request, options=None):
    """Invoke with the actual settings."""
    # Merge per-call options (and their metadata) into the baseline
    # settings for this single invocation.
    this_options = _merge_options_metadata(options, settings)
    this_settings = settings.merge(this_options)

    # Wrap the raw rpc callable with retry when retry codes are
    # configured; otherwise just bind the timeout as the final argument.
    if this_settings.retry and this_settings.retry.retry_codes:
        api_call = gax.retry.retryable(
            func, this_settings.retry, **this_settings.kwargs)
    else:
        api_call = gax.retry.add_timeout_arg(
            func, this_settings.timeout, **this_settings.kwargs)
    # Translate transport-level exceptions into GaxError.
    api_call = _catch_errors(api_call, gax.config.API_ERRORS)
    # ``api_caller`` is selected once below (paging / bundling / plain)
    # and closed over by this inner function.
    return api_caller(api_call, this_settings, request)

if settings.page_descriptor:
    # Bundling and page streaming cannot be combined on one method.
    if settings.bundler and settings.bundle_descriptor:
        raise ValueError('The API call has incompatible settings: '
                         'bundling and page streaming')
    api_caller = _page_streamable(settings.page_descriptor)
elif settings.bundler and settings.bundle_descriptor:
    api_caller = _bundleable(settings.bundle_descriptor)
else:
    api_caller = base_caller

return inner
def get(pb_or_dict, key, default=_SENTINEL):
    """Retrieve the given key off of the object.

    If a default is specified, return it if the key is not found, otherwise
    raise KeyError.

    Args:
        pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
            object.
        key (str): The key to retrieve from the object in question.
        default (Any): If the key is not present on the object, and a
            default is set, returns that default instead. A type-appropriate
            falsy default is generally recommended, as protobuf messages
            almost always have default values for unset values and it is not
            always possible to tell the difference between a falsy value and
            an unset one. If no default is set, raises KeyError for not
            found values.

    Returns:
        Any: The return value from the underlying message or dict.

    Raises:
        KeyError: If the key is not found. Note that, for unset values,
            messages and dictionaries may not have consistent behavior.
        TypeError: If pb_or_dict is not a Message or Mapping.
    """
    # We may need to get a nested key. Resolve this.
    key, subkey = _resolve_subkeys(key)

    # Attempt to get the value from the two types of objects we know about.
    # If we get something else, complain.
    if isinstance(pb_or_dict, Message):
        answer = getattr(pb_or_dict, key, default)
    elif isinstance(pb_or_dict, collections.Mapping):
        answer = pb_or_dict.get(key, default)
    else:
        # Fixed: the message previously contained a bare '%s' that was
        # never interpolated; format it with the key, as ``set`` does.
        raise TypeError('Tried to fetch a key %s on an invalid object; '
                        'expected a dict or protobuf message.' % key)

    # If the object we got back is our sentinel, raise KeyError; this is
    # a "not found" case.
    if answer is _SENTINEL:
        raise KeyError(key)

    # If a subkey exists, call this method recursively against the answer.
    if subkey and answer is not default:
        return get(answer, subkey, default=default)

    # Return the value.
    return answer
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set(pb_or_dict, key, value): """Set the given key on the object. Args: pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key on the object in question. value (Any): The value to set. Raises: TypeError: If pb_or_dict is not a Message or Mapping. """
# pylint: disable=redefined-builtin,too-many-branches # redefined-builtin: We want 'set' to be part of the public interface. # too-many-branches: This method is inherently complex. # Sanity check: Is our target object valid? if not isinstance(pb_or_dict, (collections.MutableMapping, Message)): raise TypeError('Tried to set a key %s on an invalid object; ' 'expected a dict or protobuf message.' % key) # We may be setting a nested key. Resolve this. key, subkey = _resolve_subkeys(key) # If a subkey exists, then get that object and call this method # recursively against it using the subkey. if subkey is not None: if isinstance(pb_or_dict, collections.MutableMapping): pb_or_dict.setdefault(key, {}) set(get(pb_or_dict, key), subkey, value) return # Attempt to set the value on the types of objects we know how to deal # with. if isinstance(pb_or_dict, collections.MutableMapping): pb_or_dict[key] = value elif isinstance(value, (collections.MutableSequence, tuple)): # Clear the existing repeated protobuf message of any elements # currently inside it. while getattr(pb_or_dict, key): getattr(pb_or_dict, key).pop() # Write our new elements to the repeated field. for item in value: if isinstance(item, collections.Mapping): getattr(pb_or_dict, key).add(**item) else: getattr(pb_or_dict, key).extend([item]) elif isinstance(value, collections.Mapping): # Assign the dictionary values to the protobuf message. for item_key, item_value in value.items(): set(getattr(pb_or_dict, key), item_key, item_value) elif isinstance(value, Message): # Assign the protobuf message values to the protobuf message. for item_key, item_value in value.ListFields(): set(getattr(pb_or_dict, key), item_key.name, item_value) else: setattr(pb_or_dict, key, value)
def setdefault(pb_or_dict, key, value):
    """Set the key on the object to the value if the current value is falsy.

    Because protobuf Messages do not distinguish between unset values and
    falsy ones particularly well, this method treats any falsy value
    (e.g. 0, empty list) as a target to be overwritten, on both Messages
    and dictionaries.

    Args:
        pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
            object.
        key (str): The key on the object in question.
        value (Any): The value to set.

    Raises:
        TypeError: If pb_or_dict is not a Message or Mapping.
    """
    # Any falsy current value (including protobuf defaults) is overwritten.
    current = get(pb_or_dict, key, default=None)
    if not current:
        set(pb_or_dict, key, value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _resolve_subkeys(key, separator='.'): """Given a key which may actually be a nested key, return the top level key and any nested subkeys as separate values. Args: key (str): A string that may or may not contain the separator. separator (str): The namespace separator. Defaults to `.`. Returns: Tuple[str, str]: The key and subkey(s). """
subkey = None if separator in key: index = key.index(separator) subkey = key[index + 1:] key = key[:index] return key, subkey
def merge(self, options):
    """Returns new _CallSettings merged from this and a CallOptions object.

    Note that passing if the CallOptions instance specifies a page_token,
    the merged _CallSettings will have ``flatten_pages`` disabled. This
    permits toggling per-resource/per-page page streaming.

    Args:
        options (CallOptions): an instance whose values override those in
            this object. If None, ``merge`` returns a copy of this object

    Returns:
        CallSettings: The merged settings and options.
    """
    if not options:
        # No overrides at all: return a plain copy of this settings object.
        return _CallSettings(
            timeout=self.timeout, retry=self.retry,
            page_descriptor=self.page_descriptor,
            page_token=self.page_token, bundler=self.bundler,
            bundle_descriptor=self.bundle_descriptor,
            kwargs=self.kwargs)

    def _pick(option_value, current_value):
        """Returns the override unless it is the inherit sentinel."""
        if option_value == OPTION_INHERIT:
            return current_value
        return option_value

    timeout = _pick(options.timeout, self.timeout)
    retry = _pick(options.retry, self.retry)
    page_token = _pick(options.page_token, self.page_token)
    # Bundling can only be kept, never introduced, by per-call options.
    bundler = self.bundler if options.is_bundling else None

    if options.kwargs == OPTION_INHERIT:
        kwargs = self.kwargs
    else:
        kwargs = self.kwargs.copy()
        kwargs.update(options.kwargs)

    return _CallSettings(
        timeout=timeout, retry=retry,
        page_descriptor=self.page_descriptor, page_token=page_token,
        bundler=bundler, bundle_descriptor=self.bundle_descriptor,
        kwargs=kwargs)
def cancel(self):
    """If last Operation's value of `done` is true, returns false;
    otherwise, issues OperationsClient.cancel_operation and returns true.
    """
    already_done = self.done()
    if not already_done:
        # Still running: ask the server to cancel it.
        self._client.cancel_operation(self._operation.name)
    return not already_done
def result(self, timeout=None):
    """Enters polling loop on OperationsClient.get_operation, and once
    Operation.done is true, then returns Operation.response if successful
    or throws GaxError if not successful.

    This method will wait up to timeout seconds. If the call hasn't
    completed in timeout seconds, then a RetryError will be raised. timeout
    can be an int or float. If timeout is not specified or None, there is
    no limit to the wait time.
    """
    completed = self._poll(timeout)
    if completed.HasField('response'):
        # Success: unpack the Any payload into the expected result type.
        return _from_any(self._result_type, self._operation.response)
    # No response field set means the operation finished with an error.
    raise GaxError(self._operation.error.message)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_done_callback(self, fn): # pylint: disable=invalid-name """Enters a polling loop on OperationsClient.get_operation, and once the operation is done or cancelled, calls the function with this _OperationFuture. Added callables are called in the order that they were added. """
if self._operation.done:
    # Already finished: invoke the callback immediately (errors are
    # handled inside _try_callback).
    _try_callback(self, fn)
else:
    # Serialize the callback with dill so it can cross the process
    # boundary (plain pickle cannot handle arbitrary callables).
    self._queue.put(dill.dumps(fn))
    # Lazily start a single worker process that drains the queue once
    # the operation completes.
    if self._process is None:
        self._process = mp.Process(target=self._execute_tasks)
        self._process.start()
def get_sql(self):
    """
    Generates the JOIN sql for the join tables and join condition

    :rtype: str
    :return: the JOIN sql for the join tables and join condition
    """
    # e.g. "INNER JOIN mytable ON a.id = b.a_id"
    join_part = '{0} {1}'.format(self.join_type, self.right_table.get_sql())
    return '{0} ON {1}'.format(join_part, self.get_condition())
def set_left_table(self, left_table=None):
    """
    Sets the left table for this join clause. If no table is specified, the
    first table in the query will be used

    :type left_table: str or dict or :class:`Table <querybuilder.tables.Table>`
        or None
    :param left_table: The left table being joined with. This can be a string
        of the table name, a dict of {'alias': table}, or a ``Table``
        instance. Defaults to the first table in the query.
    """
    if not left_table:
        # No table given: fall back to the query's first table.
        self.left_table = self.get_left_table()
        return
    self.left_table = TableFactory(table=left_table, owner=self.owner)
def get_left_table(self):
    """
    Returns the left table if one was specified, otherwise the first table
    in the query is returned

    :rtype: :class:`Table <querybuilder.tables.Table>`
    :return: the left table if one was specified, otherwise the first table
        in the query
    """
    if self.left_table:
        return self.left_table
    # Fall back to the owning query's first table (None when it has none).
    return self.owner.tables[0] if self.owner.tables else None
def get_all_related_objects(self, table):
    """
    Fix for django 1.10 to replace deprecated code. Keep support for
    django 1.7
    """
    meta = table.model._meta
    if hasattr(meta, 'get_all_related_objects'):
        # Django 1.7: the old API still exists.
        return meta.get_all_related_objects()
    # Django > 1.7: reconstruct the equivalent list from get_fields().
    return [
        f for f in meta.get_fields()
        if (f.one_to_many or f.one_to_one) and f.auto_created and not f.concrete
    ]
def set_right_table(self, table):
    """
    Sets the right table for this join clause and try to automatically set
    the condition if one isn't specified
    """
    self.right_table = table
    if self.left_table is None:
        return

    # Prefix detection only applies when both sides are model-backed tables.
    if not (type(self.left_table) is ModelTable and
            type(self.right_table) is ModelTable):
        return

    # First, check whether this join is over a reverse (related) field on
    # the left model that points at the right model.
    for related in self.get_all_related_objects(self.left_table):
        related_model = related.model
        if hasattr(related, 'related_model'):
            related_model = related.related_model
        if related_model != self.right_table.model:
            continue
        if self.right_table.field_prefix is None:
            prefix = related.get_accessor_name()
            # Strip Django's default '_set' accessor suffix.
            if len(prefix) > 4 and prefix[-4:] == '_set':
                prefix = prefix[:-4]
            self.right_table.field_prefix = prefix
        return

    # Otherwise, check whether this join is over a foreign key on the left
    # model pointing at the right model.
    for model_field in self.left_table.model._meta.fields:
        internal_type = model_field.get_internal_type()
        if internal_type not in ('OneToOneField', 'ForeignKey'):
            continue
        if model_field.remote_field.model != self.right_table.model:
            continue
        if self.right_table.field_prefix is None:
            self.right_table.field_prefix = model_field.name
        return
def get_condition(self):
    """
    Determines the condition to be used in the condition part of the join
    sql.

    :return: The condition for the join clause
    :rtype: str or None
    """
    if self.condition:
        return self.condition

    # Fixed: the guard previously tested ``type(self.right_table)`` twice,
    # so the left table's type was never checked. Both sides must be
    # ModelTable instances for automatic condition detection (this mirrors
    # the guard in ``set_right_table``).
    if type(self.left_table) is ModelTable and type(self.right_table) is ModelTable:
        # check if this join type is for a related field on the right model
        for field in self.get_all_related_objects(self.right_table):
            related_model = field.model
            if hasattr(field, 'related_model'):
                related_model = field.related_model
            if related_model == self.left_table.model:
                table_join_field = field.field.column
                # join right.pk to left.<fk column>
                condition = '{0}.{1} = {2}.{3}'.format(
                    self.right_table.get_identifier(),
                    self.right_table.model._meta.pk.name,
                    self.left_table.get_identifier(),
                    table_join_field,
                )
                return condition

        # check if this join type is for a foreign key on the right model
        for field in self.right_table.model._meta.fields:
            if (
                field.get_internal_type() == 'OneToOneField' or
                field.get_internal_type() == 'ForeignKey'
            ):
                if field.remote_field.model == self.left_table.model:
                    table_join_field = field.column
                    # join right.<fk column> to left.pk
                    condition = '{0}.{1} = {2}.{3}'.format(
                        self.right_table.get_identifier(),
                        table_join_field,
                        self.left_table.get_identifier(),
                        self.left_table.model._meta.pk.name
                    )
                    return condition
    return None
def get_sql(self):
    """
    Builds and returns the WHERE portion of the sql

    :return: the WHERE portion of the sql
    :rtype: str
    """
    # reset the parameter bookkeeping before building
    self.arg_index = 0
    self.args = {}

    if not self.wheres:
        return ''
    return 'WHERE {0} '.format(self.build_where_part(self.wheres))
def get_condition_value(self, operator, value):
    """
    Gets the condition value based on the operator and value

    :param operator: the condition operator name
    :type operator: str
    :param value: the value to be formatted based on the condition operator
    :type value: object

    :return: the comparison operator from the Where class's comparison_map
    :rtype: str
    """
    # LIKE-style operators need wildcard decoration; anything else passes
    # through unchanged.
    if operator == 'startswith':
        return '{0}%'.format(value)
    if operator in ('contains', 'icontains'):
        return '%{0}%'.format(value)
    return value
def set_arg(self, value):
    """
    Set the query param in self.args based on the prefix and arg index and
    auto increment the arg_index

    :return: the string placeholder for the arg
    :rtype: str
    """
    placeholder = '{0}A{1}'.format(self.arg_prefix, self.arg_index)
    self.arg_index += 1
    self.args[placeholder] = value
    return placeholder
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_name(self, use_alias=True): """ Gets the name to reference the sorted field :return: the name to reference the sorted field :rtype: str """
if self.desc: direction = 'DESC' else: direction = 'ASC' if use_alias: return '{0} {1}'.format(self.field.get_identifier(), direction) return '{0} {1}'.format(self.field.get_select_sql(), direction)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sql(self): """ Generates the sql used for the limit clause of a Query :return: the sql for the limit clause of a Query :rtype: str """
sql = '' if self.limit and self.limit > 0: sql += 'LIMIT {0} '.format(self.limit) if self.offset and self.offset > 0: sql += 'OFFSET {0} '.format(self.offset) return sql
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_defaults(self): """ Sets the default values for this instance """
# cached generated sql string for this query
self.sql = ''
# FROM tables
self.tables = []
# Join objects applied to this query
self.joins = []
# WHERE clause builder
self._where = Where()
# GROUP BY entries
self.groups = []
# ORDER BY entries
self.sorters = []
# LIMIT/OFFSET holder, if any
self._limit = None
# prefix used to namespace auto-generated table aliases
self.table_prefix = ''
# presumably marks this query as nested inside another — TODO confirm
self.is_inner = False
# WITH (CTE) tables
self.with_tables = []
# True when SELECT DISTINCT is requested
self._distinct = False
# DISTINCT ON fields
self.distinct_ons = []
# ordered field names used by insert/update statements
self.field_names = []
# primary key name used by bulk update statements
self.field_names_pk = None
# value rows used by insert/update statements
self.values = []
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def from_table(self, table=None, fields='*', schema=None, **kwargs): """ Adds a ``Table`` and any optional fields to the list of tables this query is selecting from. :type table: str or dict or :class:`Table <querybuilder.tables.Table>` or :class:`Query <querybuilder.query.Query>` or :class:`ModelBase <django:django.db.models.base.ModelBase>` :param table: The table to select fields from. This can be a string of the table name, a dict of {'alias': table}, a ``Table`` instance, a Query instance, or a django Model instance :type fields: str or tuple or list or Field :param fields: The fields to select from ``table``. Defaults to '*'. This can be a single field, a tuple of fields, or a list of fields. Each field can be a string or ``Field`` instance :type schema: str :param schema: This is not implemented, but it will be a string of the db schema name :param kwargs: Any additional parameters to be passed into the constructor of ``TableFactory`` :return: self :rtype: :class:`Query <querybuilder.query.Query>` """
# self.mark_dirty() self.tables.append(TableFactory( table=table, fields=fields, schema=schema, owner=self, **kwargs )) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def insert_into(self, table=None, field_names=None, values=None, **kwargs): """ Bulk inserts a list of values into a table :type table: str or dict or :class:`Table <querybuilder.tables.Table>` or :class:`Query <querybuilder.query.Query>` or :class:`ModelBase <django:django.db.models.base.ModelBase>` :param table: The table to select fields from. This can be a string of the table name, a dict of {'alias': table}, a ``Table`` instance, a Query instance, or a django Model instance :type field_names: list :param field_names: A list of ordered field names that relate to the data in the values list :type values: list of list :param values: A list each values list with the values in the same order as the field names :param kwargs: Any additional parameters to be passed into the constructor of ``TableFactory`` :return: self :rtype: :class:`Query <querybuilder.query.Query>` """
table = TableFactory( table=table, **kwargs ) self.tables.append(table) self.field_names = field_names self.values = values return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_table(self, table=None, field_names=None, values=None, pk=None, **kwargs): """ Bulk updates rows in a table :type table: str or dict or :class:`Table <querybuilder.tables.Table>` or :class:`Query <querybuilder.query.Query>` or :class:`ModelBase <django:django.db.models.base.ModelBase>` :param table: The table to select fields from. This can be a string of the table name, a dict of {'alias': table}, a ``Table`` instance, a Query instance, or a django Model instance :type field_names: list :param field_names: A list of ordered field names that relate to the data in the values list :type values: list of list :param values: A list each values list with the values in the same order as the field names :type pk: int :param pk: The name of the primary key in the table and field_names :param kwargs: Any additional parameters to be passed into the constructor of ``TableFactory`` :rtype: :class:`Query <querybuilder.query.Query>` :return: self """
table = TableFactory( table=table, **kwargs ) self.tables.append(table) self.field_names = field_names self.values = values self.field_names_pk = pk
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def join(self, right_table=None, fields=None, condition=None, join_type='JOIN', schema=None, left_table=None, extract_fields=True, prefix_fields=False, field_prefix=None, allow_duplicates=False): """ Joins a table to another table based on a condition and adds fields from the joined table to the returned fields. :type right_table: str or dict or :class:`Table <querybuilder.tables.Table>` :param right_table: The table being joined with. This can be a string of the table name, a dict of {'alias': table}, or a ``Table`` instance :type fields: str or tuple or list or :class:`Field <querybuilder.fields.Field>` :param fields: The fields to select from ``right_table``. Defaults to `None`. This can be a single field, a tuple of fields, or a list of fields. Each field can be a string or ``Field`` instance :type condition: str :param condition: The join condition specifying the fields being joined. If the two tables being joined are instances of ``ModelTable`` then the condition should be created automatically. :type join_type: str :param join_type: The type of join (JOIN, LEFT JOIN, INNER JOIN, etc). Defaults to 'JOIN' :type schema: str :param schema: This is not implemented, but it will be a string of the db schema name :type left_table: str or dict or Table :param left_table: The left table being joined with. This can be a string of the table name, a dict of {'alias': table}, or a ``Table`` instance. Defaults to the first table in the query. :type extract_fields: bool :param extract_fields: If True and joining with a ``ModelTable``, then '*' fields will be converted to individual fields for each column in the table. Defaults to True. :type prefix_fields: bool :param prefix_fields: If True, then the joined table will have each of its field names prefixed with the field_prefix. 
If no field_prefix is specified, a name will be generated based on the join field name. This is usually used with nesting results in order to create models in python or javascript. Defaults to False. :type field_prefix: str :param field_prefix: The field prefix to be used in front of each field name if prefix_fields is set to True. If no field_prefix is set, one will be automatically created based on the join field name. :rtype: :class:`Query <querybuilder.query.Query>` :return: self """
# self.mark_dirty() # TODO: fix bug when joining from simple table to model table with no condition # it assumes left_table.model # if there is no left table, assume the query's first table # TODO: add test for auto left table to replace old auto left table # if left_table is None and len(self.tables): # left_table = self.tables[0] # left_table = TableFactory(left_table) # right_table = TableFactory(right_table) # create the join item new_join_item = Join( left_table=left_table, right_table=right_table, fields=fields, condition=condition, join_type=join_type, schema=schema, owner=self, extract_fields=extract_fields, prefix_fields=prefix_fields, field_prefix=field_prefix, ) # check if this table is already joined upon # TODO: add test for this if allow_duplicates is False: for join_item in self.joins: if join_item.right_table.get_identifier() == new_join_item.right_table.get_identifier(): if join_item.left_table.get_identifier() == new_join_item.left_table.get_identifier(): return self self.joins.append(new_join_item) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def join_left(self, right_table=None, fields=None, condition=None, join_type='LEFT JOIN', schema=None, left_table=None, extract_fields=True, prefix_fields=False, field_prefix=None, allow_duplicates=False): """ Wrapper for ``self.join`` with a default join of 'LEFT JOIN' :type right_table: str or dict or :class:`Table <querybuilder.tables.Table>` :param right_table: The table being joined with. This can be a string of the table name, a dict of {'alias': table}, or a ``Table`` instance :type fields: str or tuple or list or :class:`Field <querybuilder.fields.Field>` :param fields: The fields to select from ``right_table``. Defaults to `None`. This can be a single field, a tuple of fields, or a list of fields. Each field can be a string or ``Field`` instance :type condition: str :param condition: The join condition specifying the fields being joined. If the two tables being joined are instances of ``ModelTable`` then the condition should be created automatically. :type join_type: str :param join_type: The type of join (JOIN, LEFT JOIN, INNER JOIN, etc). Defaults to 'JOIN' :type schema: str :param schema: This is not implemented, but it will be a string of the db schema name :type left_table: str or dict or :class:`Table <querybuilder.tables.Table>` :param left_table: The left table being joined with. This can be a string of the table name, a dict of {'alias': table}, or a ``Table`` instance. Defaults to the first table in the query. :type extract_fields: bool :param extract_fields: If True and joining with a ``ModelTable``, then '*' fields will be converted to individual fields for each column in the table. Defaults to True. :type prefix_fields: bool :param prefix_fields: If True, then the joined table will have each of its field names prefixed with the field_prefix. 
If no field_prefix is specified, a name will be generated based on the join field name. This is usually used with nesting results in order to create models in python or javascript. Defaults to False. :type field_prefix: str :param field_prefix: The field prefix to be used in front of each field name if prefix_fields is set to True. If no field_prefix is set, one will be automatically created based on the join field name. :return: self :rtype: :class:`Query <querybuilder.query.Query>` """
return self.join( right_table=right_table, fields=fields, condition=condition, join_type=join_type, schema=schema, left_table=left_table, extract_fields=extract_fields, prefix_fields=prefix_fields, field_prefix=field_prefix, allow_duplicates=allow_duplicates )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def where(self, q=None, where_type='AND', **kwargs): """ Adds a where condition as a Q object to the query's ``Where`` instance. :type q: :class:`Q <django:django.db.models.Q>` :param q: A django ``Q`` instance. This will be added to the query's ``Where`` object. If no Q object is passed, the kwargs will be examined for params to be added to Q objects :param where_type: str :param where_type: The connection type of the where condition ('AND', 'OR') :return: self :rtype: :class:`Query <querybuilder.query.Query>` """
# self.mark_dirty() if q is not None: self._where.wheres.add(q, where_type) if len(kwargs): for key, value in kwargs.items(): q = Q(**{ key: value }) self._where.wheres.add(q, where_type) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def group_by(self, field=None, table=None, allow_duplicates=False): """ Adds a group by clause to the query by adding a ``Group`` instance to the query's groups list :type field: str or dict or :class:`Field <querybuilder.fields.Field>` :param field: This can be a string of a field name, a dict of {'alias': field}, or a ``Field`` instance :type table: str or dict or :class:`Table <querybuilder.table.Table>` :param table: Optional. This can be a string of a table name, a dict of {'alias': table}, or a ``Table`` instance. A table only needs to be supplied in more complex queries where the field name is ambiguous. :return: self :rtype: :class:`Query <querybuilder.query.Query>` """
new_group_item = Group( field=field, table=table, ) if allow_duplicates is False: for group_item in self.groups: if group_item.field.get_identifier() == new_group_item.field.get_identifier(): return self self.groups.append(new_group_item) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def order_by(self, field=None, table=None, desc=False): """ Adds an order by clause to the query by adding a ``Sorter`` instance to the query's sorters list :type field: str or dict or :class:`Field <querybuilder.fields.Field>` :param field: This can be a string of a field name, a dict of {'alias': field}, or a ``Field`` instance :type table: str or dict or :class:`Table <querybuilder.table.Table>` :param table: Optional. This can be a string of a table name, a dict of {'alias': table}, or a ``Table`` instance. A table only needs to be supplied in more complex queries where the field name is ambiguous. :type desc: bool :param desc: Set to True to sort by this field in DESC order or False to sort by this field in ASC order. Defaults to False. :rtype: :class:`Query <querybuilder.query.Query>` :return: self """
self.sorters.append(Sorter( field=field, table=table, desc=desc )) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_name_collisions(self): """ Checks if there are any tables referenced by the same identifier and updated the auto_alias accordingly. This is called when generating the sql for a query and should only be called internally. """
table_index = 0
table_names = {}
for table in self.tables + self.with_tables:
    # candidate auto alias: query prefix + positional T<n>
    table_prefix = 'T{0}'.format(table_index)
    auto_alias = '{0}{1}'.format(self.table_prefix, table_prefix)
    identifier = table.get_identifier()
    # alias the table when its identifier is missing or already taken
    if identifier is None or identifier in table_names:
        table.auto_alias = auto_alias
    # NOTE(review): recorded for every table, not only aliased ones —
    # confirm this placement against the upstream source
    table_names[identifier] = True

    # prefix inner query args and update self args
    if type(table) is QueryTable:
        table.query.prefix_args(auto_alias)
        table.query.table_prefix = auto_alias
    table_index += 1
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sql(self, debug=False, use_cache=True): """ Generates the sql for this query and returns the sql as a string. :type debug: bool :param debug: If True, the sql will be returned in a format that is easier to read and debug. Defaults to False :type use_cache: bool :param use_cache: If True, the query will returned the cached sql if it exists rather then generating the sql again. If False, the sql will be generated again. Defaults to True. :rtype: str :return: The generated sql for this query """
# TODO: enable caching # if self.sql and use_cache and not debug: # return self.sql # auto alias any naming collisions self.check_name_collisions() # if debugging, return the debug formatted sql if debug: return self.format_sql() # build each part of the query sql = '' sql += self.build_withs() sql += self.build_select_fields() sql += self.build_from_table() sql += self.build_joins() sql += self.build_where() sql += self.build_groups() sql += self.build_order_by() sql += self.build_limit() # remove any whitespace from the beginning and end of the sql self.sql = sql.strip() return self.sql
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_sql(self): """ Builds the sql in a format that is easy for humans to read and debug :return: The formatted sql for this query :rtype: str """
# TODO: finish adding the other parts of the sql generation sql = '' # build SELECT select_segment = self.build_select_fields() select_segment = select_segment.replace('SELECT ', '', 1) fields = [field.strip() for field in select_segment.split(',')] sql += 'SELECT\n\t{0}\n'.format(',\n\t'.join(fields)) # build FROM from_segment = self.build_from_table() from_segment = from_segment.replace('FROM ', '', 1) tables = [table.strip() for table in from_segment.split(',')] sql += 'FROM\n\t{0}\n'.format(',\n\t'.join(tables)) # build ORDER BY order_by_segment = self.build_order_by() if len(order_by_segment): order_by_segment = order_by_segment.replace('ORDER BY ', '', 1) sorters = [sorter.strip() for sorter in order_by_segment.split(',')] sql += 'ORDER BY\n\t{0}\n'.format(',\n\t'.join(sorters)) # build LIMIT limit_segment = self.build_limit() if len(limit_segment): if 'LIMIT' in limit_segment: limit_segment = limit_segment.replace('LIMIT ', 'LIMIT\n\t', 1) if 'OFFSET' in limit_segment: limit_segment = limit_segment.replace('OFFSET ', '\nOFFSET\n\t', 1) elif 'OFFSET' in limit_segment: limit_segment = limit_segment.replace('OFFSET ', 'OFFSET\n\t', 1) sql += limit_segment return sql
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_select_fields(self): """ Generates the sql for the SELECT portion of the query :return: the SELECT portion of the query :rtype: str """
field_sql = [] # get the field sql for each table for table in self.tables: field_sql += table.get_field_sql() # get the field sql for each join table for join_item in self.joins: field_sql += join_item.right_table.get_field_sql() # combine all field sql separated by a comma sql = 'SELECT {0}{1} '.format(self.get_distinct_sql(), ', '.join(field_sql)) return sql
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_from_table(self): """ Generates the sql for the FROM portion of the query :return: the FROM portion of the query :rtype: str """
table_parts = [] # get the table sql for each table for table in self.tables: sql = table.get_sql() if len(sql): table_parts.append(sql) # combine all table sql separated by a comma sql = 'FROM {0} '.format(', '.join(table_parts)) return sql
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_joins(self): """ Generates the sql for the JOIN portion of the query :return: the JOIN portion of the query :rtype: str """
join_parts = [] # get the sql for each join object for join_item in self.joins: join_parts.append(join_item.get_sql()) # if there are any joins, combine them if len(join_parts): combined_joins = ' '.join(join_parts) return '{0} '.format(combined_joins) return ''
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_groups(self): """ Generates the sql for the GROUP BY portion of the query :return: the GROUP BY portion of the query :rtype: str """
# check if there are any groupings if len(self.groups): groups = [] # get the group sql for each grouping for group in self.groups: groups.append(group.get_name()) return 'GROUP BY {0} '.format(', '.join(groups)) return ''
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_order_by(self, use_alias=True): """ Generates the sql for the ORDER BY portion of the query :type use_alias: bool :param use_alias: If True, the alias for the field will be used in the order by. This is an option before query windows do not use the alias. Defaults to True. :return: the ORDER BY portion of the query :rtype: str """
# check if there are any sorters if len(self.sorters): sorters = [] # get the sql for each sorter for sorter in self.sorters: sorters.append(sorter.get_name(use_alias=use_alias)) return 'ORDER BY {0} '.format(', '.join(sorters)) return ''
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_table(self, table): """ Finds a table by name or alias. The FROM tables and JOIN tables are included in the search. :type table: str or :class:`ModelBase <django:django.db.models.base.ModelBase>` :param table: string of the table name or alias or a ModelBase instance :return: The table if it is found, otherwise None :rtype: Table or None """
table = TableFactory(table) identifier = table.get_identifier() join_tables = [join_item.right_table for join_item in self.joins] for table in (self.tables + join_tables): if table.get_identifier() == identifier: return table return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def wrap(self, alias=None): """ Wraps the query by selecting all fields from itself :rtype: :class:`Query <querybuilder.query.Query>` :return: The wrapped query """
field_names = self.get_field_names() query = Query(self.connection).from_table(deepcopy(self), alias=alias) self.__dict__.update(query.__dict__) # set explicit field names self.tables[0].set_fields(field_names) field_names = self.get_field_names() return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def copy(self): """ Deeply copies everything in the query object except the connection object is shared """
# detach the db connection so deepcopy does not try to copy it
connection = self.connection
del self.connection
copied_query = deepcopy(self)
# re-attach the SAME connection object to both the copy and self
copied_query.connection = connection
self.connection = connection
return copied_query
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_args(self): """ Gets the args for the query which will be escaped when being executed by the db. All inner queries are inspected and their args are combined with this query's args. :return: all args for this query as a dict :rtype: dict """
for table in self.tables + self.with_tables: if type(table) is QueryTable: self._where.args.update(table.query.get_args()) return self._where.args
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def explain(self, sql=None, sql_args=None): """ Runs EXPLAIN on this query :type sql: str or None :param sql: The sql to run EXPLAIN on. If None is specified, the query will use ``self.get_sql()`` :type sql_args: dict or None :param sql_args: A dictionary of the arguments to be escaped in the query. If None and sql is None, the query will use ``self.get_args()`` :rtype: list of str :return: list of each line of output from the EXPLAIN statement """
cursor = self.get_cursor() if sql is None: sql = self.get_sql() sql_args = self.get_args() elif sql_args is None: sql_args = {} cursor.execute('EXPLAIN {0}'.format(sql), sql_args) rows = self._fetch_all_as_dict(cursor) return rows
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def select(self, return_models=False, nest=False, bypass_safe_limit=False, sql=None, sql_args=None): """ Executes the SELECT statement and returns the rows as a list of dictionaries or a list of model instances :type return_models: bool :param return_models: Set to True to return a list of models instead of a list of dictionaries. Defaults to False :type nest: bool :param nest: Set to True to treat all double underscores in keynames as nested data. This will convert all keys with double underscores to dictionaries keyed off of the left side of the underscores. Ex: {"id": 1", "account__id": 1, "account__name": "Name"} becomes {"id": 1, "account": {"id": 1, "name": "Name"}} :type bypass_safe_limit: bool :param bypass_safe_limit: Ignores the safe_limit option even if the safe_limit is enabled :type sql: str or None :param sql: The sql to execute in the SELECT statement. If one is not specified, then the query will use ``self.get_sql()`` :type sql_args: str or None :param sql_args: The sql args to be used in the SELECT statement. If none are specified, then the query wil use ``self.get_args()`` :rtype: list of dict :return: list of dictionaries of the rows """
# Check if we need to set a safe limit if bypass_safe_limit is False: if Query.enable_safe_limit: if self.count() > Query.safe_limit: self.limit(Query.safe_limit) # determine which sql to use if sql is None: sql = self.get_sql() # determine which sql args to use if sql_args is None: sql_args = self.get_args() # get the cursor to execute the query cursor = self.get_cursor() # execute the query cursor.execute(sql, sql_args) # get the results as a list of dictionaries rows = self._fetch_all_as_dict(cursor) # check if models should be returned instead of dictionaries if return_models: # set nesting to true, so the nested models can easily load the data nest = True # build model map of map name to model model_map = {} for join_item in self.joins: model_map[join_item.right_table.field_prefix] = join_item.right_table.model # check if results should be nested if nest: # convert keys with double underscores to dictionaries for row in rows: _row = row.copy() for key, value in _row.items(): set_value_for_keypath(row, key, value, True, '__') if '__' in key: row.pop(key) # create models if needed if return_models: model_class = self.tables[0].model new_rows = [] for row in rows: model = model_class() # assign all non-model keys first because django 1.5 requires # that the model has an id set before setting a property that is # a foreign key for key, value in row.items(): if key not in model_map: setattr(model, key, value) # assign all model instances for key, value in row.items(): if key in model_map: child_model = model_map[key]() for child_key, child_value in value.items(): setattr(child_model, child_key, child_value) value = child_model setattr(model, key, value) new_rows.append(model) rows = new_rows return rows
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update(self, rows): """ Updates records in the db """
if len(rows) == 0: return sql, sql_args = self.get_update_sql(rows) # get the cursor to execute the query cursor = self.get_cursor() # execute the query cursor.execute(sql, sql_args)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_auto_field_name(self, model_class): """ If one of the unique_fields is the model's AutoField, return the field name, otherwise return None """
# Get auto field name (a model can only have one AutoField) for field in model_class._meta.fields: if isinstance(field, AutoField): return field.column return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def upsert(self, rows, unique_fields, update_fields, return_rows=False, return_models=False): """ Performs an upsert with the set of models defined in rows. If the unique field which is meant to cause a conflict is an auto increment field, then the field should be excluded when its value is null. In this case, an upsert will be performed followed by a bulk_create """
if len(rows) == 0:
    return

ModelClass = self.tables[0].model

rows_with_null_auto_field_value = []

# Get auto field name (a model can only have one AutoField)
auto_field_name = self.get_auto_field_name(ModelClass)

# Check if unique fields list contains an auto field
if auto_field_name in unique_fields:
    # Separate the rows that need to be inserted vs the rows that need
    # to be upserted: a null auto field value means the row cannot hit a
    # conflict and must be plain-inserted
    rows_with_null_auto_field_value = [row for row in rows if getattr(row, auto_field_name) is None]
    rows = [row for row in rows if getattr(row, auto_field_name) is not None]

return_value = []

if rows:
    sql, sql_args = self.get_upsert_sql(
        rows,
        unique_fields,
        update_fields,
        auto_field_name=auto_field_name,
        return_rows=return_rows or return_models
    )

    # get the cursor to execute the query
    cursor = self.get_cursor()

    # execute the upsert query
    cursor.execute(sql, sql_args)

    if return_rows or return_models:
        return_value.extend(self._fetch_all_as_dict(cursor))

if rows_with_null_auto_field_value:
    # insert-only pass for rows whose auto field was null
    sql, sql_args = self.get_upsert_sql(
        rows_with_null_auto_field_value,
        unique_fields,
        update_fields,
        auto_field_name=auto_field_name,
        only_insert=True,
        return_rows=return_rows or return_models
    )

    # get the cursor to execute the query
    cursor = self.get_cursor()

    # execute the upsert query
    cursor.execute(sql, sql_args)

    if return_rows or return_models:
        return_value.extend(self._fetch_all_as_dict(cursor))

if return_models:
    ModelClass = self.tables[0].model
    model_objects = [
        ModelClass(**row_dict)
        for row_dict in return_value
    ]

    # Set the state to indicate the object has been loaded from db
    # NOTE(review): db is hard-coded to 'default' — confirm multi-db use
    for model_object in model_objects:
        model_object._state.adding = False
        model_object._state.db = 'default'

    return_value = model_objects

return return_value