text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def _get_response(self, msg): """Perform the request, get the response."""
async def _get_response(self, msg):
    """Perform the request, get the response.

    Returns the aiocoap (request, response) pair, translating the
    transport-layer exceptions raised by aiocoap into this library's
    own exception types.
    """
    try:
        protocol = await self._get_protocol()
        pr = protocol.request(msg)
        r = await pr.response
        return pr, r
    except ConstructionRenderableError as e:
        # The CoAP message itself could not be constructed/rendered.
        raise ClientError("There was an error with the request.", e)
    except RequestTimedOut as e:
        # Reset the protocol so the next request starts from a clean state.
        await self._reset_protocol(e)
        raise RequestTimeout('Request timed out.', e)
    except (OSError, socket.gaierror, Error) as e:
        # aiocoap sometimes raises an OSError/socket.gaierror too.
        # aiocoap issue #124
        await self._reset_protocol(e)
        raise ServerError("There was an error with the request.", e)
    except asyncio.CancelledError as e:
        # Propagate cancellation, but tear down the protocol first.
        await self._reset_protocol(e)
        raise e
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def member_ids(self): """Members of this group."""
def member_ids(self):
    """Return the ids of the devices that are members of this group."""
    members = self.raw.get(ATTR_MEMBERS, {})
    if not members or ROOT_DEVICES2 not in members:
        return []
    devices = members[ROOT_DEVICES2]
    return devices.get(ATTR_ID, [])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_dimmer(self, dimmer, transition_time=None): """Set dimmer value of a group. dimmer: Integer between 0..255 transition_time: Integer representing tenth of a second (default None) """
def set_dimmer(self, dimmer, transition_time=None):
    """Set dimmer value of a group.

    dimmer: Integer between 0..255
    transition_time: Integer representing tenth of a second (default None)
    """
    command = {ATTR_LIGHT_DIMMER: dimmer}
    if transition_time is not None:
        command[ATTR_TRANSITION_TIME] = transition_time
    return self.set_values(command)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_gateway(): """Print gateway info as JSON"""
def print_gateway():
    """Print gateway info as JSON"""
    print("Printing information about the Gateway")
    info = api(gateway.get_gateway_info())
    print(jsonify(info.raw))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_all_devices(): """Print all devices as JSON"""
def print_all_devices():
    """Print all devices as JSON"""
    print("Printing information about all devices paired to the Gateway")
    if len(devices) == 0:
        exit(bold("No devices paired"))
    # Dump the raw representation of every paired device
    print(jsonify([dev.raw for dev in devices]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_lamps(): """Print all lamp devices as JSON"""
def print_lamps():
    """Print all lamp devices as JSON"""
    print("Printing information about all lamps paired to the Gateway")
    lamps = [dev for dev in devices if dev.has_light_control]
    if len(lamps) == 0:
        exit(bold("No lamps paired"))
    print(jsonify([lamp.raw for lamp in lamps]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_smart_tasks(): """Print smart tasks as JSON"""
def print_smart_tasks():
    """Print smart tasks as JSON"""
    print("Printing information about smart tasks")
    tasks = api(gateway.get_smart_tasks())
    if len(tasks) == 0:
        exit(bold("No smart tasks defined"))
    # Each task's control block holds its raw representation
    print(jsonify([api(task).task_control.raw for task in tasks]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_groups(): """Print all groups as JSON"""
def print_groups():
    """Print all groups as JSON"""
    print("Printing information about all groups defined in the Gateway")
    groups = api(gateway.get_groups())
    if len(groups) == 0:
        exit(bold("No groups defined"))
    print(jsonify([api(group).raw for group in groups]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def LoadGDAL(filename, no_data=None): """Read a GDAL file. Opens any file GDAL can read, selects the first raster band, and loads it and its metadata into a RichDEM array of the appropriate data type. If you need to do something more complicated, look at the source of this function. Args: filename (str): Name of the raster file to open no_data (float): Optionally, set the no_data value to this. Returns: A RichDEM array """
def LoadGDAL(filename, no_data=None):
    """Read a GDAL file.

    Opens any file GDAL can read, selects the first raster band, and
    loads it and its metadata into a RichDEM array of the appropriate
    data type. If you need to do something more complicated, look at
    the source of this function.

    Args:
        filename (str): Name of the raster file to open
        no_data (float): Optionally, set the no_data value to this.

    Returns:
        A RichDEM array

    Raises:
        Exception: If GDAL is unavailable, the band's datatype is
            unsupported, or no NoData value could be determined.
    """
    if not GDAL_AVAILABLE:
        raise Exception("richdem.LoadGDAL() requires GDAL.")

    allowed_types = {
        gdal.GDT_Byte, gdal.GDT_Int16, gdal.GDT_Int32, gdal.GDT_UInt16,
        gdal.GDT_UInt32, gdal.GDT_Float32, gdal.GDT_Float64
    }

    # Read in data
    src_ds = gdal.Open(filename)
    srcband = src_ds.GetRasterBand(1)

    # Reject unsupported band types up front, before reading the
    # (possibly very large) raster into memory.
    if srcband.DataType not in allowed_types:
        raise Exception("This datatype is not supported. Please file a bug report on RichDEM.")

    if no_data is None:
        no_data = srcband.GetNoDataValue()
        if no_data is None:
            raise Exception("The source data did not have a NoData value. Please use the no_data argument to specify one. It should not be equal to any of the actual data values. If you are using all possible data values, then the situation is pretty hopeless - sorry.")

    srcdata = rdarray(srcband.ReadAsArray(), no_data=no_data)

    # Carry the georeferencing and metadata over onto the rdarray.
    srcdata.projection = src_ds.GetProjectionRef()
    srcdata.geotransform = src_ds.GetGeoTransform()
    srcdata.metadata = dict(src_ds.GetMetadata())

    _AddAnalysis(srcdata, "LoadGDAL(filename={0}, no_data={1})".format(filename, no_data))

    return srcdata
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def SaveGDAL(filename, rda): """Save a GDAL file. Saves a RichDEM array to a data file in GeoTIFF format. If you need to do something more complicated, look at the source of this function. Args: filename (str): Name of the raster file to be created rda (rdarray): Data to save. Returns: No Return """
def SaveGDAL(filename, rda):
    """Save a GDAL file.

    Saves a RichDEM array to a data file in GeoTIFF format. If you need
    to do something more complicated, look at the source of this
    function.

    Args:
        filename (str): Name of the raster file to be created
        rda (rdarray): Data to save.

    Returns:
        No Return
    """
    if type(rda) is not rdarray:
        raise Exception("A richdem.rdarray or numpy.ndarray is required!")
    if not GDAL_AVAILABLE:
        raise Exception("richdem.SaveGDAL() requires GDAL.")

    driver = gdal.GetDriverByName('GTiff')
    data_type = gdal.GDT_Float32  # TODO
    data_set = driver.Create(filename, xsize=rda.shape[1], ysize=rda.shape[0], bands=1, eType=data_type)
    data_set.SetGeoTransform(rda.geotransform)
    data_set.SetProjection(rda.projection)

    band = data_set.GetRasterBand(1)
    band.SetNoDataValue(rda.no_data)
    band.WriteArray(np.array(rda))

    for k, v in rda.metadata.items():
        data_set.SetMetadataItem(str(k), str(v))

    # Fix: flush pending writes so the file on disk is complete even if
    # the GDAL dataset object is not garbage-collected promptly.
    band.FlushCache()
    data_set.FlushCache()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def FillDepressions( dem, epsilon = False, in_place = False, topology = 'D8' ): """Fills all depressions in a DEM. Args: dem (rdarray): An elevation model epsilon (float): If True, an epsilon gradient is imposed to all flat regions. This ensures that there is always a local gradient. in_place (bool): If True, the DEM is modified in place and there is no return; otherwise, a new, altered DEM is returned. topology (string): A topology indicator Returns: DEM without depressions. """
def FillDepressions(dem, epsilon=False, in_place=False, topology='D8'):
    """Fills all depressions in a DEM.

    Args:
        dem (rdarray): An elevation model
        epsilon (float): If truthy, an epsilon gradient is imposed on all
            flat regions so a local gradient always exists.
        in_place (bool): If True, the DEM is modified in place and there is
            no return; otherwise, a new, altered DEM is returned.
        topology (string): A topology indicator: 'D8' or 'D4'

    Returns:
        DEM without depressions (only when ``in_place`` is False).
    """
    if type(dem) is not rdarray:
        raise Exception("A richdem.rdarray or numpy.ndarray is required!")

    if topology not in ('D8', 'D4'):
        raise Exception("Unknown topology!")

    if not in_place:
        dem = dem.copy()

    _AddAnalysis(dem, "FillDepressions(dem, epsilon={0})".format(epsilon))

    # Dispatch on (epsilon-gradient?, topology)
    fillers = {
        (True, 'D8'): _richdem.rdPFepsilonD8,
        (True, 'D4'): _richdem.rdPFepsilonD4,
        (False, 'D8'): _richdem.rdFillDepressionsD8,
        (False, 'D4'): _richdem.rdFillDepressionsD4,
    }

    demw = dem.wrap()
    fillers[(bool(epsilon), topology)](demw)
    dem.copyFromWrapped(demw)

    if not in_place:
        return dem
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def BreachDepressions( dem, in_place = False, topology = 'D8' ): """Breaches all depressions in a DEM. Args: dem (rdarray): An elevation model in_place (bool): If True, the DEM is modified in place and there is no return; otherwise, a new, altered DEM is returned. topology (string): A topology indicator Returns: DEM without depressions. """
def BreachDepressions(dem, in_place=False, topology='D8'):
    """Breaches all depressions in a DEM.

    Args:
        dem (rdarray): An elevation model
        in_place (bool): If True, the DEM is modified in place and there is
            no return; otherwise, a new, altered DEM is returned.
        topology (string): A topology indicator: 'D8' or 'D4'

    Returns:
        DEM without depressions (only when ``in_place`` is False).
    """
    if type(dem) is not rdarray:
        raise Exception("A richdem.rdarray or numpy.ndarray is required!")

    if topology not in ('D8', 'D4'):
        raise Exception("Unknown topology!")

    if not in_place:
        dem = dem.copy()

    _AddAnalysis(dem, "BreachDepressions(dem)")

    # Select the breaching routine for the requested topology
    breachers = {
        'D8': _richdem.rdBreachDepressionsD8,
        'D4': _richdem.rdBreachDepressionsD4,
    }

    demw = dem.wrap()
    breachers[topology](demw)
    dem.copyFromWrapped(demw)

    if not in_place:
        return dem
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def ResolveFlats( dem, in_place = False ): """Attempts to resolve flats by imposing a local gradient Args: dem (rdarray): An elevation model in_place (bool): If True, the DEM is modified in place and there is no return; otherwise, a new, altered DEM is returned. Returns: DEM modified such that all flats drain. """
def ResolveFlats(dem, in_place=False):
    """Attempts to resolve flats by imposing a local gradient

    Args:
        dem (rdarray): An elevation model
        in_place (bool): If True, the DEM is modified in place and there is
            no return; otherwise, a new, altered DEM is returned.

    Returns:
        DEM modified such that all flats drain (only when ``in_place``
        is False).
    """
    if type(dem) is not rdarray:
        raise Exception("A richdem.rdarray or numpy.ndarray is required!")

    target = dem if in_place else dem.copy()

    _AddAnalysis(target, "ResolveFlats(dem, in_place={in_place})".format(in_place=in_place))

    wrapped = target.wrap()
    _richdem.rdResolveFlatsEpsilon(wrapped)
    target.copyFromWrapped(wrapped)

    if not in_place:
        return target
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def FlowAccumulation( dem, method = None, exponent = None, weights = None, in_place = False ): """Calculates flow accumulation. A variety of methods are available. Args: dem (rdarray): An elevation model method (str): Flow accumulation method to use. (See below.) exponent (float): Some methods require an exponent; refer to the relevant publications for details. weights (rdarray): Flow accumulation weights to use. This is the amount of flow generated by each cell. If this is not provided, each cell will generate 1 unit of flow. in_place (bool): If True, then `weights` is modified in place. An accumulation matrix is always returned, but it will just be a view of the modified data if `in_place` is True. =================== ============================== =========================== Method Note Reference =================== ============================== =========================== Tarboton Alias for Dinf. `Taroboton (1997) doi: 10.1029/96WR03137 <http://dx.doi.org/10.1029/96WR03137>`_ Dinf Alias for Tarboton. `Taroboton (1997) doi: 10.1029/96WR03137 <http://dx.doi.org/10.1029/96WR03137>`_ Quinn Holmgren with exponent=1. `Quinn et al. (1991) doi: 10.1002/hyp.3360050106 <http://dx.doi.org/10.1002/hyp.3360050106>`_ Holmgren(E) Generalization of Quinn. `Holmgren (1994) doi: 10.1002/hyp.3360080405 <http://dx.doi.org/10.1002/hyp.3360080405>`_ Freeman(E) TODO `Freeman (1991) doi: 10.1016/0098-3004(91)90048-I <http://dx.doi.org/10.1016/0098-3004(91)90048-I>`_ FairfieldLeymarieD8 Alias for Rho8. `Fairfield and Leymarie (1991) doi: 10.1029/90WR02658 <http://dx.doi.org/10.1029/90WR02658>`_ FairfieldLeymarieD4 Alias for Rho4. `Fairfield and Leymarie (1991) doi: 10.1029/90WR02658 <http://dx.doi.org/10.1029/90WR02658>`_ Rho8 Alias for FairfieldLeymarieD8. 
`Fairfield and Leymarie (1991) doi: 10.1029/90WR02658 <http://dx.doi.org/10.1029/90WR02658>`_ Rho4 Alias for FairfieldLeymarieD4. `Fairfield and Leymarie (1991) doi: 10.1029/90WR02658 <http://dx.doi.org/10.1029/90WR02658>`_ OCallaghanD8 Alias for D8. `O'Callaghan and Mark (1984) doi: 10.1016/S0734-189X(84)80011-0 <http://dx.doi.org/10.1016/S0734-189X(84)80011-0>`_ OCallaghanD4 Alias for D8. `O'Callaghan and Mark (1984) doi: 10.1016/S0734-189X(84)80011-0 <http://dx.doi.org/10.1016/S0734-189X(84)80011-0>`_ D8 Alias for OCallaghanD8. `O'Callaghan and Mark (1984) doi: 10.1016/S0734-189X(84)80011-0 <http://dx.doi.org/10.1016/S0734-189X(84)80011-0>`_ D4 Alias for OCallaghanD4. `O'Callaghan and Mark (1984) doi: 10.1016/S0734-189X(84)80011-0 <http://dx.doi.org/10.1016/S0734-189X(84)80011-0>`_ =================== ============================== =========================== **Methods marked (E) require the exponent argument.** Returns: A flow accumulation according to the desired method. If `weights` was provided and `in_place` was True, then this matrix is a view of the modified data. """
def FlowAccumulation(dem, method=None, exponent=None, weights=None, in_place=False):
    """Calculates flow accumulation using one of several named methods.

    Args:
        dem (rdarray): An elevation model
        method (str): Flow accumulation method. One of the keys of
            ``facc_methods`` / ``facc_methods_exponent`` below.
        exponent (float): Required by the Freeman and Holmgren methods.
        weights (rdarray): Per-cell flow generation; defaults to 1 per cell.
        in_place (bool): If True, `weights` is modified in place; the
            returned matrix is then a view of the modified data.

    Returns:
        A flow accumulation rdarray.

    Raises:
        Exception: For a non-rdarray dem, a non-float64 accumulator, an
            unknown method, or a missing exponent.
    """
    if type(dem) is not rdarray:
        raise Exception("A richdem.rdarray or numpy.ndarray is required!")

    # Methods that need no extra parameter. Several names are aliases
    # for the same underlying routine.
    facc_methods = {
        "Tarboton":            _richdem.FA_Tarboton,
        "Dinf":                _richdem.FA_Tarboton,
        "Quinn":               _richdem.FA_Quinn,
        "FairfieldLeymarieD8": _richdem.FA_FairfieldLeymarieD8,
        "FairfieldLeymarieD4": _richdem.FA_FairfieldLeymarieD4,
        "Rho8":                _richdem.FA_Rho8,
        "Rho4":                _richdem.FA_Rho4,
        "OCallaghanD8":        _richdem.FA_OCallaghanD8,
        "OCallaghanD4":        _richdem.FA_OCallaghanD4,
        "D8":                  _richdem.FA_D8,
        "D4":                  _richdem.FA_D4
    }

    # Methods that require the `exponent` argument.
    facc_methods_exponent = {
        "Freeman":  _richdem.FA_Freeman,
        "Holmgren": _richdem.FA_Holmgren
    }

    # Build the accumulator: reuse `weights` (in place or copied), or
    # default to one unit of flow per cell.
    if weights is not None and in_place:
        accum = rdarray(weights, no_data=-1)
    elif weights is not None and not in_place:
        accum = rdarray(weights, copy=True, meta_obj=dem, no_data=-1)
    elif weights is None:
        accum = rdarray(np.ones(shape=dem.shape, dtype='float64'), meta_obj=dem, no_data=-1)
    else:
        raise Exception("Execution should never reach this point!")

    if accum.dtype != 'float64':
        raise Exception("Accumulation array must be of type 'float64'!")

    accumw = accum.wrap()

    _AddAnalysis(accum, "FlowAccumulation(dem, method={method}, exponent={exponent}, weights={weights}, in_place={in_place})".format(
        method=method,
        exponent=exponent,
        weights='None' if weights is None else 'weights',
        in_place=in_place
    ))

    if method in facc_methods:
        facc_methods[method](dem.wrap(), accumw)
    elif method in facc_methods_exponent:
        if exponent is None:
            raise Exception('FlowAccumulation method "' + method + '" requires an exponent!')
        facc_methods_exponent[method](dem.wrap(), accumw, exponent)
    else:
        raise Exception("Invalid FlowAccumulation method. Valid methods are: " + ', '.join(list(facc_methods.keys()) + list(facc_methods_exponent.keys())))

    # Copy results back out of the C++ wrapper.
    accum.copyFromWrapped(accumw)

    return accum
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def FlowAccumFromProps( props, weights = None, in_place = False ): """Calculates flow accumulation from flow proportions. Args: props (rdarray): An elevation model weights (rdarray): Flow accumulation weights to use. This is the amount of flow generated by each cell. If this is not provided, each cell will generate 1 unit of flow. in_place (bool): If True, then `weights` is modified in place. An accumulation matrix is always returned, but it will just be a view of the modified data if `in_place` is True. Returns: A flow accumulation array. If `weights` was provided and `in_place` was True, then this matrix is a view of the modified data. """
def FlowAccumFromProps(props, weights=None, in_place=False):
    """Calculates flow accumulation from flow proportions.

    Args:
        props (rd3array): Flow proportions (3-D: row, col, neighbour)
        weights (rdarray): Per-cell flow generation; defaults to 1 per cell.
        in_place (bool): If True, `weights` is modified in place; the
            returned matrix is then a view of the modified data.

    Returns:
        A flow accumulation rdarray.
    """
    if type(props) is not rd3array:
        raise Exception("A richdem.rd3array or numpy.ndarray is required!")

    # Build the accumulator: reuse `weights` (in place or copied), or
    # default to one unit of flow per cell. Note the accumulator is 2-D
    # (props.shape[0:2]) while `props` itself is 3-D.
    if weights is not None and in_place:
        accum = rdarray(weights, no_data=-1)
    elif weights is not None and not in_place:
        accum = rdarray(weights, copy=True, meta_obj=props, no_data=-1)
    elif weights is None:
        accum = rdarray(np.ones(shape=props.shape[0:2], dtype='float64'), meta_obj=props, no_data=-1)
    else:
        raise Exception("Execution should never reach this point!")

    if accum.dtype != 'float64':
        raise Exception("Accumulation array must be of type 'float64'!")

    accumw = accum.wrap()

    _AddAnalysis(accum, "FlowAccumFromProps(dem, weights={weights}, in_place={in_place})".format(
        weights='None' if weights is None else 'weights',
        in_place=in_place
    ))

    _richdem.FlowAccumulation(props.wrap(), accumw)

    # Copy results back out of the C++ wrapper.
    accum.copyFromWrapped(accumw)

    return accum
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _join(*args): """Join S3 bucket args together. Remove empty entries and strip left-leading ``/`` """
def _join(*args):
    """Join S3 bucket args together.

    Remove empty entries and strip left-leading ``/``
    """
    stripped = (arg.lstrip(delimiter) for arg in args)
    return delimiter.join(part for part in stripped if part != '')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def initialize(self): """Initialize a helper to get bookstore settings and session information quickly"""
def initialize(self):
    """Initialize a helper to get bookstore settings and session information quickly"""
    # Parse bookstore configuration out of this handler's config object.
    self.bookstore_settings = BookstoreSettings(config=self.config)
    # aiobotocore session used later to create S3 clients.
    self.session = aiobotocore.get_session()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def put(self, path=''): """Publish a notebook on a given path. The payload directly matches the contents API for PUT. """
async def put(self, path=''):
    """Publish a notebook on a given path.

    The payload directly matches the contents API for PUT.
    """
    self.log.info("Attempt publishing to %s", path)

    # Guard clauses: a usable path and a non-empty body are required.
    if path in ('', '/'):
        raise web.HTTPError(400, "Must provide a path for publishing")

    model = self.get_json_body()
    if not model:
        raise web.HTTPError(400, "Cannot publish an empty model")

    await self._publish(model, path.lstrip('/'))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def _publish(self, model, path): """Publish notebook model to the path"""
async def _publish(self, model, path):
    """Publish notebook model to the path.

    Writes the model's content to S3 under the configured published
    prefix, then responds 201 with the S3 path (and version id if the
    bucket is versioned).
    """
    if model['type'] != 'notebook':
        raise web.HTTPError(400, "bookstore only publishes notebooks")
    content = model['content']

    # Full path used in the response; key used for the actual S3 write.
    full_s3_path = s3_path(
        self.bookstore_settings.s3_bucket, self.bookstore_settings.published_prefix, path
    )
    file_key = s3_key(self.bookstore_settings.published_prefix, path)

    self.log.info(
        "Publishing to %s",
        s3_display_path(
            self.bookstore_settings.s3_bucket, self.bookstore_settings.published_prefix, path
        ),
    )
    async with self.session.create_client(
        's3',
        aws_secret_access_key=self.bookstore_settings.s3_secret_access_key,
        aws_access_key_id=self.bookstore_settings.s3_access_key_id,
        endpoint_url=self.bookstore_settings.s3_endpoint_url,
        region_name=self.bookstore_settings.s3_region_name,
    ) as client:
        self.log.info("Processing published write of %s", path)
        obj = await client.put_object(
            Bucket=self.bookstore_settings.s3_bucket, Key=file_key, Body=json.dumps(content)
        )
        self.log.info("Done with published write of %s", path)

    self.set_status(201)

    # Include the object's version id when the bucket reports one.
    resp_content = {"s3path": full_s3_path}
    if 'VersionId' in obj:
        resp_content["versionID"] = obj['VersionId']

    resp_str = json.dumps(resp_content)
    self.finish(resp_str)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_auth(self): """ Sets up token access for authorizing requests to notebook server. This sets the notebook token as self.token and the xsrf_token as self.xsrf_token. """
def setup_auth(self):
    """
    Sets up token access for authorizing requests to notebook server.

    This sets the notebook token as self.token and the xsrf_token as
    self.xsrf_token.
    """
    self.token = self.nb_record.token
    # Hit the login page once to obtain the _xsrf cookie.
    login_page = requests.get(f"{self.url}/login")
    self.xsrf_token = login_page.cookies.get("_xsrf", "")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_request_sessions(self): """ Sets up a requests.Session object for sharing headers across API requests. """
def setup_request_sessions(self):
    """
    Sets up a requests.Session object for sharing headers across API requests.
    """
    session = requests.Session()
    session.headers.update(self.headers)
    self.req_session = session
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: async def archive(self, record: ArchiveRecord): """Process a record to write to storage. Acquire a path lock before archive. Writing to storage will only be allowed to a path if a valid `path_lock` is held and the path is not locked by another process. Parameters record : ArchiveRecord A notebook and where it should be written to storage """
async def archive(self, record: ArchiveRecord):
    """Process a record to write to storage.

    Acquire a path lock before archive. Writing to storage will only be
    allowed to a path if a valid `path_lock` is held and the path is not
    locked by another process.

    Parameters
    ----------
    record : ArchiveRecord
        A notebook and where it should be written to storage
    """
    # Serialize access to the per-path lock table.
    async with self.path_lock_ready:
        lock = self.path_locks.get(record.filepath)
        if lock is None:
            lock = Lock()
            self.path_locks[record.filepath] = lock

    # Skip writes when a given path is already locked
    if lock.locked():
        self.log.info("Skipping archive of %s", record.filepath)
        return

    async with lock:
        try:
            async with self.session.create_client(
                's3',
                aws_secret_access_key=self.settings.s3_secret_access_key,
                aws_access_key_id=self.settings.s3_access_key_id,
                endpoint_url=self.settings.s3_endpoint_url,
                region_name=self.settings.s3_region_name,
            ) as client:
                self.log.info("Processing storage write of %s", record.filepath)
                file_key = s3_key(self.settings.workspace_prefix, record.filepath)
                await client.put_object(
                    Bucket=self.settings.s3_bucket, Key=file_key, Body=record.content
                )
                self.log.info("Done with storage write of %s", record.filepath)
        except Exception as e:
            # Best-effort archive: log but never propagate storage errors.
            self.log.error(
                'Error while archiving file: %s %s', record.filepath, e, exc_info=True
            )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_pre_save_hook(self, model, path, **kwargs): """Send request to store notebook to S3. This hook offloads the storage request to the event loop. When the event loop is available for execution of the request, the storage of the notebook will be done and the write to storage occurs. Parameters model : str The type of file path : str The storage location """
def run_pre_save_hook(self, model, path, **kwargs):
    """Send request to store notebook to S3.

    This hook offloads the storage request to the event loop. When the
    event loop is available for execution of the request, the storage of
    the notebook will be done and the write to storage occurs.

    Parameters
    ----------
    model : str
        The type of file
    path : str
        The storage location
    """
    if model["type"] != "notebook":
        return

    record = ArchiveRecord(
        content=json.dumps(model["content"]),
        filepath=path,
        queued_time=ioloop.IOLoop.current().time(),
    )
    # Offload archival and schedule write to storage with the current event loop
    ioloop.IOLoop.current().spawn_callback(self.archive, record)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, url): """ do a get transaction """
def get(self, url):
    """ do a get transaction """
    response = requests.get(
        url,
        params=self.data,
        headers=self.config.HEADERS,
    )
    return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post(self, url): """ do a post request """
def post(self, url):
    """ do a post request """
    response = requests.post(
        url,
        data=self.data,
        headers=self.config.HEADERS,
    )
    return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def checkout(self, transparent=False, **kwargs): """ create a pagseguro checkout """
def checkout(self, transparent=False, **kwargs):
    """ create a pagseguro checkout """
    self.data['currency'] = self.config.CURRENCY
    self.build_checkout_params(**kwargs)

    # Transparent checkout posts to a different endpoint.
    url = (self.config.TRANSPARENT_CHECKOUT_URL if transparent
           else self.config.CHECKOUT_URL)
    response = self.post(url=url)

    return PagSeguroCheckoutResponse(response.content, config=self.config)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pre_approval_ask_payment(self, **kwargs): """ ask form a subscribe payment """
def pre_approval_ask_payment(self, **kwargs):
    """ ask form a subscribe payment """
    self.build_pre_approval_payment_params(**kwargs)
    response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
    result = PagSeguroPreApprovalPayment(response.content, self.config)
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pre_approval_cancel(self, code): """ cancel a subscribe """
def pre_approval_cancel(self, code):
    """ cancel a subscribe """
    cancel_url = self.config.PRE_APPROVAL_CANCEL_URL % code
    response = self.get(url=cancel_url)
    return PagSeguroPreApprovalCancel(response.content, self.config)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def check_transaction(self, code): """ check a transaction by its code """
def check_transaction(self, code):
    """ check a transaction by its code """
    transaction_url = self.config.TRANSACTION_URL % code
    response = self.get(url=transaction_url)
    return PagSeguroNotificationResponse(response.content, self.config)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def query_transactions(self, initial_date, final_date, page=None, max_results=None): """ query transaction by date range """
def query_transactions(self, initial_date, final_date, page=None, max_results=None):
    """ query transaction by date range """
    results = []
    while True:
        search_result = self._consume_query_transactions(
            initial_date, final_date, page, max_results)
        results.extend(search_result.transactions)

        current = search_result.current_page
        total = search_result.total_pages
        # Stop when pagination info is missing or we are on the last page.
        if current is None or total is None or current == total:
            break
        page = current + 1

    return results
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def query_pre_approvals(self, initial_date, final_date, page=None, max_results=None): """ query pre-approvals by date range """
def query_pre_approvals(self, initial_date, final_date, page=None, max_results=None):
    """ query pre-approvals by date range """
    results = []
    while True:
        search_result = self._consume_query_pre_approvals(
            initial_date, final_date, page, max_results)
        results.extend(search_result.pre_approvals)

        current = search_result.current_page
        total = search_result.total_pages
        # Stop when pagination info is missing or we are on the last page.
        if current is None or total is None or current == total:
            break
        page = current + 1

    return results
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_to_cart(item_id): """ Cart with Product """
def add_to_cart(item_id):
    """ Cart with Product """
    shopping_cart = Cart(session['cart'])
    # Persist the cart back to the session only if the item was added.
    if shopping_cart.change_item(item_id, 'add'):
        session['cart'] = shopping_cart.to_dict()
    return list_products()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_dict(self): """ Attribute values to dict """
def to_dict(self):
    """ Attribute values to dict """
    fields = ("total", "subtotal", "items", "extra_amount")
    return {name: getattr(self, name) for name in fields}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_connection(self, url): """ Sets the connection URL to the address a Neo4j server is set up at """
def set_connection(self, url):
    """ Sets the connection URL to the address a Neo4j server is set up at """
    u = urlparse(url)

    # Credentials must be embedded in the netloc and the scheme must be
    # bolt or bolt+routing, e.g. bolt://user:password@localhost:7687
    if u.netloc.find('@') > -1 and (u.scheme == 'bolt' or u.scheme == 'bolt+routing'):
        credentials, hostname = u.netloc.rsplit('@', 1)
        username, password, = credentials.split(':')
    else:
        raise ValueError("Expecting url format: bolt://user:password@localhost:7687"
                         " got {0}".format(url))

    self.driver = GraphDatabase.driver(u.scheme + '://' + hostname,
                                       auth=basic_auth(username, password),
                                       encrypted=config.ENCRYPTED_CONNECTION,
                                       max_pool_size=config.MAX_POOL_SIZE)
    self.url = url
    # Remember the pid so forked processes can detect a stale driver.
    self._pid = os.getpid()
    self._active_transaction = None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def begin(self, access_mode=None): """ Begins a new transaction, raises SystemError exception if a transaction is in progress """
def begin(self, access_mode=None):
    """
    Begins a new transaction, raises SystemError exception if a transaction is in progress
    """
    if self._active_transaction:
        raise SystemError("Transaction in progress")
    session = self.driver.session(access_mode=access_mode)
    self._active_transaction = session.begin_transaction()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _object_resolution(self, result_list): """ Performs in place automatic object resolution on a set of results returned by cypher_query. The function operates recursively in order to be able to resolve Nodes within nested list structures. Not meant to be called directly, used primarily by cypher_query. :param result_list: A list of results as returned by cypher_query. :type list: :return: A list of instantiated objects. """
# Object resolution occurs in-place for a_result_item in enumerate(result_list): for a_result_attribute in enumerate(a_result_item[1]): try: # Primitive types should remain primitive types, # Nodes to be resolved to native objects resolved_object = a_result_attribute[1] if type(a_result_attribute[1]) is Node: resolved_object = self._NODE_CLASS_REGISTRY[frozenset(a_result_attribute[1].labels)].inflate( a_result_attribute[1]) if type(a_result_attribute[1]) is list: resolved_object = self._object_resolution([a_result_attribute[1]]) result_list[a_result_item[0]][a_result_attribute[0]] = resolved_object except KeyError: # Not being able to match the label set of a node with a known object results # in a KeyError in the internal dictionary used for resolution. If it is impossible # to match, then raise an exception with more details about the error. raise ModelDefinitionMismatch(a_result_attribute[1], self._NODE_CLASS_REGISTRY) return result_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def install_traversals(cls, node_set): """ For a StructuredNode class install Traversal objects for each relationship definition on a NodeSet instance """
def install_traversals(cls, node_set):
    """
    For a StructuredNode class install Traversal objects for each
    relationship definition on a NodeSet instance
    """
    relations = cls.defined_properties(rels=True, aliases=False, properties=False)

    for name in relations:
        if hasattr(node_set, name):
            raise ValueError("Can't install traversal '{0}' exists on NodeSet".format(name))

        relation = getattr(cls, name)
        relation._lookup_node_class()

        setattr(node_set, name,
                Traversal(source=node_set, name=name, definition=relation.definition))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def process_filter_args(cls, kwargs): """ loop through properties in filter parameters check they match class definition deflate them and convert into something easy to generate cypher from """
def process_filter_args(cls, kwargs):
    """
    loop through properties in filter parameters check they match class definition
    deflate them and convert into something easy to generate cypher from
    """
    output = {}

    for key, value in kwargs.items():
        # Split a django-style "prop__operator" key into its parts.
        # NOTE(review): rsplit('__') has no maxsplit, so a key containing
        # more than one '__' raises ValueError here — confirm intended.
        if '__' in key:
            prop, operator = key.rsplit('__')
            operator = OPERATOR_TABLE[operator]
        else:
            prop = key
            operator = '='

        if prop not in cls.defined_properties(rels=False):
            raise ValueError("No such property {0} on {1}".format(prop, cls.__name__))

        property_obj = getattr(cls, prop)
        if isinstance(property_obj, AliasProperty):
            # Resolve aliases to the property they point at before deflating.
            prop = property_obj.aliased_to()
            deflated_value = getattr(cls, prop).deflate(value)
        else:
            # handle special operators
            if operator == _SPECIAL_OPERATOR_IN:
                if not isinstance(value, tuple) and not isinstance(value, list):
                    raise ValueError('Value must be a tuple or list for IN operation {0}={1}'.format(key, value))
                deflated_value = [property_obj.deflate(v) for v in value]
            elif operator == _SPECIAL_OPERATOR_ISNULL:
                if not isinstance(value, bool):
                    raise ValueError('Value must be a bool for isnull operation on {0}'.format(key))
                # isnull=True -> IS NULL, isnull=False -> IS NOT NULL; no parameter.
                operator = 'IS NULL' if value else 'IS NOT NULL'
                deflated_value = None
            elif operator in _REGEX_OPERATOR_TABLE.values():
                deflated_value = property_obj.deflate(value)
                if not isinstance(deflated_value, basestring):
                    raise ValueError('Must be a string value for {0}'.format(key))
                # String-matching shortcuts (startswith/endswith/contains)
                # are escaped and folded into a regex pattern.
                if operator in _STRING_REGEX_OPERATOR_TABLE.values():
                    deflated_value = re.escape(deflated_value)
                deflated_value = operator.format(deflated_value)
                operator = _SPECIAL_OPERATOR_REGEX
            else:
                deflated_value = property_obj.deflate(value)

        # map property to correct property name in the database
        db_property = cls.defined_properties(rels=False)[prop].db_property or prop

        output[db_property] = (operator, deflated_value)

    return output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def process_has_args(cls, kwargs): """ loop through has parameters check they correspond to class rels defined """
def process_has_args(cls, kwargs):
    """
    loop through has parameters check they correspond to class rels defined
    """
    defined_rels = cls.defined_properties(properties=False, rels=True, aliases=False)

    match = {}
    dont_match = {}

    for name, requested in kwargs.items():
        if name not in defined_rels:
            raise ValueError("No such relation {0} defined on a {1}".format(name, cls.__name__))

        rhs_ident = name
        defined_rels[name]._lookup_node_class()

        if requested is True:
            match[rhs_ident] = defined_rels[name].definition
        elif requested is False:
            dont_match[rhs_ident] = defined_rels[name].definition
        elif isinstance(requested, NodeSet):
            raise NotImplementedError("Not implemented yet")
        else:
            raise ValueError("Expecting True / False / NodeSet got: " + repr(requested))

    return match, dont_match
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_traversal(self, traversal): """ traverse a relationship from a node to a set of nodes """
# build source rhs_label = ':' + traversal.target_class.__label__ # build source lhs_ident = self.build_source(traversal.source) rhs_ident = traversal.name + rhs_label self._ast['return'] = traversal.name self._ast['result_class'] = traversal.target_class rel_ident = self.create_ident() stmt = _rel_helper(lhs=lhs_ident, rhs=rhs_ident, ident=rel_ident, **traversal.definition) self._ast['match'].append(stmt) if traversal.filters: self.build_where_stmt(rel_ident, traversal.filters) return traversal.name
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_label(self, ident, cls): """ match nodes by a label """
ident_w_label = ident + ':' + cls.__label__ self._ast['match'].append('({0})'.format(ident_w_label)) self._ast['return'] = ident self._ast['result_class'] = cls return ident
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_where_stmt(self, ident, filters, q_filters=None, source_class=None): """ construct a where statement from some filters """
# Q-object filters take precedence: compile them recursively and attach
# the resulting WHERE fragment, if any.
if q_filters is not None:
    stmts = self._parse_q_filters(ident, q_filters, source_class)
    if stmts:
        self._ast['where'].append(stmts)
else:
    stmts = []
    for row in filters:
        negate = False

        # pre-process NOT cases as they are nested dicts
        if '__NOT__' in row and len(row) == 1:
            negate = True
            row = row['__NOT__']

        for prop, op_and_val in row.items():
            op, val = op_and_val
            if op in _UNARY_OPERATORS:
                # unary operators do not have a parameter
                statement = '{0} {1}.{2} {3}'.format('NOT' if negate else '', ident, prop, op)
            else:
                # bind the value through a named placeholder so the query
                # stays parameterised
                place_holder = self._register_place_holder(ident + '_' + prop)
                statement = '{0} {1}.{2} {3} {{{4}}}'.format('NOT' if negate else '', ident, prop, op, place_holder)
                self._query_params[place_holder] = val
            stmts.append(statement)

    # individual filter rows are always combined with AND
    self._ast['where'].append(' AND '.join(stmts))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def first(self, **kwargs): """ Retrieve the first node from the set matching supplied parameters :param kwargs: same syntax as `filter()` :return: node """
result = result = self._get(limit=1, **kwargs) if result: return result[0] else: raise self.source_class.DoesNotExist(repr(kwargs))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def filter(self, *args, **kwargs): """ Apply filters to the existing nodes in the set. :param kwargs: filter parameters Filters mimic Django's syntax with the double '__' to separate field and operators. e.g `.filter(salary__gt=20000)` results in `salary > 20000`. The following operators are available: * 'lt': less than * 'gt': greater than * 'lte': less than or equal to * 'gte': greater than or equal to * 'ne': not equal to * 'in': matches one of list (or tuple) * 'isnull': is null * 'regex': matches supplied regex (neo4j regex format) * 'exact': exactly match string (just '=') * 'iexact': case insensitive match string * 'contains': contains string * 'icontains': case insensitive contains * 'startswith': string starts with * 'istartswith': case insensitive string starts with * 'endswith': string ends with * 'iendswith': case insensitive string ends with :return: self """
if args or kwargs: self.q_filters = Q(self.q_filters & Q(*args, **kwargs)) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def exclude(self, *args, **kwargs): """ Exclude nodes from the NodeSet via filters. :param kwargs: filter parameters see syntax for the filter method :return: self """
if args or kwargs: self.q_filters = Q(self.q_filters & ~Q(*args, **kwargs)) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def order_by(self, *props): """ Order by properties. Prepend with minus to do descending. Pass None to remove ordering. """
# order_by(None) resets any previously configured ordering.
should_remove = len(props) == 1 and props[0] is None
if not hasattr(self, '_order_by') or should_remove:
    self._order_by = []
    if should_remove:
        return self

if '?' in props:
    # '?' requests random ordering and overrides everything else.
    self._order_by.append('?')
else:
    for prop in props:
        prop = prop.strip()
        if prop.startswith('-'):
            # a leading '-' means descending order for this property
            prop = prop[1:]
            desc = True
        else:
            desc = False

        if prop not in self.source_class.defined_properties(rels=False):
            raise ValueError("No such property {0} on {1}".format(
                prop, self.source_class.__name__))

        property_obj = getattr(self.source_class, prop)
        if isinstance(property_obj, AliasProperty):
            # order on the aliased property's real name
            prop = property_obj.aliased_to()

        self._order_by.append(prop + (' DESC' if desc else ''))

return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def match(self, **kwargs): """ Traverse relationships with properties matching the given parameters. e.g: `.match(price__lt=10)` :param kwargs: see `NodeSet.filter()` for syntax :return: self """
if kwargs: if self.definition.get('model') is None: raise ValueError("match() with filter only available on relationships with a model") output = process_filter_args(self.definition['model'], kwargs) if output: self.filters.append(output) return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def inflate(self, value): """ Handles the marshalling from Neo4J POINT to NeomodelPoint :param value: Value returned from the database :type value: Neo4J POINT :return: NeomodelPoint """
if not isinstance(value,neo4j.types.spatial.Point):
    raise TypeError('Invalid datatype to inflate. Expected POINT datatype, received {}'.format(type(value)))

# Map the point's SRID to a named CRS; unknown SRIDs are rejected.
try:
    value_point_crs = SRID_TO_CRS[value.srid]
except KeyError:
    raise ValueError('Invalid SRID to inflate. '
                     'Expected one of {}, received {}'.format(SRID_TO_CRS.keys(), value.srid))

# The stored point must use the same CRS this property was declared with.
if self._crs != value_point_crs:
    raise ValueError('Invalid CRS. '
                     'Expected POINT defined over {}, received {}'.format(self._crs, value_point_crs))

# cartesian
if value.srid == 7203:
    return NeomodelPoint(x=value.x, y=value.y)
# cartesian-3d
elif value.srid == 9157:
    return NeomodelPoint(x=value.x, y=value.y, z=value.z)
# wgs-84
elif value.srid == 4326:
    return NeomodelPoint(longitude=value.longitude, latitude=value.latitude)
# wgs-84-3d
elif value.srid == 4979:
    return NeomodelPoint(longitude=value.longitude, latitude=value.latitude, height=value.height)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def deflate(self, value): """ Handles the marshalling from NeomodelPoint to Neo4J POINT :param value: The point that was assigned as value to a property in the model :type value: NeomodelPoint :return: Neo4J POINT """
if not isinstance(value, NeomodelPoint):
    raise TypeError('Invalid datatype to deflate. Expected NeomodelPoint, received {}'.format(type(value)))

# The point's CRS must match the CRS this property was declared with.
if not value.crs == self._crs:
    raise ValueError('Invalid CRS. '
                     'Expected NeomodelPoint defined over {}, '
                     'received NeomodelPoint defined over {}'.format(self._crs, value.crs))

# Dispatch on CRS name to build the matching neo4j spatial type.
if value.crs == 'cartesian-3d':
    return neo4j.types.spatial.CartesianPoint((value.x, value.y, value.z))
elif value.crs == 'cartesian':
    return neo4j.types.spatial.CartesianPoint((value.x,value.y))
elif value.crs == 'wgs-84':
    return neo4j.types.spatial.WGS84Point((value.longitude, value.latitude))
elif value.crs == 'wgs-84-3d':
    return neo4j.types.spatial.WGS84Point((value.longitude, value.latitude, value.height))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add(self, data, conn_type, squash=True): """ Combine this tree and the data represented by data using the connector conn_type. The combine is done by squashing the node other away if possible. This tree (self) will never be pushed to a child node of the combined tree, nor will the connector or negated properties change. Return a node which can be used in place of data regardless if the node other got squashed or not. If `squash` is False the data is prepared and added as a child to this tree without further logic. Args: conn_type (str, optional ["AND", "OR"]): connection method """
# An identical subtree is already present; nothing to merge.
if data in self.children:
    return data
if not squash:
    self.children.append(data)
    return data
if self.connector == conn_type:
    # We can reuse self.children to append or squash the node other.
    if (isinstance(data, QBase) and not data.negated and
            (data.connector == conn_type or len(data) == 1)):
        # We can squash the other node's children directly into this
        # node. We are just doing (AB)(CD) == (ABCD) here, with the
        # addition that if the length of the other node is 1 the
        # connector doesn't matter. However, for the len(self) == 1
        # case we don't want to do the squashing, as it would alter
        # self.connector.
        self.children.extend(data.children)
        return self
    else:
        # We could use perhaps additional logic here to see if some
        # children could be used for pushdown here.
        self.children.append(data)
        return data
else:
    # Different connector: demote the current children into a new child
    # node so this node can take on conn_type without changing meaning.
    obj = self._new_instance(self.children, self.connector,
                             self.negated)
    self.connector = conn_type
    self.children = [obj, data]
    return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_node(self, obj): """check for valid node i.e correct class and is saved"""
# The node must be an instance of the relationship's declared class...
node_class = self.definition['node_class']
if not issubclass(type(obj), node_class):
    raise ValueError("Expected node of class " + node_class.__name__)
# ...and must be saved (saved nodes carry an 'id' attribute).
if not hasattr(obj, 'id'):
    raise ValueError("Can't perform operation on unsaved node " + repr(obj))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def replace(self, node, properties=None): """ Disconnect all existing nodes and connect the supplied node :param node: :param properties: for the new relationship :type: dict :return: """
# Drop every current relationship first, then attach the supplied node.
self.disconnect_all()
self.connect(node, properties)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def relationship(self, node): """ Retrieve the relationship object for this first relationship between self and node. :param node: :return: StructuredRel """
self._check_node(node)
# Build the relationship pattern between the two bound identifiers.
my_rel = _rel_helper(lhs='us', rhs='them', ident='r', **self.definition)
q = "MATCH " + my_rel + " WHERE id(them)={them} and id(us)={self} RETURN r LIMIT 1"
rels = self.source.cypher(q, {'them': node.id})[0]
# No relationship currently exists between the two nodes.
if not rels:
    return

# Inflate with the declared relationship model, or the generic default.
rel_model = self.definition.get('model') or StructuredRel

return self._set_start_end_cls(rel_model.inflate(rels[0][0]), node)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def reconnect(self, old_node, new_node): """ Disconnect old_node and connect new_node copying over any properties on the original relationship. Useful for preventing cardinality violations :param old_node: :param new_node: :return: None """
self._check_node(old_node)
self._check_node(new_node)
# Reconnecting a node to itself is a no-op.
if old_node.id == new_node.id:
    return
old_rel = _rel_helper(lhs='us', rhs='old', ident='r', **self.definition)

# get list of properties on the existing rel
result, meta = self.source.cypher(
    "MATCH (us), (old) WHERE id(us)={self} and id(old)={old} "
    "MATCH " + old_rel + " RETURN r", {'old': old_node.id})
if result:
    node_properties = _get_node_properties(result[0][0])
    existing_properties = node_properties.keys()
else:
    # There is no relationship between the source and old_node to move.
    raise NotConnected('reconnect', self.source, old_node)

# remove old relationship and create new one
new_rel = _rel_helper(lhs='us', rhs='new', ident='r2', **self.definition)
q = "MATCH (us), (old), (new) " \
    "WHERE id(us)={self} and id(old)={old} and id(new)={new} " \
    "MATCH " + old_rel
q += " CREATE UNIQUE" + new_rel

# copy over properties if we have
for p in existing_properties:
    q += " SET r2.{0} = r.{1}".format(p, p)
q += " WITH r DELETE r"
self.source.cypher(q, {'old': old_node.id, 'new': new_node.id})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def disconnect(self, node): """ Disconnect a node :param node: :return: """
# Match the specific relationship between this node and `node`, then
# delete just that relationship (the nodes themselves are untouched).
rel_pattern = _rel_helper(lhs='a', rhs='b', ident='r', **self.definition)
query = ("MATCH (a), (b) WHERE id(a)={self} and id(b)={them} "
         "MATCH " + rel_pattern + " DELETE r")
self.source.cypher(query, {'them': node.id})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def disconnect_all(self): """ Disconnect all nodes :return: """
# Constrain the far side by label so only nodes of this relation's
# declared class are matched, then delete every such relationship.
target = 'b:' + self.definition['node_class'].__label__
rel_pattern = _rel_helper(lhs='a', rhs=target, ident='r', **self.definition)
query = 'MATCH (a) WHERE id(a)={self} MATCH ' + rel_pattern + ' DELETE r'
self.source.cypher(query)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def connect(self, node, properties=None): """ Connect to a node. :param node: :type: StructuredNode :param properties: relationship properties :type: dict :return: True / rel instance """
if len(self): raise AttemptedCardinalityViolation( "Node already has {0} can't connect more".format(self)) else: return super(ZeroOrOne, self).connect(node, properties)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def single(self): """ Fetch one of the related nodes :return: Node """
nodes = super(OneOrMore, self).all() if nodes: return nodes[0] raise CardinalityViolation(self, 'none')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def drop_indexes(quiet=True, stdout=None): """ Discover and drop all indexes. :type: bool :return: None """
# Default to stdout so quiet=False works without an explicit stream.
if not stdout:
    stdout = sys.stdout

results, meta = db.cypher_query("CALL db.indexes()")
# Raw string so the escaped parens are a literal regex, not a string
# escape; captures label and property out of the index description.
pattern = re.compile(r':(.*)\((.*)\)')
for index in results:
    db.cypher_query('DROP ' + index[0])
    # Honour the quiet flag (previously it was accepted but ignored).
    if not quiet:
        match = pattern.search(index[0])
        stdout.write(' - Dropping index on label {0} with property {1}.\n'.format(
            match.group(1), match.group(2)))
if not quiet:
    stdout.write("\n")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_all_labels(stdout=None): """ Calls functions for dropping constraints and indexes. :param stdout: output stream :return: None """
if not stdout:
    stdout = sys.stdout

# Fixed typo in user-facing output: "Droping" -> "Dropping".
stdout.write("Dropping constraints...\n")
drop_constraints(quiet=False, stdout=stdout)

stdout.write('Dropping indexes...\n')
drop_indexes(quiet=False, stdout=stdout)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def install_labels(cls, quiet=True, stdout=None): """ Setup labels with indexes and constraints for a given class :param cls: StructuredNode class :type: class :param quiet: (default true) enable standard output :param stdout: stdout stream :type: bool :return: None """
if not hasattr(cls, '__label__'): if not quiet: stdout.write(' ! Skipping class {0}.{1} is abstract\n'.format(cls.__module__, cls.__name__)) return for name, property in cls.defined_properties(aliases=False, rels=False).items(): db_property = property.db_property or name if property.index: if not quiet: stdout.write(' + Creating index {0} on label {1} for class {2}.{3}\n'.format( name, cls.__label__, cls.__module__, cls.__name__)) db.cypher_query("CREATE INDEX on :{0}({1}); ".format( cls.__label__, db_property)) elif property.unique_index: if not quiet: stdout.write(' + Creating unique constraint for {0} on label {1} for class {2}.{3}\n'.format( name, cls.__label__, cls.__module__, cls.__name__)) db.cypher_query("CREATE CONSTRAINT " "on (n:{0}) ASSERT n.{1} IS UNIQUE; ".format( cls.__label__, db_property))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _build_merge_query(cls, merge_params, update_existing=False, lazy=False, relationship=None): """ Get a tuple of a CYPHER query and a params dict for the specified MERGE query. :param merge_params: The target node match parameters, each node must have a "create" key and optional "update". :type merge_params: list of dict :param update_existing: True to update properties of existing nodes, default False to keep existing values. :type update_existing: bool :rtype: tuple """
query_params = dict(merge_params=merge_params)
# MERGE pattern: all inherited labels plus every required property,
# each bound from the per-row params.create map.
n_merge = "n:{0} {{{1}}}".format(
    ":".join(cls.inherited_labels()),
    ", ".join("{0}: params.create.{0}".format(getattr(cls, p).db_property or p)
              for p in cls.__required_properties__))
if relationship is None:
    # create "simple" unwind query
    query = "UNWIND {{merge_params}} as params\n MERGE ({0})\n ".format(n_merge)
else:
    # validate relationship
    if not isinstance(relationship.source, StructuredNode):
        raise ValueError("relationship source [{0}] is not a StructuredNode".format(repr(relationship.source)))
    relation_type = relationship.definition.get('relation_type')
    if not relation_type:
        raise ValueError('No relation_type is specified on provided relationship')

    # local import to avoid a circular dependency with the match module
    from .match import _rel_helper

    query_params["source_id"] = relationship.source.id
    query = "MATCH (source:{0}) WHERE ID(source) = {{source_id}}\n ".format(relationship.source.__label__)
    query += "WITH source\n UNWIND {merge_params} as params \n "
    query += "MERGE "
    query += _rel_helper(lhs='source', rhs=n_merge, ident=None,
                         relation_type=relation_type,
                         direction=relationship.definition['direction'])

query += "ON CREATE SET n = params.create\n "
# if update_existing, write properties on match as well
if update_existing is True:
    query += "ON MATCH SET n += params.update\n"

# close query
if lazy:
    query += "RETURN id(n)"
else:
    query += "RETURN n"

return query, query_params
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create(cls, *props, **kwargs): """ Call to CREATE with parameters map. A new instance will be created and saved. :param props: dict of properties to create the nodes. :type props: tuple :param lazy: False by default, specify True to get nodes with id only without the parameters. :type: bool :rtype: list """
if 'streaming' in kwargs:
    warnings.warn('streaming is not supported by bolt, please remove the kwarg',
                  category=DeprecationWarning, stacklevel=1)

lazy = kwargs.get('lazy', False)

# create mapped query
query = "CREATE (n:{0} {{create_params}})".format(':'.join(cls.inherited_labels()))

# close query
if lazy:
    query += " RETURN id(n)"
else:
    query += " RETURN n"

results = []
# One CREATE round-trip per property map; deflate against an unsaved
# placeholder node so defaults are applied before sending.
for item in [cls.deflate(p, obj=_UnsavedNode(), skip_empty=True) for p in props]:
    node, _ = db.cypher_query(query, {'create_params': item})
    results.extend(node[0])

nodes = [cls.inflate(node) for node in results]

# Fire the post_create hook only for fully inflated (non-lazy) nodes.
if not lazy and hasattr(cls, 'post_create'):
    for node in nodes:
        node.post_create()

return nodes
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_or_update(cls, *props, **kwargs): """ Call to MERGE with parameters map. A new instance will be created and saved if does not already exists, this is an atomic operation. If an instance already exists all optional properties specified will be updated. Note that the post_create hook isn't called after create_or_update :param props: List of dict arguments to get or create the entities with. :type props: tuple :param relationship: Optional, relationship to get/create on when new entity is created. :param lazy: False by default, specify True to get nodes with id only without the parameters. :rtype: list """
lazy = kwargs.get('lazy', False)
relationship = kwargs.get('relationship')

# build merge query, make sure to update only explicitly specified properties
create_or_update_params = []
for specified, deflated in [(p, cls.deflate(p, skip_empty=True)) for p in props]:
    # restrict the update map to keys the caller actually passed in
    create_or_update_params.append({"create": deflated,
                                    "update": dict((k, v) for k, v in deflated.items() if k in specified)})
query, params = cls._build_merge_query(create_or_update_params,
                                       update_existing=True,
                                       relationship=relationship,
                                       lazy=lazy)

if 'streaming' in kwargs:
    warnings.warn('streaming is not supported by bolt, please remove the kwarg',
                  category=DeprecationWarning, stacklevel=1)

# fetch and build instance for each result
results = db.cypher_query(query, params)
return [cls.inflate(r[0]) for r in results[0]]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cypher(self, query, params=None): """ Execute a cypher query with the param 'self' pre-populated with the nodes neo4j id. :param query: cypher query string :type: string :param params: query parameters :type: dict :return: list containing query results :rtype: list """
self._pre_action_check('cypher') params = params or {} params.update({'self': self.id}) return db.cypher_query(query, params)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete(self): """ Delete a node and it's relationships :return: True """
self._pre_action_check('delete') self.cypher("MATCH (self) WHERE id(self)={self} " "OPTIONAL MATCH (self)-[r]-()" " DELETE r, self") delattr(self, 'id') self.deleted = True return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_or_create(cls, *props, **kwargs): """ Call to MERGE with parameters map. A new instance will be created and saved if does not already exists, this is an atomic operation. Parameters must contain all required properties, any non required properties with defaults will be generated. Note that the post_create hook isn't called after get_or_create :param props: dict of properties to get or create the entities with. :type props: tuple :param relationship: Optional, relationship to get/create on when new entity is created. :param lazy: False by default, specify True to get nodes with id only without the parameters. :rtype: list """
lazy = kwargs.get('lazy', False)
relationship = kwargs.get('relationship')

# build merge query; no "update" map here, so existing node properties
# are left untouched on match
get_or_create_params = [{"create": cls.deflate(p, skip_empty=True)} for p in props]
query, params = cls._build_merge_query(get_or_create_params,
                                       relationship=relationship, lazy=lazy)

if 'streaming' in kwargs:
    warnings.warn('streaming is not supported by bolt, please remove the kwarg',
                  category=DeprecationWarning, stacklevel=1)

# fetch and build instance for each result
results = db.cypher_query(query, params)
return [cls.inflate(r[0]) for r in results[0]]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def inherited_labels(cls): """ Return list of labels from nodes class hierarchy. :return: list """
return [scls.__label__ for scls in cls.mro() if hasattr(scls, '__label__') and not hasattr( scls, '__abstract_node__')]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def refresh(self): """ Reload the node from neo4j """
self._pre_action_check('refresh')
# Only a saved node (one with a neo4j id) can be refreshed.
if not hasattr(self, 'id'):
    raise ValueError("Can't refresh unsaved node")

request = self.cypher("MATCH (n) WHERE id(n)={self}"
                      " RETURN n")[0]
if not request or not request[0]:
    raise self.__class__.DoesNotExist("Can't refresh non existent node")

# Copy every property from the freshly inflated node onto this instance.
fresh = self.inflate(request[0][0])
for key, val in fresh.__properties__.items():
    setattr(self, key, val)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self): """ Save the node to neo4j or raise an exception :return: the node instance """
# create or update instance node if hasattr(self, 'id'): # update params = self.deflate(self.__properties__, self) query = "MATCH (n) WHERE id(n)={self} \n" query += "\n".join(["SET n.{0} = {{{1}}}".format(key, key) + "\n" for key in params.keys()]) for label in self.inherited_labels(): query += "SET n:`{0}`\n".format(label) self.cypher(query, params) elif hasattr(self, 'deleted') and self.deleted: raise ValueError("{0}.save() attempted on deleted node".format( self.__class__.__name__)) else: # create self.id = self.create(self.__properties__)[0].id return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def default_value(self): """ Generate a default value :return: the value """
if self.has_default: if hasattr(self.default, '__call__'): return self.default() else: return self.default else: raise Exception("No default value specified")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self): """ Save the relationship :return: self """
props = self.deflate(self.__properties__)
query = "MATCH ()-[r]->() WHERE id(r)={self} "
# one parameterised SET clause per relationship property
for key in props:
    query += " SET r.{0} = {{{1}}}".format(key, key)
# 'self' placeholder carries the relationship's neo4j id
props['self'] = self.id

db.cypher_query(query, props)

return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def start_node(self): """ Get start node :return: StructuredNode """
return db.cypher_query("MATCH (aNode) " "WHERE id(aNode)={nodeid} " "RETURN aNode".format(nodeid=self._start_node_id), resolve_objects = True)[0][0][0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def end_node(self): """ Get end node :return: StructuredNode """
return db.cypher_query("MATCH (aNode) " "WHERE id(aNode)={nodeid} " "RETURN aNode".format(nodeid=self._end_node_id), resolve_objects = True)[0][0][0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_coi(self, params_dict): """ Invokes the CO Index endpoint :param params_dict: dict of parameters :returns: a string containing raw JSON data :raises: *ValueError*, *APICallError* """
lat = str(params_dict['lat'])
lon = str(params_dict['lon'])
start = params_dict['start']
interval = params_dict['interval']

# build request URL
if start is None:
    # no start time: request current data
    timeref = 'current'
else:
    if interval is None:
        # default time granularity is one year
        timeref = self._trim_to(timeformatutils.to_date(start), 'year')
    else:
        timeref = self._trim_to(timeformatutils.to_date(start), interval)
fixed_url = '%s/%s,%s/%s.json' % (CO_INDEX_URL, lat, lon, timeref)
uri = http_client.HttpClient.to_url(fixed_url, self._API_key, None)
_, json_data = self._client.cacheable_get_json(uri)
return json_data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_JSON(self, JSON_string): """ Parses a `pyowm.alertapi30.alert.Alert` instance out of raw JSON data. :param JSON_string: a raw JSON string :type JSON_string: str :return: a `pyowm.alertapi30.alert.Alert` instance or ``None`` if no data is available :raises: *ParseResponseError* if it is impossible to find or parse the data needed to build the result """
if JSON_string is None:
    raise parse_response_error.ParseResponseError('JSON data is None')
d = json.loads(JSON_string)
try:
    alert_id = d['_id']
    # Normalise the ISO timestamp: strip fractional seconds, swap the
    # 'T' separator for a space, and force a UTC offset suffix.
    t = d['last_update'].split('.')[0].replace('T', ' ') + '+00'
    alert_last_update = timeformatutils._ISO8601_to_UNIXtime(t)
    alert_trigger_id = d['triggerId']
    # NOTE(review): only the 'min' current value is kept per condition —
    # confirm this matches the Alert API payload shape.
    alert_met_conds = [
        dict(current_value=c['current_value']['min'], condition=Condition.from_dict(c['condition']))
        for c in d['conditions']
    ]
    alert_coords = d['coordinates']
    return Alert(alert_id, alert_trigger_id, alert_met_conds, alert_coords, last_update=alert_last_update)
except ValueError as e:
    raise parse_response_error.ParseResponseError('Impossible to parse JSON: %s' % e)
except KeyError as e:
    raise parse_response_error.ParseResponseError('Impossible to parse JSON: %s' % e)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add(self, data): """ Adds a new data node to the front list. The provided data will be encapsulated into a new instance of LinkedListNode class and linked list pointers will be updated, as well as list's size. :param data: the data to be inserted in the new list node :type data: object """
new_node = LinkedListNode(data, None)
if self._size == 0:
    # Empty list: the new node becomes both head and tail.
    self._first_node = new_node
    self._last_node = new_node
else:
    # Prepend: link the new node in front of the current head.
    new_node.update_next(self._first_node)
    self._first_node = new_node
self._size += 1
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove(self, data): """ Removes a data node from the list. If the list contains more than one node having the same data that shall be removed, then the node having the first occurrency of the data is removed. :param data: the data to be removed in the new list node :type data: object """
current_node = self._first_node
deleted = False

if self._size == 0:
    return

if data == current_node.data():
    # case 1: the list has only one item
    if current_node.next() is None:
        # NOTE(review): an "empty" list is represented here by a single
        # sentinel node holding None data — confirm this convention.
        self._first_node = LinkedListNode(None, None)
        self._last_node = self._first_node
        self._size = 0
        return
    # case 2: the list has more than one item
    current_node = current_node.next()
    self._first_node = current_node
    self._size -= 1
    return

while True:
    if current_node is None:
        deleted = False
        break
    # Check next element's data
    next_node = current_node.next()
    if next_node is not None:
        if data == next_node.data():
            # unlink next_node by pointing current past it
            # NOTE(review): _last_node is not updated when the removed
            # node is the tail — verify whether that is intentional.
            next_next_node = next_node.next()
            current_node.update_next(next_next_node)
            next_node = None
            deleted = True
            break
    current_node = current_node.next()
if deleted:
    self._size -= 1
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def contains(self, data): """ Checks if the provided data is stored in at least one node of the list. :param data: the seeked data :type data: object :returns: a boolean """
for item in self: if item.data() == data: return True return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pop(self): """ Removes the last node from the list """
popped = False result = None current_node = self._first_node while not popped: next_node = current_node.next() next_next_node = next_node.next() if not next_next_node: self._last_node = current_node self._last_node.update_next(None) self._size -= 1 result = next_node.data() popped = True current_node = next_node return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sunset_time(self, timeformat='unix'): """Returns the GMT time of sunset :param timeformat: the format for the time value. May be: '*unix*' (default) for UNIX time or '*iso*' for ISO8601-formatted string in the format ``YYYY-MM-DD HH:MM:SS+00`` :type timeformat: str :returns: an int or a str or None :raises: ValueError """
def get_sunset_time(self, timeformat='unix'):
    """Returns the GMT time of sunset.

    :param timeformat: the format for the time value. May be: '*unix*'
        (default) for UNIX time or '*iso*' for ISO8601-formatted string
        in the format ``YYYY-MM-DD HH:MM:SS+00``
    :type timeformat: str
    :returns: an int or a str or None
    :raises: ValueError
    """
    sunset = self._sunset_time
    if sunset is None:
        return None
    return timeformatutils.timeformat(sunset, timeformat)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sunrise_time(self, timeformat='unix'): """Returns the GMT time of sunrise :param timeformat: the format for the time value. May be: '*unix*' (default) for UNIX time or '*iso*' for ISO8601-formatted string in the format ``YYYY-MM-DD HH:MM:SS+00`` :type timeformat: str :returns: an int or a str or None :raises: ValueError """
def get_sunrise_time(self, timeformat='unix'):
    """Returns the GMT time of sunrise.

    :param timeformat: the format for the time value. May be: '*unix*'
        (default) for UNIX time or '*iso*' for ISO8601-formatted string
        in the format ``YYYY-MM-DD HH:MM:SS+00``
    :type timeformat: str
    :returns: an int or a str or None
    :raises: ValueError
    """
    sunrise = self._sunrise_time
    if sunrise is None:
        return None
    return timeformatutils.timeformat(sunrise, timeformat)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_temperature(self, unit='kelvin'): """Returns a dict with temperature info :param unit: the unit of measure for the temperature values. May be: '*kelvin*' (default), '*celsius*' or '*fahrenheit*' :type unit: str :returns: a dict containing temperature values. :raises: ValueError when unknown temperature units are provided """
def get_temperature(self, unit='kelvin'):
    """Returns a dict with temperature info.

    :param unit: the unit of measure for the temperature values. May
        be: '*kelvin*' (default), '*celsius*' or '*fahrenheit*'
    :type unit: str
    :returns: a dict containing temperature values
    :raises: ValueError when unknown temperature units are provided
    """
    # The OWM Weather API mixes absolute temperatures and temperature
    # deltas in the same payload: only non-negative, non-None values are
    # treated as absolute Kelvin readings and unit-converted.
    absolute = {}
    untouched = {}
    for label, temp in self._temperature.items():
        if temp is not None and temp >= 0:
            absolute[label] = temp
        else:
            untouched[label] = temp
    result = dict(temputils.kelvin_dict_to(absolute, unit))
    result.update(untouched)
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def creation_time(self, timeformat='unix'): """Returns the UTC time of creation of this station :param timeformat: the format for the time value. May be: '*unix*' (default) for UNIX time, '*iso*' for ISO8601-formatted string in the format ``YYYY-MM-DD HH:MM:SS+00`` or `date` for a ``datetime.datetime`` object :type timeformat: str :returns: an int or a str or a ``datetime.datetime`` object or None :raises: ValueError """
def creation_time(self, timeformat='unix'):
    """Returns the UTC time of creation of this station.

    :param timeformat: the format for the time value. May be: '*unix*'
        (default) for UNIX time, '*iso*' for ISO8601-formatted string in
        the format ``YYYY-MM-DD HH:MM:SS+00`` or `date` for a
        ``datetime.datetime`` object
    :type timeformat: str
    :returns: an int or a str or a ``datetime.datetime`` object or None
    :raises: ValueError
    """
    created = self.created_at
    if created is None:
        return None
    return timeformatutils.timeformat(created, timeformat)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def last_update_time(self, timeformat='unix'): """Returns the UTC time of the last update on this station's metadata :param timeformat: the format for the time value. May be: '*unix*' (default) for UNIX time, '*iso*' for ISO8601-formatted string in the format ``YYYY-MM-DD HH:MM:SS+00`` or `date` for a ``datetime.datetime`` object :type timeformat: str :returns: an int or a str or a ``datetime.datetime`` object or None :raises: ValueError """
def last_update_time(self, timeformat='unix'):
    """Returns the UTC time of the last update on this station's metadata.

    :param timeformat: the format for the time value. May be: '*unix*'
        (default) for UNIX time, '*iso*' for ISO8601-formatted string in
        the format ``YYYY-MM-DD HH:MM:SS+00`` or `date` for a
        ``datetime.datetime`` object
    :type timeformat: str
    :returns: an int or a str or a ``datetime.datetime`` object or None
    :raises: ValueError
    """
    updated = self.updated_at
    if updated is None:
        return None
    return timeformatutils.timeformat(updated, timeformat)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def bounding_polygon(self): """ Returns the bounding box polygon for this tile :return: `pyowm.utils.geo.Polygon` instance """
def bounding_polygon(self):
    """
    Returns the bounding box polygon for this tile.

    NOTE(review): removed a leftover debug ``print`` of the bbox
    coordinates from the original implementation.

    :return: `pyowm.utils.geo.Polygon` instance
    """
    lon_left, lat_bottom, lon_right, lat_top = Tile.tile_coords_to_bbox(
        self.x, self.y, self.zoom)
    # Close the ring by repeating the first vertex at the end.
    return Polygon([[
        [lon_left, lat_top],
        [lon_right, lat_top],
        [lon_right, lat_bottom],
        [lon_left, lat_bottom],
        [lon_left, lat_top]]])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tile_coords_for_point(cls, geopoint, zoom): """ Returns the coordinates of the tile containing the specified geopoint at the specified zoom level :param geopoint: the input geopoint instance :type geopoint: `pyowm.utils.geo.Point` :param zoom: zoom level :type zoom: int :return: a tuple (x, y) containing the tile-coordinates """
def tile_coords_for_point(cls, geopoint, zoom):
    """
    Returns the coordinates of the tile containing the specified
    geopoint at the specified zoom level.

    :param geopoint: the input geopoint instance
    :type geopoint: `pyowm.utils.geo.Point`
    :param zoom: zoom level
    :type zoom: int
    :return: a tuple (x, y) containing the tile-coordinates
    """
    lon = geopoint.lon
    lat = geopoint.lat
    return Tile.geoocoords_to_tile_coords(lon, lat, zoom)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_tile(self, x, y, zoom): """ Retrieves the tile having the specified coordinates and zoom level :param x: horizontal tile number in OWM tile reference system :type x: int :param y: vertical tile number in OWM tile reference system :type y: int :param zoom: zoom level for the tile :type zoom: int :returns: a `pyowm.tiles.Tile` instance """
def get_tile(self, x, y, zoom):
    """
    Retrieves the tile having the specified coordinates and zoom level.

    :param x: horizontal tile number in OWM tile reference system
    :type x: int
    :param y: vertical tile number in OWM tile reference system
    :type y: int
    :param zoom: zoom level for the tile
    :type zoom: int
    :returns: a `pyowm.tiles.Tile` instance
    """
    path = '/%s/%s/%s.png' % (zoom, x, y)
    status, data = self.http_client.get_png(
        ROOT_TILE_URL % self.map_layer + path,
        params={'appid': self.API_key})
    image = Image(data, ImageTypeEnum.PNG)
    return Tile(x, y, zoom, self.map_layer, image)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_triggers(self): """ Retrieves all of the user's triggers that are set on the Weather Alert API. :returns: list of `pyowm.alertapi30.trigger.Trigger` objects """
def get_triggers(self):
    """
    Retrieves all of the user's triggers that are set on the Weather
    Alert API.

    :returns: list of `pyowm.alertapi30.trigger.Trigger` objects
    """
    _, payload = self.http_client.get_json(
        TRIGGERS_URI,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    triggers = []
    for item in payload:
        triggers.append(self.trigger_parser.parse_dict(item))
    return triggers
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_trigger(self, trigger_id): """ Retrieves the named trigger from the Weather Alert API. :param trigger_id: the ID of the trigger :type trigger_id: str :return: a `pyowm.alertapi30.trigger.Trigger` instance """
def get_trigger(self, trigger_id):
    """
    Retrieves the named trigger from the Weather Alert API.

    :param trigger_id: the ID of the trigger
    :type trigger_id: str
    :return: a `pyowm.alertapi30.trigger.Trigger` instance
    """
    assert isinstance(trigger_id, str), "Value must be a string"
    url = NAMED_TRIGGER_URI % trigger_id
    _, payload = self.http_client.get_json(
        url,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    return self.trigger_parser.parse_dict(payload)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_trigger(self, trigger): """ Deletes from the Alert API the trigger record identified by the ID of the provided `pyowm.alertapi30.trigger.Trigger`, along with all related alerts :param trigger: the `pyowm.alertapi30.trigger.Trigger` object to be deleted :type trigger: `pyowm.alertapi30.trigger.Trigger` :returns: `None` if deletion is successful, an exception otherwise """
def delete_trigger(self, trigger):
    """
    Deletes from the Alert API the trigger record identified by the ID
    of the provided `pyowm.alertapi30.trigger.Trigger`, along with all
    related alerts.

    :param trigger: the `pyowm.alertapi30.trigger.Trigger` object to be deleted
    :type trigger: `pyowm.alertapi30.trigger.Trigger`
    :returns: `None` if deletion is successful, an exception otherwise
    """
    assert trigger is not None
    assert isinstance(trigger.id, str), "Value must be a string"
    url = NAMED_TRIGGER_URI % trigger.id
    status, _ = self.http_client.delete(
        url,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_stations(self): """ Retrieves all of the user's stations registered on the Stations API. :returns: list of *pyowm.stationsapi30.station.Station* objects """
def get_stations(self):
    """
    Retrieves all of the user's stations registered on the Stations API.

    :returns: list of *pyowm.stationsapi30.station.Station* objects
    """
    _, payload = self.http_client.get_json(
        STATIONS_URI,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    stations = []
    for item in payload:
        stations.append(self.stations_parser.parse_dict(item))
    return stations
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_station(self, id): """ Retrieves a named station registered on the Stations API. :param id: the ID of the station :type id: str :returns: a *pyowm.stationsapi30.station.Station* object """
def get_station(self, id):
    """
    Retrieves a named station registered on the Stations API.

    :param id: the ID of the station
    :type id: str
    :returns: a *pyowm.stationsapi30.station.Station* object
    """
    url = NAMED_STATION_URI % str(id)
    _, payload = self.http_client.get_json(
        url,
        params={'appid': self.API_key},
        headers={'Content-Type': 'application/json'})
    return self.stations_parser.parse_dict(payload)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_station(self, external_id, name, lat, lon, alt=None): """ Create a new station on the Station API with the given parameters :param external_id: the user-given ID of the station :type external_id: str :param name: the name of the station :type name: str :param lat: latitude of the station :type lat: float :param lon: longitude of the station :type lon: float :param alt: altitude of the station :type alt: float :returns: the new *pyowm.stationsapi30.station.Station* object """
def create_station(self, external_id, name, lat, lon, alt=None):
    """
    Creates a new station on the Station API with the given parameters.

    :param external_id: the user-given ID of the station
    :type external_id: str
    :param name: the name of the station
    :type name: str
    :param lat: latitude of the station
    :type lat: float
    :param lon: longitude of the station
    :type lon: float
    :param alt: altitude of the station
    :type alt: float
    :returns: the new *pyowm.stationsapi30.station.Station* object
    """
    assert external_id is not None
    assert name is not None
    assert lon is not None
    assert lat is not None
    # Geographic sanity checks.
    if lon < -180.0 or lon > 180.0:
        raise ValueError("'lon' value must be between -180 and 180")
    if lat < -90.0 or lat > 90.0:
        raise ValueError("'lat' value must be between -90 and 90")
    if alt is not None:
        if alt < 0.0:
            raise ValueError("'alt' value must not be negative")
    payload = dict(external_id=external_id, name=name,
                   lat=lat, lon=lon, alt=alt)
    status, created = self.http_client.post(
        STATIONS_URI,
        params={'appid': self.API_key},
        data=payload,
        headers={'Content-Type': 'application/json'})
    return self.stations_parser.parse_dict(created)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send_measurement(self, measurement): """ Posts the provided Measurement object's data to the Station API. :param measurement: the *pyowm.stationsapi30.measurement.Measurement* object to be posted :type measurement: *pyowm.stationsapi30.measurement.Measurement* instance :returns: `None` if creation is successful, an exception otherwise """
def send_measurement(self, measurement):
    """
    Posts the provided Measurement object's data to the Station API.

    :param measurement: the *pyowm.stationsapi30.measurement.Measurement*
        object to be posted
    :type measurement: *pyowm.stationsapi30.measurement.Measurement* instance
    :returns: `None` if creation is successful, an exception otherwise
    """
    assert measurement is not None
    assert measurement.station_id is not None
    # The API endpoint expects a list, even for a single measurement.
    payload = [self._structure_dict(measurement)]
    status, _ = self.http_client.post(
        MEASUREMENTS_URI,
        params={'appid': self.API_key},
        data=payload,
        headers={'Content-Type': 'application/json'})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send_measurements(self, list_of_measurements): """ Posts data about the provided list of Measurement objects to the Station API. The objects may be related to different station IDs. :param list_of_measurements: list of *pyowm.stationsapi30.measurement.Measurement* objects to be posted :type list_of_measurements: list of *pyowm.stationsapi30.measurement.Measurement* instances :returns: `None` if creation is successful, an exception otherwise """
def send_measurements(self, list_of_measurements):
    """
    Posts data about the provided list of Measurement objects to the
    Station API. The objects may be related to different station IDs.

    :param list_of_measurements: list of
        *pyowm.stationsapi30.measurement.Measurement* objects to be posted
    :type list_of_measurements: list of
        *pyowm.stationsapi30.measurement.Measurement* instances
    :returns: `None` if creation is successful, an exception otherwise
    """
    assert list_of_measurements is not None
    for m in list_of_measurements:
        assert m.station_id is not None
    payload = [self._structure_dict(m) for m in list_of_measurements]
    status, _ = self.http_client.post(
        MEASUREMENTS_URI,
        params={'appid': self.API_key},
        data=payload,
        headers={'Content-Type': 'application/json'})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_measurements(self, station_id, aggregated_on, from_timestamp, to_timestamp, limit=100): """ Reads measurements of a specified station recorded in the specified time window and aggregated on minute, hour or day. Optionally, the number of resulting measurements can be limited. :param station_id: unique station identifier :type station_id: str :param aggregated_on: aggregation time-frame for this measurement :type aggregated_on: string between 'm','h' and 'd' :param from_timestamp: Unix timestamp corresponding to the beginning of the time window :type from_timestamp: int :param to_timestamp: Unix timestamp corresponding to the end of the time window :type to_timestamp: int :param limit: max number of items to be returned. Defaults to 100 :type limit: int :returns: list of *pyowm.stationsapi30.measurement.AggregatedMeasurement* objects """
def get_measurements(self, station_id, aggregated_on, from_timestamp,
                     to_timestamp, limit=100):
    """
    Reads measurements of a specified station recorded in the specified
    time window and aggregated on minute, hour or day. Optionally, the
    number of resulting measurements can be limited.

    :param station_id: unique station identifier
    :type station_id: str
    :param aggregated_on: aggregation time-frame for this measurement
    :type aggregated_on: string between 'm','h' and 'd'
    :param from_timestamp: Unix timestamp corresponding to the beginning
        of the time window
    :type from_timestamp: int
    :param to_timestamp: Unix timestamp corresponding to the end of the
        time window
    :type to_timestamp: int
    :param limit: max number of items to be returned. Defaults to 100
    :type limit: int
    :returns: list of
        *pyowm.stationsapi30.measurement.AggregatedMeasurement* objects
    """
    assert station_id is not None
    assert aggregated_on is not None
    assert from_timestamp is not None
    assert from_timestamp > 0
    assert to_timestamp is not None
    assert to_timestamp > 0
    if to_timestamp < from_timestamp:
        raise ValueError("End timestamp can't be earlier than begin timestamp")
    assert isinstance(limit, int)
    assert limit >= 0
    query = {
        'appid': self.API_key,
        'station_id': station_id,
        'type': aggregated_on,
        'from': from_timestamp,
        'to': to_timestamp,
        'limit': limit,
    }
    _, payload = self.http_client.get_json(
        MEASUREMENTS_URI,
        params=query,
        headers={'Content-Type': 'application/json'})
    results = []
    for item in payload:
        results.append(self.aggregated_measurements_parser.parse_dict(item))
    return results
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send_buffer(self, buffer): """ Posts to the Stations API data about the Measurement objects contained into the provided Buffer instance. :param buffer: the *pyowm.stationsapi30.buffer.Buffer* instance whose measurements are to be posted :type buffer: *pyowm.stationsapi30.buffer.Buffer* instance :returns: `None` if creation is successful, an exception otherwise """
def send_buffer(self, buffer):
    """
    Posts to the Stations API data about the Measurement objects
    contained into the provided Buffer instance.

    :param buffer: the *pyowm.stationsapi30.buffer.Buffer* instance whose
        measurements are to be posted
    :type buffer: *pyowm.stationsapi30.buffer.Buffer* instance
    :returns: `None` if creation is successful, an exception otherwise
    """
    assert buffer is not None
    payload = []
    for m in buffer.measurements:
        payload.append(self._structure_dict(m))
    status, _ = self.http_client.post(
        MEASUREMENTS_URI,
        params={'appid': self.API_key},
        data=payload,
        headers={'Content-Type': 'application/json'})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_DOM_node_from_dict(d, name, parent_node): """ Dumps dict data to an ``xml.etree.ElementTree.SubElement`` DOM subtree object and attaches it to the specified DOM parent node. The created subtree object is named after the specified name. If the supplied dict is ``None`` no DOM node is created for it as well as no DOM subnodes are generated for eventual ``None`` values found inside the dict :param d: the input dictionary :type d: dict :param name: the name for the DOM subtree to be created :type name: str :param parent_node: the parent DOM node the newly created subtree must be attached to :type parent_node: ``xml.etree.ElementTree.Element`` or derivative objects :returns: ``xml.etree.ElementTree.SubElementTree`` object """
def create_DOM_node_from_dict(d, name, parent_node):
    """
    Dumps dict data to an ``xml.etree.ElementTree.SubElement`` DOM
    subtree object and attaches it to the specified DOM parent node.
    The created subtree object is named after the specified name. If the
    supplied dict is ``None`` no DOM node is created, and ``None``
    values found inside the dict produce no DOM subnodes.

    :param d: the input dictionary
    :type d: dict
    :param name: the name for the DOM subtree to be created
    :type name: str
    :param parent_node: the parent DOM node the newly created subtree
        must be attached to
    :type parent_node: ``xml.etree.ElementTree.Element`` or derivative objects
    :returns: ``xml.etree.ElementTree.SubElement`` object or ``None``
    """
    if d is None:
        return None
    subtree_root = ET.SubElement(parent_node, name)
    for key, value in d.items():
        if value is None:
            continue
        child = ET.SubElement(subtree_root, key)
        child.text = str(value)
    return subtree_root
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def DOM_node_to_XML(tree, xml_declaration=True): """ Prints a DOM tree to its Unicode representation. :param tree: the input DOM tree :type tree: an ``xml.etree.ElementTree.Element`` object :param xml_declaration: if ``True`` (default) prints a leading XML declaration line :type xml_declaration: bool :returns: Unicode object """
def DOM_node_to_XML(tree, xml_declaration=True):
    """
    Prints a DOM tree to its Unicode representation.

    :param tree: the input DOM tree
    :type tree: an ``xml.etree.ElementTree.Element`` object
    :param xml_declaration: if ``True`` (default) prints a leading XML
        declaration line
    :type xml_declaration: bool
    :returns: Unicode object
    """
    xml_text = ET.tostring(tree, encoding='utf8', method='xml').decode('utf-8')
    if xml_declaration:
        return xml_text
    # Drop the declaration line that ET.tostring emits when an explicit
    # encoding is requested.
    return xml_text.split("<?xml version='1.0' encoding='utf8'?>\n")[1]