{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nvalidates a string input", "response": "def validateStringInput(input_key,input_data, read=False):\n \"\"\"\n To check if a string has the required format. This is only used for POST APIs.\n \"\"\"\n log = clog.error_log\n func = None\n if '*' in input_data or '%' in input_data:\n func = validationFunctionWildcard.get(input_key)\n if func is None:\n func = searchstr\n elif input_key == 'migration_input' :\n if input_data.find('#') != -1 : func = block\n else : func = dataset\n else:\n if not read:\n func = validationFunction.get(input_key)\n if func is None:\n func = namestr\n else:\n if input_key == 'dataset':\n func = reading_dataset_check\n elif input_key == 'block_name':\n func = reading_block_check\n elif input_key == 'logical_file_name':\n func = reading_lfn_check\n else:\n func = namestr\n try:\n func(input_data)\n except AssertionError as ae:\n serverLog = str(ae) + \" key-value pair (%s, %s) cannot pass input checking\" %(input_key, input_data)\n #print serverLog\n dbsExceptionHandler(\"dbsException-invalid-input2\", message=\"Invalid Input Data %s...: Not Match Required Format\" %input_data[:10], \\\n logger=log.error, serverError=serverLog)\n return input_data"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning a list of all primary datasets if pattern is not provided.", "response": "def execute(self, conn, transaction=False):\n \"\"\"\n Lists all primary datasets if pattern is not provided.\n \"\"\"\n sql = self.sql\n binds = {}\n \n\tcursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)\n result = []\n for c in cursors:\n result.extend(self.formatCursor(c, size=100))\n return result"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef execute(self, conn, daoinput, transaction = False):\n if not conn:\n\t dbsExceptionHandler(\"dbsException-failed-connect2host\", \"Oracle/MigrationRequests/Remove. Expects db connection from upper layer.\",\n self.logger.exception)\n daoinput['create_by'] = dbsUtils().getCreateBy()\n\ttry:\n msg = \"DBSMigration: Invalid request. Sucessfully processed or processing requests cannot be removed,\\\n or the requested migration did not exist, or the requestor for removing and creating has to be the same user. \"\n checkit = self.dbi.processData(self.select, daoinput, conn, transaction)\n if self.formatDict(checkit)[0][\"count\"] >= 1:\n\t\treqID = {'migration_rqst_id':daoinput['migration_rqst_id']}\n\t result = self.dbi.processData(self.sql, reqID, conn, transaction)\n else:\n dbsExceptionHandler('dbsException-invalid-input', msg, self.logger.exception)\n\texcept:\n raise", "response": "Executes the SQL DELETE command."}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef listDatasetAccessTypes(self, dataset_access_type=\"\"):\n if isinstance(dataset_access_type, basestring):\n try:\n dataset_access_type = str(dataset_access_type)\n except: \n dbsExceptionHandler('dbsException-invalid-input', 'dataset_access_type given is not valid : %s' %dataset_access_type)\n else:\n dbsExceptionHandler('dbsException-invalid-input', 'dataset_access_type given is not valid : %s' %dataset_access_type)\n conn = self.dbi.connection()\n try:\n plist = self.datasetAccessType.execute(conn, dataset_access_type.upper())\n result = [{}]\n if plist:\n t = []\n for i in plist:\n for k, v in i.iteritems():\n t.append(v)\n result[0]['dataset_access_type'] = t\n return result\n finally:\n if conn:\n conn.close()", "response": "List dataset access types."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef block_before(self):\n # To avoid unnecessary database queries, ignore the IP check for\n # requests for static files\n if request.path.startswith(url_for('static', filename='')):\n return\n\n # Some static files might be served from the root path (e.g.\n # favicon.ico, robots.txt, etc.). Ignore the IP check for most\n # common extensions of those files.\n ignored_extensions = ('ico', 'png', 'txt', 'xml')\n if request.path.rsplit('.', 1)[-1] in ignored_extensions:\n return\n\n ips = request.headers.getlist('X-Forwarded-For')\n if not ips:\n return\n\n # If the X-Forwarded-For header contains multiple comma-separated\n # IP addresses, we're only interested in the last one.\n ip = ips[0].strip()\n if ip[-1] == ',':\n ip = ip[:-1]\n ip = ip.rsplit(',', 1)[-1].strip()\n\n if self.matches_ip(ip):\n if self.logger is not None:\n self.logger.info(\"IPBlock: matched {}, {}\".format(ip, self.block_msg))\n if self.blocking_enabled:\n return 'IP Blocked', 200", "response": "Check the current request and block it if the IP address it s coming from is blacklisted."}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nreturns True if the given IP is blacklisted False otherwise.", "response": "def matches_ip(self, ip):\n \"\"\"Return True if the given IP is blacklisted, False otherwise.\"\"\"\n\n # Check the cache if caching is enabled\n if self.cache is not None:\n matches_ip = self.cache.get(ip)\n if matches_ip is not None:\n return matches_ip\n\n # Query MongoDB to see if the IP is blacklisted\n matches_ip = IPNetwork.matches_ip(\n ip, read_preference=self.read_preference)\n\n # Cache the result if caching is enabled\n if self.cache is not None:\n self.cache[ip] = matches_ip\n\n return matches_ip"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef execute(self, conn, logical_file_name, block_name, block_id, transaction=False):\n binds = {}\n sql = ''\n\n if logical_file_name:\n if isinstance(logical_file_name, basestring):\n wheresql = \"WHERE F.LOGICAL_FILE_NAME = :logical_file_name\"\n binds = {\"logical_file_name\": logical_file_name}\n sql = \"{sql} {wheresql}\".format(sql=self.sql, wheresql=wheresql)\n elif isinstance(logical_file_name, list):\n wheresql = \"WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)\"\n lfn_generator, binds = create_token_generator(logical_file_name)\n sql = \"{lfn_generator} {sql} {wheresql}\".format(lfn_generator=lfn_generator, sql=self.sql,\n wheresql=wheresql)\n elif block_name:\n joins = \"JOIN {owner}BLOCKS B on B.BLOCK_ID = F.BLOCK_ID\".format(owner=self.owner)\n wheresql = \"WHERE B.BLOCK_NAME = :block_name\"\n binds = {\"block_name\": block_name}\n sql = \"{sql} {joins} {wheresql}\".format(sql=self.sql, joins=joins, wheresql=wheresql)\n elif block_id:\n wheresql = \"WHERE F.BLOCK_ID = :block_id\"\n binds = {\"block_id\": block_id}\n sql = \"{sql} {wheresql}\".format(sql=self.sql, wheresql=wheresql)\n else:\n dbsExceptionHandler('dbsException-invalid-input', \"Logical_file_names is required for listChild dao.\", self.logger.exception)\n\n cursors = self.dbi.processData(sql, binds, conn, transaction=transaction, returnCursor=True)\n result = []\n for c in cursors:\n result.extend(self.formatCursor(c, size=100))\n return result", "response": "List all primary datasets of a given logical file name and block id."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listAcquisitionEras(self, acq=''):\n try:\n acq = str(acq)\n except:\n dbsExceptionHandler('dbsException-invalid-input', 'acquistion_era_name given is not valid : %s' %acq)\n conn = self.dbi.connection()\n try:\n result = self.acqlst.execute(conn, acq)\n return result\n finally:\n if conn:conn.close()", "response": "List all acquistion eras in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nlisting all acquisition eras in DBS.", "response": "def listAcquisitionEras_CI(self, acq=''):\n \"\"\"\n Returns all acquistion eras in dbs\n \"\"\"\n try:\n acq = str(acq)\n except:\n dbsExceptionHandler('dbsException-invalid-input', 'aquistion_era_name given is not valid : %s'%acq)\n conn = self.dbi.connection()\n try:\n result = self.acqlst_ci.execute(conn, acq)\n return result\n finally:\n if conn:conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef insertAcquisitionEra(self, businput):\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n businput[\"acquisition_era_id\"] = self.sm.increment(conn, \"SEQ_AQE\", tran)\n businput[\"acquisition_era_name\"] = businput[\"acquisition_era_name\"]\n #self.logger.warning(businput)\n self.acqin.execute(conn, businput, tran)\n tran.commit()\n tran = None\n except KeyError as ke:\n dbsExceptionHandler('dbsException-invalid-input', \"Invalid input:\"+ke.args[0])\n except Exception as ex:\n if str(ex).lower().find(\"unique constraint\") != -1 or str(ex).lower().find(\"duplicate\") != -1:\n dbsExceptionHandler('dbsException-invalid-input2', \"Invalid input: acquisition_era_name already exists in DB\", serverError=\"%s\" %ex)\n else:\n raise\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()", "response": "Insert a new acquisition era into DB."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef UpdateAcqEraEndDate(self, acquisition_era_name =\"\", end_date=0):\n if acquisition_era_name ==\"\" or end_date==0:\n dbsExceptionHandler('dbsException-invalid-input', \"acquisition_era_name and end_date are required\")\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n self.acqud.execute(conn, acquisition_era_name, end_date, tran)\n if tran:tran.commit()\n tran = None\n finally:\n if tran:tran.rollback()\n if conn:conn.close()", "response": "Update the end date of a acquisitionera."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef execute(self, conn, app=\"\", release_version=\"\", pset_hash=\"\", output_label=\"\", \n global_tag='', transaction = False):\n \"\"\"\n returns id for a given application\n \"\"\"\n\tsql = self.sql\n binds = {}\n\tsetAnd=False\n\tif not app == \"\":\n\t\tsql += \" A.APP_NAME=:app_name\"\n \tbinds[\"app_name\"]=app\n\t\tsetAnd=True\n\tif not release_version == \"\":\n\t\tif setAnd : sql += \" AND \"\n\t\tsql += \" R.RELEASE_VERSION=:release_version\"\n\t\tbinds[\"release_version\"]=release_version\n\t\tsetAnd=True\n\tif not pset_hash == \"\":\n\t\tif setAnd : sql += \" AND \"\n\t\tsql += \" P.PSET_HASH=:pset_hash\"\n\t\tbinds[\"pset_hash\"]=pset_hash\n\t\tsetAnd=True\n\tif not output_label == \"\":\n\t\tif setAnd : sql += \" AND \"\n\t\tsql += \" O.OUTPUT_MODULE_LABEL=:output_module_label\"\n\t\tbinds[\"output_module_label\"]=output_label\n setAnd=True\n if not global_tag == \"\":\n if setAnd : sql += \" AND \"\n sql += \" O.GLOBAL_TAG=:global_tag\"\n binds[\"global_tag\"]=global_tag\n\tif app == release_version == pset_hash == global_tag == \"\":\n dbsExceptionHandler('dbsException-invalid-input', \"%s Either app_name, release_version, pset_hash or global_tag must be provided\", self.logger.exception)\t\n\n result = self.dbi.processData(sql, binds, conn, transaction)\n plist = self.formatDict(result)\n\tif len(plist) < 1: return -1\n return plist[0][\"output_mod_config_id\"]", "response": "returns the id of a given application and release version"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nprepare the ordered list of blocks from source dataset and the parent dataset.", "response": "def prepareDatasetMigrationList(self, conn, request):\n \"\"\"\n Prepare the ordered lists of blocks based on input DATASET (note Block is different)\n 1. Get list of blocks from source\n 2. Check and see if these blocks are already at DST\n 3. Check if dataset has parents\n 4. Check if parent blocks are already at DST\n\n \"\"\"\n ordered_dict = {}\n order_counter = 0\n srcdataset = request[\"migration_input\"]\n url = request[\"migration_url\"]\n try:\n tmp_ordered_dict = self.processDatasetBlocks(url, conn,\n srcdataset, order_counter)\n if tmp_ordered_dict != {}:\n ordered_dict.update(tmp_ordered_dict)\n\t\tself.logger.debug(\"ordered_dict length at level %s\" %order_counter)\n\t\tself.logger.debug(len(ordered_dict))\n else:\n #return {}\n m = 'Requested dataset %s is already in destination' %srcdataset\n dbsExceptionHandler('dbsException-invalid-input2', message=m, serverError=m)\n # Now process the parent datasets\n parent_ordered_dict = self.getParentDatasetsOrderedList(url, conn,\n srcdataset, order_counter+1)\n if parent_ordered_dict != {}:\n ordered_dict.update(parent_ordered_dict)\n\t\tself.logger.debug(\"***** parent ordered_dict length at level %s ******\" %(order_counter+1))\n\t\tself.logger.debug(len(ordered_dict))\n return remove_duplicated_items(ordered_dict)\n except dbsException:\n raise\n except Exception as ex:\n if 'urlopen error' in str(ex):\n message='Connection to source DBS server refused. Check your source url.'\n elif 'Bad Request' in str(ex):\n message='cannot get data from the source DBS server. Check your migration input.'\n else:\n message='Failed to make a dataset migration list.'\n dbsExceptionHandler('dbsException-invalid-input2', \\\n serverError=\"\"\"DBSMigrate/prepareDatasetMigrationList failed\n to prepare ordered block list: %s\"\"\" %str(ex), message=message)"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef getParentDatasetsOrderedList(self, url, conn, dataset, order_counter):\n ordered_dict = {}\n parentSrcDatasets = self.getSrcDatasetParents(url, dataset)\n if len(parentSrcDatasets) > 0:\n parentSrcDatasetNames = [y['parent_dataset']\n for y in parentSrcDatasets]\n for aparentDataset in parentSrcDatasetNames:\n parent_ordered_dict = self.processDatasetBlocks(url, conn,\n aparentDataset, order_counter)\n\t\tself.logger.debug(\"************ dict length of parent blocks for the parent dataset %s at level %s\" %(aparentDataset, order_counter))\n\t\tself.logger.debug(len(parent_ordered_dict))\n if parent_ordered_dict != {}:\n ordered_dict.update(parent_ordered_dict)\n\t\t self.logger.debug(\"**** ordered_dict length ****\")\n\t\t self.logger.debug(len(ordered_dict))\t\n # parents of parent\n pparent_ordered_dict = self.getParentDatasetsOrderedList(url,\n conn, aparentDataset, order_counter+1)\n\t\tself.logger.debug(\"************dict length parent parent blocks for the parent dataset %s at level %s\" %(aparentDataset, order_counter+1))\n\t\tself.logger.debug(len(pparent_ordered_dict))\n if pparent_ordered_dict != {}:\n ordered_dict.update(pparent_ordered_dict)\n\t self.logger.debug(\"**** ordered_dict length ****\")\n\t\t self.logger.debug(len(ordered_dict))\t\n return ordered_dict", "response": "get the ordered list of all parent datasets"}
{"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nprepare the ordered list of blocks based on input BLOCKNUM", "response": "def prepareBlockMigrationList(self, conn, request):\n \"\"\"\n Prepare the ordered lists of blocks based on input BLOCK\n 1. see if block already exists at dst (no need to migrate),\n raise \"ALREADY EXISTS\"\n 2. see if block exists at src & make sure the block's open_for_writing=0\n 3. see if block has parents\n 4. see if parent blocks are already at dst\n 5. add 'order' to parent and then this block (ascending)\n 6. return the ordered list\n \"\"\"\n ordered_dict = {}\n block_name = request[\"migration_input\"]\n url = request[\"migration_url\"]\n order_counter = 0\n try:\n #1.\n dstblock = self.blocklist.execute(conn, block_name=block_name)\n\t for item in dstblock:\n\t\tif item:\n\t\t dbsExceptionHandler('dbsException-invalid-input', 'ALREADY EXISTS: \\\n\t\t\tRequired block (%s) migration is already at destination' %block_name, self.logger.exception)\n #2.\n srcblock = self.getSrcBlocks(url, block=block_name)\n if len(srcblock) < 1:\n e = 'DBSMigration: Invalid input. Required Block %s not found at source %s.' %(block_name, url)\n dbsExceptionHandler('dbsException-invalid-input2', e, self.logger.exception, e)\n ##This block has to be migrated\n ordered_dict[order_counter] = []\n ordered_dict[order_counter].append(block_name)\n parent_ordered_dict = self.getParentBlocksOrderedList(url, conn,\n block_name, order_counter+1)\n if parent_ordered_dict != {}:\n ordered_dict.update(parent_ordered_dict)\n #6.\n #check for duplicates\n\n return remove_duplicated_items(ordered_dict)\n except Exception as ex:\n\t if '500 Internal Server Error' in str(ex):\t\n\t\t#\"Server Error\" is the default in dbsExceptionHandler\n\t dbsExceptionHandler('Server Error', str(ex), self.logger.exception, \"DBSMigrate/prepareBlockMigrationList: \"+str(ex))\n\t if isinstance(ex, pycurl.error):\n\t\tif ex.args[0] == 7:\n\t\t message = ex.args[1]\n\t\t dbsExceptionHandler('dbsException-failed-connect2host', message, self.logger.exception, message)\t\n\t if 'urlopen error' in str(ex):\n message='Connection to source DBS server refused. Check your source url.'\n elif 'Bad Request' in str(ex):\n message='cannot get data from the source DBS server. Check your migration input.'\n else:\n message='Failed to make a block migration list.'\n dbsExceptionHandler('dbsException-invalid-input2', \\\n \"\"\"DBSMigrate/prepareBlockMigrationList failed\n to prepare ordered block list: %s\"\"\" %str(ex), self.logger.exception, message)"}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef removeMigrationRequest(self, migration_rqst):\n conn = self.dbi.connection()\n try:\n tran = conn.begin()\n self.mgrremove.execute(conn, migration_rqst)\n tran.commit()\n except dbsException as he:\n if conn: conn.close()\n raise\n except Exception as ex:\n if conn: conn.close()\n raise\n if conn: conn.close()", "response": "Method to remove pending or failed migration request from the queue."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef insertMigrationRequest(self, request):\n conn = self.dbi.connection()\n # check if already queued.\n #If the migration_input is the same, but the src url is different,\n #We will consider it as a submitted request. YG 05-18-2012\n try:\n alreadyqueued = self.mgrlist.execute(conn,\n migration_input=request[\"migration_input\"])\n is_already_queued = len(alreadyqueued) > 0\n # close connection before returning json object\n if is_already_queued and conn:\n conn.close()\n #if the queued is not failed, then we don't need to do it again.\n #add a new migration_status=9 (terminal failure)\n\t if is_already_queued and alreadyqueued[0]['migration_status'] == 2:\n return {\"migration_report\" : \"REQUEST ALREADY QUEUED. Migration is finished\",\n \"migration_details\" : alreadyqueued[0] }\n elif is_already_queued and alreadyqueued[0]['migration_status'] != 9:\n return {\"migration_report\" : \"REQUEST ALREADY QUEUED. Migration in progress\",\n \"migration_details\" : alreadyqueued[0] }\n elif is_already_queued and alreadyqueued[0]['migration_status'] == 9:\n return {\"migration_report\" : \"REQUEST ALREADY QUEUED. Migration terminally failed. \",\n \"migration_details\" : alreadyqueued[0] }\n else:\n # not already queued\n #Determine if its a dataset or block migration\n #The prepare list calls will check if the requested blocks/dataset already in destination.\n if request[\"migration_input\"].find(\"#\") != -1:\n ordered_list = self.prepareBlockMigrationList(conn, request)\n else:\n ordered_list = self.prepareDatasetMigrationList(conn, request)\n # now we have the blocks that need to be queued (ordered)\n except Exception as ex:\n if conn: conn.close()\n raise\n\n tran = conn.begin()\n try:\n # insert the request\n #request.update(migration_status=0)\n request['migration_request_id'] = self.sm.increment(conn, \"SEQ_MR\", tran)\n self.mgrin.execute(conn, request, tran)\n # INSERT the ordered_list\n totalQueued = 0\n\t k = ordered_list.keys()\n\t k.sort()\n\t k.reverse()\t\n\t self.logger.debug(\"****************** ordered_list keys **********\")\n self.logger.debug(k)\n #for iter in reversed(range(len(ordered_list))):\n\t for iter in k:\t\n\t\tself.logger.debug(\"length for Key: %s\" %iter)\n\t self.logger.debug(len(ordered_list[iter]))\t\n if len(ordered_list[iter]) > 0:\n daoinput = [{\n \"migration_block_id\" :\n self.sm.increment(conn, \"SEQ_MB\", tran),\n \"migration_request_id\" :\n request[\"migration_request_id\"],\n \"migration_block_name\" : blk,\n \"migration_order\" : iter,\n \"migration_status\" : 0,\n \"creation_date\" : request['creation_date'],\n \"last_modification_date\" : request['last_modification_date'],\n \"create_by\" : request['create_by'],\n \"last_modified_by\" : request['last_modified_by']\n }\n for blk in ordered_list[iter]]\n self.mgrblkin.execute(conn, daoinput, tran)\n totalQueued += len(ordered_list[iter])\n # all good ?, commit the transaction\n tran.commit()\n if conn: conn.close()\n # return things like (X blocks queued for migration)\n return {\n \"migration_report\" : \"REQUEST QUEUED with total %d blocks to be migrated\" %totalQueued,\n \"migration_details\" : request }\n except SQLAlchemyIntegrityError as ex:\n\t e = \"DBSMigration: ENQUEUEING_FAILED1 from SQLAichemy Integrity Error. Reason may be (%s)\" %(ex.statement + \"; \" + str(ex.params) + \"; \" + str(ex.orig))\n\t self.logger.debug(e)\t\t\n\t import traceback\n tk = traceback.format_exc() \t\t\n\t self.logger.debug(tk) \t\n tran.rollback()\n if conn: conn.close()\n if (str(ex).find(\"unique constraint\") != -1 or\n str(ex).lower().find(\"duplicate\") != -1):\n #FIXME: Need to check which unique key. YG 2/11/13\n #The unique constraints are: MIGRATION_REQUESTS(MIGRATION_INPUT)\n #MIGRATION_BLOCKS(MIGRATION_BLOCK_NAME, MIGRATION_REQUEST_ID)\n return {\n \"migration_report\" : \"REQUEST ALREADY QUEUED\",\n \"migration_details\" : request }\n else:\n if conn: conn.close()\n\t\tself.logger.error(tk)\n m = \"DBSMigration: ENQUEUEING_FAILED1.\"\n dbsExceptionHandler('dbsException-invalid-input2', m, self.logger.exception, e)\n\texcept HTTPError as he:\n\t raise he\t\n except Exception as ex:\n\t import traceback\n self.logger.error(traceback.format_exc())\t\n if tran: tran.rollback()\n if conn: conn.close()\n m = \"DBSMigration: ENQUEUEING_FAILED.\"\n e = \"DBSMigration: ENQUEUEING_FAILED. General exception caught: Reason may be (%s)\" %str(ex)\n dbsExceptionHandler('dbsException-invalid-input2', m, self.logger.exception, e)\n finally:\n if conn: conn.close()", "response": "Method to insert a migration request into the MIGRATION_REQUESTS table."}
{"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nlists the status of the migration request", "response": "def listMigrationRequests(self, migration_request_id=\"\", block_name=\"\",\n dataset=\"\", user=\"\", oldest=False):\n \"\"\"\n get the status of the migration\n migratee : can be dataset or block_name\n \"\"\"\n\n conn = self.dbi.connection()\n migratee = \"\"\n try:\n if block_name:\n migratee = block_name\n elif dataset:\n migratee = dataset\n result = self.mgrlist.execute(conn, migration_url=\"\",\n migration_input=migratee, create_by=user,\n migration_request_id=migration_request_id, oldest=oldest)\n return result\n\n finally:\n if conn: conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nlists all migration blocks", "response": "def listMigrationBlocks(self, migration_request_id=\"\"):\n \"\"\"\n get eveything of block that is has status = 0 and migration_request_id as specified.\n \"\"\"\n\n conn = self.dbi.connection()\n try:\n return self.mgrblklist.execute(conn, migration_request_id=migration_request_id)\n finally:\n if conn: conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nupdating the status of a migration request.", "response": "def updateMigrationRequestStatus(self, migration_status, migration_request_id):\n \"\"\"\n migration_status:\n 0=PENDING\n 1=IN PROGRESS\n 2=COMPLETED\n 3=FAILED (will be retried)\n 9=Terminally FAILED \n status change:\n 0 -> 1\n 1 -> 2\n 1 -> 3\n 1 -> 9\n are only allowed changes for working through migration.\n 3 -> 1 is allowed for retrying and retry count +1.\n\n \"\"\"\n\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n upst = dict(migration_status=migration_status,\n migration_request_id=migration_request_id,\n last_modification_date=dbsUtils().getTime())\n self.mgrRqUp.execute(conn, upst)\n except:\n if tran:tran.rollback()\n raise\n else:\n if tran:tran.commit()\n finally:\n #open transaction is committed when conn closed.\n if conn:conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef updateMigrationBlockStatus(self, migration_status=0, migration_block=None, migration_request=None):\n\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n if migration_block:\n upst = dict(migration_status=migration_status,\n migration_block_id=migration_block, last_modification_date=dbsUtils().getTime())\n elif migration_request:\n upst = dict(migration_status=migration_status, migration_request_id=migration_request,\n last_modification_date=dbsUtils().getTime())\n self.mgrup.execute(conn, upst)\n except:\n if tran:tran.rollback()\n raise\n else:\n if tran:tran.commit()\n finally:\n if conn:conn.close()", "response": "Update the status of a migration block."}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef getSrcDatasetParents(self, url, dataset):\n #resturl = \"%s/datasetparents?dataset=%s\" % (url, dataset)\n params={'dataset':dataset}\n return cjson.decode(self.callDBSService(url, 'datasetparents', params, {}))", "response": "Get list of parents of a dataset at a given URL"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ngetting the parents of a block at a given URL.", "response": "def getSrcBlockParents(self, url, block):\n \"\"\"\n List block at src DBS\n \"\"\"\n #blockname = block.replace(\"#\", urllib.quote_plus('#'))\n #resturl = \"%s/blockparents?block_name=%s\" % (url, blockname)\n params={'block_name':block}\n return cjson.decode(self.callDBSService(url, 'blockparents', params, {}))"}
{"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nget all source blocks from a dataset", "response": "def getSrcBlocks(self, url, dataset=\"\", block=\"\"):\n \"\"\"\n Need to list all blocks of the dataset and its parents starting from the top\n For now just list the blocks from this dataset.\n Client type call...\n \"\"\"\n if block:\n params={'block_name':block, 'open_for_writing':0}\n elif dataset:\n params={'dataset':dataset, 'open_for_writing':0}\n else:\n m = 'DBSMigration: Invalid input. Either block or dataset name has to be provided'\n e = 'DBSMigrate/getSrcBlocks: Invalid input. Either block or dataset name has to be provided'\n dbsExceptionHandler('dbsException-invalid-input2', m, self.logger.exception, e )\n\n return cjson.decode(self.callDBSService(url, 'blocks', params, {}))"}
{"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nbuild dynamic sql based on daoinput", "response": "def executeSingle( self, conn, daoinput, tablename, transaction = False):\t\t\t\n\t\"\"\"build dynamic sql based on daoinput\"\"\"\n\tsql1 = \" insert into %s%s( \" %(self.owner, tablename)\n\tsql2 =\" values(\"\n\t\"Now loop over all the input keys. We need to check if all the keys are valid !!!\"\n for key in daoinput:\n sql1 += \"%s,\" %key.upper()\n sql2 += \":%s,\" %key.lower()\n \n sql = sql1.strip(',') + ') ' + sql2.strip(',') + ' )'\n\n self.dbi.processData(sql, daoinput, conn, transaction)"}
{"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ngets the value for the option or default if not defined.", "response": "def get(self, option, default=undefined, cast=undefined):\n \"\"\"\n Return the value for option or default if defined.\n \"\"\"\n if option in self.repository:\n value = self.repository.get(option)\n else:\n value = default\n\n if isinstance(value, Undefined):\n raise UndefinedValueError('%s option not found and default value was not defined.' % option)\n\n if isinstance(cast, Undefined):\n cast = lambda v: v # nop\n elif cast is bool:\n cast = self._cast_boolean\n\n return cast(value)"}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nparsing a requirements file and return a list of the install_requires components", "response": "def parse_requirements(requirements_file):\n \"\"\"\n Create a list for the 'install_requires' component of the setup function\n by parsing a requirements file\n \"\"\"\n\n if os.path.exists(requirements_file):\n # return a list that contains each line of the requirements file\n return open(requirements_file, 'r').read().splitlines()\n else:\n print(\"ERROR: requirements file \" + requirements_file + \" not found.\")\n sys.exit(1)"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nexecute the SQL query.", "response": "def execute(self, conn, dataset=\"\", block_name=\"\", data_tier_name=\"\", origin_site_name=\"\", logical_file_name=\"\",\n run_num=-1, min_cdate=0, max_cdate=0, min_ldate=0, max_ldate=0, cdate=0,\n ldate=0, open_for_writing=-1, transaction = False):\n \"\"\"\n dataset: /a/b/c\n block: /a/b/c#d\n \"\"\"\n binds = {}\n\n basesql = self.sql\n joinsql = \"\"\n wheresql = \"\"\n generatedsql = \"\"\n\n if logical_file_name and logical_file_name != \"%\":\n joinsql += \" JOIN %sFILES FL ON FL.BLOCK_ID = B.BLOCK_ID \" %(self.owner)\n op = (\"=\", \"like\")[\"%\" in logical_file_name]\n wheresql += \" WHERE LOGICAL_FILE_NAME %s :logical_file_name \" % op\n binds.update( logical_file_name = logical_file_name )\n\n if block_name and block_name !=\"%\":\n andorwhere = (\"WHERE\", \"AND\")[bool(wheresql)]\n op = (\"=\", \"like\")[\"%\" in block_name]\n wheresql += \" %s B.BLOCK_NAME %s :block_name \" % ((andorwhere, op))\n binds.update( block_name = block_name )\n\n if data_tier_name or (dataset and dataset!=\"%\"):\n joinsql += \"JOIN %sDATASETS DS ON DS.DATASET_ID = B.DATASET_ID \" % (self.owner)\n andorwhere = (\"WHERE\", \"AND\")[bool(wheresql)]\n if dataset:\n op = (\"=\", \"like\")[\"%\" in dataset]\n wheresql += \" %s DS.DATASET %s :dataset \" % ((andorwhere, op))\n binds.update(dataset=dataset)\n if data_tier_name:\n joinsql += \"JOIN {owner}DATA_TIERS DT ON DS.DATA_TIER_ID=DT.DATA_TIER_ID \".format(owner=self.owner)\n wheresql += \" %s DT.DATA_TIER_NAME=:data_tier_name \" % (andorwhere)\n binds.update(data_tier_name=data_tier_name)\n\n if origin_site_name and origin_site_name != \"%\":\n op = (\"=\", \"like\")[\"%\" in origin_site_name]\n wheresql += \" AND B.ORIGIN_SITE_NAME %s :origin_site_name \" % op\n binds.update(origin_site_name = origin_site_name)\n\n if open_for_writing == 0 or open_for_writing == 1:\n wheresql += \" AND B.OPEN_FOR_WRITTING = :open_for_writing \"\n\n if cdate != 0:\n wheresql += \"AND B.CREATION_DATE = :cdate \"\n binds.update(cdate = cdate)\n elif min_cdate != 0 and max_cdate != 0:\n wheresql += \"AND B.CREATION_DATE BETWEEN :min_cdate and :max_cdate \"\n binds.update(min_cdate = min_cdate)\n binds.update(max_cdate = max_cdate)\n elif min_cdate != 0 and max_cdate == 0:\n wheresql += \"AND B.CREATION_DATE > :min_cdate \"\n binds.update(min_cdate = min_cdate)\n elif min_cdate ==0 and max_cdate != 0:\n wheresql += \"AND B.CREATION_DATE < :max_cdate \"\n binds.update(max_cdate = max_cdate)\n else:\n pass\n if ldate != 0:\n wheresql += \"AND B.LAST_MODIFICATION_DATE = :ldate \"\n binds.update(ldate = ldate)\n elif min_ldate != 0 and max_ldate != 0:\n wheresql += \"AND B.LAST_MODIFICATION_DATE BETWEEN :min_ldate and :max_ldate \"\n binds.update(min_ldate = min_ldate)\n binds.update(max_ldate = max_ldate)\n elif min_ldate != 0 and max_ldate == 0:\n wheresql += \"AND B.LAST_MODIFICATION_DATE > :min_ldate \"\n binds.update(min_ldate = min_ldate)\n elif min_cdate ==0 and max_cdate != 0:\n wheresql += \"AND B.LAST_MODIFICATION_DATE < :max_ldate \"\n binds.update(max_ldate = max_ldate)\n else:\n pass\n\n #one may provide a list of runs , so it has to be the last one in building the bind.\n if run_num !=-1 :\n basesql = basesql.replace(\"SELECT\", \"SELECT DISTINCT\") + \" , FLM.RUN_NUM \"\n if not logical_file_name:\n joinsql += \" JOIN %sFILES FL ON FL.BLOCK_ID = B.BLOCK_ID \" %(self.owner)\n joinsql += \" JOIN %sFILE_LUMIS FLM on FLM.FILE_ID = FL.FILE_ID \" %(self.owner)\n run_list=[]\n wheresql_run_list=''\n wheresql_run_range=''\n #\n for r in parseRunRange(run_num):\n if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long):\n run_list.append(str(r))\n if isinstance(r, run_tuple):\n if r[0] == r[1]:\n dbsExceptionHandler('dbsException-invalid-input', \"DBS run_num range must be apart at least by 1.\", self.logger.exception)\n wheresql_run_range = \" FLM.RUN_NUM between :minrun and :maxrun \"\n binds.update({\"minrun\":r[0]})\n binds.update({\"maxrun\":r[1]})\n #\n if run_list:\n wheresql_run_list = \" FLM.RUN_NUM in (SELECT TOKEN FROM TOKEN_GENERATOR) \"\n generatedsql, run_binds = create_token_generator(run_list)\n binds.update(run_binds)\n # \n if wheresql_run_range and wheresql_run_list:\n wheresql += \" and (\" + wheresql_run_range + \" or \" + wheresql_run_list + \" )\"\n elif wheresql_run_range and not wheresql_run_list:\n wheresql += \" and \" + wheresql_run_range\n elif not wheresql_run_range and wheresql_run_list:\n wheresql += \" and \" + wheresql_run_list\n #\n sql = \" \".join((generatedsql, basesql, self.fromsql, joinsql, wheresql))\n\n cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)\n for i in cursors:\n d = self.formatCursor(i, size=100)\n if isinstance(d, list) or isinstance(d, GeneratorType):\n for elem in d:\n yield elem\n elif d:\n yield d"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef execute(self, conn, dsType = \"\", dataset=\"\", transaction = False):\n sql = self.sql\n binds={}\n if not dsType and not dataset:\n pass\n elif dsType and dataset in (\"\", None, '%'):\n op = (\"=\", \"like\")[\"%\" in dsType]\n sql += \"WHERE PDT.PRIMARY_DS_TYPE %s :primdstype\"%op \n binds = {\"primdstype\":dsType}\n\telif dataset and dsType in (\"\", None, '%'):\n\t op = (\"=\", \"like\")[\"%\" in dataset]\n\t sql += \"JOIN %sPRIMARY_DATASETS PDS on PDS.PRIMARY_DS_TYPE_ID = PDT.PRIMARY_DS_TYPE_ID \\\n\t JOIN %sDATASETS DS ON DS.PRIMARY_DS_ID = PDS.PRIMARY_DS_ID \\\n\t WHERE DS.DATASET %s :dataset\" %(self.owner, self.owner, op)\n\t binds={\"dataset\":dataset}\n elif dataset and dsType:\n op = (\"=\", \"like\")[\"%\" in dsType]\n op1 = (\"=\", \"like\")[\"%\" in dataset]\n sql += \"JOIN %sPRIMARY_DATASETS PDS on PDS.PRIMARY_DS_TYPE_ID = PDT.PRIMARY_DS_TYPE_ID \\\n JOIN %sDATASETS DS ON DS.PRIMARY_DS_ID = PDS.PRIMARY_DS_ID \\\n WHERE DS.DATASET %s :dataset and PDT.PRIMARY_DS_TYPE %s :primdstype\" \\\n %(self.owner, self.owner, op1, op)\n binds = {\"primdstype\":dsType, \"dataset\":dataset}\n\telse:\n\t dbsExceptionHandler('dbsException-invalid-input', \"DAO Primary_DS_TYPE List accepts no input, or\\\n dataset,primary_ds_type as input.\", self.logger.exception)\n cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)\n result = []\n for c in cursors:\n result.extend(self.formatCursor(c, size=100))\n return result", "response": "List all primary dataset types."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nexecuting a single query for the application", "response": "def execute(self, conn, dataset=\"\", logical_file_name=\"\", app=\"\", release_version=\"\", pset_hash=\"\", \n output_label =\"\", block_id=0, global_tag='', transaction = False):\n \"\"\"\n returns id for a given application\n \"\"\"\n\t#sql=self.sql\t\n binds = {}\n\tsetAnd=False\n\t#add search only block id only for migration dump block. \n\tif block_id==0:\n\t sql = self.sql1 + self.sql2\n\t if dataset:\n\t\tsql += \" JOIN %sDATASET_OUTPUT_MOD_CONFIGS DC ON DC.OUTPUT_MOD_CONFIG_ID=O.OUTPUT_MOD_CONFIG_ID\" % self.owner\n\t\tsql += \" JOIN %sDATASETS DS ON DS.DATASET_ID=DC.DATASET_ID\" % self.owner\n\t if logical_file_name:\n\t\tsql += \" JOIN %sFILE_OUTPUT_MOD_CONFIGS FC ON FC.OUTPUT_MOD_CONFIG_ID=O.OUTPUT_MOD_CONFIG_ID\" % self.owner\n\t\tsql += \" JOIN %sFILES FS ON FS.FILE_ID=FC.FILE_ID\" % self.owner\n\t if not app == \"\":\n\t\top = (\"=\", \"like\")[\"%\" in app]\n\t\tsql += \" WHERE A.APP_NAME %s :app_name\" % op\n \tbinds[\"app_name\"]=app\n\t\tsetAnd=True\n\t if not release_version == \"\":\n\t\top = (\"=\", \"like\")[\"%\" in release_version]\n\t\tif setAnd : sql += \" AND \" \n\t\telse : sql += \" WHERE \"\n\t\tsql += \" R.RELEASE_VERSION %s :release_version\" % op\n\t\tbinds[\"release_version\"]=release_version\n\t\tsetAnd=True\n\t if not pset_hash == \"\":\n\t\top = (\"=\", \"like\")[\"%\" in pset_hash]\n\t\tif setAnd : sql += \" AND \"\n\t\telse : sql += \" WHERE \"\n\t\tsql += \" P.PSET_HASH %s :pset_hash\" % op\n\t\tbinds[\"pset_hash\"]=pset_hash\n\t\tsetAnd=True\n\t if not output_label == \"\":\n\t\top = (\"=\", \"like\")[\"%\" in output_label]\n if setAnd : sql += \" AND \"\n\t\telse : sql += \" WHERE \"\n\t sql += \" O.OUTPUT_MODULE_LABEL %s :output_module_label\" % op\n\t binds[\"output_module_label\"]=output_label\n\t\tsetAnd=True\n if not global_tag == \"\":\n op = (\"=\", \"like\")[\"%\" in global_tag]\n 
if setAnd : sql += \" AND \"\n else : sql += \" WHERE \"\n sql += \" O.GLOBAL_TAG %s :global_tag\" % op\n binds[\"global_tag\"]=global_tag\n setAnd=True\n\t if dataset:\n\t\tif setAnd : sql += \" AND \"\n\t\telse : sql += \" WHERE \"\n\t\tsql += \"DS.DATASET=:dataset\"\n\t\tbinds[\"dataset\"]=dataset\n\t\tsetAnd=True\n\t if logical_file_name:\n\t\tif setAnd : sql += \" AND \"\n\t\telse : sql += \" WHERE \"\n\t\tsql += \"FS.LOGICAL_FILE_NAME=:logical_file_name\"\n\t\tbinds[\"logical_file_name\"]=logical_file_name\n\t\tsetAnd=True\n\telse:\n\t #select by block id and return config along with LFN\n\t sql= self.sql1 + \" , FS.LOGICAL_FILE_NAME LFN \" + self.sql2 \\\n\t + \" JOIN %sFILE_OUTPUT_MOD_CONFIGS FC ON FC.OUTPUT_MOD_CONFIG_ID=O.OUTPUT_MOD_CONFIG_ID\" % self.owner \\\n\t\t + \" JOIN %sFILES FS ON FS.FILE_ID=FC.FILE_ID\" % self.owner \\\n\t\t + \" WHERE FS.BLOCK_ID = :block_id \"\n\t binds[\"block_id\"]=block_id\n\tcursors = self.dbi.processData(sql, binds, conn, transaction=False, returnCursor=True)\n\t#assert len(cursors) == 1, \"output module config does not exist\"\n result = []\n\tfor c in cursors:\n result.extend(self.formatCursor(c, size=100))\n return result"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef execute(self, conn, block_name=\"\", transaction = False):\n if not conn:\n msg='Oracle/BlockParent/List. No DB connection found'\n dbsExceptionHandler('dbsException-failed-connect2host', msg, self.logger.exception)\n\n sql = self.sql\n binds = {}\n\tif block_name:\n\t binds.update(block_name = block_name)\n else:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Oracle/BlockParent/ListChild. block_name must be provided.\", self.logger.exception)\n\n\tcursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)\n result = []\n for c in cursors:\n result.extend(self.formatCursor(c, size=100))\n return result", "response": "List all the related entries in a specific block."}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nrequires parameter: full block_name or dataset name. No wildcards allowed. run_num is optional.", "response": "def listFileSummary(self, block_name=\"\", dataset=\"\", run_num=-1, validFileOnly=0, sumOverLumi=0):\n \"\"\"\n required parameter: full block_name or dataset name. No wildcards allowed. run_num is optional.\n \"\"\"\n if not block_name and not dataset:\n msg = \"Block_name or dataset is required for listFileSummary API\"\n dbsExceptionHandler('dbsException-invalid-input', msg, self.logger.exception)\n if '%' in block_name or '*' in block_name or '%' in dataset or '*' in dataset:\n msg = \"No wildcard is allowed in block_name or dataset for filesummaries API\"\n dbsExceptionHandler('dbsException-invalid-input', msg, self.logger.exception)\n #\n with self.dbi.connection() as conn:\n for item in self.filesummarylist.execute(conn, block_name, dataset, run_num,\n validFileOnly=validFileOnly, sumOverLumi=sumOverLumi):\n if item['num_file']==0 and item['num_block']==0 \\\n and item['num_event']==0 and item['file_size']==0:\n pass\n else:\n yield item"}
{"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nlists all file parents of a given logical file", "response": "def listFileParents(self, logical_file_name=\"\", block_id=0, block_name=\"\"):\n \"\"\"\n required parameter: logical_file_name or block_name\n returns: this_logical_file_name, parent_logical_file_name, parent_file_id\n \"\"\"\n #self.logger.debug(\"lfn %s, block_name %s, block_id :%s\" % (logical_file_name, block_name, block_id))\n if not logical_file_name and not block_name and not block_id:\n dbsExceptionHandler('dbsException-invalid-input', \\\n \"Logical_file_name, block_id or block_name is required for fileparents api\", self.logger.exception )\n with self.dbi.connection() as conn:\n sqlresult = self.fileparentlist.execute(conn, logical_file_name, block_id, block_name)\n d = {}\n #self.logger.debug(sqlresult)\n for i in sqlresult:\n k = i['this_logical_file_name']\n v = i['parent_logical_file_name']\n d.setdefault(k, []).append(v)\n for k, v in d.iteritems():\n yield {'logical_file_name':k, 'parent_logical_file_name': v}\n del d"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef listFileParentsByLumi(self, block_name='', logical_file_name=[]):\n #self.logger.debug(\"lfn %s, block_name %s\" % (logical_file_name, block_name))\n if not block_name:\n dbsExceptionHandler('dbsException-invalid-input', \\\n \"Child block_name is required for fileparents/listFileParentsByLumi api\", self.logger.exception )\n with self.dbi.connection() as conn:\n sqlresult = self.fileparentbylumi.execute(conn, block_name, logical_file_name)\n return [{\"child_parent_id_list\":sqlresult}]", "response": "List all child parent IDs of a file."}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef listFileChildren(self, logical_file_name='', block_name='', block_id=0):\n conn = self.dbi.connection()\n try:\n if not logical_file_name and not block_name and not block_id:\n dbsExceptionHandler('dbsException-invalid-input',\\\n \"Logical_file_name, block_id or block_name is required for listFileChildren api\")\n sqlresult = self.filechildlist.execute(conn, logical_file_name, block_name, block_id)\n d = {}\n result = []\n for i in range(len(sqlresult)):\n k = sqlresult[i]['logical_file_name']\n v = sqlresult[i]['child_logical_file_name']\n if k in d:\n d[k].append(v)\n else:\n d[k] = [v]\n for k, v in d.iteritems():\n r = {'logical_file_name':k, 'child_logical_file_name': v}\n result.append(r)\n return result\n finally:\n if conn:\n conn.close()", "response": "List all file children of a file."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef updateStatus(self, logical_file_name, is_file_valid, lost, dataset):\n\n conn = self.dbi.connection()\n trans = conn.begin()\n try :\n self.updatestatus.execute(conn, logical_file_name, is_file_valid, lost, dataset, trans)\n trans.commit()\n trans = None\n except Exception as ex:\n if trans:\n trans.rollback()\n trans = None\n raise ex\n\n finally:\n if trans:\n trans.rollback()\n if conn:\n conn.close()", "response": "Update the status of a file in the database."}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef listFiles(self, dataset=\"\", block_name=\"\", logical_file_name=\"\",\n release_version=\"\", pset_hash=\"\", app_name=\"\",\n output_module_label=\"\", run_num=-1,\n origin_site_name=\"\", lumi_list=[], detail=False, validFileOnly=0, sumOverLumi=0, input_body=-1):\n \"\"\"\n One of below parameter groups must be present:\n non-patterned dataset, non-patterned block, non-patterned dataset with lfn, non-patterned block with lfn,\n non-patterned lfn\n\tnon-patterned lfn list\n \"\"\"\n if input_body != -1 :\n\t try:\n logical_file_name = input_body.get(\"logical_file_name\", \"\")\n run_num = input_body.get(\"run_num\", -1)\n validFileOnly = input_body.get(\"validFileOnly\", 0)\n sumOverLumi = input_body.get(\"sumOverLumi\", 0) \n\t\tdetail = input_body.get(\"detail\", False)\n block_name = input_body.get(\"block_name\", \"\")\n\t\tdataset = input_body.get(\"dataset\", \"\") \n\t\trelease_version = input_body.get(\"release_version\", \"\")\n\t\tpset_hash = input_body.get(\"pset_hash\", \"\")\n\t app_name = input_body.get(\"app_name\", \"\")\n\t\toutput_module_label = input_body.get(\"output_module_label\", \"\")\n\t\torigin_site_name = input_body.get(\"origin_site_name\", \"\")\n\t\tlumi_list = input_body.get(\"lumi_list\", [])\t\n except cjson.DecodeError as de:\n msg = \"business/listFilss POST call requires at least dataset, block_name, or a list of logical_file_name %s\" % de\n dbsExceptionHandler('dbsException-invalid-input', \"Invalid input\", self.logger.exception, msg)\n\n if ('%' in block_name):\n dbsExceptionHandler('dbsException-invalid-input', \"You must specify exact block name not a pattern\", self.logger.exception)\n elif ('%' in dataset):\n\t print(\"***** in dataset name\")\n dbsExceptionHandler('dbsException-invalid-input', \" You must specify exact dataset name not a pattern\", self.logger.exception)\n elif (not dataset and not block_name 
and (not logical_file_name or '%'in logical_file_name) ):\n dbsExceptionHandler('dbsException-invalid-input', \"\"\"You must specify one of the parameter groups: \\\n non-pattern dataset, \\\n non-pattern block , non-pattern dataset with lfn ,\\\n non-pattern block with lfn or no-pattern lfn, \\\n\t\t non-patterned lfn list .\"\"\", self.logger.exception)\n elif (lumi_list and len(lumi_list) != 0):\n if run_num==-1:\n dbsExceptionHandler('dbsException-invalid-input', \"Lumi list must accompany A single run number, \\\n use run_num=123\", self.logger.exception)\n elif isinstance(run_num, basestring):\n try:\n run_num = int(run_num)\n except:\n dbsExceptionHandler('dbsException-invalid-input', \"Lumi list must accompany A single run number,\\\n use run_num=123\", self.logger.exception)\n elif isinstance(run_num, list):\n if len(run_num) == 1:\n try:\n run_num = int(run_num[0])\n except:\n dbsExceptionHandler('dbsException-invalid-input', \"Lumi list must accompany A single run number,\\\n use run_num=123\", self.logger.exception)\n else:\n dbsExceptionHandler('dbsException-invalid-input', \"Lumi list must accompany A single run number,\\\n use run_num=123\", self.logger.exception)\n\telse:\n pass\n with self.dbi.connection() as conn:\n dao = (self.filebrieflist, self.filelist)[detail]\n for item in dao.execute(conn, dataset, block_name, logical_file_name, release_version, pset_hash, app_name,\n output_module_label, run_num, origin_site_name, lumi_list, validFileOnly, sumOverLumi):\n\n yield item # we need to yield while connection is open", "response": "This method returns a list of files in a dataset."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef insertFile(self, businput, qInserts=False):\n\n # We do not want to go be beyond 10 files at a time\n # If user wants to insert over 10 files in one shot, we run into risks of locking the database\n # tables for longer time, and in case of error, it will be hard to see where error occured\n if len(businput) > 10:\n dbsExceptionHandler('dbsException-input-too-large', \"DBS cannot insert \\\n more than 10 files in one bulk call\")\n return\n\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n #Now we are dealing with independent files that have different dataset/block and so on.\n #See Trac #358.\n #The expected input data format is a list of dictionary to insert independent files into DBS,\n #inputdata={'files':[{}, {}, {}]}\n #YG 09/15/2011\n\n # AA- 01/06/2010 -- we have to do this file-by-file, there is no real good way to do this complex operation otherwise\n #files2insert = []\n #fidl = []\n fileInserted = False\n dataset = \"\"\n block_name = \"\"\n dataset_id = -1\n block_id = -1\n dsconfigs = []\n for f in businput:\n if not (\"logical_file_name\" in f and \"block_name\" in f and \"dataset\" in f ):\n dbsExceptionHandler('dbsException-invalid-input', \"DBSFile/insertFile must have logical_file_name, block_name and dataset as input\")\n if f[\"block_name\"].split('#')[0] != f[\"dataset\"]:\n dbsExceptionHandler('dbsException-invalid-input', \"DBSFile/insertFile: dataset and block_name NOT match\")\n # first check if the dataset exists\n # and block exists that files are suppose to be going to and is OPEN for writing\n if dataset != f[\"dataset\"]:\n dataset_id = self.datasetid.execute(conn, dataset=f[\"dataset\"])\n dataset = f[\"dataset\"]\n if dataset_id == -1 :\n dbsExceptionHandler('dbsException-missing-data', \"Required Dataset Not Found.\", None,\n \"Required Dataset %s does not exist\"%f[\"dataset\"] )\n # get the list of 
configs in for this dataset\n dsconfigs = [x['output_mod_config_id'] for x in self.dsconfigids.execute(conn, dataset=f[\"dataset\"])]\n fileconfigs = [] # this will hold file configs that we will list in the insert file logic below\n if block_name != f[\"block_name\"]:\n block_info = self.blocklist.execute(conn, block_name=f[\"block_name\"])\n\t\t for b in block_info:\n\t\t\tif not b : \n\t\t\t dbsExceptionHandler( \"dbsException-missing-data\", \"Required block not found\", None,\n \"Cannot found required block %s in DB\" %f[\"block_name\"])\n\t\t\telse:\t\n\t\t\t if b[\"open_for_writing\"] != 1 : \n\t\t\t\tdbsExceptionHandler(\"dbsException-conflict-data\", \"Block closed\", None,\n\t\t\t\t \"Block %s is not open for writting\" %f[\"block_name\"])\n\t\t\t if \"block_id\" in b:\n\t\t\t\tblock_id = b[\"block_id\"]\n\t\t\t else:\n\t\t\t\tdbsExceptionHandler(\"dbsException-missing-data\", \"Block not found\", None,\n \"Cannot found required block %s in DB\" %f[\"block_name\"])\n else: dbsExceptionHandler('dbsException-missing-data', \"Required block name Not Found in input.\",\n None, \"Required block Not Found in input.\")\n #make the default file_type=EDM\n file_type_id = self.ftypeid.execute( conn, f.get(\"file_type\", \"EDM\"))\n if file_type_id == -1:\n dbsExceptionHandler('dbsException-missing-data', \"File type not found.\", None,\n \"Required file type %s not found in DBS\"%f.get(\"file_type\", \"EDM\") )\n\n iFile = 0\n fileIncrement = 40\n fID = self.sm.increment(conn, \"SEQ_FL\", incCount=fileIncrement)\n #looping over the files, everytime create a new object 'filein' as you never know\n #whats in the original object and we do not want to know\n #for f in businput:\n file_clob = {}\n fparents2insert = []\n flumis2insert = []\n fconfigs2insert = []\n # create the file object from the original\n # taking care of defaults, and required\n filein = {\n \"logical_file_name\" : f[\"logical_file_name\"],\n \"is_file_valid\" : f.get(\"is_file_valid\", 1),\n 
\"check_sum\" : f.get(\"check_sum\", None),\n \"event_count\" : f.get(\"event_count\", -1),\n \"file_size\" : f.get(\"file_size\", -1),\n \"adler32\" : f.get(\"adler32\", None),\n \"md5\" : f.get(\"md5\", None),\n \"auto_cross_section\" : f.get(\"auto_cross_section\", -1),\n #\"creation_date\" : f.get(\"creation_date\", None), See Ticket #965 YG.\n #\"create_by\": f.get(\"create_by\", None),\n \"last_modification_date\": f.get(\"last_modification_date\", None),\n #\"last_modified_by\" : f.get(\"last_modified_by\", None)\n \"last_modified_by\" : dbsUtils().getCreateBy()\n }\n if filein[\"md5\"] is None and filein[\"check_sum\"] is None and filein[\"adler32\"] is None:\n dbsExceptionHandler('dbsException-invalid-input', \"Missing check_sum or adler32, or md5\")\n if iFile == fileIncrement:\n fID = self.sm.increment(conn, \"SEQ_FL\", incCount=fileIncrement)\n iFile = 0\n filein[\"file_id\"] = fID + iFile\n iFile += 1\n filein[\"dataset_id\"] = dataset_id\n filein[\"block_id\"] = block_id\n filein[\"file_type_id\"] = file_type_id\n #FIXME: Add this later if f.get(\"branch_hash\", \"\") not in (\"\", None):\n #filein[\"branch_hash\"]=self.fbranchid.execute( f.get(\"branch_hash\"), conn, transaction=tran)\n # insert file -- as decided, one file at a time\n # filein will be what goes into database\n try:\n if not qInserts:\n self.filein.execute(conn, filein, transaction=tran)\n fileInserted = True\n else:\n file_clob['file'] = filein\n except SQLAlchemyIntegrityError as ex:\n if str(ex).find(\"unique constraint\") != -1 or str(ex).lower().find(\"duplicate\") != -1:\n # Lets move on to NEXT file, we do not want to continue processing this file\n\n #Nothing about this file is updated when it is already in DB. No file parentage, block parentage, dataset parentage and so on.\n #Is this right? YG Oct. 24\n self.logger.warning(\"DBSFile/insertFile. 
File already exists in DBS, not changing it: %s\"\n %filein[\"logical_file_name\"] )\n continue\n else:\n raise\n\n #process file parents, file lumi, file outputmodconfigs, ...\n #file lumi sections\n if \"file_lumi_list\" in f:\n fllist = f[\"file_lumi_list\"]\n if len(fllist) > 0:\n for fl in fllist:\n fldao = {\n \"run_num\" : fl[\"run_num\"],\n \"lumi_section_num\" : fl[\"lumi_section_num\"]\n }\n if \"event_count\" in fl:\n fldao[\"event_count\"] = fl[\"event_count\"]\n fldao[\"file_id\"] = filein[\"file_id\"]\n flumis2insert.append(fldao)\n\n if \"file_parent_list\" in f:\n #file parents\n fplist = f[\"file_parent_list\"]\n\n for fp in fplist:\n fpdao = {}\n fpdao[\"this_file_id\"] = filein[\"file_id\"]\n fpdao[\"parent_logical_file_name\"] = fp[\"file_parent_lfn\"]\n fparents2insert.append(fpdao)\n if \"file_output_config_list\" in f:\n #file output config modules\n foutconfigs = f[\"file_output_config_list\"]\n if(len(foutconfigs) > 0):\n for fc in foutconfigs:\n fcdao = {}\n fcdao[\"file_id\"] = filein[\"file_id\"]\n fcdao[\"output_mod_config_id\"] = self.outconfigid.execute(conn, fc[\"app_name\"],\n fc[\"release_version\"], fc[\"pset_hash\"], fc[\"output_module_label\"],\n fc[\"global_tag\"])\n if fcdao[\"output_mod_config_id\"] == -1 :\n dbsExceptionHandler('dbsException-missing-data', 'Config Not found.', None, \"DBSFile/insertFile.\\\n Output module config (%s, %s, %s, %s) \\\n not found\" % (fc[\"app_name\"],\n fc[\"release_version\"], fc[\"pset_hash\"], fc[\"output_module_label\"]) )\n fileconfigs.append(fcdao[\"output_mod_config_id\"])\n fconfigs2insert.append(fcdao)\n #FIXME: file associations?-- in a later release\n #\n # insert file - lumi\n if flumis2insert:\n file_clob['file_lumi_list'] = flumis2insert\n if not qInserts:\n self.flumiin.execute(conn, flumis2insert, transaction=tran)\n # insert file parent mapping\n if fparents2insert:\n file_clob['file_parent_list'] = fparents2insert\n if not qInserts:\n self.fparentin.execute(conn, 
fparents2insert, transaction=tran)\n # First check to see if these output configs are mapped to THIS dataset as well, if not raise an exception\n if not set(fileconfigs).issubset(set(dsconfigs)) :\n dbsExceptionHandler('dbsException-conflict-data', 'Mismatched configure. ', None, \"DBSFile/insertFile. Output configs mismatch, \\\n output configs known to dataset: \\\n %s are different from what are being mapped to file : %s \" \\\n %(f[\"dataset\"], filein[\"logical_file_name\"]) )\n # insert output module config mapping\n if fconfigs2insert:\n file_clob['file_output_config_list'] = fconfigs2insert\n if not qInserts:\n self.fconfigin.execute(conn, fconfigs2insert, transaction=tran)\n if qInserts:\n try:\n self.logger.warning(file_clob)\n self.filebufin.execute(conn, filein['logical_file_name'], block_id, file_clob, transaction=tran)\n except SQLAlchemyIntegrityError as ex:\n if str(ex).find(\"unique constraint\") != -1 or str(ex).lower().find(\"duplicate\") != -1:\n pass\n else:\n raise\n\n #insert block parentages and dataset parentages based on file parentages\n # Do this one by one, as it is sure to have duplicate in dest table\n if fileInserted and fparents2insert:\n for fp in fparents2insert:\n try:\n bkParentage2insert={'this_block_id' : filein[\"block_id\"], 'parent_logical_file_name': fp['parent_logical_file_name']}\n self.blkparentin.execute(conn, bkParentage2insert, transaction=tran)\n dsParentage2insert={'this_dataset_id': filein[\"dataset_id\"], 'parent_logical_file_name' : fp['parent_logical_file_name']}\n self.dsparentin.execute(conn, dsParentage2insert, transaction=tran)\n except SQLAlchemyIntegrityError as ex:\n #ORA-00001\n if (str(ex).find(\"ORA-00001\") != -1 and str(ex).find(\"PK_DP\") != -1) or str(ex).find(\"PK_BP\") != -1 or str(ex).lower().find(\"duplicate\") != -1:\n pass\n elif str(ex).find(\"ORA-01400\") != -1:\n raise\n else:\n raise\n\n # Update block parameters, file_count, block_size\n if not qInserts:\n blkParams = 
self.blkstats.execute(conn, block_id,\n transaction=tran)\n blkParams['block_size'] = long(blkParams['block_size'])\n self.blkstatsin.execute(conn, blkParams, transaction=tran)\n\n # All good ?\n tran.commit()\n tran = None\n\n except Exception as ex:\n if tran:\n tran.rollback()\n tran = None\n raise\n\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()", "response": "This method inserts a file into the database."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef insertFileParents(self, businput):\n if \"block_name\" not in businput.keys() or \"child_parent_id_list\" not in businput.keys() or not businput[\"child_parent_id_list\"] or not businput[\"block_name\"]:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"DBSFile/insertFileParents: require child block_name and list of child/parent file id pairs\" , self.logger.exception, \"DBSFile/insertFileParents: require child block_name and list of child/parent file id pairs\")\n tran = None\n conn = None \n try:\n #We should get clean insert for both file/block parentage.\n #block parent duplication is handled at dao level. File parent should not have deplication. \n conn = self.dbi.connection()\n tran = conn.begin()\n self.logger.info(\"Insert File parentage mapping\") \n self.fparentin2.execute(conn, businput, tran)\n self.logger.info(\"Insert block parentage mapping\")\n self.blkparentin3.execute(conn, businput, tran)\n if tran:tran.commit()\n if conn:conn.close()\n except SQLAlchemyIntegrityError as ex:\n if tran:tran.rollback()\n if conn:conn.close()\n if str(ex).find(\"ORA-01400\") > -1:\n dbsExceptionHandler('dbsException-missing-data',\n 'Missing data when insert filei/block parent. ', self.logger.exception,\n 'Missing data when insert file/block parent. '+ str(ex))\n else:\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid data when insert file/block parent. ', self.logger.exception,\n 'Invalid data when insert file/block parent. '+ str(ex))\n finally:\n if tran:tran.rollback()\n if conn:conn.close()", "response": "Insert File Parents into DBS."}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef increment(self, conn, seqName, transaction = False, incCount=1):\n\n #FIXME: Do we need to lock the tables here?\n\n sql = \"select %s%s.nextval as val from dual\" % (self.owner, seqName)\n result = self.dbi.processData(sql, conn=conn, transaction=transaction)\n resultlist = self.formatDict(result)\n return resultlist[0]['val']", "response": "Increment the sequence seqName by default Incremented by\n returns its value"}
{"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nlist release versions of a specific release version.", "response": "def listReleaseVersions(self, release_version=\"\", dataset='', logical_file_name=''):\n \"\"\"\n List release versions\n \"\"\"\n if dataset and ('%' in dataset or '*' in dataset):\n dbsExceptionHandler('dbsException-invalid-input',\n \" DBSReleaseVersion/listReleaseVersions. No wildcards are\" +\n \" allowed in dataset.\\n.\")\n\n if logical_file_name and ('%' in logical_file_name or '*' in logical_file_name):\n dbsExceptionHandler('dbsException-invalid-input',\n \" DBSReleaseVersion/listReleaseVersions. No wildcards are\" +\n \" allowed in logical_file_name.\\n.\")\n\n conn = self.dbi.connection()\n try:\n plist = self.releaseVersion.execute(conn, release_version.upper(), dataset, logical_file_name)\n result = [{}]\n if plist:\n t = []\n for i in plist:\n for k, v in i.iteritems():\n t.append(v)\n result[0]['release_version'] = t\n return result\n finally:\n if conn:\n conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nsearches the path to the CA file that is used to authenticate the client side.", "response": "def __search_ca_path(self):\n \"\"\"\n Get CA Path to check the validity of the server host certificate on the client side\n \"\"\"\n if \"X509_CERT_DIR\" in os.environ:\n self._ca_path = os.environ['X509_CERT_DIR']\n\n elif os.path.exists('/etc/grid-security/certificates'):\n self._ca_path = '/etc/grid-security/certificates'\n\n else:\n raise ClientAuthException(\"Could not find a valid CA path\")"}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef __search_cert_key(self):\n # Now we're trying to guess what the right cert/key combo is...\n # First preference to HOST Certificate, This is how it set in Tier0\n if 'X509_HOST_CERT' in os.environ:\n self._ssl_cert = os.environ['X509_HOST_CERT']\n self._ssl_key = os.environ['X509_HOST_KEY']\n\n # Second preference to User Proxy, very common\n elif 'X509_USER_PROXY' in os.environ and os.path.exists(os.environ['X509_USER_PROXY']):\n self._ssl_cert = os.environ['X509_USER_PROXY']\n self._ssl_key = self._ssl_cert\n\n # Third preference to User Cert/Proxy combinition\n elif 'X509_USER_CERT' in os.environ and 'X509_USER_KEY' in os.environ:\n self._ssl_cert = os.environ['X509_USER_CERT']\n self._ssl_key = os.environ['X509_USER_KEY']\n\n # TODO: only in linux, unix case, add other os case\n # look for proxy at default location /tmp/x509up_u$uid\n elif os.path.exists('/tmp/x509up_u%s' % str(os.getuid())):\n self._ssl_cert = '/tmp/x509up_u%s' % str(os.getuid())\n self._ssl_key = self._ssl_cert\n\n elif sys.stdin.isatty():\n home_dir = os.environ['HOME']\n user_cert = os.path.join(home_dir, '.globus/usercert.pem')\n user_key = os.path.join(home_dir, '.globus/userkey.pem')\n\n if os.path.exists(user_cert):\n self._ssl_cert = user_cert\n if os.path.exists(user_key):\n self._ssl_key = user_key\n #store password for convenience\n self._ssl_key_pass = getpass(\"Password for %s: \" % self._ssl_key)\n else:\n self._ssl_key = self._ssl_cert\n else:\n raise ClientAuthException(\"No valid X509 cert-key-pair found.\") \n\n else:\n raise ClientAuthException(\"No valid X509 cert-key-pair found.\")", "response": "Search the user credentials if they exist otherwise throw an exception."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ninterface for submitting a migration request. Required input keys: MIGRATION_URL: The source DBS url for migration. MIGRATION_INPUT: The block or dataset names to be migrated.", "response": "def submit(self):\n \"\"\"\n Interface for submitting a migration request.\n Required input keys:\n MIGRATION_URL: The source DBS url for migration.\n MIGRATION_INPUT: The block or dataset names to be migrated.\n \"\"\"\n body = request.body.read()\n indata = cjson.decode(body)\n try:\n indata = validateJSONInputNoCopy(\"migration_rqst\", indata)\n indata.update({\"creation_date\": dbsUtils().getTime(),\n \"last_modification_date\" : dbsUtils().getTime(),\n \"create_by\" : dbsUtils().getCreateBy() ,\n \"last_modified_by\" : dbsUtils().getCreateBy(),\n \"migration_status\": 0})\n return self.dbsMigrate.insertMigrationRequest(indata)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSMigrateModle/submit. %s\\n Exception trace: \\n %s.\" \\\n % (ex, traceback.format_exc() )\n if hasattr(ex, 'status') and ex.status == 400:\n\t\tdbsExceptionHandler('dbsException-invalid-input2', str(ex), self.logger.exception, sError)\n\t else:\t\n\t\tdbsExceptionHandler('dbsException-server-error', str(ex), self.logger.exception, sError)"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\ninterfacing to query status of a migration request In this preference order of input parameters : migration_rqst_id, block, dataset, user (if multi parameters are provided, only the precedence order is followed)", "response": "def status(self, migration_rqst_id=\"\", block_name=\"\", dataset=\"\", user=\"\"):\n \"\"\"\n Interface to query status of a migration request\n In this preference order of input parameters :\n migration_rqst_id, block, dataset, user\n (if multi parameters are provided, only the precedence order is followed)\n \"\"\"\n try:\n return self.dbsMigrate.listMigrationRequests(migration_rqst_id,\n block_name, dataset, user)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSMigrateModle/status. %s\\n Exception trace: \\n %s.\" \\\n % (ex, traceback.format_exc() )\n\t if hasattr(ex, 'status') and ex.status == 400:\n\t\tdbsExceptionHandler('dbsException-invalid-input2', str(ex), self.logger.exception, sError)\t\n else:\n\t\tdbsExceptionHandler('dbsException-server-error', str(ex), self.logger.exception, sError)"}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\ninterfacing to remove a migration request from the queue. Only Permanent FAILED/9 and PENDING/0 requests can be removed (running and sucessed requests cannot be removed)", "response": "def remove(self):\n \"\"\"\n Interface to remove a migration request from the queue.\n Only Permanent FAILED/9 and PENDING/0 requests can be removed\n (running and sucessed requests cannot be removed)\n\n \"\"\"\n body = request.body.read()\n indata = cjson.decode(body)\n try:\n indata = validateJSONInputNoCopy(\"migration_rqst\", indata)\n return self.dbsMigrate.removeMigrationRequest(indata)\n except dbsException as he:\n dbsExceptionHandler(he.eCode, he.message, self.logger.exception, he.message)\n except Exception as e:\n if e.code == 400:\n dbsExceptionHandler('dbsException-invalid-input2', str(e), self.logger.exception, str(e)) \n else:\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, str(e))"}
{"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nexecutes the SQL SELECT query and returns the result as a generator.", "response": "def execute(self, conn, logical_file_name='', block_id=0, block_name='', transaction=False):\n \"\"\"\n return {} if condition is not provided.\n \"\"\"\n sql = ''\n binds = {}\n\n if logical_file_name:\n if isinstance(logical_file_name, basestring):\n wheresql = \"WHERE F.LOGICAL_FILE_NAME = :logical_file_name\"\n binds = {\"logical_file_name\": logical_file_name}\n sql = \"{sql} {wheresql}\".format(sql=self.sql, wheresql=wheresql)\n elif isinstance(logical_file_name, list): \n wheresql = \"WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)\"\n lfn_generator, binds = create_token_generator(logical_file_name)\n sql = \"{lfn_generator} {sql} {wheresql}\".format(lfn_generator=lfn_generator, sql=self.sql,\n wheresql=wheresql)\n elif block_id != 0:\n wheresql = \"WHERE F.BLOCK_ID = :block_id\"\n binds ={'block_id': block_id}\n sql = \"{sql} {wheresql}\".format(sql=self.sql, wheresql=wheresql)\n elif block_name:\n joins = \"JOIN {owner}BLOCKS B on B.BLOCK_ID = F.BLOCK_ID\".format(owner=self.owner)\n wheresql = \"WHERE B.BLOCK_NAME= :block_name\"\n binds ={'block_name': block_name}\n sql = \"{sql} {joins} {wheresql}\".format(sql=self.sql, joins=joins, wheresql=wheresql)\n else:\n return\n\n cursors = self.dbi.processData(sql, binds, conn, transaction=transaction, returnCursor=True)\n for i in cursors:\n d = self.formatCursor(i, size=100)\n if isinstance(d, list) or isinstance(d, GeneratorType):\n for elem in d:\n yield elem\n elif d: \n yield d"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef listDatasetParents(self, dataset=\"\"):\n if( dataset == \"\" ):\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSDataset/listDatasetParents. Child Dataset name is required.\")\n conn = self.dbi.connection()\n try:\n result = self.datasetparentlist.execute(conn, dataset)\n return result\n finally:\n if conn:\n conn.close()", "response": "returns only parent dataset name"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listDatasetChildren(self, dataset):\n if( dataset == \"\" ):\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSDataset/listDatasetChildren. Parent Dataset name is required.\")\n conn = self.dbi.connection()\n try:\n result = self.datasetchildlist.execute(conn, dataset)\n return result\n finally:\n if conn:\n conn.close()", "response": "returns only children dataset name"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef updateStatus(self, dataset, is_dataset_valid):\n if( dataset == \"\" ):\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSDataset/updateStatus. dataset is required.\")\n\n conn = self.dbi.connection()\n trans = conn.begin()\n\n try:\n self.updatestatus.execute(conn, dataset, is_dataset_valid, trans)\n trans.commit()\n trans = None\n except Exception as ex:\n if trans:\n trans.rollback()\n raise ex\n finally:\n if trans:\n trans.rollback()\n if conn:\n conn.close()", "response": "Update the status of a dataset"}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef updateType(self, dataset, dataset_access_type):\n if( dataset == \"\" ):\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSDataset/updateType. dataset is required.\")\n\n conn = self.dbi.connection()\n trans = conn.begin()\n\n try :\n self.updatetype.execute(conn, dataset, dataset_access_type.upper(), trans)\n trans.commit()\n trans = None\n except SQLAlchemyDatabaseError as ex:\n if str(ex).find(\"ORA-01407\") != -1:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"Invalid Input\", None, \"DBSDataset/updateType. A Valid dataset_access_type is required.\")\n finally:\n if trans:\n trans.rollback()\n if conn:\n conn.close()", "response": "Update the type of a dataset."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nlist all datasets if dataset parameter is not given. The parameter can include % character. all other parameters are not wild card ones.", "response": "def listDatasets(self, dataset=\"\", parent_dataset=\"\", is_dataset_valid=1,\n release_version=\"\", pset_hash=\"\", app_name=\"\",\n output_module_label=\"\", global_tag=\"\", processing_version=0, \n acquisition_era=\"\", run_num=-1, physics_group_name=\"\",\n logical_file_name=\"\", primary_ds_name=\"\",\n primary_ds_type=\"\", processed_ds_name=\"\", data_tier_name=\"\",\n dataset_access_type=\"VALID\", prep_id=\"\", create_by='', last_modified_by='', min_cdate=0, max_cdate=0,\n min_ldate=0, max_ldate=0, cdate=0, ldate=0, detail=False, dataset_id=-1):\n \"\"\"\n lists all datasets if dataset parameter is not given.\n The parameter can include % character. \n all other parameters are not wild card ones.\n \"\"\"\n if(logical_file_name and logical_file_name.find(\"%\")!=-1):\n dbsExceptionHandler('dbsException-invalid-input', 'DBSDataset/listDatasets API requires \\\n fullly qualified logical_file_name. 
NO wildcard is allowed in logical_file_name.')\n if(dataset and dataset.find(\"/%/%/%\")!=-1):\n dataset=''\n\twith self.dbi.connection() as conn:\n dao = (self.datasetbrieflist, self.datasetlist)[detail]\n if dataset_access_type: dataset_access_type = dataset_access_type.upper()\n if data_tier_name: data_tier_name = data_tier_name.upper()\n #if processing_version: processing_version = processing_version.upper()\n #if acquisition_era: acquisition_era = acquisition_era.upper()\n for item in dao.execute(conn, \n dataset, is_dataset_valid,\n parent_dataset,\n release_version,\n pset_hash,\n app_name,\n output_module_label,\n global_tag,\n processing_version,\n acquisition_era, \n run_num, physics_group_name,\n logical_file_name,\n primary_ds_name, primary_ds_type,\n processed_ds_name, data_tier_name,\n dataset_access_type, prep_id, create_by, last_modified_by, \n min_cdate, max_cdate, min_ldate, max_ldate,\n cdate, ldate, dataset_id):\n\t yield item"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef insertDataset(self, businput):\n if not (\"primary_ds_name\" in businput and \"dataset\" in businput\n and \"dataset_access_type\" in businput and \"processed_ds_name\" in businput ):\n dbsExceptionHandler('dbsException-invalid-input', \"business/DBSDataset/insertDataset must have dataset,\\\n dataset_access_type, primary_ds_name, processed_ds_name as input\")\n\n if \"data_tier_name\" not in businput:\n dbsExceptionHandler('dbsException-invalid-input', \"insertDataset must have data_tier_name as input.\")\n\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n\n dsdaoinput = {}\n dsdaoinput[\"primary_ds_name\"] = businput[\"primary_ds_name\"]\n dsdaoinput[\"data_tier_name\"] = businput[\"data_tier_name\"].upper()\n dsdaoinput[\"dataset_access_type\"] = businput[\"dataset_access_type\"].upper()\n #not required pre-exist in the db. will insert with the dataset if not in yet\n #processed_ds_name=acquisition_era_name[-fileter_name][-processing_str]-vprocessing_version Changed as 4/30/2012 YG.\n #althrough acquisition era and processing version is not required for a dataset in the schema(the schema is build this way because\n #we need to accomdate the DBS2 data), but we impose the requirement on the API. 
So both acquisition and processing eras are required \n #YG 12/07/2011 TK-362\n if \"acquisition_era_name\" in businput and \"processing_version\" in businput:\n erals=businput[\"processed_ds_name\"].rsplit('-')\n if erals[0]==businput[\"acquisition_era_name\"] and erals[len(erals)-1]==\"%s%s\"%(\"v\", businput[\"processing_version\"]):\n dsdaoinput[\"processed_ds_name\"] = businput[\"processed_ds_name\"]\n else:\n dbsExceptionHandler('dbsException-invalid-input', \"insertDataset:\\\n processed_ds_name=acquisition_era_name[-filter_name][-processing_str]-vprocessing_version must be satisified.\")\n else:\n dbsExceptionHandler(\"dbsException-missing-data\", \"insertDataset: Required acquisition_era_name or processing_version is not found in the input\")\n \n if \"physics_group_name\" in businput:\n dsdaoinput[\"physics_group_id\"] = self.phygrpid.execute(conn, businput[\"physics_group_name\"])\n if dsdaoinput[\"physics_group_id\"] == -1:\n dbsExceptionHandler(\"dbsException-missing-data\", \"insertDataset. physics_group_name not found in DB\")\n else:\n dsdaoinput[\"physics_group_id\"] = None\n\n dsdaoinput[\"dataset_id\"] = self.sm.increment(conn, \"SEQ_DS\")\n # we are better off separating out what we need for the dataset DAO\n dsdaoinput.update({ \n \"dataset\" : \"/%s/%s/%s\" %\n (businput[\"primary_ds_name\"],\n businput[\"processed_ds_name\"],\n businput[\"data_tier_name\"].upper()),\n \"prep_id\" : businput.get(\"prep_id\", None),\n \"xtcrosssection\" : businput.get(\"xtcrosssection\", None),\n \"creation_date\" : businput.get(\"creation_date\", dbsUtils().getTime() ),\n \"create_by\" : businput.get(\"create_by\", dbsUtils().getCreateBy()) ,\n \"last_modification_date\" : businput.get(\"last_modification_date\", dbsUtils().getTime()),\n #\"last_modified_by\" : businput.get(\"last_modified_by\", dbsUtils().getModifiedBy())\n \"last_modified_by\" : dbsUtils().getModifiedBy()\n })\n \"\"\"\n repeated again, why? 
comment out by YG 3/14/2012\n #physics group\n if \"physics_group_name\" in businput:\n dsdaoinput[\"physics_group_id\"] = self.phygrpid.execute(conn, businput[\"physics_group_name\"])\n if dsdaoinput[\"physics_group_id\"] == -1:\n dbsExceptionHandler(\"dbsException-missing-data\", \"insertDataset. Physics Group : %s Not found\"\n % businput[\"physics_group_name\"])\n else: dsdaoinput[\"physics_group_id\"] = None\n \"\"\"\n # See if Processing Era exists\n if \"processing_version\" in businput and businput[\"processing_version\"] != 0:\n dsdaoinput[\"processing_era_id\"] = self.proceraid.execute(conn, businput[\"processing_version\"])\n if dsdaoinput[\"processing_era_id\"] == -1 :\n dbsExceptionHandler(\"dbsException-missing-data\", \"DBSDataset/insertDataset: processing_version not found in DB\") \n else:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSDataset/insertDataset: processing_version is required\")\n\n # See if Acquisition Era exists\n if \"acquisition_era_name\" in businput:\n dsdaoinput[\"acquisition_era_id\"] = self.acqeraid.execute(conn, businput[\"acquisition_era_name\"])\n if dsdaoinput[\"acquisition_era_id\"] == -1:\n dbsExceptionHandler(\"dbsException-missing-data\", \"DBSDataset/insertDataset: acquisition_era_name not found in DB\")\n else:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSDataset/insertDataset: acquisition_era_name is required\")\n try:\n # insert the dataset\n self.datasetin.execute(conn, dsdaoinput, tran)\n except SQLAlchemyIntegrityError as ex:\n if (str(ex).lower().find(\"unique constraint\") != -1 or\n str(ex).lower().find(\"duplicate\") != -1):\n # dataset already exists, lets fetch the ID\n self.logger.warning(\n \"Unique constraint violation being ignored...\")\n self.logger.warning(\"%s\" % ex)\n ds = \"/%s/%s/%s\" % (businput[\"primary_ds_name\"], businput[\"processed_ds_name\"], businput[\"data_tier_name\"].upper())\n dsdaoinput[\"dataset_id\"] = self.datasetid.execute(conn, ds )\n if 
dsdaoinput[\"dataset_id\"] == -1 :\n dbsExceptionHandler(\"dbsException-missing-data\", \"DBSDataset/insertDataset. Strange error, the dataset %s does not exist ?\" \n % ds )\n if (str(ex).find(\"ORA-01400\") ) != -1 :\n dbsExceptionHandler(\"dbsException-missing-data\", \"insertDataset must have: dataset,\\\n primary_ds_name, processed_ds_name, data_tier_name \")\n except Exception as e:\n raise \n\n #FIXME : What about the READ-only status of the dataset\n #There is no READ-oly status for a dataset.\n\n # Create dataset_output_mod_mod_configs mapping\n if \"output_configs\" in businput:\n for anOutConfig in businput[\"output_configs\"]:\n dsoutconfdaoin = {}\n dsoutconfdaoin[\"dataset_id\"] = dsdaoinput[\"dataset_id\"]\n dsoutconfdaoin[\"output_mod_config_id\"] = self.outconfigid.execute(conn, anOutConfig[\"app_name\"],\n anOutConfig[\"release_version\"],\n anOutConfig[\"pset_hash\"],\n anOutConfig[\"output_module_label\"],\n anOutConfig[\"global_tag\"]) \n if dsoutconfdaoin[\"output_mod_config_id\"] == -1 : \n\n dbsExceptionHandler(\"dbsException-missing-data\", \"DBSDataset/insertDataset: Output config (%s, %s, %s, %s, %s) not found\"\n % (anOutConfig[\"app_name\"],\n anOutConfig[\"release_version\"],\n anOutConfig[\"pset_hash\"],\n anOutConfig[\"output_module_label\"],\n anOutConfig[\"global_tag\"]))\n try:\n self.datasetoutmodconfigin.execute(conn, dsoutconfdaoin, tran)\n except Exception as ex:\n if str(ex).lower().find(\"unique constraint\") != -1 or str(ex).lower().find(\"duplicate\") != -1:\n pass\n else:\n raise\n # Dataset parentage will NOT be added by this API it will be set by insertFiles()--deduced by insertFiles\n # Dataset runs will NOT be added by this API they will be set by insertFiles()--deduced by insertFiles OR insertRun API call\n tran.commit()\n tran = None\n except Exception:\n if tran:\n tran.rollback()\n tran = None\n raise\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()", "response": "Insert a new dataset into DBS."}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nupdates origin_site_name for a given block_name", "response": "def execute(self, conn, block_name, origin_site_name, transaction=False):\n \"\"\"\n Update origin_site_name for a given block_name\n \"\"\"\n if not conn:\n dbsExceptionHandler(\"dbsException-failed-connect2host\", \"Oracle/Block/UpdateStatus. \\\nExpects db connection from upper layer.\", self.logger.exception)\n binds = {\"block_name\": block_name, \"origin_site_name\": origin_site_name, \"mtime\": dbsUtils().getTime(),\n \"myuser\": dbsUtils().getCreateBy()}\n self.dbi.processData(self.sql, binds, conn, transaction)"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script for\nincrementing the sequence seqName by default Incremented by one and returns its value", "response": "def increment(self, conn, seqName, transaction = False, incCount=1):\n \"\"\"\n increments the sequence `seqName` by default `Incremented by one`\n and returns its value\n \"\"\"\n\ttry:\n\t seqTable = \"%sS\" %seqName\n\t tlock = \"lock tables %s write\" %seqTable\n\t self.dbi.processData(tlock, [], conn, transaction)\n\t sql = \"select ID from %s\" % seqTable\n\t result = self.dbi.processData(sql, [], conn, transaction)\n\t resultlist = self.formatDict(result)\n\t newSeq = resultlist[0]['id']+incCount\n\t sql = \"UPDATE %s SET ID=:seq_count\" % seqTable\n\t seqparms={\"seq_count\" : newSeq}\n\t self.dbi.processData(sql, seqparms, conn, transaction)\n\t tunlock = \"unlock tables\"\n\t self.dbi.processData(tunlock, [], conn, transaction)\n\t return newSeq\n\texcept:\n\t #FIXME\n\t tunlock = \"unlock tables\"\n\t self.dbi.processData(tunlock, [], conn, transaction)\n\t raise"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listRuns(self, run_num=-1, logical_file_name=\"\",\n block_name=\"\", dataset=\"\"):\n \"\"\"\n List run known to DBS.\n \"\"\"\n if( '%' in logical_file_name or '%' in block_name or '%' in dataset ):\n dbsExceptionHandler('dbsException-invalid-input', \n \" DBSDatasetRun/listRuns. No wildcards are allowed in logical_file_name, block_name or dataset.\\n.\")\n conn = self.dbi.connection()\n tran = False\n try:\n ret = self.runlist.execute(conn, run_num, logical_file_name, block_name, dataset, tran)\n result = []\n rnum = []\n for i in ret:\n rnum.append(i['run_num'])\n result.append({'run_num' : rnum})\n return result\n\n finally:\n if conn:\n conn.close()", "response": "List all runs known to DBS."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef insertPrimaryDataset(self):\n try :\n body = request.body.read()\n indata = cjson.decode(body)\n indata = validateJSONInputNoCopy(\"primds\", indata)\n indata.update({\"creation_date\": dbsUtils().getTime(), \"create_by\": dbsUtils().getCreateBy() })\n self.dbsPrimaryDataset.insertPrimaryDataset(indata)\n except cjson.DecodeError as dc:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"Wrong format/data from insert PrimaryDataset input\", self.logger.exception, str(dc))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = \"DBSWriterModel/insertPrimaryDataset. %s\\n Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to insert A primary dataset in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef updateAcqEraEndDate(self, acquisition_era_name =\"\", end_date=0):\n try:\n self.dbsAcqEra.UpdateAcqEraEndDate( acquisition_era_name, end_date)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = \"DBSWriterModel/update.AcqEraEndDate %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to update the end_date of an acquisition era"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef insertBulkBlock(self):\n try:\n body = request.body.read()\n indata = cjson.decode(body)\n if (indata.get(\"file_parent_list\", []) and indata.get(\"dataset_parent_list\", [])): \n dbsExceptionHandler(\"dbsException-invalid-input2\", \"insertBulkBlock: dataset and file parentages cannot be in the input at the same time\", \n self.logger.exception, \"insertBulkBlock: datset and file parentages cannot be in the input at the same time.\") \n indata = validateJSONInputNoCopy(\"blockBulk\", indata)\n self.dbsBlockInsert.putBlock(indata)\n except cjson.DecodeError as dc:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"Wrong format/data from insert BulkBlock input\", self.logger.exception, str(dc))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)\n except HTTPError as he:\n raise he\n except Exception as ex:\n #illegal variable name/number\n if str(ex).find(\"ORA-01036\") != -1:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"illegal variable name/number from input\", self.logger.exception, str(ex))\n else:\n sError = \"DBSWriterModel/insertBulkBlock. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to insert a bulk block of datset and file parentages."}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef insertBlock(self):\n try:\n body = request.body.read()\n indata = cjson.decode(body)\n indata = validateJSONInputNoCopy(\"block\", indata)\n self.dbsBlock.insertBlock(indata)\n except cjson.DecodeError as dc:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"Wrong format/data from insert Block input\", self.logger.exception, str(dc))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)\n except Exception as ex:\n sError = \"DBSWriterModel/insertBlock. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to insert a block into DBS"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ninsert a list of files into DBS.", "response": "def insertFile(self, qInserts=False):\n \"\"\"\n API to insert a list of file into DBS in DBS. Up to 10 files can be inserted in one request.\n\n :param qInserts: True means that inserts will be queued instead of done immediately. INSERT QUEUE Manager will perform the inserts, within few minutes.\n :type qInserts: bool\n :param filesList: List of dictionaries containing following information\n :type filesList: list of dicts\n :key logical_file_name: File to be inserted (str) (Required)\n :key is_file_valid: (optional, default = 1): (bool)\n :key block: required: /a/b/c#d (str)\n :key dataset: required: /a/b/c (str)\n :key file_type: (optional, default = EDM) one of the predefined types, (str)\n :key check_sum: (optional, default = '-1') (str)\n :key event_count: (optional, default = -1) (int)\n :key file_size: (optional, default = -1.) (float)\n :key adler32: (optional, default = '') (str)\n :key md5: (optional, default = '') (str)\n :key auto_cross_section: (optional, default = -1.) 
(float)\n :key file_lumi_list: (optional, default = []) [{'run_num': 123, 'lumi_section_num': 12},{}....]\n :key file_parent_list: (optional, default = []) [{'file_parent_lfn': 'mylfn'},{}....]\n :key file_assoc_list: (optional, default = []) [{'file_parent_lfn': 'mylfn'},{}....]\n :key file_output_config_list: (optional, default = []) [{'app_name':..., 'release_version':..., 'pset_hash':...., output_module_label':...},{}.....]\n\n \"\"\"\n if qInserts in (False, 'False'): qInserts=False\n try:\n body = request.body.read()\n indata = cjson.decode(body)[\"files\"]\n if not isinstance(indata, (list, dict)):\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Invalid Input DataType\", self.logger.exception, \\\n \"insertFile expects input as list or dirc\")\n businput = []\n if isinstance(indata, dict):\n indata = [indata]\n indata = validateJSONInputNoCopy(\"files\", indata)\n for f in indata:\n f.update({\n #\"dataset\":f[\"dataset\"],\n \"creation_date\": f.get(\"creation_date\", dbsUtils().getTime()),\n \"create_by\" : dbsUtils().getCreateBy(),\n \"last_modification_date\": f.get(\"last_modification_date\", dbsUtils().getTime()),\n \"last_modified_by\": f.get(\"last_modified_by\", dbsUtils().getCreateBy()),\n \"file_lumi_list\":f.get(\"file_lumi_list\", []),\n \"file_parent_list\":f.get(\"file_parent_list\", []),\n \"file_assoc_list\":f.get(\"assoc_list\", []),\n \"file_output_config_list\":f.get(\"file_output_config_list\", [])})\n businput.append(f)\n self.dbsFile.insertFile(businput, qInserts)\n except cjson.DecodeError as dc:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"Wrong format/data from insert File input\", self.logger.exception, str(dc))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = \"DBSWriterModel/insertFile. %s\\n. 
Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef updateFile(self, logical_file_name=[], is_file_valid=1, lost=0, dataset=''):\n if lost in [1, True, 'True', 'true', '1', 'y', 'yes']:\n lost = 1\n if is_file_valid in [1, True, 'True', 'true', '1', 'y', 'yes']:\n dbsExceptionHandler(\"dbsException-invalid-input2\", dbsExceptionCode[\"dbsException-invalid-input2\"], self.logger.exception,\\\n \"Lost file must set to invalid\" )\n else: lost = 0\n \n for f in logical_file_name, dataset:\n if '*' in f or '%' in f:\n dbsExceptionHandler(\"dbsException-invalid-input2\", dbsExceptionCode[\"dbsException-invalid-input2\"], self.logger.exception, \"No \\\n wildcard allow in LFN or dataset for updatefile API.\" )\n try:\n self.dbsFile.updateStatus(logical_file_name, is_file_valid, lost, dataset)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = \"DBSWriterModel/updateFile. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to update status of a file."}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ncreates a new record set from a CIDR string.", "response": "def create_from_string(cls, cidr, label=None, whitelist=False):\n \"\"\"\n Converts a CIDR like 192.168.0.0/24 into 2 parts:\n start: 3232235520\n stop: 3232235775\n \"\"\"\n network = netaddr.IPNetwork(cidr)\n start = network.first\n stop = start + network.size - 1\n obj = cls.objects.create(label=label, start=start, stop=stop,\n whitelist=whitelist)\n return obj"}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef qs_for_ip(cls, ip_str):\n ip = int(netaddr.IPAddress(ip_str))\n\n # ignore IPv6 addresses for now (4294967295 is 0xffffffff, aka the\n # biggest 32-bit number)\n if ip > 4294967295:\n return cls.objects.none()\n\n ip_range_query = {\n 'start__lte': ip,\n 'stop__gte': ip\n }\n\n return cls.objects.filter(**ip_range_query)", "response": "Returns a queryset with matching IPNetwork objects for the given IP."}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns True if the provided IP is in the blacklist and does not exist in the whitelist. Otherwise return False.", "response": "def matches_ip(cls, ip_str, read_preference=None):\n \"\"\"\n Return True if provided IP exists in the blacklist and doesn't exist\n in the whitelist. Otherwise, return False.\n \"\"\"\n qs = cls.qs_for_ip(ip_str).only('whitelist')\n if read_preference:\n qs = qs.read_preference(read_preference)\n\n # Return True if any docs match the IP and none of them represent\n # a whitelist\n return bool(qs) and not any(obj.whitelist for obj in qs)"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef execute(self, conn, block_id=\"\", transaction=False):\n if not conn:\n dbsExceptionHandler(\"dbsException-db-conn-failed\", \"Oracle/FileBuffer/List. Expects db connection from upper layer.\")\n\n\tsql = self.sql\n binds = { \"block_id\" : block_id}\n cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)\n result = self.formatCursor(cursors[0])\n return result", "response": "Execute a read - only command."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a list of all primary datasets that match the given pattern.", "response": "def execute(self, conn, primary_ds_name=\"\", primary_ds_type=\"\", transaction=False):\n \"\"\"\n Lists all primary datasets if pattern is not provided.\n \"\"\"\t\n sql = self.sql\n binds = {}\n #import pdb\n #pdb.set_trace()\n if primary_ds_name and primary_ds_type in ('', None, '%'):\n op = (\"=\", \"like\")[\"%\" in primary_ds_name]\n sql += \"WHERE P.PRIMARY_DS_NAME %s :primary_ds_name\" % op\n binds.update(primary_ds_name=primary_ds_name)\n elif primary_ds_type and primary_ds_name in ('', None, '%'):\n op = (\"=\", \"like\")[\"%\" in primary_ds_type]\n sql += \"WHERE PT.PRIMARY_DS_TYPE %s :primary_ds_type\" % op\n binds.update(primary_ds_type=primary_ds_type)\n elif primary_ds_name and primary_ds_type:\n op = (\"=\", \"like\")[\"%\" in primary_ds_name]\n op1 = (\"=\", \"like\")[\"%\" in primary_ds_type]\n sql += \"WHERE P.PRIMARY_DS_NAME %s :primary_ds_name and PT.PRIMARY_DS_TYPE %s :primary_ds_type\"\\\n %(op, op1)\n binds.update(primary_ds_name=primary_ds_name)\n binds.update(primary_ds_type=primary_ds_type)\n else:\n pass\n\tcursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)\n result = []\n for c in cursors:\n result.extend(self.formatCursor(c, size=100))\n return result"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef configure_proxy(self, curl_object):\n curl_object.setopt(curl_object.PROXY, self._proxy_hostname)\n curl_object.setopt(curl_object.PROXYPORT, self._proxy_port)\n curl_object.setopt(curl_object.PROXYTYPE, curl_object.PROXYTYPE_SOCKS5)\n if self._proxy_user and self._proxy_passwd:\n curl_object.setopt(curl_object.PROXYUSERPWD, '%s:%s' % (self._proxy_user, self._proxy_port))", "response": "configure pycurl proxy settings"}
{"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ninsert a new entry in DB.", "response": "def execute( self, conn, daoinput, transaction = False ):\n \"\"\"\n daoinput must be validated to have the following keys:\n child_parent_id__list[[cid, pid],...], block_name\n \"\"\"\n binds = {} \n bindlist=[]\n \n if isinstance(daoinput, dict) and \"block_name\" in daoinput.keys():\n binds = {\"block_name\": daoinput[\"block_name\"]}\n r = self.dbi.processData(self.sql_sel, binds, conn, False)\n bfile = self.format(r)\n bfile_list = []\n for f in bfile:\n bfile_list.append(f[0]) \n if \"child_parent_id_list\" in daoinput.keys():\n files = []\n for i in daoinput[\"child_parent_id_list\"]:\n files.append(i[0])\n if set(files)-set(bfile_list):\n dbsExceptionHandler('dbsException-invalid-input2', \"Files required in the same block for FileParent/insert2 dao.\", self.logger.exception) \n else:\n dbsExceptionHandler('dbsException-invalid-input2', \"child_parent_id_list required for FileParent/insert2 dao.\", self.logger.exception) \n else:\n dbsExceptionHandler('dbsException-invalid-input2', \"Block_name required in the same block for FileParent/insert2 dao.\", self.logger.exception)\n binds = {} \n for pf in daoinput[\"child_parent_id_list\"]:\n binds = {\"this_file_id\":pf[0], \"parent_file_id\": pf[1]}\n bindlist.append(binds) \n self.dbi.processData(self.sql, bindlist, conn, transaction)"}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef execute(self, conn, acquisition_era_name,end_date, transaction = False):\n\tif not conn:\n\t dbsExceptionHandler(\"dbsException-failed-connect2host\", \"dbs/dao/Oracle/AcquisitionEra/updateEndDate expects db connection from upper layer.\", self.logger.exception)\n binds = { \"acquisition_era_name\" :acquisition_era_name , \"end_date\" : end_date }\n result = self.dbi.processData(self.sql, binds, conn, transaction)", "response": "Update the end date of a given acquisitionera"}
{"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nupdates the migration block and the related resources.", "response": "def execute(self, conn, daoinput, transaction = False):\n \"\"\"\n\t daoinput keys:\n\t migration_status, migration_block_id, migration_request_id\n \"\"\"\n #print daoinput['migration_block_id']\n if not conn:\n\t dbsExceptionHandler(\"dbsException-failed-connect2host\", \"Oracle/MigrationBlock/Update. Expects db connection from upper layer.\" ,self.logger.exception)\n if daoinput['migration_status'] == 1:\n sql = self.sql + \" (MIGRATION_STATUS = 0 or MIGRATION_STATUS = 3)\" \n elif daoinput['migration_status'] == 2 or daoinput['migration_status'] == 3 or daoinput['migration_status'] == 9:\n sql = self.sql + \" MIGRATION_STATUS = 1 \"\n else: \n dbsExceptionHandler(\"dbsException-conflict-data\", \"Oracle/MigrationBlock/Update. Expected migration status to be 1, 2, 3, 0r 9\" ,self.logger.exception ) \n #print sql\n if 'migration_request_id' in daoinput:\n sql3 = sql + \"and MIGRATION_REQUEST_ID =:migration_request_id\"\n result = self.dbi.processData(sql3, daoinput, conn, transaction)\n elif 'migration_block_id' in daoinput:\n if type(daoinput['migration_block_id']) is not list:\n sql2 = sql+ \" and MIGRATION_BLOCK_ID =:migration_block_id\"\n result = self.dbi.processData(sql2, daoinput, conn, transaction)\n else:\n bk_id_generator, binds2 = create_token_generator(daoinput['migration_block_id']) \n newdaoinput = {}\n newdaoinput.update({\"migration_status\":daoinput[\"migration_status\"],\n \"last_modification_date\":daoinput[\"last_modification_date\"]})\n newdaoinput.update(binds2)\n sql2 = sql+ \"\"\" and MIGRATION_BLOCK_ID in ({bk_id_generator} SELECT TOKEN FROM TOKEN_GENERATOR)\n \"\"\".format(bk_id_generator=bk_id_generator)\n result = self.dbi.processData(sql2, newdaoinput, conn, transaction)\n else:\n dbsExceptionHandler(\"dbsException-conflict-data\", \"Oracle/MigrationBlock/Update. 
Required IDs not in the input\", self.logger.exception)"}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets list of file_id from DB.", "response": "def execute(self, conn, file_id_list, transaction=False):\n\t\"\"\"\n\tfile_id_list : file_id_list \n\t\"\"\"\n\tsql=self.sql\n\tbinds={}\n\tif file_id_list:\n\t count=0\n\t for an_id in file_id_list:\n\t\tif count > 0: sql += \", \"\n\t\tsql += \":file_id_%s\" %count\n\t\tbinds.update({\"file_id_%s\" %count : an_id})\n\t\tcount+=1\n\t sql += \")\"\n\telse:\n dbsExceptionHandler('dbsException-invalid-input', \"Oracle/FileParentBlock/List. this_file_id not provided\", self.logger.exception)\n \n result = self.dbi.processData(sql, binds, conn, transaction)\n plist = self.formatDict(result)\n\treturn plist"}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\nexecute the SQL query and returns the output module config id for the given application release version and global tag.", "response": "def execute(self, conn, app, release_version, pset_hash, output_label, global_tag, transaction = False):\n \"\"\"\n returns id for a given application\n\n This always requires all four variables to be set, because\n you better have them in blockInsert\n \"\"\"\n binds = {}\n binds[\"app_name\"]=app\n binds[\"release_version\"]=release_version\n binds[\"pset_hash\"]=pset_hash\n binds[\"output_module_label\"]=output_label\n binds[\"global_tag\"]=global_tag\n\n result = self.dbi.processData(self.sql, binds, conn, transaction)\n\n plist = self.formatDict(result)\n\n\tif len(plist) < 1: return -1\n return plist[0][\"output_mod_config_id\"]"}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef updateStatus(self, block_name=\"\", open_for_writing=0):\n if open_for_writing not in [1, 0, '1', '0']:\n msg = \"DBSBlock/updateStatus. open_for_writing can only be 0 or 1 : passed %s.\"\\\n % open_for_writing \n dbsExceptionHandler('dbsException-invalid-input', msg)\n conn = self.dbi.connection()\n trans = conn.begin()\n try :\n open_for_writing = int(open_for_writing)\n self.updatestatus.execute(conn, block_name, open_for_writing, dbsUtils().getTime(), trans)\n trans.commit()\n trans = None\n except Exception as ex:\n if trans:\n trans.rollback()\n if conn:conn.close()\n raise ex\n finally:\n if conn:conn.close()", "response": "Update status of a specific entry in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nupdates the origin_site_name for a given block name", "response": "def updateSiteName(self, block_name, origin_site_name):\n \"\"\"\n Update the origin_site_name for a given block name\n \"\"\"\n if not origin_site_name:\n dbsExceptionHandler('dbsException-invalid-input',\n \"DBSBlock/updateSiteName. origin_site_name is mandatory.\")\n conn = self.dbi.connection()\n trans = conn.begin()\n try:\n self.updatesitename.execute(conn, block_name, origin_site_name)\n except:\n if trans:\n trans.rollback()\n raise\n else:\n if trans:\n trans.commit()\n finally:\n if conn:\n conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listBlockParents(self, block_name=\"\"):\n if not block_name:\n msg = \" DBSBlock/listBlockParents. Block_name must be provided as a string or a list. \\\n No wildcards allowed in block_name/s.\"\n dbsExceptionHandler('dbsException-invalid-input', msg)\n elif isinstance(block_name, basestring):\n try:\n block_name = str(block_name)\n if '%' in block_name or '*' in block_name:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSReaderModel/listBlocksParents: \\\n NO WILDCARDS allowed in block_name.\")\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSBlock/listBlockParents. Block_name must be \\\n provided as a string or a list. No wildcards allowed in block_name/s .\")\n elif type(block_name) is list:\n for b in block_name:\n if '%' in b or '*' in b:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSReaderModel/listBlocksParents: \\\n NO WILDCARDS allowed in block_name.\")\n else:\n msg = \"DBSBlock/listBlockParents. Block_name must be provided as a string or a list. \\\n No wildcards allowed in block_name/s .\"\n dbsExceptionHandler(\"dbsException-invalid-input\", msg)\n conn = self.dbi.connection()\n try:\n results = self.blockparentlist.execute(conn, block_name)\n return results\n finally:\n if conn:\n conn.close()", "response": "API to list parents of a block."}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nlists all children of a block", "response": "def listBlockChildren(self, block_name=\"\"):\n        \"\"\"\n        list parents of a block\n        \"\"\"\n        if (not block_name) or re.search(\"['%','*']\", block_name):\n            dbsExceptionHandler(\"dbsException-invalid-input\", \"DBSBlock/listBlockChildren. Block_name must be provided.\" )\n        conn = self.dbi.connection()\n        try:\n            results = self.blockchildlist.execute(conn, block_name)\n            return results\n        finally:\n            if conn:\n                conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef listBlocks(self, dataset=\"\", block_name=\"\", data_tier_name=\"\", origin_site_name=\"\",\n logical_file_name=\"\", run_num=-1, min_cdate=0, max_cdate=0,\n min_ldate=0, max_ldate=0, cdate=0, ldate=0, open_for_writing=-1, detail=False):\n \"\"\"\n dataset, block_name, data_tier_name or logical_file_name must be passed.\n \"\"\"\n if (not dataset) or re.search(\"['%','*']\", dataset):\n if (not block_name) or re.search(\"['%','*']\", block_name):\n if (not logical_file_name) or re.search(\"['%','*']\", logical_file_name):\n if not data_tier_name or re.search(\"['%','*']\", data_tier_name):\n msg = \"DBSBlock/listBlock. You must specify at least one parameter(dataset, block_name,\\\n\t\t\t \tdata_tier_name, logical_file_name) with listBlocks api\"\n dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)\n\n if data_tier_name:\n if not (min_cdate and max_cdate) or (max_cdate-min_cdate)>32*24*3600:\n msg = \"min_cdate and max_cdate are mandatory parameters. If data_tier_name parameter is used \\\n the maximal time range allowed is 31 days\"\n dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)\n if detail:\n msg = \"DBSBlock/listBlock. Detail parameter not allowed togther with data_tier_name\"\n dbsExceptionHandler('dbsException-invalid-input2', msg, self.logger.exception, msg)\n\n with self.dbi.connection() as conn:\n dao = (self.blockbrieflist, self.blocklist)[detail]\n for item in dao.execute(conn, dataset, block_name, data_tier_name, origin_site_name, logical_file_name, run_num,\n min_cdate, max_cdate, min_ldate, max_ldate, cdate, ldate):\n yield item", "response": "API to list all blocks in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef insertBlock(self, businput):\n if not (\"block_name\" in businput and \"origin_site_name\" in businput ):\n dbsExceptionHandler('dbsException-invalid-input', \"business/DBSBlock/insertBlock must have block_name and origin_site_name as input\")\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n blkinput = {\n \"last_modification_date\":businput.get(\"last_modification_date\", dbsUtils().getTime()),\n #\"last_modified_by\":businput.get(\"last_modified_by\", dbsUtils().getCreateBy()),\n \"last_modified_by\":dbsUtils().getCreateBy(),\n #\"create_by\":businput.get(\"create_by\", dbsUtils().getCreateBy()),\n \"create_by\":dbsUtils().getCreateBy(),\n \"creation_date\":businput.get(\"creation_date\", dbsUtils().getTime()),\n \"open_for_writing\":businput.get(\"open_for_writing\", 1),\n \"block_size\":businput.get(\"block_size\", 0),\n \"file_count\":businput.get(\"file_count\", 0),\n \"block_name\":businput.get(\"block_name\"),\n \"origin_site_name\":businput.get(\"origin_site_name\")\n }\n ds_name = businput[\"block_name\"].split('#')[0]\n blkinput[\"dataset_id\"] = self.datasetid.execute(conn, ds_name, tran)\n if blkinput[\"dataset_id\"] == -1 : \n msg = \"DBSBlock/insertBlock. Dataset %s does not exists\" % ds_name\n dbsExceptionHandler('dbsException-missing-data', msg)\n blkinput[\"block_id\"] = self.sm.increment(conn, \"SEQ_BK\", tran)\n self.blockin.execute(conn, blkinput, tran)\n\n tran.commit()\n tran = None\n except Exception as e:\n if str(e).lower().find(\"unique constraint\") != -1 or str(e).lower().find(\"duplicate\") != -1:\n pass\n else:\n if tran:\n tran.rollback()\n if conn: conn.close()\n raise\n \n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()", "response": "Insert a new block into DBS."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nlisting all data tiers in DBS.", "response": "def listDataTiers(self, data_tier_name=\"\"):\n \"\"\"\n List data tier(s)\n \"\"\"\n if not isinstance(data_tier_name, basestring) :\n dbsExceptionHandler('dbsException-invalid-input',\n 'data_tier_name given is not valid : %s' % data_tier_name)\n else:\n try:\n data_tier_name = str(data_tier_name)\n except:\n dbsExceptionHandler('dbsException-invalid-input',\n 'data_tier_name given is not valid : %s' % data_tier_name)\n conn = self.dbi.connection()\n try:\n result = self.dataTier.execute(conn, data_tier_name.upper())\n return result\n finally:\n if conn:\n conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nreturning a list of all the available types for the given site.", "response": "def execute(self, conn, site_name= \"\", transaction = False):\n \"\"\"\n Lists all sites types if site_name is not provided.\n \"\"\"\n sql = self.sql\n if site_name == \"\":\n result = self.dbi.processData(sql, conn=conn, transaction=transaction)\n else:\n sql += \"WHERE S.SITE_NAME = :site_name\" \n binds = { \"site_name\" : site_name }\n result = self.dbi.processData(sql, binds, conn, transaction)\n return self.formatDict(result)"}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listDataType(self, dataType=\"\", dataset=\"\"):\n conn = self.dbi.connection()\n try:\n if dataset and dataType:\n dbsExceptionHandler('dbsException-invalid-input',\n \"DBSDataType/listDataType. Data Type can be only searched by data_type or by dataset, not both.\")\n else:\n result = self.dataType.execute(conn, dataType, dataset)\n return result\n finally:\n if conn:\n conn.close()", "response": "List data type of a specific resource."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef getBlocks(self):\n try:\n conn = self.dbi.connection()\n result = self.buflistblks.execute(conn)\n return result\n finally:\n if conn:\n conn.close()", "response": "Get the blocks that need to be migrated"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef getBufferedFiles(self, block_id):\n \n try:\n conn = self.dbi.connection()\n result = self.buflist.execute(conn, block_id)\n return result\n finally:\n if conn:\n conn.close()", "response": "Get some files from the insert buffer"}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a list of all the items in the database that match the given datatier name", "response": "def execute(self, conn, data_tier_name='', transaction = False, cache=None):\n \"\"\"\n returns id for a given datatier name\n \"\"\"\n\tif cache:\n ret=cache.get(\"DATA_TIERS\")\n if not ret==None:\n return ret\n sql = self.sql\n\tbinds={}\n\tif data_tier_name:\n op = ('=', 'like')['%' in data_tier_name]\n\t sql += \"WHERE DT.DATA_TIER_NAME %s :datatier\" %op \n\t binds = {\"datatier\":data_tier_name}\n result = self.dbi.processData(sql, binds, conn, transaction)\n plist = self.formatDict(result)\n return plist"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef execute(self, conn, migration_url=\"\", migration_input=\"\", create_by=\"\", migration_request_id=\"\", transaction=False):\n binds = {}\n\tresult = self.dbi.processData(self.sql, binds, conn, transaction)\n result = self.formatDict(result)\n\tif len(result) == 0 :\n\t return []\n\tif result[0][\"migration_request_id\"] in ('', None) :\n\t return []\n return result", "response": "Executes the SQL query and returns the result."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nreturn a list of all processing eras in the database.", "response": "def listProcessingEras(self, processing_version=''):\n \"\"\"\n Returns all processing eras in dbs\n \"\"\"\n conn = self.dbi.connection()\n try:\n result = self.pelst.execute(conn, processing_version)\n return result\n finally:\n if conn:\n conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef insertProcessingEra(self, businput):\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n businput[\"processing_era_id\"] = self.sm.increment(conn, \"SEQ_PE\", tran)\n businput[\"processing_version\"] = businput[\"processing_version\"]\n self.pein.execute(conn, businput, tran)\n tran.commit()\n tran = None\n except KeyError as ke:\n dbsExceptionHandler('dbsException-invalid-input',\n \"Invalid input:\" + ke.args[0])\n except Exception as ex:\n if (str(ex).lower().find(\"unique constraint\") != -1 or\n str(ex).lower().find(\"duplicate\") != -1):\n # already exist\n self.logger.warning(\"DBSProcessingEra/insertProcessingEras. \" +\n \"Unique constraint violation being ignored...\")\n self.logger.warning(ex)\n else:\n if tran:\n tran.rollback()\n tran = None\n raise\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()", "response": "Insert a new processingEra into the database."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef listPhysicsGroups(self, physics_group_name=\"\"):\n if not isinstance(physics_group_name, basestring):\n dbsExceptionHandler('dbsException-invalid-input',\n 'physics group name given is not valid : %s' %\n physics_group_name)\n else:\n try:\n physics_group_name = str(physics_group_name)\n except:\n dbsExceptionHandler('dbsException-invalid-input',\n 'physics group name given is not valid : %s' %\n physics_group_name)\n\n conn = self.dbi.connection()\n try:\n result = self.pglist.execute(conn, physics_group_name)\n return result\n finally:\n if conn:\n conn.close()", "response": "Get all physics groups in DBS."}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef create_token_generator(input_list):\n ###Generator trick from http://betteratoracle.com/posts/20-how-do-i-bind-a-variable-in-list\n ###The maximum length of the comma separated list is 4000 characters, therefore we need to split the list\n ###ORA-01460: unimplemented or unreasonable conversion requested will thrown if list is larger\n oracle_limit = 4000\n grp_list = []\n if type(input_list[0]) == int :\n input_str = ','.join(map(str, input_list))\n else:\n input_str = ','.join(input_list) \n\n if len(input_str) >= oracle_limit:\n index = 0\n while True:\n begin, end = index, index+oracle_limit\n\t if end > len(input_str):\n end = len(input_str)\n grp_list.append(input_str[begin:end])\n break\n\t else: \t\n\t\tindex = input_str.rfind(',', begin, end)\n\t\tif index == -1:\n\t\t\tbreak\n\t\tgrp_list.append(input_str[begin:index])\n\t\tindex += 1 #to remove the leading comma\n else:\n grp_list.append(input_str)\n\n token_generator = \"\"\"\n WITH TOKEN_GENERATOR AS (\n \"\"\"\n binds = {}\n for index, chunk in enumerate(grp_list):\n if index:\n token_generator += \"\"\"\n UNION ALL\n \"\"\"\n bind = \"token_%s\" % index\n token_generator += \"\"\"SELECT REGEXP_SUBSTR(:{bind}, '[^,]+', 1, LEVEL) token\n FROM DUAL\n CONNECT BY LEVEL <= LENGTH(:{bind}) - LENGTH(REPLACE(:{bind}, ',', '')) + 1\n \"\"\".format(bind=bind)\n binds.update({bind: chunk})\n token_generator += \")\"\n\n return token_generator, binds", "response": "Create SQL Generator to select from list of values in Oracle"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef getServices(self):\n try:\n conn = self.dbi.connection()\n result = self.serviceslist.execute(conn)\n return result\n except Exception as ex:\n msg = ((\"%s DBSServicesRegistry/getServices.\" + \n \" %s\\n. Exception trace: \\n %s\") %\n (DBSEXCEPTIONS['dbsException-3'], ex,\n traceback.format_exc()))\n self.logger.exception(msg )\n raise Exception (\"dbsException-3\", msg )\n finally:\n conn.close()", "response": "Simple method that returns list of all known DBS instances"}
{"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nadds a service to the service registry", "response": "def addService(self):\n \"\"\"\n Add a service to service registry\n \"\"\"\n\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n \n body = request.body.read()\n service = cjson.decode(body)\n addthis = {}\n addthis['service_id'] = self.sm.increment(conn, \"SEQ_RS\", tran)\n addthis['name'] = service.get('NAME', '')\n if addthis['name'] == '':\n msg = ((\"%s DBSServicesRegistry/addServices.\" +\n \" Service Must be Named\\n\") %\n DBSEXCEPTIONS['dbsException-3'])\n raise Exception(\"dbsException-3\", msg)\n addthis['type'] = service.get('TYPE', 'GENERIC')\n addthis['location'] = service.get('LOCATION', 'HYPERSPACE')\n addthis['status'] = service.get('STATUS', 'UNKNOWN')\n addthis['admin'] = service.get('ADMIN', 'UNADMINISTRATED')\n addthis['uri'] = service.get('URI', '')\n if addthis['uri'] == '':\n msg = ((\"%s DBSServicesRegistry/addServices.\" + \n \" Service URI must be provided.\\n\") %\n DBSEXCEPTIONS['dbsException-3'])\n self.logger.exception(msg)\n raise Exception(\"dbsException-3\", msg)\n addthis['db'] = service.get('DB', 'NO_DATABASE')\n addthis['version'] = service.get('VERSION', 'UNKNOWN' )\n addthis['last_contact'] = dbsUtils().getTime()\n addthis['comments'] = service.get('COMMENTS', 'NO COMMENTS')\n addthis['alias'] = service.get('ALIAS', 'No Alias')\n self.servicesadd.execute(conn, addthis, tran)\n tran.commit()\n except exceptions.IntegrityError as ex:\n if (str(ex).find(\"unique constraint\") != -1 or\n str(ex).lower().find(\"duplicate\") != -1) :\n #Update the service instead\n try:\n self.servicesupdate.execute(conn, addthis, tran)\n tran.commit()\n except Exception as ex:\n msg = ((\"%s DBSServiceRegistry/addServices.\" + \n \" %s\\n. 
Exception trace: \\n %s\") %\n (DBSEXCEPTIONS['dbsException-3'], ex,\n traceback.format_exc()))\n self.logger.exception(msg ) \n raise Exception (\"dbsException-3\", msg )\n except Exception as ex:\n tran.rollback()\n msg = ((\"%s DBSServiceRegistry/addServices.\" + \n \" %s\\n. Exception trace: \\n %s\") %\n (DBSEXCEPTIONS['dbsException-3'], ex,\n traceback.format_exc()))\n self.logger.exception(msg )\n raise Exception (\"dbsException-3\", msg )\n finally:\n conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef execute(self, conn, daoinput, transaction = False):\n if not conn:\n\t dbsExceptionHandler(\"dbsException-failed-connect2host\", \"Oracle/MigrationRequests/UpdateRequestStatus. Expects db connection from upper layer.\", \n self.logger.exception)\n if daoinput['migration_status'] == 1:\n sql = self.sql2 \n elif daoinput['migration_status'] == 2:\n sql = self.sql + \" and MIGRATION_STATUS = 1 \"\n elif daoinput['migration_status'] == 3:\n sql = self.sql3 + \" and MIGRATION_STATUS = 1 \" \n else:\n dbsExceptionHandler(\"dbsException-conflict-data\", \"Oracle/MigrationRequest/UpdateRequestStatus. Expected migration status to be 1, 2 or 3\",\n self.logger.exception)\n \n\tresult = self.dbi.processData(sql, daoinput, conn, transaction)", "response": "Update the migration request status."}
{"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\nlists all the available migration resources.", "response": "def execute(self, conn, migration_url=\"\", migration_input=\"\", create_by=\"\", migration_request_id=\"\", oldest= False, transaction=False):\n \"\"\"\n Lists all requests if pattern is not provided.\n \"\"\"\n sql = self.sql\n binds = {}\n\tif migration_request_id:\n\t sql += \" WHERE MR.MIGRATION_REQUEST_ID=:migration_request_id\"\n\t binds['migration_request_id']=migration_request_id\n elif oldest:\n #FIXME: Need to write the sql.YG\n #current_date = dbsUtils().getTime()\n #we require waiting time for \n #retry_count=0 is 1 minutes\n #retry_count=1 is 2 minutes\n #retyr_count=2 is 4 minutes\n\n sql += \"\"\"\n WHERE MR.MIGRATION_STATUS=0 \n or (MR.migration_status=3 and MR.retry_count=0 and MR.last_modification_date <= :current_date-60) \n or (MR.migration_status=3 and MR.retry_count=1 and MR.last_modification_date <= :current_date-120) \n or (MR.migration_status=3 and MR.retry_count=2 and MR.last_modification_date <= :current_date-240)\n ORDER BY MR.creation_date\n \"\"\" \n binds['current_date'] = dbsUtils().getTime()\n #print \"time= \" + str(binds['current_date'])\n else: \n\t if migration_url or migration_input or create_by:\n\t\tsql += \" WHERE \"\n\t if migration_url:\n\t\tsql += \" MR.MIGRATION_URL=:migration_url\"\n\t\tbinds['migration_url']=migration_url\n\t if migration_input:\n\t\tif migration_url:\n\t\t sql += \" AND \"\n\t\top = (\"=\", \"like\")[\"%\" in migration_input]\n\t\tsql += \" MR.MIGRATION_INPUT %s :migration_input\" % op\n\t\tbinds['migration_input']=migration_input\n\t if create_by:\n\t\tif migration_url or migration_input:\n\t\t sql += \" AND \"\n\t\tsql += \" MR.CREATE_BY=:create_by\" %create_by\n\t\tbinds['create_by']=create_by\n\tcursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)\n result = []\n for c in cursors:\n result.extend(self.formatCursor(c, 
size=100))\n return result"}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef execute(self, conn, logical_file_name, transaction=False):\n\tif not conn:\n\t dbsExceptionHandler(\"dbsException-db-conn-failed\", \"Oracle/FileBuffer/DeleteDupicates. Expects db connection from upper layer.\")\n\n\tprint(self.sql)\n self.dbi.processData(self.sql, logical_file_name, conn, transaction)", "response": "Delete duplicate entries from DB."}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ngets all the related resources for a given block.", "response": "def execute(self, conn, block_name=\"\", transaction = False):\n \"\"\"\n block: /a/b/c#d\n \"\"\"\n if not conn:\n\t dbsExceptionHandler(\"dbsException-failed-connect2host\", \"Oracle/BlockParent/List. Expects db connection from upper layer.\", self.logger.exception)\n\n sql = self.sql\n \n\tif isinstance(block_name, basestring):\n\t binds = {'block_name' :block_name}\n elif type(block_name) is list:\n binds = [{'block_name':x} for x in block_name]\n else: \n msg = \"Oracle/BlockParent/List. Block_name must be provided either as a string or as a list.\"\n dbsExceptionHandler('dbsException-invalid-input', msg, self.logger.exception)\n\tresult = self.dbi.processData(sql, binds, conn, transaction)\n return self.formatDict(result)"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef execute(self, conn, origin_site_name=\"\", dataset=\"\", block_name=\"\", transaction = False):\n if not conn:\n dbsExceptionHandler(\"dbsException-db-conn-failed\",\n \"Oracle/Block/List. Expects db connection from upper layer.\", self.logger.exception)\n binds = {}\n if origin_site_name:\n wheresql = 'WHERE B.ORIGIN_SITE_NAME = :origin_site_name'\n binds.update(origin_site_name=origin_site_name)\n\n if dataset:\n if 'wheresql' in locals():\n wheresql += ' AND DS.DATASET = :dataset'\n else:\n wheresql = 'WHERE DS.DATASET = :dataset'\n binds.update(dataset=dataset)\n\n if block_name:\n if 'wheresql' in locals():\n wheresql += ' AND B.BLOCK_NAME = :block_name'\n else:\n wheresql = 'WHERE B.BLOCK_NAME = :block_name'\n binds.update(block_name=block_name)\n\n sql = '{sql} {wheresql}'.format(sql=self.sql, wheresql=wheresql)\n\n cursors = self.dbi.processData(sql, binds, conn, transaction, returnCursor=True)\n result = []\n for cursor in cursors:\n result.extend(self.formatCursor(cursor, size=100))\n return result", "response": "Returns list of all available buffer entries for a given origin site name dataset and block name."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listPrimaryDatasets(self, primary_ds_name=\"\", primary_ds_type=\"\"):\n conn = self.dbi.connection()\n try:\n result = self.primdslist.execute(conn, primary_ds_name, primary_ds_type)\n if conn: conn.close()\n return result\n finally:\n if conn:\n conn.close()", "response": "Returns a list of all primary datasets."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listPrimaryDSTypes(self, primary_ds_type=\"\", dataset=\"\"):\n conn = self.dbi.connection()\n try:\n result = self.primdstypeList.execute(conn, primary_ds_type, dataset)\n if conn: conn.close()\n return result\n finally:\n if conn:\n conn.close()", "response": "Returns a list of primary dataset types."}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef insertPrimaryDataset(self, businput):\n conn = self.dbi.connection()\n tran = conn.begin()\n #checking for required fields\n if \"primary_ds_name\" not in businput:\n dbsExceptionHandler(\"dbsException-invalid-input\",\n \" DBSPrimaryDataset/insertPrimaryDataset. \" +\n \"Primary dataset Name is required for insertPrimaryDataset.\")\n try:\n businput[\"primary_ds_type_id\"] = (self.primdstypeList.execute(conn, businput[\"primary_ds_type\"]\n ))[0][\"primary_ds_type_id\"]\n del businput[\"primary_ds_type\"]\n businput[\"primary_ds_id\"] = self.sm.increment(conn, \"SEQ_PDS\")\n self.primdsin.execute(conn, businput, tran)\n tran.commit()\n tran = None\n except KeyError as ke:\n dbsExceptionHandler(\"dbsException-invalid-input\",\n \" DBSPrimaryDataset/insertPrimaryDataset. Missing: %s\" % ke)\n self.logger.warning(\" DBSPrimaryDataset/insertPrimaryDataset. Missing: %s\" % ke)\n except IndexError as ie:\n dbsExceptionHandler(\"dbsException-missing-data\",\n \" DBSPrimaryDataset/insertPrimaryDataset. %s\" % ie)\n self.logger.warning(\" DBSPrimaryDataset/insertPrimaryDataset. Missing: %s\" % ie)\n except Exception as ex:\n if (str(ex).lower().find(\"unique constraint\") != -1 or\n str(ex).lower().find(\"duplicate\") != -1):\n self.logger.warning(\"DBSPrimaryDataset/insertPrimaryDataset:\" +\n \" Unique constraint violation being ignored...\")\n self.logger.warning(ex)\n else:\n if tran:\n tran.rollback()\n if conn: conn.close()\n raise\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()", "response": "Insert a new primary dataset into the database."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef execute(self, conn, name='', transaction = False):\n binds={}\n if name:\n op = ('=', 'like')['%' in name]\n sql = self.sql + \" WHERE pg.physics_group_name %s :physicsgroup\" % (op)\n binds = {\"physicsgroup\": name}\n else:\n sql = self.sql\n self.logger.debug(sql) \n result = self.dbi.processData(sql, binds, conn, transaction)\n plist = self.formatDict(result)\n self.logger.debug(plist)\n\tif len(plist) < 1: return []\n return plist", "response": "returns a list of physics group names"}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef insertOutputConfig(self, businput):\n if not (\"app_name\" in businput and \"release_version\" in businput\\\n and \"pset_hash\" in businput and \"output_module_label\" in businput\n and \"global_tag\" in businput):\n dbsExceptionHandler('dbsException-invalid-input', \"business/DBSOutputConfig/insertOutputConfig require:\\\n app_name, release_version, pset_hash, output_module_label and global_tag\")\n\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n # Proceed with o/p module insertion\n businput['scenario'] = businput.get(\"scenario\", None)\n businput['pset_name'] = businput.get(\"pset_name\", None)\n self.outmodin.execute(conn, businput, tran)\n tran.commit()\n tran = None\n except SQLAlchemyIntegrityError as ex:\n if str(ex).find(\"unique constraint\") != -1 or str(ex).lower().find(\"duplicate\") != -1:\n #if the validation is due to a unique constrain break in OUTPUT_MODULE_CONFIGS\n if str(ex).find(\"TUC_OMC_1\") != -1: pass\n #otherwise, try again\n else:\n try:\n self.outmodin.execute(conn, businput, tran)\n tran.commit()\n tran = None\n except SQLAlchemyIntegrityError as ex1:\n if str(ex1).find(\"unique constraint\") != -1 and str(ex1).find(\"TUC_OMC_1\") != -1: pass\n except Exception as e1:\n if tran:\n tran.rollback()\n tran = None\n raise\n else:\n raise\n except Exception as e:\n if tran:\n tran.rollback()\n raise\n finally:\n if tran:\n tran.rollback()\n if conn:\n conn.close()", "response": "Insert the Output Config into DBS."}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\ngets a list of supported REST APIs.", "response": "def getHelp(self, call=\"\"):\n \"\"\"\n API to get a list of supported REST APIs. In the case a particular API is specified,\n the docstring of that API is displayed.\n\n :param call: call to get detailed information about (Optional)\n :type call: str\n :return: List of APIs or detailed information about a specific call (parameters and docstring)\n :rtype: List of strings or a dictionary containing params and doc keys depending on the input parameter\n\n \"\"\"\n if call:\n params = self.methods['GET'][call]['args']\n doc = self.methods['GET'][call]['call'].__doc__\n return dict(params=params, doc=doc)\n else:\n return self.methods['GET'].keys()"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef listDatasets(self, dataset=\"\", parent_dataset=\"\", is_dataset_valid=1,\n release_version=\"\", pset_hash=\"\", app_name=\"\", output_module_label=\"\", global_tag=\"\",\n processing_version=0, acquisition_era_name=\"\", run_num=-1,\n physics_group_name=\"\", logical_file_name=\"\", primary_ds_name=\"\", primary_ds_type=\"\",\n processed_ds_name='', data_tier_name=\"\", dataset_access_type=\"VALID\", prep_id='', create_by=\"\", last_modified_by=\"\",\n min_cdate='0', max_cdate='0', min_ldate='0', max_ldate='0', cdate='0',\n ldate='0', detail=False, dataset_id=-1):\n \"\"\"\n API to list dataset(s) in DBS\n * You can use ANY combination of these parameters in this API\n * In absence of parameters, all valid datasets known to the DBS instance will be returned\n\n :param dataset: Full dataset (path) of the dataset.\n :type dataset: str\n :param parent_dataset: Full dataset (path) of the dataset\n :type parent_dataset: str\n :param release_version: cmssw version\n :type release_version: str\n :param pset_hash: pset hash\n :type pset_hash: str\n :param app_name: Application name (generally it is cmsRun)\n :type app_name: str\n :param output_module_label: output_module_label\n :type output_module_label: str\n :param global_tag: global_tag\n :type global_tag: str\n :param processing_version: Processing Version\n :type processing_version: str\n :param acquisition_era_name: Acquisition Era\n :type acquisition_era_name: str\n :param run_num: Specify a specific run number or range. Possible format are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...]. 
run_num=1 is not allowed.\n :type run_num: int,list,str\n :param physics_group_name: List only dataset having physics_group_name attribute\n :type physics_group_name: str\n :param logical_file_name: List dataset containing the logical_file_name\n :type logical_file_name: str\n :param primary_ds_name: Primary Dataset Name\n :type primary_ds_name: str\n :param primary_ds_type: Primary Dataset Type (Type of data, MC/DATA)\n :type primary_ds_type: str\n :param processed_ds_name: List datasets having this processed dataset name\n :type processed_ds_name: str\n :param data_tier_name: Data Tier\n :type data_tier_name: str\n :param dataset_access_type: Dataset Access Type ( PRODUCTION, DEPRECATED etc.)\n :type dataset_access_type: str\n :param prep_id: prep_id\n :type prep_id: str\n :param create_by: Creator of the dataset\n :type create_by: str\n :param last_modified_by: Last modifier of the dataset\n :type last_modified_by: str\n :param min_cdate: Lower limit for the creation date (unixtime) (Optional)\n :type min_cdate: int, str\n :param max_cdate: Upper limit for the creation date (unixtime) (Optional)\n :type max_cdate: int, str\n :param min_ldate: Lower limit for the last modification date (unixtime) (Optional)\n :type min_ldate: int, str\n :param max_ldate: Upper limit for the last modification date (unixtime) (Optional)\n :type max_ldate: int, str\n :param cdate: creation date (unixtime) (Optional)\n :type cdate: int, str\n :param ldate: last modification date (unixtime) (Optional)\n :type ldate: int, str\n :param detail: List all details of a dataset\n :type detail: bool\n :param dataset_id: dataset table primary key used by CMS Computing Analytics.\n :type dataset_id: int, long, str\n :returns: List of dictionaries containing the following keys (dataset). If the detail option is used. 
The dictionary contain the following keys (primary_ds_name, physics_group_name, acquisition_era_name, create_by, dataset_access_type, data_tier_name, last_modified_by, creation_date, processing_version, processed_ds_name, xtcrosssection, last_modification_date, dataset_id, dataset, prep_id, primary_ds_type)\n :rtype: list of dicts\n\n \"\"\"\n dataset = dataset.replace(\"*\", \"%\")\n parent_dataset = parent_dataset.replace(\"*\", \"%\")\n release_version = release_version.replace(\"*\", \"%\")\n pset_hash = pset_hash.replace(\"*\", \"%\")\n app_name = app_name.replace(\"*\", \"%\")\n output_module_label = output_module_label.replace(\"*\", \"%\")\n global_tag = global_tag.replace(\"*\", \"%\")\n logical_file_name = logical_file_name.replace(\"*\", \"%\")\n physics_group_name = physics_group_name.replace(\"*\", \"%\")\n primary_ds_name = primary_ds_name.replace(\"*\", \"%\")\n primary_ds_type = primary_ds_type.replace(\"*\", \"%\")\n data_tier_name = data_tier_name.replace(\"*\", \"%\")\n dataset_access_type = dataset_access_type.replace(\"*\", \"%\")\n processed_ds_name = processed_ds_name.replace(\"*\", \"%\")\n acquisition_era_name = acquisition_era_name.replace(\"*\", \"%\")\n #processing_version = processing_version.replace(\"*\", \"%\")\n #create_by and last_modified_by have be full spelled, no wildcard will allowed.\n #We got them from request head so they can be either HN account name or DN.\n #This is depended on how an user's account is set up.\n #\n # In the next release we will require dataset has no wildcard in it. \n # DBS will reject wildcard search with dataset name with listDatasets call. \n # One should seperate the dataset into primary , process and datatier if any wildcard.\n # YG Oct 26, 2016\n # Some of users were overwhiled by the API change. So we split the wildcarded dataset in the server instead of by the client.\n # YG Dec. 
9 2016\n #\n # run_num=1 caused full table scan and CERN DBS reported some of the queries ran more than 50 hours\n # We will disbale all the run_num=1 calls in DBS. Run_num=1 will be OK when logical_file_name is given.\n # YG Jan. 15 2019\n #\n if (run_num != -1 and logical_file_name ==''):\n for r in parseRunRange(run_num):\n if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long): \n if r == 1 or r == '1':\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception)\n elif isinstance(r, run_tuple):\n if r[0] == r[1]:\n dbsExceptionHandler('dbsException-invalid-input', \"DBS run range must be apart at least by 1.\", \n self.logger.exception)\n elif r[0] <= 1 <= r[1]:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception) \n\n if( dataset and ( dataset == \"/%/%/%\" or dataset== \"/%\" or dataset == \"/%/%\" ) ):\n dataset=''\n elif( dataset and ( dataset.find('%') != -1 ) ) :\n junk, primary_ds_name, processed_ds_name, data_tier_name = dataset.split('/')\n dataset = ''\n if ( primary_ds_name == '%' ):\n primary_ds_name = ''\n if( processed_ds_name == '%' ):\n processed_ds_name = ''\n if ( data_tier_name == '%' ):\n data_tier_name = ''\n\n try:\n dataset_id = int(dataset_id)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"Invalid Input for dataset_id that has to be an int.\",\n self.logger.exception, 'dataset_id has to be an int.')\n if create_by.find('*')!=-1 or create_by.find('%')!=-1 or last_modified_by.find('*')!=-1\\\n or last_modified_by.find('%')!=-1:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"Invalid Input for create_by or last_modified_by.\\\n No wildcard allowed.\", self.logger.exception, 'No wildcards allowed for create_by or last_modified_by')\n try:\n if isinstance(min_cdate, basestring) and ('*' in min_cdate or '%' in min_cdate):\n min_cdate = 0\n else:\n try:\n min_cdate = 
int(min_cdate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for min_cdate\")\n \n if isinstance(max_cdate, basestring) and ('*' in max_cdate or '%' in max_cdate):\n max_cdate = 0\n else:\n try:\n max_cdate = int(max_cdate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for max_cdate\")\n \n if isinstance(min_ldate, basestring) and ('*' in min_ldate or '%' in min_ldate):\n min_ldate = 0\n else:\n try:\n min_ldate = int(min_ldate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for min_ldate\")\n \n if isinstance(max_ldate, basestring) and ('*' in max_ldate or '%' in max_ldate):\n max_ldate = 0\n else:\n try:\n max_ldate = int(max_ldate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for max_ldate\")\n \n if isinstance(cdate, basestring) and ('*' in cdate or '%' in cdate):\n cdate = 0\n else:\n try:\n cdate = int(cdate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for cdate\")\n \n if isinstance(ldate, basestring) and ('*' in ldate or '%' in ldate):\n ldate = 0\n else:\n try:\n ldate = int(ldate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for ldate\")\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listDatasets. %s \\n. 
Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)\n\n detail = detail in (True, 1, \"True\", \"1\", 'true')\n try: \n return self.dbsDataset.listDatasets(dataset, parent_dataset, is_dataset_valid, release_version, pset_hash,\n app_name, output_module_label, global_tag, processing_version, acquisition_era_name, \n run_num, physics_group_name, logical_file_name, primary_ds_name, primary_ds_type, processed_ds_name,\n data_tier_name, dataset_access_type, prep_id, create_by, last_modified_by,\n min_cdate, max_cdate, min_ldate, max_ldate, cdate, ldate, detail, dataset_id)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listdatasets. %s.\\n Exception trace: \\n %s\" % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "This function returns a list of all valid datasets in the DBS instance."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef listDatasetArray(self):\n ret = []\n try :\n body = request.body.read()\n if body:\n data = cjson.decode(body)\n data = validateJSONInputNoCopy(\"dataset\", data, read=True)\n #Because CMSWEB has a 300 seconds responding time. We have to limit the array siz to make sure that\n #the API can be finished in 300 second. \n # YG Nov-05-2015\n max_array_size = 1000\n if ( 'dataset' in data.keys() and isinstance(data['dataset'], list) and len(data['dataset'])>max_array_size)\\\n or ('dataset_id' in data.keys() and isinstance(data['dataset_id'], list) and len(data['dataset_id'])>max_array_size):\n dbsExceptionHandler(\"dbsException-invalid-input\",\n \"The Max list length supported in listDatasetArray is %s.\" %max_array_size, self.logger.exception) \n ret = self.dbsDataset.listDatasetArray(data)\n except cjson.DecodeError as De:\n dbsExceptionHandler('dbsException-invalid-input2', \"Invalid input\", self.logger.exception, str(De))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = \"DBSReaderModel/listDatasetArray. %s \\n Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)\n for item in ret:\n yield item", "response": "API to list datasets in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef listBlocks(self, dataset=\"\", block_name=\"\", data_tier_name=\"\", origin_site_name=\"\",\n logical_file_name=\"\",run_num=-1, min_cdate='0', max_cdate='0',\n min_ldate='0', max_ldate='0', cdate='0', ldate='0', open_for_writing=-1, detail=False):\n\n \"\"\"\n API to list a block in DBS. At least one of the parameters block_name, dataset, data_tier_name or\n logical_file_name are required. If data_tier_name is provided, min_cdate and max_cdate have to be specified and\n the difference in time have to be less than 31 days.\n\n :param block_name: name of the block\n :type block_name: str\n :param dataset: dataset\n :type dataset: str\n :param data_tier_name: data tier\n :type data_tier_name: str\n :param logical_file_name: Logical File Name\n :type logical_file_name: str\n :param origin_site_name: Origin Site Name (Optional)\n :type origin_site_name: str\n :param open_for_writing: Open for Writting (Optional)\n :type open_for_writing: int (0 or 1)\n :param run_num: run_num numbers (Optional). 
Possible format are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...].\n :type run_num: int, list of runs or list of run ranges\n :param min_cdate: Lower limit for the creation date (unixtime) (Optional)\n :type min_cdate: int, str\n :param max_cdate: Upper limit for the creation date (unixtime) (Optional)\n :type max_cdate: int, str\n :param min_ldate: Lower limit for the last modification date (unixtime) (Optional)\n :type min_ldate: int, str\n :param max_ldate: Upper limit for the last modification date (unixtime) (Optional)\n :type max_ldate: int, str\n :param cdate: creation date (unixtime) (Optional)\n :type cdate: int, str\n :param ldate: last modification date (unixtime) (Optional)\n :type ldate: int, str\n :param detail: Get detailed information of a block (Optional)\n :type detail: bool\n :returns: List of dictionaries containing following keys (block_name). If option detail is used the dictionaries contain the following keys (block_id, create_by, creation_date, open_for_writing, last_modified_by, dataset, block_name, file_count, origin_site_name, last_modification_date, dataset_id and block_size)\n :rtype: list of dicts\n\n \"\"\"\n # run_num=1 caused full table scan and CERN DBS reported some of the queries ran more than 50 hours\n # We will disbale all the run_num=1 calls in DBS. Run_num=1 will be OK while logical_file_name is given.\n # YG Jan. 
15 2019\n # \n if (run_num != -1 and logical_file_name ==''):\n for r in parseRunRange(run_num):\n if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long): \n if r == 1 or r == '1':\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception)\n elif isinstance(r, run_tuple):\n if r[0] == r[1]:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBS run range must be apart at least by 1.\",\n self.logger.exception)\n elif r[0] <= 1 <= r[1]:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"Run_num=1 is not a valid input.\",\n self.logger.exception)\n \n dataset = dataset.replace(\"*\", \"%\")\n block_name = block_name.replace(\"*\", \"%\")\n logical_file_name = logical_file_name.replace(\"*\", \"%\")\n origin_site_name = origin_site_name.replace(\"*\", \"%\")\n #\n\tif isinstance(min_cdate, basestring) and ('*' in min_cdate or '%' in min_cdate):\n min_cdate = 0\n else:\n try:\n min_cdate = int(min_cdate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for min_cdate\")\n #\n if isinstance(max_cdate, basestring) and ('*' in max_cdate or '%' in max_cdate):\n max_cdate = 0\n else:\n try:\n max_cdate = int(max_cdate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for max_cdate\")\n #\n if isinstance(min_ldate, basestring) and ('*' in min_ldate or '%' in min_ldate):\n min_ldate = 0\n else:\n try:\n min_ldate = int(min_ldate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for max_cdate\")\n #\n\tif isinstance(max_ldate, basestring) and ('*' in max_ldate or '%' in max_ldate):\n max_ldate = 0\n else:\n try:\n max_ldate = int(max_ldate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for max_ldate\")\n #\n if isinstance(cdate, basestring) and ('*' in cdate or '%' in cdate):\n cdate = 0\n else:\n try:\n cdate = int(cdate)\n except:\n 
dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for cdate\")\n #\n if isinstance(cdate, basestring) and ('*' in ldate or '%' in ldate):\n ldate = 0\n else:\n try:\n ldate = int(ldate)\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"invalid input for ldate\")\n #\n detail = detail in (True, 1, \"True\", \"1\", 'true')\n try:\n b= self.dbsBlock.listBlocks(dataset, block_name, data_tier_name, origin_site_name, logical_file_name,\n run_num, min_cdate, max_cdate, min_ldate, max_ldate, cdate, ldate, open_for_writing, detail)\n\t #for item in b:\n\t\t#yield item\n\t return b\t\n\texcept HTTPError:\n\t raise\t\n\texcept dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listBlocks. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "This method returns a list of all blocks in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listBlockOrigin(self, origin_site_name=\"\", dataset=\"\", block_name=\"\"):\n try:\n return self.dbsBlock.listBlocksOrigin(origin_site_name, dataset, block_name)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listBlocks. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'],\n self.logger.exception, sError)", "response": "API to list blocks in origin_site_name."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef listBlockChildren(self, block_name=\"\"):\n block_name = block_name.replace(\"*\", \"%\")\n try:\n return self.dbsBlock.listBlockChildren(block_name)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listBlockChildren. %s\\n. Exception trace: \\n %s\" % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to list block children."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef listBlockSummaries(self, block_name=\"\", dataset=\"\", detail=False):\n if bool(dataset)+bool(block_name)!=1:\n dbsExceptionHandler(\"dbsException-invalid-input2\",\n dbsExceptionCode[\"dbsException-invalid-input2\"],\n self.logger.exception,\n \"Dataset or block_names must be specified at a time.\")\n\n if block_name and isinstance(block_name, basestring):\n try:\n block_name = [str(block_name)]\n except:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Invalid block_name for listBlockSummaries. \")\n\n for this_block_name in block_name:\n if re.search(\"[*, %]\", this_block_name):\n dbsExceptionHandler(\"dbsException-invalid-input2\",\n dbsExceptionCode[\"dbsException-invalid-input2\"],\n self.logger.exception,\n \"No wildcards are allowed in block_name list\")\n\n if re.search(\"[*, %]\", dataset):\n dbsExceptionHandler(\"dbsException-invalid-input2\",\n dbsExceptionCode[\"dbsException-invalid-input2\"],\n self.logger.exception,\n \"No wildcards are allowed in dataset\")\n data = [] \n try:\n with self.dbi.connection() as conn:\n data = self.dbsBlockSummaryListDAO.execute(conn, block_name, dataset, detail)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listBlockSummaries. %s\\n. Exception trace: \\n %s\" % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error',\n dbsExceptionCode['dbsException-server-error'],\n self.logger.exception,\n sError)\n for item in data:\n yield item", "response": "API that returns total size file count and event counts of a dataset or a list of blocks."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listFiles(self, dataset = \"\", block_name = \"\", logical_file_name = \"\",\n release_version=\"\", pset_hash=\"\", app_name=\"\", output_module_label=\"\",\n run_num=-1, origin_site_name=\"\", lumi_list=\"\", detail=False, validFileOnly=0, sumOverLumi=0):\n \"\"\"\n API to list files in DBS. Either non-wildcarded logical_file_name, non-wildcarded dataset or non-wildcarded block_name is required.\n The combination of a non-wildcarded dataset or block_name with an wildcarded logical_file_name is supported.\n\n * For lumi_list the following two json formats are supported:\n - [a1, a2, a3,]\n - [[a,b], [c, d],]\n\t* lumi_list can be either a list of lumi section numbers as [a1, a2, a3,] or a list of lumi section range as [[a,b], [c, d],]. Thay cannot be mixed.\n * If lumi_list is provided run only run_num=single-run-number is allowed\n\t* When lfn list is present, no run or lumi list is allowed.\n\n *There are five dataset access types: VALID, INVALID, PRODUCTION, DEPRECATED and DELETED. \n * One file status: IS_FILE_VALID: 1 or 0.\n * When a dataset is INVALID/ DEPRECATED/ DELETED, DBS will consider all the files under it is invalid not matter what value is_file_valid has. \n In general, when the dataset is in one of INVALID/ DEPRECATED/ DELETED, is_file_valid should all marked as 0, but some old DBS2 data was not.\n * When Dataset is VALID/PRODUCTION, by default is_file_valid is all 1. But if individual file is invalid, then the file's is_file_valid is set to 0.\n * DBS use this logical in its APIs that have validFileOnly variable.\n \n *There are five dataset access types: VALID, INVALID, PRODUCTION, DEPRECATED and DELETED. \n * One file status: IS_FILE_VALID: 1 or 0.\n * When a dataset is INVALID/ DEPRECATED/ DELETED, DBS will consider all the files under it is invalid not matter what value is_file_valid has. 
\n In general, when the dataset is in one of INVALID/ DEPRECATED/ DELETED, is_file_valid should all marked as 0, but some old DBS2 data was not.\n * When Dataset is VALID/PRODUCTION, by default is_file_valid is all 1. But if individual file is invalid, then the file's is_file_valid is set to 0.\n * DBS use this logical in its APIs that have validFileOnly variable.\n\n :param logical_file_name: logical_file_name of the file\n :type logical_file_name: str\n :param dataset: dataset\n :type dataset: str\n :param block_name: block name\n :type block_name: str\n :param release_version: release version\n :type release_version: str\n :param pset_hash: parameter set hash\n :type pset_hash: str\n :param app_name: Name of the application\n :type app_name: str\n :param output_module_label: name of the used output module\n :type output_module_label: str\n :param run_num: run , run ranges, and run list. Possible format are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...].\n :type run_num: int, list, string\n :param origin_site_name: site where the file was created\n :type origin_site_name: str\n :param lumi_list: List containing luminosity sections\n :type lumi_list: list\n :param detail: Get detailed information about a file\n :type detail: bool\n :param validFileOnly: default=0 return all the files. when =1, only return files with is_file_valid=1 or dataset_access_type=PRODUCTION or VALID\n :type validFileOnly: int\n :param sumOverLumi: default=0 event_count is the event_count/file. When sumOverLumi=1 and run_num is specified, the event_count is sum of the event_count/lumi for that run; When sumOverLumi = 1, no other input can be a list, for example no run_num list, lumi list or lfn list.\n :type sumOverLumi: int\n :returns: List of dictionaries containing the following keys (logical_file_name). 
If detail parameter is true, the dictionaries contain the following keys (check_sum, branch_hash_id, adler32, block_id, event_count, file_type, create_by, logical_file_name, creation_date, last_modified_by, dataset, block_name, file_id, file_size, last_modification_date, dataset_id, file_type_id, auto_cross_section, md5, is_file_valid)\n :rtype: list of dicts\n\n \"\"\"\n logical_file_name = logical_file_name.replace(\"*\", \"%\")\n release_version = release_version.replace(\"*\", \"%\")\n pset_hash = pset_hash.replace(\"*\", \"%\")\n app_name = app_name.replace(\"*\", \"%\")\n block_name = block_name.replace(\"*\", \"%\")\n origin_site_name = origin_site_name.replace(\"*\", \"%\")\n dataset = dataset.replace(\"*\", \"%\")\n #\n # run_num=1 caused full table scan and CERN DBS reported some of the queries ran more than 50 hours\n # We will disbale all the run_num=1 calls in DBS. Run_num=1 will be OK when logical_file_name is given.\n # YG Jan. 15 2019\n #\n if (run_num != -1 and logical_file_name ==''):\n for r in parseRunRange(run_num):\n if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long): \n if r == 1 or r == '1':\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception)\n elif isinstance(r, run_tuple):\n if r[0] == r[1]:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBS run range must be apart at least by 1.\",\n self.logger.exception)\n elif r[0] <= 1 <= r[1]:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception)\n if lumi_list:\n if run_num ==-1 or not run_num :\n dbsExceptionHandler(\"dbsException-invalid-input\", \"When lumi_list is given, require a single run_num.\", self.logger.exception)\n elif sumOverLumi == 1:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"lumi_list and sumOverLumi=1 cannot be set at the same time becaue nesting of WITH clause within WITH clause not supported yet by Oracle. 
\", self.logger.exception)\n else:\n try:\n lumi_list = self.dbsUtils2.decodeLumiIntervals(lumi_list)\n except Exception as de:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Invalid lumi_list input: \"+ str(de), self.logger.exception)\n else:\n if not isinstance(run_num, list):\n if run_num ==1 or run_num == '1':\n dbsExceptionHandler(\"dbsException-invalid-input\", \"files API does not supprt run_num=1 when no lumi.\", self.logger.exception)\n else:\n if 1 in run_num or '1' in run_num :\n dbsExceptionHandler(\"dbsException-invalid-input\", \"files API does not supprt run_num=1 when no lumi.\", self.logger.exception)\n if int(sumOverLumi) == 1 and (isinstance(run_num, list) or isinstance(logical_file_name, list)):\n dbsExceptionHandler(\"dbsException-invalid-input\", \"When sumOverLumi=1, no lfn list or run_num list allowed becaue nesting of WITH clause within WITH clause not supported yet by Oracle. \", self.logger.exception)\n detail = detail in (True, 1, \"True\", \"1\", 'true')\n output_module_label = output_module_label.replace(\"*\", \"%\")\n try:\n result = self.dbsFile.listFiles(dataset, block_name, logical_file_name, release_version, pset_hash, app_name,\n output_module_label, run_num, origin_site_name, lumi_list, detail, \n validFileOnly, sumOverLumi)\n \t for item in result:\n\t\tyield item\t\n\texcept HTTPError as he:\n\t raise he\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listFiles. %s \\n Exception trace: \\n %s\" % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', ex.message,\n self.logger.exception, sError)", "response": "This is a helper method to list files in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listFileSummaries(self, block_name='', dataset='', run_num=-1, validFileOnly=0, sumOverLumi=0):\n # run_num=1 caused full table scan and CERN DBS reported some of the queries ran more than 50 hours\n # We will disbale all the run_num=1 calls in DBS.\n # YG Jan. 16 2019\n #\n if (run_num != -1) :\n for r in parseRunRange(run_num):\n if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long): \n if r == 1 or r == '1':\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception)\n elif isinstance(r, run_tuple):\n if r[0] == r[1]:\n dbsExceptionHandler('dbsException-invalid-input', \"DBS run range must be apart at least by 1.\",\n self.logger.exception)\n elif r[0] <= 1 <= r[1]:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception)\n try:\n r = self.dbsFile.listFileSummary(block_name, dataset, run_num, validFileOnly=validFileOnly, sumOverLumi=sumOverLumi)\n\t for item in r:\n\t\tyield item\t\n\texcept HTTPError as he:\n\t raise he\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n\texcept HTTPerror as he:\n\t raise he\n except Exception as ex:\n sError = \"DBSReaderModel/listFileSummaries. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', ex.message, self.logger.exception, sError)", "response": "This function returns a list of dicts containing the number of files event counts and number of lumis in a given block or dataset."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listDatasetParents(self, dataset=''):\n try:\n return self.dbsDataset.listDatasetParents(dataset)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listDatasetParents. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to list Dataset Parents in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef listFileParents(self, logical_file_name='', block_id=0, block_name=''):\n try:\n r = self.dbsFile.listFileParents(logical_file_name, block_id, block_name)\n\t for item in r:\n\t\tyield item\t\n except HTTPError as he:\n\t raise he\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listFileParents. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', ex.message, self.logger.exception, sError)", "response": "API to list file parents of a file"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef listFileParentsByLumi(self):\n try :\n body = request.body.read()\n if body:\n data = cjson.decode(body)\n data = validateJSONInputNoCopy('file_parent_lumi', data, read=True)\n else:\n data = {}\n\n #Because CMSWEB has a 300 seconds responding time. We have to limit the array siz to make sure that\n #the API can be finished in 300 second. \n max_array_size = 1000\n if ('logical_file_name' in data.keys() and isinstance(data['logical_file_name'], list) and len(data['logical_file_name'])>max_array_size):\n dbsExceptionHandler(\"dbsException-invalid-input\",\n \"The Max list length supported in listFilePArentsByLumi is %s.\" %max_array_size, self.logger.exception)\n\n lfn = []\n if \"block_name\" not in data.keys():\n dbsExceptionHandler('dbsException-invalid-input', \"block_name is required for fileparentsbylumi\")\n else:\n if \"logical_file_name\" in data.keys():\n lfn = data[\"logical_file_name\"]\n result = self.dbsFile.listFileParentsByLumi(block_name=data[\"block_name\"], logical_file_name=lfn)\n for r in result:\n yield r\n except cjson.DecodeError as De:\n dbsExceptionHandler('dbsException-invalid-input2', \"Invalid input\", self.logger.exception, str(De))\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except HTTPError as he:\n raise he\n except Exception as ex:\n sError = \"DBSReaderModel/listFileParentsByLumi. %s \\n Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', ex.message, self.logger.exception, sError)", "response": "This method returns a list of file parents for a given block with or w/o a list of LFN."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef listFileChildren(self, logical_file_name='', block_name='', block_id=0):\n if isinstance(logical_file_name, list):\n for f in logical_file_name:\n if '*' in f or '%' in f:\n dbsExceptionHandler(\"dbsException-invalid-input2\", dbsExceptionCode[\"dbsException-invalid-input2\"], self.logger.exception, \"No \\\n wildcard allow in LFN list\" )\n\n try:\n return self.dbsFile.listFileChildren(logical_file_name, block_name, block_id)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listFileChildren. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to list file children. One of the parameters in mandatory."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef listFileLumis(self, logical_file_name=\"\", block_name=\"\", run_num=-1, validFileOnly=0):\n # run_num=1 caused full table scan and CERN DBS reported some of the queries ran more than 50 hours\n # We will disbale all the run_num=1 calls in DBS. Run_num=1 will be OK when logical_file_name is given.\n # YG Jan. 16 2019\n if (run_num != -1 and logical_file_name ==''):\n for r in parseRunRange(run_num):\n if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long): \n if r == 1 or r == '1':\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception)\n elif isinstance(r, run_tuple):\n if r[0] == r[1]:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"DBS run range must be apart at least by 1.\",self.logger.exception)\n elif r[0] <= 1 <= r[1]:\n dbsExceptionHandler(\"dbsException-invalid-input\", \"Run_num=1 is not a valid input.\",\n self.logger.exception) \n try:\n return self.dbsFile.listFileLumis(logical_file_name, block_name, run_num, validFileOnly )\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listFileLumis. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to list Lumi for files."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listDataTypes(self, datatype=\"\", dataset=\"\"):\n try:\n return self.dbsDataType.listDataType(dataType=datatype, dataset=dataset)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listDataTypes. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to list data types known to DBS."}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef listDatasetAccessTypes(self, dataset_access_type=''):\n if dataset_access_type:\n dataset_access_type = dataset_access_type.replace(\"*\", \"%\")\n try:\n return self.dbsDatasetAccessType.listDatasetAccessTypes(dataset_access_type)\n except dbsException as de:\n dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError)\n except Exception as ex:\n sError = \"DBSReaderModel/listDatasetAccessTypes. %s\\n. Exception trace: \\n %s\" \\\n % (ex, traceback.format_exc())\n dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)", "response": "API to list dataset access types."}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef list():\n entries = lambder.list_events()\n for e in entries:\n click.echo(str(e))", "response": "List all events."}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef add(name, function_name, cron):\n lambder.add_event(name=name, function_name=function_name, cron=cron)", "response": "Add an event that triggers a lambda function on a cron schedule."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nloading events from a json file", "response": "def load(file):\n \"\"\" Load events from a json file \"\"\"\n with open(file, 'r') as f:\n contents = f.read()\n lambder.load_events(contents)"}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef functions(context):\n # find lambder.json in CWD\n config_file = \"./lambder.json\"\n if os.path.isfile(config_file):\n context.obj = FunctionConfig(config_file)\n pass", "response": "Manage AWS Lambda functions"}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef new(\n name,\n bucket,\n timeout,\n memory,\n description,\n subnet_ids,\n security_group_ids\n):\n \"\"\" Create a new lambda project \"\"\"\n config = {}\n if timeout:\n config['timeout'] = timeout\n if memory:\n config['memory'] = memory\n if description:\n config['description'] = description\n if subnet_ids:\n config['subnet_ids'] = subnet_ids\n if security_group_ids:\n config['security_group_ids'] = security_group_ids\n\n lambder.create_project(name, bucket, config)", "response": "Create a new lambda project"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef deploy(\n config,\n name,\n bucket,\n timeout,\n memory,\n description,\n subnet_ids,\n security_group_ids\n):\n \"\"\" Deploy/Update a function from a project directory \"\"\"\n # options should override config if it is there\n myname = name or config.name\n mybucket = bucket or config.bucket\n mytimeout = timeout or config.timeout\n mymemory = memory or config.memory\n mydescription = description or config.description\n mysubnet_ids = subnet_ids or config.subnet_ids\n mysecurity_group_ids = security_group_ids or config.security_group_ids\n\n vpc_config = {}\n if mysubnet_ids and mysecurity_group_ids:\n vpc_config = {\n 'SubnetIds': mysubnet_ids.split(','),\n 'SecurityGroupIds': mysecurity_group_ids.split(',')\n }\n\n click.echo('Deploying {} to {}'.format(myname, mybucket))\n lambder.deploy_function(\n myname,\n mybucket,\n mytimeout,\n mymemory,\n mydescription,\n vpc_config\n )", "response": "Deploy a function from a project directory"}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ndeletes a lambda function, its role, and its zipfile", "response": "def rm(config, name, bucket):\n \"\"\" Delete lambda function, role, and zipfile \"\"\"\n # options should override config if it is there\n myname = name or config.name\n mybucket = bucket or config.bucket\n\n click.echo('Deleting {} from {}'.format(myname, mybucket))\n lambder.delete_function(myname, mybucket)"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef invoke(config, name, input):\n # options should override config if it is there\n myname = name or config.name\n\n click.echo('Invoking ' + myname)\n output = lambder.invoke_function(myname, input)\n click.echo(output)", "response": "Invoke a function in AWS"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef putBlock(self, blockcontent, migration=False):\n #YG\n try:\n #1 insert configuration\n self.logger.debug(\"insert configuration\")\n configList = self.insertOutputModuleConfig(\n blockcontent['dataset_conf_list'], migration)\n #2 insert dataset\n self.logger.debug(\"insert dataset\")\n datasetId = self.insertDataset(blockcontent, configList, migration)\n #3 insert block & files\n self.logger.debug(\"insert block & files.\")\n self.insertBlockFile(blockcontent, datasetId, migration)\n except KeyError as ex:\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"DBSBlockInsert/putBlock: \\\n KeyError exception: %s. \" %ex.args[0], self.logger.exception, \n\t \"DBSBlockInsert/putBlock: KeyError exception: %s. \" %ex.args[0]\t)\n except Exception as ex:\n raise", "response": "Insert the data in sereral steps and commit when each step finishes or rollback when there is a problem."}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ninsert the output module config into the database.", "response": "def insertOutputModuleConfig(self, remoteConfig, migration=False):\n \"\"\"\n Insert Release version, application, parameter set hashes and the map(output module config).\n\n \"\"\"\n otptIdList = []\n missingList = []\n conn = self.dbi.connection()\n try:\n for c in remoteConfig:\n cfgid = self.otptModCfgid.execute(conn, app = c[\"app_name\"],\n release_version = c[\"release_version\"],\n pset_hash = c[\"pset_hash\"],\n output_label = c[\"output_module_label\"],\n global_tag=c['global_tag'])\n if cfgid <= 0 :\n missingList.append(c)\n else:\n key = (c['app_name'] + ':' + c['release_version'] + ':' +\n c['pset_hash'] + ':' +\n c['output_module_label'] + ':' + c['global_tag'])\n self.datasetCache['conf'][key] = cfgid\n otptIdList.append(cfgid)\n #print \"About to set cfgid: %s\" % str(cfgid)\n except KeyError as ex:\n if conn:conn.close()\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"DBSBlockInsert/insertOutputModuleConfig: \\\n KeyError exception: %s. \" %ex.args[0], self.logger.exception,\n\t \"DBSBlockInsert/insertOutputModuleConfig: KeyError exception: %s. \" %ex.args[0]\t)\n except Exception as ex:\n if conn:conn.close()\n raise\n\n if len(missingList)==0:\n if conn:conn.close()\n return otptIdList\n #Now insert the missing configs\n try:\n #tran = conn.begin()\n for m in missingList:\n # Start a new transaction\n # This is to see if we can get better results\n # by committing early if we're submitting\n # multiple blocks with similar features\n tran = conn.begin()\n #Now insert the config\n # Sort out the mess\n # We're having some problems with different threads\n # committing different pieces at the same time\n # This makes the output module config ID wrong\n # Trying to catch this via exception handling on duplication\n # Start a new transaction\n #global_tag is now required. 
YG 03/08/2011\n try:\n cfgid = 0\n if not migration:\n m['create_by'] = dbsUtils().getCreateBy()\n m['creation_date'] = dbsUtils().getTime()\n configObj = {\"release_version\": m[\"release_version\"],\n \"pset_hash\": m[\"pset_hash\"], \"pset_name\":m.get('pset_name', None),\n \"app_name\": m[\"app_name\"],\n 'output_module_label' : m['output_module_label'],\n 'global_tag' : m['global_tag'],\n 'scenario' : m.get('scenario', None),\n 'creation_date' : m['creation_date'],\n 'create_by':m['create_by']\n }\n self.otptModCfgin.execute(conn, configObj, tran)\n tran.commit()\n tran = None\n except KeyError as ex:\n if tran:tran.rollback()\n if conn:conn.close()\n dbsExceptionHandler(\"dbsException-invalid-input2\", \"DBSBlockInsert/insertOutputModuleConfig: \\\n KeyError exception: %s. \" %ex.args[0],\n\t\t\t\t\t self.logger.exception, \n\t\t\t\t\t\"DBSBlockInsert/insertOutputModuleConfig: KeyError exception: %s. \" %ex.args[0])\n except exceptions.IntegrityError as ex:\n #Another job inserted it just 1/100000 second earlier than\n #you!! YG 11/17/2010\n if str(ex).find(\"ORA-00001\") != -1 or str(ex).lower().find(\"duplicate\") !=-1:\n if str(ex).find(\"TUC_OMC_1\") != -1:\n #the config is already in db, get the ID later\n pass\n else:\n #reinsert it if one or two or three of the three attributes (vresion, hash and app) are inserted\n #just 1/100000 second eailer.\n try:\n self.otptModCfgin.execute(conn, configObj, tran)\n tran.commit()\n tran = None\n except exceptions.IntegrityError as ex:\n if (str(ex).find(\"ORA-00001\") != -1 and str(ex).find(\"TUC_OMC_1\"))\\\n or str(ex).lower().find(\"duplicate\") != -1:\n pass\n else:\n if tran:tran.rollback()\n if conn:conn.close()\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid data when insert Configure. ',\n self.logger.exception,\n 'Invalid data when insert Configure. 
'+ str(ex))\n elif str(ex).find(\"ORA-01400\") > -1:\n if tran:tran.rollback()\n if conn:conn.close()\n dbsExceptionHandler(\"dbsException-missing-data\", \"Missing data when inserting Configure. \", \n\t\t\t\tself.logger.exception, str(ex))\n else:\n if tran:tran.rollback()\n if conn:conn.close()\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid data when insert Configure. ',\n self.logger.exception,\n 'Invalid data when insert Configure. '+ str(ex))\n except exceptions as ex3:\n if tran:tran.rollback()\n if conn:conn.close()\n raise ex3\n cfgid = self.otptModCfgid.execute(conn,\n app = m[\"app_name\"],\n release_version = m[\"release_version\"],\n pset_hash = m[\"pset_hash\"],\n output_label = m[\"output_module_label\"],\n global_tag=m['global_tag'])\n otptIdList.append(cfgid)\n key = (m['app_name'] + ':' + m['release_version'] + ':' +\n m['pset_hash'] + ':' +m['output_module_label'] + ':' +\n m['global_tag'])\n self.datasetCache['conf'][key] = cfgid\n finally:\n if tran:tran.rollback()\n if conn:conn.close()\n return otptIdList"}
{"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\ninsert a new dataset and only the ones that are not already in the database.", "response": "def insertDatasetWOannex(self, dataset, blockcontent, otptIdList, conn,\n insertDataset = True, migration = False):\n \"\"\"\n _insertDatasetOnly_\n\n Insert the dataset and only the dataset\n Meant to be called after everything else is put into place.\n\n The insertDataset flag is set to false if the dataset already exists\n \"\"\"\n\n tran = conn.begin()\n try:\n #8 Finally, we have everything to insert a dataset\n if insertDataset:\n # Then we have to get a new dataset ID\n dataset['dataset_id'] = self.datasetid.execute(conn,\n dataset['dataset'])\n if dataset['dataset_id'] <= 0:\n dataset['dataset_id'] = self.sm.increment(conn, \"SEQ_DS\")\n if not migration:\n dataset['last_modified_by'] = dbsUtils().getCreateBy()\n dataset['create_by'] = dbsUtils().getCreateBy()\n dataset['creation_date'] = dataset.get('creation_date', dbsUtils().getTime())\n dataset['last_modification_date'] = dataset.get('last_modification_date', dbsUtils().getTime())\n dataset['xtcrosssection'] = dataset.get('xtcrosssection', None)\n dataset['prep_id'] = dataset.get('prep_id', None)\n try:\n self.datasetin.execute(conn, dataset, tran)\n except exceptions.IntegrityError as ei:\n if str(ei).find(\"ORA-00001\") != -1 or str(ei).lower().find(\"duplicate\") !=-1:\n if conn.closed:\n conn = self.dbi.connection()\n dataset['dataset_id'] = self.datasetid.execute(conn, dataset['dataset'])\n if dataset['dataset_id'] <= 0:\n if tran:tran.rollback()\n if conn:conn.close()\n dbsExceptionHandler('dbsException-conflict-data',\n 'Dataset/[processed DS]/[dataset access type] not yet inserted by concurrent insert. ',\n self.logger.exception,\n 'Dataset/[processed DS]/[dataset access type] not yet inserted by concurrent insert. 
'+ str(ei))\n elif str(ei).find(\"ORA-01400\") > -1:\n if tran:tran.rollback()\n if conn:conn.close()\n dbsExceptionHandler('dbsException-missing-data',\n 'Missing data when insert Datasets. ',\n self.logger.exception,\n 'Missing data when insert Datasets. '+ str(ei))\n else:\n if tran: tran.rollback()\n if conn: conn.close()\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid data when insert Datasets. ',\n self.logger.exception,\n 'Invalid data when insert Datasets. '+ str(ei))\n\n except Exception:\n #should catch all above exception to rollback. YG Jan 17, 2013\n if tran:tran.rollback()\n if conn:conn.close()\n raise\n\n #9 Fill Dataset Parentage\n #All parentage are deduced from file parentage.\n\n #10 Before we commit, make dataset and output module configuration\n #mapping. We have to try to fill the map even if dataset is\n #already in dest db\n for c in otptIdList:\n try:\n dcObj = {\n 'dataset_id' : dataset['dataset_id'],\n 'output_mod_config_id' : c }\n self.dcin.execute(conn, dcObj, tran)\n except exceptions.IntegrityError as ei:\n #FIXME YG 01/17/2013\n if (str(ei).find(\"ORA-00001\") != -1 and str(ei).find(\"TUC_DC_1\") != -1) or \\\n str(ei).lower().find(\"duplicate\")!=-1:\n #ok, already in db\n #FIXME: What happens when there are partially in db?\n #YG 11/17/2010\n pass\n else:\n if tran:tran.rollback()\n if conn:conn.close()\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid data when insert dataset_configs. ',\n self.logger.exception,\n 'Invalid data when insert dataset_configs. '+ str(ei))\n except Exception as ex:\n if tran:tran.rollback()\n if conn:conn.close()\n raise\n #Now commit everything.\n tran.commit()\n except exceptions.IntegrityError as ei:\n # Then is it already in the database?\n # Not really. We have to check it again. 
YG Jan 17, 2013\n # we don't check the unique key here, since there are more than one unique key might\n # be violated: such as data_tier, processed_dataset, dataset_access_types.\n if str(ei).find(\"ORA-00001\") != -1 or str(ei).lower().find(\"duplicate\")!=-1:\n # For now, we assume most cases are the same dataset was instered by different thread. If not,\n # one has to call the insert dataset again. But we think this is a rare case and let the second\n # DBSBlockInsert call fix it if it happens.\n if conn.closed:\n conn = self.dbi.connection()\n dataset_id = self.datasetid.execute(conn, dataset['dataset'])\n if dataset_id <= 0:\n dbsExceptionHandler('dbsException-conflict-data',\n 'Dataset not yet inserted by concurrent insert',\n self.logger.exception,\n 'Dataset not yet inserted by concurrent insert')\n\n else:\n dataset['dataset_id'] = dataset_id\n else:\n if tran:tran.rollback()\n if conn:conn.close()\n dbsExceptionHandler('dbsException-invalid-input2',\n 'Invalid data when insert Datasets. ',\n self.logger.exception,\n 'Invalid data when insert Datasets. '+ str(ei))\n except Exception as ex:\n if tran:tran.rollback()\n if conn:conn.close()\n raise\n finally:\n if tran:tran.rollback()\n if conn:conn.close()\n return dataset['dataset_id']"}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nreturning a list of all the sites in the database.", "response": "def listSites(self, block_name=\"\", site_name=\"\"):\n \"\"\"\n Returns sites.\n \"\"\"\n try:\n conn = self.dbi.connection()\n if block_name:\n result = self.blksitelist.execute(conn, block_name)\n else:\n result = self.sitelist.execute(conn, site_name)\n return result\n finally:\n if conn:\n conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ninserts a new site into the database.", "response": "def insertSite(self, businput):\n \"\"\"\n Input dictionary has to have the following keys:\n site_name\n it builds the correct dictionary for dao input and executes the dao\n \"\"\"\n conn = self.dbi.connection()\n tran = conn.begin()\n try:\n siteobj = { # FIXME: unused?\n \"site_name\" : businput[\"site_name\"]\n }\n businput[\"site_id\"] = self.sm.increment(conn, \"SEQ_SI\", tran)\n self.sitein.execute(conn, businput, tran)\n tran.commit()\n except Exception as ex:\n if (str(ex).lower().find(\"unique constraint\") != -1 or\n str(ex).lower().find(\"duplicate\") != -1):\n # already exists, lets fetch the ID\n self.logger.warning(\"Ignoring unique constraint violation\")\n self.logger.warning(ex)\n else:\n if tran:\n tran.rollback()\n self.logger.exception(ex)\n raise\n finally:\n if tran:\n tran.close()\n if conn:\n conn.close()"}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function to\ncreate the DAS mapping for the current user.", "response": "def _create_das_mapping(self):\n \"\"\"\n das_map = {'lookup' : [{params : {'param1' : 'required', 'param2' : 'optional', 'param3' : 'default_value' ...},\n url : 'https://cmsweb.cern.ch:8443/dbs/prod/global/DBSReader/acquisitioneras/',\n das_map : {'das_param1' : dbs_param1, ...}\n }]\n }\n \"\"\"\n with open(self._mapfile, 'r') as f:\n for entry in yaml.load_all(f):\n das2dbs_param_map = {}\n if 'lookup' not in entry:\n continue\n for param_map in entry['das_map']:\n if 'api_arg' in param_map:\n das2dbs_param_map[param_map['das_key']] = param_map['api_arg']\n\n self._das_map.setdefault(entry['lookup'], []).append({'params' : entry['params'],\n 'url' : entry['url'],\n 'das2dbs_param_map' : das2dbs_param_map})"}
{"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nreturn an iterator of the list of the specified size", "response": "def slicedIterator(sourceList, sliceSize):\n \"\"\"\n :param: sourceList: list which need to be sliced\n :type: list\n :param: sliceSize: size of the slice\n :type: int\n :return: iterator of the sliced list\n \"\"\"\n start = 0\n end = 0\n\n while len(sourceList) > end:\n end = start + sliceSize\n yield sourceList[start: end]\n start = end"}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nchecking input parameters for the API call.", "response": "def checkInputParameter(method, parameters, validParameters, requiredParameters=None):\n \"\"\"\n Helper function to check input by using before sending to the server\n\n :param method: Name of the API\n :type method: str\n :param validParameters: Allow parameters for the API call\n :type validParameters: list\n :param requiredParameters: Required parameters for the API call (Default: None)\n :type requiredParameters: list\n\n \"\"\"\n for parameter in parameters:\n if parameter not in validParameters:\n raise dbsClientException(\"Invalid input\",\n \"API %s does not support parameter %s. Supported parameters are %s\" \\\n % (method, parameter, validParameters))\n\n if requiredParameters is not None:\n if 'multiple' in requiredParameters:\n match = False\n for requiredParameter in requiredParameters['multiple']:\n if requiredParameter!='detail' and requiredParameter in parameters:\n match = True\n break\n if not match:\n raise dbsClientException(\"Invalid input\",\n \"API %s does require one of the parameters %s\" \\\n % (method, requiredParameters['multiple']))\n\n if 'forced' in requiredParameters:\n for requiredParameter in requiredParameters['forced']:\n if requiredParameter not in parameters:\n raise dbsClientException(\"Invalid input\",\n \"API %s does require the parameter %s. Forced required parameters are %s\" \\\n % (method, requiredParameter, requiredParameters['forced']))\n\n if 'standalone' in requiredParameters:\n overlap = []\n for requiredParameter in requiredParameters['standalone']:\n if requiredParameter in parameters:\n overlap.append(requiredParameter)\n if len(overlap) != 1:\n raise dbsClientException(\"Invalid input\",\n \"API %s does requires only *one* of the parameters %s.\" \\\n % (method, requiredParameters['standalone']))"}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef list_parameter_splitting(data, key, size_limit=8000, method='GET'):\n values = list(data[key])\n data[key] = []\n\n for element in values:\n data[key].append(element)\n if method =='GET':\n size = len(urllib.urlencode(data))\n else:\n size = len(data)\n if size > size_limit:\n last_element = data[key].pop()\n yield data\n data[key] = [last_element]\n\n yield data", "response": "Helper function to split list of url parameters into list of lists of url parameters."}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef __callServer(self, method=\"\", params={}, data={}, callmethod='GET', content='application/json'):\n UserID = os.environ['USER']+'@'+socket.gethostname()\n try:\n UserAgent = \"DBSClient/\"+os.environ['DBS3_CLIENT_VERSION']+\"/\"+ self.userAgent\n except:\n UserAgent = \"DBSClient/Unknown\"+\"/\"+ self.userAgent\n request_headers = {\"Content-Type\": content, \"Accept\": content, \"UserID\": UserID, \"User-Agent\":UserAgent }\n\n method_func = getattr(self.rest_api, callmethod.lower())\n\n data = cjson.encode(data)\n\n try:\n self.http_response = method_func(self.url, method, params, data, request_headers)\n except HTTPError as http_error:\n self.__parseForException(http_error)\n\n if content != \"application/json\":\n return self.http_response.body\n\n try:\n json_ret=cjson.decode(self.http_response.body)\n except cjson.DecodeError:\n print(\"The server output is not a valid json, most probably you have a typo in the url.\\n%s.\\n\" % self.url, file=sys.stderr)\n raise dbsClientException(\"Invalid url\", \"Possible urls are %s\" %self.http_response.body)\n\n return json_ret", "response": "This method is used to make a HTTP call to the DBS Server."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nparse the response from the server to raise an exception.", "response": "def __parseForException(self, http_error):\n \"\"\"\n An internal method, should not be used by clients\n\n :param httperror: Thrown httperror by the server\n \"\"\"\n data = http_error.body\n try:\n if isinstance(data, str):\n data = cjson.decode(data)\n except:\n raise http_error\n\n if isinstance(data, dict) and 'exception' in data:# re-raise with more details\n raise HTTPError(http_error.url, data['exception'], data['message'], http_error.header, http_error.body)\n\n raise http_error"}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns the time needed to process the request by the frontend server in microseconds and the EPOC timestamp of the request.", "response": "def requestTimingInfo(self):\n \"\"\"\n Returns the time needed to process the request by the frontend server in microseconds\n and the EPOC timestamp of the request in microseconds.\n\n :rtype: tuple containing processing time and timestamp\n \"\"\"\n try:\n return tuple(item.split('=')[1] for item in self.http_response.header.get('CMS-Server-Time').split())\n except AttributeError:\n return None, None"}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listFileParentsByLumi(self, **kwargs):\n validParameters = ['block_name', 'logical_file_name']\n\n requiredParameters = {'forced': ['block_name']}\n checkInputParameter(method=\"listFileParentsByLumi\", parameters=kwargs.keys(), validParameters=validParameters,\n requiredParameters=requiredParameters)\n return self.__callServer(\"fileparentsbylumi\", data=kwargs, callmethod='POST')", "response": "This method returns a list of dicts containing the cid and pid of the files who s parents need to be found under the specified lumi section info."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef listBlockParents(self, **kwargs):\n validParameters = ['block_name']\n\n requiredParameters = {'forced': validParameters}\n checkInputParameter(method=\"listBlockParents\", parameters=kwargs.keys(), validParameters=validParameters,\n requiredParameters=requiredParameters)\n if isinstance(kwargs[\"block_name\"], list):\n return self.__callServer(\"blockparents\", data=kwargs, callmethod='POST')\n else:\n return self.__callServer(\"blockparents\", params=kwargs)", "response": "API to list block parents."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listBlocks(self, **kwargs):\n validParameters = ['dataset', 'block_name', 'data_tier_name', 'origin_site_name',\n 'logical_file_name', 'run_num', 'open_for_writing', 'min_cdate',\n 'max_cdate', 'min_ldate', 'max_ldate',\n 'cdate', 'ldate', 'detail']\n\n #requiredParameters = {'multiple': validParameters}\n requiredParameters = {'multiple': ['dataset', 'block_name', 'data_tier_name', 'logical_file_name']}\n\n #set defaults\n if 'detail' not in kwargs.keys():\n kwargs['detail'] = False\n\n checkInputParameter(method=\"listBlocks\", parameters=kwargs.keys(), validParameters=validParameters,\n requiredParameters=requiredParameters)\n\n return self.__callServer(\"blocks\", params=kwargs)", "response": "This method returns a list of all the blocks in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef listDatasets(self, **kwargs):\n validParameters = ['dataset', 'parent_dataset', 'is_dataset_valid',\n 'release_version', 'pset_hash', 'app_name',\n 'output_module_label', 'processing_version', 'acquisition_era_name',\n 'run_num', 'physics_group_name', 'logical_file_name',\n 'primary_ds_name', 'primary_ds_type', 'processed_ds_name', 'data_tier_name',\n 'dataset_access_type', 'prep_id', 'create_by', 'last_modified_by',\n 'min_cdate', 'max_cdate', 'min_ldate', 'max_ldate', 'cdate', 'ldate',\n 'detail', 'dataset_id']\n\n #set defaults\n if 'detail' not in kwargs.keys():\n kwargs['detail'] = False\n\n checkInputParameter(method=\"listDatasets\", parameters=kwargs.keys(), validParameters=validParameters)\n\n return self.__callServer(\"datasets\", params=kwargs)", "response": "This method returns a list of all the datasets in the DBS instance."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef listDatasetArray(self, **kwargs):\n validParameters = ['dataset', 'dataset_access_type', 'detail', 'dataset_id']\n\trequiredParameters = {'multiple': ['dataset', 'dataset_id']}\n\n checkInputParameter(method=\"listDatasetArray\", parameters=kwargs.keys(), validParameters=validParameters,\n requiredParameters=requiredParameters)\n\n #set defaults\n if 'detail' not in kwargs.keys():\n kwargs['detail'] = False\n\n return self.__callServer(\"datasetlist\", data=kwargs, callmethod='POST')", "response": "This method returns a list of datasets in DBS."}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef listFileArray(self, **kwargs):\n validParameters = ['dataset', 'block_name', 'logical_file_name',\n 'release_version', 'pset_hash', 'app_name',\n 'output_module_label', 'run_num',\n 'origin_site_name', 'lumi_list', 'detail', 'validFileOnly', 'sumOverLumi']\n\n requiredParameters = {'multiple': ['dataset', 'block_name', 'logical_file_name']}\n\n #set defaults\n if 'detail' not in kwargs.keys():\n kwargs['detail'] = False\n\n checkInputParameter(method=\"listFileArray\", parameters=kwargs.keys(), validParameters=validParameters,\n requiredParameters=requiredParameters)\n # In order to protect DB and make sure the query can be return in 300 seconds, we limit the length of \n # logical file names, lumi and run num to 1000. These number may be adjusted later if \n # needed. YG May-20-2015.\n\n # CMS has all MC data with run_num=1. It almost is a full table scan if run_num=1 without lfn. So we will request lfn\n # to be present when run_num=1. YG Jan 14, 2016\n if 'logical_file_name' in kwargs.keys() and isinstance(kwargs['logical_file_name'], list)\\\n and len(kwargs['logical_file_name']) > 1:\n if 'run_num' in kwargs.keys() and isinstance(kwargs['run_num'],list) and len(kwargs['run_num']) > 1 :\n raise dbsClientException('Invalid input', 'files API does not supprt two lists: run_num and lfn. ')\n elif 'lumi_list' in kwargs.keys() and kwargs['lumi_list'] and len(kwargs['lumi_list']) > 1 :\n raise dbsClientException('Invalid input', 'files API does not supprt two lists: lumi_lis and lfn. ')\n \n elif 'lumi_list' in kwargs.keys() and kwargs['lumi_list']:\n if 'run_num' not in kwargs.keys() or not kwargs['run_num'] or kwargs['run_num'] ==-1 :\n raise dbsClientException('Invalid input', 'When Lumi section is present, a single run is required. 
')\n else:\n if 'run_num' in kwargs.keys():\n if isinstance(kwargs['run_num'], list):\n if 1 in kwargs['run_num'] or '1' in kwargs['run_num']:\n raise dbsClientException('Invalid input', 'files API does not supprt run_num=1 when no lumi.')\n else:\n if kwargs['run_num']==1 or kwargs['run_num']=='1':\n raise dbsClientException('Invalid input', 'files API does not supprt run_num=1 when no lumi.')\n\n #check if no lfn is given, but run_num=1 is used for searching\n if ('logical_file_name' not in kwargs.keys() or not kwargs['logical_file_name']) and 'run_num' in kwargs.keys():\n if isinstance(kwargs['run_num'], list):\n if 1 in kwargs['run_num'] or '1' in kwargs['run_num']:\n raise dbsClientException('Invalid input', 'files API does not supprt run_num=1 without logical_file_name.')\n else:\n if kwargs['run_num'] == 1 or kwargs['run_num'] == '1':\n raise dbsClientException('Invalid input', 'files API does not supprt run_num=1 without logical_file_name.')\n \n results = []\n mykey = None\n total_lumi_len = 0\n split_lumi_list = []\n max_list_len = 1000 #this number is defined in DBS server\n for key, value in kwargs.iteritems():\n if key == 'lumi_list' and isinstance(kwargs['lumi_list'], list)\\\n and kwargs['lumi_list'] and isinstance(kwargs['lumi_list'][0], list):\n lapp = 0\n l = 0\n sm = []\n for i in kwargs['lumi_list']:\n while i[0]+max_list_len < i[1]:\n split_lumi_list.append([[i[0], i[0]+max_list_len-1]])\n i[0] = i[0] + max_list_len\n else:\n l += (i[1]-i[0]+1)\n if l <= max_list_len:\n sm.append([i[0], i[1]])\n lapp = l #number lumis in sm\n else:\n split_lumi_list.append(sm)\n sm=[]\n sm.append([i[0], i[1]])\n lapp = i[1]-i[0]+1\n if sm:\n split_lumi_list.append(sm)\n elif key in ('logical_file_name', 'run_num', 'lumi_list') and isinstance(value, list) and len(value)>max_list_len:\n mykey =key\n#\n if mykey: \n sourcelist = []\n #create a new list to slice\n sourcelist = kwargs[mykey][:]\n for slice in slicedIterator(sourcelist, max_list_len):\n kwargs[mykey] 
= slice\n results.extend(self.__callServer(\"fileArray\", data=kwargs, callmethod=\"POST\"))\n elif split_lumi_list:\n for item in split_lumi_list:\n kwargs['lumi_list'] = item\n results.extend(self.__callServer(\"fileArray\", data=kwargs, callmethod=\"POST\"))\n else:\n return self.__callServer(\"fileArray\", data=kwargs, callmethod=\"POST\")\n \n #make sure only one dictionary per lfn.\n #Make sure this changes when we move to 2.7 or 3.0\n #http://stackoverflow.com/questions/11092511/python-list-of-unique-dictionaries\n # YG May-26-2015\n return dict((v['logical_file_name'], v) for v in results).values()", "response": "This function lists the files in DBS."}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef listPrimaryDSTypes(self, **kwargs):\n validParameters = ['primary_ds_type', 'dataset']\n\n checkInputParameter(method=\"listPrimaryDSTypes\", parameters=kwargs.keys(), validParameters=validParameters)\n\n return self.__callServer(\"primarydstypes\", params=kwargs)", "response": "This method returns a list of dictionaries containing the primary dataset types and data types for that dataset."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listRuns(self, **kwargs):\n validParameters = ['run_num', 'logical_file_name', 'block_name', 'dataset']\n\n requiredParameters = {'multiple': validParameters}\n\n checkInputParameter(method=\"listRuns\", parameters=kwargs.keys(), validParameters=validParameters,\n requiredParameters=requiredParameters)\n\n return self.__callServer(\"runs\", params=kwargs)", "response": "This method returns a list of all runs in a given block and dataset."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef updateAcqEraEndDate(self, **kwargs):\n validParameters = ['end_date', 'acquisition_era_name']\n\n requiredParameters = {'forced': validParameters}\n\n checkInputParameter(method=\"updateAcqEraEndDate\", parameters=kwargs.keys(), validParameters=validParameters,\n requiredParameters=requiredParameters)\n\n return self.__callServer(\"acquisitioneras\", params=kwargs, callmethod='PUT')", "response": "This method updates the end_date of an acquisition era"}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef execute(self, conn, logical_file_name='', block_name='', run_num=-1, validFileOnly=0, migration=False):\n\tsql = \"\"\n\twheresql = \"\"\n\tlfn_generator = \"\"\n\trun_generator = \"\"\n if logical_file_name and not isinstance(logical_file_name, list):\n binds = {'logical_file_name': logical_file_name}\n if int(validFileOnly) == 0:\n\t\tif migration: #migration always call with single file and include all files no matter valid or not.\n\t\t sql = self.sql + \"\"\" FROM {owner}FILE_LUMIS FL \n\t\t\t\t JOIN {owner}FILES F ON F.FILE_ID = FL.FILE_ID \n\t\t\t\t WHERE F.LOGICAL_FILE_NAME = :logical_file_name \n\t\t\t\t \"\"\".format(owner=self.owner)\n\t\telse:\n\t\t sql = self.sql + \"\"\" , F.LOGICAL_FILE_NAME as LOGICAL_FILE_NAME FROM {owner}FILE_LUMIS FL\n\t\t\t\t JOIN {owner}FILES F ON F.FILE_ID = FL.FILE_ID \n WHERE F.LOGICAL_FILE_NAME = :logical_file_name \n \"\"\".format(owner=self.owner)\n else:\n sql = self.sql + \"\"\" , F.LOGICAL_FILE_NAME as LOGICAL_FILE_NAME FROM {owner}FILE_LUMIS FL \n\t\t\t\t JOIN {owner}FILES F ON F.FILE_ID = FL.FILE_ID\n\t\t\t\t JOIN {owner}DATASETS D ON D.DATASET_ID = F.DATASET_ID\n\t\t\t\t JOIN {owner}DATASET_ACCESS_TYPES DT ON DT.DATASET_ACCESS_TYPE_ID = D.DATASET_ACCESS_TYPE_ID\t\t\n\t\t\t\t WHERE F.IS_FILE_VALID = 1 AND F.LOGICAL_FILE_NAME = :logical_file_name \n\t\t\t\t AND DT.DATASET_ACCESS_TYPE in ('VALID', 'PRODUCTION') \n\t\t\t\t \"\"\".format(owner=self.owner)\n elif logical_file_name and isinstance(logical_file_name, list):\n\t sql = self.sql + \"\"\" , F.LOGICAL_FILE_NAME as LOGICAL_FILE_NAME FROM {owner}FILE_LUMIS FL JOIN {owner}FILES F ON F.FILE_ID = FL.FILE_ID \"\"\".format(owner=self.owner)\t\n lfn_generator, binds = create_token_generator(logical_file_name)\n if int(validFileOnly) == 0:\n wheresql = \"WHERE F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR)\"\n else:\n\t\tsql = sql + \"\"\" JOIN 
{owner}DATASETS D ON D.DATASET_ID = F.DATASET_ID \n\t\t\t\tJOIN {owner}DATASET_ACCESS_TYPES DT ON DT.DATASET_ACCESS_TYPE_ID = D.DATASET_ACCESS_TYPE_ID\n\t\t \"\"\".format(owner=self.owner)\t\t\t\n wheresql = \"\"\" WHERE F.IS_FILE_VALID = 1 AND F.LOGICAL_FILE_NAME in (SELECT TOKEN FROM TOKEN_GENERATOR) \n\t\t\t AND DT.DATASET_ACCESS_TYPE in ('VALID', 'PRODUCTION')\n\t\t\t \"\"\"\n sql = \"{lfn_generator} {sql} {wheresql}\".format(lfn_generator=lfn_generator, sql=sql, wheresql=wheresql)\n elif block_name:\n binds = {'block_name': block_name}\n if int(validFileOnly) == 0:\n sql = self.sql + \"\"\" , F.LOGICAL_FILE_NAME as LOGICAL_FILE_NAME FROM {owner}FILE_LUMIS FL JOIN {owner}FILES F ON F.FILE_ID = FL.FILE_ID \n\t\t\t\t JOIN {owner}BLOCKS B ON B.BLOCK_ID = F.BLOCK_ID \n\t\t\t\t WHERE B.BLOCK_NAME = :block_name\"\"\".format(owner=self.owner)\n else:\n sql = self.sql + \"\"\" , F.LOGICAL_FILE_NAME as LOGICAL_FILE_NAME FROM {owner}FILE_LUMIS FL JOIN {owner}FILES F ON F.FILE_ID = FL.FILE_ID \n\t\t\t\t JOIN {owner}DATASETS D ON D.DATASET_ID = F.DATASET_ID \n\t\t\t\t JOIN {owner}DATASET_ACCESS_TYPES DT ON DT.DATASET_ACCESS_TYPE_ID = D.DATASET_ACCESS_TYPE_ID \n\t\t\t\t JOIN {owner}BLOCKS B ON B.BLOCK_ID = F.BLOCK_ID\n\t\t\t\t WHERE F.IS_FILE_VALID = 1 AND B.BLOCK_NAME = :block_name \n\t\t\t\t AND DT.DATASET_ACCESS_TYPE in ('VALID', 'PRODUCTION')\t\n\t\t\t\t\"\"\".format(owner=self.owner)\n else:\n dbsExceptionHandler('dbsException-invalid-input2', \"FileLumi/List: Either logocal_file_name or block_name must be provided.\", self.logger.exception, \"FileLumi/List: Either logocal_file_name or block_name must be provided.\")\n #\n if run_num != -1:\n run_list = []\n wheresql_run_list=''\n wheresql_run_range=''\n for r in parseRunRange(run_num):\n if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long) or isinstance(r, str):\n run_list.append(str(r))\n if isinstance(r, run_tuple):\n if r[0] == r[1]:\n dbsExceptionHandler('dbsException-invalid-input2', \"DBS 
run range must be apart at least by 1.\",\n\t\t\tself.logger.exception, \"DBS run range must be apart at least by 1.\")\n wheresql_run_range = \" FL.RUN_NUM between :minrun and :maxrun \"\n binds.update({\"minrun\":r[0]})\n binds.update({\"maxrun\":r[1]})\n #\n if run_list:\n\t\tif len(run_list) == 1:\n\t\t wheresql_run_list = \" fl.RUN_NUM = :single_run \"\n\t\t binds.update({\"single_run\": long(run_list[0])})\n\n\t\telse:\n\t\t wheresql_run_list = \" fl.RUN_NUM in (SELECT TOKEN FROM TOKEN_GENERATOR) \"\n run_generator, run_binds = create_token_generator(run_list)\n sql = \"{run_generator}\".format(run_generator=run_generator) + sql\n binds.update(run_binds)\n\n if wheresql_run_range and wheresql_run_list:\n sql += \" and (\" + wheresql_run_range + \" or \" + wheresql_run_list + \" )\"\n elif wheresql_run_range and not wheresql_run_list:\n sql += \" and \" + wheresql_run_range\n elif not wheresql_run_range and wheresql_run_list:\n sql += \" and \" + wheresql_run_list\n self.logger.debug(sql) \n\tself.logger.debug(binds)\n\tif run_generator and lfn_generator:\n\t\tdbsExceptionHandler('dbsException-invalid-input2', \"listFileLumiArray support single list of lfn or run_num. \",\n\t\t\tself.logger.exception, \"listFileLumiArray support single list of lfn or run_num. \")\n cursors = self.dbi.processData(sql, binds, conn, transaction=False, returnCursor=True)\n result=[]\n\tfile_run_lumi={}\n event_ct=False\n for i in cursors:\n result.extend(self.formatCursor(i, size=100))\n #for migration, we need flat format to load the data into another DB.\n #self.logger.error(result) \n if migration:\n #YG 09/2015. 
\n\t for item in result:\n\t\tyield item\n\telse:\n if result and result[0]['event_count']: \n event_ct = True\n\t for i in result:\n\t\tr = i['run_num']\n\t\tf = i['logical_file_name']\n if event_ct:\n file_run_lumi.setdefault((f, r), []).append([i['lumi_section_num'], i['event_count']])\n else:\n file_run_lumi.setdefault((f, r), []).append(i['lumi_section_num'])\n\t for k, v in file_run_lumi.iteritems():\n if event_ct:\n lumi=[]\n event=[]\n for le in v:\n lumi.append(le[0])\n event.append(le[1])\n yield {'logical_file_name':k[0], 'run_num':k[1], 'lumi_section_num':lumi, 'event_count':event}\n else:\n yield {'logical_file_name':k[0], 'run_num':k[1], 'lumi_section_num':v}\n del file_run_lumi\n del result", "response": "Executes the SQL SELECT query."}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nlists all files in a child block.", "response": "def execute(self, conn, child_block_name='', child_lfn_list=[], transaction=False):\n sql = ''\n binds = {}\n child_ds_name = ''\n child_where = ''\n if child_block_name:\n child_ds_name = child_block_name.split('#')[0]\n parent_where = \" where d.dataset = :child_ds_name ))\"\n binds ={\"child_ds_name\": child_ds_name}\n else:\n dbsExceptionHandler('dbsException-invalid-input', \"Missing child block_name for listFileParentsByLumi. \")\n #\n if not child_lfn_list:\n # most use cases \n child_where = \" where b.block_name = :child_block_name )\"\n binds.update({\"child_block_name\": child_block_name})\n sql = \"\"\"\n with\n parents as\n ( \n \"\"\" +\\\n self.parent_sql +\\\n parent_where +\\\n \"\"\"), \n \n \"\"\"+\\\n \"\"\"\n children as\n (\n \"\"\" +\\\n self.child_sql +\\\n child_where +\\\n \"\"\")\n select distinct cid, pid from children c\n inner join parents p on c.R = p.R and c.L = p.L \n \"\"\" \n else:\n # not commom \n child_where = \"\"\" where b.block_name = :child_block_name \n and f.logical_file_name in (SELECT TOKEN FROM TOKEN_GENERATOR) ))\n \"\"\"\n lfn_generator, bind = create_token_generator(child_lfn_list)\n binds.update(bind)\n sql = lfn_generator +\\\n \"\"\"\n with\n parents as\n ( \n \"\"\" +\\\n self.parent_sql +\\\n parent_where +\\\n \"\"\"), \n \n \"\"\"+\\\n \"\"\"\n children as\n (\n \"\"\" +\\\n self.child_sql +\\\n child_where +\\\n \"\"\")\n select distinct cid, pid from children c\n inner join parents p on c.R = p.R and c.L = p.L \n \"\"\"\n print(sql)\n\n\n r = self.dbi.processData(sql, binds, conn, transaction=transaction)\n #print(self.format(r))\n return self.format(r)\n \"\"\"\n cursors = self.dbi.processData(sql, binds, conn, transaction=transaction, returnCursor=True)\n for i in cursors:\n d = self.formatCursor(i, size=100)\n if isinstance(d, list) or isinstance(d, 
GeneratorType):\n for elem in d:\n yield elem\n elif d: \n yield d\n \"\"\""}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef execute(self, conn, run_num=-1, logical_file_name=\"\", block_name=\"\", dataset=\"\", trans=False):\n sql = self.sql\n binds = {}\n\tif logical_file_name and \"%\" not in logical_file_name:\n\t sql += \"\"\" inner join %sFILES FILES on FILES.FILE_ID = FL.FILE_ID\n\t\t WHERE FILES.LOGICAL_FILE_NAME = :logical_file_name\"\"\"%(self.owner)\n\t binds[\"logical_file_name\"] = logical_file_name\n\telif block_name and \"%\" not in block_name:\n sql += \"\"\" inner join %sFILES FILES on FILES.FILE_ID = FL.FILE_ID\n\t\t inner join %sBLOCKS BLOCKS on BLOCKS.BLOCK_ID = FILES.BLOCK_ID\n\t\t WHERE BLOCKS.BLOCK_NAME = :block_name \"\"\"%(self.owner, self.owner)\n binds[\"block_name\"] = block_name\n\telif dataset and \"%\" not in dataset:\n\t sql += \"\"\" inner join %sFILES FILES on FILES.FILE_ID = FL.FILE_ID\n\t inner join %sDATASETS DATASETS on DATASETS.DATASET_ID = FILES.DATASET_ID\n\t WHERE DATASETS.DATASET = :dataset \"\"\"%(self.owner, self.owner)\n\t binds[\"dataset\"] = dataset\n\telse:\n\t pass\n \n\tif run_num != -1:\n andorwhere = (\"WHERE\", \"AND\")[\"WHERE\" in sql]\n run_list = []\n wheresql_run_list = ''\n wheresql_run_range = ''\n #\n for r in parseRunRange(run_num):\n if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long):\n run_list.append(str(r))\n if isinstance(r, run_tuple):\n if r[0] == r[1]:\n dbsExceptionHandler('dbsException-invalid-input', \"DBS run_num range must be apart at least by 1.\", self.logger.exception)\n wheresql_run_range = \" FL.RUN_NUM between :minrun and :maxrun \"\n binds.update({\"minrun\":r[0]})\n binds.update({\"maxrun\":r[1]})\n # \n if run_list:\n wheresql_run_list = \" fl.RUN_NUM in (SELECT TOKEN FROM TOKEN_GENERATOR) \"\n run_generator, run_binds = create_token_generator(run_list)\n sql = \"{run_generator}\".format(run_generator=run_generator) + sql\n binds.update(run_binds)\n\n if wheresql_run_range and 
wheresql_run_list:\n sql += \" %s (\" %andorwhere + wheresql_run_range + \" or \" + wheresql_run_list + \" )\"\n elif wheresql_run_range and not wheresql_run_list:\n sql += \" %s \" %andorwhere + wheresql_run_range\n elif not wheresql_run_range and wheresql_run_list:\n sql += \" %s \" %andorwhere + wheresql_run_list\n #self.logger.debug(sql)\n\tcursors = self.dbi.processData(sql, binds, conn, transaction=trans, returnCursor=True)\n result=[]\n for c in cursors:\n result.extend(self.formatCursor(c, size=100))\n return result", "response": "This method returns a list of all primary datasets that match the given criteria."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nreturn a list of dictionaries that represents one device.", "response": "def find_devices():\n \"\"\"Return a list of dictionaries. Each dictionary represents one device.\n\n The dictionary contains the following keys: port, unique_id and in_use.\n `port` can be used with :func:`open`. `serial_number` is the serial number\n of the device (and can also be used with :func:`open`) and `in_use`\n indicates whether the device was opened before and can currently not be\n opened.\n\n .. note::\n\n There is no guarantee, that the returned information is still valid\n when you open the device. Esp. if you open a device by the port, the\n unique_id may change because you've just opened another device. Eg. it\n may be disconnected from the machine after you call :func:`find_devices`\n but before you call :func:`open`.\n\n To open a device by its serial number, you should use the :func:`open`\n with the `serial_number` parameter.\n \"\"\"\n\n # first fetch the number of attached devices, so we can create a buffer\n # with the exact amount of entries. api expects array of u16\n num_devices = api.py_aa_find_devices(0, array.array('H'))\n _raise_error_if_negative(num_devices)\n\n # return an empty list if no device is connected\n if num_devices == 0:\n return list()\n\n ports = array.array('H', (0,) * num_devices)\n unique_ids = array.array('I', (0,) * num_devices)\n num_devices = api.py_aa_find_devices_ext(len(ports), len(unique_ids),\n ports, unique_ids)\n _raise_error_if_negative(num_devices)\n if num_devices == 0:\n return list()\n\n del ports[num_devices:]\n del unique_ids[num_devices:]\n\n devices = list()\n for port, uid in zip(ports, unique_ids):\n in_use = bool(port & PORT_NOT_FREE)\n dev = dict(\n port=port & ~PORT_NOT_FREE,\n serial_number=_unique_id_str(uid),\n in_use=in_use)\n devices.append(dev)\n\n return devices"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nopen an aardvark device and return an Aardvark object.", "response": "def open(port=None, serial_number=None):\n \"\"\"Open an aardvark device and return an :class:`Aardvark` object. If the\n device cannot be opened an :class:`IOError` is raised.\n\n The `port` can be retrieved by :func:`find_devices`. Usually, the first\n device is 0, the second 1, etc.\n\n If you are using only one device, you can therefore omit the parameter\n in which case 0 is used.\n\n Another method to open a device is to use the serial number. You can either\n find the number on the device itself or in the in the corresponding USB\n property. The serial number is a string which looks like `NNNN-MMMMMMM`.\n\n Raises an :class:`IOError` if the port (or serial number) does not exist,\n is already connected or an incompatible device is found.\n\n .. note::\n\n There is a small chance that this function raises an :class:`IOError`\n although the correct device is available and not opened. The\n open-by-serial-number method works by scanning the devices. But as\n explained in :func:`find_devices`, the returned information may be\n outdated. Therefore, :func:`open` checks the serial number once the\n device is opened and if it is not the expected one, raises\n :class:`IOError`. No retry mechanism is implemented.\n\n As long as nobody comes along with a better idea, this failure case is\n up to the user.\n \"\"\"\n if port is None and serial_number is None:\n dev = Aardvark()\n elif serial_number is not None:\n for d in find_devices():\n if d['serial_number'] == serial_number:\n break\n else:\n _raise_error_if_negative(ERR_UNABLE_TO_OPEN)\n\n dev = Aardvark(d['port'])\n\n # make sure we opened the correct device\n if dev.unique_id_str() != serial_number:\n dev.close()\n _raise_error_if_negative(ERR_UNABLE_TO_OPEN)\n else:\n dev = Aardvark(port)\n\n return dev"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nset this to True to enable the hardware I2C interface.", "response": "def enable_i2c(self):\n \"\"\"Set this to `True` to enable the hardware I2C interface. If set to\n `False` the hardware interface will be disabled and its pins (SDA and\n SCL) can be used as GPIOs.\n \"\"\"\n config = self._interface_configuration(CONFIG_QUERY)\n return config == CONFIG_GPIO_I2C or config == CONFIG_SPI_I2C"}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef enable_spi(self):\n config = self._interface_configuration(CONFIG_QUERY)\n return config == CONFIG_SPI_GPIO or config == CONFIG_SPI_I2C", "response": "Set this to True to enable the hardware SPI interface."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef i2c_pullups(self):\n ret = api.py_aa_i2c_pullup(self.handle, I2C_PULLUP_QUERY)\n _raise_error_if_negative(ret)\n return ret", "response": "Enable or disable the I2C pullup resistors."}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef target_power(self):\n ret = api.py_aa_target_power(self.handle, TARGET_POWER_QUERY)\n _raise_error_if_negative(ret)\n return ret", "response": "Sets this to True will activate the power pins 4 and 6. If set to False will deactivate the power pins 4 and 6."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef i2c_bus_timeout(self):\n ret = api.py_aa_i2c_bus_timeout(self.handle, 0)\n _raise_error_if_negative(ret)\n return ret", "response": "I2C bus lock timeout in ms."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef i2c_master_write(self, i2c_address, data, flags=I2C_NO_FLAGS):\n\n data = array.array('B', data)\n status, _ = api.py_aa_i2c_write_ext(self.handle, i2c_address, flags,\n len(data), data)\n _raise_i2c_status_code_error_if_failure(status)", "response": "Write data to the master i2c device."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nmake an I2C read access.", "response": "def i2c_master_read(self, addr, length, flags=I2C_NO_FLAGS):\n \"\"\"Make an I2C read access.\n\n The given I2C device is addressed and clock cycles for `length` bytes\n are generated. A short read will occur if the device generates an early\n NAK.\n\n The transaction is finished with an I2C stop condition unless the\n I2C_NO_STOP flag is set.\n \"\"\"\n\n data = array.array('B', (0,) * length)\n status, rx_len = api.py_aa_i2c_read_ext(self.handle, addr, flags,\n length, data)\n _raise_i2c_status_code_error_if_failure(status)\n del data[rx_len:]\n return bytes(data)"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef i2c_master_write_read(self, i2c_address, data, length):\n\n self.i2c_master_write(i2c_address, data, I2C_NO_STOP)\n return self.i2c_master_read(i2c_address, length)", "response": "This is a convenience method for making an I2C write and read access. This method is used to access the contents of the memory of the current I2C device."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef poll(self, timeout=None):\n if timeout is None:\n timeout = -1\n\n ret = api.py_aa_async_poll(self.handle, timeout)\n _raise_error_if_negative(ret)\n\n events = list()\n for event in (POLL_I2C_READ, POLL_I2C_WRITE, POLL_SPI,\n POLL_I2C_MONITOR):\n if ret & event:\n events.append(event)\n return events", "response": "Wait for an event to occur."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef enable_i2c_slave(self, slave_address):\n ret = api.py_aa_i2c_slave_enable(self.handle, slave_address,\n self.BUFFER_SIZE, self.BUFFER_SIZE)\n _raise_error_if_negative(ret)", "response": "Enable I2C slave mode."}
{"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nreads the bytes from an I2C slave reception.", "response": "def i2c_slave_read(self):\n \"\"\"Read the bytes from an I2C slave reception.\n\n The bytes are returned as a string object.\n \"\"\"\n data = array.array('B', (0,) * self.BUFFER_SIZE)\n status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,\n self.BUFFER_SIZE, data)\n _raise_i2c_status_code_error_if_failure(status)\n\n # In case of general call, actually return the general call address\n if addr == 0x80:\n addr = 0x00\n del data[rx_len:]\n return (addr, bytes(data))"}
{"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\nreturn the number of bytes transmitted by the slave.", "response": "def i2c_slave_last_transmit_size(self):\n \"\"\"Returns the number of bytes transmitted by the slave.\"\"\"\n ret = api.py_aa_i2c_slave_write_stats(self.handle)\n _raise_error_if_negative(ret)\n return ret"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef i2c_monitor_read(self):\n data = array.array('H', (0,) * self.BUFFER_SIZE)\n ret = api.py_aa_i2c_monitor_read(self.handle, self.BUFFER_SIZE,\n data)\n _raise_error_if_negative(ret)\n del data[ret:]\n return data.tolist()", "response": "Read any data fetched by the monitor."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef spi_bitrate(self):\n ret = api.py_aa_spi_bitrate(self.handle, 0)\n _raise_error_if_negative(ret)\n return ret", "response": "SPI bitrate in kHz."}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nconfiguring the SPI interface.", "response": "def spi_configure(self, polarity, phase, bitorder):\n \"\"\"Configure the SPI interface.\"\"\"\n ret = api.py_aa_spi_configure(self.handle, polarity, phase, bitorder)\n _raise_error_if_negative(ret)"}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nconfigures the SPI interface by the well known SPI modes.", "response": "def spi_configure_mode(self, spi_mode):\n \"\"\"Configure the SPI interface by the well known SPI modes.\"\"\"\n if spi_mode == SPI_MODE_0:\n self.spi_configure(SPI_POL_RISING_FALLING,\n SPI_PHASE_SAMPLE_SETUP, SPI_BITORDER_MSB)\n elif spi_mode == SPI_MODE_3:\n self.spi_configure(SPI_POL_FALLING_RISING,\n SPI_PHASE_SETUP_SAMPLE, SPI_BITORDER_MSB)\n else:\n raise RuntimeError('SPI Mode not supported')"}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nwrites a stream of bytes to a SPI device.", "response": "def spi_write(self, data):\n \"\"\"Write a stream of bytes to a SPI device.\"\"\"\n data_out = array.array('B', data)\n data_in = array.array('B', (0,) * len(data_out))\n ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,\n len(data_in), data_in)\n _raise_error_if_negative(ret)\n return bytes(data_in)"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\nchange the ouput polarity on the SS line.", "response": "def spi_ss_polarity(self, polarity):\n \"\"\"Change the ouput polarity on the SS line.\n\n Please note, that this only affects the master functions.\n \"\"\"\n ret = api.py_aa_spi_master_ss_polarity(self.handle, polarity)\n _raise_error_if_negative(ret)"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef flatten_nested_hash(hash_table):\n def flatten(hash_table, brackets=True):\n f = {}\n for key, value in hash_table.items():\n _key = '[' + str(key) + ']' if brackets else str(key)\n if isinstance(value, dict):\n for k, v in flatten(value).items():\n f[_key + k] = v\n elif isinstance(value, list):\n temp_hash = {}\n for i, v in enumerate(value):\n temp_hash[str(i)] = v\n for k, v in flatten(temp_hash).items():\n f[_key + k] = v\n else:\n f[_key] = value\n return f\n return flatten(hash_table, False)", "response": "Flatten nested dictionary for GET POST DELETE API request\n "}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef sailthru_http_request(url, data, method, file_data=None, headers=None, request_timeout=10):\n data = flatten_nested_hash(data)\n method = method.upper()\n params, data = (None, data) if method == 'POST' else (data, None)\n sailthru_headers = {'User-Agent': 'Sailthru API Python Client %s; Python Version: %s' % ('2.3.5', platform.python_version())}\n if headers and isinstance(headers, dict):\n for key, value in sailthru_headers.items():\n headers[key] = value\n else:\n headers = sailthru_headers\n try:\n response = requests.request(method, url, params=params, data=data, files=file_data, headers=headers, timeout=request_timeout)\n return SailthruResponse(response)\n except requests.HTTPError as e:\n raise SailthruClientError(str(e))\n except requests.RequestException as e:\n raise SailthruClientError(str(e))", "response": "Perform an HTTP request to the Sailthru API."}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _schema_from_verb(verb, partial=False):\n from .verbs import Verbs\n return getattr(Verbs, verb)(partial=partial)", "response": "Return an instance of schema for given verb."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning resumption token serializer.", "response": "def serialize(pagination, **kwargs):\n \"\"\"Return resumption token serializer.\"\"\"\n if not pagination.has_next:\n return\n\n token_builder = URLSafeTimedSerializer(\n current_app.config['SECRET_KEY'],\n salt=kwargs['verb'],\n )\n schema = _schema_from_verb(kwargs['verb'], partial=False)\n data = dict(seed=random.random(), page=pagination.next_num,\n kwargs=schema.dump(kwargs).data)\n scroll_id = getattr(pagination, '_scroll_id', None)\n if scroll_id:\n data['scroll_id'] = scroll_id\n\n return token_builder.dumps(data)"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef make_request_validator(request):\n verb = request.values.get('verb', '', type=str)\n resumption_token = request.values.get('resumptionToken', None)\n\n schema = Verbs if resumption_token is None else ResumptionVerbs\n return getattr(schema, verb, OAISchema)(partial=False)", "response": "Validate arguments in incomming request."}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nparsing an ISO8601 - formatted datetime and return a datetime object.", "response": "def from_iso_permissive(datestring, use_dateutil=True):\n \"\"\"Parse an ISO8601-formatted datetime and return a datetime object.\n\n Inspired by the marshmallow.utils.from_iso function, but also accepts\n datestrings that don't contain the time.\n \"\"\"\n dateutil_available = False\n try:\n from dateutil import parser\n dateutil_available = True\n except ImportError:\n dateutil_available = False\n import datetime\n\n # Use dateutil's parser if possible\n if dateutil_available and use_dateutil:\n return parser.parse(datestring)\n else:\n # Strip off timezone info.\n return datetime.datetime.strptime(datestring[:19],\n '%Y-%m-%dT%H:%M:%S')"}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef validate(self, data):\n if 'verb' in data and data['verb'] != self.__class__.__name__:\n raise ValidationError(\n # FIXME encode data\n 'This is not a valid OAI-PMH verb:{0}'.format(data['verb']),\n field_names=['verb'],\n )\n\n if 'from_' in data and 'until' in data and \\\n data['from_'] > data['until']:\n raise ValidationError('Date \"from\" must be before \"until\".')\n\n extra = set(request.values.keys()) - set([\n f.load_from or f.name for f in self.fields.values()\n ])\n if extra:\n raise ValidationError('You have passed too many arguments.')", "response": "Check range between dates under keys from_ and until."}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef sets(self):\n if self.cache:\n return self.cache.get(\n self.app.config['OAISERVER_CACHE_KEY'])", "response": "Get list of sets."}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef sets(self, values):\n # if cache server is configured, save sets list\n if self.cache:\n self.cache.set(self.app.config['OAISERVER_CACHE_KEY'], values)", "response": "Set list of sets."}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nregistering signals to update the record record", "response": "def register_signals(self):\n \"\"\"Register signals.\"\"\"\n from .receivers import OAIServerUpdater\n # Register Record signals to update OAI informations\n self.update_function = OAIServerUpdater()\n records_signals.before_record_insert.connect(self.update_function,\n weak=False)\n records_signals.before_record_update.connect(self.update_function,\n weak=False)\n if self.app.config['OAISERVER_REGISTER_SET_SIGNALS']:\n self.register_signals_oaiset()"}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nregisters signals to update records.", "response": "def register_signals_oaiset(self):\n \"\"\"Register OAISet signals to update records.\"\"\"\n from .models import OAISet\n from .receivers import after_insert_oai_set, \\\n after_update_oai_set, after_delete_oai_set\n listen(OAISet, 'after_insert', after_insert_oai_set)\n listen(OAISet, 'after_update', after_update_oai_set)\n listen(OAISet, 'after_delete', after_delete_oai_set)"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef init_config(self, app):\n app.config.setdefault(\n 'OAISERVER_BASE_TEMPLATE',\n app.config.get('BASE_TEMPLATE',\n 'invenio_oaiserver/base.html'))\n\n app.config.setdefault(\n 'OAISERVER_REPOSITORY_NAME',\n app.config.get('THEME_SITENAME',\n 'Invenio-OAIServer'))\n\n # warn user if ID_PREFIX is not set\n if 'OAISERVER_ID_PREFIX' not in app.config:\n import socket\n import warnings\n\n app.config.setdefault(\n 'OAISERVER_ID_PREFIX',\n 'oai:{0}:recid/'.format(socket.gethostname()))\n warnings.warn(\n \"\"\"Please specify the OAISERVER_ID_PREFIX configuration.\"\"\"\n \"\"\"default value is: {0}\"\"\".format(\n app.config.get('OAISERVER_ID_PREFIX')))\n\n for k in dir(config):\n if k.startswith('OAISERVER_'):\n app.config.setdefault(k, getattr(config, k))", "response": "Initialize configuration.\n\n :param app: An instance of :class:`flask.Flask`."}
{"SOURCE": "codesearchnet", "instruction": "How would you implement a function in Python 3 that\nextracts the values of a set of parameters recursing into nested dictionaries.", "response": "def extract_params(params):\n \"\"\"\n Extracts the values of a set of parameters, recursing into nested dictionaries.\n \"\"\"\n values = []\n if isinstance(params, dict):\n for key, value in params.items():\n values.extend(extract_params(value))\n elif isinstance(params, list):\n for value in params:\n values.extend(extract_params(value))\n else:\n values.append(params)\n return values"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nreturning the unhashed signature string for an API call.", "response": "def get_signature_string(params, secret):\n \"\"\"\n Returns the unhashed signature string (secret + sorted list of param values) for an API call.\n @param params: dictionary values to generate signature string\n @param secret: secret string\n \"\"\"\n str_list = [str(item) for item in extract_params(params)]\n str_list.sort()\n return (secret + ''.join(str_list)).encode('utf-8')"}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef send(self, template, email, _vars=None, options=None, schedule_time=None, limit=None):\n _vars = _vars or {}\n options = options or {}\n data = {'template': template,\n 'email': email,\n 'vars': _vars,\n 'options': options.copy()}\n if limit:\n data['limit'] = limit.copy()\n if schedule_time is not None:\n data['schedule_time'] = schedule_time\n return self.api_post('send', data)", "response": "Remotely send an email template to a single email address."}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef multi_send(self, template, emails, _vars=None, evars=None, schedule_time=None, options=None):\n _vars = _vars or {}\n evars = evars or {}\n options = options or {}\n data = {'template': template,\n 'email': ','.join(emails) if isinstance(emails, list) else emails,\n 'vars': _vars.copy(),\n 'evars': evars.copy(),\n 'options': options.copy()}\n if schedule_time is not None:\n data['schedule_time'] = schedule_time\n return self.api_post('send', data)", "response": "Remotely send an email template to multiple email addresses."}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef set_email(self, email, _vars=None, lists=None, templates=None, verified=0, optout=None, send=None, send_vars=None):\n _vars = _vars or {}\n lists = lists or []\n templates = templates or []\n send_vars = send_vars or []\n data = {'email': email,\n 'vars': _vars.copy(),\n 'lists': lists,\n 'templates': templates,\n 'verified': int(verified)}\n if optout is not None:\n data['optout'] = optout\n if send is not None:\n data['send'] = send\n if send_vars:\n data['send_vars'] = send_vars\n return self.api_post('email', data)", "response": "Set the email address of the user."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting user by a given idvalue", "response": "def get_user(self, idvalue, options=None):\n \"\"\"\n get user by a given id\n http://getstarted.sailthru.com/api/user\n \"\"\"\n options = options or {}\n data = options.copy()\n data['id'] = idvalue\n return self.api_get('user', data)"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef save_user(self, idvalue, options=None):\n options = options or {}\n data = options.copy()\n data['id'] = idvalue\n return self.api_post('user', data)", "response": "Save a user by a given idvalue."}
{"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\nschedules a blast of a list of mails.", "response": "def schedule_blast(self, name, list, schedule_time, from_name, from_email, subject, content_html, content_text, options=None):\n \"\"\"\n Schedule a mass mail blast\n http://docs.sailthru.com/api/blast\n @param name: name to give to this new blast\n @param list: mailing list name to send to\n @param schedule_time: when the blast should send. Dates in the past will be scheduled for immediate delivery. Any English textual datetime format known to PHP's strtotime function is acceptable, such as 2009-03-18 23:57:22 UTC, now (immediate delivery), +3 hours (3 hours from now), or February 14, 9:30 EST. Be sure to specify a timezone if you use an exact time.\n @param from_name: name appearing in the \"From\" of the email\n @param from_email: email address to use as the \"from\" - choose from any of your verified emails\n @param subject: subject line of the email\n @param content_html: HTML format version of the email\n @param content_text: Text format version of the email\n @param options: optional parameters dictionary\n blast_id\n copy_blast\n copy_template\n replyto\n report_email\n is_link_tracking\n is_google_analytics\n is_public\n suppress_list\n test_vars\n email_hour_range\n abtest\n test_percent\n data_feed_url\n \"\"\"\n options = options or {}\n data = options.copy()\n data['name'] = name\n data['list'] = list\n data['schedule_time'] = schedule_time\n data['from_name'] = from_name\n data['from_email'] = from_email\n data['subject'] = subject\n data['content_html'] = content_html\n data['content_text'] = content_text\n return self.api_post('blast', data)"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef schedule_blast_from_template(self, template, list_name, schedule_time, options=None):\n options = options or {}\n data = options.copy()\n data['copy_template'] = template\n data['list'] = list_name\n data['schedule_time'] = schedule_time\n return self.api_post('blast', data)", "response": "Schedule a mass mail blast from a template"}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nscheduling a blast from previous blast", "response": "def schedule_blast_from_blast(self, blast_id, schedule_time, options=None):\n \"\"\"\n Schedule a mass mail blast from previous blast\n http://docs.sailthru.com/api/blast\n @param blast_id: blast_id to copy from\n @param schedule_time\n @param options: additional optional params\n \"\"\"\n options = options or {}\n data = options.copy()\n data['copy_blast'] = blast_id\n data['schedule_time'] = schedule_time\n return self.api_post('blast', data)"}
{"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\ngets detailed metadata about a list.", "response": "def get_list(self, list_name, options=None):\n \"\"\"\n Get detailed metadata information about a list.\n \"\"\"\n options = options or {}\n data = {'list': list_name}\n data.update(options)\n return self.api_get('list', data)"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef save_list(self, list_name, emails):\n data = {'list': list_name,\n 'emails': ','.join(emails) if isinstance(emails, list) else emails}\n return self.api_post('list', data)", "response": "Upload a list. The list import job is queued and will happen shortly after the API request.\n http://docs.sailthru.com/api/list\n @param list: list name\n @param emails: List of email values or comma separated string"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef import_contacts(self, email, password, include_name=False):\n data = {'email': email,\n 'password': password}\n if include_name:\n data['names'] = 1\n return self.api_post('contacts', data)", "response": "Import email contacts from a user's address book on one of the major email websites."}
{"SOURCE": "codesearchnet", "instruction": "Can you create a Python 3 function that\npushes a piece of content to Sailthru.", "response": "def push_content(self, title, url,\n images=None, date=None, expire_date=None,\n description=None, location=None, price=None,\n tags=None,\n author=None, site_name=None,\n spider=None, vars=None):\n\n \"\"\"\n Push a new piece of content to Sailthru.\n\n Expected names for the `images` argument's map are \"full\" and \"thumb\"\n Expected format for `location` should be [longitude,latitude]\n\n @param title: title string for the content\n @param url: URL string for the content\n @param images: map of image names\n @param date: date string\n @param expire_date: date string for when the content expires\n @param description: description for the content\n @param location: location of the content\n @param price: price for the content\n @param tags: list or comma separated string values\n @param author: author for the content\n @param site_name: site name for the content\n @param spider: truthy value to force respidering content\n @param vars: replaceable vars dictionary\n\n \"\"\"\n vars = vars or {}\n data = {'title': title,\n 'url': url}\n if images is not None:\n data['images'] = images\n if date is not None:\n data['date'] = date\n if expire_date is not None:\n data['expire_date'] = date\n if location is not None:\n data['location'] = date\n if price is not None:\n data['price'] = price\n if description is not None:\n data['description'] = description\n if site_name is not None:\n data['site_name'] = images\n if author is not None:\n data['author'] = author\n if spider:\n data['spider'] = 1\n if tags is not None:\n data['tags'] = \",\".join(tags) if isinstance(tags, list) else tags\n if len(vars) > 0:\n data['vars'] = vars.copy()\n return self.api_post('content', data)"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef save_alert(self, email, type, template, when=None, options=None):\n options = options or {}\n data = options.copy()\n data['email'] = email\n data['type'] = type\n data['template'] = template\n if type in ['weekly', 'daily']:\n data['when'] = when\n return self.api_post('alert', data)", "response": "Save an alert to a user."}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef purchase(self, email, items=None, incomplete=None, message_id=None, options=None, extid=None):\n items = items or {}\n options = options or {}\n data = options.copy()\n data['email'] = email\n data['items'] = items\n if incomplete is not None:\n data['incomplete'] = incomplete\n if message_id is not None:\n data['message_id'] = message_id\n if extid is not None:\n data['extid'] = extid\n return self.api_post('purchase', data)", "response": "This method is used to create a new item in a user's purchase total."}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef get_purchase(self, purchase_id, purchase_key='sid'):\n data = {'purchase_id': purchase_id,\n 'purchase_key': purchase_key}\n return self.api_get('purchase', data)", "response": "Retrieve information about a purchase using the system's unique ID or a client's ID."}
{"SOURCE": "codesearchnet", "instruction": "How would you code a function in Python 3 to\nretrieve information about your subscriber counts on a particular list on a particular day.", "response": "def stats_list(self, list=None, date=None, headers=None):\n \"\"\"\n Retrieve information about your subscriber counts on a particular list, on a particular day.\n http://docs.sailthru.com/api/stat\n \"\"\"\n data = {'stat': 'list'}\n if list is not None:\n data['list'] = list\n if date is not None:\n data['date'] = date\n return self._stats(data, headers)"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef stats_blast(self, blast_id=None, start_date=None, end_date=None, options=None):\n options = options or {}\n data = options.copy()\n if blast_id is not None:\n data['blast_id'] = blast_id\n if start_date is not None:\n data['start_date'] = start_date\n if end_date is not None:\n data['end_date'] = end_date\n data['stat'] = 'blast'\n return self._stats(data)", "response": "Retrieve information about a particular blast or aggregated information from all of the blasts over a specified date range."}
{"SOURCE": "codesearchnet", "instruction": "How would you explain what the following Python 3 function does\ndef stats_send(self, template, start_date, end_date, options=None):\n options = options or {}\n data = options.copy()\n data = {'template': template,\n 'start_date': start_date,\n 'end_date': end_date\n }\n data['stat'] = 'send'\n return self._stats(data)", "response": "Retrieve information about a particular transactional or aggregated information from that template over a specified date range."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef receive_verify_post(self, post_params):\n if isinstance(post_params, dict):\n required_params = ['action', 'email', 'send_id', 'sig']\n if not self.check_for_valid_postback_actions(required_params, post_params):\n return False\n else:\n return False\n\n if post_params['action'] != 'verify':\n return False\n\n sig = post_params['sig']\n post_params = post_params.copy()\n del post_params['sig']\n\n if sig != get_signature_hash(post_params, self.secret):\n return False\n\n send_response = self.get_send(post_params['send_id'])\n\n try:\n send_body = send_response.get_body()\n send_json = json.loads(send_body)\n if 'email' not in send_body:\n return False\n if send_json['email'] != post_params['email']:\n return False\n except ValueError:\n return False\n\n return True", "response": "Returns True if the incoming request is an authenticated verify post."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\ncheck if the post_params is valid and return True if the post was valid False otherwise", "response": "def receive_update_post(self, post_params):\n \"\"\"\n Update postbacks\n \"\"\"\n\n if isinstance(post_params, dict):\n required_params = ['action', 'email', 'sig']\n if not self.check_for_valid_postback_actions(required_params, post_params):\n return False\n else:\n return False\n\n if post_params['action'] != 'update':\n return False\n\n signature = post_params['sig']\n post_params = post_params.copy()\n del post_params['sig']\n\n if signature != get_signature_hash(post_params, self.secret):\n return False\n\n return True"}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nchecking if a hard bounce post is received and return the set of related resources.", "response": "def receive_hardbounce_post(self, post_params):\n \"\"\"\n Hard bounce postbacks\n \"\"\"\n if isinstance(post_params, dict):\n required_params = ['action', 'email', 'sig']\n if not self.check_for_valid_postback_actions(required_params, post_params):\n return False\n else:\n return False\n\n if post_params['action'] != 'hardbounce':\n return False\n\n signature = post_params['sig']\n post_params = post_params.copy()\n del post_params['sig']\n\n if signature != get_signature_hash(post_params, self.secret):\n return False\n\n # for sends\n if 'send_id' in post_params:\n send_id = post_params['send_id']\n send_response = self.get_send(send_id)\n if not send_response.is_ok():\n return False\n send_obj = send_response.get_body()\n if not send_obj or 'email' not in send_obj:\n return False\n\n # for blasts\n if 'blast_id' in post_params:\n blast_id = post_params['blast_id']\n blast_response = self.get_blast(blast_id)\n if not blast_response.is_ok():\n return False\n blast_obj = blast_response.get_body()\n if not blast_obj:\n return False\n\n return True"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\nchecking if post_params contain required keys", "response": "def check_for_valid_postback_actions(self, required_keys, post_params):\n \"\"\"\n checks if post_params contain required keys\n \"\"\"\n for key in required_keys:\n if key not in post_params:\n return False\n return True"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate the documentation for the following Python 3 function\ndef api_get(self, action, data, headers=None):\n return self._api_request(action, data, 'GET', headers)", "response": "Perform an HTTP GET request using the shared-secret auth hash."}
{"SOURCE": "codesearchnet", "instruction": "Create a Python 3 function for\nperforming an HTTP POST request using the shared-secret auth hash.", "response": "def api_post(self, action, data, binary_data_param=None):\n \"\"\"\n Perform an HTTP POST request, using the shared-secret auth hash.\n @param action: API action call\n @param data: dictionary values\n \"\"\"\n binary_data_param = binary_data_param or []\n if binary_data_param:\n return self.api_post_multipart(action, data, binary_data_param)\n else:\n return self._api_request(action, data, 'POST')"}
{"SOURCE": "codesearchnet", "instruction": "Can you generate a brief explanation for the following Python 3 code\ndef api_post_multipart(self, action, data, binary_data_param):\n binary_data = {}\n data = data.copy()\n\n try:\n file_handles = []\n for param in binary_data_param:\n if param in data:\n binary_data[param] = file_handle = open(data[param], 'r')\n file_handles.append(file_handle)\n del data[param]\n json_payload = self._prepare_json_payload(data)\n\n return self._http_request(action, json_payload, \"POST\", binary_data)\n finally:\n for file_handle in file_handles:\n file_handle.close()", "response": "Perform an HTTP Multipart POST request using the shared - secret auth hash."}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _api_request(self, action, data, request_type, headers=None):\n if 'file' in data:\n file_data = {'file': open(data['file'], 'rb')}\n else:\n file_data = None\n\n return self._http_request(action, self._prepare_json_payload(data), request_type, file_data, headers)", "response": "Make a request to Sailthru API with given data and api key format and signature hash"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 script to\nget rate limit information for last API call", "response": "def get_last_rate_limit_info(self, action, method):\n \"\"\"\n Get rate limit information for last API call\n :param action: API endpoint\n :param method: Http method, GET, POST or DELETE\n :return: dict|None\n \"\"\"\n method = method.upper()\n if (action in self.last_rate_limit_info and method in self.last_rate_limit_info[action]):\n return self.last_rate_limit_info[action][method]\n\n return None"}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nminting record identifiers. :param record_uuid: The record UUID. :param data: The record data. :returns: A :class:`invenio_pidstore.models.PersistentIdentifier` instance.", "response": "def oaiid_minter(record_uuid, data):\n \"\"\"Mint record identifiers.\n\n :param record_uuid: The record UUID.\n :param data: The record data.\n :returns: A :class:`invenio_pidstore.models.PersistentIdentifier` instance.\n \"\"\"\n pid_value = data.get('_oai', {}).get('id')\n if pid_value is None:\n fetcher_name = \\\n current_app.config.get('OAISERVER_CONTROL_NUMBER_FETCHER', 'recid')\n cn_pid = current_pidstore.fetchers[fetcher_name](record_uuid, data)\n pid_value = current_app.config.get('OAISERVER_ID_PREFIX', '') + str(\n cn_pid.pid_value\n )\n provider = OAIIDProvider.create(\n object_type='rec', object_uuid=record_uuid,\n pid_value=str(pid_value)\n )\n data.setdefault('_oai', {})\n data['_oai']['id'] = provider.pid.pid_value\n return provider.pid"}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\nreturns a formatter validation error.", "response": "def validation_error(exception):\n \"\"\"Return formatter validation error.\"\"\"\n messages = getattr(exception, 'messages', None)\n if messages is None:\n messages = getattr(exception, 'data', {'messages': None})['messages']\n\n def extract_errors():\n \"\"\"Extract errors from exception.\"\"\"\n if isinstance(messages, dict):\n for field, message in messages.items():\n if field == 'verb':\n yield 'badVerb', '\\n'.join(message)\n else:\n yield 'badArgument', '\\n'.join(message)\n else:\n for field in exception.field_names:\n if field == 'verb':\n yield 'badVerb', '\\n'.join(messages)\n else:\n yield 'badArgument', '\\n'.join(messages)\n\n if not exception.field_names:\n yield 'badArgument', '\\n'.join(messages)\n\n return (etree.tostring(xml.error(extract_errors())),\n 422,\n {'Content-Type': 'text/xml'})"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef create(cls, object_type=None, object_uuid=None, **kwargs):\n assert 'pid_value' in kwargs\n\n kwargs.setdefault('status', cls.default_status)\n if object_type and object_uuid:\n kwargs['status'] = PIDStatus.REGISTERED\n\n return super(OAIIDProvider, cls).create(\n object_type=object_type, object_uuid=object_uuid, **kwargs)", "response": "Create a new record identifier."}
{"SOURCE": "codesearchnet", "instruction": "Can you implement a function in Python 3 that\ncreates a percolator mapping for the given index and document type.", "response": "def _create_percolator_mapping(index, doc_type):\n \"\"\"Update mappings with the percolator field.\n\n .. note::\n\n This is only needed from ElasticSearch v5 onwards, because percolators\n are now just a special type of field inside mappings.\n \"\"\"\n if ES_VERSION[0] >= 5:\n current_search_client.indices.put_mapping(\n index=index, doc_type=doc_type,\n body=PERCOLATOR_MAPPING, ignore=[400, 404])"}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function for\ngetting results for a percolate query.", "response": "def _percolate_query(index, doc_type, percolator_doc_type, document):\n \"\"\"Get results for a percolate query.\"\"\"\n if ES_VERSION[0] in (2, 5):\n results = current_search_client.percolate(\n index=index, doc_type=doc_type, allow_no_indices=True,\n ignore_unavailable=True, body={'doc': document}\n )\n return results['matches']\n elif ES_VERSION[0] == 6:\n results = current_search_client.search(\n index=index, doc_type=percolator_doc_type, allow_no_indices=True,\n ignore_unavailable=True, body={\n 'query': {\n 'percolate': {\n 'field': 'query',\n 'document_type': percolator_doc_type,\n 'document': document,\n }\n }\n }\n )\n return results['hits']['hits']"}
{"SOURCE": "codesearchnet", "instruction": "Given the following Python 3 function, write the documentation\ndef _new_percolator(spec, search_pattern):\n if spec and search_pattern:\n query = query_string_parser(search_pattern=search_pattern).to_dict()\n for index in current_search.mappings.keys():\n # Create the percolator doc_type in the existing index for >= ES5\n # TODO: Consider doing this only once in app initialization\n percolator_doc_type = _get_percolator_doc_type(index)\n _create_percolator_mapping(index, percolator_doc_type)\n current_search_client.index(\n index=index, doc_type=percolator_doc_type,\n id='oaiset-{}'.format(spec),\n body={'query': query}\n )", "response": "Create new percolator associated with the new set."}
{"SOURCE": "codesearchnet", "instruction": "Write a Python 3 function that can\ndelete percolator associated with the new oaiset.", "response": "def _delete_percolator(spec, search_pattern):\n \"\"\"Delete percolator associated with the new oaiset.\"\"\"\n if spec:\n for index in current_search.mappings.keys():\n # Create the percolator doc_type in the existing index for >= ES5\n percolator_doc_type = _get_percolator_doc_type(index)\n _create_percolator_mapping(index, percolator_doc_type)\n current_search_client.delete(\n index=index, doc_type=percolator_doc_type,\n id='oaiset-{}'.format(spec), ignore=[404]\n )"}
{"SOURCE": "codesearchnet", "instruction": "Can you tell what is the following Python 3 function doing\ndef update_affected_records(spec=None, search_pattern=None):\n chunk_size = current_app.config['OAISERVER_CELERY_TASK_CHUNK_SIZE']\n record_ids = get_affected_records(spec=spec, search_pattern=search_pattern)\n\n group(\n update_records_sets.s(list(filter(None, chunk)))\n for chunk in zip_longest(*[iter(record_ids)] * chunk_size)\n )()", "response": "Update all affected records by OAISet change."}
{"SOURCE": "codesearchnet", "instruction": "Can you write a function in Python 3 where it\ncreates OAI - PMH envelope for response.", "response": "def envelope(**kwargs):\n \"\"\"Create OAI-PMH envelope for response.\"\"\"\n e_oaipmh = Element(etree.QName(NS_OAIPMH, 'OAI-PMH'), nsmap=NSMAP)\n e_oaipmh.set(etree.QName(NS_XSI, 'schemaLocation'),\n '{0} {1}'.format(NS_OAIPMH, NS_OAIPMH_XSD))\n e_tree = ElementTree(element=e_oaipmh)\n\n if current_app.config['OAISERVER_XSL_URL']:\n e_oaipmh.addprevious(etree.ProcessingInstruction(\n 'xml-stylesheet', 'type=\"text/xsl\" href=\"{0}\"'\n .format(current_app.config['OAISERVER_XSL_URL'])))\n\n e_responseDate = SubElement(\n e_oaipmh, etree.QName(\n NS_OAIPMH, 'responseDate'))\n # date should be first possible moment\n e_responseDate.text = datetime_to_datestamp(datetime.utcnow())\n e_request = SubElement(e_oaipmh, etree.QName(NS_OAIPMH, 'request'))\n for key, value in kwargs.items():\n if key == 'from_' or key == 'until':\n value = datetime_to_datestamp(value)\n elif key == 'resumptionToken':\n value = value['token']\n e_request.set(key, value)\n e_request.text = url_for('invenio_oaiserver.response', _external=True)\n return e_tree, e_oaipmh"}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef verb(**kwargs):\n e_tree, e_oaipmh = envelope(**kwargs)\n e_element = SubElement(e_oaipmh, etree.QName(NS_OAIPMH, kwargs['verb']))\n return e_tree, e_element", "response": "Create OAI - PMH envelope for response with verb."}
{"SOURCE": "codesearchnet", "instruction": "Implement a function in Python 3 to\ncreate OAI - PMH response for verb Identify.", "response": "def identify(**kwargs):\n \"\"\"Create OAI-PMH response for verb Identify.\"\"\"\n cfg = current_app.config\n\n e_tree, e_identify = verb(**kwargs)\n\n e_repositoryName = SubElement(\n e_identify, etree.QName(NS_OAIPMH, 'repositoryName'))\n e_repositoryName.text = cfg['OAISERVER_REPOSITORY_NAME']\n\n e_baseURL = SubElement(e_identify, etree.QName(NS_OAIPMH, 'baseURL'))\n e_baseURL.text = url_for('invenio_oaiserver.response', _external=True)\n\n e_protocolVersion = SubElement(e_identify,\n etree.QName(NS_OAIPMH, 'protocolVersion'))\n e_protocolVersion.text = cfg['OAISERVER_PROTOCOL_VERSION']\n\n for adminEmail in cfg['OAISERVER_ADMIN_EMAILS']:\n e = SubElement(e_identify, etree.QName(NS_OAIPMH, 'adminEmail'))\n e.text = adminEmail\n\n e_earliestDatestamp = SubElement(\n e_identify, etree.QName(\n NS_OAIPMH, 'earliestDatestamp'))\n earliest_date = datetime(MINYEAR, 1, 1)\n earliest_record = OAIServerSearch(\n index=current_app.config['OAISERVER_RECORD_INDEX']).sort({\n \"_created\": {\"order\": \"asc\"}})[0:1].execute()\n if len(earliest_record.hits.hits) > 0:\n created_date_str = earliest_record.hits.hits[0].get(\n \"_source\", {}).get('_created')\n if created_date_str:\n earliest_date = arrow.get(\n created_date_str).to('utc').datetime.replace(tzinfo=None)\n\n e_earliestDatestamp.text = datetime_to_datestamp(earliest_date)\n\n e_deletedRecord = SubElement(e_identify,\n etree.QName(NS_OAIPMH, 'deletedRecord'))\n e_deletedRecord.text = 'no'\n\n e_granularity = SubElement(e_identify,\n etree.QName(NS_OAIPMH, 'granularity'))\n assert cfg['OAISERVER_GRANULARITY'] in DATETIME_FORMATS\n e_granularity.text = cfg['OAISERVER_GRANULARITY']\n\n compressions = cfg['OAISERVER_COMPRESSIONS']\n if compressions != ['identity']:\n for compression in compressions:\n e_compression = SubElement(e_identify,\n etree.QName(NS_OAIPMH, 
'compression'))\n e_compression.text = compression\n\n for description in cfg.get('OAISERVER_DESCRIPTIONS', []):\n e_description = SubElement(e_identify,\n etree.QName(NS_OAIPMH, 'description'))\n e_description.append(etree.fromstring(description))\n\n return e_tree"}
{"SOURCE": "codesearchnet", "instruction": "Here you have a function in Python 3, explain what it does\ndef resumption_token(parent, pagination, **kwargs):\n # Do not add resumptionToken if all results fit to the first page.\n if pagination.page == 1 and not pagination.has_next:\n return\n\n token = serialize(pagination, **kwargs)\n e_resumptionToken = SubElement(parent, etree.QName(NS_OAIPMH,\n 'resumptionToken'))\n if pagination.total:\n expiration_date = datetime.utcnow() + timedelta(\n seconds=current_app.config[\n 'OAISERVER_RESUMPTION_TOKEN_EXPIRE_TIME'\n ]\n )\n e_resumptionToken.set('expirationDate', datetime_to_datestamp(\n expiration_date\n ))\n e_resumptionToken.set('cursor', str(\n (pagination.page - 1) * pagination.per_page\n ))\n e_resumptionToken.set('completeListSize', str(pagination.total))\n\n if token:\n e_resumptionToken.text = token", "response": "Attach resumption token element to a parent element."}
{"SOURCE": "codesearchnet", "instruction": "Make a summary of the following Python 3 code\ndef listsets(**kwargs):\n e_tree, e_listsets = verb(**kwargs)\n\n page = kwargs.get('resumptionToken', {}).get('page', 1)\n size = current_app.config['OAISERVER_PAGE_SIZE']\n oai_sets = OAISet.query.paginate(page=page, per_page=size, error_out=False)\n\n for oai_set in oai_sets.items:\n e_set = SubElement(e_listsets, etree.QName(NS_OAIPMH, 'set'))\n e_setSpec = SubElement(e_set, etree.QName(NS_OAIPMH, 'setSpec'))\n e_setSpec.text = oai_set.spec\n e_setName = SubElement(e_set, etree.QName(NS_OAIPMH, 'setName'))\n e_setName.text = sanitize_unicode(oai_set.name)\n if oai_set.description:\n e_setDescription = SubElement(e_set, etree.QName(NS_OAIPMH,\n 'setDescription'))\n e_dc = SubElement(\n e_setDescription, etree.QName(NS_OAIDC, 'dc'),\n nsmap=NSMAP_DESCRIPTION\n )\n e_dc.set(etree.QName(NS_XSI, 'schemaLocation'), NS_OAIDC)\n e_description = SubElement(e_dc, etree.QName(NS_DC, 'description'))\n e_description.text = oai_set.description\n\n resumption_token(e_listsets, oai_sets, **kwargs)\n return e_tree", "response": "Create OAI - PMH response for ListSets verb."}
{"SOURCE": "codesearchnet", "instruction": "Explain what the following Python 3 code does\ndef listmetadataformats(**kwargs):\n cfg = current_app.config\n e_tree, e_listmetadataformats = verb(**kwargs)\n\n if 'identifier' in kwargs:\n # test if record exists\n OAIIDProvider.get(pid_value=kwargs['identifier'])\n\n for prefix, metadata in cfg.get('OAISERVER_METADATA_FORMATS', {}).items():\n e_metadataformat = SubElement(\n e_listmetadataformats, etree.QName(NS_OAIPMH, 'metadataFormat')\n )\n e_metadataprefix = SubElement(\n e_metadataformat, etree.QName(NS_OAIPMH, 'metadataPrefix')\n )\n e_metadataprefix.text = prefix\n e_schema = SubElement(\n e_metadataformat, etree.QName(NS_OAIPMH, 'schema')\n )\n e_schema.text = metadata['schema']\n e_metadataNamespace = SubElement(\n e_metadataformat, etree.QName(NS_OAIPMH, 'metadataNamespace')\n )\n e_metadataNamespace.text = metadata['namespace']\n\n return e_tree", "response": "Create OAI - PMH response for ListMetadataFormats verb."}
{"SOURCE": "codesearchnet", "instruction": "Implement a Python 3 function for\nattaching a header element to a parent element.", "response": "def header(parent, identifier, datestamp, sets=None, deleted=False):\n \"\"\"Attach ``
and
') or stripped.startswith('