query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Function to test add furniture functionality.
def test_add_furniture(self): add_furniture('invoice.csv', 'Elisa Miles', 'LR04', 'Leather Sofa', 25) add_furniture('invoice.csv', 'Edward Data', 'KT78', 'Kitchen Table', 10) add_furniture('invoice.csv', 'Alex Gonzales', 'BR02', 'Queen Mattress', 17) # Generate list of rentals with open('invoice.csv', 'r') as csvfile: rentals = [] for row in csvfile: rentals.append(row) print(rentals) # Assert statements self.assertEqual(rentals[0], ('Elisa Miles,LR04,Leather Sofa,25\n')) self.assertEqual(rentals[1], ('Edward Data,KT78,Kitchen Table,10\n')) self.assertEqual(rentals[2], ('Alex Gonzales,BR02,Queen Mattress,17\n'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_add_data():\n add_furniture(\"invoice_file.csv\", \"Elisa Miles\", \"LR04\", \"Leather Sofa\", 25.00)\n add_furniture(\"invoice_file.csv\", \"Edward Data\", \"KT78\", \"Kitchen Table\", 10.00)\n add_furniture(\"invoice_file.csv\", \"Alex Gonzales\", \"BR02\", \"Queen Mattress\", 17.00)", "def s...
[ "0.80027395", "0.70377046", "0.6918816", "0.68016565", "0.6707079", "0.66921055", "0.66690326", "0.6534658", "0.6481919", "0.6451947", "0.64004886", "0.63858724", "0.63445914", "0.6246608", "0.6238948", "0.62372917", "0.6225444", "0.62142056", "0.6196843", "0.61662406", "0.61...
0.7485811
1
Tests single customer functionality.
def test_single_customer(self): create_invoice = single_customer("Susan Wong", "invoice.csv") create_invoice("test_items.csv") # Generate list of rentals with open('invoice.csv', 'r') as csvfile: rentals = [] for row in csvfile: rentals.append(row) print(rentals) # Assert statements self.assertEqual(rentals[3], ('Susan Wong,AT92,Office Chair,13\n')) self.assertEqual(rentals[4], ('Susan Wong,KE25,Espresso Machine,30\n'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_customer(self):\n # get the id of a customer\n test_customer = self._create_customers(\"Alex\")\n logging.debug(test_customer)\n test_customer.create() \n resp = self.app.get(\n \"/customers/{}\".format(test_customer.id), content_type=\"application/json\"\n ...
[ "0.79642403", "0.76802677", "0.75478774", "0.7539025", "0.73661083", "0.7350192", "0.7332087", "0.72675115", "0.72450405", "0.7234708", "0.72308517", "0.71525866", "0.7051875", "0.6995165", "0.6978112", "0.69741607", "0.69428545", "0.6925339", "0.6922476", "0.6905763", "0.690...
0.7136031
12
Return the match method once, then stop
def __iter__(self): yield self.match raise StopIteration
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def match(self) -> bool:", "def handleMatch(self, m):\r\n pass", "def match(self) -> \"MatchResult\":\n raise NotImplementedError", "def continue_running(self, method):", "def __iter__(self): \n yield self.match \n raise StopIteration", "def __iter__(self):\n ...
[ "0.6595691", "0.6266482", "0.62587726", "0.621846", "0.61685926", "0.60233533", "0.5958962", "0.5932863", "0.5927277", "0.5927277", "0.5921931", "0.5910934", "0.590172", "0.5840986", "0.5780704", "0.57200015", "0.5675008", "0.5612232", "0.5565016", "0.55173606", "0.5446127", ...
0.5906426
18
Indicate whether or not to enter a case suite
def match(self, *args): if self.fall or not args: return True elif self.value in args: # changed for v1.5, see below self.fall = True return True else: return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_case(self) -> bool:\n return pulumi.get(self, \"test_case\")", "def should_run(self, case: Tuple[Dict[str, Any], ...]) -> bool:\n return True", "def CASE10( self, main ):\n import time\n from tests.CHOTestMonkey.dependencies.events.Event import EventType\n from tests...
[ "0.7348757", "0.64489675", "0.6420823", "0.6398555", "0.63852155", "0.6344665", "0.6292689", "0.6013752", "0.594612", "0.59133625", "0.5888049", "0.58311087", "0.5821088", "0.58115166", "0.5791672", "0.5789133", "0.57623816", "0.5759933", "0.5748338", "0.5702276", "0.5696946"...
0.0
-1
runs an automatic check to see if any transcriptions need to be started or are already finished and need to be reuploded\n\n Needs dbConnection & an integer representing the max concurrent transcriptons that can be ran at a time\n\n This is a function that you dont want to parse and upload files from the 'transcripts' folder into. because you really dont know which files are in progress or not whatever. ill fix later .
def runAutoCheck(dbConnection, maxConcurrent): # checks if any shows are pending. fileContent = DatabaseInteract.checkPre(dbConnection) if(len(fileContent) > 0 and Tools.numRunningProcesses() < maxConcurrent): cursor = dbConnection.cursor() cursor.execute("UPDATE transcriptions SET pending = TRUE WHERE id = '" + str(fileContent[1]) + "';") dbConnection.commit() cursor.close() url = fileContent[0] indexID = str(fileContent[1]) # get the ID instead of the filename service = str(fileContent[3]) # podcastName = fileContent[2] Tools.transcribeAll(service, url, indexID) # download the mp3 will print when done
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parseUpload(dbconnection, fileName):\n nhContent = ParseText.nohupTranscriptionContent(fileName)\n count = 0\n while count < len(nhContent[0]):\n try:\n rtf = nhContent[0][count]\n transcription = nhContent[1][count].replace(\"'\", \"''\").replace(\...
[ "0.5762406", "0.56196094", "0.5543294", "0.54251677", "0.54135394", "0.53959346", "0.5356444", "0.5315741", "0.5314573", "0.5207271", "0.5122057", "0.510715", "0.50821364", "0.50786346", "0.5072173", "0.50637865", "0.50570357", "0.505323", "0.50277853", "0.5014339", "0.497700...
0.7170837
0
scans all rss feeds for new
def updateScript(dbconnection): cursor = dbconnection.cursor() cursor.execute("select rss, name, source from podcasts;") rssArray = cursor.fetchall() for rss in rssArray: print("chekcing name " + str(rss[1])) url = str(rss[0]) name = str(rss[1]) source = str(rss[2]) rssArray = DatabaseInteract.rssCheck(name, source, url) for item in rssArray: if(DatabaseInteract.checkIfExists(dbconnection, item[0]) == False): DatabaseInteract.insertClip(dbconnection, item[2], name, item[3], item[1], item[0])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def scanFeedList(self): \r\n data = self.feed_handler.listScanFeeds()\r\n data = data[:MAX_FEEDS_SCAN]\r\n for idx, feed in enumerate(data):\r\n print \"feeds ... / [%s/%s] (%s docs:%s passed)\" % (idx, len(data),self.feed_item_ctr, self.fe...
[ "0.7721408", "0.7200292", "0.6963098", "0.67966145", "0.67705613", "0.6618283", "0.6454477", "0.6439002", "0.6414561", "0.63198656", "0.62931746", "0.6283974", "0.6249403", "0.62478334", "0.62359893", "0.6225703", "0.6203846", "0.6194489", "0.6180839", "0.6162735", "0.6159219...
0.0
-1
Waits for the running transcription processes to end (2 min intervals). \n Then deletes everything in the 'podcasts' folder, parses all transcripts, and updates the databases
def resetScript(dbConnection, maxConcurrent): while (Tools.numRunningProcesses() != 0): # wait for the transcriptions to end. Pings every 2 mins time.sleep(120) emptyPodcastFolder = Tools.cleanupFolder("podcasts") DatabaseInteract.refreshDatabase(dbConnection)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def task_queue_podcasts():\n data = get_task_arguments()\n user_uid = data[\"user_uid\"]\n\n client = google.cloud.storage.Client()\n bucket = client.get_bucket(settings.PODCAST_STORAGE_BUCKET)\n podcasts = Podcast.get_user_podcasts(user_uid)\n for podcast in podcasts:\n old_entries = [ent...
[ "0.5957652", "0.5755388", "0.54659915", "0.5391661", "0.5354948", "0.5351097", "0.5270798", "0.5212245", "0.5207515", "0.5192871", "0.5167002", "0.5137688", "0.5125688", "0.51148933", "0.5111066", "0.5101877", "0.5099718", "0.509771", "0.50841075", "0.5051348", "0.5043869", ...
0.67138106
0
Requires dbconnection and the filename (location) of the file being parsed
def parseUpload(dbconnection, fileName): nhContent = ParseText.nohupTranscriptionContent(fileName) count = 0 while count < len(nhContent[0]): try: rtf = nhContent[0][count] transcription = nhContent[1][count].replace("'", "''").replace("_", "") dbID = nhContent[2][count].replace(".", "") duration = nhContent[3][count] DatabaseInteract.insertTranscription(dbconnection, rtf, transcription, duration, dbID) count += 1 except: print("couldnt upload one at index " + str(count)) count += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, db_file):\n pass", "def import_db(import_file):\n import_data(import_file)", "def load_file():\n global list_of_table, data_base, new_data\n open_name = askopenfilename()\n\n if Path(open_name).suffix == '.db':\n data_base = open_name\n data_base = str(data_bas...
[ "0.67936474", "0.6777492", "0.6385129", "0.6375183", "0.63454866", "0.6284782", "0.6269853", "0.6250333", "0.6244061", "0.6234011", "0.6226429", "0.6214333", "0.6204602", "0.61976486", "0.61935747", "0.61779267", "0.61766285", "0.6129", "0.6126773", "0.60768723", "0.60636026"...
0.0
-1
This parses the content of nohup. The size of nohup is basically unlimited but each line has to be under 300000 characters(?). This then returns the following...\n\n index 0 a list of all the occurences of realTimeFactor\n index 1 a list of all the occurences of transcriptions\n index 2 a list of all the occurences of the transcription ID\n index 3 a list of all the occurences of the total transcription time.\n\n \n\n \\Example usage\n parsedContent = nohupTranscriptionContent("ok.txt")
def nohupTranscriptionContent(filePath): try: continu = True fileContent = "" f = open(filePath, 'r') while (continu): temp = f.readline(900000) if(len(temp) == 0): continu = False else: fileContent += temp results = [] realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent) results.append(realTimeFactor) transcription = re.findall(r'utterance-id(.*?) (.*?)\n', fileContent) transcriptionList = [] transcriptionIDList = [] for item in transcription: if(len(item[1]) > 1000): transcriptionIDList.append(item[0]) transcriptionList.append(item[1]) results.append(transcriptionList) results.append(transcriptionIDList) transcriptionTime = re.findall(r'seconds / (.*?) seconds\.', fileContent) results.append(transcriptionTime) return results except Exception as e: Tools.writeException("nohupTranscriptionContent", e) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fileTranscriptionContent(filePath):\n try:\n continu = True\n f = open(filePath, 'r')\n fileContent = \"\"\n while (continu):\n temp = f.readline(300000)\n if(len(temp) == 0):\n continu = False\n ...
[ "0.5664039", "0.5516081", "0.520067", "0.50546473", "0.5002102", "0.49736872", "0.48318633", "0.47706318", "0.47663316", "0.4757304", "0.47494912", "0.47263288", "0.47130182", "0.47129261", "0.4693467", "0.46848562", "0.46799994", "0.4672537", "0.46613166", "0.4635302", "0.46...
0.754473
0
This parses the content of the transcription file. The size of the file can basically be unlimited but each line has to be under 300000 characters(?). This then returns the following...\n\n index 0 url\n index 1 realTimeFactor\n index 2 transcription\n
def fileTranscriptionContent(filePath): try: continu = True f = open(filePath, 'r') fileContent = "" while (continu): temp = f.readline(300000) if(len(temp) == 0): continu = False else: fileContent += temp results = [] f.close() url = re.findall(r'URL:(.*?)\n', fileContent) results.append(url) realTimeFactor = re.findall(r'Timing stats: real-time factor for offline decoding was (.*?) = ', fileContent) results.append(realTimeFactor) transcription = re.findall(r'utterance-id1 (.*?)\n', fileContent) for item in transcription: if(len(item) > 500): results.append(item.replace("'", "''")) if((len(results[0]) > 0) and (len(results[1]) > 0) and (len(results[2]) > 0)): return results else: Tools.writeException("fileTranscriptionContent", "ERROR attempted to parse " + filePath + " but got " + str(results)) return False except Exception as e: Tools.writeException("fileTranscriptionContent", e)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def nohupTranscriptionContent(filePath):\n try:\n continu = True\n fileContent = \"\"\n f = open(filePath, 'r')\n while (continu):\n temp = f.readline(900000)\n if(len(temp) == 0):\n continu = False\n ...
[ "0.67881304", "0.626287", "0.6229017", "0.615032", "0.61068505", "0.61058336", "0.60824823", "0.6019963", "0.59705794", "0.5939803", "0.5923286", "0.5885868", "0.5883228", "0.583081", "0.5810255", "0.5807429", "0.5763637", "0.5748194", "0.5746666", "0.5737573", "0.5734013", ...
0.7538108
0
deletes all contents of the specified folder (but not the folder itself).\n returns true if successful. False if an error was thrown or the number of running processes is not = 0
def cleanupFolder(folderName): try: if(Tools.numRunningProcesses() == 0): process = subprocess.call('rm -r ./' + folderName + '/*', shell=True) return True else: return False except Exception as e: Tools.writeException("cleanupFolder", e) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_folder(path: str):\n try:\n if os.path.exists(path):\n shutil.rmtree(path)\n return True\n except:\n print(\"An error occured.\")", "def _rm(folder):\n import os\n import shutil\n for the_file in os.listdir(folder):\n file_path = os...
[ "0.65608245", "0.6207403", "0.60895413", "0.6053084", "0.60113263", "0.5985281", "0.5914889", "0.58811086", "0.58790207", "0.5868364", "0.5852713", "0.5824594", "0.5818425", "0.58163047", "0.5810179", "0.57717085", "0.5769049", "0.5744798", "0.5744438", "0.5740823", "0.572649...
0.70612246
0
gets the number of runnning transcription processes
def numRunningProcesses(): try: proc = subprocess.run("ps -Af|grep -i \"online2-wav-nnet3-latgen-faster\"", stdout=subprocess.PIPE, shell=True) np = (len(str(proc.stdout).split("\\n")) - 3) if(np == None): np = 0 return np except Exception as e: Tools.writeException("numRunningProcesses", e) return -1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def num_processes():\n return 1", "def num_processes(self):\n return 1", "def num_processes(self, new_value):", "def n_worker(self):\n return self.redis.pubsub_numsub(MSG)[0][-1]", "def GetNumberOfResultsProcessed(self) -> int:\n return self.i", "def numRunning(self):\n #with self....
[ "0.7056694", "0.6808125", "0.6725539", "0.66032845", "0.64222586", "0.6406968", "0.6369945", "0.6350684", "0.6319151", "0.6316894", "0.631224", "0.62300086", "0.62215966", "0.6156739", "0.6135421", "0.6128718", "0.61258286", "0.611203", "0.6101045", "0.60927176", "0.6082708",...
0.73324955
0
Writes Exception given the string format of the class name and the 'e' in any Exception as e premise
def writeException(className, exceptionString): errorFile = open("error.log", 'a') errorFile.write("ERROR occured in " + className + " at " + str(datetime.now()) + " with the following message\n" + str(exceptionString) + "\n\n") errorFile.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log_format_error(caught_exception, event_str):\n\tcheck_type(caught_exception, Exception)\n\tcheck_type(event_str, StringType)\n\t\n\treturn '{0}, Class: {1}:{2}'.format(event_str, str(type(caught_exception)), caught_exception)", "def create_exception(self, msg: str):", "def format_exception(exception_type...
[ "0.6879372", "0.66124326", "0.66071355", "0.6428653", "0.6300448", "0.62846094", "0.617269", "0.60398024", "0.60369265", "0.5990471", "0.59839076", "0.5983831", "0.5976452", "0.5947421", "0.59470266", "0.5942072", "0.5938443", "0.58877414", "0.58695066", "0.58431464", "0.5832...
0.65568435
3
Returns with the filename of the first file in the given directory. Just provide the directory's name with no leading './'
def getFirstFile(folderName): listFiles = subprocess.run("ls ./" + folderName, shell=True, stdout=subprocess.PIPE) fileName = re.search(r"b'(.*?)\\n", str(listFiles.stdout))[1] if(len(fileName) > 0): return fileName else: return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_filename(self):\n \n for f in os.listdir(self.get_directory()):\n if os.path.isfile(os.path.join(self.get_directory(), f)):\n return f\n \n return None", "def get_first_file(cmds):\n for cmd in cmds:\n all_files = glob.glob(c...
[ "0.7349085", "0.7295504", "0.7234868", "0.6979272", "0.6957857", "0.68544894", "0.6841921", "0.67179877", "0.66709983", "0.66419125", "0.65900755", "0.65814984", "0.65654624", "0.65107995", "0.6479264", "0.64599794", "0.6448274", "0.64449316", "0.6437284", "0.6429767", "0.640...
0.6773866
7
Does everything you need to transcribe a podcast given the filename\n Download podcast, wait 40 seconds, change podcast to .wav, wait 10 seconds, remove the .mp3 file, run the transcription
def transcribeAll(service, url, fileName): if(service == "omny.fm"): url = url.replace(".mp3","") + ".mp3" subprocess.Popen("wget -c -O ./podcasts/" + fileName + ".mp3 " + url + " && sleep 40 && ffmpeg -i ./podcasts/" + fileName + ".mp3 -acodec pcm_s16le -ac 1 -ar 8000 ./podcasts/" + fileName + ".wav && sleep 10 && rm ./podcasts/" + fileName + ".mp3 && nohup ./online2-wav-nnet3-latgen-faster --online=false --do-endpointing=false " + "--frame-subsampling-factor=3 --config=online.conf --max-mem=2000000000 --max-active=7000 --beam=15.0 --lattice-beam=6.0 " + "--acoustic-scale=1.0 --word-symbol-table=words.txt final.mdl HCLG.fst 'ark:echo utterance-id" + fileName + " utterance-id" + fileName + "|' 'scp:echo utterance-id" + fileName + " ./podcasts/" + fileName + ".wav|' 'ark:/dev/null' &", shell=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transcribe_audio_file(filename):\n url = 'https://api.nexiwave.com/SpeechIndexing/file/storage/' + USERNAME +'/recording/?authData.passwd=' + PASSWORD + '&auto-redirect=true&response=application/json'\n\n # To receive transcript in plain text, instead of html format, comment this line out (for SMS, for e...
[ "0.66069883", "0.6560382", "0.6439472", "0.626135", "0.61909765", "0.6083509", "0.6073147", "0.5968924", "0.5962577", "0.5942388", "0.58873165", "0.58815217", "0.5879562", "0.5855016", "0.5853225", "0.5847647", "0.58247757", "0.5802723", "0.5779541", "0.5740425", "0.57340455"...
0.7504941
0
HomePage > the homepage of the podcast (NOT NULL)\n Name > The name of the podcast (NOT NULL)\n Description > a short description of the podcast\n Category > The category of the podcast\n Source > The service of which the podcast is being accessed through\n ImageURI > Podcast cover art\n Web > The website of the podcaster\n Twitter > The twitter account of the podcaster\n Facebook > the facebook account of the podcaster\n LastUpdated > the date that this was last updated.\n RSS > The URL of the podcasts RSS feed\n If you dont have values for a certain field just pass it in as an empty string
def uploadPodcast(dbConnection, homepage, name, description, category, source, imageurl, web, twitter, facebook, rss): try: cursor = dbConnection.cursor() name = name.replace("'", "''") description = description.replace("'", "''") cursor.execute("""INSERT INTO podcasts(homepage, name, description, category, source, imageuri, web, twitter, Facebook, rss) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);""", (homepage, name, description, category, source, imageurl, web, twitter, facebook, rss)) dbConnection.commit() cursor.close() return True except Exception as e: Tools.writeException("insertHeader", "e") return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def homepage():\n return (\n f\"Welcome to Hawaii - Climate Page<br/>\"\n f\"<br/>\"\n f\"This site has data from 01-01-2010 to 08-23-2017<br/>\"\n f\"<br/>\"\n f\"Available Pages:<br/>\"\n f\"<br/>\"\n f\"<br/>\"\n f\" Station Information<br/>\"\n ...
[ "0.5254778", "0.522776", "0.5127181", "0.50627685", "0.48750266", "0.4832583", "0.4781134", "0.47785923", "0.47693622", "0.47292462", "0.47047243", "0.46810403", "0.4663331", "0.46538934", "0.4653328", "0.46179616", "0.45948377", "0.4588498", "0.45725253", "0.45705068", "0.45...
0.52180666
2
audiourl > url of the transcriptions mp3 is stored here (NOT NULL)\n PodcastName > THe name of the show (references podcast(name))\n Description > The provided summary of that days podcast\n Date > The date that podcast aired (parsed to mmddyyyy\n Title > The title of that specific podcast\n Duration > the running time of that podcast (use strptime to parse, need mmddyyyy\n pending > right now will be false because were not transcribing\n (dateTranscribed) > date of transcription (updated later)\n
def insertClip(dbConnection, audiourl, podcastName, description, parsedDate, title): try: cursor = dbConnection.cursor() title = title.replace("'", "''") cursor.execute("INSERT INTO transcriptions(audiourl, realtimefactor, podcastname, transcription, description, date, title, pending, datetranscribed) VALUES('" + audiourl + "', NULL, '" + podcastName + "', NULL, '" + description + "', '" + parsedDate + "', '" + title + "', FALSE, NULL);") dbConnection.commit() cursor.close() return True except: return False return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tokenize_podcast_transcript(args):\n DATA_DIR = os.path.join(os.getcwd(), 'data', args.project_id)\n story_file = os.path.join(DATA_DIR, 'podcast-transcription.txt')\n\n # Read all words and tokenize them\n with open(story_file, 'r') as fp:\n data = fp.readlines()\n\n data = [item.spl...
[ "0.5720898", "0.56445146", "0.5573998", "0.55513567", "0.55373365", "0.5462284", "0.5397166", "0.53909075", "0.5349029", "0.53120935", "0.51405776", "0.51242185", "0.51071036", "0.50734013", "0.5052101", "0.50449437", "0.50282156", "0.5022951", "0.49973148", "0.49737933", "0....
0.5656862
1
This basically uploads the arguents to the database, returning false and throwing an error if unsuccesful (or true otherwise)\n
def insertTranscription(dbConnection, realtimefactor, transcription, duration, dbID): try: cursor = dbConnection.cursor() cursor.execute("UPDATE transcriptions SET realtimefactor = '" + realtimefactor + "', transcription = '" + transcription + "', datetranscribed = now(), duration = '" + duration + "' WHERE id = '" + str(dbID) + "';") dbConnection.commit() cursor.close() return True except Exception as e: Tools.writeException("uploadTranscriptionData", e) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_to_db( self, *args ):\n try:\n toSave = [ a for a in args ]\n # save them\n self.session.add_all( toSave )\n self.session.commit()\n self._fire_save_notification()\n return True\n except Exception as e:\n print( \"...
[ "0.6018636", "0.56575793", "0.5548777", "0.5302964", "0.52453786", "0.52212244", "0.5169866", "0.51535535", "0.5146376", "0.5134887", "0.5131358", "0.51230496", "0.51160276", "0.5075478", "0.506222", "0.5057487", "0.5046628", "0.50436354", "0.50288576", "0.50007904", "0.49962...
0.0
-1
checks the database for empty transcription entries, returns a list with \n\n index 0 audiourl\n index 1 id\n index 2 podcast name\n index 3 service of podcast
def checkPre(dbConnection): cursor = dbConnection.cursor() cursor.execute("SELECT audiourl, T.id, podcastName, source FROM transcriptions AS T JOIN podcasts as P ON P.name = T.podcastname WHERE COALESCE(T.transcription, '') = '' AND pending = FALSE LIMIT 1;") entry = cursor.fetchone() cursor.close() return entry
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listMissingItems():\n global doc\n texts = doc.getElementsByTagName(\"text\")\n for t in texts:\n xmlid = t.getAttribute(\"id\")\n for table in langtables:\n execute(\"SELECT * FROM %s WHERE xmlid=\\\"%s\\\"\" % (table, xmlid))\n rows = cursor.fetchall()\n ...
[ "0.5676291", "0.5263004", "0.5223297", "0.52066535", "0.5155342", "0.51544493", "0.51198375", "0.50936806", "0.50230044", "0.49982882", "0.49966055", "0.49849313", "0.4925892", "0.48985046", "0.48830864", "0.48502585", "0.48242763", "0.48231605", "0.48141956", "0.4793201", "0...
0.6268205
0
This is to be used when both the podcasts folder and transcripts folder are empty.\n For every entry in the database that has an empty transcript and a pending flag set to true, change the pending flag to false. Honestly this is used to deal with a weird bug and should be run every now and then
def refreshDatabase(dbConnection): try: cursor = dbConnection.cursor() cursor.execute("UPDATE transcriptions SET pending = FALSE WHERE COALESCE(transcription, '') = '';") dbConnection.commit() cursor.close() except Exception as e: Tools.writeException("refreshDatabase", e)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def checkPre(dbConnection):\n cursor = dbConnection.cursor()\n cursor.execute(\"SELECT audiourl, T.id, podcastName, source FROM transcriptions AS T JOIN podcasts as P ON P.name = T.podcastname WHERE COALESCE(T.transcription, '') = '' AND pending = FALSE LIMIT 1;\")\n entry = cursor.fetchone()\...
[ "0.56695527", "0.5449261", "0.5254157", "0.52069896", "0.519826", "0.5033168", "0.4992053", "0.4885781", "0.48527905", "0.48117176", "0.48087612", "0.4767459", "0.47640154", "0.47548455", "0.47459507", "0.47442943", "0.47228667", "0.47036612", "0.46930197", "0.468822", "0.467...
0.503461
5
given title, if the podcast is in the database already return true. False if the podcast does not exist in the database
def checkIfExists(dbconnection, title): cursor = dbconnection.cursor() output = "" title = title.replace("'", "''") try: cursor.execute("SELECT * FROM transcriptions WHERE title = '" + title + "';") dbconnection.commit() output = cursor.fetchone() cursor.close() if(output is None): return False else: return True except: dbconnection.rollback() cursor.execute("SELECT * FROM transcriptions WHERE title = '" + title + "';") dbconnection.commit() output = cursor.fetchone() cursor.close() if(output is None): return False else: return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_if_entry_exists(title: str) -> bool:\n conn = sqlite3.connect('rss.db')\n c = conn.cursor()\n try:\n c.execute(\n \"\"\"select * from entries where title = ?\"\"\",\n (title,)\n )\n records = c.fetchall()\n return len(records) > 0\n except...
[ "0.7373473", "0.63370997", "0.6241188", "0.62162536", "0.6169572", "0.6139767", "0.6122975", "0.5888012", "0.5878836", "0.58756447", "0.58756447", "0.58615744", "0.58349437", "0.583252", "0.5811973", "0.5811524", "0.58094037", "0.57614815", "0.57470703", "0.57385516", "0.5726...
0.7353994
1
Checks the rss urls in the database and returns an array of each of the important fields
def rssCheck(podcastName, source, url): try: headers = {'Accept':'text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8' ,'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763'} req = requests.get(url, headers=headers) root = etree.fromstring(req.text) rssArray = [] for element in root[0].iter('item'): try: title = element.find("title").text.replace("''", "'") description = element.find("description").text.replace("<strong>", "").replace("</strong>", "").replace("&amp;", "and").replace("'","''") date = element.find("pubDate").text date = date.split(" ") date = datetime.strptime(date[1] + date[2] + date[3], "%d%b%Y") dateString = str(date.month) + "-" + str(date.day) + "-" + str(date.year) url = ResolveRouter.urlRouter(podcastName, source, element) except: print("error in XMLDetailsDebug parsing issue") if(len(title) > 0 and len(description) > 0 and len(dateString) > 0 and len(url) > 0): rssArray.append([title, dateString, url, description]) else: print("error in XMLDetailsDebug parsing issue") return rssArray except Exception as e: print(e) Tools.writeException("getXMLDetailsDebug", e)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_rss_infos():\n\n url_rss_lib = \"http://www.liberation.fr/rss\"\n soup = utils.recovery_flux_url_rss(url_rss_lib)\n\n rss_items = soup.find_all(\"li\")\n\n rss_list = []\n\n link_rss = []\n\n for ri in rss_items:\n if ri.get(\"class\") == ['rss-item']:\n rss_list.append(...
[ "0.7014149", "0.65106124", "0.6413672", "0.63931924", "0.6380389", "0.6300671", "0.6237712", "0.62265265", "0.62018305", "0.6194729", "0.6165013", "0.61139023", "0.6107573", "0.6098303", "0.60845596", "0.6067579", "0.60539395", "0.6052752", "0.60521805", "0.58949435", "0.5880...
0.64719254
2
generate the CUSPARSE FFI definition
def generate_cffi_cdef( cuda_include_path=cuda_include_path, cusparse_header=cusparse_header, cffi_out_file=None): with open(cusparse_header, 'r') as f: cusparse_hdr = f.readlines() # in some version cusparse_v2.h just points to cusparse.h, so read it # instead for line in cusparse_hdr: # if v2 header includes cusparse.h, read that one instead if line.startswith('#include "cusparse.h"'): cusparse_header = os.path.join(cuda_include_path, 'cusparse.h') with open(cusparse_header, 'r') as f: cusparse_hdr = f.readlines() cusparse_hdr = [_remove_comment(l) for l in cusparse_hdr] # skip lines leading up to first typedef for idx, line in enumerate(cusparse_hdr): if line.startswith('typedef'): start_line = idx break # skip closing #if defined logic for idx, line in enumerate(cusparse_hdr[start_line:]): if line.startswith('#if defined(__cplusplus)') or \ 'Define the following symbols for the new API' in line: # second match is to avoid CFFI compilation errror due to the final # define statements in v4.1 through v5.5 end_line = start_line + idx break # define other data types needed by FFI # ... 
will be filled in from cuComplex.h by the C compiler cffi_cdef = """ typedef struct CUstream_st *cudaStream_t; typedef struct float2 { ...; } float2; typedef float2 cuFloatComplex; typedef float2 cuComplex; typedef struct double2 { ...; } double2; typedef double2 cuDoubleComplex; typedef float cufftReal; typedef double cufftDoubleReal; typedef cuComplex cufftComplex; typedef cuDoubleComplex cufftDoubleComplex; typedef enum cudaDataType_t { CUDA_R_16F= 2, // real as a half CUDA_C_16F= 6, // complex as a pair of half numbers CUDA_R_32F= 0, // real as a float CUDA_C_32F= 4, // complex as a pair of float numbers CUDA_R_64F= 1, // real as a double CUDA_C_64F= 5, // complex as a pair of double numbers CUDA_R_8I= 3, // real as a signed char CUDA_C_8I= 7, // complex as a pair of signed char numbers CUDA_R_8U= 8, // real as a unsigned char CUDA_C_8U= 9, // complex as a pair of unsigned char numbers CUDA_R_32I= 10, // real as a signed int CUDA_C_32I= 11, // complex as a pair of signed int numbers CUDA_R_32U= 12, // real as a unsigned int CUDA_C_32U= 13 // complex as a pair of unsigned int numbers } cudaDataType; typedef enum libraryPropertyType_t //GRL: added this for cuda 8.0 { MAJOR_VERSION, MINOR_VERSION, PATCH_LEVEL } libraryPropertyType; /* definitions from cusparse header below this point */ """ cffi_cdef += ''.join(cusparse_hdr[start_line:end_line]) """ don't use the _v2 versions of the function names defined in CUDA v4.1 through v5.5 """ cffi_cdef = cffi_cdef.replace('_v2(', '(') if os.name == 'nt': # Win cffi_cdef = cffi_cdef.replace('CUSPARSEAPI', '__stdcall') else: # posix, etc cffi_cdef = cffi_cdef.replace('CUSPARSEAPI', '') if cffi_out_file is not None: # create specified output directory if it doesn't already exist out_dir = os.path.dirname(cffi_out_file) if out_dir and not os.path.exists(out_dir): os.makedirs(out_dir) with open(cffi_out_file, 'w') as f: f.write(cffi_cdef) return cffi_cdef
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_c_header(self):\n res = \\\n\"\"\"PyThreadState* ___madz_LANG_python_thread_state; //Holds Thread State for this interpreter\nPyObject *___madz_LANG_python_wrapper_module; //Hold Pointer to the _madz.py file representing this plugin\ntypedef struct{{\n{function_pointers}\n}}___madz_LANG_python_TYPE...
[ "0.6575036", "0.6416403", "0.62135506", "0.61577857", "0.6081804", "0.6024244", "0.58753806", "0.5823587", "0.573958", "0.5726746", "0.56846005", "0.56105137", "0.55705476", "0.5551445", "0.5547371", "0.55256623", "0.55013", "0.54184884", "0.539809", "0.53861326", "0.5378367"...
0.6747278
0
generate python_wrapper function body
def build_func_body(func_name, arg_dict, return_type): body = "" arg_list = "" # the following are pointers to scalar outputs # Note: pBufferSize was renamed pBufferSizeInBytes in v6.5 scalar_ptr_outputs = ['nnzTotalDevHostPtr', 'pBufferSize', 'pBufferSizeInBytes', 'resultDevHostPtr'] is_creator = 'cusparseCreate' in func_name is_getter = 'cusparseGet' in func_name if return_type == 'cusparseStatus_t' and not (is_creator or is_getter): is_return = False else: is_return = True # else: return_str = '' for k, v in arg_dict.items(): """ set some flags based on the name/type of the argument will use these flags to determine whether and how to call ffi.new or ffi.cast on each variable """ is_ptr = '*' in v is_cusparse_type = '_t' in v is_cusparse_ptr = is_ptr and is_cusparse_type is_output_scalar = k in scalar_ptr_outputs if k in ['alpha', 'beta']: is_scalar = True else: is_scalar = False if is_getter: is_gpu_array = False else: is_gpu_array = is_ptr and (not is_cusparse_ptr) and (not is_scalar) if 'Complex' in v: is_complex = True else: is_complex = False # convert variable to appropriate type for the FFI if is_output_scalar: # for scalar outputs make a new pointer body += "%s = ffi.cast('%s', %s)\n" % (k, v, k) elif is_getter and is_ptr and (return_type == 'cusparseStatus_t'): # any pointers in cusparseGet* are new outputs to be created body += "%s = ffi.new('%s')\n" % (k, v) elif is_gpu_array: # pass pointer to GPU array data (use either .ptr or .gpudata) body += "%s = ffi.cast('%s', %s.ptr)\n" % (k, v, k) elif is_cusparse_ptr: if is_creator: # generate custom cusparse type body += "%s = ffi.new('%s')\n" % (k, v) else: # cast to the custom cusparse type body += "%s = ffi.cast('%s', %s)\n" % (k, v, k) elif is_ptr and is_scalar: # create new pointer, with value initialized to scalar if is_complex: # complex case is a bit tricky. 
requires ffi.buffer body += "%sffi = ffi.new('%s')\n" % (k, v) if 'cusparseC' in func_name: body += "ffi.buffer(%sffi)[:] = \ np.complex64(%s).tostring()\n" % (k, k) elif 'cusparseZ' in func_name: body += "ffi.buffer(%sffi)[:] = \ np.complex128(%s).tostring()\n" % (k, k) else: body += "%s = ffi.new('%s', %s)\n" % (k, v, k) elif is_ptr or v == 'cudaStream_t': # case non-scalar pointer to appropriate type body += "%s = ffi.cast('%s', %s)\n" % (k, v, k) else: # don't need explicit cast for plain int, float, etc pass # build the list of arguments to pass to the API if is_ptr and is_scalar and is_complex: # take into account modified argument name for complex scalars arg_list += "%sffi, " % k else: arg_list += "%s, " % k # add the function call and optionally return the result last_key = k arg_list = arg_list[:-2] # remove trailing ", " if is_getter and return_type != 'cusparseStatus_t': body += "return ffi_lib.%s(%s)\n" % (func_name, arg_list) else: # check cusparseStatus_t state before returning call_str = "status = ffi_lib.%s(%s)\n" % (func_name, arg_list) body += split_line(call_str, break_pattern=', ', nmax=76) body += "cusparseCheckStatus(status)\n" if is_return: # len(arg_dict) == 2) is to avoid return for cusparseGetLevelInfo if is_creator or (is_getter and (len(arg_dict) == 2)): body += "return %s[0]\n" % last_key else: body += "#TODO: return the appropriate result" body += '\n\n' return reindent(body, numSpaces=4, lstrip=False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wrapper(*args):", "def make_c_function_stubs(self):\n fn =\\\n\"\"\"{rettype} {fnname}({args}){{\n {rettype} ret;\n\n ret = {cast_and_deref}___madz_LANG_python_OUTPUT.{nodename}({argnames});\n\n return ret;\n}}\n\n\"\"\"\n fn_no_return =\\\n\"\"\"{rettype} {fnname}({args}){{\n ___ma...
[ "0.6718689", "0.6527611", "0.64795184", "0.6186041", "0.6184181", "0.61080533", "0.60333836", "0.5982096", "0.59634656", "0.5940715", "0.5934572", "0.59340453", "0.59037447", "0.59012115", "0.58594495", "0.585627", "0.58455956", "0.5824993", "0.58232915", "0.5799322", "0.5793...
0.54531014
47
Test that both transformer implementations produce the same outputs when applied to a properlysized sequence.
def test_basic_equivalence(cell_cls, num_layers): with tf.Graph().as_default(): with tf.Session() as sess: pos_enc = positional_encoding(4, 6, dtype=tf.float64) in_seq = tf.get_variable('in_seq', shape=(3, 4, 6), initializer=tf.truncated_normal_initializer(), dtype=tf.float64) cell = cell_cls(pos_enc, num_layers=num_layers, num_heads=2, hidden=24) actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64) with tf.variable_scope('rnn', reuse=True): with tf.variable_scope('transformer', reuse=True): expected = in_seq + pos_enc for _ in range(num_layers): expected = transformer_layer(expected, num_heads=2, hidden=24) sess.run(tf.global_variables_initializer()) actual, expected = sess.run((actual, expected)) assert not np.isnan(actual).any() assert not np.isnan(expected).any() assert actual.shape == expected.shape assert np.allclose(actual, expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_random_transform_generator():\n # Check shapes are correct Batch Size = 1 - Pass\n batch_size = 1\n transforms = layer_util.random_transform_generator(batch_size, 0)\n assert transforms.shape == (batch_size, 4, 3)\n\n # Check numerical outputs are correct for a given seed - Pass\n batch_...
[ "0.6028225", "0.5944386", "0.58593905", "0.5800506", "0.57926905", "0.57714605", "0.5770468", "0.57545334", "0.57405823", "0.57333404", "0.5712127", "0.5697243", "0.56768686", "0.56742346", "0.56673914", "0.56589234", "0.5644305", "0.56440634", "0.5639191", "0.56258535", "0.5...
0.5383693
61
Test the cell when the input sequence is longer than the time horizon.
def test_past_horizon(cell_cls): with tf.Graph().as_default(): with tf.Session() as sess: pos_enc = positional_encoding(4, 6, dtype=tf.float64) in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer=tf.truncated_normal_initializer(), dtype=tf.float64) cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24) actual, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64) def apply_regular(sequence): with tf.variable_scope('rnn', reuse=True): with tf.variable_scope('transformer', reuse=True): expected = sequence + pos_enc for _ in range(3): expected = transformer_layer(expected, num_heads=2, hidden=24) return expected expected = tf.concat([apply_regular(in_seq[:, :-1]), apply_regular(in_seq[:, 1:])[:, -1:]], axis=1) sess.run(tf.global_variables_initializer()) actual, expected = sess.run((actual, expected)) assert not np.isnan(actual).any() assert not np.isnan(expected).any() assert actual.shape == expected.shape assert np.allclose(actual, expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exceeded(self):\r\n return int(time.time()) - self.start_time >= self.length", "def is_time(self) -> bool:\n return self.times > 1", "def check_time():\n times = get_times()\n time_difference = abs((times['local'] - times['target']).total_seconds())\n return time_difference < post_ti...
[ "0.6468879", "0.6448804", "0.5934962", "0.5849655", "0.57931024", "0.57145846", "0.5672567", "0.56684816", "0.5625143", "0.56138664", "0.5613715", "0.55911756", "0.55447626", "0.55374813", "0.5525327", "0.5507069", "0.54900634", "0.54690355", "0.54398084", "0.5436942", "0.543...
0.0
-1
Test the cell when the states are split up and recombined from different timesteps.
def test_mismatched_starts(cell_cls): with tf.Graph().as_default(): with tf.Session() as sess: pos_enc = positional_encoding(5, 6, dtype=tf.float64) in_seq = tf.get_variable('in_seq', shape=(3, 5, 6), initializer=tf.truncated_normal_initializer(), dtype=tf.float64) cell = cell_cls(pos_enc, num_layers=3, num_heads=2, hidden=24) _, states_1 = tf.nn.dynamic_rnn(cell, in_seq[:, :1], dtype=tf.float64) _, states_2 = tf.nn.dynamic_rnn(cell, in_seq[:, :2], dtype=tf.float64) _, states_3 = tf.nn.dynamic_rnn(cell, in_seq[:, :3], dtype=tf.float64) new_states = tuple(tf.stack([s2[0], s3[1], s1[2]], axis=0) for s1, s2, s3 in zip(states_1, states_2, states_3)) full_seq, _ = tf.nn.dynamic_rnn(cell, in_seq, dtype=tf.float64) expected = tf.stack([full_seq[0, 2:4], full_seq[1, 3:5], full_seq[2, 1:3]], axis=0) inputs = tf.stack([in_seq[0, 2:4], in_seq[1, 3:5], in_seq[2, 1:3]], axis=0) actual, _ = tf.nn.dynamic_rnn(cell, inputs, initial_state=new_states) sess.run(tf.global_variables_initializer()) actual, expected = sess.run((actual, expected)) assert not np.isnan(actual).any() assert not np.isnan(expected).any() assert actual.shape == expected.shape assert np.allclose(actual, expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_transition_function_empty_grid(self):\r\n map_file_path = os.path.abspath(os.path.join(__file__, MAPS_DIR, 'empty-8-8/empty-8-8.map'))\r\n grid = MapfGrid(parse_map_file(map_file_path))\r\n\r\n # agents are starting a\r\n agent_starts = ((0, 0), (7, 7))\r\n agents_goals ...
[ "0.62299633", "0.6125553", "0.59904784", "0.5965185", "0.5964256", "0.59597164", "0.5947608", "0.5937805", "0.58110744", "0.5780508", "0.5778231", "0.5771809", "0.57424587", "0.5713898", "0.57090956", "0.5698011", "0.567906", "0.56286466", "0.5621524", "0.56178564", "0.561581...
0.5819505
8
Set up the Opple light platform.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: name = config[CONF_NAME] host = config[CONF_HOST] entity = OppleLight(name, host) add_entities([entity]) _LOGGER.debug("Init light %s %s", host, entity.unique_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configure(self):\n\n self.platform.configure()", "def platform_start(self):\n self.platform.start()", "def setUp(self):\n self.hass = get_test_home_assistant()\n controller_mock = mock.MagicMock()\n dev_dict = {\"address\": \"a1\", \"name\": \"fake_light\", \"brightness_l...
[ "0.670556", "0.669467", "0.64281476", "0.6402788", "0.6392791", "0.6329436", "0.6317174", "0.6247289", "0.61651385", "0.6147113", "0.6105949", "0.60822815", "0.6056726", "0.60057616", "0.5982987", "0.5977405", "0.59728897", "0.5957383", "0.59512126", "0.59488773", "0.59368527...
0.73331904
0
Initialize an Opple light.
def __init__(self, name, host): self._device = OppleLightDevice(host) self._name = name self._is_on = None self._brightness = None self._color_temp = None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, **kwargs) -> None:\n super(Light, self).__init__(**kwargs)\n\n get = kwargs.get\n if get('light') is None:\n raise Exception('Light is required')\n if get('button') is None:\n raise Exception('Button is required')\n\n self._light: LED = LE...
[ "0.72170395", "0.717331", "0.69877267", "0.69179374", "0.67970073", "0.6705011", "0.6625122", "0.662029", "0.6540736", "0.6490158", "0.64509183", "0.6437352", "0.6358101", "0.63348967", "0.6321011", "0.6236198", "0.62147874", "0.6201365", "0.6063074", "0.6034815", "0.6011192"...
0.69397825
3
Return True if light is available.
def available(self) -> bool: return self._device.is_online
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def light_is_on(self):\r\n return self._light == \"ON\"", "def light_is_on(self):\n return self._light == \"ON\"", "def have_light(self, light):\n if light > 1:\n return False\n return bool(self.light_array[light])", "def is_light(self) -> bool:\n return ATTRIBUT...
[ "0.7484702", "0.73775405", "0.7287511", "0.72777367", "0.72777367", "0.7260376", "0.72128695", "0.6995078", "0.692183", "0.6848789", "0.68302953", "0.6803893", "0.6738999", "0.6737111", "0.67068386", "0.6706666", "0.669279", "0.66660124", "0.6653982", "0.6653982", "0.66485125...
0.6739951
12
Return unique ID for light.
def unique_id(self): return self._device.mac
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unique_id(self):\n return self._light.address", "def unique_id(self):\n return self.heater.id + \"_switch\"", "def unique_id(self) -> str:\n return 'remo_device_' + self._remo_device.id + '_' + self._sensor_class", "def unique_id(self) -> str:\n return get_frigate_entity_uniqu...
[ "0.7778015", "0.72051173", "0.7008974", "0.69517404", "0.69132495", "0.6900635", "0.6889376", "0.6859422", "0.6858554", "0.68494785", "0.6847968", "0.68355125", "0.68354523", "0.68086004", "0.6788117", "0.67518526", "0.6732352", "0.6727304", "0.66908014", "0.6683743", "0.6648...
0.0
-1
Return the display name of this light.
def name(self): return self._name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def name(self):\n return self._light.name", "def get_display_name(self):\n return DisplayText(self._display_name)", "def name(self):\n return f\"{get_device_name(self._data, self._device.id)} Light\"", "def get_display_name(self):\n return self.display_name", "def get_display_na...
[ "0.8419186", "0.79949266", "0.7932655", "0.7864694", "0.77409434", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7726647", "0.7716824", "0.7705757", "0.76947725", "0.7663317", ...
0.0
-1
Return true if light is on.
def is_on(self): return self._is_on
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def light_is_on(self):\r\n return self._light == \"ON\"", "def light_is_on(self):\n return self._light == \"ON\"", "def is_on(self):\n return self._light_on", "def is_light(self) -> bool:\n return ATTRIBUTE.Light.value in self.type_data.attributes", "def is_light(self) -> bool:\...
[ "0.9035416", "0.89275813", "0.86664206", "0.8008696", "0.8008696", "0.78880787", "0.7866302", "0.7610281", "0.7565963", "0.7415077", "0.7345493", "0.7335999", "0.73116577", "0.7303292", "0.72991306", "0.7243393", "0.71792775", "0.71722543", "0.7157757", "0.7157757", "0.715775...
0.67288214
71
Return the brightness of the light.
def brightness(self): return self._brightness
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def brightness(self):\n return brightness_from_percentage(self._brightness_pct)", "def brightness(self):\n return self.get_value('bri')", "def brightness(self):\n return round((self._device.current_percentage / 100) * 255)", "def get_brightness(self):\n _lib.caca_get_dither_bright...
[ "0.8602627", "0.84503806", "0.8426228", "0.8375639", "0.8349791", "0.8339884", "0.83344954", "0.8283124", "0.81917197", "0.79680145", "0.7894601", "0.78586304", "0.78586304", "0.77162117", "0.76703614", "0.7597096", "0.75513947", "0.7543812", "0.7524217", "0.7521598", "0.7497...
0.8533625
12
Return the color temperature of this light.
def color_temp(self): return kelvin_to_mired(self._color_temp)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def color_temp(self):\n return self._color_temp", "def getTemperature(self):\n return self.temperature", "def temperature(self):\n return self._temperature", "def temperature(self):\n return self._temperature", "def temperature(self):\n return float(self._current_observat...
[ "0.8550451", "0.75972027", "0.75929296", "0.75929296", "0.7584848", "0.7554157", "0.74379694", "0.7430193", "0.73825926", "0.7336957", "0.73212904", "0.72820795", "0.72075", "0.72032225", "0.71964854", "0.71878976", "0.7180456", "0.7172465", "0.7114398", "0.7094845", "0.70741...
0.8436134
1
Return minimum supported color temperature.
def min_mireds(self): return 175
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def min_temperature(self):\n mini, maxi = ct.c_int(), ct.c_int()\n self.lib.GetTemperatureRange(ct.pointer(mini), ct.pointer(maxi))\n return mini.value", "def min_temp(self):\n if self.temperature_unit == UnitOfTemperature.CELSIUS:\n return MIN_TEMP_C\n return MIN_TE...
[ "0.75069237", "0.7210723", "0.7091037", "0.70822674", "0.70354617", "0.6973797", "0.682857", "0.6697634", "0.66812134", "0.66597176", "0.65733534", "0.6456474", "0.6444758", "0.6424821", "0.6410424", "0.63718545", "0.6371298", "0.63557136", "0.6349745", "0.6336295", "0.631036...
0.0
-1
Return maximum supported color temperature.
def max_mireds(self): return 333
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def max_temperature(self):\n mini, maxi = ct.c_int(), ct.c_int()\n self.lib.GetTemperatureRange(ct.pointer(mini), ct.pointer(maxi))\n return maxi.value", "def max_temp(self):\n if self.temperature_unit == UnitOfTemperature.CELSIUS:\n return MAX_TEMP_C\n return MAX_TE...
[ "0.75112236", "0.72960573", "0.7186571", "0.7073784", "0.6881457", "0.683345", "0.6819592", "0.6797262", "0.6717002", "0.6712087", "0.6707936", "0.6685103", "0.66778046", "0.66512823", "0.66321784", "0.65648353", "0.6525134", "0.64380866", "0.63266563", "0.6323054", "0.630750...
0.0
-1
Instruct the light to turn on.
def turn_on(self, **kwargs: Any) -> None: _LOGGER.debug("Turn on light %s %s", self._device.ip, kwargs) if not self.is_on: self._device.power_on = True if ATTR_BRIGHTNESS in kwargs and self.brightness != kwargs[ATTR_BRIGHTNESS]: self._device.brightness = kwargs[ATTR_BRIGHTNESS] if ATTR_COLOR_TEMP in kwargs and self.color_temp != kwargs[ATTR_COLOR_TEMP]: color_temp = mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]) self._device.color_temperature = color_temp
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_light_on(self):\r\n self._light = \"ON\"", "def set_light_on(self):\n self._light = \"ON\"", "def turn_on(self):\n GPIO.output(self.gpio, True) # turn on light", "def turn_on(self, **kwargs: Any) -> None:\n self._set_light(ON_STATE)", "def lightning_turnon(self):\n ...
[ "0.8915807", "0.8871167", "0.88496405", "0.88148886", "0.86926144", "0.8564589", "0.82041657", "0.819282", "0.8009568", "0.7997265", "0.798326", "0.7978372", "0.7975517", "0.7933295", "0.79127806", "0.7911189", "0.7742512", "0.7726695", "0.76945895", "0.76829916", "0.7646578"...
0.7831892
16
Instruct the light to turn off.
def turn_off(self, **kwargs: Any) -> None: self._device.power_on = False _LOGGER.debug("Turn off light %s", self._device.ip)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def turn_off(self):\n GPIO.output(self.gpio, False) # turn off light", "def set_light_off(self):\n self._light = \"OFF\"", "def set_light_off(self):\r\n self._light = \"OFF\"", "def lightning_turnoff(self):\n self.turnOff()", "def turn_off(self, **kwargs: Any) -> None:\n ...
[ "0.9141917", "0.89695406", "0.89557284", "0.89015293", "0.8897358", "0.8873913", "0.8805522", "0.87862444", "0.86670524", "0.86476165", "0.847877", "0.8473624", "0.84669805", "0.8463841", "0.8435901", "0.8370482", "0.83494097", "0.8275936", "0.82717854", "0.82705396", "0.8253...
0.87764317
8
Synchronize state with light.
def update(self) -> None: prev_available = self.available self._device.update() if ( prev_available == self.available and self._is_on == self._device.power_on and self._brightness == self._device.brightness and self._color_temp == self._device.color_temperature ): return if not self.available: _LOGGER.debug("Light %s is offline", self._device.ip) return self._is_on = self._device.power_on self._brightness = self._device.brightness self._color_temp = self._device.color_temperature if not self.is_on: _LOGGER.debug("Update light %s success: power off", self._device.ip) else: _LOGGER.debug( "Update light %s success: power on brightness %s color temperature %s", self._device.ip, self._brightness, self._color_temp, )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def light_sync(self):", "async def async_update(self):\n self._state = await self._gate.is_light_on(self._light_id)", "def update(self) -> None:\n self._light.update()\n self._state = self._light.is_on()\n self._brightness = self._light.brightness", "def lock (self):\n self...
[ "0.83932275", "0.730228", "0.6771389", "0.6599177", "0.6598202", "0.65861017", "0.65388566", "0.6511652", "0.65106404", "0.65080667", "0.646516", "0.64101493", "0.6382941", "0.63396305", "0.63342994", "0.6315549", "0.6311331", "0.630048", "0.62958086", "0.6278275", "0.6262956...
0.6158366
29
This function returns the number of elements in the numbers list that are divisible by divide.
def listDivide(numbers, divide = 2): divisible_count = 0 for i in numbers: if i % divide == 0: divisible_count += 1 return divisible_count
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listDivide(numbers, divide=2): \n counter = 0\n for num in numbers:\n if num % divide == 0:\n counter+=1\n return counter", "def listDivide(numbers, divide=2):\n newList = []\n for i in numbers:\n if i % divide == 0:\n newList.append(i)\n return len(newL...
[ "0.82095575", "0.8028148", "0.7676638", "0.724413", "0.7203808", "0.6955737", "0.6939475", "0.6889885", "0.6795731", "0.6774087", "0.6745991", "0.6704762", "0.66760755", "0.66736186", "0.6646036", "0.65987384", "0.6588523", "0.6577694", "0.6576061", "0.6554679", "0.64939314",...
0.8605654
0
This function tests the listDivide function.
def testListDivide(): assert listDivide([1,2,3,4,5]) == 2 assert listDivide([2,4,6,8,10]) == 5 assert listDivide([30, 54, 63,98, 100], divide = 10) == 2 assert listDivide([]) == 0 assert listDivide([1,2,3,4,5], 1) == 5
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testListDivide():\n listDivide([1, 2, 3, 4, 5])\n listDivide([2, 4, 6, 8, 10])\n listDivide([30, 54, 63, 98, 100], divide=10)\n listDivide([])\n listDivide([1, 2, 3, 4, 5], 1)", "def testListDivide():\n #a\n numbers = [1,2,3,4,5]\n expected = 2\n \n try:\n assert listDivi...
[ "0.8978412", "0.8315685", "0.72600734", "0.7163162", "0.6961623", "0.69122976", "0.6898121", "0.68613684", "0.68511623", "0.6756895", "0.6750342", "0.66283035", "0.66096985", "0.65842646", "0.6559477", "0.64508224", "0.6335196", "0.6329726", "0.6322206", "0.6280527", "0.62706...
0.88921094
1
saves the source content of the webpage to a file
def url_to_file(url): try: r = get(url) print(r.status_code) if r.status_code == 200: try: with open(f'print-{date}.html', 'w') as f: f.write(r.text) except UnicodeEncodeError as e: print("Unicode error :using encodeing utf-8") with open(f'print-{date}.html', 'w', encoding="utf-8") as f: f.write(r.text) else: print("passing headers") headers = {"user-agent":"Edg/87.0.664.66"} r = get(url, headers=headers) print(r.status_code) if r.status_code == 200: try: with open(f'print-{date}.html', 'w') as f: f.write(r.text) except UnicodeEncodeError as e: print("Unicode error: using encodeing utf-8") with open(f'print-{date}.html', 'w', encoding="utf-8") as f: f.write(r.text) else: print(f"Unable to send requests {r.status_code}") return r except Exception as e: print("Error occured",e)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_page_as(browser, file_name):\n\n with open(file_name, \"w\") as fout:\n fout.write(browser.find_element_by_tag_name(\"pre\").text)", "def save(self, filename):\n outfile = open(filename, \"w\")\n outfile.write(self.html.encode('utf8'))\n outfile.close()", "def saveToFile...
[ "0.7480921", "0.70431954", "0.6845408", "0.67692447", "0.6768163", "0.67273104", "0.66346675", "0.65480995", "0.64376026", "0.6430084", "0.64230853", "0.6403235", "0.6334453", "0.63271886", "0.6322628", "0.63213557", "0.62946224", "0.62572914", "0.62547946", "0.62540007", "0....
0.5828391
45
Drops an Operation, identified by it's Operation Id and it's children recursively Drop deletes the Operations from Database
def drop_operation(cls,operation_id): db = cls._core.get_db() stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS IN (0, 2) ;" cur = db.query(cls._core,stmnt,(operation_id,)) for row in cur.fetchallmap(): cls.drop_operation(row["OPE_ID"]) stmnt = "DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS IN (0, 2) ;" db.query(cls._core,stmnt,(operation_id,),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cancel_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 0 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.cancel_operation(row[\"OPE_ID\...
[ "0.68534726", "0.59218234", "0.5495888", "0.5355895", "0.5291793", "0.5289697", "0.52653944", "0.52528286", "0.5250576", "0.52433366", "0.5235218", "0.5228821", "0.5226976", "0.522246", "0.5191076", "0.5159619", "0.5126623", "0.50397605", "0.50218856", "0.50218856", "0.502188...
0.79740757
0
Resets the state of an operation and it's children recursively to 0 (PENDING) The operation is identified by a given operationId
def retry_operation(cls,operation_id): db = cls._core.get_db() stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 2 ;" cur = db.query(cls._core,stmnt,(operation_id,)) for row in cur.fetchallmap(): cls.retry_operation(row["OPE_ID"]) stmnt = "UPDATE OPERATIONS SET OPE_STATUS = 0 WHERE OPE_ID = ? AND OPE_STATUS = 2 ;" db.query(cls._core,stmnt,(operation_id,),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cancel_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 0 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.cancel_operation(row[\"OPE_ID\...
[ "0.6283896", "0.55792785", "0.5552252", "0.55377895", "0.55284655", "0.53993994", "0.5203521", "0.5042159", "0.50319934", "0.50242394", "0.5023247", "0.5015806", "0.50039274", "0.4977529", "0.4977529", "0.4977529", "0.4977529", "0.48844925", "0.4862212", "0.48437867", "0.4841...
0.6371056
0
Cancels an Operation, identified by it's Operation Id and it's children recursively Cancel Deletes the Operation from Database
def cancel_operation(cls,operation_id): db = cls._core.get_db() stmnt = "SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 0 ;" cur = db.query(cls._core,stmnt,(operation_id,)) for row in cur.fetchallmap(): cls.cancel_operation(row["OPE_ID"]) stmnt = "DELETE FROM OPERATIONS WHERE OPE_ID = ? AND OPE_STATUS = 0 ;" db.query(cls._core,stmnt,(operation_id,),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drop_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS IN (0, 2) ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.drop_operation(row[\"OPE_I...
[ "0.66223377", "0.63557035", "0.63557035", "0.63557035", "0.62062633", "0.5849474", "0.5820822", "0.57708067", "0.5647814", "0.56469494", "0.55416864", "0.55382013", "0.54923594", "0.54598767", "0.5451226", "0.5448454", "0.54425424", "0.54425424", "0.54425424", "0.5434237", "0...
0.8243501
0
Restore an Operationobject stored in the database by a Dataset consisting of
def restore_operation(cls, operation_record):
    """Recreate an Operation object from its OPERATIONS row.

    Instantiates the class named in OPE_TYPE (via exec), assigns the
    stored id, then loads every OPERATIONDATA row and casts each
    value back to its recorded type.

    :param operation_record: mapping with at least OPE_ID and OPE_TYPE
    :return: the restored Operation instance
    """
    classname = operation_record["OPE_TYPE"]
    module = "" #TODO Implement modulename from database if Operation belongs to Module
    is_operation_of_module = False
    # Probe whether the class name is already known here; if not, it
    # is assumed to live in a module (Python 2 exec-statement syntax).
    exec """
try:
    type(%(class)s)
except NameError,e:
    is_operation_of_module = True"""%{'class':classname}
    if is_operation_of_module:
        exec """
from %(module)s import %(class)s
operation = %(class)s(cls._core)"""%{'class':classname,'module':module}
    else:
        exec """
operation = %(class)s(cls._core)"""%{'class':classname}
    operation.set_id(operation_record['OPE_ID'])
    db = cls._core.get_db()
    stmnt = "SELECT OPD_KEY, OPD_VALUE, OPD_TYPE FROM OPERATIONDATA WHERE OPD_OPE_ID = ? ;"
    cur = db.query(cls._core,stmnt,(operation_record["OPE_ID"],))
    for row in cur.fetchallmap():
        val = row["OPD_VALUE"]
        # Cast the stored string back to its original type (e.g. int).
        exec """val = %s(val)"""%row["OPD_TYPE"]
        operation.set_value(row["OPD_KEY"], val)
    return operation
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def restore(self, oid, serial, data, version, prev_txn, transaction):\n assert not version\n self._check_trans(transaction, 'restore')\n self._async('restorea', oid, serial, data, prev_txn, id(transaction))", "def restore(self, checkpoint):\n raise NotImplementedError", "def mos_object(...
[ "0.6201652", "0.6147818", "0.5943375", "0.5855127", "0.5704961", "0.56648844", "0.56335723", "0.55953205", "0.55918145", "0.55536777", "0.55432135", "0.5536391", "0.5470216", "0.54262424", "0.539617", "0.5361314", "0.5206639", "0.51915765", "0.5184326", "0.5164573", "0.514952...
0.6955749
0
Recursively executes the workloads of Operation's Childoperations It hereby catches exceptions in the workloads, sets the OPE_STATUS to 2 (FAILED) if a catch occurs, then passes the exception on to the higher layer. If an Operation succeeds, it's entry in DB gets deleted
def process_children(cls, operation):
    """Recursively execute the workloads of ``operation``'s children.

    Children run oldest-first.  Each child is locked (OPE_STATUS = 1)
    before its own children and workload execute.  On failure the
    child is marked FAILED (OPE_STATUS = 2) and the exception is
    re-raised; on success its row is deleted.

    :param operation: parent Operation whose children to process
    """
    db = cls._core.get_db()

    stmnt = "SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT = ? ORDER BY OPE_INVOKED ;"
    stmnt_lock = "UPDATE OPERATIONS SET OPE_STATUS = 1 WHERE OPE_ID = ? ;"
    cur = db.query(cls._core,stmnt,(operation.get_id(),))
    for row in cur.fetchallmap():
        child_operation = cls.restore_operation(row)
        # Lock the child before executing it.
        db.query(cls._core,stmnt_lock,(child_operation.get_id(),),commit=True)
        try:
            cls.process_children(child_operation)
            child_operation.do_workload()
        except Exception,e:
            # Mark as FAILED and propagate to the caller.
            stmnt_err = "UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;"
            db.query(cls._core,stmnt_err,(int(row["OPE_ID"]),),commit=True)
            #TODO GENERATE ERROR IN LOG
            raise e
        # Success: remove the child's row from the queue.
        stmnt_delete = "DELETE FROM OPERATIONS WHERE OPE_ID = ?;"
        db.query(cls._core,stmnt_delete,(child_operation.get_id(),),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def retry_operation(cls,operation_id):\n db = cls._core.get_db()\n\n stmnt = \"SELECT OPE_ID FROM OPERATIONS WHERE OPE_OPE_PARENT = ? AND OPE_STATUS = 2 ;\"\n cur = db.query(cls._core,stmnt,(operation_id,))\n for row in cur.fetchallmap():\n cls.retry_operation(row[\"OPE_ID\"]...
[ "0.6481785", "0.6258558", "0.6212218", "0.5927201", "0.5507661", "0.5277229", "0.5242107", "0.5207474", "0.51513904", "0.5112994", "0.5106702", "0.5086462", "0.50150937", "0.4995987", "0.49878561", "0.497371", "0.49726665", "0.4905259", "0.49032328", "0.48811585", "0.48808104...
0.766613
0
Sets the status of the next toplevel operation to 1 (ACTIVE) Fetches the next topleveloperation from the database, applies a FILESYSTEMLOCK! Which is /tmp/scv_operating.lck !!!
def process_next(cls):
    """Execute the oldest pending toplevel operation, if any.

    Guards against concurrent processors with a filesystem lockfile
    (<core.webpath>/scv_operating.lck).

    :return: True if a toplevel operation was executed (whether it
        succeeded or failed), False if the lock is held by another
        processor or the queue is empty.
    :raises OperationException: if the lockfile cannot be removed.
    """
    db = cls._core.get_db()
    configuration = cls._core.get_configuration()
    lockpath = configuration.get_entry("core.webpath")+"/scv_operating.lck"
    try:
        # Atomic create-or-fail: O_EXCL raises if the file already
        # exists, closing the check-then-create race the previous
        # os.path.exists() + open() sequence had.
        fd = os.open(lockpath, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
        os.close(fd)
    except OSError:
        return False
    stmnt_lock = "UPDATE OPERATIONS SET OPE_STATUS = 1 \
                      WHERE OPE_ID IN ( \
                        SELECT OPE_ID FROM OPERATIONS \
                        WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 0 \
                        AND OPE_INVOKED = ( \
                          SELECT MIN(OPE_INVOKED) FROM OPERATIONS \
                          WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 0) \
                      ) ;"
    stmnt = "SELECT OPE_ID, OPE_TYPE FROM OPERATIONS WHERE OPE_OPE_PARENT IS NULL AND OPE_STATUS = 1 ;"
    db.query(cls._core,stmnt_lock,commit=True)
    cur = db.query(cls._core,stmnt)
    res = cur.fetchallmap()
    if len(res) > 0:
        operation = cls.restore_operation(res[0])
        try:
            cls.process_children(operation)
            operation.do_workload()
        except Exception:
            # Mark the toplevel operation as FAILED and log the traceback.
            stmnt_err = "UPDATE OPERATIONS SET OPE_STATUS = 2 WHERE OPE_ID = ? ;"
            db.query(cls._core,stmnt_err,(operation.get_id(),),commit=True)
            error = StringIO()
            print_exc(None,error)
            cls._core.log(error.getvalue())
        ret = True
    else:
        ret = False
    # Remove every completed (still-locked, status 1) toplevel row.
    stmnt_delete = "DELETE FROM OPERATIONS WHERE OPE_STATUS = 1 ;"
    db.query(cls._core,stmnt_delete,commit=True)
    db.commit()
    try:
        os.unlink(lockpath)
    except OSError:
        raise OperationException(OperationException.get_msg(0))
    return ret
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def arm_oplock_future(self):\n self.oplock_future = self.tree.session.client.oplock_break_future(self.file_id)", "def processLock(self):\r\n self.controller.executionLock()", "def active(value):\r\n self.context.active = threading.BoundedSemaphore(value=value)", "def state_wait_enter(cfg, ap...
[ "0.54746443", "0.5449821", "0.54388434", "0.5305737", "0.52546555", "0.52230906", "0.5174595", "0.5144526", "0.5143462", "0.5060711", "0.5041392", "0.50204164", "0.5015011", "0.5006248", "0.50048566", "0.5001075", "0.49896038", "0.4973004", "0.49551147", "0.49547556", "0.4952...
0.7192307
0
Returns all Operations in an associative array. The array's indices are the operationIDs The Objects contain all information about the operations, including the Data
def get_current_operations_for_gui(cls, operation_types=None):
    """Return all operations as a dict keyed by operation id.

    Each entry carries id, parent, invocation timestamp, type,
    status and the operation's custom data values.

    :param operation_types: optional list of type names to filter on
    :return: dict mapping OPE_ID -> operation info dict
    """
    db = cls._core.get_db()

    if operation_types is not None and type(operation_types) == list:
        # One placeholder per element: a single "IN (?)" would bind
        # the whole list to one parameter and match nothing.
        placeholders = ",".join(["?"] * len(operation_types))
        stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE, OPE_STATUS FROM OPERATIONS WHERE OPE_TYPE IN (%s) ORDER BY OPE_INVOKED ;" % placeholders
        cur = db.query(cls._core,stmnt,(operation_types))
    else:
        stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE, OPE_STATUS FROM OPERATIONS ORDER BY OPE_INVOKED ;"
        cur = db.query(cls._core,stmnt)
    ret = {}
    for row in cur.fetchallmap():
        operation = cls.restore_operation(row)
        custom_values = operation.get_values()
        ret[row["OPE_ID"]] = {"id":row["OPE_ID"],
                              "parent":row["OPE_OPE_PARENT"],
                              "invoked":str(row["OPE_INVOKED"]),
                              "type":row["OPE_TYPE"],
                              "status":row["OPE_STATUS"],
                              "data":custom_values}
    return ret
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def get_operations(self):\n return s...
[ "0.7069516", "0.7069516", "0.7069516", "0.7069516", "0.6523942", "0.6325791", "0.63029575", "0.6191638", "0.6191638", "0.61165065", "0.60694075", "0.6018854", "0.60050935", "0.5908322", "0.58018446", "0.5792496", "0.5778409", "0.5778409", "0.5778409", "0.575747", "0.5727349",...
0.628271
7
Get a new Operation Id from the Database and assign it to this Operation if this Operation's id is null. Afterwards return the new Id
def set_db_id(self):
    """Ensure this Operation has a database id and return it.

    Pulls the next value from the OPE_GEN sequence only when no id
    has been assigned yet.
    """
    if self._id is not None:
        return self._id
    self._id = self._core.get_db().get_seq_next('OPE_GEN')
    return self._id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def operation_id(self) -> Optional[str]:\n return pulumi.get(self, \"operation_id\")", "def get_unique_operation_id(self) -> np.uint64:\n\n counter_key = serialize_key('counter')\n\n # Incrementer row keys start with an \"i\"\n row_key = serialize_key(\"ioperations\")\n append_...
[ "0.7058039", "0.6560749", "0.64451057", "0.63077974", "0.6068187", "0.5920608", "0.58303344", "0.58039576", "0.5725634", "0.5725634", "0.56132185", "0.55941796", "0.55935115", "0.55935115", "0.55935115", "0.55935115", "0.55935115", "0.55935115", "0.5580039", "0.55765927", "0....
0.0
-1
Stores this Operation to database. Also saves every user defined value in $_values as long as it is a valid type
def store(self):
    """Persist this Operation and its storable values to the database.

    Ensures the operation has an id (pulling one from the OPE_GEN
    sequence if necessary), upserts the OPERATIONS row, then upserts
    one OPERATIONDATA row per value whose type name is in
    Operation.VALID_STORAGE_TYPES.
    """
    db = self._core.get_db()
    if self._id is None:
        self._id = db.get_seq_next('OPE_GEN')

    stmnt = "UPDATE OR INSERT INTO OPERATIONS (OPE_ID, OPE_OPE_PARENT, OPE_INVOKED, OPE_TYPE) \
                  VALUES (?,?,CURRENT_TIMESTAMP,?) MATCHING (OPE_ID);"
    db.query(self._core,stmnt,(self._id,self._parent,self.__class__.__name__),commit=True)

    stmnt = "UPDATE OR INSERT INTO OPERATIONDATA (OPD_OPE_ID, OPD_KEY, OPD_VALUE, OPD_TYPE) \
                  VALUES ( ?, ?, ?, ?) MATCHING(OPD_OPE_ID,OPD_KEY);"
    for key, value in self._values.items():
        # type(value).__name__ is correct on both Python 2 and 3;
        # parsing str(type(value)) only matched Py2's "<type '...'>".
        typ = type(value).__name__
        if typ not in Operation.VALID_STORAGE_TYPES:
            continue
        db.query(self._core,stmnt,(self._id,key,value,typ),commit=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_vals (self):\n raise NotImplementedError", "def save(self, values):", "def persist(self, values):\n pass", "def save(self):\n data = self.serialize()\n\n self.validate(data)\n\n saved_data = DATABASE_CONNECTION.insert(self.__class__.__name__, data)\n\n self....
[ "0.74617493", "0.7196511", "0.7009068", "0.6361609", "0.62717307", "0.6257779", "0.6165036", "0.608339", "0.60653645", "0.60352385", "0.6013189", "0.5978669", "0.59265023", "0.58840144", "0.58840144", "0.58840144", "0.5868349", "0.5846248", "0.58364046", "0.5794738", "0.57928...
0.6750283
3
This method must be overridden by inheriting classes. The code inside this method will be executed when the
def do_workload(self):
    """Hook run when this Operation is processed.

    The base implementation is a deliberate no-op; subclasses
    override it with their actual work.
    """
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def override(self):\n return None", "def __call__(self):\r\n raise NotImplementedError('override me')", "def __call__(self):\n\t\treturn", "def run(self):\n raise Exception('derived class should redefine this function')", "def __call__( self ):\n pass", "def runThis(self):\n ...
[ "0.75245625", "0.7466125", "0.73410434", "0.7315841", "0.7287032", "0.72735256", "0.7255045", "0.72209936", "0.7202259", "0.7187639", "0.71060145", "0.7067497", "0.7067497", "0.70535463", "0.6988847", "0.6977445", "0.6951473", "0.6951473", "0.6951473", "0.6951473", "0.6951473...
0.0
-1
Sets this operations values from module metadata
def set_values(self, module):
    """Copy module metadata into this operation's value store.

    :param module: either a metadata dict (name, hrname,
        version_major, version_minor, revision, optional signature)
        or a Module instance (not yet implemented).
    """
    if isinstance(module, dict):
        self.set_value("name", module["name"])
        self.set_value("hrname", module["hrname"])
        self.set_value("version_major", module["version_major"])
        self.set_value("version_minor", module["version_minor"])
        self.set_value("revision", module["revision"])
        # "in" works on Python 2 and 3; dict.has_key() is Py2-only.
        if "signature" in module:
            self.set_value("signature", module["signature"])
    elif module.__class__.__name__ == "Module":
        pass #TODO IMPLEMENT / DISCUSS AFTER IMPLEMENTING MODULE-SUBSYSTEM
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_metadata(self, data):\r\n pass", "def __init__(self):\n\n self.operations = {}", "def PopulateModuleMetadata(self, mod, mojom_file):\n mod.name = os.path.basename(mojom_file.file_name)\n mod.path = mojom_file.file_name\n mod.namespace = mojom_file.module_namespace\n if mojom_f...
[ "0.59990793", "0.58392733", "0.5662214", "0.5603085", "0.55330473", "0.55237305", "0.5490515", "0.5485604", "0.5479347", "0.54730034", "0.54730034", "0.54730034", "0.54730034", "0.54730034", "0.54730034", "0.5461847", "0.54556865", "0.54516035", "0.54180896", "0.54075307", "0...
0.6323762
0
Returns an Array of ModuleOperationObjects that are currently listedin the queue
def get_currently_processed_modules(cls):
    """Return metadata for every queued module (un)install operation."""
    db = cls._core.get_db()
    stmnt = "SELECT OPE_ID, OPE_OPE_PARENT, OPE_TYPE FROM OPERATIONS \
                  WHERE OPE_TYPE = 'ModuleInstallOperation' \
                  or OPE_TYPE = 'ModuleUninstallOperation' ;"
    rows = db.query(cls._core,stmnt).fetchallmap()
    return [Operation.restore_operation(row).get_meta() for row in rows]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def operation_list(self):\n return self._operation_list", "def get_queue_list(self):\n return s...
[ "0.63590986", "0.63590986", "0.63590986", "0.63590986", "0.6177426", "0.5978019", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5920926", "0.5798638", "0.57853013", "0.5771161", "0.5731278", "0.5724055", "0.56982976...
0.7111473
0
tell the module manager to install a specific module.
def do_workload(self):
    """Ask the module manager to install the module described by
    this operation's metadata."""
    self._core.get_module_manager().install_module(self.get_meta())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install_package(self, package):\n raise NotImplementedError(\"install_package not implemented!\")", "def install(*module_names):\n for m_name in module_names:\n if not find_spec(m_name):\n if input(NOT_FOUND_MSG.format(m_name)).lower() not in 'Nn':\n if main(['insta...
[ "0.6969152", "0.69388556", "0.6872978", "0.68555695", "0.6801365", "0.67761844", "0.6767428", "0.6733695", "0.6714173", "0.66525847", "0.6616081", "0.65094006", "0.6481745", "0.64168596", "0.63249385", "0.6309751", "0.6267477", "0.6245471", "0.6231327", "0.6215618", "0.620006...
0.6157249
21
tell the module manager to uninstall a specific module.
def do_workload(self):
    """Ask the module manager to uninstall the module named in this
    operation's values."""
    manager = self._core.get_module_manager()
    target = manager.get_module_by_name(self._values["name"])
    manager.uninstall_module(target)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install_package(self, package):\n raise NotImplementedError(\"install_package not implemented!\")", "def install(*module_names):\n for m_name in module_names:\n if not find_spec(m_name):\n if input(NOT_FOUND_MSG.format(m_name)).lower() not in 'Nn':\n if main(['insta...
[ "0.6969152", "0.69388556", "0.6872978", "0.68555695", "0.6801365", "0.67761844", "0.6767428", "0.6733695", "0.6714173", "0.66525847", "0.6616081", "0.65094006", "0.6481745", "0.64168596", "0.63249385", "0.6309751", "0.6267477", "0.6245471", "0.6231327", "0.6215618", "0.620006...
0.0
-1
tell the module manager to update a specific module.
def do_workload(self):
    """Ask the module manager to update the module named in this
    operation's values."""
    manager = self._core.get_module_manager()
    target = manager.get_module_by_name(self._values["name"])
    manager.update_module(target)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def install_package(self, package):\n raise NotImplementedError(\"install_package not implemented!\")", "def install(*module_names):\n for m_name in module_names:\n if not find_spec(m_name):\n if input(NOT_FOUND_MSG.format(m_name)).lower() not in 'Nn':\n if main(['insta...
[ "0.6969152", "0.69388556", "0.6872978", "0.68555695", "0.6801365", "0.67761844", "0.6767428", "0.6733695", "0.6714173", "0.66525847", "0.6616081", "0.65094006", "0.6481745", "0.64168596", "0.63249385", "0.6309751", "0.6267477", "0.6245471", "0.6231327", "0.6215618", "0.620006...
0.0
-1
Do work if there is work to do, otherwise check every two seconds for new work.
def run(self):
    """Drain the operation queue, then poll for new work every two
    seconds, forever."""
    manager = self._core.get_operation_manager()
    while True:
        # process_next() returns False once the queue is empty.
        while manager.process_next():
            pass
        sleep(2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_checks():\n while True:\n if datetime.now() > core.misc_data.check_date+timedelta(minutes=45):\n for stuff in stuff_to_do:\n threading.Thread(target=stuff).start()\n core.misc_data.check_date = datetime.now() + config.utils.tasks.repeat_every\n time.sle...
[ "0.6984714", "0.6409769", "0.6315232", "0.62860876", "0.6256095", "0.62228227", "0.61101943", "0.60830945", "0.6035169", "0.60236955", "0.59735036", "0.59445417", "0.5925701", "0.5867354", "0.5861029", "0.5783809", "0.5782785", "0.5782785", "0.5774008", "0.57409346", "0.57183...
0.5247155
97
Return squarefree decomposition of a polynomial in ``K[X]``. Examples ======== >>> R, x, y = ring('x y', ZZ) >>> R.sqf_list(x**5 + 2*x**4*y + x**3*y**2) (1, [(x + y, 2), (x, 3)]) >>> R, x, y = ring('x y', FF(5)) >>> f = x**5*y**5 + 1
def sqf_list(self, f):
    """Return the square-free decomposition of ``f`` in ``K[X]`` as
    ``(coeff, [(factor, multiplicity), ...])``."""
    domain = self.domain

    # Normalize f and split off the leading/content coefficient.
    if domain.is_Field:
        coeff, f = f.LC, f.monic()
    else:
        coeff, f = f.primitive()

    if domain.is_FiniteField:
        factors = self._gf_sqf_list(f)
    else:
        factors = self._rr_yun0_sqf_list(f)
    return coeff, factors
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def zzX_sqr(f):\n if poly_univariate_p(f):\n return zzx_sqr(f)\n\n if zzX_zero_p(f):\n return f\n\n df = zzX_degree(f)\n l = poly_level(f)-1\n\n h = []\n\n for i in xrange(0, 2*df+1):\n coeff = zzX_zero(l)\n\n jmin = max(0, i-df)\n jmax = min(i, df)\n\n n...
[ "0.58898497", "0.5696768", "0.55792755", "0.5505899", "0.53948593", "0.53656983", "0.51418346", "0.51252043", "0.5098417", "0.50928617", "0.50540054", "0.5037502", "0.49890068", "0.48362672", "0.48262566", "0.48185673", "0.4812812", "0.48055914", "0.47933468", "0.47783032", "...
0.55302864
3
Compute squarefree decomposition of the monic ``f`` in ``GF(q)[X]``. Notes ===== Uses a modified version of Musser's algorithm for squarefree decomposition of univariate polynomials over finite fields. References ==========
def _gf_sqf_list(self, f):
    """Square-free decomposition of monic ``f`` over ``GF(q)``.

    Uses a modified Musser algorithm: repeatedly splits off the
    square-free part via gcds with the partial derivatives, then
    extracts a p-th root (characteristic p) and recurses, tracking
    the accumulated multiplicity factor ``n``.

    :return: list of ``(factor, multiplicity)`` pairs
    """
    domain = self.domain

    # n: multiplicity carried across p-th-root extractions;
    # p: characteristic; m = q // p used to take p-th roots of coeffs.
    n, factors, p = 1, [], int(domain.characteristic)
    m = int(domain.order // p)

    while not f.is_ground:
        df = [f.diff(x) for x in self.gens]

        if any(_ for _ in df):
            # Some derivative is nonzero: peel square-free layers.
            g = f
            for q in df:
                g = self.gcd(g, q)
            h, f, i = f // g, g, 1

            while h != 1:
                g = self.gcd(f, h)
                h //= g

                if not h.is_ground:
                    factors.append((h, i*n))

                f //= g
                h = g
                i += 1

        # What remains is a p-th power: take the p-th root by dividing
        # exponents by p and raising coefficients to the m-th power.
        n *= p

        g = self.zero
        for monom, coeff in f.items():
            g[tuple(_ // p for _ in monom)] = coeff**m
        f = g

    return factors
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_squarefree(self, f):\n if f.is_ground:\n return True\n g = f\n for x in self.gens:\n g = self.gcd(g, f.diff(x))\n if g.is_ground:\n return True\n return False", "def sqf_part(self, f):\n domain = self.domain\n\n if d...
[ "0.6348645", "0.6082395", "0.60524696", "0.60362184", "0.5977433", "0.5936218", "0.5890577", "0.58653134", "0.5856254", "0.57929945", "0.5784882", "0.5635512", "0.56248885", "0.554542", "0.55379504", "0.5530155", "0.5527634", "0.55048215", "0.54990107", "0.5371403", "0.536764...
0.6371153
0
Compute squarefree decomposition of ``f`` in zerocharacteristic ring ``K[X]``. References ==========
def _rr_yun0_sqf_list(self, f):
    """Square-free decomposition of ``f`` over a characteristic-zero
    ring, via (a multivariate variant of) Yun's algorithm.

    :return: list of ``(factor, multiplicity)`` pairs
    """
    if f.is_ground:
        return []

    result, count = [], 1
    qs = [f.diff(x) for x in self.gens]

    # g = gcd(f, df/dx1, ..., df/dxn): the repeated part of f.
    g = f
    for q in qs:
        g = self.gcd(g, q)

    while f != 1:
        qs = [q // g for q in qs]
        f //= g
        qs = [q - f.diff(x) for x, q in zip(self.gens, qs)]

        g = f
        for q in qs:
            g = self.gcd(g, q)
        if g != 1:
            # g collects the factors of multiplicity exactly `count`.
            result.append((g, count))

        count += 1

    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def vsfun(Q_slm, theta, phi,f=[]):\n vsf_th=numpy.zeros(theta.shape, dtype='complex')\n vsf_ph=numpy.zeros(theta.shape, dtype='complex')\n for (s,l,m) in Q_slm:\n vsh_th,vsh_ph=K(s, l, m, theta, phi)\n c_slm=Q_slm.getBysnm(s, l, m) if not(f) else Q_slm.getBysnm(s, l, m)(f)\n vsf_th=vsf_th+c_slm*vsh_th\...
[ "0.5876295", "0.5828523", "0.58153415", "0.57921195", "0.5763062", "0.5645726", "0.5505161", "0.54806006", "0.54626274", "0.5399158", "0.5388178", "0.53604126", "0.53176093", "0.5301814", "0.52727515", "0.5272213", "0.52575696", "0.52096814", "0.52035517", "0.51917213", "0.51...
0.0
-1
Return ``True`` if ``f`` is a squarefree polynomial in ``K[X]``. Examples ======== >>> _, x, y = ring('x y', ZZ) >>> ((x + y)**2).is_squarefree False >>> (x**2 + y**2).is_squarefree True
def is_squarefree(self, f):
    """Return True iff ``f`` has no repeated factors in ``K[X]``."""
    if f.is_ground:
        return True
    acc = f
    for gen in self.gens:
        acc = self.gcd(acc, f.diff(gen))
        # A ground gcd with the partials means no repeated factor.
        if acc.is_ground:
            return True
    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_squarefree_hilbert_number(n):\n return is_hilbert_number(n) and is_hilbert_squarefree_number(n)", "def isNodeSheaf(_session, _node):\n return checkIncToSets(_session, _node, [keynodes.info.stype_sheaf], sc.SC_A_CONST | sc.SC_POS)", "def is_sqf(f):\n return dmp_sqf_p(f.rep, f.lev, f.dom)", ...
[ "0.56360364", "0.5465797", "0.5397887", "0.5356589", "0.533036", "0.51120263", "0.50754285", "0.5052217", "0.49830848", "0.49745223", "0.49274656", "0.4885014", "0.48708126", "0.484963", "0.48122284", "0.47914714", "0.47727177", "0.47523925", "0.47462133", "0.47416347", "0.47...
0.74997777
0
Returns squarefree part of a polynomial in ``K[X]``. Examples ======== >>> R, x, y = ring('x y', ZZ) >>> R.sqf_part(x**3 + 2*x**2*y + x*y**2) x**2 + x*y
def sqf_part(self, f):
    """Return the square-free part of ``f`` in ``K[X]``."""
    domain = self.domain

    # Over a finite field, rebuild from the square-free factor list.
    if domain.is_FiniteField:
        g = self.one
        for factor, _ in self.sqf_list(f)[1]:
            g *= factor
        return g

    if not f:
        return f

    # Divide out gcd(f, all partial derivatives).
    gcd = f
    for x in self.gens:
        gcd = self.gcd(gcd, f.diff(x))
    sqf = f // gcd

    if domain.is_Field:
        return sqf.monic()
    return sqf.primitive()[1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def zzX_sqf_part(f):\n quo = zzX_quo(f, zzX_gcd(f, zzX_diff(f)))\n return zzX_primitive(quo)[1]", "def rfpart(x):\n return 1 - fpart(x)", "def sqf_part(f):\n return f.per(dmp_sqf_part(f.rep, f.lev, f.dom))", "def zzx_sqf_part(f):\n quo = zzx_quo(f, zzx_gcd(f, zzx_diff(f)))\n return zzx_...
[ "0.59264237", "0.5611723", "0.55813473", "0.5479996", "0.5371169", "0.51204914", "0.50550634", "0.50538117", "0.49067897", "0.47249427", "0.4711605", "0.46479887", "0.46400973", "0.4625992", "0.4623234", "0.45929667", "0.4567981", "0.45518592", "0.45361853", "0.4516747", "0.4...
0.50990486
6
Squarefree norm of ``f`` in ``K[X]``, useful over algebraic domains. Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x - s*a)`` and ``r(x) = Norm(g(x))`` is a squarefree polynomial over K, where ``a`` is the algebraic extension of ``K``. Examples ======== >>> _, x, y = ring('x y', QQ.algebraic_field(I)) >>> (x*y + y**2).sqf_norm() (1, x*y - I*x + y**2 - 3*I*y - 2, x**2*y**2 + x**2 + 2*x*y**3 + 2*x*y + y**4 + 5*y**2 + 4)
def sqf_norm(self, f):
    """Square-free norm of ``f``, useful over algebraic domains.

    Returns ``s, f, r`` such that ``g(x) = f(x - s*a)`` and
    ``r(x) = Norm(g(x))`` is a square-free polynomial over the
    ground field, where ``a`` generates the algebraic extension.

    :raises DomainError: if the ground domain is not algebraic
    """
    domain = self.domain

    if not domain.is_AlgebraicField:
        raise DomainError(f'ground domain must be algebraic, got {domain}')

    new_ring = self.to_ground().inject(*domain.symbols, front=True)
    # g: the extension's minimal polynomial, moved to the new ring.
    g = domain.mod.set_ring(new_ring)
    s = 0

    while True:
        h = f.inject(front=True)
        r = g.resultant(h)

        if r.is_squarefree:
            return s, f, r
        # Shift every generator by the extension unit and retry.
        f = f.compose({x: x - domain.unit for x in self.gens})
        s += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sqf_norm(f):\n s, g, r = dmp_sqf_norm(f.rep, f.lev, f.dom)\n return s, f.per(g), f.per(r, dom=f.dom.dom)", "def squared_frobenius_norm(x):\n # http://mathworld.wolfram.com/FrobeniusNorm.html\n # The gradient of KL[p,q] is not defined when p==q. The culprit is\n # tf.norm, i.e., we cann...
[ "0.6828397", "0.6347827", "0.58674204", "0.58420646", "0.57301825", "0.56841624", "0.56582105", "0.56257766", "0.5603542", "0.55873054", "0.5552461", "0.55447716", "0.55371946", "0.5521278", "0.5477447", "0.54771346", "0.5461877", "0.54501307", "0.54418135", "0.53796417", "0....
0.7036989
0
tries number of times to retry starting the broker. < 0 means infinitely many. delay number of seconds to wait after the first failed attempt backoff factor by which the delay will be incremented after a failure.
def startSTOMPBroker(config, serverUpEvent, tries=-1, delay=1, backoff=1.5):
    """Run a coilmq STOMP broker, retrying on startup failure.

    :param config: ConfigParser-like object; its [coilmq] section is
        copied into coilmq's own configuration
    :param serverUpEvent: multiprocessing.Event set once the broker
        is listening
    :param tries: number of retries after a failure; < 0 means
        retry forever
    :param delay: seconds to wait after the first failed attempt
    :param backoff: factor applied to the delay after each failure
    """
    #stomp broker
    mtries = tries
    mdelay = delay
    coilserver = None
    from coilmq.config import config as coilconfig
    if config.has_section('coilmq'):
        for k,v in config.items('coilmq'):
            coilconfig.set('coilmq', k, v)
            logger.debug("Set %s to %s for coilmq config." % (k,v))
    while True:
        try:
            coilserver = coilmq.start.server_from_config(coilconfig)
            logger.info("Stomp server listening on %s:%s" % \
                    coilserver.server_address)
            serverUpEvent.set()
            coilserver.serve_forever()
        except IOError as ex:
            logger.error("Exception while starting coilmq broker: '%s'", ex)
            # Retry with exponentially growing delay until mtries hits
            # 0 (a negative start value therefore never stops).
            if mtries != 0:
                logger.debug("Retrying coilmq startup in %.1f seconds...", mdelay)
                time.sleep(mdelay)
                mdelay *= backoff
                mtries -= 1
            else:
                logger.debug("Ran out of trials (tried %d times) for coilmq startup. Giving up.", tries)
                break
        finally:
            if coilserver:
                coilserver.server_close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_retry_delay(self, last_delay):\n return last_delay * 2", "def retry(times):\n return repeat_with_success_at_least(times, 1)", "def retry(tries, delay=3, backoff=2):\n tries = math.floor(tries)\n if tries < 0:\n raise ValueError(\"tries must be 0 or greater\")\n\n\n ...
[ "0.68163663", "0.67791754", "0.6515797", "0.64622295", "0.64621955", "0.6419269", "0.6208343", "0.6206229", "0.61321574", "0.60527027", "0.60336995", "0.6022903", "0.594526", "0.5920799", "0.58964133", "0.5895078", "0.5889684", "0.5877148", "0.5873413", "0.58332604", "0.58207...
0.0
-1
Start twisted event loop and the fun should begin... brokerTimeout how long to wait for a broker a negative number upon failure. Otherwise, it never returns.
def start(config, brokerTimeout = 60.0):
    """Launch the STOMP broker in a child process, then run the
    twisted reactor for the host-side engine.

    :param config: ConfigParser-like object with a [Broker] section
        (host, port, username, password)
    :param brokerTimeout: seconds to wait for the broker to come up
    :return: -1 if the broker did not start in time; otherwise this
        call blocks inside reactor.run() and does not return
    """
    manager = multiprocessing.Manager()
    serverUpEvent = manager.Event()
    broker = multiprocessing.Process(target=startSTOMPBroker, args=(config,serverUpEvent))
    broker.daemon = True
    broker.name = 'STOMP-Broker'
    broker.start()

    # Wait until the broker signals readiness, up to the timeout.
    serverUpEvent.wait(brokerTimeout)
    if not serverUpEvent.is_set():
        logger.fatal("Broker not available after %.1f seconds. Giving up", brokerTimeout)
        return -1
    #host side logic
    host = config.get('Broker', 'host')
    port = int(config.get('Broker', 'port'))
    username = config.get('Broker', 'username')
    password = config.get('Broker', 'password')

    hostEngine = HostStompEngine(config)
    stompProtocolFactory = StompProtocolFactory(hostEngine, username, password)

    HostXMLRPCService(config).makeEngineAccesible(hostEngine)

    reactor.connectTCP(host, port, stompProtocolFactory)
    reactor.run()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_running():\n log.msg('reactor_loop Starting')\n try:\n conn = client.connect(reactor)\n si446x_do = Si446xComponent(conn)\n conn.addCallback(si446x_do.start)\n conn.addErrback(si446x_do.on_error)\n except error.DBusException, e:\n l...
[ "0.6220351", "0.62159324", "0.61877704", "0.6160593", "0.61132985", "0.59028834", "0.58682054", "0.5843688", "0.5822215", "0.5798466", "0.57124496", "0.57084095", "0.566796", "0.56646776", "0.56603914", "0.5605297", "0.55968124", "0.55797726", "0.55603266", "0.55498093", "0.5...
0.6690256
0
Initializes the object with a tuple of information.
def __init__(self, info=None):
    """Initialize the location.

    :param info: optional tuple ``(name, country, latitude,
        longitude, timezone)``; when omitted, defaults to
        Greenwich, England.
    """
    self.astral = None
    if info is None:
        self.name = 'Greenwich'
        self.country = 'England'
        self._latitude = 51.168
        self._longitude = 0
        self._timezone_group = 'Europe'
        self._timezone_location = 'London'
    else:
        self._latitude = 0
        self._longitude = 0
        self._timezone_group = ''
        self._timezone_location = ''

        try:
            self.name = info[0].encode('utf-8')
            self.country = info[1].encode('utf-8')
            self.latitude = info[2]
            self.longitude = info[3]
            self.timezone = info[4]
        except Exception:
            # Best-effort: a short or malformed tuple leaves the
            # remaining attributes at their defaults.  A bare
            # ``except:`` would also swallow SystemExit and
            # KeyboardInterrupt, so catch Exception instead.
            pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, fields):\n self.__init_handle_by_constructor__(_make.TupleType, fields)", "def __init__(self, *args):\n _snap.TAttrPair_swiginit(self, _snap.new_TAttrPair(*args))", "def __init__(self, config: Tuple):", "def __init__(self):\n\n data_extract=DataExtracter()\n sel...
[ "0.76838994", "0.6909169", "0.69027334", "0.6892625", "0.6837433", "0.6632949", "0.65825474", "0.656346", "0.6476574", "0.6431456", "0.6426662", "0.6349654", "0.6341158", "0.63148826", "0.6312479", "0.62745905", "0.6267313", "0.62587535", "0.6253988", "0.6251733", "0.6246671"...
0.6001026
64
Calculates the time in the morning when the sun is a certain number of degrees below the horizon. By default this is 6 degrees but can be changed
def dawn(self, date=None, local=True):
    """Return dawn time for *date* (today when omitted), converted
    to this location's timezone when *local* is true."""
    if self.astral is None:
        self.astral = Astral()
    if date is None:
        date = datetime.date.today()
    utc_time = self.astral.dawn_utc(date, self.latitude, self.longitude)
    return utc_time.astimezone(self.tz) if local else utc_time
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360 - (abs(offset) / 1080)))\n return ((cls.daily_motion(date) / 360) * (equation_sun / 360) * cls.SIDEREAL_YEAR)", "def sundial_time(cls, tee...
[ "0.6528656", "0.63950557", "0.60731083", "0.5948047", "0.58831996", "0.58726454", "0.586126", "0.58426833", "0.58409107", "0.5811609", "0.57901573", "0.5728057", "0.56637347", "0.56392944", "0.56149286", "0.5611691", "0.55851465", "0.5576015", "0.55250823", "0.5508363", "0.54...
0.0
-1
Return sunrise time. Calculates the time in the morning when the sun is a 0.833 degrees below the horizon. This is to account for refraction.
def sunrise(self, date=None, local=True):
    """Return sunrise time for *date* (today when omitted), converted
    to this location's timezone when *local* is true."""
    if self.astral is None:
        self.astral = Astral()
    if date is None:
        date = datetime.date.today()
    utc_time = self.astral.sunrise_utc(date, self.latitude, self.longitude)
    return utc_time.astimezone(self.tz) if local else utc_time
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls...
[ "0.7141806", "0.7119291", "0.70426834", "0.68516636", "0.6563814", "0.64964134", "0.6399623", "0.6321392", "0.63076687", "0.60870713", "0.5994337", "0.59702235", "0.5949885", "0.5923036", "0.5906305", "0.5899862", "0.5849699", "0.57508343", "0.57498085", "0.5634182", "0.55695...
0.53256434
39
Calculates the solar noon (the time when the sun is at its highest point.)
def solar_noon(self, date=None, local=True):
    """Return solar noon for *date* (today when omitted), converted
    to this location's timezone when *local* is true."""
    if self.astral is None:
        self.astral = Astral()
    if date is None:
        date = datetime.date.today()
    utc_time = self.astral.solar_noon_utc(date, self.longitude)
    return utc_time.astimezone(self.tz) if local else utc_time
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_noon_local(LonDegE):\n return 12.", "def solar_noon_utc(self, date, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n newt = self._jday_to_jcentury(julianday + 0.5 + longitude / 360.0)\n\n eqtime = self._eq_of_time(newt)\n timeUTC = ...
[ "0.6816998", "0.66865885", "0.6304783", "0.6012745", "0.60126984", "0.59854287", "0.592383", "0.585429", "0.5843106", "0.5810727", "0.58008647", "0.5759068", "0.5738313", "0.57033736", "0.570194", "0.56586534", "0.5638657", "0.5631576", "0.5618385", "0.5601817", "0.55713826",...
0.6865988
0
Calculates sunset time (the time in the evening when the sun is a 0.833 degrees below the horizon. This is to account for refraction.)
def sunset(self, date=None, local=True): if self.astral is None: self.astral = Astral() if date is None: date = datetime.date.today() sunset = self.astral.sunset_utc(date, self.latitude, self.longitude) if local: return sunset.astimezone(self.tz) else: return sunset
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sundial_time(cls, tee):\n date = Clock.fixed_from_moment(tee)\n time = mod(tee, 1)\n q = ifloor(4 * time)\n if q == 0:\n a = cls.sunset(date - 1)\n b = cls.sunrise(date)\n t = Clock.days_from_hours(-6)\n elif q == 3:\n a = cls.su...
[ "0.74437636", "0.6815162", "0.6379348", "0.6302862", "0.6234464", "0.61841786", "0.61803925", "0.591679", "0.57105523", "0.57086027", "0.5666004", "0.5648924", "0.56333864", "0.5610499", "0.55877644", "0.55800503", "0.556043", "0.5549913", "0.55466384", "0.5520775", "0.549118...
0.5021731
80
Calculates the dusk time (the time in the evening when the sun is a certain number of degrees below the horizon. By default this is 6 degrees but can be changed
def dusk(self, date=None, local=True): if self.astral is None: self.astral = Astral() if date is None: date = datetime.date.today() dusk = self.astral.dusk_utc(date, self.latitude, self.longitude) if local: return dusk.astimezone(self.tz) else: return dusk
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dusk_utc(self, date, latitude, longitude):\n \n julianday = self._julianday(date.day, date.month, date.year)\n\n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n t = self._jday_to_jcentury(julianda...
[ "0.6691584", "0.6364968", "0.6130741", "0.59310466", "0.583694", "0.5834618", "0.5786861", "0.5731586", "0.56987673", "0.5689336", "0.568822", "0.56476253", "0.56455123", "0.56278443", "0.5611392", "0.5609227", "0.5550629", "0.5508177", "0.55058354", "0.5498458", "0.54921347"...
0.0
-1
Returns dawn, sunrise, noon, sunset and dusk as a dictionary.
def sun(self, date=None, local=True): if self.astral is None: self.astral = Astral() if date is None: date = datetime.date.today() sun = self.astral.sun_utc(date, self.latitude, self.longitude) if local: for key, dt in sun.items(): sun[key] = dt.astimezone(self.tz) return sun
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sun_utc(self, date, latitude, longitude):\n \n dawn = self.dawn_utc(date, latitude, longitude)\n sunrise = self.sunrise_utc(date, latitude, longitude)\n noon = self.solar_noon_utc(date, longitude)\n sunset = self.sunset_utc(date, latitude, longitude)\n dusk = self.dusk...
[ "0.6244639", "0.5948716", "0.5810174", "0.5733287", "0.5685426", "0.5663543", "0.56323534", "0.5594207", "0.55913526", "0.5588648", "0.5566428", "0.555051", "0.5539472", "0.5533902", "0.5526651", "0.5480028", "0.5478169", "0.54696566", "0.5455117", "0.54445684", "0.5444169", ...
0.0
-1
Calculates the period of rahukaalam.
def rahukaalam(self, date=None, local=True): if self.astral is None: self.astral = Astral() if date is None: date = datetime.date.today() rahukaalam = self.astral.rahukaalam_utc(date, self.latitude, self.longitude) if local: for key, dt in rahukaalam.items(): rahukaalam[key] = dt.astimezone(self.tz) return rahukaalam
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def period(self) -> int:", "def generate_rapel(self):\n config = self.env['ka_hr_payroll.config'].default_config()\n last_period = self.get_last_period(self.status_id.id, self.company_payroll_id.id, config=config)\n if last_period:\n date_done = datetime.strptime(self.date_done, D...
[ "0.68010175", "0.60649586", "0.59108144", "0.58845204", "0.58199346", "0.5665051", "0.56304914", "0.562789", "0.5617035", "0.55706686", "0.5552009", "0.5500564", "0.5400052", "0.53994274", "0.53955257", "0.5372322", "0.5367489", "0.5362566", "0.5361659", "0.5354063", "0.53483...
0.0
-1
Calculates the solar azimuth angle for a specific date/time.
def solar_azimuth(self, dateandtime=None): if self.astral is None: self.astral = Astral() if dateandtime is None: dateandtime = datetime.datetime.now(tz=self.tz) return self.astral.solar_azimuth(dateandtime, self.latitude, self.longitude)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_azimuth(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n \n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.ut...
[ "0.78250027", "0.67446834", "0.6695796", "0.64115757", "0.6395283", "0.63717794", "0.63140875", "0.62267244", "0.6145953", "0.6141899", "0.6060071", "0.598316", "0.5980102", "0.59746766", "0.5884954", "0.58290213", "0.5812057", "0.58084035", "0.57966334", "0.5779464", "0.5756...
0.7764275
1
Calculates the solar elevation angle for a specific time.
def solar_elevation(self, dateandtime=None): if self.astral is None: self.astral = Astral() if dateandtime is None: dateandtime = datetime.datetime.now(tz=self.tz) return self.astral.solar_elevation(dateandtime, self.latitude, self.longitude)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_elevation(self, dateandtime, latitude, longitude):\n \n if latitude > 89.8:\n latitude = 89.8\n \n if latitude < -89.8:\n latitude = -89.8\n\n zone = -dateandtime.utcoffset().seconds / 3600.0\n utc_datetime = dateandtime.astimezone(pytz.utc)...
[ "0.71530664", "0.67977107", "0.67431724", "0.6220381", "0.61309683", "0.6082073", "0.6003304", "0.5945914", "0.5920471", "0.58973986", "0.5858497", "0.58510685", "0.58294576", "0.579033", "0.57864755", "0.57316357", "0.5688304", "0.56844896", "0.56828004", "0.56804633", "0.56...
0.70600396
1
Access to each timezone group. For example London is in timezone group Europe. Attribute lookup is case insensitive
def __getattr__(self, key): key = str(key).lower().encode('utf-8') for name, value in self._groups.items(): if name == key: return value raise AttributeError('Group \'%s\' not found' % key)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_athlete_zones(self):\n pass", "def grouping_timezone(self):\n return tz.gettz(self._summariser.grouping_time_zone)", "def associate_timezones_to_countries(self):\n\t\t\n\t\tresult = {}\n\t\twith open(\"/usr/share/zoneinfo/zone.tab\", \"r\") as f:\n\t\t\tfor line in f.readlines():\n\t\t\t\...
[ "0.5835346", "0.57775724", "0.57361513", "0.5698909", "0.55084604", "0.55003285", "0.545441", "0.5390807", "0.53817374", "0.5315582", "0.526018", "0.52240425", "0.51877755", "0.5176649", "0.516878", "0.51196074", "0.5056693", "0.50521755", "0.49813348", "0.49468496", "0.49349...
0.45075953
76
Lookup a city within all timezone groups. Item lookup is case insensitive.
def __getitem__(self, key): key = str(key).lower().encode('utf-8') for group in self._groups.values(): try: return group[key] except KeyError: pass raise KeyError('Unrecognised city name - %s' % key)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_cities(self, city_name: str = None):", "def get_cities(self, city_name: str = \"\"):", "async def _timein_city(self, *, city_name):\n\t\t\n\t\tapiKey = self.settings['api_key']\n\t\tif \".com\" in apiKey:\n\t\t\tawait self.bot.say(\"You have to set your API key, see data/timein/settings.json for detail...
[ "0.65049624", "0.63830185", "0.6244422", "0.61362547", "0.61103976", "0.5962785", "0.5956759", "0.5944463", "0.5939915", "0.5865012", "0.58639216", "0.58566123", "0.5840458", "0.58180654", "0.58092016", "0.58036107", "0.58036107", "0.5790917", "0.57779676", "0.5772209", "0.57...
0.61644584
3
Initialise the city database and set the default depression.
def __init__(self): self._citydb = CityDB() self._depression = 6 # Set default depression in degrees
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setUp(self):\n self.my_city = City()", "def initialize():\n sql_db = SQLConnection()\n with SQLCursor(sql_db) as cur:\n cur.execute('SELECT position from govt_info')\n row = cur.fetchone()\n for pos in Government.positions:\n if row is None or len(row) != len(Gove...
[ "0.6182034", "0.6132606", "0.60481834", "0.60060346", "0.5950479", "0.5950479", "0.59373367", "0.59160274", "0.5874279", "0.58517987", "0.5830317", "0.58109874", "0.5791189", "0.57841307", "0.5774677", "0.57559514", "0.5754031", "0.57528317", "0.5746476", "0.5727893", "0.5670...
0.77128774
0
Returns the City instance specified by ``key``.
def __getitem__(self, key): city = self._citydb[key] city.astral = self return city
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_city(self, name: str):\n key = name.lower()\n try:\n return self._cities[key]\n except KeyError:\n city = City(name=name, state=self)\n self._cities[key] = city\n return city", "def __getitem__(self, key):\n \n key = str(key)....
[ "0.6803799", "0.64028823", "0.61282915", "0.6064786", "0.598396", "0.5972638", "0.59683293", "0.59556264", "0.5922655", "0.5832542", "0.5792213", "0.5784531", "0.57582283", "0.573862", "0.57019925", "0.5670712", "0.56591344", "0.56591344", "0.56481266", "0.56079686", "0.55630...
0.769488
0
Calculate all the info for the sun at once.
def sun_utc(self, date, latitude, longitude): dawn = self.dawn_utc(date, latitude, longitude) sunrise = self.sunrise_utc(date, latitude, longitude) noon = self.solar_noon_utc(date, longitude) sunset = self.sunset_utc(date, latitude, longitude) dusk = self.dusk_utc(date, latitude, longitude) return {'dawn': dawn, 'sunrise': sunrise, 'noon': noon, 'sunset': sunset, 'dusk': dusk}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def computeSunTime(self, latitude, longitude, startDate, endDate): \n self.sun = sun(lat=latitude, long=longitude)\n dateTime = datetime.datetime.combine(startDate, datetime.time(hour=8))\n while dateTime.date() <= endDate: \n daytimeStart, daytimeEnd = self.computeDaytime...
[ "0.60909706", "0.6054457", "0.5922382", "0.56137407", "0.5576481", "0.5530526", "0.55205214", "0.5497912", "0.5490896", "0.5478395", "0.5460385", "0.5441609", "0.54134166", "0.5389697", "0.53693914", "0.53531724", "0.5340516", "0.53140557", "0.5308317", "0.52656955", "0.52567...
0.56515837
3
Calculate dawn time in the UTC timezone.
def dawn_utc(self, date, latitude, longitude): julianday = self._julianday(date.day, date.month, date.year) if latitude > 89.8: latitude = 89.8 if latitude < -89.8: latitude = -89.8 t = self._jday_to_jcentury(julianday) eqtime = self._eq_of_time(t) solarDec = self._sun_declination(t) try: hourangle = self._hour_angle_sunrise(latitude, solarDec) except: raise AstralError('Sun remains below horizon on this day, at this location.') delta = longitude - degrees(hourangle) timeDiff = 4.0 * delta timeUTC = 720.0 + timeDiff - eqtime newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0) eqtime = self._eq_of_time(newt) solarDec = self._sun_declination(newt) hourangle = self._hour_angle_dawn(latitude, solarDec, self._depression) delta = longitude - degrees(hourangle) timeDiff = 4 * delta timeUTC = 720 + timeDiff - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) dawn = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return dawn
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dawn(self, date=None, local=True):\n\n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n dawn = self.astral.dawn_utc(date, self.latitude, self.longitude)\n\n if local:\n return dawn.astimezone(se...
[ "0.628965", "0.59754497", "0.58763033", "0.5851417", "0.5806555", "0.5735901", "0.56585526", "0.56538594", "0.55857354", "0.55760634", "0.5519843", "0.55160165", "0.55128175", "0.55116653", "0.53910935", "0.5379949", "0.536428", "0.5360612", "0.53601545", "0.5338035", "0.5333...
0.76470286
0
Calculate sunrise time in the UTC timezone.
def sunrise_utc(self, date, latitude, longitude): julianday = self._julianday(date.day, date.month, date.year) t = self._jday_to_jcentury(julianday) eqtime = self._eq_of_time(t) solarDec = self._sun_declination(t) try: hourangle = self._hour_angle_sunrise(latitude, solarDec) except: raise AstralError('Sun remains below horizon on this day, at this location.') delta = longitude - degrees(hourangle) timeDiff = 4.0 * delta timeUTC = 720.0 + timeDiff - eqtime newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0) eqtime = self._eq_of_time(newt) solarDec = self._sun_declination(newt) hourangle = self._hour_angle_sunrise(latitude, solarDec) delta = longitude - degrees(hourangle) timeDiff = 4 * delta timeUTC = 720 + timeDiff - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) sunrise = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return sunrise
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_sun_rise_set_time(self, sun_time):\n if sun_time:\n return datetime.fromtimestamp(sun_time).strftime(self.time_format)\n return sun_time", "def alt_sunrise(cls, date):\n rise = cls.UJJAIN.dawn(date, angle(0, 47, 0))\n return 1/24 * 1/60 * iround(rise * 24 * 60)", ...
[ "0.74266624", "0.7356725", "0.72597265", "0.7091791", "0.70251125", "0.7006317", "0.6827473", "0.68216705", "0.68117666", "0.66464955", "0.65655184", "0.6516135", "0.6488186", "0.6406055", "0.63925016", "0.6312478", "0.62507564", "0.6104449", "0.60976666", "0.6094293", "0.605...
0.79015124
0
Calculate solar noon time in the UTC timezone.
def solar_noon_utc(self, date, longitude): julianday = self._julianday(date.day, date.month, date.year) newt = self._jday_to_jcentury(julianday + 0.5 + longitude / 360.0) eqtime = self._eq_of_time(newt) timeUTC = 720.0 + (longitude * 4.0) - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) noon = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return noon
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_noon(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n noon = self.astral.solar_noon_utc(date, self.longitude)\n\n if local:\n return noon.astimezo...
[ "0.6777714", "0.6724988", "0.6343755", "0.626581", "0.62460506", "0.59971875", "0.59616363", "0.594989", "0.5928303", "0.5893481", "0.5877765", "0.58015263", "0.5749765", "0.5749219", "0.5719723", "0.571865", "0.571865", "0.56694263", "0.5617303", "0.5612241", "0.56063336", ...
0.74279106
0
Calculate sunset time in the UTC timezone.
def sunset_utc(self, date, latitude, longitude): julianday = self._julianday(date.day, date.month, date.year) t = self._jday_to_jcentury(julianday) eqtime = self._eq_of_time(t) solarDec = self._sun_declination(t) try: hourangle = self._hour_angle_sunset(latitude, solarDec) except: raise AstralError('Sun remains below horizon on this day, at this location.') delta = longitude - degrees(hourangle) timeDiff = 4.0 * delta timeUTC = 720.0 + timeDiff - eqtime newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0) eqtime = self._eq_of_time(newt) solarDec = self._sun_declination(newt) hourangle = self._hour_angle_sunset(latitude, solarDec) delta = longitude - degrees(hourangle) timeDiff = 4 * delta timeUTC = 720 + timeDiff - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) sunset = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return sunset
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sunset(self, date=None, local=True):\n \n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n sunset = self.astral.sunset_utc(date, self.latitude, self.longitude)\n\n if local:\n return suns...
[ "0.65699303", "0.64950114", "0.63881093", "0.61808205", "0.61730003", "0.6134258", "0.60923696", "0.60374415", "0.60219973", "0.5875478", "0.5835468", "0.5818931", "0.5818664", "0.58087045", "0.5799634", "0.57659054", "0.57374483", "0.5717565", "0.5715173", "0.5711675", "0.57...
0.74293166
0
Calculate dusk time in the UTC timezone.
def dusk_utc(self, date, latitude, longitude): julianday = self._julianday(date.day, date.month, date.year) if latitude > 89.8: latitude = 89.8 if latitude < -89.8: latitude = -89.8 t = self._jday_to_jcentury(julianday) eqtime = self._eq_of_time(t) solarDec = self._sun_declination(t) try: hourangle = self._hour_angle_sunset(latitude, solarDec) except: raise AstralError('Sun remains below horizon on this day, at this location.') delta = longitude - degrees(hourangle) timeDiff = 4.0 * delta timeUTC = 720.0 + timeDiff - eqtime newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + timeUTC / 1440.0) eqtime = self._eq_of_time(newt) solarDec = self._sun_declination(newt) hourangle = self._hour_angle_dusk(latitude, solarDec, self._depression) delta = longitude - degrees(hourangle) timeDiff = 4 * delta timeUTC = 720 + timeDiff - eqtime timeUTC = timeUTC/60.0 hour = int(timeUTC) minute = int((timeUTC - hour) * 60) second = int((((timeUTC - hour) * 60) - minute) * 60) if second > 59: second -= 60 minute += 1 elif second < 0: second += 60 minute -= 1 if minute > 59: minute -= 60 hour += 1 elif minute < 0: minute += 60 hour -= 1 if hour > 23: hour -= 24 date += datetime.timedelta(days=1) elif hour < 0: hour += 24 date -= datetime.timedelta(days=1) dusk = datetime.datetime(date.year, date.month, date.day, hour, minute, second, tzinfo=pytz.utc) return dusk
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def utcTime():\r\n return calendar.timegm(time.gmtime())", "def _get_tz():\n return 'UTC'", "def get_time():\n\teastern = timezone('US/Eastern')\n\tnow = datetime.datetime.now(eastern).time()\n\treturn(now)", "def timezone():\n \n pass", "def nowUTC():\n return datetime.datetime.now(pytz.utc)...
[ "0.69508314", "0.6756243", "0.646933", "0.64476836", "0.64054435", "0.6378315", "0.6367498", "0.6344899", "0.630024", "0.6293508", "0.62492496", "0.62225604", "0.6172558", "0.615256", "0.6152383", "0.6115891", "0.61116207", "0.610577", "0.6104182", "0.6066876", "0.606603", ...
0.7123817
0
Calculate ruhakaalam times in the UTC timezone.
def rahukaalam_utc(self, date, latitude, longitude): if date is None: date = datetime.date.today() try: sunrise = self.sunrise_utc(date, latitude, longitude) sunset = self.sunset_utc(date, latitude, longitude) except: raise AstralError('Sun remains below horizon on this day, at this location.') octant_duration = (sunset - sunrise) / 8 # Mo,Sa,Fr,We,Th,Tu,Su octant_index = [1,6,4,5,3,2,7] weekday = date.weekday() octant = octant_index[weekday] start = sunrise + (octant_duration * octant) end = start + octant_duration return {'start': start, 'end': end}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rahukaalam(self, date=None, local=True):\n\n if self.astral is None:\n self.astral = Astral()\n\n if date is None:\n date = datetime.date.today()\n\n rahukaalam = self.astral.rahukaalam_utc(date, self.latitude, self.longitude)\n\n if local:\n for key...
[ "0.6330134", "0.61681116", "0.60523003", "0.59781736", "0.5969716", "0.56076247", "0.55986744", "0.55825686", "0.5552908", "0.5434768", "0.53342485", "0.53334755", "0.53267014", "0.5321076", "0.5287703", "0.5276938", "0.5276938", "0.5272016", "0.52586037", "0.5246418", "0.522...
0.6230036
1
Calculate the azimuth of the sun in the UTC timezone.
def solar_azimuth(self, dateandtime, latitude, longitude): if latitude > 89.8: latitude = 89.8 if latitude < -89.8: latitude = -89.8 zone = -dateandtime.utcoffset().seconds / 3600.0 utc_datetime = dateandtime.astimezone(pytz.utc) timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600) JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year) t = self._jday_to_jcentury(JD + timenow / 24.0) theta = self._sun_declination(t) Etime = self._eq_of_time(t) eqtime = Etime solarDec = theta # in degrees solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone) trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix # in minutes while trueSolarTime > 1440: trueSolarTime = trueSolarTime - 1440 hourangle = trueSolarTime / 4.0 - 180.0 # Thanks to Louis Schwarzmayr for the next line: if hourangle < -180: hourangle = hourangle + 360.0 harad = radians(hourangle) csz = sin(radians(latitude)) * sin(radians(solarDec)) + \ cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad) if csz > 1.0: csz = 1.0 elif csz < -1.0: csz = -1.0 zenith = degrees(acos(csz)) azDenom = (cos(radians(latitude)) * sin(radians(zenith))) if (abs(azDenom) > 0.001): azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom if abs(azRad) > 1.0: if azRad < 0: azRad = -1.0 else: azRad = 1.0 azimuth = 180.0 - degrees(acos(azRad)) if hourangle > 0.0: azimuth = -azimuth else: if latitude > 0.0: azimuth = 180.0 else: azimuth = 0# if azimuth < 0.0: azimuth = azimuth + 360.0 return azimuth
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solar_azimuth(self, dateandtime=None):\n\n if self.astral is None:\n self.astral = Astral()\n\n if dateandtime is None:\n dateandtime = datetime.datetime.now(tz=self.tz)\n \n return self.astral.solar_azimuth(dateandtime, self.latitude, self.longitude)", "...
[ "0.67131555", "0.6504118", "0.64523375", "0.6439935", "0.6436878", "0.641925", "0.6345609", "0.6120051", "0.6109093", "0.6081307", "0.6055947", "0.6041341", "0.6014488", "0.5996403", "0.5920246", "0.5873155", "0.586566", "0.5822988", "0.58073103", "0.5778373", "0.573385", "...
0.6639568
1
Calculate the elevation of the sun.
def solar_elevation(self, dateandtime, latitude, longitude): if latitude > 89.8: latitude = 89.8 if latitude < -89.8: latitude = -89.8 zone = -dateandtime.utcoffset().seconds / 3600.0 utc_datetime = dateandtime.astimezone(pytz.utc) timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + (utc_datetime.second / 3600) JD = self._julianday(dateandtime.day, dateandtime.month, dateandtime.year) t = self._jday_to_jcentury(JD + timenow / 24.0) theta = self._sun_declination(t) Etime = self._eq_of_time(t) eqtime = Etime solarDec = theta # in degrees solarTimeFix = eqtime - (4.0 * longitude) + (60 * zone) trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + dateandtime.second / 60.0 + solarTimeFix # in minutes while trueSolarTime > 1440: trueSolarTime = trueSolarTime - 1440 hourangle = trueSolarTime / 4.0 - 180.0 # Thanks to Louis Schwarzmayr for the next line: if hourangle < -180: hourangle = hourangle + 360.0 harad = radians(hourangle) csz = sin(radians(latitude)) * sin(radians(solarDec)) + \ cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad) if csz > 1.0: csz = 1.0 elif csz < -1.0: csz = -1.0 zenith = degrees(acos(csz)) azDenom = (cos(radians(latitude)) * sin(radians(zenith))) if (abs(azDenom) > 0.001): azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - sin(radians(solarDec))) / azDenom if abs(azRad) > 1.0: if azRad < 0: azRad = -1.0 else: azRad = 1.0 azimuth = 180.0 - degrees(acos(azRad)) if hourangle > 0.0: azimuth = -azimuth else: if latitude > 0.0: azimuth = 180.0 else: azimuth = 0 if azimuth < 0.0: azimuth = azimuth + 360.0 exoatmElevation = 90.0 - zenith if exoatmElevation > 85.0: refractionCorrection = 0.0 else: te = tan(radians(exoatmElevation)) if exoatmElevation > 5.0: refractionCorrection = 58.1 / te - 0.07 / (te * te * te) + 0.000086 / (te * te * te * te * te) elif exoatmElevation > -0.575: step1 = (-12.79 + exoatmElevation * 0.711) step2 = (103.4 + exoatmElevation * (step1)) step3 = (-518.2 + exoatmElevation * (step2)) 
refractionCorrection = 1735.0 + exoatmElevation * (step3) else: refractionCorrection = -20.774 / te refractionCorrection = refractionCorrection / 3600.0 solarzen = zenith - refractionCorrection solarelevation = 90.0 - solarzen return solarelevation
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def elevation(self):\n return self.altitude - self.heightAboveGround", "def elevation(x, y):\n file = os.path.abspath(\"..\") + \"\\Shape\\Shape.vrt\"\n layer = gdal.Open(file)\n gt = layer.GetGeoTransform()\n rasterx = int((x - gt[0]) / gt[1])\n rastery = int((y - gt[3]) / gt[5])\n prin...
[ "0.6999895", "0.668499", "0.6425622", "0.6422953", "0.6396147", "0.6377398", "0.63741803", "0.61598486", "0.61377054", "0.61298066", "0.5996655", "0.59889287", "0.5973628", "0.58740777", "0.58685416", "0.5855382", "0.58081555", "0.57917005", "0.5758174", "0.5704655", "0.57044...
0.5579604
27
Calculates the phase of the moon on the specified date.
def moon_phase(self, date): jd = self._julianday(date.day, date.month, date.year) DT = pow((jd - 2382148), 2) / (41048480*86400) T = (jd + DT - 2451545.0) / 36525 T2 = pow(T,2) T3 = pow(T,3) D = 297.85 + (445267.1115*T) - (0.0016300*T2) + (T3/545868) D = radians(self._proper_angle(D)) M = 357.53 + (35999.0503*T) M = radians(self._proper_angle(M)) M1 = 134.96 + (477198.8676*T) + (0.0089970*T2) + (T3/69699) M1 = radians(self._proper_angle(M1)) elong = degrees(D) + 6.29*sin(M1) elong -= 2.10*sin(M) elong += 1.27*sin(2*D - M1) elong += 0.66*sin(2*D) elong = self._proper_angle(elong) moon = int(floor(((elong + 6.43) / 360) * 28)) if moon == 28: moon = 0 return moon
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def moon_phase(self, date=None):\n self._set_site_date(date)\n self.moon.compute(self.site)\n return self.moon.moon_phase", "def equation_of_time(cls, date):\n offset = cls.sine(cls.mean_position(date, cls.ANOMALISTIC_YEAR))\n equation_sun = (offset * angle(57, 18, 0) * (14/360...
[ "0.7753409", "0.64832145", "0.6266615", "0.6266615", "0.5898424", "0.5834775", "0.5758287", "0.5736949", "0.5586432", "0.5580027", "0.5443291", "0.5376677", "0.53231025", "0.53126574", "0.5306432", "0.5282793", "0.52764946", "0.5266146", "0.5248463", "0.5236514", "0.5230845",...
0.7782467
0
Reorder 'shape' according to the chosen data layout to optimize data distribution.
def _optimizeshape(shape): shape.sort() if ORDER == 'C': shape[:] = shape[::-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unchanged_shape(input_shape):\n return input_shape", "def restore_backup_shape(self):\n\n self.shape = self.shape_backup", "def backup_shape(self):\n\n self.shape_backup = np.copy(self.shape)", "def set_shape(self, shape):\n self._shape = self._shape.merge_with(shape)", "def change_...
[ "0.601357", "0.6012796", "0.59265906", "0.59111005", "0.58404654", "0.5758127", "0.57051116", "0.5653655", "0.5619018", "0.5616662", "0.5612885", "0.56128573", "0.561095", "0.5574861", "0.5568789", "0.55197614", "0.5517362", "0.551245", "0.54615873", "0.54567415", "0.54471827...
0.7083111
0
returns the communicator used to build this topology
def parent(self): return self._mpis.comm
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_comm() -> Communication:\n return __default_comm", "def get_comm(self):\n return self.comm", "def comm(self):\n return self._comm", "def GetComm(self):\n return _hypre.HypreParMatrix_GetComm(self)", "def topology(self):\n return self._topology", "def object_communic...
[ "0.72166127", "0.6937673", "0.66808903", "0.65805095", "0.65750676", "0.6487292", "0.62338114", "0.62185067", "0.5998818", "0.5994917", "0.59699655", "0.5910966", "0.5877223", "0.5876523", "0.58492416", "0.58371776", "0.58371776", "0.58371776", "0.58118683", "0.5713875", "0.5...
0.64308983
6
returns ghost layer width.
def ghosts(self): return self.mesh.discretization.ghosts
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_width(self):\n\t\treturn len(self._background) if self._background else 0", "def get_dimension_width(self):\n pass", "def width(self) -> int:\n return self._obj[self.x_dim].size", "def getWidth(self):\n return _libsbml.Dimensions_getWidth(self)", "def getWidth(self):\n r...
[ "0.74909455", "0.73224", "0.71234566", "0.7045834", "0.69717586", "0.6938515", "0.69376487", "0.693364", "0.693364", "0.69244164", "0.692004", "0.68859637", "0.6870889", "0.6870504", "0.6858993", "0.6841644", "0.68349195", "0.68349195", "0.68349195", "0.6833873", "0.68273956"...
0.0
-1
returns id of the task that owns this topology
def task_id(self): return self._mpis.task_id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def task_id(self):\n return self._task_id", "def taskid(self):\n raise NotImplementedError('Must be implemented by subclass.')", "def t...
[ "0.75476515", "0.75476515", "0.75476515", "0.75476515", "0.7357828", "0.72557056", "0.7198645", "0.6637944", "0.6620676", "0.65653884", "0.6474", "0.6436455", "0.63189775", "0.6293882", "0.6293882", "0.62764555", "0.6194142", "0.61492884", "0.6114325", "0.611248", "0.60441566...
0.75019264
4
Defines a 'plane' (1D) topology for a given mesh resolution. This function is to be used when topo/discretization features come from an external routine (e.g. scales or fftw)
def plane_precomputed(cls, localres, global_start, cdir=None, **kwds): msg = 'parameter is not required for plane_precomputed' msg += ' topology construction.' assert 'dim' not in kwds, 'dim ' + msg assert 'shape ' not in kwds, 'shape ' + msg assert 'cutdir ' not in kwds, 'cutdir ' + msg # Local mesh : global_start = npw.asdimarray(global_start) localres = npw.asdimarray(localres) mesh = Mesh(kwds['domain'], kwds['discretization'], localres, global_start) # MPI layout domain = kwds['domain'] cutdir = npw.zeros(domain.dimension, dtype=npw.bool) if cdir is not None: cutdir[cdir] = True else: if ORDER == 'C': cutdir[0] = True else: cutdir[-1] = True return cls(mesh=mesh, cutdir=cutdir, **kwds)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def WritePlane(self):\n if not self.__train:\n print('ERROR: Must use Train before WritePlane')\n sys.exit(-1)\n if not self.__openPlaneO:\n print('ERROR: Must use OpenPlaneO before WritePlane')\n sys.exit(-1)\n\n # Defines angular dimensions\n self.__nc_RSoft_O.createDimension('typ...
[ "0.5991863", "0.5981517", "0.59631455", "0.5854901", "0.58076966", "0.5753462", "0.56212896", "0.5567714", "0.55646545", "0.55594105", "0.55340147", "0.551879", "0.54897666", "0.5471865", "0.5409033", "0.53919226", "0.5324614", "0.53135514", "0.5296597", "0.5288494", "0.52642...
0.59658796
2
Comparison of two topologies. Two topos are equal if they have the same mesh, shape and domain.
def __eq__(self, other): if self.__class__ != other.__class__: return False return self.mesh == other.mesh and \ npw.equal(self.shape, other.shape).all() and \ self.domain == other.domain
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def have_same_topology(first_mesh, second_mesh):\n return attr_has_same_shape(first_mesh, second_mesh, \"v\") and attr_is_equal(\n first_mesh, second_mesh, \"f\"\n )", "def equivalent(kls, first, second):\n if first.empty() and second.empty():\n return True\n elif first.vertices.shape[0] ...
[ "0.7391109", "0.69211054", "0.66308445", "0.6267532", "0.6095765", "0.5980267", "0.5966569", "0.5945673", "0.59418166", "0.5940273", "0.59177405", "0.5912851", "0.59052336", "0.59029365", "0.5899751", "0.5899751", "0.589094", "0.587771", "0.58488256", "0.5842145", "0.5818348"...
0.651133
3
Not equal operator. Seems to be required in addition to __eq__ to avoid 'cornercase' behaviors.
def __ne__(self, other): result = self.__eq__(other) if result is NotImplemented: return result return not result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __ne__(self, other):\n return not (self == other) # opposite of __eq__", "def __ne__(self, other):\n return not (self == other) # opposite of __eq__", "def __ne__(self, rhs):\n return not self.__eq__(rhs)", "def __ne__(self, other):\r\n return not self.__eq__...
[ "0.8394335", "0.8386683", "0.8160081", "0.81121725", "0.81121725", "0.81121725", "0.81121725", "0.8075682", "0.80685496", "0.80672383", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072", "0.8056072...
0.0
-1
True if ghost layer length is not zero.
def has_ghosts(self): return not np.all(self.mesh.discretization.ghosts == 0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def empty(self):\n return len(self.layers) == 0", "def is_empty(self):\n return ch.prod(ch.tensor(self.x.shape)).item() == 0", "def is_ghost(self):\n\t\treturn False", "def is_trivial(self):\n return self.dims == 0", "def is_empty(self) -> bool:\n return self.num_grna() == 0", ...
[ "0.7561698", "0.72112876", "0.7065583", "0.7046145", "0.7030016", "0.69214237", "0.6830907", "0.6792221", "0.67822015", "0.67809653", "0.67568576", "0.6733361", "0.6685363", "0.6656976", "0.6638194", "0.6637561", "0.6615059", "0.660577", "0.65900713", "0.6586594", "0.658374",...
0.72264445
1
return the id of the present topology. This id is unique among all defined topologies.
def get_id(self): return self.__id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unique_id(self) -> str:\n return str(self.coordinator.gios.station_id)", "def topology_name(self):\n return self._topology_name", "def topology(self):\n return self._topology", "def portlet_id(self):\n return id(self)", "def establish_id(self):\n if self.config.node_i...
[ "0.69869524", "0.66642356", "0.6493194", "0.6485879", "0.64025915", "0.63910806", "0.6356197", "0.62523925", "0.62455714", "0.62014794", "0.6190886", "0.61564124", "0.6125926", "0.60740584", "0.6072235", "0.60693944", "0.60588557", "0.6046706", "0.60322815", "0.60287", "0.602...
0.0
-1