function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def aidCommon(self, card): if not card.routingAttr: return False return set(sim_card.FILES_AID).issubset(set(card.routingAttr.filesCommon))
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def getFileHandler(self, file): #by default execute apdu in card 0 cards = [self.cardsDict[0][MAIN_INTERFACE]] for cardDict in self.cardsDict: if cardDict == self.cardsDict[0]: #cardDict already in cards continue card = cardDict[MAIN_INTERFACE] if file in card.routingAttr.filesCommon: cards.append(card) elif file in card.routingAttr.filesReplaced: return [card] return cards
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def addLeftHandlers(self, cards): for cardDict in self.cardsDict: card = cardDict[MAIN_INTERFACE] if card in cards: continue cards.append(card) return cards
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def handleApdu(self, cardData, apdu): card = cardData[0] sendData = cardData[1] if card == None: raise Exception("card not initialized") ins = types.insName(apdu) if card != self.getMainCard(0): origApdu = apdu if ( self.aidCommon(card) and card.routingAttr.aidToSelect and self.getMainCard(0).routingAttr.aidToSelect == hextools.bytes2hex(apdu) and #origin apdu is AID int(card.routingAttr.aidToSelect[0:2], 16) == apdu[0]): #check the same class apdu = hextools.hex2bytes(card.routingAttr.aidToSelect) card.routingAttr.aidToSelect = None elif ( self.aidCommon(card) and card.routingAttr.getFileSelected(apdu[0]) == 'EF_DIR' and ins == 'READ_RECORD' and card.routingAttr.recordEfDirLength): apdu[4] = card.routingAttr.recordEfDirLength if origApdu != apdu: self.loggingApdu.info("") self.loggingApdu.info("*C-APDU%d: %s" %(self.getSimId(card), hextools.bytes2hex(apdu))) if self.simType == types.TYPE_SIM and (apdu[0] & 0xF0) != 0xA0: #force 2G on USIM cards sw = types_g.sw.CLASS_NOT_SUPPORTED sw1 = sw>>8 sw2 = sw & 0x00FF responseApdu = [sw1, sw2] elif ins == 'GET_RESPONSE' and card.routingAttr.getResponse: responseApdu = card.routingAttr.getResponse card.routingAttr.getResponse = None else: responseApdu = card.apdu(apdu) if card != self.getMainCard(0): if (self.aidCommon(card) and card.routingAttr.getFileSelected(apdu[0]) == 'EF_DIR' and ins == 'GET_RESPONSE' and types.swNoError(responseApdu) and len(responseApdu) > 7): card.routingAttr.recordEfDirLength = responseApdu[7] if (TRY_ANOTHER_CARD_ON_AUTH_FAILURE and self.getNbrOfCards() > 1 and card.routingAttr.getFileSelected(apdu[0]) == 'AUTH' and types.sw(responseApdu) == types_g.sw.AUTHENTICATION_ERROR_APPLICATION_SPECIFIC): sw1Name, swName = types.swName(types.sw(responseApdu) >> 8, types.sw(responseApdu) & 0x00FF) self.logging.warning("Response not expected. 
SW1: %s, SW: %s" %(sw1Name, swName)) self.logging.warning("Change card to process AUTHENTICATION") if card == self.getMainCard(0): cardTmp = self.getMainCard(1) else: cardTmp = self.getMainCard(0) responseApdu = cardTmp.apdu(apdu) cardTmp.routingAttr.setFileSelected('AUTH', apdu[0]) card.routingAttr.setFileSelected(None, apdu[0]) # TODO: check if exist cardTmp.routingAttr.insReplaced.append('INTERNAL_AUTHENTICATE') if types.sw1(responseApdu) in [types_g.sw1.RESPONSE_DATA_AVAILABLE_2G, types_g.sw1.RESPONSE_DATA_AVAILABLE_3G]: # cache 'GET_RESPONSE' getResponseLength = types.sw2(responseApdu) cla = apdu[0] apduTmp = "%02XC00000%02X" %(cla, getResponseLength) self.loggingApdu.info("**C-APDU%d: %s" %(self.getSimId(cardTmp), apduTmp)) cardTmp.routingAttr.getResponse = cardTmp.apdu(apduTmp) if card.routingAttr.getFileSelected(apdu[0]) == 'EF_IMSI' and types.swNoError(responseApdu): #cache imsi responseData = types.responseData(responseApdu) if ins == 'READ_BINARY' and types.p1(apdu) == 0 and types.p2(apdu) == 0: #When P1=8X then SFI is used to select the file. #Remove the check when SFI checking is implemented imsi = hextools.decode_BCD(responseData)[3:] #TODO: remove length check when name for the file comes from #the whole path and not fid. 
6f07 is also in ADF_ISIM if len(imsi) > 10: card.imsi = imsi #update associated interface if self.isCardCtrl(card): self.getRelatedMainCard(card).imsi = imsi else: self.getRelatedCtrlCard(card).imsi = imsi elif ins == 'UPDATE_BINARY': card.imsi = None responseApduHex = hextools.bytes2hex(responseApdu) #example of APDU modification if responseApduHex == "02542D4D6F62696C652E706CFFFFFFFFFF9000": #change SPN name 'T-mobile.pl' for 'Tmobile-SPN' responseApdu = hextools.hex2bytes("02546D6F62696C652D53504EFFFFFFFFFF9000") if sendData: if ((types.sw(responseApdu) == types_g.sw.NO_ERROR or types.sw1(responseApdu) == types_g.sw1.NO_ERROR_PROACTIVE_DATA) and self.getNbrOfCards() > 1): # Check for pending SAT command for cardDict in self.cardsDict: cardTmp = cardDict[MAIN_INTERFACE] if card == cardTmp: continue if set(sim_card.SAT_INS) <= set(cardTmp.routingAttr.insReplaced): swNoError = cardTmp.swNoError if types.unpackSw(swNoError)[0] == types_g.sw1.NO_ERROR_PROACTIVE_DATA: #update r-apdu with proactive data information responseApdu[-2] = swNoError >> 8 responseApdu[-1] = swNoError & 0x00FF break self.sendResponseApdu(responseApdu) if card == self.getMainCard(0) or sendData: self.pretty_apdu(apdu) responseApduHex = hextools.bytes2hex(responseApdu) self.loggingApdu.info("R-APDU%d: %s" %(self.getSimId(card), responseApduHex)) # gsmtap.log(apdu,responseApdu) # Uncomment for wireshark return responseApdu
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def tick(self): with self.lock: inject = INJECT_READY evt, apdu = self.receiveCommandApdu() if evt == EVT_RESET: self.resetCards() return if not apdu: if (not self.inject or self.rapduInject): # Wait until rapduInject is consumed return else: inject = self.inject apdu = self.apduInjectedData self.apduInjectedData = None if not apdu: raise Exception("APDU is empty") self.lastUpdate = time.time() cardsData = self.getHandlers(apdu, inject) responseApdu = None for cardData in cardsData: if cardData == cardsData[0]: apduHex = hextools.bytes2hex(apdu) self.loggingApdu.info("") self.loggingApdu.info("C-APDU%d: %s" %(self.getSimId(cardData[0]), apduHex)) responseApduTemp = self.handleApdu(cardData, apdu) if cardData[1]: if cardData[0] != self.getMainCard(0): self.loggingApdu.info("*R-APDU%d" %self.getSimId(cardData[0])) responseApdu = responseApduTemp self.updateHandler(cardData, apdu, responseApduTemp) if not responseApdu and not inject: raise Exception("No response received") if inject: self.rapduInject = responseApduTemp
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def getNbrOfCards(self): return len(self.cardsDict)
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def getCardDictFromId(self, simId): if simId >= self.getNbrOfCards() or simId < 0: raise Exception("simId: " + str(simId) + " not found") return self.cardsDict[simId]
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def getMainCard(self, simId): cardDict = self.getCardDictFromId(simId) return cardDict[MAIN_INTERFACE]
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def getRelatedMainCard(self, cardCtrl): for cardDict in self.cardsDict: if cardDict[CTRL_INTERFACE] == cardCtrl: return cardDict[MAIN_INTERFACE] return None
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def swapCards(self, simId1, simId2): cardDict1 = self.getCardDictFromId(simId1) cardDict2 = self.getCardDictFromId(simId2) #with self.lock: self.cardsDict[simId1] = cardDict2 self.cardsDict[simId2] = cardDict1
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def getATR(self): if self.atr is not None: return self.atr else: return self.getMainCard(0).getATR()
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def waitRapduInject(self, timeout=30): startTime = time.time() while True: with self.lock: rapduInject = self.rapduInject if rapduInject: self.rapduInject = None self.inject = INJECT_READY return rapduInject currentTime = time.time() if currentTime - startTime > timeout: self.inject = INJECT_READY raise Exception("Timeout. No rapdu for injected data received within %ds" %timeout) time.sleep(0.001)
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def setPowerSkip(self, skip): self.command(CMD_SET_SKIP, hextools.u32(skip))
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def run(self, mode=ROUTER_MODE_INTERACTIVE): if self.loop and self.routerMode == ROUTER_MODE_DISABLED: self.shell.updateInteractive(self.getInteractiveFromMode(mode)) self.startPlacServer(mode) return self.routerMode = mode time.sleep(0.1) # Truncated logs self.loggingApdu.info("============") self.loggingApdu.info("== simLAB ==") self.loggingApdu.info("== ver %s==" %_version_) self.loggingApdu.info("============") self.command(CMD_SET_ATR, self.getATR()) self.setPowerSkip(skip=1) self.powerHalt() self.loop = MainLoopThread(self) self.loop.setDaemon(True) # Start handling incoming phone C-APDUs. self.loop.start() # Default card control interface. if self.simType == types.TYPE_SIM: self.simCtrl = sim_ctrl_2g.SimCtrl(self) else: self.simCtrl = sim_ctrl_3g.SimCtrl(self) self.simCtrl.init() interactive = self.getInteractiveFromMode(mode) # Plac telnet server works without interactive mode self.shell = sim_shell.SimShell(self.simCtrl, interactive) self.startPlacServer(mode)
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def startPlacServer(self, mode): if mode == ROUTER_MODE_DISABLED: return self.interpreter = plac.Interpreter(self.shell) if mode == ROUTER_MODE_TELNET: self.interpreter.start_server() # Loop elif mode == ROUTER_MODE_DBUS: from util import dbus_ctrl dbus_ctrl.startDbusProcess(self) # Loop elif mode == ROUTER_MODE_INTERACTIVE: path = self.simCtrl.getCurrentFile().path self.interpreter.interact(prompt="\n%s>"%path) else: raise Exception("Unexpected mode")
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def setupLogger(self): logger = logging.getLogger("router") #dont't propagate to root logger logger.propagate=False logger.handlers = [] consoleHandler = logging.StreamHandler() consoleHandler.setLevel(logging.DEBUG) # create file handler which logs even debug messages dir = os.path.dirname(__file__) resultFile = dir + "/../apdu.log" fileHandler = logging.FileHandler(resultFile, mode='w') fileHandler.setLevel(logging.INFO) # create formatter and add it to the handlers consoleFormatter = logging.Formatter(fmt='%(message)s') fileFormatter = logging.Formatter(fmt='%(asctime)s %(message)s', datefmt='%H:%M:%S') consoleHandler.setFormatter(consoleFormatter) fileHandler.setFormatter(fileFormatter) # add the handlers to the logger logger.addHandler(consoleHandler) logger.addHandler(fileHandler) if extHandler: #add handler for test runner logger.addHandler(extHandler) return logger
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def pretty_apdu(self, apdu): str = types.insName(apdu) if str == 'SELECT_FILE': str += " " + self.fileName(apdu) self.loggingApdu.info(str)
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def setLoggerExtHandler(handler): global extHandler extHandler = handler
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def __init__(self, simRouter): threading.Thread.__init__(self) self.simRouter = simRouter threading.Thread.setName(self, 'MainLoopThread') self.__lock = threading.Lock()
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def stop(self): self.join()
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def __init__(self, simRouter): threading.Thread.__init__(self) self.simRouter = simRouter threading.Thread.setName(self, 'ResetThread') self.__lock = threading.Lock()
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def softReset(self): self.simRouter.logging.info("\n") self.simRouter.logging.info("<- Soft reset") for cardDict in self.simRouter.cardsDict: if (not cardDict[MAIN_INTERFACE].routingAttr or #skip SIM with no common instruction cardDict[MAIN_INTERFACE].routingAttr.insCommon == [] or not self.simRouter.simCtrl): continue #select MF if self.simRouter.simType == types.TYPE_USIM: apdu = "00A40004023F00" else: apdu = "A0A40000023F00" rapdu = self.simRouter.injectApdu(apdu, cardDict[MAIN_INTERFACE], mode=INJECT_NO_FORWARD) if not rapdu: #Skip resetting if there is USB apdu to handle self.simRouter.logging.info("Soft reset not completed, USB apdu ongoing") return # Close opened logical channel so the are not exhousted when UE # assign new channels after SIM reset. ctrlLogicalChannel = self.simRouter.simCtrl.logicalChannel for channel in range(1,4): if channel != ctrlLogicalChannel: #skip control logical channel originChannel = 0 if self.simRouter.simType == types.TYPE_SIM: cla = 0xA0 else: cla = 0x00 cla = cla | (originChannel & 0x0F) apdu = "%02X7080%02X00" %(cla, channel) rapdu = self.simRouter.injectApdu(apdu, cardDict[MAIN_INTERFACE], mode=INJECT_NO_FORWARD) if not rapdu: #Skip resetting if there is USB apdu to handle self.simRouter.logging.info("Soft reset not completed, USB apdu ongoing") break self.simRouter.logging.info("-> reset end")
kamwar/simLAB
[ 74, 31, 74, 4, 1461177205 ]
def testGladman_dev_vec(self): """ All 25 combinations of block and key size. These test vectors were generated by Dr Brian Gladman using the program aes_vec.cpp <brg@gladman.uk.net> 24th May 2001. vectors in file: dev_vec.txt http://fp.gladman.plus.com/cryptography_technology/rijndael/index.htm """ def RijndaelTestVec(i, key, pt, ct): """ Run single AES test vector with any legal blockSize and any legal key size. """ bkey, plainText, cipherText = a2b_hex(key), a2b_hex(pt), a2b_hex(ct) kSize = len(bkey) bSize = len(cipherText) # set block size to length of block alg = Rijndael(bkey, keySize=kSize, blockSize=bSize, padding=noPadding()) self.assertEqual( alg.encrypt(plainText), cipherText ) self.assertEqual( alg.decrypt(cipherText), plainText ) RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 16 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c', pt = '3243f6a8885a308d313198a2e0370734', ct = '3925841d02dc09fbdc118597196a0b32') RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 20 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160', pt = '3243f6a8885a308d313198a2e0370734', ct = '231d844639b31b412211cfe93712b880') RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 24 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5', pt = '3243f6a8885a308d313198a2e0370734', ct = 'f9fb29aefc384a250340d833b87ebc00') RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 28 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90', pt = '3243f6a8885a308d313198a2e0370734', ct = '8faa8fe4dee9eb17caa4797502fc9d3f') RijndaelTestVec( i = 'dev_vec.txt 16 byte block, 32 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe', pt = '3243f6a8885a308d313198a2e0370734', ct = '1a6e6c2c662e7da6501ffb62bc9e93f3') RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 16 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c', pt = '3243f6a8885a308d313198a2e03707344a409382', ct = '16e73aec921314c29df905432bc8968ab64b1f51') RijndaelTestVec( i = 
'dev_vec.txt 20 byte block, 20 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160', pt = '3243f6a8885a308d313198a2e03707344a409382', ct = '0553eb691670dd8a5a5b5addf1aa7450f7a0e587') RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 24 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5', pt = '3243f6a8885a308d313198a2e03707344a409382', ct = '73cd6f3423036790463aa9e19cfcde894ea16623') RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 28 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90', pt = '3243f6a8885a308d313198a2e03707344a409382', ct = '601b5dcd1cf4ece954c740445340bf0afdc048df') RijndaelTestVec( i = 'dev_vec.txt 20 byte block, 32 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe', pt = '3243f6a8885a308d313198a2e03707344a409382', ct = '579e930b36c1529aa3e86628bacfe146942882cf') RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 16 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d', ct = 'b24d275489e82bb8f7375e0d5fcdb1f481757c538b65148a') RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 20 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d', ct = '738dae25620d3d3beff4a037a04290d73eb33521a63ea568') RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 24 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d', ct = '725ae43b5f3161de806a7c93e0bca93c967ec1ae1b71e1cf') RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 28 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d', ct = 'bbfc14180afbf6a36382a061843f0b63e769acdc98769130') RijndaelTestVec( i = 'dev_vec.txt 24 byte block, 32 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d', ct = 
'0ebacf199e3315c2e34b24fcc7c46ef4388aa475d66c194c') RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 16 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9', ct = 'b0a8f78f6b3c66213f792ffd2a61631f79331407a5e5c8d3793aceb1') RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 20 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9', ct = '08b99944edfce33a2acb131183ab0168446b2d15e958480010f545e3') RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 24 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9', ct = 'be4c597d8f7efe22a2f7e5b1938e2564d452a5bfe72399c7af1101e2') RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 28 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9', ct = 'ef529598ecbce297811b49bbed2c33bbe1241d6e1a833dbe119569e8') RijndaelTestVec( i = 'dev_vec.txt 28 byte block, 32 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa9', ct = '02fafc200176ed05deb8edb82a3555b0b10d47a388dfd59cab2f6c11') RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 16 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8', ct = '7d15479076b69a46ffb3b3beae97ad8313f622f67fedb487de9f06b9ed9c8f19') RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 20 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8', ct = '514f93fb296b5ad16aa7df8b577abcbd484decacccc7fb1f18dc567309ceeffd') RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 24 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da5', pt = 
'3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8', ct = '5d7101727bb25781bf6715b0e6955282b9610e23a43c2eb062699f0ebf5887b2') RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 28 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d90', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8', ct = 'd56c5a63627432579e1dd308b2c8f157b40a4bfb56fea1377b25d3ed3d6dbf80') RijndaelTestVec( i = 'dev_vec.txt 32 byte block, 32 byte key', key = '2b7e151628aed2a6abf7158809cf4f3c762e7160f38b4da56a784d9045190cfe', pt = '3243f6a8885a308d313198a2e03707344a4093822299f31d0082efa98ec4e6c8', ct = 'a49406115dfb30a40418aafa4869b7c6a886ff31602a7dd19c889dc64f7e4e7a')
repotvsupertuga/tvsupertuga.repository
[ 1, 8, 1, 4, 1493763534 ]
def execute(self): # Option sanity check self.forbidden(self.options.fsnames, "-f, see -m") self.forbidden(self.options.labels, "-l") self.forbidden(self.options.indexes, "-i") self.forbidden(self.options.failover, "-F") rc = RC_OK if not self.options.model: raise CommandHelpException("Lustre model file path" "(-m <model_file>) argument required.", self) eh = FSGlobalEventHandler(self) # Use this Shine.FSUtils convenience function. lmf = self.get_lmf_path() if lmf: print("Using Lustre model file %s" % lmf) else: raise CommandHelpException("Lustre model file for ``%s'' not found:" " please use filename or full LMF path.\n" "Your default model files directory (lmf_dir) is: %s" % (self.options.model, Globals().get_lmf_dir()), self) install_nodes = self.options.nodes excluded_nodes = self.options.excludes fs_conf, fs = create_lustrefs(self.get_lmf_path(), event_handler=eh, nodes=install_nodes, excluded=excluded_nodes) # Register the filesystem in backend print("Registering FS %s to backend..." % fs.fs_name) if self.options.dryrun: rc = 0 else: rc = self.register_fs(fs_conf) if rc: msg = "Error: failed to register FS to backend (rc=%d)" % rc print(msg, file=sys.stderr) else: print("Filesystem %s registered." % fs.fs_name) # Helper message. # If user specified nodes which were not used, warn him about it. actual_nodes = fs.components.managed().servers() if not self.check_valid_list(fs_conf.get_fs_name(), \ actual_nodes, "install"): return RC_FAILURE # Install file system configuration files; normally, this should # not be done by the Shine.Lustre.FileSystem object itself, but as # all proxy methods are currently handled by it, it is more # convenient this way... 
try: fs.install(fs_conf.get_cfg_filename(), dryrun=self.options.dryrun) tuning_conf = Globals().get_tuning_file() if tuning_conf: fs.install(tuning_conf, dryrun=self.options.dryrun) except FSRemoteError as error: print("WARNING: Due to error, installation skipped on %s" % error.nodes) rc = RC_FAILURE if not install_nodes and not excluded_nodes: # Give pointer to next user step. print("Use `shine format -f %s' to initialize the file system." % fs_conf.get_fs_name()) return rc
cea-hpc/shine
[ 20, 5, 20, 38, 1427733899 ]
def __init__(self,screen): ConverterBase.__init__(self,screen) ui=self.ui ys=self.makeTab(10,94,'CFG settings') ui.add(sockgui.Label(ui,[20,ys+10],'Expansion name:')) ui.add(sockgui.Label(ui,[20,ys+26],'Author name:')) ui.add(sockgui.Label(ui,[20,ys+42],'Orig. Author name:')) ui.add(sockgui.Label(ui,[20,ys+58],'Shortname:')) ui.add(sockgui.Label(ui,[20,ys+74],'Filename:'))
foone/7gen
[ 5, 1, 5, 2, 1422166989 ]
def refreshList(self,junk): self.files.setItems(self.getOBJList())
foone/7gen
[ 5, 1, 5, 2, 1422166989 ]
def statusCallback(self,text): self.errortext.setText(text) self.ui.draw()
foone/7gen
[ 5, 1, 5, 2, 1422166989 ]
def copyAuthorToOrigAuthor(self,junk): self.origauthorbox.setText(self.authorbox.getText())
foone/7gen
[ 5, 1, 5, 2, 1422166989 ]
def getEnhanceColor(self): try: val=self.config.get('obj2vxp','enhance') return sockgui.BoolConv(val) except: return False
foone/7gen
[ 5, 1, 5, 2, 1422166989 ]
def getOBJList(self): out=[] for file in os.listdir('.'): flower=file.lower() if flower.endswith('.obj'): out.append(file) return out
foone/7gen
[ 5, 1, 5, 2, 1422166989 ]
def RunConverter(title): pygame.display.set_caption(title+'obj2vxpGUI '+obj2vxp.version) screen=pygame.display.set_mode((375,397)) gui=obj2vxpGUI(screen) return gui.run()
foone/7gen
[ 5, 1, 5, 2, 1422166989 ]
def re_glob(s): """ Tests if a string is a shell wildcard. """ global _re_compiled_glob_match if _re_compiled_glob_match is None: _re_compiled_glob_match = re.compile(r'[*?]|\[.+\]').search return _re_compiled_glob_match(s)
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def re_full_search_needed(s): """ Tests if a string needs a full nevra match, instead of just name. """ global _re_compiled_full_match if _re_compiled_full_match is None: # A glob, or a "." or "-" separator, followed by something (the ".") one = re.compile(r'.*([-.*?]|\[.+\]).').match # Any epoch, for envra two = re.compile('[0-9]+:').match _re_compiled_full_match = (one, two) for rec in _re_compiled_full_match: if rec(s): return True return False
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def __init__(self, iter=None): self.__iter = iter
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def __getitem__(self, item): if hasattr(self, item): return getattr(self, item) else: raise KeyError(item)
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def merge_lists(self, other): """ Concatenate the list attributes from 'other' to ours. """ for (key, val) in other.all_lists().items(): vars(self).setdefault(key, []).extend(val) return self
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def keyInstalled(ts, keyid, timestamp): ''' Return if the GPG key described by the given keyid and timestamp are installed in the rpmdb. The keyid and timestamp should both be passed as integers. The ts is an rpm transaction set object Return values: - -1 key is not installed - 0 key with matching ID and timestamp is installed - 1 key with matching ID is installed but has an older timestamp - 2 key with matching ID is installed but has a newer timestamp No effort is made to handle duplicates. The first matching keyid is used to calculate the return result. ''' # Search for hdr in ts.dbMatch('name', 'gpg-pubkey'): if hdr['version'] == keyid: installedts = int(hdr['release'], 16) if installedts == timestamp: return 0 elif installedts < timestamp: return 1 else: return 2 return -1
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def getCacheDir(): """return a path to a valid and safe cachedir - only used when not running as root or when --tempcache is set""" uid = os.geteuid() try: usertup = pwd.getpwuid(uid) username = dnf.i18n.ucd(usertup[0]) prefix = '%s-%s-' % (dnf.const.PREFIX, username) except KeyError: prefix = '%s-%s-' % (dnf.const.PREFIX, uid) # check for /var/tmp/prefix-* - dirpath = '%s/%s*' % (dnf.const.TMPDIR, prefix) cachedirs = sorted(glob.glob(dirpath)) for thisdir in cachedirs: stats = os.lstat(thisdir) if S_ISDIR(stats[0]) and S_IMODE(stats[0]) == 448 and stats[4] == uid: return thisdir # make the dir (tempfile.mkdtemp()) cachedir = tempfile.mkdtemp(prefix=prefix, dir=dnf.const.TMPDIR) return cachedir
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def unlink_f(filename): """ Call os.unlink, but don't die if the file isn't there. This is the main difference between "rm -f" and plain "rm". """ try: os.unlink(filename) except OSError as e: if e.errno != errno.ENOENT: raise
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def _getloginuid(): """ Get the audit-uid/login-uid, if available. os.getuid() is returned instead if there was a problem. Note that no caching is done here. """ # We might normally call audit.audit_getloginuid(), except that requires # importing all of the audit module. And it doesn't work anyway: BZ 518721 try: with open("/proc/self/loginuid") as fo: data = fo.read() return int(data) except (IOError, ValueError): return os.getuid()
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def getloginuid(): """ Get the audit-uid/login-uid, if available. os.getuid() is returned instead if there was a problem. The value is cached, so you don't have to save it. """ global _cached_getloginuid if _cached_getloginuid is None: _cached_getloginuid = _getloginuid() return _cached_getloginuid
rpm-software-management/dnf
[ 1066, 367, 1066, 40, 1331307069 ]
def __init__(self, username, password,\ server='imap.gmail.com', port=993): """ It returns -1 if there is no connection otherwise it returns the number of unread mails. """
fsquillace/pycious
[ 5, 2, 5, 3, 1322312279 ]
def __connect(self): self.M=imaplib.IMAP4_SSL(self.server , self.port) #First field is imap login (gmail uses login with #domain and '@' character), second - password self.M.login(self.username, self.password)
fsquillace/pycious
[ 5, 2, 5, 3, 1322312279 ]
def __call__(self): """ It returns -1 if it's not available the information otherwise returns the number of unread mail. """ try: if not self.M: self.__connect()
fsquillace/pycious
[ 5, 2, 5, 3, 1322312279 ]
def __init__(self, username, password): """ It returns -1 if there is no connection otherwise it returns the number of unread news. """
fsquillace/pycious
[ 5, 2, 5, 3, 1322312279 ]
def __connect(self): st, out = subprocess.getstatusoutput('curl -fs '+\ '"https://www.google.com/accounts/ClientLogin?'+\ 'service=reader&Email='+self.username+\ '&Passwd='+self.password+'"') if not out or out=="": raise Exception()
fsquillace/pycious
[ 5, 2, 5, 3, 1322312279 ]
def __call__(self):
fsquillace/pycious
[ 5, 2, 5, 3, 1322312279 ]
def get_migrated_vm_obj(src_vm_obj, target_provider): """Returns migrated_vm obj from target_provider""" collection = target_provider.appliance.provider_based_collection(target_provider) migrated_vm = collection.instantiate(src_vm_obj.name, target_provider) return migrated_vm
RedHatQE/cfme_tests
[ 69, 165, 69, 133, 1360187957 ]
def ansible_repository(appliance): """Fixture to add ansible repository""" appliance.wait_for_embedded_ansible() repositories = appliance.collections.ansible_repositories try: repository = repositories.create( name=fauxfactory.gen_alpha(), url=cfme_data.ansible_links.playbook_repositories.v2v, description=fauxfactory.gen_alpha() ) except KeyError: pytest.skip("Skipping since no such key found in yaml") view = navigate_to(repository, "Details") wait_for(lambda: view.entities.summary("Properties").get_text_of("Status") == "successful", delay=10, timeout=60, fail_func=view.toolbar.refresh.click) yield repository if repository.exists: repository.delete()
RedHatQE/cfme_tests
[ 69, 165, 69, 133, 1360187957 ]
def _cleanup(): if cat_item.exists: cat_item.delete()
RedHatQE/cfme_tests
[ 69, 165, 69, 133, 1360187957 ]
def __init__(self, id, params): super(Speaker, self).__init__(id, params) try: self.path_to_audio = params["path_to_audio"] self.repetitions = int(params["repetitions"]) except ValueError as ve: # if repetitions can't be parsed as int logging.error("Speaker: Wasn't able to initialize the device, please check your configuration: %s" % ve) self.corrupted = True return except KeyError as ke: # if config parameters are missing in file logging.error("Speaker: Wasn't able to initialize the device, it seems there is a config parameter missing: %s" % ke) self.corrupted = True return logging.debug("Speaker: Audio device initialized")
SecPi/SecPi
[ 245, 40, 245, 32, 1431606302 ]
def execute(self): if not self.corrupted: self.play_audio() else: logging.error("Speaker: Wasn't able to play sound because of an initialization error")
SecPi/SecPi
[ 245, 40, 245, 32, 1431606302 ]
def test_imslp_xml_to_marc(): example = """<?xml version="1.0"?> <document docID="imslpvalsskramstadhans"> <localClass localClassName="col">imslp</localClass> <localClass localClassName="vifa">vifamusik</localClass> <identifier identifierEncodingSchema="originalID">valsskramstadhans</identifier> <creator> <mainForm>Skramstad, Hans</mainForm> </creator> <title>Vals for pianoforte</title> <subject> <mainForm>Romantic</mainForm> </subject> <music_arrangement_of>Piano</music_arrangement_of> <url urlEncodingSchema="originalDetailView">http://imslp.org/wiki/Vals_(Skramstad,_Hans)</url> <vifatype>Internetressource</vifatype> <fetchDate>2018-04-25T00:00:00.01Z</fetchDate> <vifaxml><![CDATA[<document docID="imslpvalsskramstadhans"><localClass localClassName="col">imslp</localClass><localClass localClassName="vifa">vifamusik</localClass><identifier identifierEncodingSchema="originalID">valsskramstadhans</identifier><creator><mainForm>Skramstad, Hans</mainForm></creator><title>Vals for pianoforte</title><subject><mainForm>Romantic</mainForm></subject><music_arrangement_of>Piano</music_arrangement_of><url urlEncodingSchema="originalDetailView">http://imslp.org/wiki/Vals_(Skramstad,_Hans)</url><vifatype>Internetressource</vifatype></document>]]></vifaxml> </document> """ result = imslp_xml_to_marc(example) assert result is not None assert isinstance(result, pymarc.Record) assert result["001"].value() == "finc-15-dmFsc3NrcmFtc3RhZGhhbnM" assert result["100"]["a"] == "Skramstad, Hans" assert result["245"]["a"] == "Vals for pianoforte" assert result["856"]["u"] == "http://imslp.org/wiki/Vals_(Skramstad,_Hans)"
miku/siskin
[ 21, 4, 21, 1, 1403546397 ]
def find_comment_by_id(id):
    """Fetch the comment whose primary key equals *id*.

    NOTE(review): peewee's ``get_by_id`` presumably raises ``DoesNotExist``
    when no row matches — confirm with callers.
    """
    return Comment.get_by_id(id)
kianby/stacosys
[ 17, 1, 17, 2, 1430325952 ]
def publish_comment(comment: Comment):
    """Stamp the comment with the current time as publication date and persist it."""
    timestamp = datetime.now().strftime(TIME_FORMAT)
    comment.published = timestamp
    comment.save()
kianby/stacosys
[ 17, 1, 17, 2, 1430325952 ]
def find_not_notified_comments():
    """Return all comments for which no notification has been sent yet."""
    pending = Comment.notified.is_null()
    return Comment.select().where(pending)
kianby/stacosys
[ 17, 1, 17, 2, 1430325952 ]
def find_published_comments_by_url(url):
    """Return the published comments of a page, oldest publication first."""
    published_for_page = (Comment.url == url) & (Comment.published.is_null(False))
    return Comment.select(Comment).where(published_for_page).order_by(+Comment.published)
kianby/stacosys
[ 17, 1, 17, 2, 1430325952 ]
def load_person_by_id(person_id: int, season_id: Optional[int] = None) -> Person:
    """Load the person with primary key *person_id*, stats scoped to *season_id*."""
    criteria = f'p.id = {person_id}'
    return load_person(criteria, season_id=season_id)
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def load_person_by_discord_id(discord_id: int, season_id: Optional[int] = None) -> Person:
    """Load the person linked to the given Discord id, stats scoped to *season_id*."""
    criteria = f'p.discord_id = {discord_id}'
    return load_person(criteria, season_id=season_id)
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def load_person_by_discord_id_or_username(person: str, season_id: int = 0) -> Person:
    """
    Resolve *person* either as a Discord snowflake id or as a Magic Online
    username.

    It would probably be better if this method did not exist but for now it's
    required by the API. Magic Online usernames can themselves be numeric, so
    this is a (very good) heuristic rather than an unambiguous lookup.

    Discord snowflakes encode a millisecond timestamp in their high bits
    (see https://discordapp.com/developers/docs/reference#snowflakes), so any
    snowflake minted between 2015-02-01T00:00:00.000Z and
    2100-01-01T00:00:00.000Z lies in the numeric range checked below. We use
    2015-02-01 rather than 2015-01-01 as the lower bound because it greatly
    narrows the range and no snowflakes from before December 28th 2015 have
    been observed. This will fail or (very unlikely) misresolve if a player
    ever has a numeric MTGO username that falls inside the range.
    """
    min_discord_id = 11234023833600000    # min snowflake for 2015-02-01T00:00:00.000Z
    max_discord_id = 5625346837708800000  # min snowflake for 2100-01-01T00:00:00.000Z
    if person.isdigit() and min_discord_id <= int(person) <= max_discord_id:
        return load_person_by_discord_id(int(person), season_id=season_id)
    return load_person_by_mtgo_username(person, season_id=season_id)
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def maybe_load_person_by_discord_id(discord_id: Optional[int]) -> Optional[Person]:
    """Return the person linked to *discord_id*, or None if unknown or unset."""
    if discord_id is None:
        return None
    matches = load_people(f'p.discord_id = {discord_id}')
    return guarantee.at_most_one(matches)
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def maybe_load_person_by_tappedout_name(username: str) -> Optional[Person]:
    """Return the person with the given Tapped Out username, or None."""
    where = 'p.tappedout_username = {username}'.format(username=sqlescape(username))
    return guarantee.at_most_one(load_people(where))
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def maybe_load_person_by_mtggoldfish_name(username: str) -> Optional[Person]:
    """Return the person with the given MTGGoldfish username, or None."""
    where = 'p.mtggoldfish_username = {username}'.format(username=sqlescape(username))
    return guarantee.at_most_one(load_people(where))
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def load_person_statless(where: str = 'TRUE', season_id: Optional[int] = None) -> Person:
    """Load exactly one person matching *where*, without any deck statistics.

    Raises (via guarantee.exactly_one) when zero or multiple rows match.
    *season_id* is only recorded on the returned Person, not used in the query.
    """
    person_query = query.person_query()
    sql = f"""
        SELECT
            p.id,
            {person_query} AS name,
            p.mtgo_username,
            p.tappedout_username,
            p.mtggoldfish_username,
            p.discord_id,
            p.elo,
            p.locale
        FROM
            person AS p
        WHERE
            {where}
    """
    people = [Person(r) for r in db().select(sql)]
    for p in people:
        p.season_id = season_id
    return guarantee.exactly_one(people)
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def load_people(where: str = 'TRUE', order_by: str = 'num_decks DESC, p.name', limit: str = '', season_id: Optional[Union[str, int]] = None) -> Sequence[Person]:
    """Load people matching *where* together with aggregated deck statistics
    (deck/win/loss counts, perfect league runs, tournament results), optionally
    restricted to one season.

    NOTE: *where*, *order_by* and *limit* are interpolated directly into the
    SQL — callers must pass trusted fragments.
    """
    person_query = query.person_query()
    # Only join the season tables when a season filter is requested.
    season_join = query.season_join() if season_id else ''
    season_query = query.season_query(season_id, 'season.id')
    sql = f"""
        SELECT
            p.id,
            {person_query} AS name,
            p.mtgo_username,
            p.tappedout_username,
            p.mtggoldfish_username,
            p.discord_id,
            p.elo,
            p.locale,
            SUM(1) AS num_decks,
            SUM(dc.wins) AS wins,
            SUM(dc.losses) AS losses,
            SUM(dc.draws) AS draws,
            SUM(wins - losses) AS record,
            SUM(CASE WHEN dc.wins >= 5 AND dc.losses = 0 AND d.source_id IN (SELECT id FROM source WHERE name = 'League') THEN 1 ELSE 0 END) AS perfect_runs,
            SUM(CASE WHEN d.finish = 1 THEN 1 ELSE 0 END) AS tournament_wins,
            SUM(CASE WHEN d.finish <= 8 THEN 1 ELSE 0 END) AS tournament_top8s,
            IFNULL(ROUND((SUM(dc.wins) / NULLIF(SUM(dc.wins + dc.losses), 0)) * 100, 1), '') AS win_percent,
            SUM(DISTINCT CASE WHEN d.competition_id IS NOT NULL THEN 1 ELSE 0 END) AS num_competitions
        FROM
            person AS p
        LEFT JOIN
            deck AS d ON d.person_id = p.id
        LEFT JOIN
            deck_cache AS dc ON d.id = dc.deck_id
        {season_join}
        WHERE
            ({where}) AND ({season_query})
        GROUP BY
            p.id
        ORDER BY
            {order_by}
        {limit}
    """
    people = [Person(r) for r in db().select(sql)]
    for p in people:
        p.season_id = season_id
    return people
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def preaggregate() -> None:
    """Rebuild the person-related preaggregated tables: achievements and
    head-to-head stats."""
    achievements.preaggregate_achievements()
    preaggregate_head_to_head()
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def set_achievements(people: List[Person], season_id: Optional[int] = None) -> None:
    """Attach achievement data to each Person in *people*, in place.

    Sets ``num_achievements`` (count of achievements with a positive value)
    and ``achievements`` (the raw per-achievement values) on every person.
    """
    people_by_id = {person.id: person for person in people}
    sql = achievements.load_query(people_by_id, season_id)
    results = [Container(r) for r in db().select(sql)]
    for result in results:
        # 'id' is the person key, not an achievement column, so exclude it.
        people_by_id[result['id']].num_achievements = len([k for k, v in result.items() if k != 'id' and v > 0])
        people_by_id[result['id']].achievements = result
        people_by_id[result['id']].achievements.pop('id')
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def load_head_to_head_count(person_id: int, where: str = 'TRUE', season_id: Optional[int] = None) -> int:
    """Count head-to-head opponent rows for *person_id* under the given filters.

    NOTE: *where* is interpolated directly into the SQL — pass trusted fragments.
    """
    season_query = query.season_query(season_id)
    sql = f'SELECT COUNT(*) FROM _head_to_head_stats AS hths INNER JOIN person AS opp ON hths.opponent_id = opp.id WHERE ({where}) AND (hths.person_id = {person_id}) AND ({season_query})'
    return db().value(sql)
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def load_head_to_head(person_id: int, where: str = 'TRUE', order_by: str = 'num_matches DESC, record DESC, win_percent DESC, wins DESC, opp_mtgo_username', limit: str = '', season_id: Optional[int] = None) -> Sequence[Container]:
    """Load per-opponent head-to-head records (matches, wins/losses/draws,
    win percentage) for *person_id*, aggregated from the preaggregated
    _head_to_head_stats table.

    NOTE: *where*, *order_by* and *limit* are interpolated directly into the
    SQL — callers must pass trusted fragments.
    """
    season_query = query.season_query(season_id)
    sql = f"""
        SELECT
            hths.person_id AS id,
            LOWER(opp.mtgo_username) AS opp_mtgo_username,
            SUM(num_matches) AS num_matches,
            SUM(wins) - SUM(losses) AS record,
            SUM(wins) AS wins,
            SUM(losses) AS losses,
            SUM(draws) AS draws,
            IFNULL(ROUND((SUM(wins) / NULLIF(SUM(wins + losses), 0)) * 100, 1), '') AS win_percent
        FROM
            _head_to_head_stats AS hths
        INNER JOIN
            person AS opp ON hths.opponent_id = opp.id
        WHERE
            ({where}) AND (hths.person_id = {person_id}) AND ({season_query})
        GROUP BY
            hths.person_id,
            hths.opponent_id
        ORDER BY
            {order_by}
        {limit}
    """
    return [Container(r) for r in db().select(sql)]
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def is_allowed_to_retire(deck_id: Optional[int], discord_id: Optional[int]) -> bool:
    """Decide whether the given Discord user may retire the given deck.

    Without a deck id there is nothing to retire; without a Discord id (or an
    unknown one) we cannot verify ownership and allow the action.
    """
    if not deck_id:
        return False
    if not discord_id:
        return True
    person = maybe_load_person_by_discord_id(discord_id)
    if person is None:
        return True
    wanted = int(deck_id)
    return any(d.id == wanted for d in person.decks)
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def load_aliases() -> List[Container]:
    """Return every recorded alias joined with the canonical MTGO username."""
    sql = """
        SELECT
            pa.person_id,
            pa.alias,
            p.mtgo_username
        FROM
            person_alias AS pa
        INNER JOIN
            person AS p ON p.id = pa.person_id
    """
    return [Container(r) for r in db().select(sql)]
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def load_notes(person_id: Optional[int] = None) -> List[Container]:
    """Load admin notes about people, newest first per subject.

    When *person_id* is given, only notes about that person are returned.
    Each note gets parsed ``created_date`` and a human-readable ``display_date``.
    """
    where = f'subject_id = {person_id}' if person_id else 'TRUE'
    sql = """
        SELECT
            pn.created_date,
            pn.creator_id,
            {creator_query} AS creator,
            pn.subject_id,
            {subject_query} AS subject,
            note
        FROM
            person_note AS pn
        INNER JOIN
            person AS c ON pn.creator_id = c.id
        INNER JOIN
            person AS s ON pn.subject_id = s.id
        WHERE
            {where}
        ORDER BY
            s.id,
            pn.created_date DESC
    """.format(creator_query=query.person_query('c'), subject_query=query.person_query('s'), where=where)
    notes = [Container(r) for r in db().select(sql)]
    for n in notes:
        n.created_date = dtutil.ts2dt(n.created_date)
        n.display_date = dtutil.display_date(n.created_date)
    return notes
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def link_discord(mtgo_username: str, discord_id: int) -> Person:
    """Attach a Discord id to the person with the given MTGO username.

    Creates the person row if necessary. Raises AlreadyExistsException when
    the person is already linked to some Discord account.
    """
    person_id = deck.get_or_insert_person_id(mtgo_username, None, None)
    p = load_person_by_id(person_id)
    if p.discord_id is not None:
        raise AlreadyExistsException('Player with mtgo username {mtgo_username} already has discord id {old_discord_id}, cannot add {new_discord_id}'.format(mtgo_username=mtgo_username, old_discord_id=p.discord_id, new_discord_id=discord_id))
    db().execute('UPDATE person SET discord_id = %s WHERE id = %s', [discord_id, p.id])
    return p
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def remove_discord_link(discord_id: int) -> int:
    """Unlink the given Discord account; returns the number of rows updated."""
    return db().execute('UPDATE person SET discord_id = NULL WHERE discord_id = %s', [discord_id])
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def squash(p1id: int, p2id: int, col1: str, col2: str) -> None:
    """Merge person *p2id* into *p1id* inside a single transaction:
    reassign p2's decks to p1, delete p2, and copy p2's *col2* value onto p1.

    NOTE: *col1*/*col2* are interpolated directly into SQL — callers must pass
    trusted column names only.
    """
    logger.warning('Squashing {p1id} and {p2id} on {col1} and {col2}'.format(p1id=p1id, p2id=p2id, col1=col1, col2=col2))
    db().begin('squash')
    # Capture p2's value before the row is deleted.
    new_value = db().value('SELECT {col2} FROM person WHERE id = %s'.format(col2=col2), [p2id])
    db().execute('UPDATE deck SET person_id = %s WHERE person_id = %s', [p1id, p2id])
    db().execute('DELETE FROM person WHERE id = %s', [p2id])
    db().execute('UPDATE person SET {col2} = %s WHERE id = %s'.format(col2=col2), [new_value, p1id])
    db().commit('squash')
PennyDreadfulMTG/Penny-Dreadful-Discord-Bot
[ 33, 26, 33, 354, 1474960935 ]
def __init__(self, word2vec_model):
    """Delegate all initialization to the parent with the given word2vec model."""
    super().__init__(word2vec_model)
apmoore1/semeval
[ 17, 8, 17, 4, 1487765768 ]
def __init__(self, layer_name):
    """Remember the target layer name and reset error/metadata bookkeeping."""
    self.layer_name = layer_name
    self.gs_catalog_obj = Layer.objects.gs_catalog
    self.err_found = False
    self.err_msgs = []
    self.layer_metadata = None  # LayerMetadata object, filled in later
cga-harvard/cga-worldmap
[ 96, 31, 96, 30, 1288628631 ]
def create_layer_metadata(self, layer_name):
    """Build and store a LayerMetadata object for *layer_name*.

    A None name clears any previously stored metadata.
    """
    if layer_name is None:
        self.layer_metadata = None
        return
    self.layer_metadata = LayerMetadata.create_metadata_using_layer_name(layer_name)
cga-harvard/cga-worldmap
[ 96, 31, 96, 30, 1288628631 ]
def add_sld_to_layer(self, formatted_sld_object):
    """Apply the SLD to this layer via two REST PUT calls to GeoServer.

    The direct-python path (add_sld_xml_to_layer) is deliberately avoided:
    it did not properly clear the tile cache.
    """
    return self.add_sld_xml_to_layer_via_puts(formatted_sld_object, self.layer_name)
cga-harvard/cga-worldmap
[ 96, 31, 96, 30, 1288628631 ]
def get_set_default_style_url(self, layer_name):
    """
    Given a layer name, return the REST url used to set a default style.
    Returns None for an empty/None layer name.
    """
    if not layer_name:
        return None
    fragment = 'rest/layers/%s:%s' % (settings.DEFAULT_WORKSPACE, layer_name)
    return urljoin(settings.GEOSERVER_BASE_URL, fragment)
cga-harvard/cga-worldmap
[ 96, 31, 96, 30, 1288628631 ]
def add_sld_xml_to_layer(self, formatted_sld_object): """ NOT USING, tiles were not getting refreshed properly Keeping code around in case needed in the future """ if not formatted_sld_object: return False print 'type(formatted_sld_object)', type(formatted_sld_object) # (1) Verify the XML if not self.is_xml_verified(formatted_sld_object.formatted_sld_xml): self.add_err_msg('The style information contains invalid XML') return False # (2) Retrieve the layer layer_obj = self.gs_catalog_obj.get_layer(self.layer_name) if layer_obj is None: self.add_err_msg('The layer "%s" does not exist' % self.layer_name) return False self.show_layer_style_list(layer_obj) #self.clear_alternate_style_list(layer_obj) # (3) Create a style name #stylename = self.layer_name + self.get_random_suffix() #while self.is_style_name_in_catalog(stylename): # stylename = self.layer_name + self.get_random_suffix() style_name = formatted_sld_object.sld_name # (4) Add the xml style to the catalog, with the new name try: # sync names self.gs_catalog_obj.create_style(style_name, formatted_sld_object.formatted_sld_xml) except: self.add_err_msg('Failed to add style to the catalog: %s' % style_name) return False # (5) Pull the style object back from the catalog new_style_obj = self.gs_catalog_obj.get_style(style_name) if new_style_obj is None: self.add_err_msg('Failed to find recently added style in the catalog: %s' % style_name) return False # (6) Set the new style as the default for the layer layer_obj.default_style = new_style_obj # Save it! try: self.gs_catalog_obj.save(layer_obj) except: self.add_err_msg('Failed to save new default style with layer' % (style_name)) return False self.create_layer_metadata(self.layer_name) print ('layer %s saved with style %s' % (self.layer_name, style_name)) return True
cga-harvard/cga-worldmap
[ 96, 31, 96, 30, 1288628631 ]
def get_style_from_name(self, style_name):
    """
    Look up a style object in the catalog by its name.

    :returns: Style object, or None for an empty name.
    """
    if not style_name:
        return None
    return self.gs_catalog_obj.get_style(style_name)
cga-harvard/cga-worldmap
[ 96, 31, 96, 30, 1288628631 ]
def clear_alternate_style_list(self, layer_obj):
    """
    Remove all alternate styles from the given layer and persist the change.
    (Ask Matt how to actually delete a style from the catalog.)

    :returns: True on success, False when *layer_obj* is not a Layer.
    """
    if layer_obj.__class__.__name__ != 'Layer':
        return False
    # Replace the alternate style list with an empty one, then save.
    layer_obj._set_alternate_styles([])
    self.gs_catalog_obj.save(layer_obj)
    return True
cga-harvard/cga-worldmap
[ 96, 31, 96, 30, 1288628631 ]
def show_layer_style_list(self, layer_obj):
    """Debug helper: print the layer's default style followed by its
    alternate styles. Does nothing useful for non-Layer objects."""
    print('Show layer styles')
    if not layer_obj.__class__.__name__ == 'Layer':
        print ('not a layer', type(layer_obj))
        return
    # Default style first, then the alternates.
    sl = [layer_obj.default_style.name]
    for s in layer_obj._get_alternate_styles():
        sl.append(s.name)
    for idx, sname in enumerate(sl):
        if idx == 0:
            print('%s (default)' % sname)
            continue
        print (sname)
cga-harvard/cga-worldmap
[ 96, 31, 96, 30, 1288628631 ]
def init_client(self, client, headers=None):
    """
    Attach an aiohttp client session to this object.

    If *client* is given it is reused and marked as not owned (so it will not
    be closed by us). Otherwise a new ClientSession is created — owned by this
    object — over a TLS connector that uses certifi's CA bundle.

    :param client: optional existing aiohttp client session to reuse.
    :param headers: optional default headers for a newly created session.
    """
    # BUG FIX: the default used to be a mutable dict ({}), shared across all
    # calls; use the None sentinel instead.
    if headers is None:
        headers = {}
    if client:
        self.client_owned, self.client = False, client
        return
    # Only build the SSL context/connector when we actually create a session.
    ssl_context = ssl.create_default_context(cafile=certifi.where())
    conn = aiohttp.TCPConnector(ssl=ssl_context)
    self.client_owned, self.client = True, aiohttp.ClientSession(
        connector=conn,
        headers=headers,
        skip_auto_headers=["Content-Type", "User-Agent"],
    )
syncrypt/client
[ 11, 1, 11, 3, 1464178408 ]
def __init__(self, url, client=None):
    """Create a reader that will stream the response body of *url*."""
    super(URLReader, self).__init__()
    self.url = url
    self.response = None  # populated once the request is made
    self.init_client(client)
syncrypt/client
[ 11, 1, 11, 3, 1464178408 ]
def __init__(self, url, size=None, client=None):
    """Create a writer that uploads data to *url*, optionally with a known size."""
    super(URLWriter, self).__init__()
    self.url = url
    self.size = size
    self._done = False
    self.bytes_written = 0
    self.response = None  # populated once the request is made
    self.etag = None      # set from the server's response
    self.init_client(client)
syncrypt/client
[ 11, 1, 11, 3, 1464178408 ]
def __init__(self, urls, chunksize, total_size=None, client=None):
    """Create a writer that splits uploads into *chunksize* pieces, one URL each."""
    super(ChunkedURLWriter, self).__init__()
    self._urls = urls
    self._chunksize = chunksize
    self._url_idx = 0          # index of the next URL to write to
    self.bytes_written = 0
    self.total_size = total_size
    self.etags = []            # type: List[str]
    self.init_client(client)
syncrypt/client
[ 11, 1, 11, 3, 1464178408 ]
def __init__(self, db_connection):
    """Store the database connection; the adba (AniDB) link starts unset."""
    self.db_connection = db_connection
    self.adba_connection = None
MediaKraken/MediaKraken_Deployment
[ 10, 4, 10, 10, 1470774651 ]
def parse_geste(infile_name):
    """
    Parse a GESTE file and return an OrderedDict mapping each population to
    its reference-allele frequencies, one per SNP:
    {"Pop <n>": [freq_snp_1, freq_snp_2, ...]}
    SNPs with zero sampled genes get the sentinel frequency 9.
    """
    section_tag = "[pop]="
    frequencies = OrderedDict()
    current_pop = ""
    with open(infile_name, "r") as handle:
        for raw_line in handle:
            tokens = raw_line.split()
            # Data rows start with an integer SNP index; everything else is a
            # section header or a blank line. try/except keeps this fast.
            try:
                int(tokens[0])
            except ValueError:
                # A new population section starts here.
                if tokens[0].startswith(section_tag):
                    current_pop = "Pop %s" % tokens[0].strip().replace(section_tag, "")
                    frequencies[current_pop] = []
                continue
            except IndexError:
                # Blank line.
                continue
            try:
                ref_freq = round(int(tokens[3]) / int(tokens[1]), 3)
            except ZeroDivisionError:
                # No genes sampled for this SNP.
                ref_freq = 9
            frequencies[current_pop].append(ref_freq)
    return frequencies
StuntsPT/pyRona
[ 9, 2, 9, 2, 1478901588 ]
def colorize(cls, field, color):
    """
    Paint the widget's background with the given color.

    :param field: Field handler (widget).
    :param color: Desired color (object with a name() method).
    """
    stylesheet = '%s { background-color: %s }' % (
        field.__class__.__name__, color.name())
    field.setStyleSheet(stylesheet)
GeoMop/GeoMop
[ 3, 1, 3, 76, 1420793228 ]
def colorize_frame(cls, field, color):
    """
    Paint the widget's border with the given color.

    :param field: Field handler (widget).
    :param color: Desired color (object with a name() method).
    """
    stylesheet = '%s { border: 1px solid %s; border-radius: 3px; }' % (
        field.__class__.__name__, color.name())
    field.setStyleSheet(stylesheet)
GeoMop/GeoMop
[ 3, 1, 3, 76, 1420793228 ]
def colorize_default(cls, field):
    """
    Convenience method: restore the widget's default (valid-state) coloring.

    :param field: Field handler (widget).
    """
    # Line edits get a white background; combo boxes a grey frame.
    if isinstance(field, QtWidgets.QLineEdit):
        cls.colorize(field, ValidationColors.white.value)
    if isinstance(field, QtWidgets.QComboBox):
        cls.colorize_frame(field, ValidationColors.grey.value)
GeoMop/GeoMop
[ 3, 1, 3, 76, 1420793228 ]
def colorize_red(cls, field):
    """
    Convenience method: mark the widget as invalid with red coloring.

    :param field: Field handler (widget).
    """
    # Line edits get a red background; combo boxes a red frame.
    if isinstance(field, QtWidgets.QLineEdit):
        cls.colorize(field, ValidationColors.red.value)
    if isinstance(field, QtWidgets.QComboBox):
        cls.colorize_frame(field, ValidationColors.red.value)
GeoMop/GeoMop
[ 3, 1, 3, 76, 1420793228 ]
def __init__(self):
    """Create an empty registry of controls to validate."""
    # Maps a validation key to its associated widget/control.
    self.controls = {}
GeoMop/GeoMop
[ 3, 1, 3, 76, 1420793228 ]
def add(self, key, control):
    """Register *control* under *key* so it takes part in validation coloring."""
    self.controls[key] = control
GeoMop/GeoMop
[ 3, 1, 3, 76, 1420793228 ]
def colorize(self, errors):
    """Color every registered control according to *errors*.

    Controls whose key appears in *errors* turn red with the error text as
    tooltip; the rest are reset. Returns True when nothing was flagged.
    """
    all_valid = True
    for key, control in self.controls.items():
        if key not in errors:
            ValidationColorizer.colorize_default(control)
            control.setToolTip("")
            continue
        control.setToolTip(errors[key])
        ValidationColorizer.colorize_red(control)
        all_valid = False
    return all_valid
GeoMop/GeoMop
[ 3, 1, 3, 76, 1420793228 ]
def reset_colorize(self):
    """Reset every registered control to its default coloring and clear tooltips."""
    for control in self.controls.values():
        ValidationColorizer.colorize_default(control)
        control.setToolTip("")
GeoMop/GeoMop
[ 3, 1, 3, 76, 1420793228 ]
def run(self):
    """Cythonize the extension modules, run the normal build_ext step, then
    re-invoke setup.py's build_py so pure-python files are handled too."""
    from Cython.Build import cythonize
    if USE_ASAN:
        from Cython.Compiler import Options
        # make asan/valgrind's memory leak results better
        Options.generate_cleanup_code = True
    compiler_directives = {'language_level': 3, 'embedsignature': True}
    if linetrace:
        # Enable Cython line tracing (needed for coverage measurement).
        compiler_directives['linetrace'] = True
    self.extensions = cythonize(self.extensions, compiler_directives=compiler_directives)
    _build_ext.run(self)
    # Re-run setup.py for build_py with any extra command-line arguments.
    run_setup(os.path.join(os.getcwd(), "setup.py"), ['build_py'] + extra_args)
piqueserver/piqueserver
[ 165, 59, 165, 87, 1482889715 ]
def utcoffset(self, dt):
    """Return the fixed UTC offset of this tzinfo.

    ZERO is presumably a module-level timedelta(0) (i.e. this is a UTC
    tzinfo) — confirm against the module's constants.
    """
    return ZERO
sprinkler/rainmachine-developer-resources
[ 26, 38, 26, 4, 1436776616 ]