function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def addToAddr(self, toAddr):
    """Register a "toAddr" recipient on this template.

    Duplicate recipient classes are ignored (at most one instance of each
    toAddr class per template). A toAddr carrying the sentinel id -1 gets
    a fresh id from the template's counter.

    Fix: replaced the Python-3-incompatible ``except AttributeError, e``
    syntax (the bound ``e`` was unused) and stopped shadowing the builtin
    ``id``.
    """
    if self.hasToAddr(toAddr.__class__):
        return
    # Lazily create the id counter: persistent objects created before this
    # attribute existed may lack it.
    try:
        if self._toAddrGenerator:
            pass
    except AttributeError:
        self._toAddrGenerator = Counter()
    addr_id = toAddr.getId()
    if addr_id == -1:
        # -1 marks a toAddr never attached to a template before.
        addr_id = int(self._toAddrGenerator.newCount())
    toAddr.includeInTpl(self, addr_id)
    self.getToAddrList().append(toAddr)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def recoverToAddr(self, toAddr):
    """Re-attach a previously removed recipient and pull it out of the trash."""
    self.addToAddr(toAddr)
    toAddr.recover()
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getToAddrList(self):
    """Return the persistent list of recipients, creating it on first use.

    Fix: replaced the Python-3-incompatible ``except AttributeError, e``
    syntax; the bound ``e`` was never used.
    """
    try:
        if self._toAddrs:
            pass
    except AttributeError:
        # Old persistent instances predate this attribute: upgrade lazily.
        self._toAddrs = PersistentList()
    return self._toAddrs
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def hasToAddr(self, toAddrKlass):
    """Returns True if the TPL contains a "toAddr" which class is "toAddrKlass"."""
    return any(addr.__class__ == toAddrKlass
               for addr in self.getToAddrList())
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def addCondition(self, cond):
    """Attach a condition to this template, giving it a counter id if unset."""
    if cond in self._conditions:
        return
    cond_id = cond.getId()
    if cond_id == -1:
        # Never attached before: draw a fresh id from the generator.
        cond_id = int(self._condGenerator.newCount())
    cond.includeInTpl(self, cond_id)
    self._conditions.append(cond)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def recoverCondition(self, cond):
    """Re-attach a previously removed condition and un-trash it."""
    self.addCondition(cond)
    cond.recover()
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getConditionById(self, id):
    """Return the condition whose id equals ``id`` (coerced to int), or None."""
    return next((cond for cond in self._conditions
                 if cond.getId() == int(id)), None)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def satisfies(self, abs):
    """Return True when at least one condition accepts the abstract."""
    return any(cond.satisfies(abs) for cond in self._conditions)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def parseTplContentUndo(self, content, varList):
    """Render the body for display: ``%(name)s`` becomes ``{name}`` and the
    escaped ``%%`` becomes ``%``. The stored content is not modified."""
    result = content
    for var in varList:
        placeholder = "%(" + var.getName() + ")s"
        result = result.replace(placeholder, "{" + var.getName() + "}")
    return result.replace("%%", "%")
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def __init__(self):
    """Start detached from any template: no tpl, sentinel id -1."""
    self._tpl = None
    self._id = -1
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def delete(self):
    """Move this object into the trash can so it can be recovered later."""
    TrashCanManager().add(self)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def includeInTpl(self, newTpl, newId):
    """Bind this object to a template under the given id."""
    self._tpl = newTpl
    self._id = newId
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getId(self):
    """Return the id assigned within the owning template (-1 if unset)."""
    return self._id
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getToAddrList(self, abs):
    """The abstract's submitter is the only recipient for this kind."""
    return [abs.getSubmitter()]
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getToAddrList(self, abs):
    """Every primary author of the abstract receives the notification."""
    return list(abs.getPrimaryAuthorList())
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def __init__(self):
    """Begin unattached: no owning template and the sentinel id -1."""
    self._tpl = None
    self._id = -1
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def delete(self):
    """Hand this object over to the trash can manager (recoverable delete)."""
    TrashCanManager().add(self)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def includeInTpl(self, newTpl, newId):
    """Attach this object to ``newTpl`` and remember the id it got there."""
    self._tpl = newTpl
    self._id = newId
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getId(self):
    """Return this object's id inside its template (-1 when unattached)."""
    return self._id
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def __init__(self, track="--any--", contribType="--any--"):
    """Condition on a (track, contribution type) pair; "--any--" matches all."""
    NotifTplCondition.__init__(self)
    self._track = track
    # Strings (including the "--any--" wildcard) are stored verbatim;
    # contribution-type objects are stored by id.
    if isinstance(contribType, basestring):
        self._contrib_type_id = contribType
    else:
        self._contrib_type_id = contribType.id
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def setContribType(self, ct="--any--"):
    """Store the contribution type id, keeping the "--any--" wildcard as-is."""
    if ct == '--any--':
        self._contrib_type_id = '--any--'
    else:
        self._contrib_type_id = ct.id
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def setTrack(self, tr="--any--"):
    """Set the track this condition matches ("--any--" by default)."""
    self._track = tr
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def _satifiesContribType(self, abs_wrap):
    """Check the accepted contribution type against this condition.

    "--any--" matches everything; "--none--" matches only abstracts
    accepted without a type. (The method name keeps its historical typo
    because callers use it.)

    Fix: removed the unreachable trailing ``return False`` that followed
    an exhaustive if/else.
    """
    abstract_type = abs_wrap.getCurrentStatus().getAbstract().as_new.accepted_type
    if self._contrib_type_id == "--any--":
        return True
    if self._contrib_type_id == '--none--':
        return not abstract_type
    if not abstract_type:
        return False
    # TODO: use ids in db, instead of objects!
    return abstract_type.id == self._contrib_type_id
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def satisfies(self, abs):
    """Only accepted abstracts can match, and then both the contribution
    type and the track conditions must hold."""
    if not isinstance(abs.getCurrentStatus(), AbstractStatusAccepted):
        return False
    return self._satifiesContribType(abs) and self._satifiesTrack(abs)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def satisfies(self, abs):
    """Match any abstract whose current status is "rejected"."""
    return isinstance(abs.getCurrentStatus(), AbstractStatusRejected)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def __init__(self, track=None, contrib_type=None):
    """Condition matching merged abstracts on track and contribution type.

    Fix: with the default ``contrib_type=None`` the original evaluated
    ``None.id`` and raised AttributeError; ``None`` is now stored as-is.
    """
    NotifTplCondition.__init__(self)
    self._track = track
    if contrib_type is None or isinstance(contrib_type, basestring):
        self._contrib_type_id = contrib_type
    else:
        self._contrib_type_id = contrib_type.id
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def _satisfiesContribType(self, abs_wrap):
    """Check the merged abstract's contribution type against this condition.

    "--any--" matches everything; "--none--" matches only typeless abstracts.
    """
    if self._contrib_type_id == '--any--':
        return True
    abstract_type = abs_wrap.getCurrentStatus().getAbstract().as_new.type
    if self._contrib_type_id == '--none--':
        return not abstract_type
    if not abstract_type:
        return False
    # TODO: use ids in db, instead of objects!
    return abstract_type.id == int(self._contrib_type_id)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getTrack(self):
    """Return the track this condition is bound to."""
    return self._track
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def clone(self, conference, template):
    """Copy this condition, resolving track and contribution type against
    the target conference (matched by title / name)."""
    cloned = NotifTplCondMerged()
    for track in conference.getTrackList():
        if track.getTitle() == self.getTrack().getTitle():
            cloned.setTrack(track)
            break
    for ctype in conference.as_event.contribution_types:
        if ctype.name == self.getContribType():
            cloned.setContribType(ctype)
            break
    return cloned
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def __init__(self, abstract):
    """Keep the notification log entries for one abstract."""
    self._abstract = abstract
    self._entries = PersistentList()
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def addEntry(self, newEntry):
    """Append a log entry, ignoring None and duplicates.

    Fix: compare against None with ``is not`` instead of ``!=`` so an
    entry's custom ``__eq__`` cannot defeat the guard.
    """
    if newEntry is not None and newEntry not in self._entries:
        self._entries.append(newEntry)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def removeEntry(self, entry):
    """Remove a log entry if present and move it to the trash.

    Fix: compare against None with ``is not`` instead of ``!=`` (identity,
    not equality, is the intended check).
    """
    if entry is not None and entry in self._entries:
        self._entries.remove(entry)
        entry.delete()
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def clearEntryList(self):
    """Delete every entry; removeEntry also moves each one to the trash."""
    while self.getEntryList():
        self.removeEntry(self.getEntryList()[0])
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def __init__(self, responsible, tpl):
    """Record who triggered a notification, with which template, and when."""
    self._setDate(nowutc())
    self._setResponsible(responsible)
    self._setTpl(tpl)
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getDate(self):
    """Return when the notification was sent."""
    return self._date
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getResponsible(self):
    """Return the user responsible for the notification."""
    return self._responsible
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def getTpl(self):
    """Return the template used for the notification."""
    return self._tpl
belokop/indico_bare
[ 1, 1, 1, 5, 1465204236 ]
def test_clark_dist(self): """Test abydos.distance.Clark.dist.""" # Base cases self.assertEqual(self.cmp.dist('', ''), 0.0) self.assertEqual(self.cmp.dist('a', ''), 1.0) self.assertEqual(self.cmp.dist('', 'a'), 1.0) self.assertEqual(self.cmp.dist('a', 'a'), 0.0) self.assertEqual(self.cmp.dist('abc', ''), 1.0) self.assertEqual(self.cmp.dist('', 'abc'), 1.0) self.assertEqual(self.cmp.dist('abc', 'abc'), 0.0) self.assertEqual(self.cmp.dist('abcd', 'efgh'), 1.0) self.assertAlmostEqual(self.cmp.dist('Nigel', 'Niall'), 0.8164965809) self.assertAlmostEqual(self.cmp.dist('Niall', 'Nigel'), 0.8164965809) self.assertAlmostEqual(self.cmp.dist('Colin', 'Coiln'), 0.8164965809) self.assertAlmostEqual(self.cmp.dist('Coiln', 'Colin'), 0.8164965809) self.assertAlmostEqual( self.cmp.dist('ATCAACGAGT', 'AACGATTAG'), 0.707106781 )
chrislit/abydos
[ 154, 26, 154, 63, 1398235847 ]
def starter(): isRaise = False try: context.plgMain (importLI=ITD_FILE)
Taifxx/xxtrep
[ 9, 6, 9, 4, 1460037342 ]
def service(externalAbort, report):
    ## Load monitor ...
    # Create the Kodi monitor object used by the service.
    # NOTE(review): this block appears truncated in this chunk — the
    # service loop that presumably uses `monitor`, `externalAbort` and
    # `report` is not visible here; confirm against the full source.
    monitor = context.xbmc.Monitor()
Taifxx/xxtrep
[ 9, 6, 9, 4, 1460037342 ]
def run(self):
    '''It runs the analysis.'''
    # The "mapping reference" result is simply a relative symlink to the
    # latest contigs file, named per the backbone convention and carrying
    # the contigs file's extension.
    contigs_path = self._get_input_fpaths()['contigs']
    contigs_ext = contigs_path.extension
    reference_dir = self._create_output_dirs()['result']
    reference_fpath = os.path.join(reference_dir,
                                   BACKBONE_BASENAMES['mapping_reference'] +
                                   '.' + contigs_ext)
    # Recreate the link from scratch so it always points at the latest
    # contigs version.
    if os.path.exists(reference_fpath):
        os.remove(reference_fpath)
    rel_symlink(contigs_path.last_version, reference_fpath)
JoseBlanca/franklin
[ 23, 3, 23, 25, 1272293875 ]
def run(self):
    '''It runs the analysis.'''
    # Maps every read file onto the reference with the platform-specific
    # mapper, producing one bam per read file, then spawns the
    # "select last mapping" sub-analysis.
    self._log({'analysis_started':True})
    project_settings = self._project_settings
    settings = project_settings['Mappers']
    tmp_dir = project_settings['General_settings']['tmpdir']
    project_path = project_settings['General_settings']['project_path']
    unmapped_fhand = None
    # Optionally collect the names of unmapped reads into a gzipped list
    # instead of keeping them in the bam.
    if 'keep_unmapped_reads_in_bam' in settings:
        if settings['keep_unmapped_reads_in_bam'] == False:
            unmapped_fpath = os.path.join(project_path,
                                          BACKBONE_DIRECTORIES['mappings'][0],
                                          BACKBONE_BASENAMES['unmapped_list'])
            unmapped_fhand = GzipFile(unmapped_fpath, 'w')
    inputs = self._get_input_fpaths()
    reads_fpaths = inputs['reads']
    output_dir = self._create_output_dirs(timestamped=True)['result']
    # define color and sequence references
    reference_path = inputs['reference']
    mapping_index_dir = inputs['mapping_index']
    #print reference_path, mapping_index_dir
    #memory for the java programs
    java_mem = self._project_settings['Other_settings']['java_memory']
    picard_path = self._project_settings['Other_settings']['picard_path']
    for read_fpath in reads_fpaths:
        mapping_parameters = {}
        # The read file name encodes metadata, among it the platform ('pl').
        read_info = scrape_info_from_fname(read_fpath)
        platform = read_info['pl']
        #which maper are we using for this platform
        mapper = settings['mapper_for_%s' % platform]
        (reference_fpath,
         color_space) = self._prepare_mapper_index(mapping_index_dir,
                                                   reference_path,
                                                   platform, mapper)
        mapping_parameters['unmapped_fhand'] = unmapped_fhand
        mapping_parameters['colorspace'] = color_space
        out_bam_fpath = os.path.join(output_dir,
                                     read_fpath.basename + '.bam')
        # 454 and sanger produce long reads; everything else is short.
        if platform in ('454', 'sanger'):
            mapping_parameters['reads_length'] = 'long'
        else:
            mapping_parameters['reads_length'] = 'short'
        # Skip read files that already have a bam in this output dir.
        if not os.path.exists(out_bam_fpath):
            mapping_parameters['threads'] = self.threads
            mapping_parameters['java_conf'] = {'java_memory':java_mem,
                                               'picard_path':picard_path}
            mapping_parameters['tmp_dir'] = tmp_dir
            map_reads(mapper,
                      reads_fpath=read_fpath.last_version,
                      reference_fpath=reference_fpath,
                      out_bam_fpath=out_bam_fpath,
                      parameters=mapping_parameters)
    # Now we run the select _last mapping
    self._spawn_analysis(DEFINITIONS['_select_last_mapping'],
                         silent=self._silent)
    self._log({'analysis_finished':True})
JoseBlanca/franklin
[ 23, 3, 23, 25, 1272293875 ]
def run(self):
    '''It runs the analysis.'''
    # Converts every input bam to sam, adds headers/tags, standardizes the
    # sams, merges them into a single bam, sorts it and indexes it.
    self._log({'analysis_started':True})
    settings = self._project_settings
    project_path = settings['General_settings']['project_path']
    tmp_dir = settings['General_settings']['tmpdir']
    inputs = self._get_input_fpaths()
    bam_paths = inputs['bams']
    reference_path = inputs['reference']
    output_dir = self._create_output_dirs()['result']
    merged_bam_path = VersionedPath(os.path.join(output_dir,
                                                 BACKBONE_BASENAMES['merged_bam']))
    merged_bam_fpath = merged_bam_path.next_version
    #Do we have to add the default qualities to the sam file?
    #do we have characters different from ACTGN?
    add_qualities = settings['Sam_processing']['add_default_qualities']
    #memory for the java programs
    java_mem = settings['Other_settings']['java_memory']
    picard_path = settings['Other_settings']['picard_path']
    if add_qualities:
        default_sanger_quality = settings['Other_settings']['default_sanger_quality']
        default_sanger_quality = int(default_sanger_quality)
    else:
        default_sanger_quality = None
    temp_dir = NamedTemporaryDir()
    for bam_path in bam_paths:
        bam_basename = bam_path.basename
        temp_sam = NamedTemporaryFile(prefix='%s.' % bam_basename,
                                      suffix='.sam')
        sam_fpath = os.path.join(temp_dir.name, bam_basename + '.sam')
        bam2sam(bam_path.last_version, temp_sam.name)
        sam_fhand = open(sam_fpath, 'w')
        # First we need to create the sam with added tags and headers
        add_header_and_tags_to_sam(temp_sam, sam_fhand)
        temp_sam.close()
        sam_fhand.close()
        #the standardization
        temp_sam2 = NamedTemporaryFile(prefix='%s.' % bam_basename,
                                       suffix='.sam', delete=False)
        standardize_sam(open(sam_fhand.name), temp_sam2,
                        default_sanger_quality,
                        add_def_qual=add_qualities,
                        only_std_char=True)
        temp_sam2.flush()
        # Replace the tagged sam with its standardized version in place.
        shutil.move(temp_sam2.name, sam_fhand.name)
        temp_sam2.close()
    get_sam_fpaths = lambda dir_: [os.path.join(dir_, fname)
                                   for fname in os.listdir(dir_)
                                   if fname.endswith('.sam')]
    # Once the headers are ready we are going to merge
    sams = get_sam_fpaths(temp_dir.name)
    sams = [open(sam) for sam in sams]
    temp_sam = NamedTemporaryFile(suffix='.sam')
    reference_fhand = open(reference_path.last_version)
    try:
        merge_sam(sams, temp_sam, reference_fhand)
    except Exception:
        # Never leave a half-written merged bam behind on failure.
        if os.path.exists(merged_bam_fpath):
            os.remove(merged_bam_fpath)
        raise
    reference_fhand.close()
    # close files
    for sam in sams:
        sam.close()
    # Convert sam into a bam,(Temporary)
    temp_bam = NamedTemporaryFile(suffix='.bam')
    sam2bam(temp_sam.name, temp_bam.name)
    # finally we need to order the bam
    #print 'unsorted.bam', temp_bam.name
    #raw_input()
    sort_bam_sam(temp_bam.name, merged_bam_fpath,
                 java_conf={'java_memory':java_mem,
                            'picard_path':picard_path},
                 tmp_dir=tmp_dir)
    temp_bam.close()
    temp_sam.close()
    create_bam_index(merged_bam_fpath)
    self._log({'analysis_finished':True})
JoseBlanca/franklin
[ 23, 3, 23, 25, 1272293875 ]
def run(self):
    '''It runs the analysis.'''
    self._log({'analysis_started':True})
    inputs = self._get_input_fpaths()
    bam_path = inputs['bam']
    bam_fpath = bam_path.last_version
    reference_fpath = inputs['reference'].last_version
    # Run "samtools calmd" on the latest bam against the reference; the
    # output becomes the bam's next version, which is then indexed.
    out_fhand = open(bam_path.next_version, 'w')
    cmd = ['samtools', 'calmd', '-Abr', bam_fpath, reference_fpath]
    call(cmd, raise_on_error=True, stdout=out_fhand)
    create_bam_index(out_fhand.name)
    out_fhand.close()
    self._log({'analysis_finished':True})
JoseBlanca/franklin
[ 23, 3, 23, 25, 1272293875 ]
def run(self):
    '''It runs the analysis.'''
    # Realigns the latest bam with GATK (via realign_bam) into a temp
    # file and publishes it as the bam's next version.
    self._log({'analysis_started':True})
    settings = self._project_settings
    project_path = settings['General_settings']['project_path']
    tmp_dir = settings['General_settings']['tmpdir']
    inputs = self._get_input_fpaths()
    bam_path = inputs['bam']
    bam_fpath = bam_path.last_version
    reference_path = inputs['reference']
    #memory for the java programs
    osettings = settings['Other_settings']
    java_mem = osettings['java_memory']
    picard_path = osettings['picard_path']
    gatk_path = osettings['gatk_path']
    #we need a temporary path
    # Only the name is needed; the file itself is closed right away so
    # realign_bam can write to that path.
    temp_bam = NamedTemporaryFile(suffix='.bam')
    temp_bam_fpath = temp_bam.name
    temp_bam.close()
    #do the realigment
    realign_bam(bam_fpath=bam_fpath,
                reference_fpath=reference_path.last_version,
                out_bam_fpath=temp_bam_fpath,
                java_conf={'java_memory':java_mem,
                           'picard_path':picard_path,
                           'gatk_path':gatk_path},
                threads=self.threads,
                tmp_dir=tmp_dir)
    #a new version for the original bam
    out_bam_fpath = bam_path.next_version
    shutil.move(temp_bam_fpath, out_bam_fpath)
    self._log({'analysis_finished':True})
JoseBlanca/franklin
[ 23, 3, 23, 25, 1272293875 ]
def run(self):
    '''It runs the analysis.'''
    # Writes a text summary plus coverage and mapq distribution plots for
    # the latest bam, optionally including the unmapped-read list.
    self._log({'analysis_started':True})
    settings = self._project_settings
    self._create_output_dirs()['result']
    project_name = settings['General_settings']['project_name']
    sample_size = settings['Sam_stats']['sampling_size']
    project_path = settings['General_settings']['project_path']
    inputs = self._get_input_fpaths()
    bam_path = inputs['bam']
    bam_fpath = bam_path.last_version
    bam_fhand = open(bam_fpath)
    out_dir = os.path.abspath(self._get_output_dirs()['result'])
    summary_fname = os.path.join(out_dir,
                                 BACKBONE_BASENAMES['statistics_file'])
    summary_fhand = open(summary_fname, 'w')
    # non mapped_reads_fhand
    unmapped_fpath = os.path.join(project_path,
                                  BACKBONE_DIRECTORIES['mappings'][0],
                                  BACKBONE_BASENAMES['unmapped_list'])
    if os.path.exists(unmapped_fpath):
        unmapped_fhand = GzipFile(unmapped_fpath)
    else:
        unmapped_fhand = None
    #The general statistics
    bam_general_stats(bam_fhand, summary_fhand, unmapped_fhand)
    for kind in ('coverage', 'mapq'):
        basename = os.path.join(out_dir, "%s" % (project_name))
        # Rewind before each pass over the bam.
        bam_fhand.seek(0)
        bam_distribs(bam_fhand, kind, basename=basename,
                     sample_size=sample_size, summary_fhand=summary_fhand,
                     plot_file_format=PLOT_FILE_FORMAT)
    bam_fhand.close()
    if unmapped_fhand is not None:
        unmapped_fhand.close()
JoseBlanca/franklin
[ 23, 3, 23, 25, 1272293875 ]
def __init__(self):
    """Wire up the environment singleton and a module logger."""
    self.env = Environment.getInstance()
    self.log = logging.getLogger(__name__)
    self.log.info("initializing workflow upload handler")
gonicus/gosa
[ 13, 3, 13, 1, 1350561752 ]
def __init__(self):
    # Handle to the native CHIP library; native function signatures are
    # registered immediately after loading.
    self.chipLib = GetLibraryHandle()
    self.__InitNativeFunctions(self.chipLib)
    # Parsed payload attributes, filled in by the visitor callbacks below
    # when a pairing code is parsed.
    self.attributes = {}
    self.vendor_attributes = {}

    def AddAttribute(name, value):
        # Values arrive as bytes from the native layer; store them as str.
        self.attributes[name.decode()] = value.decode()

    def AddVendorAttribute(tag, value):
        self.vendor_attributes[tag] = value.decode()

    # Keep references to the visitor wrappers so the callbacks stay alive
    # for the lifetime of this object.
    self.attribute_visitor = SetupPayload.AttributeVisitor(AddAttribute)
    self.vendor_attribute_visitor = SetupPayload.VendorAttributeVisitor(
        AddVendorAttribute)
project-chip/connectedhomeip
[ 5774, 1369, 5774, 982, 1583255110 ]
def ParseManualPairingCode(self, manualPairingCode: str):
    """Parse a manual pairing code and populate this payload's attributes.

    Raises ChipStackError on a non-zero native status; returns self for
    chaining.
    """
    self.Clear()
    status = self.chipLib.pychip_SetupPayload_ParseManualPairingCode(
        manualPairingCode.encode(), self.attribute_visitor,
        self.vendor_attribute_visitor)
    if status != 0:
        raise ChipStackError(status)
    return self
project-chip/connectedhomeip
[ 5774, 1369, 5774, 982, 1583255110 ]
def Print(self):
    """Dump all parsed attributes and vendor attributes to stdout."""
    for name, value in self.attributes.items():
        extra = self.__DecorateValue(name, value)
        extra = f" [{extra}]" if extra else ""
        print(f"{name}: {value}{extra}")

    for tag, value in self.vendor_attributes.items():
        print(f"Vendor attribute '{tag:>3}': {value}")
project-chip/connectedhomeip
[ 5774, 1369, 5774, 982, 1583255110 ]
def __DecorateValue(self, name, value):
    """Return a human-readable annotation for known attributes, else None."""
    if name != "RendezvousInformation":
        return None
    # The value is a bitmask of supported rendezvous transports.
    flags = int(value)
    labels = [label
              for bit, label in ((0b001, "SoftAP"),
                                 (0b010, "BLE"),
                                 (0b100, "OnNetwork"))
              if flags & bit]
    return ', '.join(labels)
project-chip/connectedhomeip
[ 5774, 1369, 5774, 982, 1583255110 ]
def test_default_automl_with_regression_task():
    """AutoML smoke test on a regression dataset: trains two models and
    checks the leaderboard exposes the regression metric columns in the
    expected order."""
    ds = import_dataset('regression')
    aml = H2OAutoML(max_models=2, project_name='aml_regression')
    aml.train(y=ds.target, training_frame=ds.train,
              validation_frame=ds.valid, leaderboard_frame=ds.test)
    print(aml.leader)
    print(aml.leaderboard)
    assert aml.leaderboard.columns == ["model_id", "mean_residual_deviance",
                                       "rmse", "mse", "mae", "rmsle"]
h2oai/h2o-3
[ 6169, 1943, 6169, 208, 1393862887 ]
def _log_thread_running_at_shutdown(name: str, ident: int) -> None:
    """Log the stack of a thread that was still running at shutdown."""
    frames = sys._current_frames()  # pylint: disable=protected-access
    formatted = "".join(traceback.format_stack(frames.get(ident))).strip()
    _LOGGER.warning(
        "Thread[%s] is still running at shutdown: %s",
        name,
        formatted,
    )
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def shutdown(self, *args, **kwargs) -> None:  # type: ignore
    """Shutdown backport from cpython 3.9 with interrupt support added."""
    with self._shutdown_lock:  # type: ignore[attr-defined]
        self._shutdown = True
        # Drain all work items from the queue, and then cancel their
        # associated futures.
        while True:
            try:
                work_item = self._work_queue.get_nowait()
            except queue.Empty:
                break
            if work_item is not None:
                work_item.future.cancel()
        # Send a wake-up to prevent threads calling
        # _work_queue.get(block=True) from permanently blocking.
        self._work_queue.put(None)
    # The above code is backported from python 3.9
    #
    # For maintainability join_threads_or_timeout is
    # a separate function since it is not a backport from
    # cpython itself
    #
    self.join_threads_or_timeout()
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def render_matrix(request):
    """Render the similarity-matrix page for the user's current selection."""
    # pull the user's selection out of the session
    simple_selection = request.session.get('selection', False)

    alignment = Alignment()
    alignment.load_proteins_from_selection(simple_selection)
    alignment.load_segments_from_selection(simple_selection)
    alignment.build_alignment()
    # consensus sequence / frequency statistics are NOT needed for a
    # similarity matrix, only the per-row identity/similarity
    alignment.calculate_similarity_matrix()

    return render(request, 'similaritymatrix/matrix.html',
                  {'p': alignment.proteins, 'm': alignment.similarity_matrix})
protwis/protwis
[ 27, 66, 27, 10, 1453280568 ]
def configure_loader_modules():
    """Provide an empty loader configuration for the modjk module."""
    return {modjk: {}}
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def test_get_running():
    """
    Test for get the current running config (not from disk)
    """
    mocked_http = patch.object(modjk, "_do_http", return_value={})
    with mocked_http:
        assert modjk.get_running() == {}
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def test_list_configured_members():
    """
    Test for return a list of member workers from the configuration files
    """
    with patch.object(modjk, "_do_http", return_value={}):
        assert modjk.list_configured_members("loadbalancer1") == []

    response = {"worker.loadbalancer1.balance_workers": "SALT"}
    with patch.object(modjk, "_do_http", return_value=response):
        assert modjk.list_configured_members("loadbalancer1") == ["SALT"]
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def test_recover_all():
    """
    Test for set the all the workers in lbn to recover and activate them
    if they are not
    """
    with patch.object(modjk, "_do_http", return_value={}):
        assert modjk.recover_all("loadbalancer1") == {}

    balance = {"worker.loadbalancer1.balance_workers": "SALT"}
    status = {"activation": "ACT", "state": "OK"}
    with patch.object(modjk, "_do_http", return_value=balance):
        with patch.object(modjk, "worker_status", return_value=status):
            assert modjk.recover_all("loadbalancer1") == {"SALT": status}
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def test_lb_edit():
    """
    Test for edit the loadbalancer settings
    """
    ok = {"worker.result.type": "OK"}
    with patch.object(modjk, "_do_http", return_value=ok):
        assert modjk.lb_edit("loadbalancer1", {"vlr": 1, "vlt": 60})
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def test_bulk_activate():
    """
    Test for activate all the given workers in the specific load balancer
    """
    ok = {"worker.result.type": "OK"}
    with patch.object(modjk, "_do_http", return_value=ok):
        assert modjk.bulk_activate(["node1", "node2", "node3"],
                                   "loadbalancer1")
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def test_bulk_recover():
    """
    Test for recover all the given workers in the specific load balancer
    """
    ok = {"worker.result.type": "OK"}
    with patch.object(modjk, "_do_http", return_value=ok):
        assert modjk.bulk_recover(["node1", "node2", "node3"],
                                  "loadbalancer1")
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def test_worker_recover():
    """
    Test for set the worker to recover this module will fail if it is in
    OK state
    """
    with patch.object(modjk, "_do_http", return_value={}):
        assert modjk.worker_recover("node1", "loadbalancer1") == {}
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def test_worker_activate():
    """
    Test for set the worker to activate state in the lbn load balancer
    """
    ok = {"worker.result.type": "OK"}
    with patch.object(modjk, "_do_http", return_value=ok):
        assert modjk.worker_activate("node1", "loadbalancer1")
saltstack/salt
[ 13089, 5388, 13089, 3074, 1298233016 ]
def read_input(message):
    """Prompt the user, picking the input builtin for the running interpreter."""
    if sys.version_info[0] < 3:
        # raw_input is not defined in python3, and the linter will thus
        # throw an undefined-variable<E0602> error on this line.
        # Suppress it here.
        return raw_input(message)  # pylint: disable=E0602
    return input(message)
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def __init__(self, func, args=None, kwargs=None):
    """Capture a callable with its positional/keyword args for later invoke.

    Mutable defaults are materialised per-instance, never shared.
    """
    self.func = func
    self.args = [] if args is None else args
    self.kwargs = {} if kwargs is None else kwargs
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def __init__(self):
    """Wire up OS/protocol utilities and install the SIGINT handler."""
    self.osutil = get_osutil()
    self.protocol_util = get_protocol_util()
    self.actions_running = False
    signal.signal(signal.SIGINT, self.handle_interrupt_signal)
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def del_user(self, warnings, actions):
    """Queue deletion of the provisioned user account, warning the operator.

    Silently skips (with warnings) when ovf-env.xml cannot be read.
    """
    try:
        ovfenv = self.protocol_util.get_ovf_env()
    except ProtocolError:
        warnings.append("WARNING! ovf-env.xml is not found.")
        warnings.append("WARNING! Skip delete user.")
        return

    username = ovfenv.username
    warnings.append(("WARNING! {0} account and entire home directory "
                     "will be deleted.").format(username))
    actions.append(DeprovisionAction(self.osutil.del_account,
                                     [username]))
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def stop_agent_service(self, warnings, actions):
    """Warn about and schedule stopping the waagent service."""
    warnings.append("WARNING! The waagent service will be stopped.")
    actions.append(DeprovisionAction(self.osutil.stop_agent_service))
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def del_files(self, warnings, actions):  # pylint: disable=W0613
    """Schedule removal of shell history and the agent log (plus OpenBSD
    key/seed material)."""
    history_and_log = ['/root/.bash_history', conf.get_agent_log_file()]
    actions.append(DeprovisionAction(fileutil.rm_files, history_and_log))
    # For OpenBSD
    actions.append(DeprovisionAction(fileutil.rm_files,
                                     ["/etc/random.seed",
                                      "/var/db/host.random",
                                      "/etc/isakmpd/local.pub",
                                      "/etc/isakmpd/private/local.key",
                                      "/etc/iked/private/local.key",
                                      "/etc/iked/local.pub"]))
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def del_dhcp_lease(self, warnings, actions):
    """Warn about and schedule deletion of cached DHCP lease data."""
    warnings.append("WARNING! Cached DHCP leases will be deleted.")
    lease_dirs = ["/var/lib/dhclient", "/var/lib/dhcpcd", "/var/lib/dhcp"]
    actions.append(DeprovisionAction(fileutil.rm_dirs, lease_dirs))

    # For FreeBSD and OpenBSD
    actions.append(DeprovisionAction(fileutil.rm_files,
                                     ["/var/db/dhclient.leases.*"]))

    # For FreeBSD, NM controlled
    actions.append(DeprovisionAction(fileutil.rm_files,
                                     ["/var/lib/NetworkManager/dhclient-*.lease"]))
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def del_lib_dir_files(self, warnings, actions):  # pylint: disable=W0613
    """Schedule removal of goal-state bookkeeping files from the lib dir."""
    known_files = [
        'HostingEnvironmentConfig.xml',
        'Incarnation',
        'partition',
        'Protocol',
        'SharedConfig.xml',
        'WireServerEndpoint'
    ]
    known_files_glob = [
        'Extensions.*.xml',
        'ExtensionsConfig.*.xml',
        'GoalState.*.xml'
    ]
    lib_dir = conf.get_lib_dir()
    candidates = (os.path.join(lib_dir, name) for name in known_files)
    files = [path for path in candidates if os.path.isfile(path)]
    for pattern in known_files_glob:
        files.extend(glob.glob(os.path.join(lib_dir, pattern)))
    if files:
        actions.append(DeprovisionAction(fileutil.rm_files, files))
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def setup(self, deluser):
    """Build the ordered lists of operator warnings and deprovision actions.

    The call order below defines the order actions run in; do not reorder.
    """
    warnings = []
    actions = []
    self.stop_agent_service(warnings, actions)
    if conf.get_regenerate_ssh_host_key():
        self.regen_ssh_host_key(warnings, actions)
    self.del_dhcp_lease(warnings, actions)
    self.reset_hostname(warnings, actions)
    if conf.get_delete_root_password():
        self.del_root_password(warnings, actions)
    self.del_dirs(warnings, actions)
    self.del_files(warnings, actions)
    self.del_resolv(warnings, actions)
    # Deleting the provisioned user is opt-in.
    if deluser:
        self.del_user(warnings, actions)
    self.del_persist_firewall_rules(actions)
    return warnings, actions
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def run(self, force=False, deluser=False):
    """Deprovision: show all warnings, then run actions once confirmed."""
    warnings, actions = self.setup(deluser)
    self.do_warnings(warnings)
    if not self.do_confirmation(force=force):
        return
    self.do_actions(actions)
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def do_actions(self, actions):
    """Invoke each queued action, flagging that actions are in progress."""
    self.actions_running = True
    for act in actions:
        act.invoke()
    self.actions_running = False
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def do_warnings(self, warnings):
    """Print every accumulated warning line to stdout."""
    for line in warnings:
        print(line)
Azure/WALinuxAgent
[ 495, 376, 495, 85, 1339008955 ]
def to_return(self):
    """Collect all returnable attributes into a filtered result dict."""
    result = {key: getattr(self, key) for key in self.returnables}
    return self._filter_params(result)
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def search(self):
    """Return the configured search domains as plain strings, or None."""
    if self._values['search'] is None:
        return None
    return [str(item) for item in self._values['search']]
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def name_servers(self):
    """Return the configured name servers as plain strings, or None."""
    if self._values['name_servers'] is None:
        return None
    return [str(server) for server in self._values['name_servers']]
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def cache(self):
    """Normalise the cache setting to the 'enable'/'disable' API values."""
    enabled = str(self._values['cache']) in ['enabled', 'enable']
    return 'enable' if enabled else 'disable'
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def dhcp(self):
    """Return True when DHCP is enabled ('enable'/'enabled'), else False."""
    return self._values['dhcp'] in ('enable', 'enabled')
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def forwarders(self):
    """Reject any attempt to modify forwarders (unsupported); None passes."""
    if self._values['forwarders'] is None:
        return None
    raise F5ModuleError(
        "The modifying of forwarders is not supported."
    )
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def ip_version(self):
    """Map the supplied ip_version to the BIG-IP dns option string."""
    version = self._values['ip_version']
    if version in [6, '6', 'options inet6']:
        return "options inet6"
    if version in [4, '4', '']:
        return ""
    return None
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def __init__(self, client):
    """Hold the API client plus desired (want) and current (have) state."""
    self.client = client
    self.have = None
    self.want = Parameters(self.client.module.params)
    self.changes = Parameters()
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def exec_module(self):
    """Apply the requested state and report the resulting change set."""
    changed = False
    state = self.want.state
    try:
        if state == "present":
            changed = self.update()
        elif state == "absent":
            changed = self.absent()
    except iControlUnexpectedHTTPError as e:
        raise F5ModuleError(str(e))
    result = dict()
    result.update(**self.changes.to_return())
    result.update(dict(changed=changed))
    return result
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def update(self):
    """Read device state; push an update (unless check mode) when it differs."""
    self.have = self.read_current_from_device()
    if self.should_update():
        if not self.client.check_mode:
            self.update_on_device()
        return True
    return False
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def update_on_device(self):
    """Apply desired DNS settings (and the cache db value) in one transaction."""
    params = self.want.api_params()
    tx = self.client.api.tm.transactions.transaction
    with BigIpTxContext(tx) as api:
        cache = api.tm.sys.dbs.db.load(name='dns.cache')
        dns = api.tm.sys.dns.load()
        # Empty values can be supplied, but you cannot supply the
        # None value, so we check for that specifically
        if self.want.cache is not None:
            cache.update(value=self.want.cache)
        if params:
            dns.update(**params)
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def should_absent(self):
    """Return True when applying the absent state would change the device."""
    return bool(self._absent_changed_options())
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def absent_on_device(self):
    """Push the computed removal parameters to the device in a transaction."""
    params = self.changes.api_params()
    tx = self.client.api.tm.transactions.transaction
    with BigIpTxContext(tx) as api:
        dns = api.tm.sys.dns.load()
        dns.update(**params)
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def __init__(self):
    """Declare the Ansible argument spec for the BIG-IP DNS module."""
    self.supports_check_mode = True
    self.argument_spec = dict(
        cache=dict(
            required=False,
            choices=['disabled', 'enabled', 'disable', 'enable'],
            default=None
        ),
        name_servers=dict(
            required=False,
            default=None,
            type='list'
        ),
        forwarders=dict(
            required=False,
            default=None,
            type='list'
        ),
        search=dict(
            required=False,
            default=None,
            type='list'
        ),
        ip_version=dict(
            required=False,
            default=None,
            choices=[4, 6],
            type='int'
        ),
        state=dict(
            required=False,
            default='present',
            choices=['absent', 'present']
        )
    )
    # At least one manageable option has to be supplied.
    self.required_one_of = [
        ['name_servers', 'search', 'forwarders', 'ip_version', 'cache']
    ]
    self.f5_product_name = 'bigip'
mcgonagle/ansible_f5
[ 10, 27, 10, 1, 1478300759 ]
def get_human_type(cls, abbreviated_type):
    """
    Args:
        abbreviated_type(str): An attribute of DDBType

    Returns:
        str: The human readable form of the DDBType (the input itself
        when no mapping is known).
    """
    mapping = cls._human_type_mapping
    return mapping.get(abbreviated_type, abbreviated_type)
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def __init__(self, type_as_dict):
    # Accept either another DynamoType (copy its fields) or the raw
    # single-entry {"<type>": <value>} wire representation.
    if type(type_as_dict) == DynamoType:
        self.type = type_as_dict.type
        self.value = type_as_dict.value
    else:
        self.type = list(type_as_dict)[0]
        self.value = list(type_as_dict.values())[0]
    # Recursively wrap container members so nested values are DynamoTypes.
    if self.is_list():
        self.value = [DynamoType(val) for val in self.value]
    elif self.is_map():
        self.value = dict((k, DynamoType(v)) for k, v in self.value.items())
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def __hash__(self):
    """Hash on the (type, value) pair so equal typed values collide."""
    return hash((self.type, self.value))
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def __ne__(self, other):
    """Two typed values differ when either the type or the value differs."""
    return not (self.type == other.type and self.value == other.value)
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def __le__(self, other):
    """Order by the native (cast) values."""
    return self.cast_value <= other.cast_value
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def __ge__(self, other):
    """Order by the native (cast) values."""
    return self.cast_value >= other.cast_value
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def __add__(self, other):
    """Add two numeric DynamoTypes; non-numbers raise IncorrectDataType."""
    if self.type != other.type:
        raise TypeError("Different types of operandi is not allowed.")
    if not self.is_number():
        raise IncorrectDataType()
    # DynamoDB numbers are stored as strings; a '.' marks a float.
    def parse(raw):
        return float(raw) if "." in raw else int(raw)
    total = parse(self.value) + parse(other.value)
    return DynamoType({DDBType.NUMBER: "{v}".format(v=total)})
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def __getitem__(self, item):
    """Subscript maps by string key and lists by integer index."""
    if isinstance(item, str) and self.type == DDBType.MAP:
        return self.value[item]
    if isinstance(item, int) and self.type == DDBType.LIST:
        return self.value[item]
    raise TypeError(
        "This DynamoType {dt} is not subscriptable by a {it}".format(
            dt=self.type, it=type(item)
        )
    )
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def cast_value(self):
    """Return the native Python value for this DynamoDB-typed value,
    recursing into sets, lists and maps."""
    if self.is_number():
        try:
            return int(self.value)
        except ValueError:
            return float(self.value)
    if self.is_set():
        member_type = self.type[0]
        return {DynamoType({member_type: v}).cast_value
                for v in self.value}
    if self.is_list():
        return [DynamoType(v).cast_value for v in self.value]
    if self.is_map():
        return {k: DynamoType(v).cast_value
                for k, v in self.value.items()}
    return self.value
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def size(self):
    """Return the byte size this value contributes to a stored item,
    recursing into sets, lists and maps."""
    if self.is_number():
        return len(str(self.value))
    if self.is_set():
        member_type = self.type[0]
        return sum(DynamoType({member_type: v}).size()
                   for v in self.value)
    if self.is_list():
        return sum(v.size() for v in self.value)
    if self.is_map():
        return sum(bytesize(k) + DynamoType(v).size()
                   for k, v in self.value.items())
    # Booleans always cost a single byte; everything else by raw size.
    if type(self.value) == bool:
        return 1
    return bytesize(self.value)
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]