function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def updateb(event=None):
    """Page the plot slider back by one screen's worth of plots."""
    global position
    stepped_back = position - nperscreen
    position = min(nplots - nperscreen, max(0, stepped_back))
    plotparslider.set_val(position)
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def updaten(event=None):
    """Page the plot slider forward by one screen's worth of plots."""
    global position
    stepped_forward = position + nperscreen
    position = min(nplots - nperscreen, max(0, stepped_forward))
    plotparslider.set_val(position)
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def update(tmp=0):
    """Redraw every visible parameter axes starting at slider position `tmp`.

    Reads module globals: position, nplots, nperscreen, plotparsaxs,
    allplotdata, tvec, fontsize, verbose, die.
    """
    global position, plotparslider
    # Clamp the requested position into [0, nplots - nperscreen].
    position = tmp
    position = max(0, position)
    position = min(nplots - nperscreen, position)
    t = tic()
    for i, ax in enumerate(plotparsaxs):
        ax.cla()
        for item in ax.get_xticklabels() + ax.get_yticklabels():
            item.set_fontsize(fontsize)
        ax.hold(True)  # NOTE(review): Axes.hold was removed in matplotlib 3.0 -- confirm pinned version
        nplt = i + position  # Index of the parameter shown on this axes
        if nplt < nplots:
            for pd, plotdata in enumerate(allplotdata):
                try:
                    this = plotdata[nplt, :]  # [title, values, scatter-x, scatter-y] -- presumably; confirm upstream
                    ax.set_title(this[0])
                    if isinstance(this[1], dict):
                        if len(this[1].keys()) == 1:
                            this[1] = this[1][0]  # Actually needs to be an odict
                        elif len(this[1].keys()) > 1:
                            raise OptimaException('Expecting a number or an array or even an odict with one key, but got an odict with multiple keys (%s)' % this[0])
                    # Scalars and single values are drawn as flat lines over tvec.
                    if isnumber(this[1]):
                        ax.plot(tvec, 0 * tvec + this[1])
                    elif len(this[1]) == 0:
                        ax.set_title(this[0] + ' is empty')
                    elif len(this[1]) == 1:
                        ax.plot(tvec, 0 * tvec + this[1])
                    elif len(this[1]) == len(tvec):
                        ax.plot(tvec, this[1])
                    else:
                        pass  # Population size, doesn't use control points
                    printv('Plot %i/%i...' % (i * len(allplotdata) + pd + 1, len(plotparsaxs) * len(allplotdata)), 2, verbose)
                except Exception as E:
                    if die:
                        raise E
                    else:
                        print('??????: %s' % repr(E))
                # Overlay scatter data when present; failures here are best-effort only.
                try:
                    if not (hasattr(this[3], '__len__') and len(this[3]) == 0):
                        ax.scatter(this[2], this[3])
                except Exception:
                    pass  # print('Problem with "%s": "%s"' % (this[0], repr(E)))
                if pd == len(allplotdata) - 1:  # Do this for the last plot only
                    ax.set_ylim((0, 1.1 * ax.get_ylim()[1]))
                    ax.set_xlim((tvec[0], tvec[-1]))
    toc(t)
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def showplots(plots=None, figsize=None): ''' This function can be used to show plots (in separate figure windows, independently of generating them.
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def loadplot(filename=None): ''' Load a plot from a file and reanimate it.
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def makenewfigure(**figargs): ''' PyQt-specific function for maximizing the current figure ''' global scrwid, scrhei
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def closegui(event=None):
    """Close all GUI windows, ignoring figures that are already gone."""
    global panelfig, plotfig
    for fig in (plotfig, panelfig):
        try:
            close(fig)
        except:
            pass
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def clearselections(event=None):
    """Hide every checkbox cross mark, then redraw the plots."""
    global check
    for cross in check.lines:
        cross[0].set_visible(False)
        cross[1].set_visible(False)
    updateplots()
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def advancedselections(event=None):
    ''' Toggle advanced options by tearing down and rebuilding the whole GUI. '''
    global check, checkboxes, updatebutton, clearbutton, defaultsbutton, advancedbutton, closebutton, plotfig, panelfig, results, plotargs, globaladvanced
    globaladvanced = not(globaladvanced)  # Toggle
    try: close(plotfig)  # These work better here than calling closegui() directly
    except: pass
    try: close(panelfig)
    except: pass
    # Null out every widget reference so the rebuilt GUI starts from scratch.
    check, checkboxes, updatebutton, clearbutton, defaultsbutton, advancedbutton, closebutton, plotfig, panelfig, plotargs = [None]*10  # Clear the bejesus out of everything
    pygui(results, advanced=globaladvanced)
    print('Switching to/from advanced; if GUI hangs, press enter in console')  # Unfortunately, this happens from time to time
    pause(0.2)  # Without this, it doesn't work...siiiigh
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def zoomplots(event=None, ratio=1.0):
    """Rescale every axes in the plot figure about its own centre by `ratio`."""
    global plotfig
    for ax in plotfig.axes:
        pos = ax.get_position()
        width = pos.x1 - pos.x0
        height = pos.y1 - pos.y0
        # Shift the origin so the axes shrinks/grows symmetrically.
        dx = width * (1 - ratio) / 2.0
        dy = height * (1 - ratio) / 2.0
        ax.set_position([pos.x0 + dx, pos.y0 + dy, width * ratio, height * ratio])
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def zoomout(event=None):
    """Zoom out of the plots (shrink each axes by 10%)."""
    return zoomplots(event=event, ratio=0.9)
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def updateplots(event=None, tmpresults=None, **kwargs): ''' Close current window if it exists and open a new one based on user selections ''' global plotfig, check, checkboxes, results, plotargs if tmpresults is not None: results = tmpresults
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def resetbudget():
    """Replace the displayed budget with the portfolio's summed default budgets."""
    global globalportfolio, objectiveinputs
    totalbudget = sum(
        sum(project.progsets[0].getdefaultbudget().values())
        for project in globalportfolio.projects.values()
    )
    objectiveinputs['budget'].setText(str(totalbudget/budgetfactor))
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def gui_loadproj():
    """Prompt the user for a project file and load it.

    Returns:
        The loaded Project, or None if the dialog was cancelled, the file
        failed to load, or the file was not an Optima project.
    """
    filepath = pyqt.QFileDialog.getOpenFileName(caption='Choose project file', filter='*'+prjext)
    project = None
    if not filepath:
        print('No filepath provided')
        return project
    try:
        project = loadproj(filepath, verbose=0)
    except Exception as E:
        print('Could not load file "%s": "%s"' % (filepath, repr(E)))
    # isinstance() instead of type()==Project: also accepts Project subclasses
    # and is the idiomatic type check.
    if isinstance(project, Project):
        return project
    print('File "%s" is not an Optima project file' % filepath)
    return project
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def gui_makesheet(): ''' Create a geospatial spreadsheet template based on a project file '''
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def gui_makeproj():
    """Create a series of project files from a seed project plus a geospatial spreadsheet."""
    seedproject = gui_loadproj()
    sheetpath = pyqt.QFileDialog.getOpenFileName(caption='Choose geospatial spreadsheet', filter='*.xlsx')
    outputdir = pyqt.QFileDialog.getExistingDirectory(caption='Choose output folder')
    makegeoprojects(project=seedproject, spreadsheetpath=sheetpath, destination=outputdir)
    warning('Created projects from spreadsheet')
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def gui_addproj():
    """Add a project -- same as creating a portfolio, but without overwriting."""
    gui_create(doadd=True)
    # Refresh the displayed budget to include the newly added project.
    resetbudget()
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def gui_rungeo():
    ''' Actually run geospatial analysis!!! '''
    global globalportfolio, globalobjectives, objectiveinputs
    starttime = time()
    if globalobjectives is None:
        globalobjectives = defaultobjectives()
    globalobjectives['budget'] = 0.0  # Reset
    # SECURITY NOTE(review): eval() on raw text-field input executes arbitrary
    # Python. Acceptable only because this is a local analyst GUI; consider
    # float()/ast.literal_eval() instead.
    for key in objectiveinputs.keys():
        globalobjectives[key] = eval(str(objectiveinputs[key].text()))  # Get user-entered values
    globalobjectives['budget'] *= budgetfactor  # Convert back to internal representation
    BOCobjectives = dcp(globalobjectives)  # Deep copy so BOC generation can't mutate the GA objectives
    try:
        globalportfolio.genBOCs(objectives=BOCobjectives, maxtime=30, mc=0)
        globalportfolio.runGA(objectives=globalobjectives, maxtime=30, reoptimize=True, mc=0, batch=True, verbose=2, die=False, strict=True)
    except Exception as E:
        warning('Geospatial analysis failed: %s' % repr(E))
    warning('Geospatial analysis finished running; total time: %0.0f s' % (time() - starttime))
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def gui_plotgeo():
    ''' Actually plot geospatial analysis!!! '''
    global globalportfolio
    if globalportfolio is None:
        warning('Please load a portfolio first')
        return None
    # deriv=False: plot the budget-objective curves themselves (presumably
    # rather than their derivatives -- confirm against plotBOCs).
    globalportfolio.plotBOCs(deriv=False)
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def gui_saveport():
    ''' Save the current portfolio '''
    global globalportfolio
    filepath = pyqt.QFileDialog.getSaveFileName(caption='Save portfolio file', filter='*'+prtext)
    # NOTE(review): filepath is not checked; cancelling the dialog passes an
    # empty path straight to saveobj -- confirm intended behaviour.
    saveobj(filepath, globalportfolio)
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def geogui(): ''' Open the GUI for doing geospatial analysis.
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def updateprojectinfo():
    """Show the repr of the currently selected project in the info box."""
    global globalportfolio, projectslistbox, projectinfobox
    selected = projectslistbox.currentRow()
    projectinfobox.setText(repr(globalportfolio.projects[selected]))
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def removeproject():
    """Remove the selected project from both the portfolio and the list widget."""
    global projectslistbox, projectinfobox, globalportfolio
    ind = projectslistbox.currentRow()
    # list(...) so indexing works on Python 3, where dict.keys() returns a
    # non-subscriptable view (the original keys()[ind] raises TypeError there).
    key = list(globalportfolio.projects.keys())[ind]
    globalportfolio.projects.pop(key)  # Remove from portfolio
    projectslistbox.takeItem(ind)  # Remove from list
    return None
optimamodel/Optima
[ 7, 1, 7, 8, 1406655820 ]
def __init__(self, file, **settings): self.file = file self.settings = {"profile_set": None, "material": None} for key, value in settings.items(): self.settings[key] = value
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def test_recent_filtering(self):
    """Recent links must exclude URLs already shown as popular or custom links,
    and be capped at five entries."""
    def _get_recent(data):
        # Collect the URLs of the 'recent_links' section as a set.
        recent = set()
        for link in data['recent_links']:
            recent.add(link['url'])
        return recent
    username = 'none'
    req = get_request_with_user(username)
    user = get_user_model(req)
    u1 = 'http://example.com?q=1'
    u2 = 'http://example.com?q=2'
    v1 = VisitedLinkNew.objects.create(user=user, url=u1)
    self.assertTrue(get_recent_link_by_id(req, v1.pk))
    v2 = VisitedLinkNew.objects.create(user=user, url=u2)
    # Both visited links should show up as recent.
    data = get_quicklink_data(req)
    recent = _get_recent(data)
    self.assertEquals(len(recent), 2)
    self.assertTrue(u1 in recent)
    self.assertTrue(u2 in recent)
    # Making u2 a popular link removes it from the recent list.
    plink = PopularLink.objects.create(url=u2)
    self.assertTrue(get_popular_link_by_id(plink.pk))
    self.assertIsNotNone(plink.json_data())
    self.assertIsNotNone(str(plink))
    data = get_quicklink_data(req)
    recent = _get_recent(data)
    self.assertEquals(len(recent), 1)
    self.assertTrue(u1 in recent)
    # Making u1 a custom link removes it too, leaving no recents.
    CustomLink.objects.create(user=user, url=u1)
    data = get_quicklink_data(req)
    recent = _get_recent(data)
    self.assertEquals(len(recent), 0)
    # Ten fresh visits are truncated to the five-item recent cap.
    for i in range(10):
        VisitedLinkNew.objects.create(user=user, url="http://example.com?q=%s" % i)
    data = get_quicklink_data(req)
    recent = _get_recent(data)
    self.assertEquals(len(recent), 5)
uw-it-aca/myuw
[ 13, 6, 13, 3, 1417029795 ]
def test_hidden_link(self):
    """Hiding a URL twice returns the same row; deleting twice returns None the
    second time."""
    req = get_request_with_user('none')
    url = "http://s.ss.edu"
    link = add_hidden_link(req, url)
    self.assertEquals(link.url, url)
    # second time: must be idempotent and reuse the same row
    link1 = add_hidden_link(req, url)
    self.assertEquals(link.pk, link1.pk)
    self.assertIsNotNone(delete_hidden_link(req, link.pk))
    # second time: row already gone, so delete reports None
    self.assertIsNone(delete_hidden_link(req, link.pk))
uw-it-aca/myuw
[ 13, 6, 13, 3, 1417029795 ]
def test_delete_custom_link(self):
    """Deleting an existing custom link succeeds; repeating it returns None."""
    username = 'none'
    req = get_request_with_user(username)
    url = "http://s.ss.edu"
    link = add_custom_link(req, url)
    self.assertIsNotNone(delete_custom_link(req, link.pk))
    # second time: the row is already deleted
    self.assertIsNone(delete_custom_link(req, link.pk))
uw-it-aca/myuw
[ 13, 6, 13, 3, 1417029795 ]
def test_get_quicklink_data(self):
    """A visited link shows as recent and the matching popular link as default."""
    # NOTE(review): "affiliation" appears twice in this literal; Python keeps
    # only the last value ("{intl_stud: True}"), so "student" is silently
    # discarded -- confirm which value was intended.
    data = {
        "affiliation": "student",
        "url": "http://iss1.washington.edu/",
        "label": "ISS1",
        "campus": "seattle",
        "pce": False,
        "affiliation": "{intl_stud: True}",
    }
    plink = PopularLink.objects.create(**data)
    username = "jinter"
    req = get_request_with_user(username)
    affiliations = get_all_affiliations(req)
    user = get_user_model(req)
    link_data = {
        "user": user,
        "url": "http://iss.washington.edu/",
        "label": "ISS1",
        "is_anonymous": False,
        "is_student": affiliations.get('student', False),
        # NOTE(review): "is_undegrad" looks like a typo of "is_undergrad", but
        # it must match the model field name -- verify before renaming.
        "is_undegrad": affiliations.get('undergrad', False),
        "is_grad_student": affiliations.get('grad', False),
        "is_employee": affiliations.get('employee', False),
        "is_faculty": affiliations.get('faculty', False),
        "is_seattle": affiliations.get('seattle', False),
        "is_tacoma": affiliations.get('tacoma', False),
        "is_bothell": affiliations.get('bothell', False),
        "is_pce": affiliations.get('pce', False),
        "is_student_employee": affiliations.get('stud_employee', False),
        "is_intl_stud": affiliations.get('intl_stud', False)
    }
    l1 = VisitedLinkNew.objects.create(**link_data)
    qls = get_quicklink_data(req)
    self.assertEqual(qls['recent_links'][0]['label'], "ISS1")
    self.assertEqual(qls['default_links'][0]['label'],
                     "International Student Services (ISS)")
uw-it-aca/myuw
[ 13, 6, 13, 3, 1417029795 ]
def test_tac_quicklinks(self):
    """The 'tacgrad' test user's default quicklinks start with the ISSS link."""
    username = "tacgrad"
    req = get_request_with_user(username)
    tac_qls = get_quicklink_data(req)
    self.assertEqual(tac_qls['default_links'][0]['label'],
                     "International Student and Scholar Services (ISSS)")
uw-it-aca/myuw
[ 13, 6, 13, 3, 1417029795 ]
def splice(act, **info):
    '''
    Form a splice event from a given act name and info.

    Args:
        act (str): The name of the action.
        **info: Additional information about the event.

    Example:
        splice = splice('add:node', form='inet:ipv4', valu=0)
        self.fire(splice)

    Notes:
        Splice events were reworked in v0.0.45 and now contain a sub-event
        of the (act, info) under the 'mesg' key.

    Returns:
        (str, dict): The splice event.
    '''
    event = (act, info)
    return event
vivisect/synapse
[ 280, 64, 280, 16, 1433978981 ]
def __init__(self, input_graph, output_node_names, perchannel, start_node_name):
    # No extra state: construction is delegated unchanged to the base
    # fusion-pass class.
    super(FuseNodeStartWithPad, self).__init__(input_graph, output_node_names,
                                               perchannel, start_node_name)
mlperf/training_results_v0.7
[ 11, 25, 11, 1, 1606268455 ]
def _apply_pad_conv_fusion(self):
    """Walk the node mapping and fold eligible Pad ops into the following Conv2D.

    A Pad is eligible when its single consumer is a Conv2D and a downstream
    relu is found; all other nodes are copied through unchanged.
    """
    for _, value in self.node_name_mapping.items():
        # Bug fix: the original used `value.node.op in ("Pad")`, which is a
        # substring test against the string "Pad" (also true for "P", "Pa",
        # "ad", ...). An exact comparison is what was intended.
        is_pad_into_conv = (
            value.node.op == "Pad"
            and self.node_name_mapping[value.output[0]].node.op == "Conv2D"
            and self._find_relu_node(value.node)
        )
        if is_pad_into_conv:
            paddings_tensor = tensor_util.MakeNdarray(
                self.node_name_mapping[value.node.input[1]].node.attr["value"].tensor
            ).flatten()
            # NOTE(review): non-zero paddings keep the Pad node while all-zero
            # paddings are folded -- this looks inverted relative to the usual
            # pad-folding intent; preserved as-is, confirm upstream.
            if any(paddings_tensor):
                new_node = node_def_pb2.NodeDef()
                new_node.CopyFrom(value.node)
                self.add_output_graph_node(new_node)
            else:
                # Bypass the Pad: rewire the Conv2D input and record the
                # padding values on the Conv2D node itself.
                self.node_name_mapping[value.output[0]].node.input[0] = value.node.input[0]
                helper.set_attr_int_list(
                    self.node_name_mapping[value.output[0]].node,
                    "padding_list",
                    paddings_tensor,
                )
        else:
            # Not a fusible Pad: copy the node through untouched.
            new_node = node_def_pb2.NodeDef()
            new_node.CopyFrom(value.node)
            self.add_output_graph_node(new_node)
mlperf/training_results_v0.7
[ 11, 25, 11, 1, 1606268455 ]
def __init__(self, input_tensor, dilation_factor):
    # Every spatial dimension must divide evenly by the dilation factor,
    # otherwise space-to-batch/batch-to-space cannot round-trip the tensor.
    assert (layer_util.check_spatial_dims(
        input_tensor, lambda x: x % dilation_factor == 0))
    self._tensor = input_tensor
    self.dilation_factor = dilation_factor
    # parameters to transform input tensor
    self.spatial_rank = layer_util.infer_spatial_rank(self._tensor)
    self.zero_paddings = [[0, 0]] * self.spatial_rank  # no padding on any spatial dim
    self.block_shape = [dilation_factor] * self.spatial_rank
NifTK/NiftyNet
[ 1325, 408, 1325, 103, 1504079743 ]
def __exit__(self, *args): if self.dilation_factor > 1: self._tensor = tf.batch_to_space_nd(self._tensor, self.block_shape, self.zero_paddings, name='de-dilate')
NifTK/NiftyNet
[ 1325, 408, 1325, 103, 1504079743 ]
def tensor(self):
    # Read-only accessor for the current (possibly dilated) tensor.
    return self._tensor
NifTK/NiftyNet
[ 1325, 408, 1325, 103, 1504079743 ]
def emnist_classes():
    """Return the 62 EMNIST-byclass labels: '0'-'9', then 'A'-'Z', then 'a'-'z'."""
    digits = [str(d) for d in range(10)]
    uppercase = [chr(ord('A') + k) for k in range(26)]
    lowercase = [chr(ord('a') + k) for k in range(26)]
    return digits + uppercase + lowercase
e2crawfo/dps
[ 1, 2, 1, 3, 1491848389 ]
def _validate_emnist(path): if not os.path.isdir(path): return False return set(os.listdir(path)) == set(emnist_filenames)
e2crawfo/dps
[ 1, 2, 1, 3, 1491848389 ]
def _emnist_load_helper(path_img, path_lbl): with gzip.open(path_lbl, 'rb') as file: magic, size = struct.unpack(">II", file.read(8)) if magic != 2049: raise ValueError('Magic number mismatch, expected 2049,' 'got {}'.format(magic)) labels = array("B", file.read()) with gzip.open(path_img, 'rb') as file: magic, size, rows, cols = struct.unpack(">IIII", file.read(16)) if magic != 2051: raise ValueError('Magic number mismatch, expected 2051,' 'got {}'.format(magic)) image_data = array("B", file.read()) images = np.zeros((size, rows * cols), dtype=np.uint8) for i in range(size): images[i][:] = image_data[i * rows * cols:(i + 1) * rows * cols] return np.array(images, dtype=np.uint8), np.array(labels, dtype=np.uint8)
e2crawfo/dps
[ 1, 2, 1, 3, 1491848389 ]
def maybe_download_emnist(data_dir, quiet=0, shape=None):
    """
    Download emnist data if it hasn't already been downloaded. Do some
    post-processing to put it in a more useful format. End result is a
    directory which contains a separate pklz file for each emnist class.
    (NOTE(review): the original docstring said `emnist-byclass`, but the code
    below creates a directory named `emnist` -- confirm which is correct.)

    Pixel values of stored images are uint8 values up to 255. Images for each
    class are put into a numpy array with shape (n_images_in_class, 28, 28).
    This numpy array is pickled and stored in a zip file with name
    <class char>.pklz.

    Parameters
    ----------
    data_dir: str
        Directory where files should be stored.
    quiet: int
        Verbosity control: <=0 prints each class char plus a rendered sample,
        ==1 prints only the class char, >=2 prints nothing per class.
    shape: tuple or None
        If given, additionally convert the stored images to this shape.

    """
    emnist_dir = os.path.join(data_dir, 'emnist')
    if _validate_emnist(emnist_dir):
        print("EMNIST data seems to be present already.")
    else:
        print("EMNIST data not found, downloading and processing...")
        # Remove any partial/corrupt previous attempt before re-downloading.
        try:
            shutil.rmtree(emnist_dir)
        except FileNotFoundError:
            pass
        raw_dir = _download_emnist(data_dir)
        with cd(raw_dir):
            # Train and test splits are loaded separately then concatenated.
            images, labels = _emnist_load_helper(emnist_gz_names[0], emnist_gz_names[1])
            images1, labels1 = _emnist_load_helper(emnist_gz_names[2], emnist_gz_names[3])
        with cd(data_dir):
            os.makedirs('emnist', exist_ok=False)
            print("Processing...")
            with cd('emnist'):
                x = np.concatenate((images, images1), 0)
                y = np.concatenate((labels, labels1), 0)
                # Give images the right orientation so that plt.imshow(x[0]) just works.
                x = np.moveaxis(x.reshape(-1, 28, 28), 1, 2)
                for i in sorted(set(y.flatten())):
                    keep = y == i
                    x_i = x[keep.flatten(), :]
                    # Label layout: 0-9 digits, 10-35 uppercase, 36-61 lowercase.
                    if i >= 36:
                        char = chr(i-36+ord('a'))
                    elif i >= 10:
                        char = chr(i-10+ord('A'))
                    else:
                        char = str(i)
                    if quiet >= 2:
                        pass
                    elif quiet == 1:
                        print(char)
                    elif quiet <= 0:
                        print(char)
                        print(image_to_string(x_i[0, ...]))
                    file_i = char + '.pklz'
                    with gzip.open(file_i, 'wb') as f:
                        dill.dump(x_i, f, protocol=dill.HIGHEST_PROTOCOL)
    if shape is not None:
        maybe_convert_emnist_shape(data_dir, shape)
e2crawfo/dps
[ 1, 2, 1, 3, 1491848389 ]
def __init__(self):
    # No state of its own; chain to the base event-handler initialiser.
    super(ConnectionEventHandler, self).__init__()
kgiusti/pyngus
[ 5, 3, 5, 7, 1386944542 ]
def connection_remote_closed(self, connection, pn_condition):
    """Peer has closed its end of the connection."""
    LOG.debug("connection_remote_closed condition=%s", pn_condition)
    # Close our half as well to complete the AMQP close handshake.
    connection.close()
kgiusti/pyngus
[ 5, 3, 5, 7, 1386944542 ]
def __init__(self, count):
    self._count = count               # number of messages to send (presumably; confirm caller semantics)
    self._msg = Message()             # single message instance reused for every send
    self.calls = 0                    # counter, starts at zero
    self.total_ack_latency = 0.0      # accumulated latency, seconds
    self.stop_time = None             # timestamps filled in during the run
    self.start_time = None
kgiusti/pyngus
[ 5, 3, 5, 7, 1386944542 ]
def _send_message(self, link):
    """Stamp the message body with the send time and send it on `link`.

    The timestamp lets the receiver compute transit latency; `self` is passed
    as the send handle/callback context.
    """
    now = time.time()
    self._msg.body = {'tx-timestamp': now}
    self._last_send = now
    link.send(self._msg, self)
kgiusti/pyngus
[ 5, 3, 5, 7, 1386944542 ]
def sender_remote_closed(self, sender_link, pn_condition):
    """Peer closed its end of the sender link; close ours to finish teardown."""
    LOG.debug("Sender peer_closed condition=%s", pn_condition)
    sender_link.close()
kgiusti/pyngus
[ 5, 3, 5, 7, 1386944542 ]
def __init__(self, count, capacity):
    self._count = count               # messages expected; falsy means no fixed limit (see message_received)
    self._capacity = capacity         # receive-credit target for the link
    self._msg = Message()
    self.receives = 0                 # messages received so far
    self.tx_total_latency = 0.0       # accumulated sender-to-receiver latency, seconds
kgiusti/pyngus
[ 5, 3, 5, 7, 1386944542 ]
def receiver_remote_closed(self, receiver_link, pn_condition):
    """Peer has closed its end of the link."""
    LOG.debug("receiver_remote_closed condition=%s", pn_condition)
    # Mirror the close on our side to complete the link teardown.
    receiver_link.close()
kgiusti/pyngus
[ 5, 3, 5, 7, 1386944542 ]
def message_received(self, receiver, message, handle):
    """Accept one message, accumulate its transit latency, and manage credit.

    When a fixed message count is configured, the link is closed after the
    final message; otherwise receive credit is topped back up whenever it
    drops below half of the configured capacity.
    """
    arrival = time.time()
    receiver.message_accepted(handle)
    self.tx_total_latency += arrival - message.body['tx-timestamp']
    self.receives += 1
    if self._count:
        self._count -= 1
        if self._count == 0:
            receiver.close()
            return
    remaining_credit = receiver.capacity
    if remaining_credit < (self._capacity / 2):
        receiver.add_capacity(self._capacity - remaining_credit)
kgiusti/pyngus
[ 5, 3, 5, 7, 1386944542 ]
def zookeeper_server(configure_security):
    """Install a ZooKeeper service, yield its service options, then tear it down.

    Generator style (presumably consumed as a pytest fixture); the finally
    block guarantees uninstall/cleanup even on test failure.
    """
    service_options = {
        "service": {"name": config.ZOOKEEPER_SERVICE_NAME, "virtual_network_enabled": True}
    }
    zk_account = "test-zookeeper-service-account"
    zk_secret = "test-zookeeper-secret"
    try:
        # Start from a clean slate in case a previous run left the service behind.
        sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
        if sdk_utils.is_strict_mode():
            # Strict mode requires a dedicated service account plus secret.
            service_options = sdk_utils.merge_dictionaries(
                {"service": {"service_account": zk_account, "service_account_secret": zk_secret}},
                service_options,
            )
            sdk_security.setup_security(
                config.ZOOKEEPER_SERVICE_NAME,
                service_account=zk_account,
                service_account_secret=zk_secret,
            )
        sdk_install.install(
            config.ZOOKEEPER_PACKAGE_NAME,
            config.ZOOKEEPER_SERVICE_NAME,
            config.ZOOKEEPER_TASK_COUNT,
            package_version=config.ZOOKEEPER_PACKAGE_VERSION,
            additional_options=service_options,
            timeout_seconds=30 * 60,
            insert_strict_options=False,
        )
        # Hand the effective options (plus package name) to the consumer.
        yield {**service_options, **{"package_name": config.ZOOKEEPER_PACKAGE_NAME}}
    finally:
        sdk_install.uninstall(config.ZOOKEEPER_PACKAGE_NAME, config.ZOOKEEPER_SERVICE_NAME)
        if sdk_utils.is_strict_mode():
            sdk_security.delete_service_account(
                service_account_name=zk_account, service_account_secret=zk_secret
            )
mesosphere/dcos-kafka-service
[ 31, 34, 31, 1, 1453069144 ]
def kafka_server(zookeeper_server):
    """Install a Kafka service wired to the given ZooKeeper, yield it, tear down.

    Generator style (presumably consumed as a pytest fixture depending on
    zookeeper_server).
    """
    try:
        # Get the zookeeper DNS values
        zookeeper_dns = sdk_cmd.svc_cli(
            zookeeper_server["package_name"],
            zookeeper_server["service"]["name"],
            "endpoint clientport",
            parse_json=True,
        )[1]["dns"]
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options={"kafka": {"kafka_zookeeper_uri": ",".join(zookeeper_dns)}},
        )
        # wait for brokers to finish registering before starting tests
        test_utils.broker_count_check(config.DEFAULT_BROKER_COUNT, service_name=config.SERVICE_NAME)
        yield {"package_name": config.PACKAGE_NAME, "service": {"name": config.SERVICE_NAME}}
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
mesosphere/dcos-kafka-service
[ 31, 34, 31, 1, 1453069144 ]
def fetch_topic(kafka_server: dict):
    """Return the topic list reported by the Kafka service CLI."""
    # svc_cli returns a triple; only the parsed JSON payload is needed here.
    _, topic_list, _ = sdk_cmd.svc_cli(
        config.PACKAGE_NAME, kafka_server["service"]["name"], "topic list", parse_json=True
    )
    return topic_list
mesosphere/dcos-kafka-service
[ 31, 34, 31, 1, 1453069144 ]
def __init__(self, **kwargs):
    # No extra state; simply chain keyword arguments to the base namespace.
    super(OrderNamespace, self).__init__(**kwargs)
henrysher/opslib
[ 2, 2, 2, 1, 1385896350 ]
def add_arguments(group, args):
    """Add Arguments to CLI.

    Args:
        group: argparse parser or argument group to add to.
        args: Mapping of argument name -> argparse keyword options.

    Returns:
        The same group, with one argument added per entry in ``args``.
    """
    # .items() instead of the Python-2-only .iteritems(): behaves the same on
    # Python 2 and also works on Python 3.
    for name, options in args.items():
        # Translate symbolic type names into callables via the type_map table.
        if 'type' in options and options['type'] in type_map:
            options['type'] = type_map[options['type']]
        # An explicitly empty help string hides the argument from --help.
        if 'help' in options and not options['help']:
            options['help'] = argparse.SUPPRESS
        changed = xform_name(name, "-")
        if name != changed:
            # Transformed names become long options ("-" prefix added).
            name = "-".join(["", changed])
        group.add_argument(name, **options)
    return group
henrysher/opslib
[ 2, 2, 2, 1, 1385896350 ]
def parse_args(args):
    """Create the Command Line Interface.

    :type args: dict
    :param args: describes the command structure for the CLI
    """
    parser = argparse.ArgumentParser(description=args.get('Description', ''))
    # .items() instead of the Python-2-only .iteritems() for 2/3 compatibility.
    for section, spec in args.items():
        if section == 'Subparsers':
            parser = recursive_parser(parser, spec)
        elif section == 'Arguments':
            parser = add_arguments(parser, spec)
    autocomplete(parser)
    # Parse into an OrderNamespace so argument ordering is preserved.
    return parser.parse_args(None, OrderNamespace())
henrysher/opslib
[ 2, 2, 2, 1, 1385896350 ]
def cancel_reserved_instances_listing(self):
    # Only raise NotImplementedError for real requests; is_not_dryrun()
    # handles the dry-run signalling itself.
    if self.is_not_dryrun("CancelReservedInstances"):
        raise NotImplementedError(
            "ReservedInstances.cancel_reserved_instances_listing is not yet implemented"
        )
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def describe_reserved_instances(self):
    # Stub endpoint: unconditionally reports the API as unimplemented.
    raise NotImplementedError(
        "ReservedInstances.describe_reserved_instances is not yet implemented"
    )
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def describe_reserved_instances_offerings(self):
    # Stub endpoint: unconditionally reports the API as unimplemented.
    raise NotImplementedError(
        "ReservedInstances.describe_reserved_instances_offerings is not yet implemented"
    )
spulec/moto
[ 6700, 1808, 6700, 82, 1361221859 ]
def __init__(self, notification):
    """Unpack a raw notification mapping into private attributes.

    Args:
        notification: Mapping keyed by the *_KEY class constants.
    """
    self._device_id = notification[self.DEVICE_ID_KEY]
    self._id = notification[self.ID_KEY]
    self._notification = notification[self.NOTIFICATION_KEY]
    self._parameters = notification[self.PARAMETERS_KEY]
    self._timestamp = notification[self.TIMESTAMP_KEY]
devicehive/devicehive-python
[ 30, 22, 30, 3, 1354125752 ]
def device_id(self):
    # Read-only accessor for the originating device's identifier.
    return self._device_id
devicehive/devicehive-python
[ 30, 22, 30, 3, 1354125752 ]
def id(self):
    # Read-only accessor for the notification's identifier.
    return self._id
devicehive/devicehive-python
[ 30, 22, 30, 3, 1354125752 ]
def notification(self):
    # Read-only accessor for the notification payload/name field.
    return self._notification
devicehive/devicehive-python
[ 30, 22, 30, 3, 1354125752 ]
def parameters(self):
    # Read-only accessor for the notification's parameters.
    return self._parameters
devicehive/devicehive-python
[ 30, 22, 30, 3, 1354125752 ]
def __init__(self, col_name, col_type): self.name = col_name self.type = col_type
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def setUp(self):
    # Shared constructor kwargs for the HiveStatsCollectionOperator under test.
    self.kwargs = dict(
        table='table',
        partition=dict(col='col', value='value'),
        metastore_conn_id='metastore_conn_id',
        presto_conn_id='presto_conn_id',
        mysql_conn_id='mysql_conn_id',
        task_id='test_hive_stats_collection_operator',
    )
    super().setUp()
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_get_default_exprs_excluded_cols(self):
    """An excluded column must yield no default expressions at all."""
    col = 'excluded_col'
    self.kwargs.update(dict(excluded_columns=[col]))
    default_exprs = HiveStatsCollectionOperator(**self.kwargs).get_default_exprs(col, None)
    assert default_exprs == {}
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_get_default_exprs_boolean(self):
    """Boolean columns get true/false/non-null counting SQL expressions."""
    col = 'col'
    col_type = 'boolean'
    default_exprs = HiveStatsCollectionOperator(**self.kwargs).get_default_exprs(col, col_type)
    assert default_exprs == {
        (col, 'false'): f'SUM(CASE WHEN NOT {col} THEN 1 ELSE 0 END)',
        (col, 'non_null'): f'COUNT({col})',
        (col, 'true'): f'SUM(CASE WHEN {col} THEN 1 ELSE 0 END)',
    }
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_execute(self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook, mock_json_dumps):
    """Happy path: execute() queries Presto stats and inserts them into MySQL."""
    mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols = [fake_col]
    # No previous stats rows recorded for this partition.
    mock_mysql_hook.return_value.get_records.return_value = False
    hive_stats_collection_operator = HiveStatsCollectionOperator(**self.kwargs)
    hive_stats_collection_operator.execute(context={})
    # Each hook must be constructed exactly once with the operator's conn ids.
    mock_hive_metastore_hook.assert_called_once_with(
        metastore_conn_id=hive_stats_collection_operator.metastore_conn_id
    )
    mock_hive_metastore_hook.return_value.get_table.assert_called_once_with(
        table_name=hive_stats_collection_operator.table
    )
    mock_presto_hook.assert_called_once_with(presto_conn_id=hive_stats_collection_operator.presto_conn_id)
    mock_mysql_hook.assert_called_once_with(hive_stats_collection_operator.mysql_conn_id)
    mock_json_dumps.assert_called_once_with(hive_stats_collection_operator.partition, sort_keys=True)
    # Rebuild the expression map the operator is expected to have used.
    field_types = {
        col.name: col.type for col in mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols
    }
    exprs = {('', 'count'): 'COUNT(*)'}
    for col, col_type in list(field_types.items()):
        exprs.update(hive_stats_collection_operator.get_default_exprs(col, col_type))
    exprs = OrderedDict(exprs)
    # Expected MySQL rows: (ds, dttm, table, partition_json, col, metric, value).
    rows = [
        (
            hive_stats_collection_operator.ds,
            hive_stats_collection_operator.dttm,
            hive_stats_collection_operator.table,
            mock_json_dumps.return_value,
        )
        + (r[0][0], r[0][1], r[1])
        for r in zip(exprs, mock_presto_hook.return_value.get_first.return_value)
    ]
    mock_mysql_hook.return_value.insert_rows.assert_called_once_with(
        table='hive_stats',
        rows=rows,
        target_fields=[
            'ds',
            'dttm',
            'table_name',
            'partition_repr',
            'col',
            'metric',
            'value',
        ],
    )
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_execute_with_assignment_func( self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook, mock_json_dumps
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def assignment_func(col, _):
    """Stand-in assignment function yielding one custom expression per column."""
    return {(col, 'test'): 'TEST({})'.format(col)}
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_execute_with_assignment_func_no_return_value( self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook, mock_json_dumps
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def assignment_func(_, __):
    """Stand-in assignment function that produces no expressions (returns None)."""
    return None
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_execute_no_query_results(self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook):
    """When Presto returns no rows, execute() must raise AirflowException."""
    mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols = [fake_col]
    mock_mysql_hook.return_value.get_records.return_value = False
    # Simulate an empty Presto result set.
    mock_presto_hook.return_value.get_first.return_value = None
    with pytest.raises(AirflowException):
        HiveStatsCollectionOperator(**self.kwargs).execute(context={})
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_execute_delete_previous_runs_rows( self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook, mock_json_dumps
apache/incubator-airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def main():
    """Set the power state of every overcloud node via ipmitool."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--power", required=True, default=None,
                        choices=["on", "off", "reset", "cycle"],
                        help="Control power state of all overcloud nodes")
    args = parser.parse_args()
    # Undercloud credentials are needed to talk to Ironic.
    os_auth_url, os_tenant_name, os_username, os_password, \
        os_user_domain_name, os_project_domain_name = \
        CredentialHelper.get_undercloud_creds()
    kwargs = {'os_username': os_username,
              'os_password': os_password,
              'os_auth_url': os_auth_url,
              'os_tenant_name': os_tenant_name,
              'os_user_domain_name': os_user_domain_name,
              'os_project_domain_name': os_project_domain_name}
    ironic = client.get_client(1, **kwargs)
    for node in ironic.node.list(detail=True):
        ip, username, password = \
            CredentialHelper.get_drac_creds_from_node(node)
        # SECURITY NOTE(review): the IPMI password is interpolated into a shell
        # command AND printed to stdout below; a password containing a quote
        # also breaks the command. Consider subprocess.run([...]) with a list
        # argument and omitting -P from the printed form.
        cmd = "ipmitool -H {} -I lanplus -U {} -P '{}' chassis power {}". \
            format(ip, username, password, args.power)
        print(cmd)
        os.system(cmd)
dsp-jetpack/JetPack
[ 28, 63, 28, 1, 1509550389 ]
def __init__(self): """Initialise the object.""" # Setting Host Test Logger instance ht_loggers = { "BasePlugin": HtrunLogger("PLGN"), "CopyMethod": HtrunLogger("COPY"), "ResetMethod": HtrunLogger("REST"), } self.plugin_logger = ht_loggers.get(self.type, ht_loggers["BasePlugin"])
ARMmbed/greentea
[ 28, 45, 28, 2, 1418315948 ]
def setup(self, *args, **kwargs):
    """Configure plugin.

    This function should be called before plugin execute() method is used.

    Returns:
        Always False in this base implementation.
    """
    return False
ARMmbed/greentea
[ 28, 45, 28, 2, 1418315948 ]
def is_os_supported(self, os_name=None):
    """Check if the OS is supported by this plugin.

    In some cases a plugin will not work under a particular OS, usually
    because the command line tool used to implement the plugin functionality
    is not available there.

    Args:
        os_name: String describing OS. See self.host_os_support() and
            self.host_os_info().

    Returns:
        True if plugin works under certain OS.
    """
    return True
ARMmbed/greentea
[ 28, 45, 28, 2, 1418315948 ]
def print_plugin_error(self, text):
    """Print error messages to the console.

    Args:
        text: Text to print.

    Returns:
        Always False.
    """
    self.plugin_logger.prn_err(text)
    return False
ARMmbed/greentea
[ 28, 45, 28, 2, 1418315948 ]
def print_plugin_char(self, char):
    """Print a char to stdout, flushing immediately.

    Returns:
        Always True.
    """
    stdout.write(char)
    stdout.flush()
    return True
ARMmbed/greentea
[ 28, 45, 28, 2, 1418315948 ]
def check_serial_port_ready(self, serial_port, target_id=None, timeout=60): """Check and update serial port name information for DUT. If no target_id is specified return the old serial port name. Args: serial_port: Current serial port name. target_id: Target ID of a device under test. timeout: Serial port pooling timeout in seconds. Returns: Tuple with result (always True) and serial port read from mbed-ls. """ # If serial port changed (check using mbed-ls), use new serial port new_serial_port = None if target_id: # Sometimes OSes take a long time to mount devices (up to one minute). # Current pooling time: 120x 500ms = 1 minute self.print_plugin_info( "Waiting up to %d sec for '%s' serial port (current is '%s')..." % (timeout, target_id, serial_port) ) timeout_step = 0.5 timeout = int(timeout / timeout_step) for i in range(timeout): # mbed_lstools.main.create() should be done inside the loop. Otherwise # it will loop on same data. mbeds = create() mbed_list = mbeds.list_mbeds() # list of mbeds present # get first item in list with a matching target_id, if present mbed_target = next( (x for x in mbed_list if x["target_id"] == target_id), None ) if mbed_target is not None: # Only assign if serial port is present and known (not None) if ( "serial_port" in mbed_target and mbed_target["serial_port"] is not None ): new_serial_port = mbed_target["serial_port"] if new_serial_port != serial_port: # Serial port changed, update to new serial port self.print_plugin_info( "Serial port for tid='%s' changed from '%s' to '%s'..." % (target_id, serial_port, new_serial_port) ) break sleep(timeout_step) else: new_serial_port = serial_port return new_serial_port
ARMmbed/greentea
[ 28, 45, 28, 2, 1418315948 ]
def run_command(self, cmd, shell=True, stdin=None):
    """Run a shell command as a subprocess.

    Prints 'cmd' return code if execution failed.

    Args:
        cmd: Command to execute.
        shell: True if shell command should be executed (eg. ls, ps).
        stdin: A custom stdin for the process running the command
            (defaults to None).

    Returns:
        True if command successfully executed, otherwise False.
    """
    try:
        ret = call(cmd, shell=shell, stdin=stdin)
    except Exception as e:
        # Bug fix: the original referenced `ret` in this handler, which is
        # unbound when call() itself raises, so any launch failure became
        # a NameError. Report the exception without assuming a return code.
        self.print_plugin_error("Command: %s" % cmd)
        self.print_plugin_error(str(e))
        return False
    if ret:
        # Non-zero exit status: report it and signal failure.
        self.print_plugin_error("[ret=%d] Command: %s" % (int(ret), cmd))
        return False
    return True
ARMmbed/greentea
[ 28, 45, 28, 2, 1418315948 ]
def isScalar(v, val=None):
    """Check if v is scalar, i.e. int, float or complex. Optional compare with val.

    Examples
    --------
    >>> import pygimli as pg
    >>> print(pg.isScalar(0))
    True
    >>> print(pg.isScalar(1.0))
    True
    >>> print(pg.isScalar(1.0, 0.0))
    False
    >>> print(pg.isScalar(1.0, 1.0))
    True
    >>> print(pg.isScalar(1+1j))
    True
    >>> print(pg.isScalar([0.0, 1.0]))
    False
    """
    scalar_types = (int, float, complex, np.complex128)
    if not isinstance(v, scalar_types):
        return False
    if val is None:
        return True
    # maybe add some tolerance check
    return v == val
gimli-org/gimli
[ 257, 109, 257, 13, 1378329047 ]
def isComplex(vals):
    """Check numpy or pg.Vector if have complex data type"""
    if isScalar(vals):
        # A scalar is complex only if it carries a complex type.
        return isinstance(vals, (np.complex128, complex))
    if isArray(vals):
        # An array's complexity is decided by its first element.
        return isComplex(vals[0])
    return False
gimli-org/gimli
[ 257, 109, 257, 13, 1378329047 ]
def isR3Array(v, N=None):
    """Check if v is an array of size(N,3), a R3Vector or a list of pg.Pos.

    Examples
    --------
    >>> import pygimli as pg
    >>> print(pg.isR3Array([[0.0, 0.0, 1.], [1.0, 0.0, 1.]]))
    True
    >>> print(pg.isR3Array(np.ones((33, 3)), N=33))
    True
    >>> print(pg.isR3Array(pg.meshtools.createGrid(2,2).positions()))
    True
    """
    if N is not None:
        # Same structural test plus an explicit length requirement.
        return isR3Array(v) and len(v) == N
    if isinstance(v, R3Vector):
        return True
    if isinstance(v, list):
        return isPos(v[0])
    # Non-list iterables (e.g. numpy arrays) must be 2D with Pos-like rows.
    return hasattr(v, '__iter__') and not isinstance(v, (str)) and \
        v.ndim == 2 and isPos(v[0])
gimli-org/gimli
[ 257, 109, 257, 13, 1378329047 ]
def construct_params_from_message(message: str) -> dict:
    """Wrap a comment message under the payload key Box expects.

    Messages containing an @-mention marker ('@[') are sent as
    'tagged_message'; plain text is sent as 'message'.
    """
    if '@[' in message:
        return {'tagged_message': message}
    return {'message': message}
box/box-python-sdk
[ 375, 216, 375, 21, 1423182655 ]
def reply(self, message: str) -> 'Comment':
    """
    Add a reply to the comment.

    :param message: The content of the reply comment.
    """
    data = self.construct_params_from_message(message)
    # Anchor the reply to this comment object.
    data['item'] = {'type': 'comment', 'id': self.object_id}
    payload = json.dumps(data)
    box_response = self._session.post(self.get_type_url(), data=payload)
    return self.translator.translate(
        session=self._session,
        response_object=box_response.json(),
    )
box/box-python-sdk
[ 375, 216, 375, 21, 1423182655 ]
def __init__(self, response):
    """Construct an API response from a Requests response

    :param response: a ``requests`` library response
    """
    super(APIResponse, self).__init__()
    self.status = response.status_code
    self.headers = response.headers
    self.content = response.content
    # Only parse a body when there is content to decode.
    if self.content:
        self.body = jsonutils.loads(self.content)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def __init__(self, message=None, response=None):
    self.response = response
    # Fall back to a generic description when no message was supplied.
    message = message or 'Unspecified error'
    if response:
        # Append the HTTP status and raw body to aid debugging.
        message = ('%(message)s\nStatus Code: %(_status)s\n'
                   'Body: %(_body)s' %
                   {'message': message,
                    '_status': response.status_code,
                    '_body': response.content})
    super(OpenStackApiException, self).__init__(message)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def __init__(self, response=None, message=None):
    # Default message when callers supply none.
    super(OpenStackApiAuthenticationException, self).__init__(
        message or "Authentication error", response)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def __init__(self, response=None, message=None):
    # Default message when callers supply none.
    super(OpenStackApiAuthorizationException, self).__init__(
        message or "Authorization error", response)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def __init__(self, response=None, message=None):
    # Default message when callers supply none.
    super(OpenStackApiNotFoundException, self).__init__(
        message or "Item not found", response)
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def __init__(self, auth_user, auth_key, auth_uri, project_id=None):
    super(TestOpenStackClient, self).__init__()
    self.auth_result = None
    self.auth_user = auth_user
    self.auth_key = auth_key
    self.auth_uri = auth_uri
    # Fall back to the canned fake project id when none is supplied.
    self.project_id = ("6f70656e737461636b20342065766572"
                       if project_id is None else project_id)
    self.microversion = None
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def _authenticate(self):
    """Authenticate against the auth URI, caching the resulting headers."""
    # Reuse a previous successful authentication.
    if self.auth_result:
        return self.auth_result

    headers = {'X-Auth-User': self.auth_user,
               'X-Auth-Key': self.auth_key,
               'X-Auth-Project-Id': self.project_id}
    auth_uri = self.auth_uri
    response = self.request(auth_uri, headers=headers)

    http_status = response.status_code
    LOG.debug("%(auth_uri)s => code %(http_status)s",
              {'auth_uri': auth_uri, 'http_status': http_status})

    if http_status == 401:
        raise OpenStackApiAuthenticationException(response=response)

    self.auth_result = response.headers
    return self.auth_result
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def _decode_json(self, response):
    # Wrap a raw requests response in an APIResponse, parsing any JSON body.
    # NOTE(review): this passes `status=` as a keyword, but the
    # APIResponse.__init__ visible in this file takes a single positional
    # `response` argument — confirm which APIResponse definition is
    # actually in scope here.
    resp = APIResponse(status=response.status_code)
    if response.content:
        resp.body = jsonutils.loads(response.content)
    return resp
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def api_post(self, relative_uri, body, **kwargs):
    """POST `body` as JSON to `relative_uri` and wrap the reply."""
    kwargs['method'] = 'POST'
    if body:
        # Serialize the payload and mark it as JSON.
        kwargs.setdefault('headers', {})['Content-Type'] = 'application/json'
        kwargs['body'] = jsonutils.dumps(body)
    kwargs.setdefault('check_response_status', [200, 202])
    return APIResponse(self.api_request(relative_uri, **kwargs))
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def api_delete(self, relative_uri, **kwargs):
    """Issue a DELETE and wrap the reply; 200/202/204 are acceptable."""
    kwargs.update(method='DELETE')
    kwargs.setdefault('check_response_status', [200, 202, 204])
    return APIResponse(self.api_request(relative_uri, **kwargs))
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def get_server(self, server_id):
    """Fetch a single server's representation by id."""
    response = self.api_get('/servers/%s' % server_id)
    return response.body['server']
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def post_server(self, server):
    """Create a server.

    Returns the whole response when a multi-create reservation id is
    present, otherwise just the 'server' dict.
    """
    response = self.api_post('/servers', server).body
    return response if 'reservation_id' in response else response['server']
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def post_server_action(self, server_id, data):
    """POST an action payload to a server's /action endpoint."""
    url = '/servers/%s/action' % server_id
    return self.api_post(url, data).body
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def get_image(self, image_id):
    """Fetch a single image's representation by id."""
    response = self.api_get('/images/%s' % image_id)
    return response.body['image']
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]
def post_image(self, image):
    """Create an image and return its representation."""
    response = self.api_post('/images', image)
    return response.body['image']
cernops/nova
[ 5, 2, 5, 2, 1418819480 ]