query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Zoom in on a specific station on a map
def station_on_map(request, station_number): data_stations = stations_with_data() station_number = int(station_number) down, problem, up = status_lists() station = get_object_or_404(Station, number=station_number) center = station.latest_location() if center['latitude'] is None and center['longitude'] is None: raise Http404 subclusters = [] for subcluster in Cluster.objects.all(): stations = [] for station in (Station.objects.select_related('cluster__parent', 'cluster__country') .filter(cluster=subcluster, pc__is_test=False)): link = station in data_stations status = get_station_status(station.number, down, problem, up) location = station.latest_location() station_data = {'number': station.number, 'name': station.name, 'cluster': station.cluster, 'link': link, 'status': status} station_data.update(location) stations.append(station_data) subclusters.append({'name': subcluster.name, 'stations': stations}) return render(request, 'map.html', {'subclusters': subclusters, 'center': center})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def zoom_to(self):\n # Will seek user feedback. QGIS will\n # Pan to first layer loaded", "def zoomIn(self, x, y):\n\n # set view state\n (map_x, map_y) = self.getMapCoordsFromView((x,y))\n self.view_offset_x = map_x*2 - self.view_width/2\n self.view_offset_y = map_y*2 -...
[ "0.62788576", "0.6039453", "0.58890414", "0.58117414", "0.5672202", "0.5654756", "0.56279266", "0.55998397", "0.55568296", "0.55527616", "0.5512002", "0.54508984", "0.5449918", "0.5449918", "0.5449918", "0.5449918", "0.54433745", "0.54276776", "0.5427263", "0.53882915", "0.53...
0.6076124
1
Show all stations from a subcluster on a map
def stations_on_map(request, country=None, cluster=None, subcluster=None): data_stations = stations_with_data() down, problem, up = status_lists() if country: get_object_or_404(Country, name=country) if cluster: get_object_or_404(Cluster, name=cluster, parent=None, country__name=country) if subcluster: if cluster == subcluster: get_object_or_404(Cluster, name=subcluster, parent=None) else: get_object_or_404(Cluster, name=subcluster, parent__name=cluster) focus = (Cluster.objects.filter(name=subcluster) .values_list('name', flat=True)) else: focus = [Cluster.objects.get(name=cluster, parent=None).name] focus.extend(Cluster.objects.filter(parent__name=cluster) .values_list('name', flat=True)) else: focus = (Cluster.objects.filter(country__name=country) .values_list('name', flat=True)) else: focus = Cluster.objects.all().values_list('name', flat=True) subclusters = [] for subcluster in Cluster.objects.all(): stations = [] for station in (Station.objects.select_related('cluster__parent', 'cluster__country') .filter(cluster=subcluster, pc__is_test=False)): link = station in data_stations status = get_station_status(station.number, down, problem, up) location = station.latest_location() station_data = {'number': station.number, 'name': station.name, 'cluster': station.cluster, 'link': link, 'status': status} station_data.update(location) stations.append(station_data) subclusters.append({'name': subcluster.name, 'stations': stations}) return render(request, 'map.html', {'subclusters': subclusters, 'focus': focus})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def station_on_map(request, station_number):\n\n data_stations = stations_with_data()\n station_number = int(station_number)\n down, problem, up = status_lists()\n\n station = get_object_or_404(Station, number=station_number)\n center = station.latest_location()\n if center['latitude'] is None an...
[ "0.67217124", "0.6456021", "0.6094351", "0.59413135", "0.57239014", "0.56707394", "0.5667912", "0.5660193", "0.56526005", "0.56276786", "0.5618511", "0.56184906", "0.5581086", "0.55698586", "0.5552247", "0.5542267", "0.5477948", "0.5476224", "0.54496205", "0.5442602", "0.5442...
0.7505736
0
Helper shortcut for creating subcommands. Accepts arguments for `add_subparsers`, creating a new subparser and returning a partial function wrapping `add_subcommand` for the new subparser. If the `dest` argument isn't specified, it defaults to `'subcmd'`. Example cmd_foo = CommandParser('foo', 'Does foo stuff') foo_adder = cmd_foo.make_adder(metavar='OPERATION', required=True) bar_subcmd = foo_adder('bar', description='Does bar stuff to foo')
def make_adder(self, *args, **kwargs): kwargs.setdefault("dest", "subcmd") subp = self.add_subparsers(*args, **kwargs) return partial(self.add_subcommand, subp)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subcommand(self, func=None, **subcommand_options):\n\n def decorator(subcommand_func):\n subcommand_sig = inspect.signature(subcommand_func)\n\n @functools.wraps(subcommand_func)\n def wrapped(args):\n\n final_args = []\n final_kwargs = {}\n...
[ "0.7137559", "0.6991422", "0.69432056", "0.6767474", "0.66648597", "0.6645159", "0.6587919", "0.6517023", "0.6504958", "0.6392979", "0.6344873", "0.63118166", "0.623115", "0.62270325", "0.62064266", "0.6119958", "0.61081535", "0.60894066", "0.60719246", "0.6052931", "0.600301...
0.84089714
0
Helper method for adding subcommands. Wrapper around `add_parser` that simplifies adding subcommands to ZeroBot commands. The same string is used for both the `description` and `help` parameters of `add_parser`.
def add_subcommand( subp: _SubParsersAction, name: str, description: Optional[str] = None, **kwargs ) -> "CommandParser": desc_help = {"description": description, "help": description} return subp.add_parser(name, **desc_help, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_subcommands(cls, parser: argparse.ArgumentParser) -> None:\n if cls.SUBCOMMANDS:\n subparsers = parser.add_subparsers(title=\"subcommands\", metavar=\"\", dest='cmd')\n for subcmd_class in cls.SUBCOMMANDS:\n parsers = subcmd_class.get_args()\n subc...
[ "0.76693493", "0.7625279", "0.761014", "0.74660397", "0.7304024", "0.7300243", "0.6937958", "0.687252", "0.6824965", "0.68249416", "0.6797736", "0.6796231", "0.67945975", "0.6792037", "0.6770885", "0.67248243", "0.6711912", "0.66614723", "0.6639564", "0.66390103", "0.6635527"...
0.81267315
0
The module that this command is registered to. Will return `None` if this command has not yet been registered.
def module(self) -> Optional[Module]: return self._module
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_module(self):\n return self.module", "def module(self):\n return self._module", "def module(self):\n return self._module", "def module(self):\n return self._module", "def module(self):\n return self._module", "def module(self):\n return self._module", "...
[ "0.8003115", "0.7536935", "0.7536935", "0.7536935", "0.7536935", "0.7536935", "0.75261885", "0.7365347", "0.70169824", "0.68556315", "0.67025244", "0.66628325", "0.66449714", "0.6640801", "0.6626384", "0.652614", "0.6519318", "0.648883", "0.64871985", "0.64552623", "0.6444489...
0.74596643
7
The User that invoked the command.
def invoker(self) -> User: return self.msg.source
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def user(self):\n return self.getattr('user')", "def user(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"user\")", "def user(self) -> Optional[str]:\n _args: list[Arg] = []\n _ctx = self._select(\"user\", _args)\n return _ctx.execute_sync(Optional[str])", "def use...
[ "0.80029744", "0.78830194", "0.7869089", "0.7831611", "0.7831611", "0.7831611", "0.7831611", "0.7830656", "0.77808654", "0.77667016", "0.77667016", "0.77667016", "0.7764792", "0.77462137", "0.7722865", "0.7709639", "0.77054626", "0.77054626", "0.7655964", "0.7575146", "0.7570...
0.74655455
22
Where the command was sent from. Can be either directly from a user, or from a user within a channel.
def source(self) -> Union[User, Channel]: return self.msg.destination
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def whoami( self, mess, args):\n return mess.getFrom()", "def whoami(self, mess, args):\n return mess.getFrom().getStripped()", "def invoker(self) -> User:\n return self.msg.source", "def getFromUser(self):\n return self.fromUser", "def reply_to(self):\n return self.recei...
[ "0.7382036", "0.6963548", "0.6660693", "0.6343135", "0.62228334", "0.6178447", "0.6127587", "0.61155206", "0.6074623", "0.60027945", "0.59703183", "0.59151745", "0.5872834", "0.58546007", "0.5829677", "0.5820818", "0.5783627", "0.5776797", "0.57764834", "0.5776311", "0.577293...
0.6407649
3
The invoked subcommand name, if one was invoked. For subcommands with aliases, the name returned is always the canonical name that the aliases are associated with. For this reason, this attribute should be preferred to extracting the subcommand name from `ParsedCommand.args`.
def subcmd(self) -> Optional[str]: return self._subcmd
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def command_name(self):\n return None", "def get_commandname(self):\n for line in self.helplines:\n if \"Usage:\" in line and self.parser_type is 'optparse':\n tmp = line.split()\n return tmp[1]\n if \"usage:\" in line and self.parser_type is 'arg...
[ "0.6581749", "0.6499524", "0.6446358", "0.6352551", "0.6317428", "0.62414986", "0.6189401", "0.6147004", "0.6130716", "0.61187094", "0.60870796", "0.60323155", "0.5961086", "0.5867416", "0.5867416", "0.5861084", "0.58426297", "0.58400583", "0.5821133", "0.58019865", "0.579916...
0.6821528
0
Get the name of a nested subcommand. Like the `subcmd` property, the name returned is always the canonical name for the subcommand. The `depth` parameter determines how many levels of nesting to traverse; the default of ``2`` gets the first nested subcommand. As a consequence, a value of ``1`` is the same as `subcmd`.
def nested_subcmd(self, depth: int = 2) -> Optional[str]: # pylint: disable=protected-access current = 0 subparser = self.parser try: while current < depth: action = subparser._actions[0] if isinstance(action, _SubParsersAction): subparser = action.choices[self.args[action.dest]] current += 1 else: return None return subparser.name.split()[-1] except (IndexError, KeyError, TypeError): return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_subcmd(self, name: str) -> \"CommandHelp\":\n try:\n return self.subcmds[name]\n except KeyError:\n # Try looking up by alias\n for sub_name, sub_help in self.subcmds.items():\n for alias in sub_help.aliases:\n if name == alia...
[ "0.61825335", "0.5816208", "0.57917714", "0.5630253", "0.55556434", "0.55294657", "0.54540044", "0.5216835", "0.5201424", "0.49779016", "0.49343693", "0.48652846", "0.48608866", "0.48450202", "0.4835011", "0.47898704", "0.47898704", "0.47851962", "0.476425", "0.472209", "0.47...
0.80556154
0
Get the `CommandHelp` object for the given subcommand. `name` may be an alias, in which case it is resolved to the appropriate subcommand.
def get_subcmd(self, name: str) -> "CommandHelp": try: return self.subcmds[name] except KeyError: # Try looking up by alias for sub_name, sub_help in self.subcmds.items(): for alias in sub_help.aliases: if name == alias: return self.subcmds[sub_name] raise
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch_subcommand(self, name):\n try:\n subcommand_class = self.subcommands[name]\n except KeyError:\n self.print_command_unkown_error(name)\n sys.exit(1)\n return subcommand_class(self.prog, name, self.argv[2:], self.stdout)", "def HelpForCmd(self, name):...
[ "0.70336854", "0.70271784", "0.664786", "0.6645423", "0.647728", "0.6419414", "0.6092256", "0.60677403", "0.6052447", "0.5968284", "0.59641016", "0.5962373", "0.5933287", "0.5915913", "0.59141576", "0.5910189", "0.59012985", "0.5898384", "0.5896387", "0.5896387", "0.5896387",...
0.78726345
0
Loads a multilayer png file and return a list of all feature loaded as numpy arrays. You can use np.concatenate(x,2) to create a 3D array of size
def load_png_data(): m=1 #训练文件个数 n=1 #测试文件个数 train_set_x=[]#训练数据集 train_set_y=[]#训练标签集 test_set_x=[]#测试数据集 test_set_y=[]#测试标签集 train_data={} train_path=r".\dataset\train_label\\" dirs=os.listdir(train_path) for file in dirs: srcImg=cv2.imread(train_path+file) #将label数据集保存为numpy格式并保存 npImg=np.array(srcImg) np.save(train_path+str(m)+'.npy',npImg) train_set_x.append(npImg) NoiseImg = GaussianNoise(srcImg, 25, 4, 0.8) npNoiseImg = np.array(NoiseImg) cv2.imwrite(r".\dataset\trainset\\"+str(m)+'.png', NoiseImg, [int(cv2.IMWRITE_PNG_STRATEGY_DEFAULT)]) np.save(r".\dataset\trainset\\" + str(m) + '.npy', npNoiseImg) train_set_y.append(npNoiseImg) m=m+1 train_data['train_set_x']=train_set_x train_data['train_set_y']=train_set_y test_path = r".\dataset\test_label\\" dirs_test = os.listdir(test_path) for file in dirs_test: srcImg=cv2.imread(test_path+file) #将label数据集保存为numpy格式并保存 npImg=np.array(srcImg) np.save(test_path+str(n)+'.npy',npImg) test_set_x.append(npImg) NoiseImg = GaussianNoise(srcImg, 25, 4, 0.8) npNoiseImg = np.array(NoiseImg) cv2.imwrite(r".\dataset\testset\\"+str(n)+'.png', NoiseImg, [int(cv2.IMWRITE_PNG_STRATEGY_DEFAULT)]) np.save(r".\dataset\testset\\" + str(n) + '.npy', npNoiseImg) test_set_y.append(npNoiseImg) n=n+1 train_data['test_set_x']=test_set_x train_data['test_set_y']=test_set_y np.savez(r"E:\DeepLearning\CNNDenoiser\dataset\train_data.npz",**train_data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_image_patch(filename):\n im = Image.open(filename) # .convert('L')\n width, height = im.size\n pixels = list(im.getdata())\n features = [pixels[i * width:(i + 1) * width] for i in range(height)]\n features = np.asarray(im, dtype=np.float32).flatten()\n features /= 255.0\n return feat...
[ "0.71335703", "0.6792827", "0.67380095", "0.67065763", "0.6583074", "0.6564765", "0.6550392", "0.6415319", "0.6408479", "0.639627", "0.63891566", "0.63802946", "0.6377724", "0.6368443", "0.6361748", "0.6327807", "0.6314867", "0.6306744", "0.629919", "0.62990034", "0.6297572",...
0.0
-1
adjust nodes and edges
def add_graphics_theme_to_nx_graph( nx_graph, edge_color=None, node_size_factor=50, edge_size_factor=500): # node size, stroke for node_name, node_attrs in nx_graph.nodes(data=True): #node_size = nx_graph.nodes[node_name]["numexamples"] / float(node_size_factor) #node_size = nx_graph.nodes[node_name]["numexamples"] / float(nx_graph.graph["numexamples"]) #node_size *= node_size_factor node_size = 100 graphics = { "type": "ellipse", "w": node_size, "h": node_size, "fill": "#FFFFFF", "outline": "#000000", "width": 1.0, "fontSize": 14 } if nx_graph.nodes[node_name].get("graphics") is not None: nx_graph.nodes[node_name]["graphics"].update(graphics) else: nx_graph.nodes[node_name]["graphics"] = graphics # edges for start_node, end_node in nx_graph.edges(): for edge_idx in xrange(len(nx_graph[start_node][end_node])): #edge_width = nx_graph[start_node][end_node][edge_idx]["numexamples"] / float( # edge_size_factor) #edge_width = nx_graph[start_node][end_node][edge_idx]["numexamples"] / float( # nx_graph.graph["numexamples"]) #edge_width *= edge_size_factor edge_width = 1.0 graphics = { "type": "arc", "width": edge_width, "targetArrow": "delta" } if edge_color is not None: graphics["fill"] = edge_color if nx_graph[start_node][end_node][edge_idx].get("graphics") is not None: nx_graph[start_node][end_node][edge_idx]["graphics"].update(graphics) else: nx_graph[start_node][end_node][edge_idx]["graphics"] = graphics return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(self):\n\n for node in self.nodes:\n for edge in node.edges:\n for i, edge_node in enumerate(edge.nodes):\n if edge_node.id != node.id:\n edge_node.add_edge(edge)\n\n return self", "def clean_edges(self):", "def update...
[ "0.6937063", "0.69099474", "0.63684916", "0.6333464", "0.6302931", "0.62989634", "0.62662923", "0.62522674", "0.6176229", "0.6170671", "0.6169153", "0.615478", "0.61263245", "0.6126319", "0.6120976", "0.6110921", "0.60948086", "0.60822624", "0.6066578", "0.60385305", "0.60174...
0.0
-1
preparatory function for writing out to gml
def stringize_nx_graph(nx_graph): # graph attributes for key in nx_graph.graph.keys(): if isinstance(nx_graph.graph[key], (list, set, np.ndarray)): nx_graph.graph[key] = ",".join([ str(val) for val in list(nx_graph.graph[key])]) # node attributes for node_name, node_attrs in nx_graph.nodes(data=True): for key in node_attrs.keys(): if isinstance(nx_graph.nodes[node_name][key], (list, set, np.ndarray)): nx_graph.nodes[node_name][key] = ",".join([ str(val) for val in nx_graph.nodes[node_name][key]]) # adjust node name for nice output in cytoscape new_node_name = re.sub(r"HCLUST.\d+_", "", node_name) new_node_name = new_node_name.replace(".UNK.0.A", "") nx_graph.nodes[node_name]["name"] = new_node_name # edge attributes for start_node, end_node in nx_graph.edges(): for edge_idx in xrange(len(nx_graph[start_node][end_node])): edge_attrs = nx_graph[start_node][end_node][edge_idx] for key in edge_attrs.keys(): if isinstance(edge_attrs[key], (list, set, np.ndarray)): nx_graph[start_node][end_node][edge_idx][key] = ",".join([ str(val) for val in nx_graph[start_node][end_node][edge_idx][key]]) return nx_graph
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_graphml_output(self, path):\n self.restructure_edge_info()\n self.restructure_node_info()\n return nx.write_graphml(self.G, path)", "def write_mm(g, fn):\n f = open(fn, \"w\")\n f.write(\"%d %d %d\\n\" % (g.vcount(), g.vcount(), g.ecount()))\n\n if g.is_weighted():\n for e...
[ "0.6463148", "0.6297059", "0.62645066", "0.6240671", "0.6203814", "0.6203347", "0.61121655", "0.60968256", "0.60381806", "0.6023182", "0.6010435", "0.5995649", "0.5977328", "0.5961852", "0.59262574", "0.5919297", "0.5915878", "0.59035015", "0.58848506", "0.5878267", "0.587032...
0.0
-1
build a network view
def main(): # files summary_file = sys.argv[1] pwms_to_tfs_file = sys.argv[2] expressed_tfs_file = sys.argv[3] # TODO # TODO pull in num regions to resize things? but complicated with overlaps etc # TODO edit edges with type of interaction # TODO may want to color by trajectory, to demonstrate waves of trajectory # read in data summary = pd.read_csv(summary_file, sep="\t") pwms_to_tfs = pd.read_csv(pwms_to_tfs_file, sep="\t") pwms_to_tfs = pwms_to_tfs[pwms_to_tfs["expressed"].notna()] pwms_to_filt_tfs = {} # dict: key - pwm names, vals - dict of ensembl ids to hgnc ids for line_idx in range(pwms_to_tfs.shape[0]): pwm_info = pwms_to_tfs.iloc[line_idx,:] pwm_name = pwm_info["hclust_model_name"] pwm_to_tf = dict(zip(pwm_info["expressed"].split(";"), pwm_info["expressed_hgnc"].split(";"))) pwms_to_filt_tfs[pwm_name] = pwm_to_tf # filter expressed hgncs for dynamic ones only tfs_filt = pd.read_csv(expressed_tfs_file, sep="\t", index_col=0) for pwm_name in pwms_to_filt_tfs.keys(): tfs_tmp = pwms_to_filt_tfs[pwm_name] for ensembl_tf in tfs_tmp.keys(): if ensembl_tf not in tfs_filt.index: del tfs_tmp[ensembl_tf] if len(tfs_tmp.keys()) == 0: del pwms_to_filt_tfs[pwm_name] pwms_to_filt_tfs[pwm_name] = tfs_tmp # add in tfs column tf1 = [] for pwm in summary["pwm1"]: tf_str = [] for ensembl_id in pwms_to_filt_tfs[pwm]: tf_str.append(pwms_to_filt_tfs[pwm][ensembl_id]) # TODO try add in max point expression = tfs_filt.loc[ensembl_id,:] max_idx = np.argmax(expression.values) tf_str.append(str(max_idx)) tf_str = (";").join(tf_str) tf1.append(tf_str) summary["tf1"] = tf1 tf2 = [] for pwm in summary["pwm2"]: tf_str = [] for ensembl_id in pwms_to_filt_tfs[pwm]: tf_str.append(pwms_to_filt_tfs[pwm][ensembl_id]) expression = tfs_filt.loc[ensembl_id,:] max_idx = np.argmax(expression.values) tf_str.append(str(max_idx)) tf_str = (";").join(tf_str) tf2.append(tf_str) summary["tf2"] = tf2 # remove failed rules summary = summary[~summary["interaction"].str.contains("FAILED")] # make graph 
graph = nx.from_pandas_edgelist(summary, "tf1", "tf2") # set up positions #pos = graphviz_layout(graph, prog="dot") pos = graphviz_layout(graph, prog="neato") scale_factor = 3 for key in pos.keys(): coords = pos[key] pos[key] = {"x": scale_factor*coords[0], "y": -scale_factor*coords[1]} nx.set_node_attributes(graph, pos, "graphics") # note this is diff from v1 to v2 in networkx # add graphics add_graphics_theme_to_nx_graph(graph) # write gml out_file = "summary.gml" nx.write_gml(stringize_nx_graph(graph), out_file, stringizer=str) # tfs: for each tf, get gene column return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _build_network(self):\n pass", "def networks(view):\n return \"network?\" \\\n \"_return_fields=\" \\\n \"extattrs,\" \\\n \"comment,\" \\\n \"network,\" \\\n \"network_view,\" \\\n \"utilization&\" \\\n ...
[ "0.69119674", "0.65324295", "0.64664465", "0.6227124", "0.61709285", "0.6079168", "0.5990852", "0.59398776", "0.5891403", "0.58894837", "0.587379", "0.5862538", "0.5839957", "0.5753144", "0.5751551", "0.567196", "0.56632817", "0.5620824", "0.5600299", "0.5599422", "0.55967724...
0.0
-1
Guard the given spatial reference object against axis swapping, when running with GDAL 3. Does nothing if GDAL < 3. Modifies the object in place.
def preventGdal3axisSwap(sr): if hasattr(sr, 'SetAxisMappingStrategy'): sr.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def swap_axis(ogr_geom):\n\n osr_sref = ogr_geom.GetSpatialReference()\n sref = SpatialRef.from_osr(osr_sref)\n if (sref.epsg == 4326) and GDAL_3_ENABLED and (osr_sref.GetAxisMappingStrategy() == 1):\n ogr_geom.SwapXY()\n osr_sref.SetAxisMappingStrategy(0)\n ogr_geom.AssignSpatialRefe...
[ "0.60711205", "0.57531947", "0.54899454", "0.54608077", "0.5264879", "0.52497613", "0.5123439", "0.51041955", "0.50515485", "0.50315267", "0.5019805", "0.4985836", "0.49436423", "0.4911243", "0.48981285", "0.48816335", "0.4864694", "0.4860169", "0.48571274", "0.48423705", "0....
0.6333744
0
Given a polygon Geometry object in lat/long, work out what would be a suitable projection to use with this area, in order to avoid things like the international date line wraparound, or the north/sourth pole discontinuities. This only makes sense for tiled products, as opposed to long strips which cross multiple zones, etc. Main possible options are UTM in a suitable zone, UPS when near the poles. Return the EPSG number of the projection.
def findSensibleProjection(geom): coords = getCoords(geom) y = coords[:, 1] x = coords[:, 0] yMin = y.min() yMax = y.max() if (yMax - yMin) > 90: # We are crossing a lot of latitude, which suggests that we have a # long strip> In this case, we don't even bother to suggest an EPSG. epsg = None elif yMin < -80: # We are nearing the south pole, so go with UPS south epsg = 32761 elif yMax > 80: # Nearing north pole, so UPS North epsg = 32661 else: # Work out a UTM zone. Note that we use the median value to get a rough # idea of the centre, rather than the mean, because the mean is subject to all # sorts of problems when crossing the date line xMedian = numpy.median(x) yMedian = numpy.median(y) zone = int((xMedian + 180)/6) % 60 + 1 if yMedian < 0: epsgBase = 32700 else: epsgBase = 32600 epsg = epsgBase + zone return epsg
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def polygon_area(polygon):\n if not PYPROJ_INSTALLED:\n raise ImportError(\"`pyproj` must be installed to use this feature!\")\n poly = wkt_loads(polygon)\n poly_area = shapely.ops.transform(\n partial(\n pyproj.transform,\n pyproj.Proj(init='EPSG:4326'),\n p...
[ "0.6414271", "0.63134813", "0.62757397", "0.60515285", "0.603324", "0.59443223", "0.59352905", "0.5911644", "0.58656824", "0.57909673", "0.5745204", "0.57246166", "0.57246166", "0.5703267", "0.5702607", "0.5696144", "0.56794995", "0.5671271", "0.56611097", "0.5648878", "0.561...
0.693248
0
Make a pair of ogr.CoordinateTransformation objects, for transforming between the two given EPSG projections. Return a tuple (tr1to2, tr2to1)
def makeTransformations(epsg1, epsg2): sr1 = osr.SpatialReference() sr1.ImportFromEPSG(epsg1) preventGdal3axisSwap(sr1) sr2 = osr.SpatialReference() sr2.ImportFromEPSG(epsg2) preventGdal3axisSwap(sr2) tr1to2 = osr.CoordinateTransformation(sr1, sr2) tr2to1 = osr.CoordinateTransformation(sr2, sr1) return (tr1to2, tr2to1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transform_proj(p1, p2, x, y, nocopy=False):\n\n try:\n # This always makes a copy, even if projections are equivalent\n return _transform_internal(p1, p2, x, y, always_xy=True)\n except TypeError:\n if proj_is_same(p1, p2):\n if nocopy:\n return x, y\n ...
[ "0.6856417", "0.683055", "0.6792389", "0.6792389", "0.67460185", "0.64127856", "0.64124286", "0.63331443", "0.6282391", "0.62185615", "0.6211598", "0.61743957", "0.6105238", "0.60964894", "0.6039987", "0.59297293", "0.591202", "0.5792901", "0.5790143", "0.57694685", "0.570458...
0.8401615
0
Given a geometry as a lat/long polygon, find the lat/long centroid, by first projecting into the preferred EPSG, so as to avoid discontinuities. The preferredEpsg is one in which the polygon ought to make sense (as found, hopefully, by the findSensibleProjection() function). Returns a pair [centroidX, centroidY] in lat/long
def findCentroid(geom, preferredEpsg): (projTr, llTr) = makeTransformations(4326, preferredEpsg) geomProj = copyGeom(geom) geomProj.Transform(projTr) geomCentroid = geomProj.Centroid() geomCentroid.Transform(llTr) centroidDict = eval(geomCentroid.ExportToJson()) centroidXY = centroidDict['coordinates'] return centroidXY
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compute_polygon_centroid_2d(polygon):\r\n return geometry.gmComputePolygonCentroid(polygon)", "def findSensibleProjection(geom):\n coords = getCoords(geom)\n y = coords[:, 1]\n x = coords[:, 0]\n yMin = y.min()\n yMax = y.max()\n if (yMax - yMin) > 90:\n # We are crossing a lot of...
[ "0.6208906", "0.60699695", "0.5872511", "0.5834941", "0.5801932", "0.5733032", "0.56430256", "0.5637194", "0.5616599", "0.5605853", "0.55601245", "0.5548161", "0.55228645", "0.5449253", "0.5448886", "0.5407162", "0.5400743", "0.5362452", "0.526248", "0.52553886", "0.5202955",...
0.7754564
0
Return a copy of the geometry. OGR does not provide a good method for doing this.
def copyGeom(geom): geomJson = geom.ExportToJson() newGeom = ogr.CreateGeometryFromJson(geomJson) return newGeom
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _prepare_with_copy(geometry):\n geometry = pygeos.apply(geometry, lambda x: x) # makes a copy\n pygeos.prepare(geometry)\n return geometry", "def getGeometry(self):\n return self.geometry", "def getGeometry(self):\n return self.geometry", "def geometry(self):\n return self....
[ "0.7537405", "0.73882526", "0.73882526", "0.7358205", "0.7358205", "0.7251656", "0.71966004", "0.71324015", "0.7120845", "0.6842374", "0.6735027", "0.6728724", "0.67227095", "0.6678602", "0.6662164", "0.6536648", "0.65043837", "0.6306422", "0.6269604", "0.6267922", "0.6248901...
0.74882305
1
Return the coordinates of the given OGR geometry. Assumes that this is a single polygon, and returns a numpy array of the x, y coords, of shape (numPts, 2). If the polygon has holes, they will be discarded this is just the outer polygon. If the geometry is a MultiPoint geom, also return a 2d array of coords.
def getCoords(geom): geomDict = eval(geom.ExportToJson()) coords = geomDict['coordinates'] if geomDict['type'] == 'Polygon': coordsArray = numpy.array(coords[0]) elif geomDict['type'] == 'MultiPoint': coordsArray = numpy.array(coords) else: coordsArray = None return coordsArray
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def geomFromInteriorPoints(coords):\n if isinstance(coords, numpy.ndarray):\n coords = coords.tolist()\n geomDict = {'type':'MultiPoint', 'coordinates':coords}\n geomPoints = ogr.CreateGeometryFromJson(repr(geomDict))\n return geomPoints", "def point_coords(geom):\n # Return a tuple with th...
[ "0.6378374", "0.6318399", "0.6230891", "0.6092643", "0.598644", "0.59413916", "0.5929878", "0.5911129", "0.5879037", "0.5745938", "0.56728566", "0.5662459", "0.5643619", "0.56382567", "0.560755", "0.5599817", "0.55802184", "0.5575683", "0.557035", "0.55689305", "0.55629367", ...
0.72319037
0
The given list of pairs (or 2d numpy array) is the (x, y) coords of the polygon outline. Return a Polygon ogr.Geometry object.
def geomFromOutlineCoords(coords): if isinstance(coords, numpy.ndarray): coords = coords.tolist() geomDict = {'type':'Polygon', 'coordinates':[coords]} geom = ogr.CreateGeometryFromJson(repr(geomDict)) return geom
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_polygon(*coords):\n global GEOMETRY_SURF, POLYGONS,col\n if len(coords) < 3:\n print(\"Warning: Invalid polygon passed, ignoring...\")\n return\n start = coords[0]\n prev = coords[0]\n for coord in coords:\n POLYGONS |= {coord}\n line = Boundary(prev[0],prev[1],c...
[ "0.7088763", "0.6743673", "0.6743673", "0.65676457", "0.65600157", "0.6526626", "0.64702845", "0.64233387", "0.6330234", "0.6271293", "0.6227688", "0.6217067", "0.6208036", "0.6129953", "0.610706", "0.6104611", "0.6093786", "0.60640717", "0.6050751", "0.60261005", "0.6024823"...
0.63739526
8
The given list of pairs (or 2d numpy array) is the (x, y) coords of a set of internal points inside a polygon. Returns a MultiPoint Geometry.
def geomFromInteriorPoints(coords): if isinstance(coords, numpy.ndarray): coords = coords.tolist() geomDict = {'type':'MultiPoint', 'coordinates':coords} geomPoints = ogr.CreateGeometryFromJson(repr(geomDict)) return geomPoints
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_MultiPolyLists_xy(mpoly):\n # Get the x or y coordinates\n x = []\n y = []\n if isinstance(mpoly,Polygon):\n mpoly = [mpoly]\n for poly in mpoly: # the polygon objects return arrays, it's important they be lists or Bokeh fails\n exterior_coords_x = poly.exterior.coords.xy[0].to...
[ "0.68716544", "0.65765846", "0.6421751", "0.6229348", "0.6228022", "0.62237096", "0.61734176", "0.6153338", "0.59607905", "0.58115107", "0.58115107", "0.57220775", "0.5716397", "0.5716397", "0.5684545", "0.5646158", "0.56197923", "0.5619396", "0.55598253", "0.55252755", "0.55...
0.63562393
3
Given a MultiPoint geometry object in lat/long, create a polygon of the convex hull of these points. First project the lat/long points into the preferred EPSG, so that when we find the convex hull, we are not crossing any discontinuities such as the international date line. Return a single polygon geometry in lat/long.
def polygonFromInteriorPoints(geom, preferredEpsg): (projTr, llTr) = makeTransformations(4326, preferredEpsg) geomProj = copyGeom(geom) geomProj.Transform(projTr) geomOutline = geomProj.ConvexHull() geomOutline.Transform(llTr) return geomOutline
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def convex_hull(self):\n if isinstance(self.crs, GeographicalCRS):\n raise CRSError(\"not implemented for geographical coordinate \"\n \"systems. Project to a projected coordinate system.\")\n\n points = [pt for pt in self]\n\n # Find the lowermost (left?) ...
[ "0.7075242", "0.6431152", "0.64101326", "0.63916564", "0.63589376", "0.627315", "0.61870193", "0.6043088", "0.6030819", "0.59753376", "0.5926664", "0.59115064", "0.5896636", "0.589403", "0.5877571", "0.5867895", "0.58546466", "0.5851499", "0.5834695", "0.58292884", "0.5810926...
0.7046996
1
Given a Polygon Geometry object, in lat/long, detect whether it crosses the dateline. Do this in the projection of the preferred EPSG, so we remove (reduce?) the ambiguity about inside/outside.
def crossesDateline(geom, preferredEpsg): (xMin, xMax, yMin, yMax) = geom.GetEnvelope() (projTr, llTr) = makeTransformations(4326, preferredEpsg) geomProj = copyGeom(geom) geomProj.Transform(projTr) dateLineGeom = ogr.Geometry(wkt='LINESTRING(180 {}, 180 {})'.format(yMin, yMax)) try: dateLineGeom.Transform(projTr) crosses = geomProj.Intersects(dateLineGeom) except Exception: # If we can't transform into the preferred EPSG, then it seems likely that # the geom is nowhere near the date line. crosses = False return crosses
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def point_inside_polygon(xxx_todo_changeme,poly):\n (x,y) = xxx_todo_changeme\n n = len(poly)\n inside = False\n\n p1x, p1y = poly[0]\n for i in range(n + 1):\n p2x, p2y = poly[i % n]\n if y > min(p1y, p2y):\n if y <= max(p1y, p2y):\n if x <= max(p1x, p2x):\n ...
[ "0.69540954", "0.6897939", "0.6799872", "0.6788985", "0.67285305", "0.66589004", "0.6657012", "0.6512406", "0.64687806", "0.64495367", "0.64015037", "0.63911974", "0.6390966", "0.6381956", "0.6351574", "0.62924826", "0.62201375", "0.61754906", "0.611618", "0.6106009", "0.6104...
0.7383287
0
Given a Polygon Geometry object in lat/long, determine whether it crosses the date line, and if so, split it into a multipolygon with a part on either side. Use the given preferred EPSG to perform calculations. Return a new Geometry in lat/long.
def splitAtDateline(geom, preferredEpsg): crosses = crossesDateline(geom, preferredEpsg) if crosses: (projTr, llTr) = makeTransformations(4326, preferredEpsg) coords = getCoords(geom) (x, y) = (coords[:, 0], coords[:, 1]) (yMin, yMax) = (y.min(), y.max()) xMinPositive = None xMaxNegative = None xGe0 = (x >= 0) xLt0 = (x < 0) if xGe0.any() > 0 and xLt0.any() > 0: xMaxNegative = x[xLt0].max() xMinPositive = x[xGe0].min() # Create rectangles for the east and west hemispheres, constrained by the # extent of this polygon. Note that this assumes that we do not # cross both the date line, and also the prime (zero) meridian. This may not # always be true, notably when we are close to the pole. eastHemiRectCoords = [[xMinPositive, yMax], [xMinPositive, yMin], [180, yMin], [180, yMax], [xMinPositive, yMax]] eastHemiRectJson = repr({'type':'Polygon', 'coordinates':[eastHemiRectCoords]}) westHemiRectCoords = [[-180, yMax], [-180, yMin], [xMaxNegative, yMin], [xMaxNegative, yMax], [-180, yMax]] westHemiRectJson = repr({'type':'Polygon', 'coordinates':[westHemiRectCoords]}) eastHemiRect = ogr.CreateGeometryFromJson(eastHemiRectJson) westHemiRect = ogr.CreateGeometryFromJson(westHemiRectJson) geomProj = copyGeom(geom) geomProj.Transform(projTr) eastHemiRect.Transform(projTr) westHemiRect.Transform(projTr) eastHemiPart = geomProj.Intersection(eastHemiRect) westHemiPart = geomProj.Intersection(westHemiRect) eastHemiPart.Transform(llTr) westHemiPart.Transform(llTr) # Put these together as a single multipolygon eastPartCoords = getCoords(eastHemiPart) westPartCoords = getCoords(westHemiPart) # Discard any vertices which are still no the wrong side of the 180 line. I # do not understand what is going on here, but I have invested far more of # my valuable time than I should, and this kludge will be a reasonable approximation. 
eastPartCoords = eastPartCoords[eastPartCoords[:, 0] > 0, :] westPartCoords = westPartCoords[westPartCoords[:, 0] < 0, :] # Convert to lists eastPartCoords = eastPartCoords.tolist() westPartCoords = westPartCoords.tolist() # Discard anything left with only 2 points if len(eastPartCoords) < 3: eastPartCoords = [] if len(westPartCoords) < 3: westPartCoords = [] # Close polygons. What a kludge..... if len(eastPartCoords) > 2: if eastPartCoords[-1][0] != eastPartCoords[0][0] or eastPartCoords[-1][1] != eastPartCoords[0][1]: eastPartCoords.append(eastPartCoords[0]) if len(westPartCoords) > 2: if westPartCoords[-1][0] != westPartCoords[0][0] or westPartCoords[-1][1] != westPartCoords[0][1]: westPartCoords.append(westPartCoords[0]) # Make a multi-polygon from the two parts coordsMulti = [[eastPartCoords], [westPartCoords]] jsonStr = repr({'type':'MultiPolygon', 'coordinates':coordsMulti}) newGeom = ogr.CreateGeometryFromJson(jsonStr) else: # It didn't really cross the date line, but seems to due to rounding # error in crossesDateline(). newGeom = copyGeom(geom) else: newGeom = copyGeom(geom) return newGeom
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def crossesDateline(geom, preferredEpsg):\n (xMin, xMax, yMin, yMax) = geom.GetEnvelope()\n (projTr, llTr) = makeTransformations(4326, preferredEpsg)\n\n geomProj = copyGeom(geom)\n geomProj.Transform(projTr)\n dateLineGeom = ogr.Geometry(wkt='LINESTRING(180 {}, 180 {})'.format(yMin, yMax))\n try...
[ "0.67136943", "0.63109124", "0.5897794", "0.56662834", "0.5617019", "0.5594572", "0.5547453", "0.55368793", "0.5471673", "0.5448653", "0.54439175", "0.54206467", "0.5395042", "0.53801703", "0.5376376", "0.5372196", "0.5371982", "0.5348414", "0.53426564", "0.53149515", "0.5310...
0.76047856
0
Test that invalid tokens works
def test_invalid_tokens(self): self.assertTrue(1 + 1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_unused_token_is_valid(self):\n assert self.token.is_valid()", "def test_regex_invalid_tokens(self):\n tokens = (\n \"\",\n \"lemon wins\",\n \"..\",\n \"x.y\",\n \"x.y.\",\n \".y.z\",\n \".y.\",\n \"..z...
[ "0.8261625", "0.7524087", "0.74162", "0.72980505", "0.7251938", "0.7226254", "0.714365", "0.71125466", "0.70883507", "0.70597327", "0.7019325", "0.7010936", "0.7009519", "0.6961416", "0.6934247", "0.68621945", "0.6833543", "0.6818326", "0.68157536", "0.6811885", "0.6800639", ...
0.87907505
0
This function prints and plots the confusion matrix. Normalization can be applied by setting `normalize=True`.
def plot_confusion_matrix(cm, classes, normalize=False, title=None, cmap=plt.cm.Blues, image_save=False, image_save_dir="/home/temp/moriz/validation/", save_suffix=None, save_format="pdf" ): if not title: title = 'Confusion matrix' if normalize: cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis] fig, ax = plt.subplots() im = ax.imshow(cm, interpolation='nearest', cmap=cmap) ax.figure.colorbar(im, ax=ax) # We want to show all ticks... ax.set(xticks=np.arange(cm.shape[1]), yticks=np.arange(cm.shape[0]), # ... and label them with the respective list entries xticklabels=classes, yticklabels=classes, title=title, ylabel='True label', xlabel='Predicted label') # Rotate the tick labels and set their alignment. plt.setp(ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor") # Loop over data dimensions and create text annotations. fmt = '.2f' if normalize else 'd' thresh = cm.max() / 2. for i in range(cm.shape[0]): for j in range(cm.shape[1]): ax.text(j, i, format(cm[i, j], fmt), ha="center", va="center", color="white" if cm[i, j] > thresh else "black") fig.tight_layout() if image_save: if save_suffix is None: save_name = "/confusion_matrix." + save_format else: save_name = "/confusion_matrix_" + save_suffix + "." + save_format plt.savefig(image_save_dir + save_name, dpi='figure', format=save_format) plt.show() plt.close(fig)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_confusion_matrix(cm, classes=[0,1], normalize=False, title='Confusion matrix', print_matrix=False):\n\n if normalize:\n cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n print(\"Normalized confusion matrix\")\n else:\n print('Confusion matrix, without normalization')\n\n...
[ "0.8194862", "0.80949175", "0.8029915", "0.8019153", "0.79941195", "0.7991258", "0.7980955", "0.7976606", "0.79610753", "0.79590565", "0.79378676", "0.7934962", "0.7934504", "0.79313844", "0.7926313", "0.7924577", "0.79241234", "0.7923211", "0.7923023", "0.7921931", "0.791787...
0.0
-1
Proxy to turn of transcypt when calling img.get/set methods
def image_proxy(img): def _set(*args): __pragma__("noalias", "set") value = img.set(*args) __pragma__("alias", "set", "py_set") return value def _get(*args): __pragma__("noalias", "get") value = img.get(*args) __pragma__("alias", "get", "py_get") return value img.set = _set img.get = _get return img
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __call__(self, img, *args, **kwargs):\n raise NotImplementedError", "def __call__(self, img, *args, **kwargs):\n raise NotImplementedError", "def __init__(self, img, settings):\r\n self.img_orig = img\r\n self.settings = settings", "def _apply_transform(self, img: np.ndarray):...
[ "0.6246118", "0.6246118", "0.62181085", "0.6194361", "0.6194361", "0.60827804", "0.606501", "0.6059099", "0.59579223", "0.59201604", "0.5910337", "0.58504593", "0.5800288", "0.5793381", "0.5784016", "0.5776674", "0.5700956", "0.56607634", "0.5656199", "0.56306195", "0.5605271...
0.7732635
0
We need to run this before the actual draw to insert and update p5 env variables
def pre_draw(p5_instance, draw_func): global _CTX_MIDDLE, _DEFAULT_FILL, _DEFAULT_LEADMULT, _DEFAULT_STROKE, _DEFAULT_TEXT_FILL global ADD, ALT, ARROW, AUTO, AUDIO, AXES, BACKSPACE, BASELINE, BEVEL, BEZIER, BLEND, BLUR, BOLD, BOLDITALIC global BOTTOM, BURN, CENTER, CHORD, CLAMP, CLOSE, CONTROL, CORNER, CORNERS, CROSS, CURVE, DARKEST global DEG_TO_RAD, DEGREES, DELETE, DIFFERENCE, DILATE, DODGE, DOWN_ARROW, ENTER, ERODE, ESCAPE, EXCLUSION global FILL, GRAY, GRID, HALF_PI, HAND, HARD_LIGHT, HSB, HSL, IMAGE, IMMEDIATE, INVERT, ITALIC, LANDSCAPE global LEFT, LEFT_ARROW, LIGHTEST, LINE_LOOP, LINE_STRIP, LINEAR, LINES, MIRROR, MITER, MOVE, MULTIPLY, NEAREST global NORMAL, OPAQUE, OPEN, OPTION, OVERLAY, P2D, PI, PIE, POINTS, PORTRAIT, POSTERIZE, PROJECT, QUAD_STRIP, QUADRATIC global QUADS, QUARTER_PI, RAD_TO_DEG, RADIANS, RADIUS, REPEAT, REPLACE, RETURN, RGB, RIGHT, RIGHT_ARROW global ROUND, SCREEN, SHIFT, SOFT_LIGHT, SQUARE, STROKE, SUBTRACT, TAB, TAU, TEXT, TEXTURE, THRESHOLD, TOP global TRIANGLE_FAN, TRIANGLE_STRIP, TRIANGLES, TWO_PI, UP_ARROW, VIDEO, WAIT, WEBGL global frameCount, focused, displayWidth, displayHeight, windowWidth, windowHeight, width, height global disableFriendlyErrors, deviceOrientation, accelerationX, accelerationY, accelerationZ global pAccelerationX, pAccelerationY, pAccelerationZ, rotationX, rotationY, rotationZ global pRotationX, pRotationY, pRotationZ, turnAxis, keyIsPressed, key, keyCode, mouseX, mouseY, pmouseX, pmouseY global winMouseX, winMouseY, pwinMouseX, pwinMouseY, mouseButton, mouseIsPressed, touches, pixels _CTX_MIDDLE = p5_instance._CTX_MIDDLE _DEFAULT_FILL = p5_instance._DEFAULT_FILL _DEFAULT_LEADMULT = p5_instance._DEFAULT_LEADMULT _DEFAULT_STROKE = p5_instance._DEFAULT_STROKE _DEFAULT_TEXT_FILL = p5_instance._DEFAULT_TEXT_FILL ADD = p5_instance.ADD ALT = p5_instance.ALT ARROW = p5_instance.ARROW AUDIO = p5_instance.AUDIO AUTO = p5_instance.AUTO AXES = p5_instance.AXES BACKSPACE = p5_instance.BACKSPACE BASELINE = 
p5_instance.BASELINE BEVEL = p5_instance.BEVEL BEZIER = p5_instance.BEZIER BLEND = p5_instance.BLEND BLUR = p5_instance.BLUR BOLD = p5_instance.BOLD BOLDITALIC = p5_instance.BOLDITALIC BOTTOM = p5_instance.BOTTOM BURN = p5_instance.BURN CENTER = p5_instance.CENTER CHORD = p5_instance.CHORD CLAMP = p5_instance.CLAMP CLOSE = p5_instance.CLOSE CONTROL = p5_instance.CONTROL CORNER = p5_instance.CORNER CORNERS = p5_instance.CORNERS CROSS = p5_instance.CROSS CURVE = p5_instance.CURVE DARKEST = p5_instance.DARKEST DEG_TO_RAD = p5_instance.DEG_TO_RAD DEGREES = p5_instance.DEGREES DELETE = p5_instance.DELETE DIFFERENCE = p5_instance.DIFFERENCE DILATE = p5_instance.DILATE DODGE = p5_instance.DODGE DOWN_ARROW = p5_instance.DOWN_ARROW ENTER = p5_instance.ENTER ERODE = p5_instance.ERODE ESCAPE = p5_instance.ESCAPE EXCLUSION = p5_instance.EXCLUSION FILL = p5_instance.FILL GRAY = p5_instance.GRAY GRID = p5_instance.GRID HALF_PI = p5_instance.HALF_PI HAND = p5_instance.HAND HARD_LIGHT = p5_instance.HARD_LIGHT HSB = p5_instance.HSB HSL = p5_instance.HSL IMAGE = p5_instance.IMAGE IMMEDIATE = p5_instance.IMMEDIATE INVERT = p5_instance.INVERT ITALIC = p5_instance.ITALIC LANDSCAPE = p5_instance.LANDSCAPE LEFT = p5_instance.LEFT LEFT_ARROW = p5_instance.LEFT_ARROW LIGHTEST = p5_instance.LIGHTEST LINE_LOOP = p5_instance.LINE_LOOP LINE_STRIP = p5_instance.LINE_STRIP LINEAR = p5_instance.LINEAR LINES = p5_instance.LINES MIRROR = p5_instance.MIRROR MITER = p5_instance.MITER MOVE = p5_instance.MOVE MULTIPLY = p5_instance.MULTIPLY NEAREST = p5_instance.NEAREST NORMAL = p5_instance.NORMAL OPAQUE = p5_instance.OPAQUE OPEN = p5_instance.OPEN OPTION = p5_instance.OPTION OVERLAY = p5_instance.OVERLAY P2D = p5_instance.P2D P3D = p5_instance.WEBGL PI = p5_instance.PI PIE = p5_instance.PIE POINTS = p5_instance.POINTS PORTRAIT = p5_instance.PORTRAIT POSTERIZE = p5_instance.POSTERIZE PROJECT = p5_instance.PROJECT QUAD_STRIP = p5_instance.QUAD_STRIP QUADRATIC = p5_instance.QUADRATIC QUADS = 
p5_instance.QUADS QUARTER_PI = p5_instance.QUARTER_PI RAD_TO_DEG = p5_instance.RAD_TO_DEG RADIANS = p5_instance.RADIANS RADIUS = p5_instance.RADIUS REPEAT = p5_instance.REPEAT REPLACE = p5_instance.REPLACE RETURN = p5_instance.RETURN RGB = p5_instance.RGB RIGHT = p5_instance.RIGHT RIGHT_ARROW = p5_instance.RIGHT_ARROW ROUND = p5_instance.ROUND SCREEN = p5_instance.SCREEN SHIFT = p5_instance.SHIFT SOFT_LIGHT = p5_instance.SOFT_LIGHT SQUARE = p5_instance.SQUARE STROKE = p5_instance.STROKE SUBTRACT = p5_instance.SUBTRACT TAB = p5_instance.TAB TAU = p5_instance.TAU TEXT = p5_instance.TEXT TEXTURE = p5_instance.TEXTURE THRESHOLD = p5_instance.THRESHOLD TOP = p5_instance.TOP TRIANGLE_FAN = p5_instance.TRIANGLE_FAN TRIANGLE_STRIP = p5_instance.TRIANGLE_STRIP TRIANGLES = p5_instance.TRIANGLES TWO_PI = p5_instance.TWO_PI UP_ARROW = p5_instance.UP_ARROW VIDEO = p5_instance.VIDEO WAIT = p5_instance.WAIT WEBGL = p5_instance.WEBGL frameCount = p5_instance.frameCount focused = p5_instance.focused displayWidth = p5_instance.displayWidth displayHeight = p5_instance.displayHeight windowWidth = p5_instance.windowWidth windowHeight = p5_instance.windowHeight width = p5_instance.width height = p5_instance.height disableFriendlyErrors = p5_instance.disableFriendlyErrors deviceOrientation = p5_instance.deviceOrientation accelerationX = p5_instance.accelerationX accelerationY = p5_instance.accelerationY accelerationZ = p5_instance.accelerationZ pAccelerationX = p5_instance.pAccelerationX pAccelerationY = p5_instance.pAccelerationY pAccelerationZ = p5_instance.pAccelerationZ rotationX = p5_instance.rotationX rotationY = p5_instance.rotationY rotationZ = p5_instance.rotationZ pRotationX = p5_instance.pRotationX pRotationY = p5_instance.pRotationY pRotationZ = p5_instance.pRotationZ turnAxis = p5_instance.turnAxis keyIsPressed = p5_instance.keyIsPressed key = p5_instance.key keyCode = p5_instance.keyCode mouseX = p5_instance.mouseX mouseY = p5_instance.mouseY pmouseX = p5_instance.pmouseX 
pmouseY = p5_instance.pmouseY winMouseX = p5_instance.winMouseX winMouseY = p5_instance.winMouseY pwinMouseX = p5_instance.pwinMouseX pwinMouseY = p5_instance.pwinMouseY mouseButton = p5_instance.mouseButton mouseIsPressed = p5_instance.mouseIsPressed touches = p5_instance.touches pixels = p5_instance.pixels return draw_func()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup_draw(self):\n pass", "def pointsSetUp(self):\r\n self.background.draw(self.surface)\r\n for i in range(len(self.points)):\r\n self.points[i].organize()\r\n self.points[i].update()\r\n self.points[i].addNumber(i)\r\n self.points[i].setActi...
[ "0.6523757", "0.6017574", "0.5880544", "0.5814766", "0.57777816", "0.5713455", "0.5674024", "0.56632835", "0.56570673", "0.5616937", "0.56138486", "0.5583289", "0.5570645", "0.55119085", "0.5494234", "0.546833", "0.54369044", "0.54284686", "0.54177946", "0.5411796", "0.540187...
0.6062119
1
Injects the p5js's skecth instance as a global variable to setup and draw functions
def global_p5_injection(p5_sketch): def decorator(f): def wrapper(): global _P5_INSTANCE _P5_INSTANCE = p5_sketch return pre_draw(_P5_INSTANCE, f) return wrapper return decorator
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup():\r\n #this happens just once\r\n size(width, height) #instead of create_canvas\r", "def __init__(self, parent):\n super(P5, self).__init__(parent)\n self.shapes = []\n self.scenes = []\n self.current_scene = 0\n self.objects = []\n self.lighting = True\...
[ "0.6492295", "0.6341241", "0.6277463", "0.6121529", "0.5968284", "0.5964794", "0.5882622", "0.5853329", "0.5788281", "0.5773192", "0.5767879", "0.5749697", "0.5735698", "0.5676923", "0.56400704", "0.5595668", "0.55945295", "0.558435", "0.5566732", "0.55556136", "0.55504155", ...
0.63667154
1
Calculates the local density of states of a hamiltonian and writes it in file
def ldos0d(h,e=0.0,delta=0.01): if h.dimensionality==0: # only for 0d iden = np.identity(h.intra.shape[0],dtype=np.complex) # create identity g = ( (e+1j*delta)*iden -h.intra ).I # calculate green function else: raise # not implemented... d = [ -(g[i,i]).imag/np.pi for i in range(len(g))] # get imaginary part d = spatial_dos(h,d) # convert to spatial resolved DOS g = h.geometry # store geometry write_ldos(g.x,g.y,d,z=g.z) # write in file return d
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_density(fname, density):\n K, M, N = density.shape\n output = open(fname, \"w\")\n output.write(\"ARMA_CUB_TXT_FN008\\n\")\n output.write(\"%d %d %d\\n\" % (K, M, N))\n for i in range(N):\n for k in range(K):\n for m in range(M):\n output.write(\" %+.6e\" ...
[ "0.55582106", "0.54553246", "0.5443375", "0.5435445", "0.53110385", "0.5228803", "0.51917344", "0.5187912", "0.51358545", "0.5119606", "0.5118766", "0.5114473", "0.5104518", "0.5102728", "0.5058224", "0.5031976", "0.50309247", "0.5023144", "0.5012087", "0.50105166", "0.500671...
0.0
-1
Calculates the local density of states of a hamiltonian and writes it in file, using arpack
def ldos0d_wf(h,e=0.0,delta=0.01,num_wf = 10,robust=False,tol=0): if h.dimensionality==0: # only for 0d intra = csc_matrix(h.intra) # matrix else: raise # not implemented... if robust: # go to the imaginary axis for stability eig,eigvec = slg.eigs(intra,k=int(num_wf),which="LM", sigma=e+1j*delta,tol=tol) eig = eig.real # real part only else: # Hermitic Hamiltonian eig,eigvec = slg.eigsh(intra,k=int(num_wf),which="LM",sigma=e,tol=tol) d = np.array([0.0 for i in range(intra.shape[0])]) # initialize for (v,ie) in zip(eigvec.transpose(),eig): # loop over wavefunctions v2 = (np.conjugate(v)*v).real # square of wavefunction fac = delta/((e-ie)**2 + delta**2) # factor to create a delta d += fac*v2 # add contribution # d /= num_wf # normalize d /= np.pi # normalize d = spatial_dos(h,d) # resum if necessary g = h.geometry # store geometry write_ldos(g.x,g.y,d,z=g.z) # write in file
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_density(fname, density):\n K, M, N = density.shape\n output = open(fname, \"w\")\n output.write(\"ARMA_CUB_TXT_FN008\\n\")\n output.write(\"%d %d %d\\n\" % (K, M, N))\n for i in range(N):\n for k in range(K):\n for m in range(M):\n output.write(\" %+.6e\" ...
[ "0.56314754", "0.55677277", "0.55518115", "0.53848726", "0.5382426", "0.53711474", "0.5332129", "0.53105676", "0.52964735", "0.52770823", "0.5270333", "0.52593464", "0.524486", "0.52432036", "0.5242412", "0.5241068", "0.5220861", "0.52199006", "0.5214131", "0.52032506", "0.51...
0.0
-1
Use arpack to calculate hte local density of states at a certain energy
def ldos_arpack(intra,num_wf=10,robust=False,tol=0,e=0.0,delta=0.01): if robust: # go to the imaginary axis for stability eig,eigvec = slg.eigs(intra,k=int(num_wf),which="LM", sigma=e+1j*delta,tol=tol) eig = eig.real # real part only else: # Hermitic Hamiltonian eig,eigvec = slg.eigsh(intra,k=int(num_wf),which="LM",sigma=e,tol=tol) d = np.array([0.0 for i in range(intra.shape[0])]) # initialize for (v,ie) in zip(eigvec.transpose(),eig): # loop over wavefunctions v2 = (np.conjugate(v)*v).real # square of wavefunction fac = delta/((e-ie)**2 + delta**2) # factor to create a delta d += fac*v2 # add contribution # d /= num_wf # normalize d /= np.pi # normalize return d
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_density(\n self,\n states: FlowFieldMap,\n additional_states: FlowFieldMap,\n ) -> FlowFieldVal:\n zz = additional_states.get('zz', [tf.constant(0, dtype=TF_DTYPE)] *\n self._params.nz)\n\n if 'T' in states:\n t = states['T']\n elif 'theta' in ...
[ "0.639904", "0.6349566", "0.6122259", "0.587715", "0.5850544", "0.5828956", "0.5805845", "0.57823133", "0.57640857", "0.5762716", "0.575468", "0.5743525", "0.57068014", "0.568855", "0.56624216", "0.56600386", "0.5653631", "0.56313336", "0.5620443", "0.56074494", "0.56047225",...
0.0
-1
Calculate the DOS in a set of energies by full diagonalization
def ldos_waves(intra,es = [0.0],delta=0.01): es = np.array(es) # array with energies eig,eigvec = lg.eigh(intra) ds = [] # empty list for energy in es: # loop over energies d = np.array([0.0 for i in range(intra.shape[0])]) # initialize for (v,ie) in zip(eigvec.transpose(),eig): # loop over wavefunctions v2 = (np.conjugate(v)*v).real # square of wavefunction fac = delta/((energy-ie)**2 + delta**2) # factor to create a delta d += fac*v2 # add contribution d /= np.pi # normalize ds.append(d) # store ds = np.array(ds) # convert to array return ds
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dos_integral(E,dos,m=0):\n somma = 0.0\n h = 0.5*(E[2]-E[0])\n for j in range(0,len(dos)-3,3):\n somma += 3.0*pow(E[j],m)*dos[j]+3.0*pow(E[j+1],m)*dos[j+1]+2.0*pow(E[j+2],m)*dos[j+2]\n \n return h*somma*3.0/8.0;", "def multi_ldos(h,es=[0.0],delta=0.001,nrep=3,nk=2,numw=3,random=Fals...
[ "0.61792886", "0.59270054", "0.586323", "0.57872564", "0.57571954", "0.57146597", "0.5687018", "0.5637377", "0.56015885", "0.55990046", "0.5590492", "0.5574188", "0.55615056", "0.55407554", "0.5533155", "0.5531143", "0.5525705", "0.5469803", "0.545624", "0.5441904", "0.543674...
0.51186496
77
Write a map of the ldos using full diagonalization
def ldosmap(h,energies=np.linspace(-1.0,1.0,40),delta=None,nk=40): if delta is None: delta = (np.max(energies)-np.min(energies))/len(energies) # delta hkgen = h.get_hk_gen() # get generator dstot = np.zeros((len(energies),h.intra.shape[0])) # initialize for ik in range(nk): print("Random k-point",ik,nk,end="\r") k = np.random.random(3) # random k-point hk = hkgen(k) # ge Hamiltonian ds = ldos_waves(hk,es=energies,delta=delta) # LDOS for this kpoint dstot += ds # add print("LDOS finished") dstot /=nk # normalize dstot = [spatial_dos(h,d) for d in dstot] # convert to spatial resolved DOS return np.array(dstot)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slabldos(h,energies=np.linspace(-1.0,1.0,40),delta=None,nk=40):\n if h.dimensionality!=2: raise # nope\n ds = ldosmap(h,energies=energies,delta=delta,nk=nk)\n if len(ds[0])!=len(h.geometry.z): \n print(\"Wrong dimensions\",len(ds[0]),len(h.geometry.z))\n raise\n f = open(\"DOSMAP.OUT\",\"w\")\n f.wr...
[ "0.655839", "0.6347875", "0.59854907", "0.5929425", "0.58443075", "0.5822576", "0.5790556", "0.577992", "0.57649004", "0.5739247", "0.5735068", "0.5722867", "0.5628976", "0.5559443", "0.5539078", "0.55376244", "0.55363584", "0.55267847", "0.5493117", "0.5472624", "0.5431082",...
0.60297304
2
Computes the DOS for each site of an slab, only for 2d
def slabldos(h,energies=np.linspace(-1.0,1.0,40),delta=None,nk=40): if h.dimensionality!=2: raise # nope ds = ldosmap(h,energies=energies,delta=delta,nk=nk) if len(ds[0])!=len(h.geometry.z): print("Wrong dimensions",len(ds[0]),len(h.geometry.z)) raise f = open("DOSMAP.OUT","w") f.write("# energy, index, DOS, zposition\n") for ie in range(len(energies)): for iz in range(len(h.geometry.z)): f.write(str(energies[ie])+" ") f.write(str(iz)+" ") f.write(str(ds[ie,iz])+" ") f.write(str(h.geometry.z[iz])+"\n") f.close() return energies,np.transpose(ds) # retunr LDOS
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calc_msd(pos_x, pos_y, pos_z):\n particles = pos_x.shape[0]\n N = pos_x.shape[1] \n tamsd = np.zeros(shape = (particles, N - 1)) \n\n for p in np.arange(start = 0, stop = particles, step = 1): \n for n in np.arange(start = 1, stop = N, step = 1): \n sumdis = np.array([((pos_x[p, i...
[ "0.5823131", "0.56665945", "0.5654268", "0.56067395", "0.5549059", "0.5487201", "0.5425105", "0.53680533", "0.5335588", "0.5313948", "0.53035074", "0.52599955", "0.52019894", "0.51405936", "0.5140454", "0.51258785", "0.5124321", "0.5116561", "0.5109169", "0.5090838", "0.50795...
0.63072735
0
Calculate DOS for a 1d system
def ldos1d(h,e=0.0,delta=0.001,nrep=3): import green if h.dimensionality!=1: raise # only for 1d gb,gs = green.green_renormalization(h.intra,h.inter,energy=e,delta=delta) d = [ -(gb[i,i]).imag for i in range(len(gb))] # get imaginary part d = spatial_dos(h,d) # convert to spatial resolved DOS g = h.geometry # store geometry x,y = g.x,g.y # get the coordinates go = h.geometry.copy() # copy geometry go = go.supercell(nrep) # create supercell write_ldos(go.x,go.y,d.tolist()*nrep) # write in file return d
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _dsurface_domega(self):\n\n dsdo = 0.\n\n return dsdo", "def get_dos(self):\n\n return self.get_array('dos')", "def dVdx(self, sys):\n dx2 = sys.positions * sys.positions - self.x0 * self.x0\n return 4 * self.A * sys.positions * dx2", "def dVdx(self, sys):\n dx =...
[ "0.57057214", "0.55718416", "0.54609364", "0.5383514", "0.5373876", "0.52895164", "0.52246433", "0.5127203", "0.5026564", "0.5012178", "0.5005394", "0.49850887", "0.49414775", "0.49326056", "0.49182996", "0.4915729", "0.49154025", "0.4866256", "0.48534217", "0.48473454", "0.4...
0.0
-1
Calculate DOS for a 2d system
def ldos2d(h,e=0.0,delta=0.001,nrep=3,nk=None,mode="green", random=True,num_wf=20): if mode=="green": import green if h.dimensionality!=2: raise # only for 1d if nk is not None: print("LDOS using normal integration with nkpoints",nk) gb,gs = green.bloch_selfenergy(h,energy=e,delta=delta,mode="full",nk=nk) d = [ -(gb[i,i]).imag for i in range(len(gb))] # get imaginary part else: print("LDOS using renormalization adaptative Green function") gb,gs = green.bloch_selfenergy(h,energy=e,delta=delta,mode="adaptive") d = [ -(gb[i,i]).imag for i in range(len(gb))] # get imaginary part elif mode=="arpack": # arpack diagonalization import klist if nk is None: nk = 10 hkgen = h.get_hk_gen() # get generator ds = [] # empty list for k in klist.kmesh(h.dimensionality,nk=nk): # loop over kpoints print("Doing",k) if random: print("Random k-point") k = np.random.random(3) # random k-point hk = csc_matrix(hkgen(k)) # get Hamiltonian ds += [ldos_arpack(hk,num_wf=num_wf,robust=False, tol=0,e=e,delta=delta)] d = ds[0]*0.0 # inititlize for di in ds: d += di # add d /=len(ds) # normalize d = spatial_dos(h,d) # convert to spatial resolved DOS g = h.geometry # store geometry x,y = g.x,g.y # get the coordinates go = h.geometry.copy() # copy geometry go = go.supercell(nrep) # create supercell write_ldos(go.x,go.y,d.tolist()*(nrep**2),z=go.z) # write in file
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dVdx(self, sys):\n dx2 = sys.positions * sys.positions - self.x0 * self.x0\n return 4 * self.A * sys.positions * dx2", "def _dsurface_domega(self):\n\n dsdo = 0.\n\n return dsdo", "def dVdx(self, sys):\n dx = sys.positions - self.x0\n k = self.omega*self.omega*sys....
[ "0.6104026", "0.5681574", "0.5631583", "0.545737", "0.53376925", "0.5288433", "0.52243865", "0.5200089", "0.5097595", "0.5092457", "0.50678277", "0.50439966", "0.5032084", "0.5018616", "0.50028914", "0.50016", "0.4991276", "0.49865207", "0.49861157", "0.49838954", "0.4969009"...
0.0
-1
Calculate many LDOS, by diagonalizing the Hamiltonian
def multi_ldos(h,es=[0.0],delta=0.001,nrep=3,nk=2,numw=3,random=False): print("Calculating eigenvectors in LDOS") if h.is_sparse: # sparse Hamiltonian from bandstructure import smalleig print("SPARSE Matrix") evals,ws = [],[] # empty list ks = klist.kmesh(h.dimensionality,nk=nk) # get grid hk = h.get_hk_gen() # get generator for k in ks: # loop print("Diagonalizing in LDOS, SPARSE mode") if random: k = np.random.random(3) # random vector print("RANDOM vector in LDOS") e,w = smalleig(hk(k),numw=numw,evecs=True) evals += [ie for ie in e] ws += [iw for iw in w] # evals = np.concatenate([evals,e]) # store # ws = np.concatenate([ws,w]) # store # raise # (evals,ws) = h.eigenvectors(nk) # get the different eigenvectors else: print("DENSE Matrix") (evals,ws) = h.eigenvectors(nk) # get the different eigenvectors ds = [(np.conjugate(v)*v).real for v in ws] # calculate densities del ws # remove the wavefunctions os.system("rm -rf MULTILDOS") # remove folder os.system("mkdir MULTILDOS") # create folder go = h.geometry.copy() # copy geometry go = go.supercell(nrep) # create supercell fo = open("MULTILDOS/MULTILDOS.TXT","w") # files with the names for e in es: # loop over energies print("MULTILDOS for energy",e) out = np.array([0.0 for i in range(h.intra.shape[0])]) # initialize for (d,ie) in zip(ds,evals): # loop over wavefunctions fac = delta/((e-ie)**2 + delta**2) # factor to create a delta out += fac*d # add contribution out /= np.pi # normalize out = spatial_dos(h,out) # resum if necessary name0 = "LDOS_"+str(e)+"_.OUT" # name of the output name = "MULTILDOS/" + name0 write_ldos(go.x,go.y,out.tolist()*(nrep**h.dimensionality), output_file=name) # write in file fo.write(name0+"\n") # name of the file fo.flush() # flush fo.close() # close file # Now calculate the DOS from dos import calculate_dos es2 = np.linspace(min(es),max(es),len(es)*10) ys = calculate_dos(evals,es2,delta) # use the Fortran routine from dos import write_dos write_dos(es2,ys,output_file="MULTILDOS/DOS.OUT")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def diagonals_in_hd():\n number_of_pairs = 100000\n angles_for_d = {}\n for d in (10, 100, 1000):\n number_of_corners = 2 ** d - 1\n first_corner = [random.randint(0, number_of_corners) for _ in range(0, number_of_pairs)]\n second_corner = [random.randint(0, number_of_corners) for _ i...
[ "0.6510543", "0.6441997", "0.616607", "0.6134413", "0.607191", "0.59936696", "0.59168214", "0.58911335", "0.5871839", "0.58278966", "0.5818667", "0.58146787", "0.5806394", "0.5788211", "0.57797736", "0.57464814", "0.57270575", "0.5663071", "0.56431705", "0.56420153", "0.56268...
0.68332195
0
Resums a certain DOS to show only the spatial dependence
def spatial_dos(h, dos):
    """Collapse an orbital-resolved DOS onto lattice sites.

    Spin and/or electron-hole components that live on the same site are
    summed together, so the result carries one value per site.

    Parameters
    ----------
    h : object exposing boolean flags ``has_spin`` and ``has_eh``.
    dos : sequence of per-orbital DOS values.

    Returns
    -------
    numpy.ndarray with one entry per site.
    """
    # Each doubled degree of freedom multiplies the orbitals per site by 2.
    group = (2 if h.has_spin == True else 1) * (2 if h.has_eh == True else 1)
    if group == 1:
        return np.array(dos)
    n_sites = len(dos) // group
    return np.array([sum(dos[group * i:group * (i + 1)])
                     for i in range(n_sites)])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def grass_drass():", "def system_fleet_dimensioning(self):", "def display_ds9(ds9_name, image_id):\n os.system('xpaset {} fits < {}'.format(ds9_name, image_id))", "def execute(self, parameters, messages):\n\n\n\n arcpy.AddMessage(\"default.gdb_path: %s\" % arcpy.env.workspace)\n\n\n arcpy.Im...
[ "0.53796166", "0.5276114", "0.5230221", "0.5218842", "0.5199459", "0.5192862", "0.519138", "0.5182484", "0.5131913", "0.5114713", "0.51103306", "0.5051443", "0.50511736", "0.5021749", "0.4996543", "0.49938875", "0.498227", "0.49754798", "0.49719897", "0.49481302", "0.49481302...
0.0
-1
Write LDOS in a file
def write_ldos(x, y, dos, output_file="LDOS.OUT", z=None):
    """Write an LDOS map to a plain-text file.

    Each row holds ``x y dos`` (plus the matching ``z`` value when a
    ``z`` sequence is supplied), preceded by a one-line header.

    Parameters
    ----------
    x, y : coordinate sequences, iterated in lockstep with ``dos``.
    dos : local density of states values.
    output_file : path of the file to (over)write.
    z : optional third coordinate, indexed in step with the rows.

    Fix: the original opened the file without a context manager, so the
    handle leaked if an exception occurred mid-write; it also kept a
    manual row counter instead of using enumerate.
    """
    with open(output_file, "w") as fd:
        fd.write("# x, y, local density of states\n")
        for ii, (ix, iy, idos) in enumerate(zip(x, y, dos)):
            fd.write(str(ix) + " " + str(iy) + " " + str(idos))
            if z is not None:
                fd.write(" " + str(z[ii]))
            fd.write("\n")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_file(l_dta, outputfile):\n l_dta2 = []\n for row in l_dta:\n s = '\\t'.join(row)\n l_dta2.append(s)\n s_dta = \"\\r\\n\".join(l_dta2)\n try:\n with open(outputfile, 'w') as fd:\n fd.write(s_dta)\n except (IOError,) as e:\n tracker()\n return None",...
[ "0.63052565", "0.5946495", "0.5859641", "0.58320624", "0.58302605", "0.57833433", "0.5733278", "0.5729144", "0.5711805", "0.56135124", "0.5603127", "0.55708736", "0.55650675", "0.55446815", "0.5522803", "0.552071", "0.55121875", "0.5507822", "0.5500877", "0.5473531", "0.54703...
0.6843431
0
Calculate the density of states for a finite system
def ldos_finite(h,e=0.0,n=10,nwf=4,delta=0.0001):
  """Local density of states of a finite chain of ``n`` unit cells.

  The chain Hamiltonian is assembled as a block-tridiagonal sparse
  matrix from ``h.intra``/``h.inter``; the ``nwf`` eigenpairs closest
  to zero energy are found by shift-invert Lanczos, and each one
  contributes with a Lorentzian weight of width ``delta`` centred at
  ``e``.  Writes the spatial LDOS to a file and returns the raw
  per-orbital dos array.
  """
  if h.dimensionality!=1: raise # bare raise: only 1d Hamiltonians supported
  intra = csc(h.intra) # convert to sparse
  inter = csc(h.inter) # convert to sparse
  interH = inter.H # hermitian conjugate (hopping in the other direction)
  m = [[None for i in range(n)] for j in range(n)] # block-tridiagonal layout
  for i in range(n): # add intracell blocks on the diagonal
    m[i][i] = intra
  for i in range(n-1): # add intercell hoppings off-diagonal
    m[i][i+1] = inter
    m[i+1][i] = interH
  m = bmat(m) # assemble the sparse block matrix
  # shift-invert around sigma=0 targets the eigenvalues nearest zero
  (ene,wfs) = slg.eigsh(m,k=nwf,which="LM",sigma=0.0) # diagonalize
  wfs = wfs.transpose() # one row per wavefunction
  dos = (wfs[0].real)*0.0 # zero array with the right shape
  for (ie,f) in zip(ene,wfs): # loop over waves
    c = 1./(1.+((ie-e)/delta)**2) # Lorentzian weight of this eigenvalue
    # NOTE(review): np.abs(f) is |psi|, not |psi|^2 -- confirm intended
    dos += np.abs(f)*c # add contribution
  odos = spatial_dos(h,dos) # resum onto sites
  go = h.geometry.supercell(n) # geometry of the full chain
  write_ldos(go.x,go.y,odos) # write in a file
  return dos # return the dos
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getDensityOfStates(self, Elist):\n\t\treturn _modes.freerotor_densityofstates(Elist, self.frequencies, 1 if self.linear else 0)", "def getDensityOfStates(self, Elist):\n\t\trho = np.zeros((len(Elist)), np.float64)\n\t\trho0 = _modes.hinderedrotor_densityofstates(Elist, self.frequency, self.barrier)\n\t\tfor ...
[ "0.6842676", "0.6838163", "0.66934264", "0.66780734", "0.6619658", "0.6589051", "0.6474599", "0.63931894", "0.63873273", "0.63801605", "0.6272464", "0.62401986", "0.6229511", "0.61976075", "0.6176228", "0.6112314", "0.6104218", "0.60965395", "0.60946393", "0.60946393", "0.609...
0.0
-1
Calculates the LDOS of a cell with a defect, writing the n neighboring cells
def ldos_defect(h,v,e=0.0,delta=0.001,n=1): raise # still not finished import green # number of repetitions rep = 2*n +1 # calculate pristine green function g,selfe = green.supercell_selfenergy(h,e=e,delta=delta,nk=100,nsuper=rep) # now calculate defected green function ez = e + 1j*delta # complex energy emat = np.matrix(np.identity(len(g)))*ez # E +i\delta import supercell pintra = supercell.intra_super2d(h,n=rep) # pristine vintra = supercell.intra_super2d(h,n=rep,central=v) # defective selfe = emat - pintra - g.I # dyson euqation, get selfenergy gv = (emat - vintra -selfe).I # Green function of a vacancy, with selfener return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slabldos(h,energies=np.linspace(-1.0,1.0,40),delta=None,nk=40):\n if h.dimensionality!=2: raise # nope\n ds = ldosmap(h,energies=energies,delta=delta,nk=nk)\n if len(ds[0])!=len(h.geometry.z): \n print(\"Wrong dimensions\",len(ds[0]),len(h.geometry.z))\n raise\n f = open(\"DOSMAP.OUT\",\"w\")\n f.wr...
[ "0.6147432", "0.5673565", "0.5628075", "0.5538174", "0.5529582", "0.54913837", "0.54828596", "0.5471573", "0.54613155", "0.5456147", "0.5440528", "0.5422581", "0.54167837", "0.5389832", "0.5379038", "0.53487825", "0.53432304", "0.53047895", "0.53021795", "0.52806985", "0.5250...
0.572953
1
Calculate next best sample location
def acquisition(self):
    """Expected-improvement acquisition: choose the next sample point.

    Computes EI over the candidate set ``self.X_s`` relative to the best
    (smallest) mean predicted at the already-sampled ``self.gp.X``,
    shifted by the exploration margin ``self.xsi``.

    Returns
    -------
    (best_candidate, ei_values) : the candidate with maximal EI and the
    full array of EI values.
    """
    observed_means, _ = self.gp.predict(self.gp.X)
    candidate_means, candidate_sigmas = self.gp.predict(self.X_s)
    best_observed = np.min(observed_means)
    gains = best_observed - candidate_means - self.xsi
    z_scores = gains / candidate_sigmas
    ei_values = gains * norm.cdf(z_scores) + candidate_sigmas * norm.pdf(z_scores)
    return self.X_s[np.argmax(ei_values)], ei_values
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_next_sample(self):", "def decide_next_query(self):\n for gp in self.gps:\n build_gp_posterior(gp)\n # Find the best mean values for each gp.\n best_f, best_pt, best_gain = None, None, float('-inf')\n queries = self._get_queried_pts()\n for f_idx, f_name in en...
[ "0.71639574", "0.6434643", "0.6302643", "0.6135785", "0.6110737", "0.60888207", "0.601396", "0.60047734", "0.5961023", "0.59551257", "0.5950009", "0.59325665", "0.59085697", "0.58706766", "0.58677566", "0.58429563", "0.58406365", "0.58211887", "0.58168864", "0.5816688", "0.57...
0.0
-1
Optimize for black box function
def optimize(self, iterations=1000):
    """Bayesian-optimize the black-box function ``self.f``.

    Repeatedly asks the acquisition function for a candidate, evaluates
    ``self.f`` there, updates the GP, and tracks the best value seen.
    Stops early when the acquisition proposes the same candidate twice
    in a row (converged) or after ``iterations`` rounds.

    Returns
    -------
    (best_x, best_y) : argmin/argmax location and its function value
    (direction controlled by ``self.minimize``); (None, None) if no
    iteration ran.

    Fixes: the original's "pycodehack" boolean obscured the min/max
    update rule, and it evaluated the expensive ``self.f`` once more on
    the already-converged candidate before breaking.
    """
    prev_candidate = None
    best_x = None
    best_y = None
    while iterations:
        candidate, _ = self.acquisition()
        if candidate == prev_candidate:
            # Acquisition proposed the same point again: converged.
            # Break *before* wasting another expensive f() evaluation.
            break
        new_y = self.f(candidate)
        self.gp.update(candidate, new_y)
        improved = (best_y is None
                    or (self.minimize and new_y < best_y)
                    or (not self.minimize and new_y > best_y))
        if improved:
            best_x, best_y = candidate, new_y
        prev_candidate = candidate
        iterations -= 1
    return best_x, best_y
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def blackwin(x):\n print('blackwin is untested')\n if isinstance(x, (list, tuple, np.ndarray)):\n n = x.shape[1]\n f = blackwin(n)\n\n if len(x.shape) == 3:\n f, _, _ = np.meshgrid(f[0, :], np.arange(\n x.shape[0]), np.arange(x.shape[2]))\n else:\n ...
[ "0.60105205", "0.5797661", "0.57564193", "0.57291317", "0.56770235", "0.56659347", "0.5652914", "0.5614355", "0.56009203", "0.5584697", "0.5571387", "0.5517217", "0.5487182", "0.5478953", "0.54679054", "0.5436867", "0.54337466", "0.53844154", "0.53843486", "0.5379643", "0.536...
0.0
-1
Delete the created network when site creation failed
def _delete_vpn(self, request, vpn):
    """Best-effort cleanup of a VPN created before site addition failed.

    Informs the user and redirects to the failure URL by raising
    ``exceptions.Http302``; on any cleanup error, reports the failure
    via ``exceptions.handle`` instead.

    Fixes: the original referenced undefined ``vpn_name``/``vpn_id``
    (guaranteed NameError) and used a bare ``except:`` that swallowed
    the very Http302 redirect raised in the try block.
    NOTE(review): ``vpn.name``/``vpn.id`` are assumed attributes of the
    vpn object -- confirm against the caller.
    """
    try:
        #api.quantum.network_delete(request, network.id)
        msg = _('Delete the created VPN "%s" '
                'due to site addition failure.') % vpn.name
        LOG.debug(msg)
        redirect = self.get_failure_url()
        messages.info(request, msg)
        raise exceptions.Http302(redirect)
        #return exceptions.RecoverableError
    except exceptions.Http302:
        # The redirect is the intended outcome -- let it propagate.
        raise
    except Exception:
        msg = _('Failed to delete VPN %s') % vpn.id
        LOG.info(msg)
        redirect = self.get_failure_url()
        exceptions.handle(request, msg, redirect=redirect)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_delete_network(self):\n pass", "def delete_networks(self):\n logging.debug(\"cleanup called\")\n # for network in self.networks.key():\n # self.networks[network].delete()\n for network in self.networks.values():\n logging.warn(\"Deleting network '%s'\" %...
[ "0.67859447", "0.6550001", "0.65075684", "0.64984155", "0.646287", "0.6276366", "0.61979026", "0.6197258", "0.61300117", "0.6093675", "0.6090884", "0.5948053", "0.5943114", "0.5920346", "0.5890928", "0.58596253", "0.5844363", "0.5840034", "0.58133364", "0.5790481", "0.5786879...
0.53581345
51
Take JSON as string returned from a Playfield API request and parse data section into list of dicts {field_name=data}
def parse_json(self, json_to_parse):
    """Parse a Playfield API response body.

    Parameters
    ----------
    json_to_parse : str
        Raw JSON whose top-level object holds a ``data`` list of rows.

    Returns
    -------
    list of dict
        One ``{field_name: value}`` dict per row of the data section.
    """
    payload = json.loads(json_to_parse)
    return [dict(row) for row in payload['data']]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dig_fields(json_data):\n data = json.loads(json_data)\n fields = [f for f in data]\n return fields", "def parse(data, datetime_field=None):\n\n parsed_data = json.loads(data)\n if datetime_field:\n if isinstance(parsed_data, list):\n for item in parsed_data:\n ...
[ "0.68718296", "0.66720295", "0.6531521", "0.6174297", "0.6158747", "0.6148615", "0.60345334", "0.60218924", "0.5973685", "0.5921073", "0.5912757", "0.5910022", "0.58769304", "0.58715916", "0.5844883", "0.58414644", "0.5825254", "0.5811341", "0.57988477", "0.57882863", "0.5775...
0.60061646
8
Constructor. ``parent``: reference to the parent widget (QWidget)
def __init__(self, parent=None):
    """Build the main window and wire up the generated UI.

    parent: optional parent widget (QWidget), forwarded to Qt.
    """
    super(MainWindow, self).__init__(parent)
    # setupUi presumably comes from the Designer-generated Ui mixin -- confirm
    self.setupUi(self)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(self, parent):\n self.widget = QtCore.QObject(parent)", "def __init__(self, parent=None):\n super(Dialog, self).__init__(parent)\n self.setupUi(self)", "def __init__(self, parent=None):\n super(Form, self).__init__(parent)\n self.setupUi(self)", "def __init__(sel...
[ "0.80446535", "0.790637", "0.7829028", "0.7829028", "0.7818371", "0.76782995", "0.76558757", "0.76455885", "0.7594716", "0.7536031", "0.75227404", "0.751839", "0.7513868", "0.7468696", "0.74049985", "0.7376986", "0.736306", "0.73554814", "0.7326882", "0.7282546", "0.7282546",...
0.770407
8
Slot documentation goes here.
def on_pushButton_clicked(self):
    """Slot: load the Boston housing dataset and make an 80/20 split.

    Stores self.X_train, self.x_test, self.y_train, self.y_true for the
    prediction slot to consume later.
    """
    # TODO: not implemented yet
    print("加载数据")  # prints "load data"
    boston = datasets.load_boston()
    train = boston.data
    target = boston.target
    self.X_train,self.x_test,self.y_train,self.y_true = train_test_split(train,target,test_size=0.2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slot(self, name):\n raise ClixxException(\"Not implemented.\")", "def play_slot__(self):\n print(\"play_slot__\")\n self.valkkafs_manager.play()", "def add_slot(self, slot):\n slot.set_location(len(self.slots)+1)\n self.slots.append(slot)", "def visit_slot(self, slot_name: str, slo...
[ "0.7317067", "0.6809478", "0.6723371", "0.6583449", "0.63984954", "0.63675296", "0.63494873", "0.6319528", "0.63195074", "0.631188", "0.6152515", "0.6127954", "0.61003345", "0.6093386", "0.60838413", "0.6083497", "0.6056153", "0.60259384", "0.6022296", "0.6002864", "0.5981298...
0.0
-1
Slot documentation goes here.
def on_pushButton_2_clicked(self):
    """Slot: run six pre-trained regression models on the test set and plot.

    Requires the data-loading slot to have run first so that
    ``self.x_test``/``self.y_true`` exist; otherwise a warning dialog
    is shown.

    Fix: the bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``.
    """
    # TODO: not implemented yet
    print("模型预测")  # prints "model prediction"
    # Load the pre-trained models from disk.
    lr_m = joblib.load("model/LR_model.m")
    rr_m = joblib.load("model/RR_model.m")
    llr_m = joblib.load("model/LLR_model.m")
    knnr_m = joblib.load("model/KNNR_model.m")
    dr_m = joblib.load("model/DR_model.m")
    svmr_m = joblib.load("model/SVMR_model.m")
    try:
        y_LR = lr_m.predict(self.x_test)
        y_RR = rr_m.predict(self.x_test)
        y_LLR = llr_m.predict(self.x_test)
        y_KNNR = knnr_m.predict(self.x_test)
        y_DR = dr_m.predict(self.x_test)
        y_SVMR = svmr_m.predict(self.x_test)
        model_pre = pd.DataFrame({'LinearRegression()':list(y_LR),'Ridge()':list(y_RR),'Lasso()':list(y_LLR), \
            'KNeighborsRegressor()':list(y_KNNR),'DecisionTreeRegressor()':list(y_DR),'SVR()':list(y_SVMR)})
        self.model_plot(self.y_true, model_pre)
        self.graphicsView.setStyleSheet("border-image: url(model_plot.png);")
    except Exception:
        # Most likely self.x_test is missing because data was never loaded.
        my_button_w3=QMessageBox.warning(self,"严重警告", '请务必先加载数据然后再点击模型预测!!!', QMessageBox.Ok|QMessageBox.Cancel, QMessageBox.Ok)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slot(self, name):\n raise ClixxException(\"Not implemented.\")", "def play_slot__(self):\n print(\"play_slot__\")\n self.valkkafs_manager.play()", "def add_slot(self, slot):\n slot.set_location(len(self.slots)+1)\n self.slots.append(slot)", "def visit_slot(self, slot_name: str, slo...
[ "0.73193145", "0.6811926", "0.6725808", "0.65843606", "0.6397118", "0.6368233", "0.63534164", "0.63223046", "0.6321543", "0.6312873", "0.6151129", "0.6127017", "0.6102243", "0.60954756", "0.60865855", "0.6085341", "0.60555625", "0.6027709", "0.6023868", "0.60039294", "0.59816...
0.0
-1
Slot documentation goes here.
def on_action_triggered(self):
    """Slot for the 'Open' menu action: shows a placeholder dialog."""
    # TODO: not implemented yet
    print('打开')  # prints "open"
    my_button_open = QMessageBox.about(self, '打开', '点击我打开某些文件')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slot(self, name):\n raise ClixxException(\"Not implemented.\")", "def play_slot__(self):\n print(\"play_slot__\")\n self.valkkafs_manager.play()", "def add_slot(self, slot):\n slot.set_location(len(self.slots)+1)\n self.slots.append(slot)", "def visit_slot(self, slot_name: str, slo...
[ "0.73193145", "0.6811926", "0.6725808", "0.65843606", "0.6397118", "0.6368233", "0.63534164", "0.63223046", "0.6321543", "0.6312873", "0.6151129", "0.6127017", "0.6102243", "0.60954756", "0.60865855", "0.6085341", "0.60555625", "0.6027709", "0.6023868", "0.60039294", "0.59816...
0.0
-1
Slot documentation goes here.
def on_action_2_triggered(self):
    """Slot for the 'Close' menu action: exits the application.

    NOTE(review): sys.exit(0) terminates the whole process rather than
    just closing the window -- confirm that is intended.
    """
    # TODO: not implemented yet
    print('关闭')  # prints "close"
    sys.exit(0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slot(self, name):\n raise ClixxException(\"Not implemented.\")", "def play_slot__(self):\n print(\"play_slot__\")\n self.valkkafs_manager.play()", "def add_slot(self, slot):\n slot.set_location(len(self.slots)+1)\n self.slots.append(slot)", "def visit_slot(self, slot_name: str, slo...
[ "0.73193145", "0.6811926", "0.6725808", "0.65843606", "0.6397118", "0.6368233", "0.63534164", "0.63223046", "0.6321543", "0.6312873", "0.6151129", "0.6127017", "0.6102243", "0.60954756", "0.60865855", "0.6085341", "0.60555625", "0.6027709", "0.6023868", "0.60039294", "0.59816...
0.0
-1
Slot documentation goes here.
def on_action_3_triggered(self):
    """Slot for the 'Contact us' menu action: shows an about dialog."""
    # TODO: not implemented yet
    print('联系我们')  # prints "contact us"
    my_button_con_me = QMessageBox.about(self, '联系我们', '这个位置放的是联系我们的介绍')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slot(self, name):\n raise ClixxException(\"Not implemented.\")", "def play_slot__(self):\n print(\"play_slot__\")\n self.valkkafs_manager.play()", "def add_slot(self, slot):\n slot.set_location(len(self.slots)+1)\n self.slots.append(slot)", "def visit_slot(self, slot_name: str, slo...
[ "0.7317067", "0.6809478", "0.6723371", "0.6583449", "0.63984954", "0.63675296", "0.63494873", "0.6319528", "0.63195074", "0.631188", "0.6152515", "0.6127954", "0.61003345", "0.6093386", "0.60838413", "0.6083497", "0.6056153", "0.60259384", "0.6022296", "0.6002864", "0.5981298...
0.0
-1
Slot documentation goes here.
def on_action_4_triggered(self):
    """Slot for the 'About us' menu action: shows an about dialog."""
    # TODO: not implemented yet
    print('关于我们')  # prints "about us"
    my_button_about_me = QMessageBox.about(self, '关于我们', '这个位置放的是关于我们的介绍')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slot(self, name):\n raise ClixxException(\"Not implemented.\")", "def play_slot__(self):\n print(\"play_slot__\")\n self.valkkafs_manager.play()", "def add_slot(self, slot):\n slot.set_location(len(self.slots)+1)\n self.slots.append(slot)", "def visit_slot(self, slot_name: str, slo...
[ "0.73193145", "0.6811926", "0.6725808", "0.65843606", "0.6397118", "0.6368233", "0.63534164", "0.63223046", "0.6321543", "0.6312873", "0.6151129", "0.6127017", "0.6102243", "0.60954756", "0.60865855", "0.6085341", "0.60555625", "0.6027709", "0.6023868", "0.60039294", "0.59816...
0.0
-1
Slot documentation goes here.
def on_action_QT_triggered(self):
    """Slot for the 'About Qt' menu action: shows Qt's built-in dialog."""
    # TODO: not implemented yet
    print('关于qt')  # prints "about qt"
    my_button_about_QT = QMessageBox.aboutQt(self, '关于QT')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def slot(self, name):\n raise ClixxException(\"Not implemented.\")", "def play_slot__(self):\n print(\"play_slot__\")\n self.valkkafs_manager.play()", "def add_slot(self, slot):\n slot.set_location(len(self.slots)+1)\n self.slots.append(slot)", "def visit_slot(self, slot_name: str, slo...
[ "0.73193145", "0.6811926", "0.6725808", "0.65843606", "0.6397118", "0.6368233", "0.63534164", "0.63223046", "0.6321543", "0.6312873", "0.6151129", "0.6127017", "0.6102243", "0.60954756", "0.60865855", "0.6085341", "0.60555625", "0.6027709", "0.6023868", "0.60039294", "0.59816...
0.0
-1
Create a new Band. Created band is a python representation of the Band.Create case class in the scala datamodel
def __init__(self, name, number, wavelength):
    """Create a new Band.

    Python-side counterpart of the ``Band.Create`` case class in the
    Scala datamodel.

    name: band name.
    number: band index/number.
    wavelength: band wavelength.
    """
    self.name = name
    self.number = number
    self.wavelength = wavelength
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_bands(band_number):\n try:\n band_dict = band_lookup[band_number]\n except KeyError:\n raise KeyError('Band %s does not exist', band_number)\n\n return [Band(**band_dict)]", "def __init__(self, lbda=None, bandname=None, zp=None, \n mjd=None, empty=False,**kwa...
[ "0.6518197", "0.64347064", "0.5994854", "0.5890965", "0.57064843", "0.57064843", "0.5659268", "0.56355095", "0.5517736", "0.54783046", "0.5404359", "0.5397878", "0.5396625", "0.53284055", "0.5327761", "0.5327761", "0.53230125", "0.5319953", "0.52875245", "0.5253581", "0.52532...
0.0
-1
Associates the specified blobs with the given encryption keys.
def WriteBlobEncryptionKeys(
    self,
    key_names: Dict[rdf_objects.BlobID, str],
) -> None:
    """Associate each blob ID with the name of its encryption key.

    Existing associations for other blobs are kept; entries for blobs
    already present are overwritten.
    """
    for blob_id, key_name in key_names.items():
        self.blob_keys[blob_id] = key_name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def volume_encryption_keys(self, volume_encryption_keys):\n\n self._volume_encryption_keys = volume_encryption_keys", "def test_blob_key_stored():\n\tbackup_and_restore(\n\t\tlambda context: put_keys(lib.SET, BLOB_KEYS, \"foobar\", True),\n\t\tNone,\n\t\tlambda context: check_keys(lib.SET, BLOB_KEYS, \"fo...
[ "0.5732032", "0.55105233", "0.54380447", "0.5313522", "0.5286251", "0.5265419", "0.5222959", "0.52177703", "0.5150052", "0.51047397", "0.5063412", "0.50368476", "0.49367145", "0.49267337", "0.49244836", "0.4919951", "0.49074957", "0.49044165", "0.49000615", "0.4885995", "0.48...
0.7061965
0
Retrieves encryption keys associated with blobs.
def ReadBlobEncryptionKeys(
    self,
    blob_ids: Collection[rdf_objects.BlobID],
) -> Dict[rdf_objects.BlobID, Optional[str]]:
    """Look up the stored encryption-key name for each blob ID.

    Blobs with no recorded key map to None.
    """
    return {blob_id: self.blob_keys.get(blob_id) for blob_id in blob_ids}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_keys(self):\n bucket = self.resource.Bucket(self.bucketname)\n return [key.key for key in bucket.objects.all()]", "def get_encrypted_data_keys(self, data_key, encryption_context):\n encrypted_data_keys = [message.header.EncryptedDataKey(b'aws-kms',\n ...
[ "0.6437575", "0.6124688", "0.61170375", "0.5970704", "0.5920652", "0.5918394", "0.5872749", "0.5832748", "0.58217853", "0.58021", "0.5690256", "0.56733716", "0.5656133", "0.56275576", "0.56275576", "0.5610084", "0.56076646", "0.55941993", "0.55817664", "0.55722076", "0.557199...
0.7701036
0
Intercept http and mock client (get_repo)
def test_branch_can_be_copied():
    """Copying the default branch creates a new git ref via the GitHub API.

    All HTTP traffic is intercepted with ``responses`` mocks; fixtures
    (setup_org/setup_repo, body payloads) come from the test module.
    """
    setup_org()
    setup_repo()
    # Mock the lookup of the source branch.
    responses.add(responses.GET,
                  "https://api.github.com/repos/my-org/my-repo/branches/master",
                  body=my_repo_branch,
                  content_type='text/json',
                  status=200)
    # Mock the ref-creation endpoint (201 Created).
    responses.add(responses.POST,
                  "https://api.github.com/repos/my-org/my-repo/git/refs",
                  body=my_new_ref,
                  content_type='text/json',
                  status=201)
    # Mock the lookup of the freshly created branch.
    responses.add(responses.GET,
                  "https://api.github.com/repos/my-org/my-repo/branches/main",
                  body=my_repo_branch,
                  content_type='text/json',
                  status=200)
    token = '__dummy__'
    org = "my-org"
    client = GithubRestClient(token)
    new_branch_name = "main"
    repo = get_repository(client, org, "my-repo")
    new_branch = copy_branch(repo, repo.default_branch, new_branch_name)
    assert None is not new_branch
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_issue_50():\n utils.set_http_mock()\n\n client = Github(proxy_host=\"my.proxy.com\", proxy_port=9000)\n setup_args = client.request._http.called_with\n assert_equals(type(setup_args['proxy_info']), httplib2.ProxyInfo)\n assert_equals(setup_args['proxy_info'].proxy_host, 'my.proxy.com')\n ...
[ "0.66696745", "0.6602459", "0.63437665", "0.6236709", "0.6184174", "0.61553884", "0.6148627", "0.6107372", "0.5980647", "0.593272", "0.59284574", "0.5912947", "0.58175915", "0.58170277", "0.5788988", "0.5725378", "0.5725306", "0.5720646", "0.56785136", "0.5651482", "0.5646013...
0.0
-1
Intercept http and mock client (get_repo)
def test_protection_can_be_copied():
    """Branch-protection settings are copied from master to main.

    HTTP traffic is intercepted with ``responses``; asserts the PUT
    payload keeps ``enforce_admins`` enabled.
    """
    setup_org("octocat")
    protection_url = "https://api.github.com/repos/octocat/Hello-World/branches/master/protection"
    responses.add(responses.GET,
                  protection_url,
                  status=200,
                  content_type='text/json',
                  body=branch_protection)
    put_url = "https://api.github.com/repos/octocat/Hello-World/branches/main/protection"
    responses.add(responses.PUT, put_url)
    token = '__dummy__'
    org = "octocat"
    repo = "Hello-World"
    client = GithubRestClient(token)
    success = copy_branch_protection(client, org, repo, 'master', 'main')
    assert True == success
    # Inspect the body actually PUT to the protection endpoint.
    last_request = json.loads(responses.calls[-1].request.body)
    assert True == last_request['enforce_admins']
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_issue_50():\n utils.set_http_mock()\n\n client = Github(proxy_host=\"my.proxy.com\", proxy_port=9000)\n setup_args = client.request._http.called_with\n assert_equals(type(setup_args['proxy_info']), httplib2.ProxyInfo)\n assert_equals(setup_args['proxy_info'].proxy_host, 'my.proxy.com')\n ...
[ "0.6668953", "0.6600891", "0.63424224", "0.62350804", "0.61830807", "0.6153527", "0.61463815", "0.6108667", "0.5978955", "0.59318805", "0.5927703", "0.59109044", "0.58161503", "0.5816118", "0.57876474", "0.5726691", "0.57236135", "0.5717952", "0.5676887", "0.5649835", "0.5643...
0.0
-1
Supports sum([dataset1, dataset2, dataset3]).
def __radd__(self, other):
    """Right-addition hook so ``sum([dataset1, dataset2, dataset3])`` works.

    ``sum`` starts from the integer 0; adding 0 yields the dataset
    unchanged, while anything else is delegated to ``__add__``.
    """
    return self if other == 0 else self.__add__(other)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sum_datasets(dslist):\n #Assume all same length, same axis values\n newds = zeros_like(dslist[0])\n AddCifMetadata.add_standard_metadata(newds)\n title_info = \"\"\n proc_info = \"\"\"This dataset was created by summing points from multiple datasets. Points were \n assumed to coincide exactly...
[ "0.69257575", "0.6655116", "0.6303091", "0.6137", "0.59186286", "0.5906148", "0.580326", "0.576853", "0.576853", "0.5768271", "0.5721376", "0.5690767", "0.5661961", "0.56405276", "0.56123817", "0.56020796", "0.55628824", "0.55342525", "0.5516941", "0.55149585", "0.55093265", ...
0.0
-1
Parses data list and returns the number of person IDs and the number of camera views.
def parse_data(self, data):
    """Count distinct person IDs and camera views in a data list.

    Each entry of ``data`` is a tuple whose second element is the
    person ID and whose third element is the camera ID.

    Returns
    -------
    (num_pids, num_cams) : tuple of int
    """
    unique_pids = {entry[1] for entry in data}
    unique_cams = {entry[2] for entry in data}
    return len(unique_pids), len(unique_cams)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_num_cams(self, data):\n cams = set()\n for items in data:\n camid = items[2]\n cams.add(camid)\n return len(cams)", "def get_num_pids(self, data):\n pids = set()\n for items in data:\n pid = items[1]\n pids.add(pid)\n r...
[ "0.6483834", "0.5873225", "0.56902164", "0.559616", "0.5581715", "0.55694574", "0.55310565", "0.5526454", "0.5519276", "0.5515513", "0.55004495", "0.543157", "0.53432065", "0.5338561", "0.5330016", "0.5312213", "0.5268463", "0.5256779", "0.52174443", "0.520235", "0.5201785", ...
0.6852167
0
Returns the number of training person identities.
def get_num_pids(self, data):
    """Return the number of distinct training person identities."""
    num_pids, _ = self.parse_data(data)
    return num_pids
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def training_set_count(self) -> int:\n return pulumi.get(self, \"training_set_count\")", "def get_number_of_training(self):\n return self.n_train", "def num_training_examples(self):", "def get_nb_personne(self):\n return self.nb_personne", "def participant_count(self) -> int:\n ...
[ "0.7130858", "0.7117933", "0.67800456", "0.664693", "0.63542926", "0.63365936", "0.6317348", "0.62643784", "0.62291634", "0.60861695", "0.6079785", "0.6076607", "0.604624", "0.6041788", "0.5999601", "0.59986293", "0.5977678", "0.5972249", "0.5956189", "0.59479755", "0.5901012...
0.5565302
65
Returns the number of training cameras.
def get_num_cams(self, data):
    """Return the number of distinct training cameras."""
    _, num_cams = self.parse_data(data)
    return num_cams
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_total_cameras(self) -> int:\n return self.num_cameras", "def get_cameras_number():\n lib.initlib()\n return lib.is_GetNumberOfCameras()", "def ncameras(self):\n n = ct.c_long()\n self.lib.GetAvailableCameras(ct.pointer(n))\n return n.value", "def numberOfCamera():\n ...
[ "0.8353758", "0.80624014", "0.7873767", "0.7767602", "0.7643237", "0.7341902", "0.73000854", "0.71402013", "0.67232484", "0.6706012", "0.6674748", "0.6601394", "0.65858144", "0.6535598", "0.6527929", "0.64885134", "0.6479306", "0.6477255", "0.6447356", "0.6396966", "0.6382521...
0.0
-1
Combines train, query and gallery in a dataset for training.
def combine_all(self):
    """Merge query and gallery into the training split.

    Query/gallery entries get their pid and camid prefixed with the
    dataset name (to keep IDs disjoint from the original train IDs);
    junk pids are skipped.  Training entries are kept as-is.
    Updates ``self.train`` and ``self.num_train_pids`` in place.
    """
    merged = copy.deepcopy(self.train)
    prefix = self.dataset_name + "_"
    for split in (self.query, self.gallery):
        for img_path, pid, camid in split:
            if pid in self._junk_pids:
                continue
            merged.append((img_path, prefix + str(pid), prefix + str(camid)))
    self.train = merged
    self.num_train_pids = self.get_num_pids(self.train)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def combine_all(self):\n if self._train_only:\n return\n\n combined = copy.deepcopy(self.train)\n\n # relabel pids in gallery (query shares the same scope)\n g_pids = set()\n for items in self.gallery:\n pid = items[1]\n if pid in self._junk_pids:...
[ "0.69598746", "0.67574775", "0.6717249", "0.66458243", "0.66314375", "0.65966815", "0.65671974", "0.65517074", "0.65076643", "0.64948964", "0.64792395", "0.6467504", "0.6462277", "0.64389426", "0.64315575", "0.641234", "0.6410118", "0.6398891", "0.63852507", "0.63804436", "0....
0.69694567
0
Checks if required files exist before going deeper.
def check_before_run(self, required_files):
    """Ensure every required path exists before proceeding.

    Parameters
    ----------
    required_files : str or iterable of str
        A single path or a collection of paths to verify.

    Raises
    ------
    RuntimeError if any path does not exist.
    """
    paths = [required_files] if isinstance(required_files, str) else required_files
    for fpath in paths:
        if not os.path.exists(fpath):
            raise RuntimeError('"{}" is not found'.format(fpath))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_check(self):\n for required_file in self._required_files:\n # Check if required files are there\n # FIXME Sometimes it doesn't work :?\n if required_file not in self.files:\n self.valid = False", "def check_missing_files(self):\n files = [get...
[ "0.7485762", "0.7367102", "0.7227755", "0.70789534", "0.70283055", "0.69488746", "0.6946507", "0.672183", "0.6712726", "0.6712726", "0.6712726", "0.6712726", "0.6712726", "0.6712726", "0.6712726", "0.6698533", "0.66718006", "0.66628796", "0.6643828", "0.6639172", "0.66012675"...
0.7092478
3
Perform an insert or update.
def _do_upsert(self, conn, item, spider):
    """Insert the scraped item, or update its price if the URL exists.

    Fixes in the update path: the original built ``"price=%ss"`` (a
    malformed placeholder that raised TypeError at format time), had no
    WHERE clause (would have updated every row), and passed a bare value
    instead of a parameter tuple.
    NOTE(review): the table name is string-interpolated from
    ``spider.name``; acceptable only because spider names are internal,
    not user input.
    """
    query_check = "select * from %s where url = %%s" % spider.name
    conn.execute(query_check, (item['url'],))
    result = conn.fetchone()
    if result:
        query_update = "UPDATE %s SET price=%%s WHERE url=%%s" % spider.name
        conn.execute(query_update, (item['price'], item['url']))
        log.msg("Item updated in db: %s" % item, level=log.DEBUG)
    else:
        query_insert = ("INSERT INTO %s (title, company, description, price, "
                        "status, image, url, category) "
                        "VALUES (%%s, %%s, %%s, %%s, %%s, %%s, %%s, %%s)"
                        % spider.name)
        conn.execute(query_insert, (item['title'], item['company'],
                                    item['description'], item['price'],
                                    item['status'], item['image'],
                                    item['url'], item['category']))
        log.msg("Item stored in db: %s" % item, level=log.DEBUG)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def insert_or_update(self, table, record):\n try:\n request = s.query(table=table, query={'sys_id': record['sys_id']})\n #request.get_single()\n response = request.update(record)\n print >> sys.stderr, 'update'\n except NoResults:\n # Record does...
[ "0.6802625", "0.6760875", "0.67485595", "0.66731614", "0.66411924", "0.621796", "0.6103661", "0.60958934", "0.6045349", "0.6037797", "0.6003509", "0.60019463", "0.5977113", "0.59489703", "0.5928445", "0.5923789", "0.59120774", "0.5863009", "0.58497566", "0.582207", "0.5809673...
0.54526764
80
Assigns every training point a weight equal to 1/N, where N is the number of training points. Returns a dictionary mapping points to weights.
def initialize_weights(training_points):
    """Assign every training point the uniform weight 1/N.

    N is the number of training points; weights are built with the
    module's ``make_fraction`` helper so they stay exact fractions.

    Returns a dict mapping each point to its weight.
    """
    total = len(training_points)
    return {point: make_fraction(1, total) for point in training_points}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def default_weights(n):\n return np.array([1/n for _ in range(n)])", "def uniform_weights(n):\n return np.ones((n, 1)) / n", "def update_weights(point_to_weight, misclassified_points, error_rate):\n for p in point_to_weight:\n if p in misclassified_points:\n point_to_weight[p] *= mak...
[ "0.6437919", "0.6396687", "0.624045", "0.61508226", "0.6023781", "0.6023781", "0.5951131", "0.59024775", "0.58835477", "0.58784914", "0.5861894", "0.58606887", "0.5855883", "0.5820768", "0.5816859", "0.5815168", "0.58121", "0.57764375", "0.57757556", "0.5774974", "0.5753624",...
0.8002009
0
Given a dictionary mapping training points to their weights, and another dictionary mapping classifiers to the training points they misclassify, returns a dictionary mapping classifiers to their error rates.
def calculate_error_rates(point_to_weight, classifier_to_misclassified):
    """Compute each classifier's error rate.

    A classifier's error rate is the total weight of the training
    points it misclassifies.

    Parameters
    ----------
    point_to_weight : dict mapping training point -> weight.
    classifier_to_misclassified : dict mapping classifier -> iterable of
        the points that classifier gets wrong.

    Returns
    -------
    dict mapping classifier -> error rate (0 for a classifier that
    misclassifies nothing).
    """
    return {
        classifier: sum((point_to_weight[point] for point in missed), 0)
        for classifier, missed in classifier_to_misclassified.items()
    }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_weights(point_to_weight, misclassified_points, error_rate):\n for p in point_to_weight:\n if p in misclassified_points:\n point_to_weight[p] *= make_fraction(1,2)*make_fraction(1, error_rate)\n else:\n point_to_weight[p] *= make_fraction(1,2)*make_fraction(1, 1-err...
[ "0.6313117", "0.5606973", "0.55354184", "0.55307525", "0.5491061", "0.54466426", "0.5413339", "0.5376725", "0.5321494", "0.5300928", "0.5286332", "0.52586734", "0.5237726", "0.5230793", "0.52304846", "0.5226977", "0.5218962", "0.5217092", "0.51976115", "0.5183116", "0.5169867...
0.76960677
0
Given a dictionary mapping classifiers to their error rates, returns the best classifier, or raises NoGoodClassifiersError if best classifier has error rate 1/2. best means 'smallest error rate' if use_smallest_error is True, otherwise 'error rate furthest from 1/2'.
def pick_best_classifier(classifier_to_error_rate, use_smallest_error=True): best_classifier = None if use_smallest_error: best_classifier = min(classifier_to_error_rate, key=classifier_to_error_rate.get) else: best_classifier = max(classifier_to_error_rate, key=lambda x : abs(classifier_to_error_rate[x]-0.5)) if make_fraction(classifier_to_error_rate[best_classifier]) == make_fraction(1,2): raise NoGoodClassifiersError #find a classifier that comes before this one alphabetically for c in classifier_to_error_rate: if use_smallest_error and classifier_to_error_rate[c] == classifier_to_error_rate[best_classifier]: if c < best_classifier: best_classifier = c if not use_smallest_error: error = make_fraction(abs(classifier_to_error_rate[best_classifier] - 0.5)) check_error = make_fraction(abs(classifier_to_error_rate[c] -0.5)) if error == check_error: if c < best_classifier: best_classifier = c return best_classifier
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_best_classifier(data, possible_classifiers, target_classifier):\n best_disorder_score = 10000000\n best_classifier = None\n try:\n for classifier in possible_classifiers:\n total_disorder = average_test_disorder(data, classifier, target_classifier)\n if total_disorder...
[ "0.65992683", "0.6348734", "0.60387164", "0.56676745", "0.5634381", "0.5621993", "0.55739546", "0.55624396", "0.5539638", "0.5454468", "0.5443642", "0.5441887", "0.5408296", "0.53730357", "0.531757", "0.526106", "0.5247903", "0.52269906", "0.52153784", "0.52098244", "0.519527...
0.80622786
0
Given a classifier's error rate (a number), returns the voting power (aka alpha, or coefficient) for that classifier.
def calculate_voting_power(error_rate): if error_rate == 0: return INF if error_rate == 1: return -INF return 0.5*ln(make_fraction(1-error_rate, error_rate))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exponential_vote(score, category):\n status = \"\"\n\n try:\n max_vote = constants.MAX_VOTE[category]\n except:\n max_vote = constants.MAX_TASK_REQUEST\n\n else:\n power = constants.EXP_POWER\n weight = pow(\n score / 100.0,\n power - (score / 100.0...
[ "0.6483174", "0.60067403", "0.58584934", "0.57399225", "0.5617777", "0.55273247", "0.5520179", "0.5502458", "0.5488472", "0.54872227", "0.5482623", "0.5475527", "0.5460961", "0.5439238", "0.54219", "0.54015714", "0.53638995", "0.53533477", "0.53349483", "0.5307904", "0.529213...
0.7368655
0
Given an overall classifier H, a list of all training points, and a dictionary mapping classifiers to the training points they misclassify, returns a set containing the training points that H misclassifies. H is represented as a list of (classifier, voting_power) tuples.
def get_overall_misclassifications(H, training_points, classifier_to_misclassified): misclassified = [] for p in training_points: score = 0 for tup in H: c = tup[0] voting_power = tup[1] if p in classifier_to_misclassified[c]: score -= voting_power else: score += voting_power if score <= 0: misclassified.append(p) return set(misclassified)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def misclassified_training_points(svm):\n wrong = []\n for point in svm.training_points:\n if point.classification is not classify(svm, point):\n wrong.append(point)\n return set(wrong)", "def digits_make_classifiers_to_misclassified(X,Y,classifiers,ids_to_points):\n\toutput = {key: []...
[ "0.7052224", "0.6119313", "0.54236513", "0.5231023", "0.5223641", "0.51209354", "0.510333", "0.5085148", "0.5023613", "0.5006669", "0.49687526", "0.4961643", "0.49522075", "0.49459502", "0.49397606", "0.49372053", "0.49112162", "0.48705956", "0.48250222", "0.48217845", "0.481...
0.85402024
0
Given an overall classifier H, a list of all training points, a dictionary mapping classifiers to the training points they misclassify, and a mistake tolerance (the maximum number of allowed misclassifications), returns False if H misclassifies more points than the tolerance allows, otherwise True. H is represented as a list of (classifier, voting_power) tuples.
def is_good_enough(H, training_points, classifier_to_misclassified, mistake_tolerance=0): misclassified = get_overall_misclassifications(H, training_points, classifier_to_misclassified) if len(misclassified) > mistake_tolerance: return False return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_overall_misclassifications(H, training_points, classifier_to_misclassified):\n misclassified = []\n\n for p in training_points:\n score = 0\n for tup in H:\n c = tup[0]\n voting_power = tup[1]\n if p in classifier_to_misclassified[c]:\n sc...
[ "0.68727624", "0.59430206", "0.58152044", "0.5540413", "0.55316037", "0.5448521", "0.5405066", "0.5398594", "0.5388817", "0.5313519", "0.53004795", "0.5284414", "0.5269668", "0.5265305", "0.5240204", "0.5234198", "0.520831", "0.5201402", "0.5200734", "0.51976657", "0.5176463"...
0.7659174
0
Given a dictionary mapping training points to their old weights, a list of training points misclassified by the current weak classifier, and the error rate of the current weak classifier, returns a dictionary mapping training points to their new weights. This function is allowed (but not required) to modify the input dictionary point_to_weight.
def update_weights(point_to_weight, misclassified_points, error_rate): for p in point_to_weight: if p in misclassified_points: point_to_weight[p] *= make_fraction(1,2)*make_fraction(1, error_rate) else: point_to_weight[p] *= make_fraction(1,2)*make_fraction(1, 1-error_rate) return point_to_weight
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calculate_error_rates(point_to_weight, classifier_to_misclassified):\n ans = {}\n for c in classifier_to_misclassified:\n misclassified = classifier_to_misclassified[c]\n ans[c] = 0\n for p in misclassified:\n ans[c] += point_to_weight[p]\n return ans", "def keypoint_...
[ "0.67305875", "0.58836514", "0.5748413", "0.5478308", "0.54563385", "0.53294194", "0.5201591", "0.51845413", "0.5084596", "0.50814384", "0.5060465", "0.50252044", "0.5015587", "0.50075173", "0.49619642", "0.49579272", "0.49395669", "0.4937159", "0.49219316", "0.490734", "0.48...
0.78441054
0
Performs the Adaboost algorithm for up to max_rounds rounds. Returns the resulting overall classifier H, represented as a list of (classifier, voting_power) tuples.
def adaboost(training_points, classifier_to_misclassified, use_smallest_error=True, mistake_tolerance=0, max_rounds=INF): point_to_weight = initialize_weights(training_points) H = [] # (classifier, voting_power) while True: # exit conditions if is_good_enough(H, training_points, classifier_to_misclassified, mistake_tolerance): break if max_rounds == 0: break classifier_to_error_rate = calculate_error_rates(point_to_weight, classifier_to_misclassified) best_classifier = None try: best_classifier = pick_best_classifier(classifier_to_error_rate, use_smallest_error) except NoGoodClassifiersError: break max_rounds -= 1 error_rate = classifier_to_error_rate[best_classifier] H.append((best_classifier, calculate_voting_power(error_rate))) point_to_weight = update_weights(point_to_weight, classifier_to_misclassified[best_classifier], error_rate) return H
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modelAdaBoost():\n num_estimators = [1,5,10,50,100,150]\n learning_rate = 0.1\n max_depth = 3\n base_estimate = DecisionTreeClassifier(max_depth=max_depth)\n random_state = 20 # Do not change this random_state\n \n obj_boost = []\n \n \"\"\" \n Create a list of objects for the cla...
[ "0.5816989", "0.5509983", "0.541834", "0.53876275", "0.53350043", "0.5324034", "0.5317509", "0.5210553", "0.51253355", "0.51249367", "0.5117827", "0.50774145", "0.50676054", "0.5053001", "0.5049434", "0.50463754", "0.50307447", "0.5024344", "0.50045466", "0.4988225", "0.49871...
0.6702093
0
Create a plane through a given point with given normal and surface material
def __init__(self, point, normal, material): self.point = point self.norm = unit(normal) self.mat = material
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def project_point_plane(point, plane):\n base, normal = plane\n normal = normalize_vector(normal)\n vector = subtract_vectors(point, base)\n snormal = scale_vector(normal, dot_vectors(vector, normal))\n return subtract_vectors(point, snormal)", "def plot_plane(unit_normal, x_array, y_array, fore):...
[ "0.6926132", "0.67918247", "0.66280645", "0.6622853", "0.6570255", "0.6499382", "0.64442146", "0.6332076", "0.62797856", "0.62646264", "0.6246257", "0.6151091", "0.6120761", "0.6096576", "0.60693115", "0.59628874", "0.5912617", "0.58920056", "0.58715844", "0.585937", "0.58575...
0.683124
1
Returns a hit, or None if the ray is parallel to the plane
def intersect(self, ray): t = None hit = None angle = ray.dir.dot(self.norm) if angle != 0: t = (self.point - ray.start).dot(self.norm) / angle if angle < 0: hit = Hit(self, ray, t, float('inf'), self.norm, self.mat) else: hit = Hit(self, ray, float('-inf'), t, self.norm, self.mat) else: vector = unit(ray.start - self.point) if vector.dot(self.norm) < 0: hit = Hit(self, ray, float('-inf'), float('inf'), self.norm, self.mat) else: return None if (self.mat.texture is not None and not isninf(hit.entry)) > 0: hit.texCords = self.texCords(ray.pos(t)) return hit
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rayIntersection(self, ray):\n #t = \"what we are trying to find\"\n l = -ray.mDirection\n l0 = ray.mOrigin\n n = self.mNormal\n p0 = self.mDistance * n\n #p = l0 + l * t\n\n if l.dot(n) > 0:\n v = p0 - l0\n t = -(v.dot(n) / l.dot(n))\n ...
[ "0.724688", "0.7169482", "0.7158089", "0.71482855", "0.67915195", "0.67516196", "0.67201257", "0.66962445", "0.6689992", "0.6683316", "0.65686566", "0.6530607", "0.64233273", "0.6218107", "0.6107053", "0.60904384", "0.60822034", "0.60539466", "0.60471374", "0.5973455", "0.595...
0.7633641
0
return the number of scalar components
def getNumberOfScalarComponents(self): return self.numberOfComponents
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def n_components(self):\n return 1", "def n_components(self):\n return self._components.shape[0]", "def n_cs(self):\n return np.size(self._cs, 0)", "def components(self):\n return self.num_components", "def __len__(self):\n num_x, num_y = self.conv_dims()\n return ...
[ "0.74888134", "0.7376706", "0.7100258", "0.70853853", "0.70716816", "0.6938483", "0.69184744", "0.68926495", "0.68802017", "0.68690693", "0.6788687", "0.67648643", "0.6762822", "0.6739765", "0.67380327", "0.67363364", "0.671714", "0.6700786", "0.66510797", "0.6639145", "0.663...
0.8905581
0
Set the color transfer function
def setColorTransferFunction(self, ctf): self.ctf = ctf
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_color(self):\r\n \r\n \r\n colorset = self.colorset\r\n \r\n self.grfx[0].colorset = colorset\r\n pass", "def setColorDiffuse(*args):", "def getColorTransferFunction(self):\n\t\treturn self.ctf", "def set_color(self):\n self.image[self.x, self.y] = self.color\n if s...
[ "0.67156917", "0.6633878", "0.6617592", "0.63400304", "0.6332681", "0.6234133", "0.62047946", "0.6202683", "0.61825573", "0.60994345", "0.6095942", "0.601829", "0.5967596", "0.5921991", "0.5918888", "0.5858037", "0.5846267", "0.5846243", "0.5836248", "0.5822841", "0.5788105",...
0.81864583
0
Return a flag indicating whether the images are 2D or 3D images
def is3DImage(self): return self.is3D
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_depth_image(self):\n return False", "def is_2d(self) -> bool:\n return self.layers == 1 and self.times == 1", "def check_niimg_3d(niimg, dtype=None):\n return check_niimg(niimg, ensure_ndim=3, dtype=dtype)", "def is3_d(self):\n return self.container['is3_d']", "def are_compat...
[ "0.68862396", "0.65322673", "0.6502295", "0.64808524", "0.64616615", "0.64155763", "0.63741845", "0.6256185", "0.62417966", "0.6234979", "0.6150245", "0.6141367", "0.60545796", "0.6043418", "0.603095", "0.60253567", "0.5992711", "0.5932484", "0.5897953", "0.5897953", "0.58671...
0.7844598
0
set the filenames that will be read
def setFilenames(self, filenames): self.filenames = filenames if len(filenames) == 0: return if not self.dimensions: self.retrieveImageInfo(filenames[0]) if not self.checkImageDimensions(filenames): raise Logging.GUIError("Image dimensions do not match", \ "Some of the selected files have differing dimensions, \ and cannot be imported into the same dataset.") self.getReadersFromFilenames() self.numberOfImages = len(filenames) if self.is3D: if self.readers: self.numberOfImages = 0 for rdr in self.readers: self.numberOfImages += rdr.GetNumberOfSubFiles()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_filename(self, file_name):", "def setfiles(self, filelist):\r\n self._filelist=filelist\r\n self._energy=self.readenergy(filelist)", "def filenames(self):\n pass", "def fileset(self):\n pass", "def __init__(self, files):\n self.files = files and [NamedFile(data=d, filenam...
[ "0.6955265", "0.6887402", "0.6878499", "0.67892617", "0.6675026", "0.6572736", "0.65357256", "0.65231085", "0.64586145", "0.64167434", "0.6371263", "0.6240702", "0.6175739", "0.6168266", "0.61507344", "0.6141928", "0.61352116", "0.6123154", "0.6068659", "0.6062177", "0.605273...
0.72363704
0
Set a flag indicating whether the image should be flipped vertically
def setVerticalFlip(self, flag): if self.ext.lower() in ["png", "jpg", "jpeg"]: self.flipVertically = not flag else: self.flipVertically = flag
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flip_vertical(img):\r\n #reading image\r\n im = Image.open(\"filename\")\r\n\r\n #flipping image vertically\r\n newimg = im.transpose(PIL.Image.FLIP_TOP_BOTTOM)\r\n return img", "def flip_image_vertical(image):\n return cv.flip(image, 1)", "def flip_image(img, vert=True):\n if vert:\n ...
[ "0.73209965", "0.70142406", "0.68551445", "0.66779745", "0.66683435", "0.666765", "0.65579903", "0.64983195", "0.64376324", "0.64257395", "0.6424698", "0.6389282", "0.63263226", "0.63111866", "0.63103646", "0.630324", "0.6244989", "0.61426216", "0.6092576", "0.60797507", "0.6...
0.8431753
0
Set a flag indicating whether the image should be flipped horizontally
def setHorizontalFlip(self, flag): self.flipHorizontally = flag
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flip_horizontal(img):\r\n #reading image\r\n im = Image.open(\"filename\")\r\n\r\n #flipping image horizontally\r\n newimg = im.transpose(PIL.Image.FLIP_LEFT_RIGHT)\r\n \r\n return img", "def flip_image_horizontal(image):\n return cv.flip(image, 0)", "def flip(self, horizontally):\n\t\...
[ "0.7160271", "0.6985837", "0.6883737", "0.6870575", "0.6787338", "0.67704475", "0.6695351", "0.6677402", "0.6635156", "0.6454874", "0.64387655", "0.64263225", "0.6393341", "0.6339728", "0.63132757", "0.6289396", "0.61795443", "0.61762446", "0.61736774", "0.6157254", "0.614742...
0.83549047
0
check that each image in the list has the same dimensions
def checkImageDimensions(self, filenames): s = None hashStr = filenames[:] hashStr.sort() hashStr = str(hashStr) # check to see if there's already a result of the check for these filenames in the cache if hashStr in self.dimensionCheck: Logging.info("Using cached result for dimensions check: %s"%(str(self.dimensionCheck[hashStr]))) return self.dimensionCheck[hashStr] for file in filenames: if file not in self.imageDims: print "Trying to open",type(file) try: self.ext = file.split(".")[-1].upper() if self.ext == "TIF": self.ext = "TIFF" if self.ext == "JPG": self.ext = "JPEG" if self.ext == "VTI": reader = vtk.vtkXMLImageReader() else: reader = eval("vtk.vtk%sReader()"%self.ext) reader.SetFileName(file) reader.UpdateInformation() except IOError, ex: traceback.print_exc() raise Logging.GUIError("Cannot open image file", "Cannot open image file %s" % file) extent = reader.GetDataExtent() fSize = (extent[1],extent[3]) self.imageDims[file] = fSize else: fSize = self.imageDims[file] if s and fSize != s: x0, y0 = s x1, y1 = fSize self.dimensionCheck[hashStr] = False return 0 s = fSize fn = file self.dimensionCheck[hashStr] = True return 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def images_are_present(file_info):\n currentdir = os.path.join(WORKDIR, file_info['folder'])\n if not os.path.exists(currentdir):\n return False\n count = len([x for x in os.listdir(currentdir) if x.endswith('.png')])\n if count != file_info['size']:\n print([x for x in os.listdir(current...
[ "0.66642034", "0.6607947", "0.65018415", "0.6470456", "0.64090717", "0.63401216", "0.6332631", "0.63291144", "0.6313849", "0.6313849", "0.62913483", "0.6274798", "0.62315774", "0.6190616", "0.6132303", "0.6132303", "0.6086374", "0.60784054", "0.6073663", "0.60491264", "0.6013...
0.66137314
1
return a VTK image reader based on file extension
def getReaderByExtension(self, ext, isRGB = 0): assert ext in self.extMapping, "Extension not recognized: %s" % ext mpr = self.extMapping[ext] prefix="vtk" # If it's a tiff file, we use our own, extended TIFF reader if self.extMapping[ext] == "TIFF": mpr = "ExtTIFF" prefix="vtkbxd" self.rdrstr = "%s.vtk%sReader()" % (prefix, mpr) rdr = eval(self.rdrstr) if ext == "bmp": rdr.Allow8BitBMPOn() if ext == "tiff": rdr.SetFileLowerLeft(self.flipVertically) return rdr
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getReadersFromFilenames(self):\n\t\tfor i in self.readers:\n\t\t\tdel i\n\t\tself.readers = []\n\n\t\tif not self.filenames:\n\t\t\traise Logging.GUIError(\"No files could be found\", \\\n\t\t\t\t\t\t\t\t\t\"For some reason, no files were listed to be imported.\")\t\t \n\t\t\t\t\t\n\t\tfiles = self.filenames\n...
[ "0.64241487", "0.5978514", "0.59677935", "0.5905505", "0.5868323", "0.5654292", "0.5641504", "0.5602225", "0.5563264", "0.55544156", "0.55478626", "0.55258757", "0.5498806", "0.54907084", "0.5490219", "0.5475998", "0.5461645", "0.5444304", "0.54409236", "0.541251", "0.5400047...
0.75270355
0
create the reader list from a given set of file names and parameters
def getReadersFromFilenames(self): for i in self.readers: del i self.readers = [] if not self.filenames: raise Logging.GUIError("No files could be found", \ "For some reason, no files were listed to be imported.") files = self.filenames print "Determining readers from ", self.filenames isRGB = 1 self.ext = files[0].split(".")[-1].lower() dim = self.dimMapping[self.ext] # Initially flip the image if it's tiff, png or jpg. # In setVerticalFlip we negate the setting to have it set correctly. if self.ext.lower() in ["png", "jpg", "jpeg"]: self.flipVertically = True if self.ext in ["tif", "tiff"]: reader = vtkbxd.vtkExtTIFFReader() reader.SetFileName(files[0]) reader.UpdateInformation() if reader.GetNumberOfScalarComponents() >= 3: print "MODE IS RGB, IS AN RGB IMAGE" else: print "MODE ISN'T RGB, THEREFORE NOT RGB" isRGB = 0 rdr = self.getReaderByExtension(self.ext, isRGB) rdr.SetFileName(files[0]) if rdr.GetNumberOfSubFiles() > 1: dim = 3 self.isRGB = isRGB self.is3D = (dim == 3) dirName = os.path.dirname(files[0]) print "THERE ARE", self.slicesPerTimepoint, "SLICES PER TIMEPOINT" self.ext = files[0].split(".")[-1].lower() if dim == 3: totalFiles = len(files) for i, file in enumerate(files): rdr = self.getReaderByExtension(self.ext, isRGB) rdr.SetFileName(file) self.readers.append(rdr) return totalFiles = len(files) / self.slicesPerTimepoint imgAmnt = len(files) if totalFiles == 1: rdr = self.getReaderByExtension(self.ext, isRGB) arr = vtk.vtkStringArray() for fileName in files: arr.InsertNextValue(os.path.join(dirName, fileName)) rdr.SetFileNames(arr) self.readers.append(rdr) return if imgAmnt > 1: # If the pattern doesn't have %, then we just use # the given filenames and allocate them to timepoints # using slicesPerTimepoint slices per timepoint ntps = len(files) / self.slicesPerTimepoint filelst = files[:] # dirn #TODO: what was this? 
for tp in range(0, ntps): rdr = self.getReaderByExtension(self.ext, isRGB) arr = vtk.vtkStringArray() for i in range(0, self.slicesPerTimepoint): arr.InsertNextValue(filelst[0]) filelst = filelst[1:] rdr.SetFileNames(arr) rdr.SetDataExtent(0, self.x - 1, 0, self.y - 1, 0, self.slicesPerTimepoint - 1) rdr.SetDataSpacing(self.spacing) rdr.SetDataOrigin(0, 0, 0) self.readers.append(rdr) return elif imgAmnt == 1: # If only one file rdr = self.getReaderByExtension(self.ext, isRGB) rdr.SetDataExtent(0, self.x - 1, 0, self.y - 1, 0, self.slicesPerTimepoint - 1) rdr.SetDataSpacing(self.spacing) rdr.SetDataOrigin(0, 0, 0) rdr.SetFileName(files[0]) Logging.info("Reader = ", rdr, kw = "io") self.readers.append(rdr)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize_file_readers():\n savefile_path = os.path.join(os.getcwd()+ \"/../data/\", SAVE_FILE)\n file_reader_list = []\n for file in os.listdir(savefile_path):\n file_reader = open(os.path.join(savefile_path,file), \"r\")\n file_reader_list.append({\"file_reader\": file_reader, \"last...
[ "0.6492544", "0.63382894", "0.6250799", "0.611366", "0.6069598", "0.6018633", "0.59182394", "0.5909983", "0.58632267", "0.58579755", "0.5853619", "0.5849561", "0.5829838", "0.580928", "0.57897955", "0.5785899", "0.57765526", "0.57752377", "0.57527864", "0.57526433", "0.574977...
0.679022
0
return the number of slices per timepoint
def getSlicesPerTimepoint(self): return self.slicesPerTimepoint
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getDataSetCount(self):\n\t\treturn int(self.numberOfImages / self.slicesPerTimepoint)", "def size(self):\n size = 1\n for current_slice in self.slices:\n size *= current_slice.stop - current_slice.start\n return size", "def n(self):\n return self._time_axis.size", "...
[ "0.7135924", "0.6884999", "0.64077276", "0.6373212", "0.6304518", "0.6302463", "0.6214839", "0.6137181", "0.6126625", "0.61182857", "0.6099939", "0.6055298", "0.6052316", "0.6023152", "0.59912443", "0.5985234", "0.59840417", "0.59639126", "0.59609234", "0.59437424", "0.594127...
0.76755035
0
Set the number of slices that belong to a given timepoint
def setSlicesPerTimepoint(self, n): assert n > 0, "Slices per timepoint needs to be greater than 0" print "Setting slices per timepoint to ", n self.slicesPerTimepoint = n self.z = n self.readers = []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setNSlices(self,n):\n assert(n> 0)\n self._c_param.lee_richards_n_slices = n", "def setNumTimeSubSteps(*argv):", "def getSlicesPerTimepoint(self):\n\t\treturn self.slicesPerTimepoint", "def _set_window_time(slices, times):\n t_idx_ = [t[-1] for t in slices]\n return times[t_idx_]", ...
[ "0.6350953", "0.6137186", "0.58842385", "0.5869937", "0.56822366", "0.56016797", "0.5523093", "0.5513194", "0.54768455", "0.5377458", "0.5337342", "0.5313137", "0.5259041", "0.5251777", "0.52457607", "0.5220166", "0.5212587", "0.52124", "0.52096623", "0.5208932", "0.52069575"...
0.7975833
0
Returns the number of individual DataSets (=time points) managed by this DataSource
def getDataSetCount(self): return int(self.numberOfImages / self.slicesPerTimepoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def count(self):\n return self.data_container.count", "def get_num_datasets(self, data):\n dsets = set()\n for items in data:\n dsetid = items[3]\n dsets.add(dsetid)\n return len(dsets)", "def count_data(self):\n try:\n ndata = len(self.x)\n ...
[ "0.7276192", "0.7221659", "0.72136873", "0.7015325", "0.69837743", "0.68426126", "0.6835689", "0.6830701", "0.6756762", "0.66132236", "0.6597357", "0.65931714", "0.65570873", "0.6531572", "0.6491252", "0.64376664", "0.6436097", "0.6429143", "0.642638", "0.6403021", "0.6397654...
0.79010725
0
Timepoint i i The timepoint to return
def getDataSet(self, i, raw = 0): data = self.getTimepoint(i) if self.isRGB and self.numberOfComponents == 4: extract = vtk.vtkImageExtractComponents() extract.SetComponents(0, 1, 2) extract.SetInput(data) data = extract.GetOutput() if self.flipVertically: flip = vtk.vtkImageFlip() flip.SetFilteredAxis(1) flip.SetInput(data) data = flip.GetOutput() if self.flipHorizontally: flip = vtk.vtkImageFlip() flip.SetFilteredAxis(0) flip.SetInput(data) data = flip.GetOutput() return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_time_points(self):\n return self._time", "def time(self):\r\n raise NotImplementedError", "def setTimepoint(self, tp):\n\t\tpass", "def time(self):\n return self._begin", "def t0(self):\n return self._time_axis.start", "def get_time(self):\n start=''\n end=''...
[ "0.679934", "0.6443112", "0.64257336", "0.64229566", "0.6388925", "0.63801783", "0.6370203", "0.6363844", "0.63452727", "0.6335522", "0.63265383", "0.63182783", "0.63152707", "0.6278807", "0.61801505", "0.61801505", "0.61801505", "0.6171835", "0.6104696", "0.6076395", "0.6036...
0.0
-1
A method that reads information from an image
def retrieveImageInfo(self, filename): assert filename, "Filename must be defined" assert os.path.exists(filename), "File that we're retrieving information \ from (%s) needs to exist, but doesn't." % filename self.ext = filename.split(".")[-1].lower() rdr = self.getReaderByExtension(self.ext) if self.ext == "bmp": rdr.Allow8BitBMPOn() rdr.SetFileName(filename) if rdr.IsA("vtkExtTIFFReader"): rdr.UpdateInformation() if rdr.GetNumberOfScalarComponents() == 1: rdr.RawModeOn() data = rdr.GetOutput() data.Update() self.numberOfComponents = data.GetNumberOfScalarComponents() if not self.ctf: bd = self.getDataBitDepth(data) self.ctf = vtk.vtkColorTransferFunction() if bd == 8 or bd == 12: self.ctf.AddRGBPoint(0, 0, 0, 0) self.ctf.AddRGBPoint((2 ** bd) - 1, 0, 1, 0) else: range = data.GetScalarRange() self.ctf.AddRGBPoint(range[0], 0, 0, 0) self.ctf.AddRGBPoint(range[1], 0, 1, 0) self.x, self.y, z = data.GetDimensions() self.dimensions = (self.x, self.y, self.slicesPerTimepoint) if z > 1: self.slicesPerTimepoint = z self.z = z self.dimensions = (self.x, self.y, self.slicesPerTimepoint) lib.messenger.send(self, "update_dimensions") self.originalDimensions = self.dimensions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def image_info(img):\n\tprint(img.format)\n\tprint(img.size)\n\tprint(img.mode)", "def test_read_image(self):\n pass", "def image_info(path):\n global working_img\n working_img = Image.open(path)\n print('=======================================')\n print(f'이미지 파일 이름:{working_img.filename}')\...
[ "0.72628486", "0.72226435", "0.6992954", "0.6965782", "0.6876386", "0.68329227", "0.6822123", "0.68075496", "0.6758192", "0.6675034", "0.6649149", "0.6627219", "0.6605855", "0.660208", "0.6594365", "0.65857846", "0.6574458", "0.6571832", "0.6551282", "0.65477014", "0.65150887...
0.64804757
25
Return the nth timepoint
def getTimepoint(self, n, onlyDims = 0): if not self.readers: self.getReadersFromFilenames() if self.is3DImage(): if not self.readers: raise Logging.GUIError("Attempt to read bad timepoint", "Timepoint %d is not defined by the given filenames" % n) self.reader = self.readers[0] minZ = n * self.slicesPerTimepoint maxZ = (n+1) * self.slicesPerTimepoint - 1 extract = vtk.vtkExtractVOI() extract.SetInput(self.reader.GetOutput()) extract.SetVOI(0, self.x - 1, 0, self.y - 1, minZ, maxZ) changeInfo = vtk.vtkImageChangeInformation() changeInfo.SetInput(extract.GetOutput()) changeInfo.SetOutputOrigin(0, 0, 0) changeInfo.SetExtentTranslation((0,0,-minZ)) data = changeInfo.GetOutput() else: if n >= len(self.readers): n = 0 raise Logging.GUIError("Attempt to read bad timepoint", "Timepoint %d is not defined by the given filenames" % n) self.reader = self.readers[n] data = self.reader.GetOutput() if not self.voxelsize: size = data.GetSpacing() x, y, z = [size.GetElement(x) for x in range(0, 3)] self.voxelsize = (x, y, z) print "Read voxel size", self.voxelsize if onlyDims: return return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def nthPersonGetsNthSeat(self, n: int) -> float:\n if n == 1:\n return 1.0\n\n return 1 / 2", "def get_second(time_index):\n return np.array(time_index.second).reshape(-1,1)", "def get_times_slice(self, index, next_index):\n next_index = tf.minimum(next_index, self.Nt)\n ...
[ "0.6113835", "0.6054137", "0.5898435", "0.58786285", "0.58263016", "0.57462263", "0.5740125", "0.5694311", "0.5676445", "0.56450284", "0.5632503", "0.55841124", "0.5574055", "0.55318683", "0.55112934", "0.5497918", "0.54749393", "0.54705596", "0.5468096", "0.5458759", "0.5441...
0.59078765
2
Returns the (x,y,z) dimensions of the datasets this dataunit contains
def getDimensions(self): print "Returning",self.x,self.y,self.slicesPerTimepoint return (self.x, self.y, self.slicesPerTimepoint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getDimensions():", "def dimensions():", "def get_data_dimensions(self):\n return image_utils.convert_shape_indexing(self._get_data_dimensions_rc(),\"rc\",self.image_indexing)", "def get_dimensions(self):\r\n x = []\r\n y = []\r\n z = []\r\n for i in self.verts:\r\n ...
[ "0.7814758", "0.77764654", "0.77693594", "0.76322365", "0.74653226", "0.7423212", "0.7418739", "0.7339198", "0.7303002", "0.7278066", "0.7271514", "0.72714305", "0.7247757", "0.7213372", "0.7206813", "0.71931577", "0.7168946", "0.7104327", "0.71030354", "0.7093483", "0.708581...
0.7157837
17
Returns the spacing of the datasets this dataunit contains
def getSpacing(self): if not self.spacing: a, b, c = self.getVoxelSize() self.spacing = [1, b / a, c / a] return self.spacing
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetSpacing(self):\r\n\r\n return self._spacing", "def spacings(self):\n return np.array([self.pixel_spacing,\n self.pixel_spacing,\n self.slice_spacing])", "def spacing(self):\r\n\r\n return self.dx, self.dy, self.dz", "def margin(self)...
[ "0.69162726", "0.6651873", "0.66186863", "0.6618427", "0.6541836", "0.64272296", "0.6390034", "0.6284166", "0.6187805", "0.61713886", "0.6085297", "0.6026972", "0.6021646", "0.60199386", "0.5960843", "0.5940496", "0.59104943", "0.5868685", "0.573976", "0.5731771", "0.57220376...
0.7266339
0
Returns the voxel size of the datasets this dataunit contains
def getVoxelSize(self): return self.voxelsize
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def voxel_size(self):\n return self.calculation.voxel_size", "def dataset_size(self):\n return self.dataset.size", "def GetVoxelSize(vDataSet):\r\n nx = vDataSet.GetSizeX()\r\n ny = vDataSet.GetSizeY()\r\n nz = vDataSet.GetSizeZ()\r\n\r\n if nx > 0: nx = abs(vDataSet.GetExtendMaxX()-v...
[ "0.8266003", "0.76687074", "0.7574417", "0.75476074", "0.7534628", "0.7282379", "0.72617334", "0.7169072", "0.7134597", "0.7131639", "0.71055424", "0.7064898", "0.70379966", "0.7025604", "0.7004166", "0.6994787", "0.69689655", "0.69596523", "0.6953735", "0.69417727", "0.69085...
0.7754449
1
set the voxel sizes of the images that are read
def setVoxelSize(self, vxs): self.voxelsize = vxs a, b, c = vxs self.spacing = [1, b / a, c / a]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _assign_sizes(self):", "def setinputsizes(self, sizes):\n pass", "def setImageDimensions(*args):", "def update_size(self):\n self.size = self.image.size\n self.width, self.height = self.size", "def set_size(self, size=None):\n if not size:\n size = self.output_siz...
[ "0.66988456", "0.6689091", "0.6536548", "0.61790437", "0.6176843", "0.61598647", "0.6149251", "0.6120889", "0.6050634", "0.6040322", "0.60017174", "0.59967524", "0.59520566", "0.5908052", "0.58848417", "0.5881362", "0.5838115", "0.5830686", "0.5829773", "0.57882386", "0.57879...
0.7176527
0
Loads the specified .oif file and imports data from it.
def loadFromFile(self, filename): return []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_data(self) -> None:", "def _load(self):\n op_type_file_path = os.path.join(\n self._profiling_dir,\n self._csv_file_to_analyse.format(self._device_id)\n )\n op_type_file_path = validate_and_normalize_path(\n op_type_file_path, raise_key=\"Invalid op_...
[ "0.6358744", "0.62189686", "0.6196685", "0.6185245", "0.6043738", "0.5999552", "0.59746337", "0.5964088", "0.5944215", "0.5927775", "0.5897965", "0.58831894", "0.5869329", "0.5848849", "0.58181727", "0.5810083", "0.5804512", "0.577736", "0.576331", "0.57621175", "0.5762104", ...
0.0
-1
Returns the name of the dataset series which this datasource operates on
def getName(self): return self.name
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dataset_name(self):\n return self.dataset.name", "def get_dataset_name(self):\n raise NotImplementedError", "def get_dataset_name(self):\n return self.dataset_name", "def dataset_name(self):\n return self._dataset_name", "def series_names(self):\r\n return self.names"...
[ "0.76386154", "0.7613764", "0.7546239", "0.75300765", "0.7156447", "0.6760011", "0.6729094", "0.6698387", "0.66739553", "0.6590775", "0.6491004", "0.64055467", "0.6366336", "0.63660336", "0.63432425", "0.63402456", "0.63139105", "0.6290302", "0.6242632", "0.62166566", "0.6213...
0.0
-1
Returns the ctf of the dataset series which this datasource operates on
def getColorTransferFunction(self): return self.ctf
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getCDF(self):\n return self.cdfSample", "def CFL(self):\n return self.__CFL", "def get_cffts(self):\n return [\n rfft(self.nx, self.dx, fft=self.tfft, ny=self.ny,\n dy=self.dy).get_cfft(),\n rfft(self.nx, self.dx, fft=self.efft, ny=self.ny,\n ...
[ "0.6114998", "0.5954782", "0.5948087", "0.5920047", "0.59179395", "0.58852816", "0.5781732", "0.57313186", "0.57007056", "0.56663144", "0.5602908", "0.5576217", "0.55340517", "0.5467268", "0.5431777", "0.539905", "0.539905", "0.5388791", "0.5385939", "0.53720737", "0.53636706...
0.5846474
6
Returns number of images in this data source.
def getNumberOfImages(self): return self.numberOfImages
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_image_count(self):\n return self._num_images", "def get_num_of_images(self):", "def num_of_images(self):\n return len(self.data['image_infos'])", "def numberOfImages(self):\n return len(self.imageList)", "def __len__(self):\n return self.num_images", "def __len__(self)...
[ "0.84259796", "0.83975625", "0.83702207", "0.80581206", "0.77916324", "0.77916324", "0.77916324", "0.77916324", "0.77916324", "0.77843183", "0.7672272", "0.7612054", "0.74140126", "0.7412375", "0.72965276", "0.7274649", "0.7274649", "0.72326416", "0.72228235", "0.7218599", "0...
0.85196555
0