query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
True if target and current object are equal and have the same parent. Equal means same mesh, same shape and same domain.
def is_consistent_with(self, target): same_parent = self.parent() == target.parent() # Note FP. Is it really required to have the # same parent? Inclusion of all proc may be enough? return npw.equal(self.shape, target.shape).all() and same_parent
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __eq__(self, other):\n if self.__class__ != other.__class__:\n return False\n return self.mesh == other.mesh and \\\n npw.equal(self.shape, other.shape).all() and \\\n self.domain == other.domain", "def __eq__(self, other):\n parent_same = self.parent1.ri...
[ "0.70844996", "0.66548616", "0.66513604", "0.6646044", "0.64481336", "0.6433953", "0.63558435", "0.63483995", "0.6299958", "0.6295003", "0.62756413", "0.6267289", "0.6267289", "0.6267289", "0.6267289", "0.6267289", "0.626677", "0.62649465", "0.62575334", "0.6204619", "0.62005...
0.7603044
0
True if current topo is complient with target.
def can_communicate_with(self, target): if self == target: return True msg = 'You try to connect topologies belonging to' msg += ' two different mpi tasks. Set taskids properly or use' msg += ' InterBridge.' assert self.task_id() == target.task_id(), msg # Parent communicator # Todo : define some proper conditions for compatibility # between topo_from, topo_to and parent: # - same size # - same domain # - common processus ... # At the time we check that both topo have # the same comm_origin. return self.is_consistent_with(target)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def independent(self) -> bool:\n parent = self._parent()\n if parent is None:\n return True\n connections = parent._graph.connections\n path = self._path\n lp = len(path)\n for con in connections:\n if con[\"type\"] == \"connection\":\n ...
[ "0.63338524", "0.61239785", "0.6074758", "0.5993989", "0.59502983", "0.5931595", "0.59294623", "0.59263664", "0.5918986", "0.58928543", "0.5868036", "0.5846863", "0.58454454", "0.5810941", "0.5808112", "0.58062047", "0.57958764", "0.5795005", "0.5779186", "0.5778966", "0.5777...
0.735502
0
Collect global indices of local meshes on each process of topo
def gather_global_indices(topo, toslice=True, root=None, comm=None): if comm is None: comm = topo.parent() size = comm.size start = topo.mesh.start() end = topo.mesh.stop() - 1 # communicator that owns the topology rank = comm.Get_rank() dimension = topo.domain.dimension iglob = npw.int_zeros((dimension * 2, size)) iglob_res = npw.int_zeros((dimension * 2, size)) iglob[0::2, rank] = start iglob[1::2, rank] = end # iglob is saved as a numpy array and then transform into # a dict of slices since mpi send operations are much # more efficient with numpy arrays. if root is None: comm.Allgather([iglob[:, rank], MPI.INT], [iglob_res, MPI.INT]) else: comm.Gather([iglob[:, rank], MPI.INT], [iglob_res, MPI.INT], root=root) if toslice: return utils.arrayToDict(iglob_res) else: return iglob_res
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_local_indices(self, part, ctx):\n return self.map_to_global(\n F.arange(0, self.local_size(part), ctx=ctx), part\n )", "def _exchange_ghosts_local(self):\n for d in xrange(self._dim):\n self._exchange_ghosts_local_d(d)", "def getGlobalIndices( self, indices: l...
[ "0.6674267", "0.607246", "0.59368324", "0.58657676", "0.5651199", "0.56479245", "0.56453234", "0.5612734", "0.5533967", "0.5532873", "0.5474459", "0.5465615", "0.5442727", "0.5435518", "0.54162186", "0.5397795", "0.535609", "0.5344314", "0.534363", "0.5338216", "0.5314248", ...
0.70098007
0
This functions does the same thing as gather_global_indices but may also work when topo is None. The function is usefull if you need to collect global indices on a topo define only on a subset of comm, when for the procs not in this subset, topo will be equal to None. In such a case, comm and dom are required. This may happen when you want to build a bridge between two topologies that do not handle the same number of processes but with an overlap between the two groups of processes of the topologies. In that case, a call to gather_global_indices(topo, comm, dom) will work on all processes belonging to comm, topo being None or not. The values corresponding to ranks not in topo will be empty slices.
def gather_global_indices_overlap(topo=None, comm=None, dom=None, toslice=True, root=None): if topo is None: assert comm is not None and dom is not None size = comm.Get_size() rank = comm.Get_rank() dimension = dom.dimension iglob = npw.int_zeros((dimension * 2, size)) iglob_res = npw.int_zeros((dimension * 2, size)) iglob[1::2, rank] = -1 if root is None: comm.Allgather([iglob[:, rank], MPI.INT], [iglob_res, MPI.INT]) else: comm.Gather([iglob[:, rank], MPI.INT], [iglob_res, MPI.INT], root=root) if toslice: return utils.arrayToDict(iglob_res) else: return iglob_res else: return TopoTools.gather_global_indices(topo, toslice, root, comm)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gather_global_indices(topo, toslice=True, root=None, comm=None):\n if comm is None:\n comm = topo.parent()\n size = comm.size\n start = topo.mesh.start()\n end = topo.mesh.stop() - 1\n # communicator that owns the topology\n rank = comm.Get_rank()\n d...
[ "0.78130454", "0.55268973", "0.5226445", "0.51116353", "0.50954676", "0.48668343", "0.4836725", "0.47469142", "0.4703786", "0.4642443", "0.45836034", "0.45445976", "0.45384085", "0.45181572", "0.44909367", "0.4485813", "0.4471921", "0.44507834", "0.4396532", "0.4384738", "0.4...
0.7679277
1
Return true if all mpi processes of child belong to parent
def is_parent(child, parent): # Get the list of processes assert child is not None assert parent is not None #child_ranks = [i for i in xrange(child.Get_size())] child_group = child.Get_group() parent_group = parent.Get_group() inter_group = MPI.Group.Intersect(child_group, parent_group) return child_group.Get_size() == inter_group.Get_size()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_mpi_peer_processes():\n return mpi4py_available and MPI.COMM_WORLD.Get_size() > 1", "def contains_parent(self, pid):\n return pid in self._parent_ids", "def check_parent_processes_alive():\n cur_process = psutil.Process()\n parent = cur_process.parent()\n while True:\n ...
[ "0.6905076", "0.6545807", "0.6406559", "0.63527286", "0.6351458", "0.6243226", "0.62221795", "0.6163074", "0.6159543", "0.615525", "0.6027681", "0.5970372", "0.5956478", "0.5952117", "0.5878544", "0.58695066", "0.5852495", "0.579879", "0.57776415", "0.5767349", "0.57645994", ...
0.82558614
0
Number of processess common to comm_1 and comm_2
def intersection_size(comm_1, comm_2): if comm_1 == MPI.COMM_NULL or comm_2 == MPI.COMM_NULL: return None group_1 = comm_1.Get_group() group_2 = comm_2.Get_group() inter_group = MPI.Group.Intersect(group_1, group_2) return inter_group.Get_size()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def common_count(self, node_1, node_2):\n return int(len(set(nx.neighbors(self.graph, node_1)).intersection(set(nx.neighbors(self.graph, node_2)))))", "def communities_with_protesters(partition, active_nodes):\n return len(set([partition[node] for node in active_nodes]))", "def _num_of_consolidated(s...
[ "0.71106726", "0.64436406", "0.6228776", "0.6228776", "0.62096745", "0.6076676", "0.60679585", "0.59844345", "0.59690386", "0.5953923", "0.5925899", "0.5922579", "0.59189546", "0.5907016", "0.58420223", "0.5753761", "0.57528996", "0.5745631", "0.574225", "0.5684263", "0.56555...
0.71126
0
Compare two mpi communicators. Returns true if the two communicators are handles for the same group of proc and for the same communication context.
def compare_comm(comm_1, comm_2): assert comm_1 != MPI.COMM_NULL assert comm_2 != MPI.COMM_NULL result = MPI.Comm.Compare(comm_1, comm_2) res = [MPI.IDENT, MPI.CONGRUENT, MPI.SIMILAR, MPI.UNEQUAL] return result == res[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compare_groups(comm_1, comm_2):\n assert comm_1 != MPI.COMM_NULL\n assert comm_2 != MPI.COMM_NULL\n result = MPI.Comm.Compare(comm_1, comm_2)\n res = [MPI.IDENT, MPI.CONGRUENT, MPI.SIMILAR, MPI.UNEQUAL]\n return result in res[:-1]", "def has_mpi_peer_processes():\n retur...
[ "0.74933475", "0.61300033", "0.6039855", "0.59951395", "0.5934048", "0.58377117", "0.5620288", "0.55401844", "0.5411162", "0.5404919", "0.53369236", "0.5305763", "0.53046554", "0.52937305", "0.52503824", "0.52165604", "0.52162445", "0.51967853", "0.51824045", "0.5172295", "0....
0.76038
0
Compare the groups of two mpi communicators. Returns true if each comm handles the same group of mpi processes.
def compare_groups(comm_1, comm_2): assert comm_1 != MPI.COMM_NULL assert comm_2 != MPI.COMM_NULL result = MPI.Comm.Compare(comm_1, comm_2) res = [MPI.IDENT, MPI.CONGRUENT, MPI.SIMILAR, MPI.UNEQUAL] return result in res[:-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compare_comm(comm_1, comm_2):\n assert comm_1 != MPI.COMM_NULL\n assert comm_2 != MPI.COMM_NULL\n result = MPI.Comm.Compare(comm_1, comm_2)\n res = [MPI.IDENT, MPI.CONGRUENT, MPI.SIMILAR, MPI.UNEQUAL]\n return result == res[0]", "def same_group(self,i,j):\n if self.g...
[ "0.7372509", "0.61725867", "0.60191995", "0.597187", "0.5756517", "0.56709945", "0.5612317", "0.5581606", "0.5556505", "0.5444479", "0.5426828", "0.541855", "0.5415436", "0.54028773", "0.53394014", "0.52934015", "0.52109265", "0.519534", "0.5183995", "0.51758873", "0.506649",...
0.8524684
0
Find the values of ranks in target from ranks in source.
def convert_ranks(source, target): assert source != MPI.COMM_NULL and target != MPI.COMM_NULL g_source = source.Get_group() g_target = target.Get_group() size_source = g_source.Get_size() r_source = [i for i in xrange(size_source)] res = MPI.Group.Translate_ranks(g_source, r_source, g_target) return {r_source[i]: res[i] for i in xrange(size_source)}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def findRanks(toBeRanked, values):\n\treturn list(map(lambda e: findRank(e, values), toBeRanked))", "def rank_results(result_index, source2target):\n result2rank = defaultdict(lambda: [])\n for term, targets in result_index.items():\n ranked = sorted(targets, key=lambda tup: tup[1], reverse=True)\n ...
[ "0.6195965", "0.61417454", "0.57947445", "0.554299", "0.5502083", "0.53975844", "0.533245", "0.528168", "0.5239981", "0.5233992", "0.5226889", "0.5212047", "0.5168843", "0.5151811", "0.5149276", "0.5147061", "0.5105847", "0.5105718", "0.50940347", "0.5074896", "0.505985", "...
0.64136374
0
Create a MPI subarray mask to be used in send/recv operations between some topologies.
def create_subarray(sl_dict, data_shape): from hysop.constants import HYSOP_MPI_REAL, ORDERMPI subtypes = {} dim = len(data_shape) for rk in sl_dict.keys(): subvshape = tuple((sl_dict[rk][i].stop - sl_dict[rk][i].start for i in xrange(dim))) substart = tuple((sl_dict[rk][i].start for i in xrange(dim))) subtypes[rk] = \ HYSOP_MPI_REAL.Create_subarray(data_shape, subvshape, substart, order=ORDERMPI) subtypes[rk].Commit() return subtypes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subgraph_mask(self, size):\n init_matrix = np.random.randn(size,size)\n Tcs = csgraph.minimum_spanning_tree(init_matrix)\n mask_matrix = Tcs.toarray()\n return mask_matrix", "def generate_mask(self):\n\n polymer_length = len(self.sequence)\n protein_length = len(self...
[ "0.6550779", "0.6427817", "0.6351284", "0.6048119", "0.5883029", "0.5829637", "0.58208704", "0.58193535", "0.5818369", "0.58154434", "0.57871544", "0.5785224", "0.57692295", "0.5747028", "0.5712273", "0.5712273", "0.56396306", "0.5627762", "0.5626429", "0.56193084", "0.556814...
0.0
-1
Return rules for checking.
def rules(cls): rules_CityscapesConfig = {"batch_size": {"type": int}, "root_path": {"type": str}, "num_parallel_batches": {"type": int}, "fixed_size": {"type": bool} } return rules_CityscapesConfig
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_rules(cls):\n raise NotImplementedError()", "def get_rules(self):\n rules = []\n for item in self.name:\n rules.append(item)\n return rules", "def get_rules(self):\n rules = []\n for item in self.rule:\n rules.append(item)\n return ...
[ "0.76105815", "0.7469598", "0.73660934", "0.7312732", "0.721728", "0.71480995", "0.70150936", "0.68892413", "0.68892413", "0.67432487", "0.66999936", "0.6694647", "0.6694647", "0.669131", "0.6645618", "0.6565479", "0.65333295", "0.6530434", "0.65208477", "0.651483", "0.650718...
0.59890103
57
Return rules for checking.
def rules(cls): rules_CityscapesTrainConfig = {"batch_size": {"type": int}, "list_path": {"type": str} } return rules_CityscapesTrainConfig
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_rules(cls):\n raise NotImplementedError()", "def get_rules(self):\n rules = []\n for item in self.name:\n rules.append(item)\n return rules", "def get_rules(self):\n rules = []\n for item in self.rule:\n rules.append(item)\n return ...
[ "0.76105815", "0.7469598", "0.73660934", "0.7312732", "0.721728", "0.71480995", "0.70150936", "0.68892413", "0.68892413", "0.67432487", "0.66999936", "0.6694647", "0.6694647", "0.669131", "0.6645618", "0.6565479", "0.65333295", "0.6530434", "0.65208477", "0.651483", "0.650718...
0.59536374
61
Return rules for checking.
def rules(cls): rules_CityscapesValConfig = {"batch_size": {"type": int}, "list_path": {"type": str} } return rules_CityscapesValConfig
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_rules(cls):\n raise NotImplementedError()", "def get_rules(self):\n rules = []\n for item in self.name:\n rules.append(item)\n return rules", "def get_rules(self):\n rules = []\n for item in self.rule:\n rules.append(item)\n return ...
[ "0.76105815", "0.7469598", "0.73660934", "0.7312732", "0.721728", "0.71480995", "0.70150936", "0.68892413", "0.68892413", "0.67432487", "0.66999936", "0.6694647", "0.6694647", "0.669131", "0.6645618", "0.6565479", "0.65333295", "0.6530434", "0.65208477", "0.651483", "0.650718...
0.60942304
45
Return rules for checking.
def rules(cls): rules_CityscapesTestConfig = {"batch_size": {"type": int}, "list_path": {"type": str} } return rules_CityscapesTestConfig
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_rules(cls):\n raise NotImplementedError()", "def get_rules(self):\n rules = []\n for item in self.name:\n rules.append(item)\n return rules", "def get_rules(self):\n rules = []\n for item in self.rule:\n rules.append(item)\n return ...
[ "0.76105815", "0.7469598", "0.73660934", "0.7312732", "0.721728", "0.71480995", "0.70150936", "0.68892413", "0.68892413", "0.67432487", "0.66999936", "0.6694647", "0.6694647", "0.669131", "0.6645618", "0.6565479", "0.65333295", "0.6530434", "0.65208477", "0.651483", "0.650718...
0.6011419
54
Return rules for checking.
def rules(cls): rules_Cityscapes = {"common": {"type": dict}, "train": {"type": dict}, "val": {"type": dict}, "test": {"type": dict} } return rules_Cityscapes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_rules(cls):\n raise NotImplementedError()", "def get_rules(self):\n rules = []\n for item in self.name:\n rules.append(item)\n return rules", "def get_rules(self):\n rules = []\n for item in self.rule:\n rules.append(item)\n return ...
[ "0.76105815", "0.7469598", "0.73660934", "0.7312732", "0.721728", "0.71480995", "0.70150936", "0.68892413", "0.68892413", "0.66999936", "0.6694647", "0.6694647", "0.669131", "0.6645618", "0.6565479", "0.65333295", "0.6530434", "0.65208477", "0.651483", "0.65071875", "0.64957"...
0.67432487
9
Builds the architecture of the network
def _model_definition(self, net): # Input filtering and downsampling with max pooling print(net.shape) #channels must be specified first otherwise keras assumes channels last print('resnet17_scp') net = Conv2D( filters=128, kernel_size=5, activation=None, padding='same', data_format="channels_first", input_shape=(1, 100, 100))(net) net = BatchNormalization(axis=1)(net) #axis is set to the dimension which hold the colour channels net = LeakyReLU()(net) net= MaxPooling2D(pool_size=(2,2))(net) net = Conv2D( filters=64, kernel_size=3, activation=None, padding='same', data_format="channels_first")(net) net = BatchNormalization(axis=1)(net) #axis is set to the dimension which hold the colour channels net = LeakyReLU()(net) net= MaxPooling2D(pool_size=(2,2))(net) net = Conv2D( filters=64, kernel_size=3,activation=None, padding='same', data_format="channels_first")(net) net = BatchNormalization(axis=1)(net) #axis is set to the dimension which hold the colour channels net = LeakyReLU()(net) net= MaxPooling2D(pool_size=(2,2))(net) return net
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _build_network(self):\n pass", "def build_network(self, inputs, targets, training=False):\n raise NotImplementedError", "def _build_networks(self):\n self.online_convnet = self._create_network(name='Online')\n self.target_convnet = self._create_network(name='Target')\n self._net_outp...
[ "0.70051813", "0.66093147", "0.6482595", "0.6386774", "0.62803894", "0.62662494", "0.623352", "0.6173588", "0.6158841", "0.61208886", "0.60760844", "0.6056186", "0.60528004", "0.5953922", "0.59380317", "0.59286124", "0.59246135", "0.5907775", "0.58998877", "0.58750767", "0.58...
0.0
-1
Calculate cumulative probability from a list of probabilities
def cum_sum_prob(prob_dict): if not math.isclose(sum(prob_dict.values()), 1, rel_tol=1e-3): ValueError('Input probabilities do not sum to 1.') out = [] cur_sum = 0 for k, v in prob_dict.items(): cur_sum += v out.append((k, cur_sum)) return out
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cumulative_probability_distribution(self):\n return list(accumulate(self.probability_distribution()))", "def cumprob(self):\r\n return self.probabilities.cumsum(-1)", "def custom_pdf(self, cum_probs, values):\n rnd_num = random()\n for p in range(len(cum_probs)):\n if...
[ "0.7402393", "0.6922577", "0.6584128", "0.6549101", "0.65145403", "0.63271004", "0.62952936", "0.6243871", "0.6198528", "0.6177253", "0.61713296", "0.6147572", "0.6130507", "0.61138874", "0.610889", "0.60820377", "0.60630465", "0.6000701", "0.59468305", "0.5934774", "0.592816...
0.68047005
2
Select an item random with given discrete pdf
def select_item_with_prob(items_prob, n_inst): items = [] for i in range(n_inst): pick_prob = np.random.uniform() values, probs = zip(*cum_sum_prob(items_prob)) idx = bisect_left(probs, pick_prob) items.append(values[idx]) return items
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def randomly_drawn_via_pdf_gen_from(self, total_samples: int):\r\n\r\n def func_gen(fit: af.Fit, total_samples: int) -> List[object]:\r\n samples = fit.value(name=\"samples\")\r\n\r\n return [\r\n self.object_via_gen_from(\r\n fit=fit,\r\n ...
[ "0.6026738", "0.597825", "0.5953137", "0.5908189", "0.58714074", "0.5860779", "0.58515704", "0.58300185", "0.57529527", "0.57335967", "0.5725942", "0.5704148", "0.5658167", "0.565592", "0.5649849", "0.56484205", "0.5635234", "0.5629565", "0.56148976", "0.561193", "0.56036764"...
0.58877283
4
Normalize the data using zscore
def normalize(ref_df_col, df_col): col_mean = ref_df_col.mean() col_std = ref_df_col.std() ref_df_norm_col = (ref_df_col - col_mean) / col_std df_norm_col = (df_col - col_mean) / col_std return ref_df_norm_col, df_norm_col
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def normalize(X):\n # z-score\n mean = np.mean(X, axis=(0, 1, 2, 3))\n std = np.std(X, axis=(0, 1, 2, 3))\n # avoid dividing zero by adding a very small number\n X = (X - mean) / (std + 1e-7)\n\n return X", "def z_score_norm(data: np.ndarray) -> np.ndarray:\n mean = np.mean(data)\n std = ...
[ "0.78604925", "0.7845451", "0.7444941", "0.72904104", "0.70869136", "0.70312285", "0.69701195", "0.6964201", "0.69579893", "0.688568", "0.6840598", "0.67974013", "0.6757654", "0.6757654", "0.67462033", "0.6714735", "0.6691225", "0.66248816", "0.6613352", "0.65668243", "0.6546...
0.0
-1
Find the difference in unique counts of two distributions and return as percentage
def compute_unique_count_drift(df_prob, ref_df_prob): df_diff = set(df_prob.keys()) - set(ref_df_prob.keys()) ref_df_diff = set(ref_df_prob.keys()) - set(df_prob.keys()) return sum([df_prob[k] for k in df_diff] + [ref_df_prob[k] for k in ref_df_diff])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _distr_stat(col1, col2, f):\n bin_threshold = 10\n vcs1, col1_len = col1.value_counts().to_dict(), float(len(col1))\n vcs1[\"_Empty_\"] = sum(col1.map(lambda x: is_null_flag(x)))\n vcs2, col2_len = col2.value_counts().to_dict(), float(len(col2))\n vcs2[\"_Empty_\"] = sum(col2.map(lambda x: is_nu...
[ "0.6724862", "0.6445435", "0.6334011", "0.6307524", "0.62770367", "0.6188019", "0.61835897", "0.6182407", "0.6136989", "0.60972196", "0.60946876", "0.6093589", "0.60753244", "0.6063225", "0.60029024", "0.5996274", "0.5993236", "0.5984397", "0.5978963", "0.59674275", "0.596382...
0.62579167
5
Compute drift score as the percentage of overlapping probabilities
def compute_drift_score(ref_col_prob, col_prob): return sum(abs(np.asarray(ref_col_prob) - np.array(col_prob)) * 100)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calculate(self):\n\n gt = self.ground_truth.flatten().astype(np.int8)\n seg = self.segmentation.flatten().astype(np.int8)\n\n probability_difference = np.absolute(gt - seg).sum()\n probability_joint = (gt * seg).sum()\n\n if probability_joint != 0:\n return probabi...
[ "0.6732613", "0.6518283", "0.6426524", "0.6424136", "0.6334499", "0.6254541", "0.6188233", "0.61744064", "0.6095827", "0.6077343", "0.6045876", "0.5990594", "0.5986692", "0.59864265", "0.59614843", "0.5960128", "0.5936167", "0.5935803", "0.59078395", "0.590509", "0.58760846",...
0.7712985
0
Combine training and inference datasets as one data frame
def combine_train_infer(train_file, infer_dir): train_df = pd.read_feather(train_file) time_range = range(len([f for f in os.listdir(infer_dir) if 'feather' in f])) infer_df_list = [pd.read_feather(f'{infer_dir}/{t}.feather') for t in time_range] comb_df_list = [] train_df.index = [-1] * len(train_df) comb_df_list.append(train_df) for t in time_range: df = infer_df_list[t] df.index = [t] * len(df) comb_df_list.append(df) return pd.concat(comb_df_list), train_df, infer_df_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_training_data():\n \n X = pd.read_csv('../data/train_values.csv').set_index('sequence_id')\n y = pd.read_csv('../data/train_labels.csv').set_index('sequence_id')\n return X, y", "def triples(self):\n return pd.concat((self._load_train(), self._load_valid(), self._load_test()))", "def...
[ "0.6369666", "0.63546294", "0.63196236", "0.62988764", "0.6283459", "0.62768555", "0.6249582", "0.6194825", "0.6187834", "0.61582404", "0.6153339", "0.6152186", "0.6096578", "0.6096578", "0.6080581", "0.60757023", "0.6061717", "0.6044782", "0.60430914", "0.6011937", "0.597610...
0.66650474
0
Call the shell script that handles BLAST database formatting.
def format_blast(makeblastdb_path, fname): # The script is written in shell, so this function just calls it and # checks the output # Build the shell command cmd = ['bash', DBFORMAT_SCRIPT, makeblastdb_path, fname] # Execute the script # shell=False to ensure that we aren't executing commands from untrusted # sources p = subprocess.Popen( cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() return (out, err)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def command_dbtool(self):\n dbtool.main(*self.args())", "def makeblastdb(files, db_name, db_type):\n with open(db_name + \".pin\", \"w\") as f:\n f.write(\"\\n\".join(db_name))\n return subprocess.run([\"makeblastdb\", \"-in\", db_name + \".pin\", \"-dbtype\", db_type)", "def blastn_command...
[ "0.615836", "0.6075764", "0.59140235", "0.58454984", "0.58075655", "0.5771629", "0.5752413", "0.57355833", "0.5695151", "0.5693321", "0.5688143", "0.5677002", "0.5629271", "0.56139135", "0.56128824", "0.55926937", "0.55580354", "0.5531508", "0.55243224", "0.55176175", "0.5516...
0.7314453
0
Which listofvalues does every element of series match first?
def which_lov(series: pd.Series, patterns: Sequence[Sequence[Any]], method: Optional[Union[Callable, str]] = None, **kwargs) -> np.ndarray: elov = [(i + 1, v) for i, lov in enumerate(patterns) for v in lov] if not elov: return np.zeros(series.size, int) num, value = zip(*elov) lov_idx_plus = np.concatenate(([0], num)) if method is None: mm = series.to_numpy() == np.array(value)[:, np.newaxis] elif not callable(method): # assume name of pd.Series.str method ptns = pd.Series(value) kwargs['na'] = False do_match = getattr(series.str, method) mm = ptns.apply(do_match, **kwargs).values else: mm = method(series, value, **kwargs) return lov_idx_plus[mm.any(axis=0) + mm.argmax(axis=0)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def verify_value_occurence_in_series(value, series):\n \n series_values_occurence = series.value_counts()\n if value in series_values_occurence:\n return series_values_occurence[value]", "def map_values_to_value_list(value_list, values):\n return [value_list.index(x) for x in values]", ...
[ "0.6192266", "0.5891128", "0.5607933", "0.5490202", "0.53656596", "0.5315335", "0.53010494", "0.5252702", "0.52464557", "0.5246342", "0.522789", "0.51933503", "0.51531285", "0.51512164", "0.513468", "0.5120946", "0.5081794", "0.5081263", "0.50539905", "0.50522214", "0.5044985...
0.48023164
44
Returns tag of the first matched ListofValues. For each element in ``series`` returned is the tag of the listofvalues in the dictionary of LoVs ``taglov`` which first matches the element with one of its values OR value from donor with the same index OR ``na``.
def which_tag(series: pd.Series, taglov: Union[TagLoV, Any], na: Any, donor: pd.Series = None, method: Optional[Union[Callable, str]] = None, **kwargs): if series.empty: return series if not isinstance(taglov, TagLoV): taglov = TagLoV(taglov) lov_idx_plus = which_lov(series, taglov.lovs, method, **kwargs) tags_plus = np.array((na, *taglov.tags)) result = pd.Series(tags_plus[lov_idx_plus], index=series.index) if isinstance(donor, pd.Series): # take unmatched values from donor unmatched_idx = series.index[~lov_idx_plus.astype(bool)] if not unmatched_idx.empty: take_idx = unmatched_idx.intersection(donor.index) if not take_idx.empty: result[take_idx] = donor[take_idx] return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tag_one(self, tokens, index, history):\n tag = None\n for tagger in self._taggers:\n tag = tagger.choose_tag(tokens, index, history)\n if tag is not None:\n break\n return tag", "def find_usefull_tags(tags, tagmodel, tag_count_vect):\n\n final_tags...
[ "0.61236465", "0.5222297", "0.52140766", "0.49421668", "0.49218923", "0.49208298", "0.47892955", "0.47874418", "0.47012714", "0.46522093", "0.4625487", "0.46219954", "0.45822042", "0.45629826", "0.45510745", "0.45039132", "0.44938043", "0.44818074", "0.44803494", "0.44793394", ...
0.7129057
0
prepro 200x235x3 uint8 frame into 8300 (83x100) 1D float vector
def prepro(I): # """ prepro 200x235x3 uint8 frame into 10000 (100x100) 1D float vector """ I = I[35:200] # crop - remove 35px from start & 35px from end of image in x, to reduce redundant parts of image (i.e. after ball passes paddle) I = I[::2,::2,0] # downsample by factor of 2 I[I == 43] = 0 # erase background (background type 1) I[I != 0] = 1 # everything else (paddles, ball) just set to 1 return I.astype(np.float).ravel()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def preprocess(self, frame: np.ndarray) -> torch.TensorType:\n tensor = cv.resize(frame, (self.IMGSZ, self.IMGSZ)) \n tensor = tensor.transpose(2, 0, 1)\n tensor = torch.from_numpy(tensor)\n tensor = torch.unsqueeze(tensor, 0)\n tensor = tensor.half() if self.half else tensor.f...
[ "0.6424506", "0.6271997", "0.6116359", "0.6106592", "0.6086693", "0.6025813", "0.6025813", "0.59201217", "0.586013", "0.5815289", "0.5804463", "0.564739", "0.5637826", "0.56362927", "0.56270623", "0.5616849", "0.5541839", "0.5488203", "0.54803914", "0.54658365", "0.54447323",...
0.6721163
0
take 1D float array of rewards and compute discounted reward
def discount_rewards(self, r, gamma): discounted_r = np.zeros_like(r) running_add = 0 for t in reversed(range(0, r.size)): if r[t] != 0: running_add = 0 # reset the sum, since this was a game boundary (pong specific!) running_add = running_add * gamma + r[t] discounted_r[t] = running_add return np.array(discounted_r, dtype=np.float64)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def discount_rewards(rewards):\r\n discounted_r = np.zeros_like(rewards)\r\n running_add = 0\r\n for t in reversed(range(0, len(rewards))): \r\n running_add = running_add * reward_discount + rewards[t]\r\n discounted_r[t] = running_add\r\n return discounted_r", "def discoun...
[ "0.8100923", "0.78397125", "0.7792625", "0.7742053", "0.7653779", "0.75987715", "0.7583268", "0.75820553", "0.7579539", "0.7536054", "0.7519005", "0.75047183", "0.7467692", "0.7456702", "0.7433794", "0.7426565", "0.74062765", "0.73950297", "0.7370545", "0.73564714", "0.732851...
0.69414437
29
backward pass. (eph is array of intermediate hidden states)
def policy_backward(self, eph, epx, epdlogp, model_type): db2 = sum(epdlogp)[0] dW2 = np.dot(eph.T, epdlogp).ravel() dh = np.outer(epdlogp, self.model['W2_' + model_type]) dh[eph <= 0] = 0 # backpro prelu db1 = sum(dh) dW1 = np.dot(dh.T, epx) return {'W1_' + model_type: dW1, 'W2_' + model_type: dW2, 'b1_' + model_type: db1, 'b2_' + model_type: db2}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def backward(self, dout):\n \n ########################\n # PUT YOUR CODE HERE #\n #######################\n for l in range(len(self.layers)-1,-1,-1):\n act_dout = self.activations[l].backward(dout)\n dout = self.layers[l].backward(act_dout)\n ########################\n # END OF YOU...
[ "0.6939449", "0.66526806", "0.658141", "0.65265906", "0.65174395", "0.6487711", "0.64826727", "0.6444466", "0.643047", "0.64284015", "0.64284015", "0.6420275", "0.6388756", "0.6362412", "0.63552135", "0.6349587", "0.630616", "0.6306138", "0.62945306", "0.6274134", "0.62633884...
0.0
-1
Main function to process user input and then generate the description files for each run
def run_main():
    """Main function to process user input and then generate the
    description files for each run.

    For every dataset subdirectory holding *.xed files, writes
    info/<run>_<dataset>_files.csv listing SRM URIs.
    """
    parser = argparse.ArgumentParser(description="Scan a run directory and create files to ")
    parser.add_argument('--run-directory', dest='run_directory',
                        action='store', default='',
                        help='path to directory with xed files to process')
    args = parser.parse_args(sys.argv[1:])
    if not os.path.isdir(args.run_directory):
        sys.stderr.write("{0} is not a directory, exiting\n".format(args.run_directory))
        return 1
    # run name = last path component, even when the path ends in a slash
    run_name = os.path.abspath(args.run_directory)
    leaf = os.path.basename(run_name)
    if leaf:
        run_name = leaf
    else:
        run_name = os.path.split(run_name)[0].split('/')[-1]
    if not os.path.exists('info'):
        os.mkdir('info')
    for dataset in os.listdir(args.run_directory):
        if not os.path.isdir(os.path.join(args.run_directory, dataset)):
            continue
        xed_files = glob.glob(os.path.join(args.run_directory, dataset, '*.xed'))
        if len(xed_files) == 0:
            continue  # no CSV for datasets without xed files
        csv_filename = "info/{0}_{1}_files.csv".format(run_name, dataset)
        with open(csv_filename, 'w') as file_obj:
            csv_writer = csv.writer(file_obj)
            csv_writer.writerow(['Run', 'Data Set', 'File'])
            for entry in xed_files:
                uri = ("srm://ceph-se.osgconnect.net:8443/srm/v2/"
                       "server?SFN=/cephfs/srm/xenon/" +
                       entry.replace('/xenon/', ''))
                csv_writer.writerow([run_name, dataset, uri])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main():\n args = parse_args()\n if check_args(args):\n read_descriptions(args)\n generate_deletes(args)", "def main():\r\n# Checking if argument was provided\r\n if len(sys.argv) <=1:\r\n print_usage()\r\n sys.exit(1)\r\n \r\n for arg in sys.argv:\r\n # C...
[ "0.714176", "0.6949852", "0.68220896", "0.67618006", "0.66505563", "0.6622678", "0.6581008", "0.6578231", "0.65768015", "0.65547335", "0.6551714", "0.6534173", "0.6517532", "0.6512704", "0.6500041", "0.64553374", "0.6445541", "0.644022", "0.6437129", "0.6432362", "0.64092803"...
0.6324489
33
Get the adjoint for an arbitrary dimension input.
def get_reduced_indices(*indices, axis, keepdims):
    """Get the adjoint for an arbitrary dimension input.

    Maps input indices to the indices valid after a reduction over
    `axis`: reduced axes are pinned to 0 when `keepdims` is true,
    otherwise dropped entirely.
    """
    index_list = list(indices)
    # normalize negative axes; these axes vanish after the reduction
    reduce_axes = ft_util.refine_reduce_axis(index_list, list(axis))
    if keepdims:
        reduced = [0 if pos in reduce_axes else idx
                   for pos, idx in enumerate(index_list)]
    else:
        reduced = [idx for pos, idx in enumerate(index_list)
                   if pos not in reduce_axes]
    return tuple(reduced)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def adjoint(self: T) -> types.Matrix:", "def adjoint(self):\n if self.domain.field != self.range.field:\n raise NotImplementedError('adjoint not defined since fields '\n 'of domain and range differ ({} != {})'\n ''.format...
[ "0.6461707", "0.627058", "0.62327474", "0.6097227", "0.60359955", "0.60107726", "0.598409", "0.5767014", "0.563796", "0.56376064", "0.5606035", "0.5533599", "0.5419022", "0.52535206", "0.5236404", "0.50762033", "0.5064964", "0.5020197", "0.49542275", "0.49254653", "0.49106458...
0.0
-1
Test _arrange_test_result method with only one module.
def test_arrange_test_result_one_module(self):
    """Test _arrange_test_result method with only one module."""
    # Three passed, two failed, one ignored result split across two
    # reporters; all share the default module/runner names.
    pass_1 = self._create_test_result(status=test_runner_base.PASSED_STATUS)
    pass_2 = self._create_test_result(status=test_runner_base.PASSED_STATUS)
    pass_3 = self._create_test_result(status=test_runner_base.PASSED_STATUS)
    fail_1 = self._create_test_result(status=test_runner_base.FAILED_STATUS)
    fail_2 = self._create_test_result(status=test_runner_base.FAILED_STATUS)
    ignore_1 = self._create_test_result(status=test_runner_base.IGNORED_STATUS)
    reporter_1 = result_reporter.ResultReporter()
    reporter_1.all_test_results.extend([pass_1, pass_2, pass_3])
    reporter_2 = result_reporter.ResultReporter()
    reporter_2.all_test_results.extend([fail_1, fail_2, ignore_1])
    info_dict = {}
    aei.AtestExecutionInfo._arrange_test_result(info_dict, [reporter_1, reporter_2])
    # The total summary must aggregate counts across both reporters.
    expect_summary = {aei._STATUS_IGNORED_KEY : 1,
                      aei._STATUS_FAILED_KEY : 2,
                      aei._STATUS_PASSED_KEY : 3}
    self.assertEqual(expect_summary, info_dict[aei._TOTAL_SUMMARY_KEY])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_arrange_test_result_multi_module(self):\n group_a_pass_1 = self._create_test_result(group_name='grpup_a',\n status=test_runner_base.PASSED_STATUS)\n group_b_pass_1 = self._create_test_result(group_name='grpup_b',\n ...
[ "0.7826515", "0.6627111", "0.6378898", "0.6037766", "0.58032465", "0.5796728", "0.57169634", "0.5695606", "0.5693893", "0.56901085", "0.5688211", "0.5681043", "0.5648296", "0.56379336", "0.5626959", "0.56251144", "0.56239635", "0.5623645", "0.5619237", "0.5617106", "0.5615719...
0.8463349
0
Test _arrange_test_result method with multi module.
def test_arrange_test_result_multi_module(self):
    """Test _arrange_test_result method with multi module."""
    # Results spread over three module groups with different mixes of
    # passed/failed/ignored statuses, delivered by two reporters.
    group_a_pass_1 = self._create_test_result(group_name='grpup_a',
                                              status=test_runner_base.PASSED_STATUS)
    group_b_pass_1 = self._create_test_result(group_name='grpup_b',
                                              status=test_runner_base.PASSED_STATUS)
    group_c_pass_1 = self._create_test_result(group_name='grpup_c',
                                              status=test_runner_base.PASSED_STATUS)
    group_b_fail_1 = self._create_test_result(group_name='grpup_b',
                                              status=test_runner_base.FAILED_STATUS)
    group_c_fail_1 = self._create_test_result(group_name='grpup_c',
                                              status=test_runner_base.FAILED_STATUS)
    group_c_ignore_1 = self._create_test_result(group_name='grpup_c',
                                                status=test_runner_base.IGNORED_STATUS)
    reporter_1 = result_reporter.ResultReporter()
    reporter_1.all_test_results.extend([group_a_pass_1, group_b_pass_1, group_c_pass_1])
    reporter_2 = result_reporter.ResultReporter()
    reporter_2.all_test_results.extend([group_b_fail_1, group_c_fail_1, group_c_ignore_1])
    info_dict = {}
    aei.AtestExecutionInfo._arrange_test_result(info_dict, [reporter_1, reporter_2])
    # Each module group keeps its own per-status summary...
    expect_group_a_summary = {aei._STATUS_IGNORED_KEY : 0,
                              aei._STATUS_FAILED_KEY : 0,
                              aei._STATUS_PASSED_KEY : 1}
    self.assertEqual(
        expect_group_a_summary,
        info_dict[aei._TEST_RUNNER_KEY]['someRunner']['grpup_a'][aei._SUMMARY_KEY])
    expect_group_b_summary = {aei._STATUS_IGNORED_KEY : 0,
                              aei._STATUS_FAILED_KEY : 1,
                              aei._STATUS_PASSED_KEY : 1}
    self.assertEqual(
        expect_group_b_summary,
        info_dict[aei._TEST_RUNNER_KEY]['someRunner']['grpup_b'][aei._SUMMARY_KEY])
    expect_group_c_summary = {aei._STATUS_IGNORED_KEY : 1,
                              aei._STATUS_FAILED_KEY : 1,
                              aei._STATUS_PASSED_KEY : 1}
    self.assertEqual(
        expect_group_c_summary,
        info_dict[aei._TEST_RUNNER_KEY]['someRunner']['grpup_c'][aei._SUMMARY_KEY])
    # ...while the top-level summary aggregates across all groups.
    expect_total_summary = {aei._STATUS_IGNORED_KEY : 1,
                            aei._STATUS_FAILED_KEY : 2,
                            aei._STATUS_PASSED_KEY : 3}
    self.assertEqual(expect_total_summary, info_dict[aei._TOTAL_SUMMARY_KEY])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_arrange_test_result_one_module(self):\n pass_1 = self._create_test_result(status=test_runner_base.PASSED_STATUS)\n pass_2 = self._create_test_result(status=test_runner_base.PASSED_STATUS)\n pass_3 = self._create_test_result(status=test_runner_base.PASSED_STATUS)\n fail_1 = self...
[ "0.8273559", "0.71171457", "0.59634614", "0.59479153", "0.58932537", "0.5843216", "0.57562256", "0.57490045", "0.57445383", "0.5740733", "0.57388484", "0.5723813", "0.57204384", "0.5707784", "0.5704445", "0.57039195", "0.56746626", "0.56736004", "0.5648056", "0.56323606", "0....
0.8312768
0
Test _arrange_test_result method with multi runner.
def test_arrange_test_result_multi_runner(self):
    """Test _arrange_test_result method with multi runner."""
    # Two runners: runner_a produced only passes, runner_b only
    # failures plus one ignored result.
    runner_a_pass_1 = self._create_test_result(runner_name='runner_a',
                                               status=test_runner_base.PASSED_STATUS)
    runner_a_pass_2 = self._create_test_result(runner_name='runner_a',
                                               status=test_runner_base.PASSED_STATUS)
    runner_a_pass_3 = self._create_test_result(runner_name='runner_a',
                                               status=test_runner_base.PASSED_STATUS)
    runner_b_fail_1 = self._create_test_result(runner_name='runner_b',
                                               status=test_runner_base.FAILED_STATUS)
    runner_b_fail_2 = self._create_test_result(runner_name='runner_b',
                                               status=test_runner_base.FAILED_STATUS)
    runner_b_ignore_1 = self._create_test_result(runner_name='runner_b',
                                                 status=test_runner_base.IGNORED_STATUS)
    reporter_1 = result_reporter.ResultReporter()
    reporter_1.all_test_results.extend([runner_a_pass_1, runner_a_pass_2, runner_a_pass_3])
    reporter_2 = result_reporter.ResultReporter()
    reporter_2.all_test_results.extend([runner_b_fail_1, runner_b_fail_2, runner_b_ignore_1])
    info_dict = {}
    aei.AtestExecutionInfo._arrange_test_result(info_dict, [reporter_1, reporter_2])
    # Per-runner summaries stay separate...
    expect_group_a_summary = {aei._STATUS_IGNORED_KEY : 0,
                              aei._STATUS_FAILED_KEY : 0,
                              aei._STATUS_PASSED_KEY : 3}
    self.assertEqual(
        expect_group_a_summary,
        info_dict[aei._TEST_RUNNER_KEY]['runner_a']['someModule'][aei._SUMMARY_KEY])
    expect_group_b_summary = {aei._STATUS_IGNORED_KEY : 1,
                              aei._STATUS_FAILED_KEY : 2,
                              aei._STATUS_PASSED_KEY : 0}
    self.assertEqual(
        expect_group_b_summary,
        info_dict[aei._TEST_RUNNER_KEY]['runner_b']['someModule'][aei._SUMMARY_KEY])
    # ...while the total summary aggregates across runners.
    expect_total_summary = {aei._STATUS_IGNORED_KEY : 1,
                            aei._STATUS_FAILED_KEY : 2,
                            aei._STATUS_PASSED_KEY : 3}
    self.assertEqual(expect_total_summary, info_dict[aei._TOTAL_SUMMARY_KEY])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_arrange_test_result_multi_module(self):\n group_a_pass_1 = self._create_test_result(group_name='grpup_a',\n status=test_runner_base.PASSED_STATUS)\n group_b_pass_1 = self._create_test_result(group_name='grpup_b',\n ...
[ "0.8005941", "0.7942821", "0.6458603", "0.6281796", "0.61934733", "0.6125403", "0.61176115", "0.6106937", "0.6105472", "0.6088446", "0.6071222", "0.6060519", "0.60560644", "0.6031968", "0.60201895", "0.60119915", "0.59998757", "0.5996369", "0.5989808", "0.5985094", "0.5966215...
0.84070575
0
A Helper to create TestResult
def _create_test_result(self, **kwargs):
    """A Helper to create TestResult.

    Starts from RESULT_TEST_TEMPLATE and overrides any field passed
    as a keyword argument.
    """
    base = test_runner_base.TestResult(**RESULT_TEST_TEMPLATE._asdict())
    return base._replace(**kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _makeResult(self):\n\n result = super(CustomTextTestRunner, self)._makeResult()\n result.test_case_count = self.test_case_count\n return result", "def getTestResults():", "def create_success(test, time):\n return _TestInfo(test, time)", "def create_result(main_test):\n ...
[ "0.73380005", "0.707614", "0.6962716", "0.68611425", "0.67847985", "0.671325", "0.67094016", "0.67094016", "0.66369814", "0.64441335", "0.64401895", "0.64096403", "0.62658703", "0.6241152", "0.6234344", "0.6229282", "0.6228823", "0.62096107", "0.61986804", "0.6181871", "0.611...
0.8006094
0
initializes the Minesweeper instance with a width, height, and the number of mines. Sets up a default game table, generates random mine locations and updates another table for the solution.
def __init__(self, height, width, mines):
    """initializes the Minesweeper instance with a width, height, and
    the number of mines. Sets up a default game table, generates
    random mine locations and updates another table for the solution.
    """
    self.x = int(width)
    self.y = int(height)
    # every cell starts out untouched ('-')
    self.table_state = [['-' for _ in range(self.x)]
                        for _ in range(self.y)]
    self.mine_locations = self.generate_mines(int(mines))
    self.final_table = self.generate_answer()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, rows, cols, mines):\n self.rows = rows\n self.cols = cols\n self.mines = mines\n self.opened = 0\n self.game_won = False\n self.game_lost = False\n self.board = self.__init__minefield__()\n self.tiles = self.__init__tiles__()", "def __ini...
[ "0.72239524", "0.71592915", "0.7094593", "0.6917509", "0.6810791", "0.67366654", "0.67292786", "0.6627893", "0.6587037", "0.65448916", "0.65435743", "0.6459289", "0.6438166", "0.64203", "0.6411018", "0.640888", "0.6347741", "0.6320402", "0.62783587", "0.6267229", "0.62347484"...
0.715291
2
prints the table, regardless whether it's a game state table or the answer table.
def print_table(table, exploded_at=[-1, -1]):
    """prints the table, regardless whether it's a game state table or
    the answer table.

    `exploded_at` is the [y, x] cell to render as an explosion.
    NOTE(review): mutable default argument, but it is never mutated
    here, so the shared-default pitfall does not bite.
    """
    # color codes just to look pretty
    NORMAL = '\33[10m'
    BLUE_START = '\33[104m'
    RED_START = '\33[31m'
    PURPLE_START = '\33[35m'
    GREEN_START = '\33[92m'
    ORANGE_START = '\33[93m'
    END = '\033[0m'
    s = ' %s' % BLUE_START
    # print number headers along x-axis
    for i in range(0, width):
        s += " %s" % i
        if i < 10:
            s += " " * 2
        else:
            s += " "
    s += "%s\n" % END
    # print letters for y-axis, + the relevant values in each coordinate
    # depending on table.
    for y in range(0, height):
        s += "%s %s %s \t" % (BLUE_START, Minesweeper.letters[y], END)
        for x in range(0, width):
            value = table[y][x]
            # colour each digit by how dangerous the neighbourhood is
            if value == "0":
                s += "%s%s%s" % (NORMAL, value, END)
            elif value == "1":
                s += "%s%s%s" % (GREEN_START, value, END)
            elif value == "2":
                s += "%s%s%s" % (ORANGE_START, value, END)
            elif value == "3":
                s += "%s%s%s" % (RED_START, value, END)
            elif value == "4" or value == "5" or value == "6" or value == "7" or value == "8":
                s += "%s%s%s" % (PURPLE_START, value, END)
            # special
            elif value == "-":
                s += "%s%s%s" % (NORMAL, value, END)
            elif value == Minesweeper.BOMB:
                if y == exploded_at[0] and x == exploded_at[1]:
                    # Make the bomb at the casualty site explode!
                    s += "%s%s%s" % (RED_START, Minesweeper.EXPLOSION, END)
                else:
                    # show normal bomb
                    s += "%s%s%s" % (RED_START, value, END)
            elif value == Minesweeper.FLAG:
                s += "%s%s%s" % (RED_START, value, END)
            s += " " * 3
        s += "\n"
    # use tabbing to space them nicely
    print s.expandtabs(3)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def showState(self):\n for i in self.state[0]:\n for j in self.state[1]:\n print(self.table[i][j], end=\"\")\n print(\"\")", "def displayGame(self):\n # row1 & row2 longer, row3 & row4 shorter, proper indented below\n print 'current table:'\n for k...
[ "0.71382636", "0.71104014", "0.70632297", "0.70341116", "0.6963186", "0.68770564", "0.6833327", "0.6793822", "0.6778215", "0.6727708", "0.6712857", "0.66761214", "0.654547", "0.654547", "0.65261686", "0.6415763", "0.6326378", "0.62906164", "0.6262944", "0.6261385", "0.6230227...
0.5767268
64
generate a list of viable coordinates for mines, and randomly choose them.
def generate_mines(self, number):
    """generate a list of viable coordinates for mines, and randomly
    choose them.

    Returns a list of [y, x] pairs with no duplicates.
    """
    mine_locations = []
    candidates = [[row, col] for col in range(0, self.x)
                  for row in range(0, self.y)]
    while number > 0:
        # pick a spot, then retire it so no mine is duplicated
        pick = random.choice(candidates)
        candidates.remove(pick)
        mine_locations.append(pick)
        number -= 1
    return mine_locations
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_coordinates():\n return Coordinates(random.randint(0, 14), random.randint(0, 14))", "def add_mines(self):\n for x, y in sample(list(itertools.product(range(self.width), range(self.height))), self.num_mines):\n self.grid[y][x] = self.mine", "def _generate_mines(self):\r\n ...
[ "0.6889577", "0.68553376", "0.6830914", "0.6830659", "0.66476494", "0.64785975", "0.64664406", "0.646031", "0.6445049", "0.6439096", "0.6436462", "0.64131314", "0.6397925", "0.6376756", "0.6361325", "0.6299357", "0.62326545", "0.6220271", "0.6188136", "0.61843365", "0.615677"...
0.7681125
0
populate answer table with numbers and mines
def get_neighbour(self, y, x):
    """populate answer table with numbers and mines.

    Returns the bomb marker for a mined cell, otherwise the count of
    mines in the surrounding 3x3 block as a string.
    """
    if [y, x] in self.mine_locations:
        return Minesweeper.BOMB
    # out-of-range coordinates simply never match a mine location,
    # so no bounds clipping is needed here
    neighbourhood = [[ny, nx]
                     for nx in range(x - 1, x + 2)
                     for ny in range(y - 1, y + 2)]
    count = sum(1 for cell in neighbourhood if cell in self.mine_locations)
    return str(count)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(num_q: int, point_list: List[int])-> int:\n dp_table", "def __init__(self, height, width, mines):\n self.x = int(width)\n self.y = int(height)\n self.table_state = [\n ['-' for i in xrange(0, self.x)] for j in xrange(0, self.y)]\n self.mine_locations = self.gene...
[ "0.55837727", "0.5510911", "0.5427294", "0.541556", "0.5354501", "0.5270408", "0.5229382", "0.5213455", "0.52047086", "0.5183162", "0.5158478", "0.5134398", "0.509664", "0.5092691", "0.5080551", "0.50666213", "0.50628793", "0.50590247", "0.5057949", "0.5032783", "0.5013241", ...
0.0
-1
gets number of flags nearby
def flags_nearby(self, y, x):
    """gets number of flags nearby.

    Counts flagged cells in the 3x3 block around (y, x), clipped to
    the board; returned as a string for comparison with cell digits.
    """
    count = 0
    for ye in range(y - 1, y + 2):
        for xe in range(x - 1, x + 2):
            if xe < 0 or ye < 0 or xe >= self.x or ye >= self.y:
                continue  # off the board
            if self.table_state[ye][xe] == Minesweeper.FLAG:
                count += 1
    return str(count)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def count_neighbor_flags(self, i, j):\n return np.count_nonzero(self.flags[(i-1 if i > 0 else 0):i+2, (j-1 if j > 0 else 0):j+2])", "def get_flag_count(self):\n done = self.cur.execute(\"SELECT video_ID FROM flags\")\n return done", "def count_neighbor_flags(self, x, y):\n\t\treturn sum(se...
[ "0.66515917", "0.6642287", "0.6426211", "0.6261821", "0.61500776", "0.60658044", "0.6009201", "0.5997175", "0.5990798", "0.5918034", "0.5895515", "0.5893722", "0.5876723", "0.58467567", "0.5845648", "0.5832519", "0.5824113", "0.5818109", "0.5780171", "0.57786083", "0.5770653"...
0.6600916
2
Open neighbours if the flag number matches the count.
def special_open_neighbours(self, y, x):
    """Open neighbours if the flag number matches the count.

    "Chording": when an opened cell's digit equals the number of flags
    placed around it, open all remaining neighbours. If a neighbouring
    mine was left unflagged, the game is lost.
    """
    if self.table_state[y][x] != "-" and self.table_state[y][x] == self.flags_nearby(y, x):
        l = [[ye, xe] for xe in range(
            x - 1, x + 2) if xe >= 0 for ye in range(y - 1, y + 2) if ye >= 0]
        for ye, xe in l:
            if xe >= self.x or ye >= self.y:  # do not open out of bounds
                continue
            # if it is a bomb but not flagged
            if self.final_table[ye][xe] == Minesweeper.BOMB and self.table_state[ye][xe] != Minesweeper.FLAG:
                self.show_answer_board([ye, xe])
                print "KABOOM!"
                return Minesweeper.IS_A_BOMB
        self.open_neighbours(y, x)
        self.print_table(self.table_state)
        return Minesweeper.NOT_A_BOMB
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_open(self, n_faces):\r\n count_used = Counter([item for sublist in self.tiles\r\n for item in sublist\r\n if item in self.get_borders()])\r\n if min(count_used.values()) == n_faces:\r\n self.open = False", "def check...
[ "0.6551041", "0.6264699", "0.6198644", "0.6008779", "0.59324574", "0.58290005", "0.58284974", "0.58284974", "0.58235085", "0.5753008", "0.57382154", "0.5685492", "0.56439877", "0.5639313", "0.56212676", "0.5600274", "0.55936295", "0.55705774", "0.55526507", "0.5544796", "0.55...
0.65664035
0
Open neighbours if the current coordinates are 0 and neighbours are untouched. Recursively opens if the neighbours are also 0.
def open_neighbours(self, y, x): if [y, x] in self.mine_locations: return [y, x] # generate neighbours with positive indexes l = [[ye, xe] for xe in range( x - 1, x + 2) if xe >= 0 for ye in range(y - 1, y + 2) if ye >= 0] for ye, xe in l: # if the indexes are out of the game table, skip if xe >= self.x or ye >= self.y: continue # if the current coordinates are still untouched, update their values if self.table_state[ye][xe] == '-': self.table_state[ye][xe] = self.final_table[ye][xe] # if the coordinate has a value of 0, recursively open it's neighbours. if self.final_table[ye][xe] == '0': self.open_neighbours(ye, xe)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def open_neighbour_cells(self, my_board, x, y):\n for _x in range(x-1, x+2):\n for _y in range(y-1, y+2):\n if is_valid(_x, _y):\n if is_new_move(my_board, _x, _y):\n my_board[_x, _y] = self.count_neighbour_mines(_x, _y)\n ...
[ "0.6919871", "0.6919871", "0.68459386", "0.6506167", "0.64691454", "0.615098", "0.61087185", "0.6104565", "0.60708123", "0.60645074", "0.60544246", "0.5988625", "0.592371", "0.5901404", "0.5862205", "0.5847411", "0.5810795", "0.5798435", "0.5754326", "0.57496643", "0.5704948"...
0.75434107
0
set a flag to the desired coordinates.
def flag(self, y, x):
    """set a flag to the desired coordinates.

    Only an untouched ('-') cell may be flagged; the board is
    reprinted after a successful flag.
    """
    cell = self.table_state[y][x]
    if cell == '-':
        self.table_state[y][x] = Minesweeper.FLAG
        Minesweeper.print_table(self.table_state)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setFlag(self, flag, value) -> None:\n ...", "def set_at(self,x,y,set=True):\n\t\tif ( not self._validate(x,y )):\n\t\t\treturn\n\n\t\t# set the bit in the grid\n\t\tif set:\n\t\t\tself.Grid[y] = self.Grid[y] | (1 << x)\n\t\telse:\n\t\t\tself.Grid[y] = self.Grid[y] & ~(1 << x)", "def set_flag(self, n...
[ "0.68833566", "0.688153", "0.68004435", "0.65121055", "0.64788616", "0.6427263", "0.6359339", "0.6310714", "0.6294104", "0.62276906", "0.6186711", "0.618563", "0.61684364", "0.6153594", "0.61314297", "0.61314297", "0.61314297", "0.61314297", "0.61314297", "0.61314297", "0.613...
0.5479736
92
come here when the coordinates do not have a bomb. update the table_state with the selected coordinate.
def tease_user(self, y, x):
    """come here when the coordinates do not have a bomb. update the
    table_state with the selected coordinate."""
    revealed = self.final_table[y][x]
    self.table_state[y][x] = revealed
    # a zero means no adjacent mines, so cascade-open the area
    if revealed == '0':
        self.open_neighbours(y, x)
    self.print_table(self.table_state)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def click_cell(self, event):\n if (self.world_setable):\n x, y = event.x, event.y\n row = y / self.cell_size\n col = x / self.cell_size\n if ((row in range(self.cell_row)) and\n (col in range(self.cell_col))):\n status_now = not self....
[ "0.61604476", "0.57963514", "0.5728119", "0.5681672", "0.5650815", "0.5578061", "0.55363643", "0.55130213", "0.548478", "0.5473019", "0.54568815", "0.54379356", "0.5364507", "0.5332647", "0.53126705", "0.5300106", "0.52811605", "0.5278566", "0.52776027", "0.5275714", "0.52650...
0.6044062
1
prints the answer table with print_table.
def show_answer_board(self, coords):
    """prints the answer table with print_table.

    `coords` is the [y, x] cell rendered as the explosion site.
    """
    Minesweeper.print_table(self.final_table, coords)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_table(self):\n print(\"%-12s%-12s%-12s%-12s%-12s\" % (\"index\",\"balance\",\"payment\",\"interest\",\"amortization\"))\n print(\"-------------------------------------------------------------\")\n for i in self.table[\"index\"]:\n print(\"%-12i%-12i%-12i%-12i%-12i\" % (sel...
[ "0.75249547", "0.731208", "0.72350246", "0.72027415", "0.7168257", "0.7144765", "0.71109647", "0.7010002", "0.68452394", "0.6828683", "0.6754718", "0.6754718", "0.6742807", "0.6638073", "0.66361946", "0.66236633", "0.66209453", "0.6590905", "0.656231", "0.65480316", "0.654068...
0.6439023
29
opens a tile at the respective coordinates on the table_state list.
def open_tile(self, y, x):
    """opens a tile at the respective coordinates on the table_state
    list, exploding if it holds a mine."""
    # Checks if it is a mine
    if [y, x] not in self.mine_locations:
        # strip(?)tease to the user (oh damn sexy numbers)
        self.tease_user(y, x)
        return Minesweeper.NOT_A_BOMB
    # explode
    self.show_answer_board([y, x])
    print("Boomz.")
    return Minesweeper.IS_A_BOMB
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new_tile(self):\n # replace with your code\n pass", "def new_tile(self):\n rowm, colm = self.get_ava_index()\n value = 2 if random() <= 0.90 else 4\n self.set_tile(rowm, colm, value)\n print rowm,colm,value", "def new_tile(self):\r\n # replace with your code...
[ "0.6234986", "0.6104286", "0.60611725", "0.5864138", "0.5863451", "0.58588135", "0.58218133", "0.5803234", "0.5781789", "0.5778895", "0.57396734", "0.57103294", "0.56988585", "0.56791496", "0.565901", "0.5655893", "0.56288236", "0.5625432", "0.5617881", "0.5593936", "0.558787...
0.55911297
20
Method that check if file at provided url exist.
def file_exist(file_url):
    """Method that check if file at provided url exist.

    Issues a HEAD request and treats any 2xx status as "exists".

    Args:
        file_url: URL of the file to probe.

    Returns:
        True if the server answered with a 2xx status, False on any
        other status or on a request failure.
    """
    try:
        response = requests.head(file_url)
        return 200 <= response.status_code < 300
    except requests.RequestException:
        # The old `except ConnectionError` named the *builtin*, which
        # requests' ConnectionError does not subclass, so connection
        # failures (and timeouts, invalid URLs) escaped uncaught.
        return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _url_exists(self, url):\n return url_exists(url)", "def exists(self, url):\n return (self.base_path / url).exists()", "def url_exists(url):\n # Check for URLs we can't validate\n if url.startswith(\"https://kiwiirc.com\"):\n return True\n if url.startswith(\"https://www.projec...
[ "0.7852359", "0.7844551", "0.77893347", "0.77855074", "0.7775254", "0.7766845", "0.7766845", "0.76954746", "0.76517665", "0.75399566", "0.7395437", "0.7388874", "0.7315557", "0.72969633", "0.72895604", "0.7279086", "0.7263453", "0.7241822", "0.71523726", "0.7136986", "0.70900...
0.84588355
0
Method that based on file url return appropriate hash.
def get_hash(file_url):
    """Method that based on file url return appropriate hash.

    Looks up the hash registered in HASHES for the URL's file
    extension; missing extensions come back as the string 'None'.
    """
    extension = os.path.splitext(file_url)[1]
    return str(HASHES.get(extension))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_hash(self, filepath):\n if (os.path.isfile(filepath) and not (\n os.path.islink(filepath) and self.ignorelinks)):\n file_hash = self.hashfile(open(filepath, 'rb'))\n else:\n file_hash = self.hashstring(filepath)\n if not self._increment_hash:\n ...
[ "0.7292291", "0.72585297", "0.72363", "0.7177594", "0.7159621", "0.70332694", "0.70052016", "0.6999285", "0.69315827", "0.6879491", "0.6835787", "0.6830834", "0.6828008", "0.6765713", "0.67623746", "0.67612416", "0.6758131", "0.6731684", "0.6731684", "0.67308", "0.67212594", ...
0.8334774
0
Fahrenheit to Celsius conversion. Requests temperature in Fahrenheit degrees and computes temperature in Celsius degrees and prints in Celsius scale.
def f2c_qa_function():
    """Ask the user for a Fahrenheit temperature and print it in Celsius."""
    F = float(input("Provide a Fahrenheit temperature in degrees: "))
    # fix: subtract 32 BEFORE scaling by 5/9 — the old `5/9.0*F - 32`
    # gave e.g. 212 F -> 85.8 C instead of 100 C
    C = (F - 32) * 5 / 9.0
    print("The temperature in Celsius is {:g}".format(C))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fahrenheit_to_celsius():\n fahrenheit = ent_temperature.get()\n celsius = (5 / 9) * (float(fahrenheit) - 32)\n lbl_result[\"text\"] = f\"{round(celsius, 2)} \\N{DEGREE CELSIUS}\"", "def fahrenheit_to_celsius(fahrenheit):\n offset = 32\n multiplier = 5 / 9\n celsius = (fahrenheit...
[ "0.80117464", "0.7945186", "0.7945186", "0.7777677", "0.7635342", "0.7612416", "0.75738084", "0.75246674", "0.7467973", "0.7394001", "0.73162985", "0.7294444", "0.7292795", "0.72802883", "0.72540873", "0.7165619", "0.7144465", "0.709325", "0.7084599", "0.7056874", "0.7048489"...
0.74533796
9
Take an argument as input from the command line
def f2c_cml_function():
    """Convert the Fahrenheit value in sys.argv[1] to Celsius and print it.

    Returns the Celsius value as well (backward-compatible: previous
    callers ignored the implicit None return).
    """
    import sys
    F = float(sys.argv[1])
    # fix: subtract 32 before scaling by 5/9 (was 5/9.0*F - 32)
    C = (F - 32) * 5 / 9.0
    print("The temperature in Celsius is {:g}".format(C))
    return C
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cli(args): # noqa; pylint: disable=unused-argument", "def main(args=None):", "def main(args=None):", "def main(args):", "def main(args):", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"input\", help=\"Fasta rDNA input\")\n parser.add_argument(\"output\", help=\"G...
[ "0.68680656", "0.67012274", "0.67012274", "0.66923463", "0.66923463", "0.66071945", "0.64984864", "0.6440137", "0.63924545", "0.6384571", "0.6353774", "0.63000405", "0.6298342", "0.62937367", "0.6274231", "0.6258619", "0.6252007", "0.62427706", "0.621087", "0.61711746", "0.61...
0.0
-1
Read temp from a file
def f2c_file_read_function():
    """Read a Fahrenheit temperature from data.txt and print it in Celsius.

    The value is the last whitespace-separated token of the last line.
    Returns the Celsius value (backward-compatible addition).
    """
    with open('data.txt', 'r') as infile:
        data = [line.strip().split() for line in infile]  # store data as list
    F = float(data[-1][-1])  # last token of the last line
    # fix: subtract 32 before scaling by 5/9 (was 5/9.0*F - 32)
    C = (F - 32) * 5 / 9.0
    print("The temperature in Celsius is {:g}".format(C))
    return C
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_file(self, *args):\n with open(os.path.join(self.temp_path, *args)) as fp:\n return fp.read()", "def read_file(self, file_name):\n f = file(file_name, \"r\")\n temp = f.read()\n f.close()", "def GetFileContents(self, filename):\n with tempfile.NamedTemporaryFi...
[ "0.7447779", "0.7349098", "0.6541486", "0.65207756", "0.6515084", "0.64710087", "0.6465426", "0.6465426", "0.6429184", "0.6283047", "0.62467045", "0.62438434", "0.6242521", "0.62389153", "0.62389153", "0.6197708", "0.617634", "0.6135359", "0.61328983", "0.60876274", "0.608428...
0.0
-1
Read temp from a file
def f2c_file_read_write_function():
    """Read Fahrenheit readings from Fdeg.dat and print a conversion table.

    The first three lines of Fdeg.dat are headers; each remaining line
    ends with a Fahrenheit reading. Returns the list of Fahrenheit
    values (unchanged interface).
    """
    with open('Fdeg.dat', 'r') as infile:
        data = [line.strip().split() for line in infile]  # store data as list
    data = data[3:]  # get lines with numerical values only
    F_list = [float(line[-1]) for line in data]
    # fix: subtract 32 before scaling by 5/9 (was 5/9.0*F - 32)
    C_list = [(F - 32) * 5 / 9.0 for F in F_list]
    for F, C in zip(F_list, C_list):
        print("{:6g}F {:10.2f}C".format(F, C))
    return F_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_file(self, *args):\n with open(os.path.join(self.temp_path, *args)) as fp:\n return fp.read()", "def read_file(self, file_name):\n f = file(file_name, \"r\")\n temp = f.read()\n f.close()", "def GetFileContents(self, filename):\n with tempfile.NamedTemporaryFi...
[ "0.7447779", "0.7349098", "0.6541486", "0.65207756", "0.6515084", "0.64710087", "0.6465426", "0.6465426", "0.6429184", "0.6283047", "0.62467045", "0.62438434", "0.6242521", "0.62389153", "0.62389153", "0.6197708", "0.617634", "0.6135359", "0.61328983", "0.60876274", "0.608428...
0.0
-1
Take an argument as input from the command line
def f2c_cml_exc_function():
    """Convert a command-line Fahrenheit value to Celsius, printing a
    usage message on missing or non-numeric input.

    Returns the Celsius value on success, None on bad input
    (backward-compatible: previous callers ignored the None return).
    """
    import sys
    try:
        F = float(sys.argv[1])
    except (IndexError, ValueError):
        # narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt and any bug in the body
        print("Format should be {} with a temperature in Fahrenheit"
              .format(sys.argv[0]))
        return None
    # fix: subtract 32 before scaling by 5/9 (was 5/9.0*F - 32)
    C = (F - 32) * 5 / 9.0
    print("The temperature in Celsius is {:g}".format(C))
    return C
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cli(args): # noqa; pylint: disable=unused-argument", "def main(args=None):", "def main(args=None):", "def main(args):", "def main(args):", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"input\", help=\"Fasta rDNA input\")\n parser.add_argument(\"output\", help=\"G...
[ "0.68680656", "0.67012274", "0.67012274", "0.66923463", "0.66923463", "0.66071945", "0.64984864", "0.6440137", "0.63924545", "0.6384571", "0.6353774", "0.63000405", "0.6298342", "0.62937367", "0.6274231", "0.6258619", "0.6252007", "0.62427706", "0.621087", "0.61711746", "0.61...
0.0
-1
Newton's second law of motion for measuring stopping distance. Newton's second law of motion gives d = (1/2)·v0²/(μ·g), so the stopping distance of an object in motion, like a car, can be measured. The friction coefficient μ measures how slick a road is, with a default of 0.3.
def stopping_length_function(initial_velocity=120, friction_coefficient=0.3):
    """Stopping distance from Newton's second law: d = v0^2 / (2*mu*g).

    Args:
        initial_velocity: speed in km/h (converted to m/s internally).
        friction_coefficient: dimensionless friction coefficient mu;
            the default 0.3 models a slick road.

    Returns:
        Stopping distance in metres.
    """
    g = 9.81  # gravitational acceleration, m/s^2
    v0 = initial_velocity / 3.6  # km/h -> m/s
    mu = friction_coefficient
    # fix: 0.5 instead of (1/2) — the latter is integer division under
    # Python 2 and silently made the whole result 0.0
    return 0.5 * v0 ** 2 / (mu * g)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calc_force_from_damping(v, damping, masses):\n F = masses*damping*np.diff(v, 0)\n\n return F", "def duty_cycle_by_force(newton: float, profile: GripForceProfile) -> float:\n if profile.min <= newton <= profile.max:\n return sum(ele[1] * (newton ** ele[0]) for ele in profile.polynomial...
[ "0.6221095", "0.5993694", "0.59182566", "0.5850141", "0.5800747", "0.5787989", "0.56625694", "0.55726403", "0.55664736", "0.555712", "0.5540242", "0.5521722", "0.5505632", "0.54962945", "0.5486384", "0.54847544", "0.5474211", "0.5469343", "0.5457595", "0.545249", "0.54498357"...
0.70825905
0
Integration function Using scitools.StringFunction to do integration. >>> integration.py 'sin(x)' 0 pi/2
def integrate_function(): def midpoint_integration(f, a, b, n=100): h = (b - a)/float(n) I = 0 for i in range(n): I += f(a + i*h + 0.5*h) return h*I f_formula = sys.argv[1] a = eval(sys.argv[2]) b = eval(sys.argv[3]) if len (sys.argv) >= 5: n = int(sys.arvg[4]) else: n = 200 from scitools.StringFunction import StringFunction f = StringFunction(f_formula) # turn formula into f(x) func. """ >>> g = StringFunction('A*exp(-a*t)*sin(omega*x)', independent_variable='t', A=1, a=0.1, omega=pi, x=0.5) >>> g.set_parameters(omega=0.1) >>> g.set_parameters(omega=0.1, A=5, x=0) >>> g(0) 0.0 >>> g(pi) 2.8382392288852166e-15 """ I = midpoint_integration(f, a, b, n) print("Integral of {:s} on [{:g}, {:g}] with n ={:d}: {:g}" \ .format(f_formula, a, b, n, I))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def integrate(f, inf_lim, sup_lim):\n function = get_function_from_text(f)\n return sp_integrate.quad(function, inf_lim, sup_lim)[0]", "def sin(x):\r\n # see decorator for function body\r", "def f(x):\n return (2.0*math.sin(10.0*x+1.0)+1.0)", "def a_math_function():\n return np.sin(2*np.pi)", ...
[ "0.63227785", "0.62122375", "0.61956286", "0.60972846", "0.6085416", "0.60581106", "0.60526955", "0.60383034", "0.6019527", "0.5990838", "0.5950136", "0.5950136", "0.5950136", "0.5950136", "0.58858174", "0.588468", "0.58532083", "0.58137757", "0.5803429", "0.5779814", "0.5709...
0.74676055
0
Hmmm. There should aways be some common base path.
def _find_base_path(self): paths = [path for path, content in self._templates] if len(paths) == 1: return os.path.dirname(paths[0]) return common_path_prefix(paths)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def base_dir(self):\n pass", "def get_base_path(self) -> str:\n raise NotImplementedError()", "def base_path(self):\n return self.setup.base_path", "def base(path1, *paths):\r\n return BASE_DIR.relpathto(path1.joinpath(*paths))", "def base():\n print(CFG.base.path)", "def base_...
[ "0.75021005", "0.7379301", "0.71568936", "0.71341574", "0.70445836", "0.7044528", "0.6859555", "0.68217397", "0.6766423", "0.6734894", "0.6719331", "0.6719189", "0.67174464", "0.657178", "0.6568198", "0.6524185", "0.64243567", "0.6399701", "0.6332476", "0.63293785", "0.632134...
0.7126339
4
sets attribute 'view' to false to close menu
def set_view_false(self) -> None: self.view = False pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def close_menu():\n m = GD.gui.menu.item('Tools')\n if m :\n m.remove()", "def close_menu(self):\n self.menu.quit()", "def close(self):\n self.dismiss()\n screenmanager.change_to('main_menu')", "def OnClose(self, event):\n self.Show(False)", "def OnClose(self, event...
[ "0.6946226", "0.68482953", "0.66403145", "0.6547416", "0.651479", "0.64087963", "0.6365546", "0.6273542", "0.6257272", "0.6194157", "0.6145123", "0.6080416", "0.60505515", "0.60492814", "0.60181034", "0.60166675", "0.60109", "0.60109", "0.60109", "0.6010135", "0.6009737", "...
0.61704594
10
extract each line of text from the specified text file
def load_links(self) -> Tuple[List[str], List[str]]: with open(URL_FILE, 'r') as txt_file: lines = txt_file.read().split() urls = [] for line in lines: urls.append(line.split(',')[0]) return lines, urls
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_text_file(file_path):\n with open(file_path, 'r') as f:\n for line in f:\n line = line.rstrip()\n if not line:\n continue\n yield line", "def read_text_file(file_name):\n target_file = open(file_name)\n lines = target_file.readlines()\n\n ...
[ "0.722885", "0.7041475", "0.69846565", "0.6924913", "0.69187284", "0.6857259", "0.68436337", "0.6830081", "0.6812712", "0.68080425", "0.6784779", "0.6754171", "0.6750703", "0.6750703", "0.6634743", "0.6614622", "0.66041446", "0.65703064", "0.65577686", "0.6539482", "0.651655"...
0.0
-1
prints out index, url link
def view_registry(self) -> None: arr = self.load_links()[0] for i,v in enumerate(arr): print(f"<{i}: {v}>\n") pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def printLinks(self):\n self.browsed.sort()\n sys.stderr.write(\"\\n+ \" + _(\"URLs\") + \":\\n\")\n for lien in self.browsed:\n print(lien)", "def index():\n g.data['api_version'] = API_VERSION\n g.data['apilib_version'] = API_VERSION\n g.data['oar_version'] = VERSION\n ...
[ "0.6662168", "0.63025343", "0.62125635", "0.62048537", "0.6174066", "0.61573774", "0.61266476", "0.60956967", "0.60887426", "0.6074477", "0.60500443", "0.6042261", "0.6034719", "0.6016362", "0.5997981", "0.59627646", "0.59619063", "0.59611434", "0.5957981", "0.59331363", "0.5...
0.0
-1
this method is called if the item is not a book return cannot be of type str, as later on an iterator incorrectly iterates through each char instead of the string as a whole
def inp_item_price(self) -> List[str]: return [str(input("Enter desired price for item: "))]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _is_str(item):\n return isinstance(item, str)", "def test_str(self):\n item = self.item\n\n self.assertEqual(str(item), self.item_raw['name'])", "def nonstringiter(obj):\n return not isinstance(obj, string_types) and isinstance(obj, Iterable)", "def _is_good_iterable(obj):\n return...
[ "0.67216665", "0.59875196", "0.5834853", "0.56345963", "0.5633404", "0.5623628", "0.5504701", "0.550421", "0.5502145", "0.5502145", "0.5502145", "0.5437296", "0.54353356", "0.54093117", "0.5380066", "0.5354463", "0.5319337", "0.52930754", "0.5278437", "0.52694666", "0.5256960...
0.0
-1
appends entry to text document
def add_registry(self) -> None: # inits functions corresponding to user input and takes in url input item_options = {'n': self.inp_item_price, 'y': self.inp_book_prices} url = str(input("Enter URL to amazon item: ")) # validates url input - prevents inputting duplicate and/or blank URLs if(url == "" or url in self.load_links()[1]): print("Item not added - URL already exists or is blank") return # user-input price(s) -> then -> validates price input prices = item_options.get(self.input_item_category())() try: for price in prices: float(price) except ValueError: print("Do not include any letters or symbols other than '.' - Item not added!") return # writes input as a line of text to text file with open(URL_FILE, 'a') as text_file: text_file.write(self.format_string(url, prices)) pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add(self, entry):\n \"An entry is a tuple of (id, datatime, text).\"\n id = entry[0]\n datee = entry[1]\n text = re.sub('[^A-Za-z0-9]+', ' ', entry[2].lower())\n self.recordsDict[id].create(id, datee, entry[2])\n for word in text.split():\n self.wordDict[wor...
[ "0.7126714", "0.6999089", "0.685365", "0.67971444", "0.6738247", "0.64986163", "0.64700305", "0.64590263", "0.645142", "0.6435323", "0.64345604", "0.6418391", "0.6415381", "0.639371", "0.6388531", "0.63824403", "0.63644826", "0.63609344", "0.63544655", "0.6342864", "0.6342211...
0.0
-1
user enters an integer and the corresponding link is deleted
def delete_registry(self) -> None: self.view_registry() links = self.load_links()[0] try: url_to_delete = links[abs(int(input("Enter no. of URL to delete: ")))] except IndexError: print('Item not found - Nothing was deleted') return with open(URL_FILE, 'w') as f: for link in links: if(link != url_to_delete): f.write(link+'\n')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete():\n id_num = int(input('Enter the ID number of the item you wish to delete\\n'))\n db_actions.remove(id_num)", "def unlink(self, link_id):", "def remove_link():", "def delete():", "def delete_secret_link(link_id):\n\n Secret_Link.objects.filter(link_id=link_id).delete()", "def delete...
[ "0.7272275", "0.7188536", "0.6581015", "0.6359355", "0.63263273", "0.62977743", "0.6175496", "0.6145932", "0.6075425", "0.607078", "0.6014249", "0.59680676", "0.59213567", "0.59148896", "0.5872791", "0.5862901", "0.5848978", "0.58246154", "0.5816256", "0.58000165", "0.5792212...
0.71173656
2
perform_destroy is used to performance a logic delete
def perform_destroy(self, instance): instance.is_active = not instance.is_active instance.save()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def perform_destroy(self, instance):\n pass", "def do_destroy(self, arg):\n obj = self.verify(arg, 2)\n if obj:\n del storage.all()[obj]\n storage.save()", "def destroy(self):", "def destroy(self):", "def destroy(self):", "def delete():", "def _destroy(self):"...
[ "0.7563987", "0.6974102", "0.69603705", "0.69603705", "0.69603705", "0.68129027", "0.67679864", "0.6733365", "0.67058355", "0.6675282", "0.6674819", "0.66600496", "0.66232634", "0.66232634", "0.66232634", "0.66232634", "0.65740126", "0.6546043", "0.6532895", "0.6532895", "0.6...
0.6041425
63
This function sleeps for 1 second, then prints out the current time. Notice that this is a task that "blocks" execution of other code
def sundial(): time.sleep(1) print(f"Sundial: {dt.now()}")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def task_display_funny_time():\n print(\"funny time is %s\" % datetime.datetime.now())\n logger.info(\"Hurray its working\")", "def print_time(s, start_time):\n print(\"%s, time %ds, %s.\" % (s, (time.time() - start_time), time.ctime()))\n sys.stdout.flush()\n return time.time()", "def sleep_for...
[ "0.6798046", "0.66427785", "0.66389567", "0.66341007", "0.66341007", "0.6586261", "0.6557557", "0.6542507", "0.6538116", "0.65356725", "0.64685106", "0.6458485", "0.64469516", "0.6442502", "0.6440117", "0.6428229", "0.64215976", "0.6406094", "0.63931483", "0.638817", "0.63853...
0.6198006
36
Create and return a function that will extract a date, validate it, and return an ISO formatted date if it is valid, or an empty string if it is not. We need this because the "date recorded" field is directly from the Talking Book, and, as such, is very likely to contain garbage.
def make_date_extractor(md_field: str) -> Callable: def extract(props: Dict[str, str]) -> str: ds = '' v = props.get(md_field, '') try: d = datetime.strptime(v, '%Y/%m/%d') ds = d.strftime('%Y%m%d') except Exception: pass return ds return extract
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_isodate(date_str):\n iso_date = None\n\n if not date_str:\n return None\n\n #first, is it already a valid isodate?\n try:\n isodate.parse_date(date_str)\n return date_str\n except isodate.ISO8601Error, e:\n # if not, try to parse it\n try:\n iso_...
[ "0.65887856", "0.63256896", "0.6298983", "0.622432", "0.62181807", "0.61893153", "0.614591", "0.61219233", "0.61208475", "0.6105145", "0.6070044", "0.6062799", "0.6033025", "0.6019465", "0.59857774", "0.59718645", "0.59547275", "0.593682", "0.5933253", "0.59316033", "0.593141...
0.61388487
7
r"""Pass the inputs (and mask) through the decoder layer.
def forward(self, tgt, memory, tgt_mask=None, memory_mask=None, tgt_key_padding_mask=None, memory_key_padding_mask=None): # type: (Tensor, Tensor, Optional[Tensor], Optional[Tensor], Optional[Tensor], Optional[Tensor]) -> Tensor #tgt=SpeechFeature #memory=Text Feature torch.cuda.empty_cache() tgt2 = self.self_attn(tgt, tgt, tgt, attn_mask=tgt_mask, key_padding_mask=tgt_key_padding_mask)[0] tgt = tgt + self.dropout1(tgt2) tgt = self.norm1(tgt) tgt2 = self.multihead_attn(tgt, memory, memory, attn_mask=memory_mask, key_padding_mask=memory_key_padding_mask)[0] tgt = tgt + self.dropout2(tgt2) tgt = self.norm2(tgt) tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt)))) tgt = tgt + self.dropout3(tgt2) tgt = self.norm3(tgt) #tgt=tgt.cpu() tgt2=tgt2.cpu() return tgt
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decoder(self, embedded_inputs, decoder_input0,\n decoder_hidden0, encoder_outputs):\n pass", "def forward(self, src, tgt, enc_mask=None, dec_mask=None):\n m, enc_embed = self.encoder(src, enc_mask)\n # mask = mask.flatten(1)\n out, dec_embed = self.decoder(tgt, m, e...
[ "0.72808146", "0.70303583", "0.6751413", "0.6578204", "0.65489453", "0.6506799", "0.6493319", "0.6480627", "0.6457797", "0.643704", "0.6367421", "0.6358158", "0.6303986", "0.62878895", "0.6264638", "0.6258904", "0.62526155", "0.62208277", "0.6188875", "0.6128146", "0.6125585"...
0.0
-1
Calculating the psi operator for the transport and production of the enstrophy
def psi_enstrophy( Tau, # SGS; (6,64,64,64) h = False, # spatial step size flag = True): # spectral flag; default is gradient tool #---------------------------------------------------------------------# # Default variables # #---------------------------------------------------------------------# if h is False: Pi = np.pi N = 64 h = (2.0*Pi)/N #---------------------------------------------------------------------# # Preallocation variables # #---------------------------------------------------------------------# dim = np.shape(Tau)[1] Psi = np.zeros((9, dim, dim, dim)) #---------------------------------------------------------------------# # Calculating psi using spectral methods # #---------------------------------------------------------------------# if flag is False: kspec = np.fft.fftfreq(dim) * dim Kfield = np.array(np.meshgrid(kspec, kspec, kspec, indexing='ij')) #-----------------------------------------------------------------# # Psi_{11} # #-----------------------------------------------------------------# Psi[0] = np.fft.ifftn(1j*Kfield[1]*np.fft.fftn(Tau[2])).real -\ np.fft.ifftn(1j*Kfield[0]*np.fft.fftn(Tau[1])).real #-----------------------------------------------------------------# # Psi_{12} # #-----------------------------------------------------------------# Psi[1] = np.fft.ifftn(1j*Kfield[1]*np.fft.fftn(Tau[4])).real -\ np.fft.ifftn(1j*Kfield[0]*np.fft.fftn(Tau[3])).real #-----------------------------------------------------------------# # Psi_{13} # #-----------------------------------------------------------------# Psi[2] = np.fft.ifftn(1j*Kfield[1]*np.fft.fftn(Tau[5])).real -\ np.fft.ifftn(1j*Kfield[0]*np.fft.fftn(Tau[4])).real #-----------------------------------------------------------------# # Psi_{21} # #-----------------------------------------------------------------# Psi[3] = np.fft.ifftn(1j*Kfield[0]*np.fft.fftn(Tau[0])).real -\ np.fft.ifftn(1j*Kfield[2]*np.fft.fftn(Tau[2])).real 
#-----------------------------------------------------------------# # Psi_{22} # #-----------------------------------------------------------------# Psi[4] = np.fft.ifftn(1j*Kfield[0]*np.fft.fftn(Tau[1])).real -\ np.fft.ifftn(1j*Kfield[2]*np.fft.fftn(Tau[4])).real #-----------------------------------------------------------------# # Psi_{23} # #-----------------------------------------------------------------# Psi[5] = np.fft.ifftn(1j*Kfield[0]*np.fft.fftn(Tau[2])).real -\ np.fft.ifftn(1j*Kfield[2]*np.fft.fftn(Tau[5])).real #-----------------------------------------------------------------# # Psi_{31} # #-----------------------------------------------------------------# Psi[6] = np.fft.ifftn(1j*Kfield[2]*np.fft.fftn(Tau[1])).real -\ np.fft.ifftn(1j*Kfield[1]*np.fft.fftn(Tau[0])).real #-----------------------------------------------------------------# # Psi_{32} # #-----------------------------------------------------------------# Psi[7] = np.fft.ifftn(1j*Kfield[2]*np.fft.fftn(Tau[3])).real -\ np.fft.ifftn(1j*Kfield[1]*np.fft.fftn(Tau[1])).real #-----------------------------------------------------------------# # Psi_{33} # #-----------------------------------------------------------------# Psi[8] = np.fft.ifftn(1j*Kfield[2]*np.fft.fftn(Tau[4])).real -\ np.fft.ifftn(1j*Kfield[1]*np.fft.fftn(Tau[2])).real #---------------------------------------------------------------------# # Calculating psi using gradient tool # #---------------------------------------------------------------------# else: #-----------------------------------------------------------------# # Psi_{11} # #-----------------------------------------------------------------# Psi[0] = np.gradient(Tau[2],h, edge_order=2)[1] -\ np.gradient(Tau[1], h, edge_order=2)[0] #-----------------------------------------------------------------# # Psi_{12} # #-----------------------------------------------------------------# Psi[1] = np.gradient(Tau[4],h, edge_order=2)[1] -\ np.gradient(Tau[3], h, edge_order=2)[0] 
#-----------------------------------------------------------------# # Psi_{13} # #-----------------------------------------------------------------# Psi[2] = np.gradient(Tau[5],h, edge_order=2)[1] -\ np.gradient(Tau[4], h, edge_order=2)[0] #-----------------------------------------------------------------# # Psi_{21} # #-----------------------------------------------------------------# Psi[3] = np.gradient(Tau[0],h, edge_order=2)[0] -\ np.gradient(Tau[2], h, edge_order=2)[2] #-----------------------------------------------------------------# # Psi_{22} # #-----------------------------------------------------------------# Psi[4] = np.gradient(Tau[1],h, edge_order=2)[0] -\ np.gradient(Tau[4], h, edge_order=2)[2] #-----------------------------------------------------------------# # Psi_{23} # #-----------------------------------------------------------------# Psi[5] = np.gradient(Tau[2],h, edge_order=2)[0] -\ np.gradient(Tau[5], h, edge_order=2)[2] #-----------------------------------------------------------------# # Psi_{31} # #-----------------------------------------------------------------# Psi[6] = np.gradient(Tau[1],h, edge_order=2)[2] -\ np.gradient(Tau[0], h, edge_order=2)[1] #-----------------------------------------------------------------# # Psi_{32} # #-----------------------------------------------------------------# Psi[7] = np.gradient(Tau[3],h, edge_order=2)[2] -\ np.gradient(Tau[1], h, edge_order=2)[1] #-----------------------------------------------------------------# # Psi_{33} # #-----------------------------------------------------------------# Psi[8] = np.gradient(Tau[4],h, edge_order=2)[2] -\ np.gradient(Tau[2], h, edge_order=2)[1] return Psi
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _phi2psi(self):\n try:\n locq = self.param_q(self.rhotor)\n except:\n self._readeqdsk(self.shot)\n locq = self.param_q(self.rhotor)\n \n locphi = self.rhotor**2\n psi = integrate.cumtrapz(1/locq,locphi)\n psi = np.concatenate([[...
[ "0.66981435", "0.6571289", "0.64863443", "0.64510655", "0.6450011", "0.63740313", "0.6351212", "0.6321734", "0.6310165", "0.62904805", "0.62640244", "0.623965", "0.61741835", "0.61319333", "0.6116678", "0.60592675", "0.6031023", "0.6012221", "0.600168", "0.59927183", "0.59659...
0.66243196
1
Route the incoming request based on type (LaunchRequest, IntentRequest, etc.) The JSON body of the request is provided in the event parameter.
def execute_request(self): print( self.LOG_CLASS, '[method: main]', 'MyCityDataModel received:\n', str(self._mcd) ) # TODO: This section should be generalized for all platforms if possible """ Uncomment this if statement and populate with your skill's application ID to prevent someone else from configuring a skill that sends requests to this function. """ # if (mcd.application_id != # "amzn1.echo-sdk-ams.app.[unique-value-here]"): # raise ValueError("Invalid Application ID") if self._mcd.is_new_session: self.on_session_started() if self._mcd.request_type == "LaunchRequest": return self.on_launch() elif self._mcd.request_type == "IntentRequest": return self.on_intent() elif self._mcd.request_type == "SessionEndedRequest": return self.on_session_ended()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_event(event, context):\n print(\"Executing...\")\n router = Router(ROUTE_MAP)\n return router.route_request(event, context)", "def lambda_handler(event, context):\r\n print(\"Incoming request...\")\r\n\r\n \"\"\"\r\n Uncomment this if statement and populate with your skill's applicat...
[ "0.7192836", "0.65895516", "0.64038146", "0.63481766", "0.6275576", "0.625636", "0.6239826", "0.6221745", "0.611795", "0.60871387", "0.6013772", "0.5998908", "0.5991289", "0.59416825", "0.59129614", "0.5898017", "0.5889853", "0.5842317", "0.58409756", "0.58315444", "0.5790977...
0.0
-1
Called when the session starts.
def on_session_started(self): print( MyCityController.LOG_CLASS, '[method: on_session_started]', '[requestId: ' + str(self._mcd.request_id) + ']', '[sessionId: ' + str(self._mcd.session_id) + ']' )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_start(self, session):\n pass", "def on_session_started():\n #print(\"on_session_started\")", "def on_session_started():\n #print(\"on_session_started\")", "def on_session_started(session_started_request, session):", "def init_session(self):\n pass", "def init_session(self):\n ...
[ "0.87699443", "0.83996505", "0.83996505", "0.78695554", "0.7841736", "0.7841736", "0.7705618", "0.7561237", "0.75006485", "0.7434301", "0.7434301", "0.7434301", "0.7434301", "0.7385277", "0.7357132", "0.7287912", "0.7287912", "0.7287912", "0.7287912", "0.7287912", "0.7287912"...
0.71602905
25
Called when the user launches the skill without specifying what they want.
def on_launch(self): print( MyCityController.LOG_CLASS, '[method: on_launch]', '[requestId: ' + str(self._mcd.request_id) + ']', '[sessionId: ' + str(self._mcd.session_id) + ']' ) # Dispatch to your skill's launch return self.get_welcome_response()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_launch(launch_request, session):\n # Dispatch to your skill's launch\n return get_welcome_response()", "def on_launch(launch_request, session):\n # Dispatch to your skill's launch message\n return get_welcome_response()", "def on_launch(launch_request, session):\n print(\"on_launch reques...
[ "0.6812588", "0.6779727", "0.6764538", "0.6764138", "0.65563494", "0.65384686", "0.6528976", "0.64852035", "0.64852035", "0.6459305", "0.64153165", "0.64153165", "0.64153165", "0.64153165", "0.64153165", "0.64153165", "0.64153165", "0.64153165", "0.64153165", "0.64153165", "0...
0.6200907
31
If the event type is "request" and the request type is "IntentRequest", this function is called to execute the logic associated with the provided intent and build a response.
def on_intent(self): mcd = self._mcd print( self.LOG_CLASS, '[method: on_intent]', '[intent: ' + mcd.intent_name + ']', 'MyCityDataModel received:', mcd ) # Check if the user is setting the address. This is special cased # since they may have been prompted for this info from another intent if mcd.intent_name == "SetAddressIntent": set_address_in_session(mcd) if intent_constants.ADDRESS_PROMPTED_FROM_INTENT \ in mcd.session_attributes: # User was prompted for address from another intent. # Set our current intent to be that original intent now that # we have set the address. mcd.intent_name = mcd.session_attributes[intent_constants.ADDRESS_PROMPTED_FROM_INTENT] print("Address set after calling another intent. Redirecting " "intent to {}".format(mcd.intent_name)) # Delete the session key indicating this intent was called # from another intent. del mcd.session_attributes[intent_constants.ADDRESS_PROMPTED_FROM_INTENT] else: return get_address_from_session(mcd) # session_attributes = session.get("attributes", {}) if mcd.intent_name == "GetAddressIntent": return get_address_from_session(mcd) elif mcd.intent_name == "TrashDayIntent": return request_user_address_response(mcd) \ if intent_constants.CURRENT_ADDRESS_KEY \ not in mcd.session_attributes \ else get_trash_day_info(mcd) elif mcd.intent_name == "SnowParkingIntent": return request_user_address_response(mcd) \ if intent_constants.CURRENT_ADDRESS_KEY \ not in mcd.session_attributes \ else get_snow_emergency_parking_intent(mcd) elif mcd.intent_name == "GetAlertsIntent": return get_alerts_intent(mcd) elif mcd.intent_name == "AMAZON.HelpIntent": return self.get_welcome_response() elif mcd.intent_name == "AMAZON.StopIntent" or \ mcd.intent_name == "AMAZON.CancelIntent": return self.handle_session_end_request() elif mcd.intent_name == "UnhandledIntent": return unhandled_intent(mcd) else: raise ValueError("Invalid intent")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_intent(intent_request, session):\r\n\r\n intent = intent_request['intent']\r\n intent_name = intent_request['intent']['name']\r\n\r\n # Dispatch to your skill's intent handlers\r\n if intent_name == \"test\":\r\n return get_test_response()\r\n elif intent_name==\"inputoutputIntent\":\r...
[ "0.70845103", "0.704562", "0.69918424", "0.6985099", "0.69391847", "0.69371176", "0.6915305", "0.68666846", "0.6809054", "0.67749316", "0.67594165", "0.6753595", "0.6742854", "0.67342067", "0.6727003", "0.67222506", "0.6720902", "0.67015886", "0.6687733", "0.6676576", "0.6642...
0.57320946
70
Called when the user ends the session. Is not called when the skill returns should_end_session=true
def on_session_ended(self): print( self.LOG_CLASS, '[method: on_session_ended]', 'MyCityDataModel received:', str(self._mcd) ) return self._mcd # add cleanup logic here
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_session_end_request():\n card_title = \"Session Ended\"\n speech_output = \"Thank you for trying Cuni Control. \" \\\n \"Have a nice day! \"\n \n # Setting this to true ends the session and exits the skill.\n should_end_session = True\n return build_response({}, buil...
[ "0.79537493", "0.7887866", "0.7854766", "0.77201885", "0.7516735", "0.75162214", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", "0.7487292", ...
0.0
-1
If we wanted to initialize the session to have some attributes we could add those here.
def get_welcome_response(self): print( self.LOG_CLASS, '[method: get_welcome_response]' ) self._mcd.intent_name = "Welcome" self._mcd.output_speech = \ "Welcome to the Boston Public Services skill. How can I help you? " # If the user either does not reply to the welcome message or says # something that is not understood, they will be prompted again with # this text. self._mcd.reprompt_text = \ "For example, you can tell me your address by saying, " \ "\"my address is\" followed by your address." self._mcd.should_end_session = False return self._mcd
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_session(self):\n pass", "def init_session(self):\n pass", "def __init__(self, session):\n self.session = session", "def build_session_attributes(session):\n if 'attributes' in session.keys():\n if session['attributes']:\n session_attributes = session['attrib...
[ "0.74988097", "0.74988097", "0.73190755", "0.71198505", "0.70534444", "0.69491816", "0.68204165", "0.6755643", "0.6719715", "0.6653046", "0.6649403", "0.6623757", "0.65735793", "0.655451", "0.6542385", "0.65337366", "0.6529596", "0.64412206", "0.6429274", "0.6427235", "0.6380...
0.0
-1
Gets the info of this atom. Returns
def __repr__(self): s = '{\n' s += 'symbol: \'' + self.symbol + '\',\n' s += 'label: \'' + self.label + '\',\n' s += 'coords: ' + str(self.coords) + ',\n' s += 'mass: ' + str(self.m) + ',\n' s += 'radius: ' + str(self.radius) + '\n' s += '}\n' return s
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_atom_info(self):\n return", "def getInfo(self):\n return self._info", "def getInfo(self):\n return self.info", "def info(self):\n return self._info", "def info(self):\n return self._info", "def get_info(self) -> str:\n return self.info", "def get_atom_infos...
[ "0.8621765", "0.8073575", "0.8061986", "0.786965", "0.77836376", "0.76870424", "0.7581794", "0.7577282", "0.74306035", "0.74306035", "0.7390646", "0.7379589", "0.7364209", "0.73237324", "0.73167586", "0.73095566", "0.73095566", "0.72781247", "0.72658557", "0.7199898", "0.7180...
0.0
-1
Gets the coordinates of this atom. Returns
def get_coords(self): return self.coords
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def coordinates(self):\n return self.xy", "def get_coordinates(self):\n return self.coordinates", "def coordinates(self):\n return self._coordinates", "def coordinates(self):\n return self._coordinates", "def getCoords(self):\r\n \r\n return self.coords", "def co...
[ "0.8518098", "0.8345379", "0.82986754", "0.82986754", "0.82948023", "0.8146323", "0.8145044", "0.8112127", "0.8061573", "0.8036323", "0.8036323", "0.7918564", "0.7912341", "0.7905923", "0.789621", "0.7851619", "0.77691483", "0.77299666", "0.76837", "0.76726896", "0.766294", ...
0.8384263
1
Gets the x coordinate of this atom. Returns
def get_x(self): return self.coords[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Getxcoord(self):\n return self.x_coord", "def _get_x(self):\n return self.position.x", "def getXCoordinate(self) -> float:\n return self.x_coord", "def get_x_position(self):\n return self.actual_coordinates[0]", "def x(self):\n return self._coords[0]", "def get_pos_...
[ "0.8656004", "0.8442269", "0.84205234", "0.83736426", "0.8353082", "0.83035123", "0.82412916", "0.8212211", "0.82038295", "0.81553376", "0.8097587", "0.80942667", "0.7829044", "0.78141165", "0.78131527", "0.7794782", "0.7767253", "0.77473986", "0.773306", "0.76320076", "0.762...
0.8588003
1
Gets the y coordinate of this atom. Returns
def get_y(self): return self.coords[1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def y(self):\n return self._coords[1]", "def y(self):\n return _libsbml.Point_y(self)", "def getYCoordinate(self) -> float:\n return self.y_coord", "def getY(self):\n return self.__y", "def GetY(self):\r\n\r\n return self._y", "def getY(self):\r\n\t\treturn self._y", ...
[ "0.85608596", "0.8548253", "0.8522086", "0.84701955", "0.84196323", "0.8416182", "0.8400373", "0.8396566", "0.8396566", "0.8329309", "0.8296547", "0.82906246", "0.8284484", "0.82126814", "0.8207803", "0.8192792", "0.81803006", "0.79841197", "0.795317", "0.7928298", "0.7902805...
0.8524346
2
Gets the z coordinate of this atom. Returns
def get_z(self): return self.coords[2]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getZ(self):\n return self.position.getZ()", "def getZ(self):\n\t\treturn self.coords.z", "def z(self):\n return self._coords[2]", "def z(self):\n return self.coords[2]", "def get_z(self) -> int:\n return self.__z", "def z(self):\r\n return self.position.z", "def getZ(...
[ "0.8616124", "0.86023843", "0.8559942", "0.84333587", "0.83052397", "0.82192296", "0.80853426", "0.80163383", "0.80163383", "0.80163383", "0.79635745", "0.79482794", "0.78137195", "0.77216375", "0.7505032", "0.74130934", "0.73766494", "0.7360839", "0.730008", "0.729796", "0.7...
0.8734822
0
Gets the mass of this atom. Returns
def get_mass(self): return self.m
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mass(self):\n return self._mass", "def mass(self):\n return self._mass", "def get_mass(self):\n _pal.lib.geometry_get_mass.restype = c.c_float\n return _pal.lib.geometry_get_mass(self._geometry)", "def mass(self):\n return self._getAttribute(Attribute.mass)", "def getMole...
[ "0.8615101", "0.8615101", "0.8449791", "0.827885", "0.81125116", "0.7995724", "0.7925729", "0.7887645", "0.78556985", "0.78556985", "0.7763608", "0.76362807", "0.7610759", "0.7542108", "0.735307", "0.7303788", "0.7278605", "0.7205126", "0.7170194", "0.7108265", "0.7056702", ...
0.8783727
0
Gets the radius of this atom. Returns
def get_radius(self): return self.radius
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def radius(self):\n return self._radius", "def radius(self):\n return self._radius", "def radius(self):\n return self._radius", "def radius(self):\n return self._radius", "def radius(self):\n return self._radius", "def getRadius(self):\n return self.__radius", ...
[ "0.87892956", "0.87892956", "0.87892956", "0.87892956", "0.87892956", "0.87633866", "0.87077355", "0.8659776", "0.8640172", "0.86317974", "0.85356", "0.8518339", "0.847131", "0.83863753", "0.83786553", "0.8247246", "0.8189835", "0.8085827", "0.79818517", "0.7850247", "0.77838...
0.85236245
12
Gets the van Der Waals radius of this atom. Returns
def get_van_Der_Waals_radius(self): return self.van_Der_Waals_radius
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_radius(self):\n return self.r", "def get_radius(self):\n return self.R", "def get_radius(self):\n return self.__radius", "def radius(self) -> float:\n return get_radius_from_element(self.element)", "def get_radius(self):\n return self.radius", "def get_radius(se...
[ "0.79551435", "0.78757066", "0.78437364", "0.7793091", "0.7788298", "0.7788298", "0.7774703", "0.7747598", "0.77394575", "0.77394575", "0.77394575", "0.77394575", "0.77394575", "0.7731752", "0.7719796", "0.7713599", "0.7675112", "0.7668061", "0.7660242", "0.7640078", "0.75554...
0.86208606
0
Gets the euler tensor of this atom. Returns
def get_euler(self): return array([ coord * self.coords for coord in self.coords ])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def imu_get_euler(self):\n return self.imu.get_euler()", "def euler_integrator(self, t, y, tau):\n\n return self.plant.rhs(t, y, tau)", "def getTensor(self):\n\t\treturn self.cur_tensor", "def get_deltaE(self):\n return self.deltaE", "def e(self):\n return np.matrix([self.y - se...
[ "0.65126354", "0.6043174", "0.59547913", "0.590338", "0.5897418", "0.58401287", "0.58401287", "0.5825734", "0.5797117", "0.5762484", "0.572737", "0.5713381", "0.5662122", "0.5651244", "0.5643783", "0.56348675", "0.56310534", "0.56149095", "0.56149095", "0.56149095", "0.559066...
0.64894354
1
Rotates this atom by the given rotation matrix.
def rotate(self, rotation): self.coords = dot(rotation, self.coords) return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rotate(mat,angle):\n return np.dot(Mueller.rotator(angle), np.dot(mat, Mueller.rotator(-angle)))", "def rotate(self, matrix):\n newCoord = np.zeros(self.coord.shape)\n newCoord[:,0]=matrix[0,0]+matrix[0,1]*self.coord[:,0]+matrix[0,2]*self.coord[:,1]+matrix[0,3]*self.coord[:,2]\n n...
[ "0.73639774", "0.7307685", "0.7284679", "0.7252222", "0.7239165", "0.71252877", "0.7052873", "0.69820964", "0.69736505", "0.6949628", "0.69119763", "0.6898012", "0.6892073", "0.6847489", "0.6815046", "0.6808609", "0.6807251", "0.68026036", "0.6777418", "0.677454", "0.6742357"...
0.67754483
19
Translates this atom by the given translation vector.
def translate(self, translation): self.coords = self.coords - translation return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def translate(self, vector):\n locations = self.locations.translate(vector)\n # do not translate the orientations!\n pcs = self.pcs.translate(vector)\n self.locations = locations\n self.pcs = pcs\n return self", "def translate(self, vector):\n \n matrix = wf.tr...
[ "0.7729765", "0.7498974", "0.7481727", "0.7118544", "0.70570266", "0.6959108", "0.69552064", "0.692206", "0.68971455", "0.6878331", "0.68672305", "0.6747671", "0.6685557", "0.66658247", "0.6580302", "0.6569017", "0.65219915", "0.6508473", "0.6450745", "0.6443207", "0.6412782"...
0.6492934
18
Scales this atom by the given scale value vector.
def scale(self, scale): self.coords = self.coords * scale return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def scale_vector(vector, scale):\n return vector[0] * scale, vector[1] * scale, vector[2] * scale", "def apply_scale( vectors, scale ):\n # create a scaling matrix\n matrix = numpy.array([\n [ scale[ 0 ], 0.0, 0.0 ],\n [ 0.0, scale[ 1 ], 0.0 ],\n [ 0.0, 0.0, scale[ 2 ] ]\n ])...
[ "0.7549334", "0.7472794", "0.74334973", "0.7333459", "0.728429", "0.719025", "0.7162344", "0.71475494", "0.71475494", "0.7143308", "0.7092872", "0.7072481", "0.70674723", "0.7027254", "0.69800884", "0.6965245", "0.6939701", "0.69374204", "0.6909835", "0.68787146", "0.6864351"...
0.73805004
3
Gets the symbol of this atom. Returns
def get_symbol(self): return self.symbol
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def symbol(self):\n return self._symbol", "def symbol(self): \n return self.__symbol", "def symbol_id(self) -> str:\n return self._symbol", "def getSymbol(self):\n return _libsbml.InitialAssignment_getSymbol(self)", "def getElementSymbol(self):\n dataDict = self.__dict__\n ...
[ "0.85209036", "0.8121533", "0.76356006", "0.75384325", "0.73964155", "0.7318919", "0.73038465", "0.7173422", "0.7173422", "0.70371056", "0.69875854", "0.6953391", "0.6953391", "0.6947048", "0.6933806", "0.6931096", "0.68797255", "0.68131906", "0.6801363", "0.6762182", "0.6737...
0.85047024
1
Gets the label of this atom. Returns
def get_label(self): return self.label
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_label(self):\n\n return self._label", "def get_label(self):\n return self._label", "def GetLabel(self):\r\n\r\n return self._label", "def get_label(self, ):\n return self.attrs.get(self.AttributeNames.LABEL, None)", "def GetLabel(self) -> str:\n return self._label...
[ "0.844582", "0.843851", "0.8382816", "0.8357652", "0.8327101", "0.8326299", "0.82825094", "0.8282123", "0.82630473", "0.82630473", "0.82630473", "0.82630473", "0.82630473", "0.82630473", "0.82630473", "0.82630473", "0.82630473", "0.82630473", "0.8249067", "0.8249067", "0.8157...
0.8260186
19
Gets the chain sequence number of the amminoacid this atom belongs to. Returns
def get_ammino_chain_seq(self): return self.ammino_chain_seq
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sequence_number(self):\n return self._sequence_number", "def sequence_number(self):\n # type: () -> int\n return self._sequence_number", "def attempt_sequence_number(self):\n return self._attempt_sequence_number", "def chain_serial(self):\n return self.structure.chain_s...
[ "0.7083817", "0.6912228", "0.63440174", "0.6332537", "0.62717706", "0.62422633", "0.6234725", "0.6123495", "0.61229354", "0.6113672", "0.6113672", "0.60696846", "0.6031649", "0.6021344", "0.6019921", "0.5965752", "0.59603554", "0.59326804", "0.59226", "0.5900438", "0.58660275...
0.80430615
0
Gets the euclid distance from this atom to the given atom. Returns
def get_euclid_distance_to(self, atom): return linalg.norm(self.get_coords() - atom.get_coords())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_distance(self) -> int:\n return self.get_measurement_data().distance", "def distance(cls, atom_1, atom_2):\n\t\t\n\t\treturn np.linalg.norm((atom_1-atom_2).atom_loc)", "def __get_distance(self, game_object):\n obj_x, obj_y = game_object.get_coordinates()\n self_x, self_y = self._co...
[ "0.66581625", "0.6649904", "0.6637657", "0.6630217", "0.6601542", "0.6598605", "0.6596507", "0.63966656", "0.636073", "0.63055176", "0.62724113", "0.62593937", "0.6219049", "0.62136334", "0.62126756", "0.6210324", "0.61876845", "0.61710095", "0.616928", "0.61606264", "0.61590...
0.8762926
0
Create a PLaSM cuboid with a color an put it on this atom coords.
def plasm_cube(self, size=0.1, color=WHITE): return COLOR(color)(T([1,2,3])(self.coords)(CUBOID([size, size, size])))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_cube(color=COLOR_WHITE):\n a = Point3(-1.0, -1.0, -1.0)\n b = Point3(1.0, -1.0, -1.0)\n c = Point3(1.0, -1.0, 1.0)\n d = Point3(-1.0, -1.0, 1.0)\n e = Point3(-1.0, 1.0, -1.0)\n f = Point3(1.0, 1.0, -1.0)\n g = Point3(1.0, 1.0, 1.0)\n h = Point3(-1.0, 1.0, 1.0)\n\n obj = glGenL...
[ "0.6723132", "0.6348297", "0.60369486", "0.5786166", "0.57097834", "0.5662898", "0.56111765", "0.5570667", "0.5565409", "0.5560575", "0.55361265", "0.5518773", "0.5501009", "0.5469816", "0.54623425", "0.54447734", "0.54447734", "0.5434046", "0.54167914", "0.53902054", "0.5389...
0.77502567
0
Checks that the GsmModem in PDU mode accepts outgoing SMS, when the text is within ASCII chars 22 126.
def testSendSmsPduMode(self): # setup expectation to raise a timeout error with prompt err = errors.GsmReadTimeoutError(">") when(self.mockDevice).read_lines().thenRaise(err).thenReturn(self.oklines) self.gsm.send_sms("1234", "Test Message") # must see command with size verify(self.mockDevice, times=1).write("AT+CMGS=21\r") # must see command with text and terminating char verify(self.mockDevice, times=1).write("00110004A821430000AA0CD4F29C0E6A96E7F3F0B90C\x1a") # allow any number of reads verify(self.mockDevice, atleast=1).read_lines() verifyNoMoreInteractions(self.mockDevice)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_sms_valid(text=''):\n try:\n text.decode('ascii')\n except:\n return False\n if len(text) > 160:\n return False\n\n return True", "def testSendSmsPduModeError(self):\n\n # setup expectation to raise a non-timeout error with prompt\n when(self.mockDevice).read...
[ "0.7446568", "0.6175791", "0.59026414", "0.5805947", "0.56742376", "0.5664919", "0.55345494", "0.5473586", "0.5392989", "0.5327824", "0.53224766", "0.53210604", "0.5278678", "0.52711344", "0.5263743", "0.52219886", "0.52177036", "0.51783454", "0.5167334", "0.51671714", "0.516...
0.6486989
1
Checks that the GsmModem in PDU mode does not send message if error, when the text is within ASCII chars 22 126.
def testSendSmsPduModeError(self): # setup expectation to raise a non-timeout error with prompt when(self.mockDevice).read_lines().thenRaise(Exception("something other than timeout")) self.gsm.send_sms("1234", "Test Message") # must see command with size verify(self.mockDevice, times=1).write("AT+CMGS=21\r") # must see command to break out of command prompt verify(self.mockDevice, times=1).write("\x1b") # must NOT see command with text and terminating char verify(self.mockDevice, times=0).write("00110004A821430000AA0CD4F29C0E6A96E7F3F0B90C\x1a") # allow any number of reads verify(self.mockDevice, atleast=1).read_lines() verifyNoMoreInteractions(self.mockDevice)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_sms_valid(text=''):\n try:\n text.decode('ascii')\n except:\n return False\n if len(text) > 160:\n return False\n\n return True", "def testSendSmsPduMode(self):\n \n # setup expectation to raise a timeout error with prompt\n err = errors.GsmReadTimeout...
[ "0.68313634", "0.67669386", "0.61789453", "0.60250825", "0.579996", "0.57878447", "0.5629621", "0.5615489", "0.55831856", "0.5521425", "0.54795057", "0.5427085", "0.54028183", "0.53781205", "0.5342483", "0.53372145", "0.533206", "0.5329173", "0.5322715", "0.53093356", "0.5289...
0.71887165
0
Returns True if the content type is valid.
def is_valid_content_type(cls, content_type: str) -> bool: return content_type in cls.CONTENT_TYPES.value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _is_valid_ct(content_type: str) -> bool:\n content_type = content_type.strip()\n return _is_valid_regex(CT_CONTENT_TYPE_REGEX_PATTERN, content_type)", "def _is_valid_content_type_format(content_type: str) -> bool:\n return (\n _is_valid_ct(content_type)\n or _is_valid_pt(content_type)\...
[ "0.7517771", "0.7417243", "0.70675975", "0.6998834", "0.68573606", "0.6832993", "0.67981493", "0.6714344", "0.6592863", "0.6578096", "0.6575198", "0.6568513", "0.6541694", "0.65321887", "0.65321887", "0.6491889", "0.6491021", "0.6470095", "0.6450657", "0.63938487", "0.6392137...
0.8173388
0
Constructor for facebook sdk
def init_fb(self, **kwargs): try: self.graph = facebook.GraphAPI(access_token=fb_token, version='2.4') except Exception as e: sys.exit(str(e))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, access_token, endpoint='/me',\r\n version='2.5'):\r\n self.access_token = access_token\r\n self.endpoint = endpoint", "def initialize_facebook():\n session = FacebookSession(APP_ID, APP_SECRET, ACCESS_TOKEN)\n return FacebookAdsApi(session)", "def __init__...
[ "0.70955795", "0.70331687", "0.65805393", "0.6543465", "0.6526601", "0.64303684", "0.63264066", "0.625907", "0.6256634", "0.61950535", "0.61830455", "0.618123", "0.61768824", "0.61615527", "0.6148158", "0.6142532", "0.6112654", "0.6108066", "0.6090049", "0.6088559", "0.608694...
0.760726
0
Save event to database
def save_event(self, data): rdb.table(self.rdb_table).insert(data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save(self, event):\n self.saved_events.append(event)", "def insert_event_to_db(self):\n try:\n events_coll.insert_one(self.event_info_to_dic())\n except Exception as e:\n print(e)", "def save(self, db):\n pass", "def save_event(id):\n event = Event.que...
[ "0.7487547", "0.7325725", "0.7049459", "0.6976891", "0.69406134", "0.6901806", "0.68859524", "0.6789551", "0.66951", "0.66863483", "0.66196334", "0.65969324", "0.65858656", "0.6583082", "0.6570057", "0.6570057", "0.6570057", "0.6570057", "0.6570057", "0.6570057", "0.6570057",...
0.8077102
0
Iterate through all events pages
def get_events(self): url = '/v2.4/'+self.page_id+'/events' data = self.graph.request(url) while 'next' in data['paging'].keys(): print data['paging']['next'] data = self.graph.request(url, args={ 'limit' : 100, 'after' : data['paging']['cursors']['after'] }) return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_event_list(self, response):\n for event in response.css(\".view-content .article-title a::attr(href)\"):\n event_url = event.extract()\n yield scrapy.Request(\n response.urljoin(event_url),\n callback=self.parse_event_page,\n dont_...
[ "0.7203284", "0.6670896", "0.66397905", "0.65484786", "0.65141577", "0.64590824", "0.64310527", "0.6378027", "0.6360895", "0.63342434", "0.6316285", "0.6311675", "0.6298735", "0.6272081", "0.6216796", "0.61802506", "0.61441034", "0.6130424", "0.6043177", "0.60396385", "0.6032...
0.7202085
1
Setup the Binary Sensor platform fo EnOcean.
def setup_platform(hass, config, add_devices, discovery_info=None): dev_id = config.get(CONF_ID, None) devname = config.get(CONF_NAME, "EnOcean binary sensor") add_devices([EnOceanBinarySensor(dev_id, devname)])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup_platform(\n hass: HomeAssistant,\n config: ConfigType,\n add_entities: AddEntitiesCallback,\n discovery_info: DiscoveryInfoType | None = None,\n) -> None:\n if discovery_info is None:\n return\n binary_sensors = []\n for name in hass.data[DOMAIN]:\n if name in BINARY_SE...
[ "0.6923445", "0.6406349", "0.6367884", "0.630438", "0.6297725", "0.62513864", "0.61724937", "0.61561346", "0.61022043", "0.60586834", "0.60506696", "0.60256183", "0.59961325", "0.59727365", "0.59395474", "0.5925092", "0.5920433", "0.5904643", "0.58785045", "0.5876473", "0.587...
0.7185503
0
Initialize the EnOcean binary sensor.
def __init__(self, dev_id, devname): enocean.EnOceanDevice.__init__(self) self.stype = "listener" self.dev_id = dev_id self.which = -1 self.onoff = -1 self.devname = devname
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n self._read_calibration_data()\n self.configure_sensor(\n TemperatureOversamplings.x08,\n PressureOversamplings.x16,\n HumidityOversamplings.x08,\n IIRFilterCoefficients.FC_003,\n 250,\n 250)", "def _initialize(s...
[ "0.6457708", "0.64529383", "0.61910915", "0.604235", "0.60021144", "0.59438455", "0.59427816", "0.59158796", "0.59125113", "0.5882629", "0.58545095", "0.5852518", "0.5806404", "0.5801691", "0.58013505", "0.57857406", "0.5775398", "0.57515305", "0.573593", "0.57229674", "0.571...
0.5603905
32
The default name for the binary sensor.
def name(self): return self.devname
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def name(self):\n return f\"{DEFAULT_NAME}_{BINARY_SENSOR}\"", "def name(self):\n return f\"{self._name} {SENSOR_TYPES[self.sensor][0]}\"", "def name(self):\n return f\"{self.sensor_type['name']} ({self._mac[-5:]})\"", "def name(self):\n return f\"{self._name}_{self._sensor}\"", ...
[ "0.88304996", "0.7638713", "0.7363179", "0.73616314", "0.7301961", "0.7292507", "0.7264081", "0.7222781", "0.71043813", "0.70989096", "0.7048955", "0.7047866", "0.70316464", "0.701503", "0.69721663", "0.6962419", "0.6932336", "0.6925805", "0.6794133", "0.67933375", "0.6758342...
0.0
-1
Fire an event with the data that have changed. This method is called when there is an incoming packet associated with this platform.
def value_changed(self, value, value2): self.update_ha_state() if value2 == 0x70: self.which = 0 self.onoff = 0 elif value2 == 0x50: self.which = 0 self.onoff = 1 elif value2 == 0x30: self.which = 1 self.onoff = 0 elif value2 == 0x10: self.which = 1 self.onoff = 1 self.hass.bus.fire('button_pressed', {"id": self.dev_id, 'pushed': value, 'which': self.which, 'onoff': self.onoff})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def data_changed(self):\n self.data_changed_signal.emit(self)", "def notify(self, packet):\n\t\tself.update_listeners(packet)", "def on_state_notification(self, data):\n\n self.channel_data.update(data)\n\n # synchronize DataManager data with processed update & entity data\n self.sy...
[ "0.70797074", "0.68603", "0.6457842", "0.6428471", "0.64076215", "0.63471305", "0.63120216", "0.62468994", "0.62399685", "0.6223958", "0.61906064", "0.618284", "0.61713535", "0.6107015", "0.6090246", "0.597505", "0.59542114", "0.5942936", "0.59262264", "0.5895548", "0.5893129...
0.0
-1
Construct an empty ARFF structure.
def __init__(self): self.relation = '' self.attributes = [] self.attribute_types = dict() self.attribute_data = dict() self.comment = [] self.data = [] pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_empty_trace(self):\n self.data = np.zeros(0, dtype=np.float32)\n self.header = SEGYTraceHeader(header=None, endian=self.endian)", "def __init__(self):\n self._header = self._Node(None, None, None)\n self._trailer = self._Node(None, None, None)\n self._header._next =...
[ "0.5822378", "0.5721686", "0.56984425", "0.56752235", "0.5657628", "0.5654186", "0.5644643", "0.5617209", "0.5603904", "0.55908096", "0.557001", "0.55659646", "0.5524445", "0.5469858", "0.54600406", "0.5455272", "0.54444385", "0.5356642", "0.5347853", "0.53235525", "0.5297517...
0.0
-1