query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
generates initial hidden states for each agent
def generate_initial_hidden_states(self, batch_size, test_mode=False, caller=None): # Set up hidden states for all levels - and propagate through the runner! hidden_dict = {} hidden_dict["level1"] = th.stack([Variable(th.zeros(batch_size, 1, self.args.agents_hidden_state_size)) for _ in range(self.n_agents if self.is_obs_noise(test_mode) and caller != "learner" else 1)]) hidden_dict["level2"] = th.stack([Variable(th.zeros(batch_size, 1, self.args.agents_hidden_state_size)) for _ in range(len(sorted(combinations(list(range(self.n_agents)), 2)))*2 if self.is_obs_noise(test_mode) and caller != "learner" else len(sorted(combinations(list(range(self.n_agents)), 2))))]) hidden_dict["level3"] = th.stack([Variable(th.zeros(batch_size, 1, self.args.agents_hidden_state_size)) for _ in range(self.n_agents)]) if self.args.use_cuda: hidden_dict = {_k:_v.cuda() for _k, _v in hidden_dict.items()} return hidden_dict, "?*bs*v*t"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initial_state(self):\n # Network details elided.\n return self.agent.initial_state()", "def initial_state(self):\n # Network details elided.\n return self.agent.initial_state()", "def initial_state(self):\n # Network details elided.\n return self.agent.initial_state()", "def registerIni...
[ "0.6404829", "0.6404829", "0.6404829", "0.6275053", "0.62659883", "0.62305397", "0.6203651", "0.6194984", "0.61897767", "0.6183285", "0.60839427", "0.6075158", "0.6037105", "0.6035622", "0.60091317", "0.59804654", "0.59778255", "0.59626013", "0.5937987", "0.5926602", "0.59236...
0.73379165
0
Each learner has it's own logging routine, which logs directly to the pythonwide logger if log_directly==True, and returns a logging string otherwise Logging is triggered in run.py
def log(self, test_mode=None, T_env=None, log_directly = True): test_suffix = "" if not test_mode else "_test" stats = self.get_stats() try: stats["pair_action_unavail_rate"+test_suffix] = _seq_mean(stats["pair_action_unavail_rate__runner"+test_suffix]) self._add_stat("pair_action_unavail_rate", stats["pair_action_unavail_rate"+test_suffix], T_env=T_env, suffix=test_suffix, to_sacred=True) except: pass if stats == {}: self.logging_struct.py_logger.warning("Stats is empty... are you logging too frequently?") return "", {} logging_dict = dict(T_env=T_env) try: logging_dict["pair_action_unavail_rate"+test_suffix] =stats["pair_action_unavail_rate"+test_suffix] except: pass logging_str = "" logging_str += _make_logging_str(_copy_remove_keys(logging_dict, ["T_env"+test_suffix])) if log_directly: self.logging_struct.py_logger.info("{} MC INFO: {}".format("TEST" if self.test_mode else "TRAIN", logging_str)) return logging_str, logging_dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log(self, log_directly = True):\n stats = self.get_stats()\n logging_dict = dict(advantage_mean = _seq_mean(stats[\"advantage_mean\"]),\n critic_grad_norm = _seq_mean(stats[\"critic_grad_norm\"]),\n critic_loss =_seq_mean(stats[\"critic_los...
[ "0.7626581", "0.63579595", "0.62751955", "0.61822915", "0.6168312", "0.6166976", "0.6134176", "0.61169195", "0.6112573", "0.6073991", "0.6049023", "0.6018304", "0.6004898", "0.5980599", "0.59766626", "0.59521484", "0.59347326", "0.5919445", "0.5915726", "0.5904169", "0.586773...
0.5844291
23
Sends a message to TCP server
def send(self, msg): if self.verbose: print('<- out ' + msg) self._socket.send_string(msg) return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_message(self, message):\r\n\t\tself.__tcpSocket.write(message.encode('utf8'))", "def send(self, msg):\n self.__sock.send(msg)", "def send(self, message):\n self.sock.send(message)", "def sendMessage(self, msg):\n # Socket Object\n self.sock.connect((self.host, self.port))...
[ "0.857833", "0.8029346", "0.7997576", "0.7927077", "0.7885072", "0.7848192", "0.7783163", "0.7779231", "0.77461135", "0.77231276", "0.7711107", "0.7656483", "0.7619312", "0.7602865", "0.75940025", "0.75879824", "0.75759625", "0.7557794", "0.7549099", "0.7539888", "0.7537408",...
0.7415012
27
Checks the ZeroMQ for data
def recv(self): return self._socket.recv()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_invalid_data_is_empty(self, app, data_queues):\n res = self._call(app, {\"invalid\": 0}, ip=self.test_ip, status=200)\n self.check_response(data_queues, res, \"ok\")\n self.check_queue(data_queues, 0)", "def _chk_empty(self, queue, receiver):\n try:\n msg = receive...
[ "0.6370523", "0.6197868", "0.61302525", "0.6033723", "0.5991278", "0.59654814", "0.58306265", "0.58306265", "0.57430387", "0.57194114", "0.567935", "0.56438", "0.5607466", "0.5605082", "0.559692", "0.5590848", "0.5575481", "0.5566702", "0.55629605", "0.5525354", "0.5514238", ...
0.0
-1
Initializes and returns an LSL outlet
def initializeOutlet(interface): info = StreamInfo('OpenBCI_EEG', 'EEG', 4, 256, 'float32', 'openbci12345') outlet = StreamOutlet(info) return outlet
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _add_lamp_outlet(self, model):\r\n\r\n # Create a new CameraItem and set the model\r\n item = LampOutletItem()\r\n item.setModel(model)\r\n\r\n # Create a new CameraInfoWidget and set the model\r\n widget = LampOutletInfoWidget()\r\n widget.setModel(model)\r\n\r\n ...
[ "0.5946556", "0.5688111", "0.55620337", "0.55175424", "0.53838944", "0.5277127", "0.52567214", "0.52559364", "0.5208461", "0.5208461", "0.51927763", "0.51246226", "0.50718874", "0.5064975", "0.5049424", "0.5043597", "0.50424546", "0.50129515", "0.5001687", "0.49760997", "0.49...
0.58739966
1
This function builds a dictionary of managers to manager nodes.
def buildHierarchy(self, test_input): for entry in test_input: if entry['manager']not in self.relations: self.relations[entry['manager']] = Node(entry['manager'], entry['name']) else: self.relations[entry['manager']].employees.append(entry['name'])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_managers():\n return {'managers': get_users('managers')}", "def map_uses(self):\n out = {}\n for node in self.nodes.values():\n baddies = set()#track incomplete connections and relegate to attributes\n for rtype, dest in node.outgoing_relations:\n try...
[ "0.6298359", "0.62928385", "0.59723586", "0.59544337", "0.586242", "0.58006024", "0.57427794", "0.5736032", "0.56940466", "0.56494904", "0.5589373", "0.5572555", "0.5556493", "0.5530863", "0.55166465", "0.5495922", "0.5472622", "0.5469694", "0.5432187", "0.54266393", "0.54139...
0.5909479
4
This function recursively builds a string of manager to employee relationships starting from the managers that do not have managers.
def findHierarchy(self): def __recursiveHelper(key_name, output, indent): if key_name in self.relations: for employee in self.relations[key_name].employees: output += " " * indent + str(employee) +"\n" # return __recursiveHelper(employee, output, indent+1) __recursiveHelper(employee, output, indent+1) else: print(output) return output #experimenting with Iter() and next() iterators/generators #and a while loop in the recursive function: # def __recursiveHelper(key_name, output, indent): # if key_name in self.relations: # employees = iter(self.relations[key_name].employees) # employee = next(employees, "stop") # while employees and employee != 'stop': # output += " " * indent + str(employee) +"\n" # __recursiveHelper(next(employees, "stop"), output, indent+1) # else: # employee = next(employees, "stop") # # else: # return output output = "" indent = -1 # self.relations is a dictionary of manager-name string keys. # The employees of None are the top-ranking managers. # only issue: # having trouble returning the concatenated output # from the recursive function: return __recursiveHelper(None, output, indent+1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def buildHierarchy(self, test_input):\n for entry in test_input:\n if entry['manager']not in self.relations:\n self.relations[entry['manager']] = Node(entry['manager'], entry['name'])\n else:\n self.relations[entry['manager']].employees.append(entry['name'...
[ "0.5634923", "0.53542036", "0.5284192", "0.5087596", "0.49705473", "0.4939793", "0.49350137", "0.48989594", "0.48973182", "0.48740613", "0.485191", "0.48157832", "0.47859207", "0.47704506", "0.47661456", "0.4745172", "0.47298232", "0.47289705", "0.4718572", "0.46928048", "0.4...
0.6343025
0
r""" 1d nonlinear elasticity riemann solver aux is expected to contain aux[i,0] density in cell i aux[i,1] bulk modulus in cell i
def rp_nel_1d(q_l, q_r, aux_l, aux_r, aux_global): meqn = 2 mwaves = 2 # Convenience nrp = np.size(q_l, 0) # Set up arrays for return values fwave = np.empty((nrp, meqn, mwaves)) s = np.empty((nrp, mwaves)) amdq = np.empty((nrp, meqn)) apdq = np.empty((nrp, meqn)) # Linearized bulk modulus, sound speed, and impedance: bulkl = sigmap(q_l[:, 0], aux_l[:, 1]) bulkr = sigmap(q_r[:, 0], aux_r[:, 1]) cl = np.sqrt(bulkl / aux_l[:, 0]) cr = np.sqrt(bulkr / aux_r[:, 0]) zl = cl * aux_l[:, 0] zr = cr * aux_r[:, 0] # Jumps: du = q_r[:, 1] / aux_r[:, 0] - q_l[:, 1] / aux_l[:, 0] dsig = sigma(q_r[:, 0], aux_r[:, 1]) - sigma(q_l[:, 0], aux_l[:, 1]) b1 = -(zr * du + dsig) / (zr + zl) b2 = -(zl * du - dsig) / (zr + zl) # Compute the f-waves # 1-Wave fwave[:, 0, 0] = b1 fwave[:, 1, 0] = b1 * zl s[:, 0] = -cl # 2-Wave fwave[:, 0, 1] = b2 fwave[:, 1, 1] = b2 * (-zr) s[:, 1] = cr # Compute the left going and right going fluctuations for m in range(meqn): amdq[:, m] = fwave[:, m, 0] apdq[:, m] = fwave[:, m, 1] return fwave, s, amdq, apdq
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, cell):\n self._cell = cell\n self._residual_fn = gnmt_residual_fn", "def auxmin_f1_part_i(x,m_ind):\n \n tmp1 = 2.0*auxminrho1(x,m_ind)-cfg.a[m_ind,cfg.nfea-1] \n tmp2 = 2.0*auxminrho2(x,m_ind)+cfg.a[m_ind,cfg.nfea-1]\n\n # checking maximum used in auxminrho1 \n if (tm...
[ "0.6120228", "0.5804139", "0.5638813", "0.5638267", "0.5628902", "0.5628902", "0.5626579", "0.5618351", "0.5618351", "0.5618351", "0.56004834", "0.5596577", "0.5594398", "0.55738205", "0.55608195", "0.5557147", "0.55332184", "0.5526329", "0.5496661", "0.54944676", "0.54727423...
0.0
-1
Extract zipfile to a directory if password is correct.
def extractfile(file, passwd): try: zipf = zipfile.ZipFile(file) zipf.extractall(path=os.path.join(file[:-4]), pwd=str.encode(passwd)) print('Password: {}'.format(passwd)) except: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unzipArchives(zip_file, password):\n with ZipFile(zip_file) as archive:\n archive.extractall(pwd=bytes(password, \"utf8\"))", "def unzip_item(source_path, destination_path, password):\n\n if not destination_path:\n destination_path = source_path.replace(\".zip\", \"\")\n if not os.path...
[ "0.73367697", "0.68281114", "0.6682046", "0.66589713", "0.6608535", "0.65792376", "0.65135366", "0.65102696", "0.6478845", "0.64773226", "0.64197737", "0.6360199", "0.6358647", "0.6311184", "0.62914145", "0.6283126", "0.6278718", "0.62752926", "0.62581104", "0.62555015", "0.6...
0.7807078
0
Calculate Profit of Order
def calculate_profit(self):
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def profit(self):\n retail_value = 0\n wholesale_value = 0\n for bike in self.sold:\n retail_value += bike.total_cost() + (\n self.retail_margin * bike.total_cost())\n wholesale_value += bike.total_cost()\n return retail_value - wholesale_value", "...
[ "0.73407346", "0.70521134", "0.69925046", "0.66937786", "0.664156", "0.6609153", "0.660143", "0.64814395", "0.63270825", "0.62858367", "0.62036735", "0.6141628", "0.6069652", "0.6067097", "0.6044266", "0.60409504", "0.602659", "0.6025829", "0.6020886", "0.6019667", "0.6008011...
0.79486245
0
Try to close when TakeProfit or StopLoss hits.
def try_to_close(self, candle_high, candle_low, time, pre_candle_close=0.0, pre_time=None):
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def close(self):\n\t\tself.applied = 0", "def _close(self):\n # TODO\n self.holding = False", "def handle_close(self):\n self.active = False\n self.close()", "def close(self):\n super(OpenCNTradeContext, self).close()", "def close(self):\n self._normal_close = True...
[ "0.6352216", "0.6309689", "0.62376684", "0.617212", "0.6147022", "0.6096366", "0.6096366", "0.6072034", "0.6071069", "0.60654074", "0.6047621", "0.6024533", "0.6024533", "0.60132384", "0.6009106", "0.6005265", "0.6000903", "0.59958005", "0.5981424", "0.5960146", "0.595187", ...
0.0
-1
Removes a service from a list of existing services.
def RemoveServiceFromEndpoints(service_name, services): new_services = [] if not isinstance(services, list): return new_services # TODO(user): Consider throwing an exception if the service is not # already configured in the list of endpoints. for service in services: if not isinstance(service, dict) or 'name' not in service: raise exceptions.ToolException(ValueError( 'Services are expected to be service dicts!')) if service['name'] != service_name: new_services.append(service) return new_services
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def DeleteServices(self):\n for service in self.services.values():\n service.Delete()", "def delete_service(self, service):\n # type: (LoadBalancerService) -> List[BoundAction]\n return self._client.delete_service(self, service)", "def remove(self, service):\n os.remove(os.path.joi...
[ "0.7037286", "0.675924", "0.6610748", "0.6590831", "0.62848717", "0.6255595", "0.61463314", "0.6130574", "0.611379", "0.6080032", "0.6072836", "0.6069888", "0.60036", "0.5951796", "0.5946897", "0.59276074", "0.59024787", "0.58957607", "0.58896816", "0.5826269", "0.57981753", ...
0.7142464
0
Return distance of two keys in qwerty keyboard based on manhattan or euclidean distance.
def key_distance(self, x, y, type="manhattan"): if type == "manhattan": return self.manhattan_dist_matrix[self.keys.index(x), self.keys.index(y)] elif type == "euclidean": return self.euclidean_dist_matrix[self.keys.index(x), self.keys.index(y)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def distance(self, keyOne, keyTwo):", "def qwerty_distance():\n from collections import defaultdict\n import math\n R = defaultdict(dict)\n R['-']['-'] = 0\n zones = [\"dfghjk\", \"ertyuislcvbnm\", \"qwazxpo\"]\n keyboard = [\"qwertyuiop\", \"asdfghjkl\", \"zxcvbnm\"]\n for num, content in e...
[ "0.75387555", "0.69056517", "0.6859236", "0.67510206", "0.6659005", "0.66116893", "0.6603836", "0.65740633", "0.6566495", "0.65459806", "0.6529574", "0.6490348", "0.6489842", "0.64859676", "0.6471863", "0.6434619", "0.6432744", "0.6414012", "0.6411454", "0.63999486", "0.63896...
0.71139956
1
Calculate matrix of number of edits to convert every subset of y to every subset of x
def distance_matrix(self, x, y, keyboard_weight=None): # create distance matrix size_x = len(x) + 1 size_y = len(y) + 1 dist_matrix = np.zeros((size_x, size_y)) for i in range(size_x): dist_matrix[i, 0] = i for j in range(size_y): dist_matrix[0, j] = j ## fill distance matrix # no keyboard weight if not keyboard_weight: for i in range(1, size_x): for j in range(1, size_y): # if letters are same if x[i-1] == y[j-1]: dist_matrix[i, j] = dist_matrix[i-1, j-1] # if letters are different else: subs = dist_matrix[i-1, j-1] + 1 delete = dist_matrix[i-1, j] + 1 insert = dist_matrix[i, j-1] + 1 dist_matrix[i, j] = min(subs, delete, insert) # manhattan keyboard weight elif keyboard_weight == "manhattan": for i in range(1, size_x): for j in range(1, size_y): # if letters are same if x[i-1] == y[j-1]: dist_matrix[i, j] = dist_matrix[i-1, j-1] # if letters are different else: dist = self.key_distance(x[i-1], y[j-1], keyboard_weight) subs_weight = dist * self.manhattan_coef subs = dist_matrix[i-1, j-1] + subs_weight delete = dist_matrix[i-1, j] + 1 insert = dist_matrix[i, j-1] + 1 dist_matrix[i, j] = min(subs, delete, insert) # euclidean keyboard weight elif keyboard_weight == "euclidean": for i in range(1, size_x): for j in range(1, size_y): # if letters are same if x[i-1] == y[j-1]: dist_matrix[i, j] = dist_matrix[i-1, j-1] # if letters are different else: dist = self.key_distance(x[i-1], y[j-1], keyboard_weight) subs_weight = dist * self.euclidean_coef subs = dist_matrix[i-1, j-1] + subs_weight delete = dist_matrix[i-1, j] + 1 insert = dist_matrix[i, j-1] + 1 dist_matrix[i, j] = min(subs, delete, insert) return dist_matrix
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def topsolutions(self):\n answers = []\n for y in xrange(0, self.y):\n answer = self.retrieve(y,self.y)\n i = 0\n for x in xrange(0,y):\n answer -= self.retrieve(y,x)*answers[i]\n i += 1\n answers.append(answer)\n return...
[ "0.5772924", "0.573736", "0.57085884", "0.5653021", "0.56054556", "0.54707366", "0.5380854", "0.53584445", "0.5358038", "0.53385925", "0.5314144", "0.5285138", "0.52835494", "0.52652776", "0.52577674", "0.5253505", "0.5253083", "0.524562", "0.5237841", "0.52344394", "0.521122...
0.0
-1
Calculate number of edits to convert y to x
def distance(self, x, y, keyboard_weight=None): dist_matrix = self.distance_matrix(x, y, keyboard_weight) return dist_matrix[-1, -1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_num_applies(self):\n ops = 0\n for _, remainder, _ in self:\n ops += len(remainder)\n return ops", "def countNeighbors(oldgen, x, y):\n temp = 1\n\n count = 0\n for i in range(-1, 2):\n for j in range(-1, 2):\n\n # TODO: this needs rewritin to be...
[ "0.6166617", "0.591628", "0.58021086", "0.57318366", "0.5673574", "0.5647557", "0.5594943", "0.5514994", "0.550421", "0.5470522", "0.54313797", "0.54289174", "0.542215", "0.53802377", "0.5366139", "0.5360295", "0.53558695", "0.53518134", "0.5347793", "0.53407663", "0.5332358"...
0.0
-1
Return a dataframe of distance matrix of x and y. Indexes are letters of x and columns are letters of y.
def distance_dataframe(self, x, y, keyboard_weight=None): dist_matrix = self.distance_matrix(x, y, keyboard_weight) dist_df = pd.DataFrame(dist_matrix, index=["", *list(x)], columns=["", *list(y)]) return dist_df
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calc_dist_matrix(self):\n\n self.dist_matrix = spatial.distance.squareform(spatial.distance.pdist(self.data_vector,metric=\"hamming\"))\n\n self.dist_frame = pd.DataFrame(self.dist_matrix,\n index = self.seq_strings,\n co...
[ "0.69271284", "0.67564255", "0.6533258", "0.6516786", "0.6428106", "0.63869244", "0.6351963", "0.6319464", "0.63100487", "0.62420344", "0.62378067", "0.62373847", "0.62171084", "0.62104243", "0.62080455", "0.6201593", "0.61032706", "0.6101561", "0.60353494", "0.6023481", "0.5...
0.8356927
0
Calculate similarity of two words Return a number between 0 and 1 (1 means same and 0 means fully different)
def similarity(self, x, y, keyboard_weight=None): dist = self.distance(x, y, keyboard_weight) max_len = max(len(x), len(y)) max_dissimilarity = max_len * self.scale_coef similarity = 1 - dist / max_dissimilarity return similarity
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wordSimilarityRatio(sent_1,sent_2):", "def similarity(a, b):\n distance = Levenshtein.distance(a, b)\n return 1 - (distance / max((len(a), len(b))))", "def similar_text(word1, word2) -> float:\n\n return textdistance.overlap.similarity(word1, word2)", "def text_similarity(self, text_1: str, text...
[ "0.84911114", "0.8240029", "0.8203488", "0.80538607", "0.7916579", "0.78859663", "0.78573513", "0.78237593", "0.78181165", "0.7802038", "0.7788888", "0.77773386", "0.7673199", "0.76593477", "0.760347", "0.7594198", "0.7564784", "0.7561484", "0.7549728", "0.7532246", "0.750004...
0.71260786
44
Updates this Role instance
def update(self, permission, **kwargs): kwargs['permission'] = permission return self.update_instance(**kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(self, role):\n model = models.load('Role', role)\n model.account_id = self.account_id\n\n return self.client.update_role(model)", "def update(self, role):\n self._router_request(\n self._make_request_data(\n 'updateAdminRole',\n data...
[ "0.75100726", "0.73120785", "0.6926027", "0.6725893", "0.6725881", "0.66784096", "0.6674918", "0.6656022", "0.65352", "0.6417849", "0.64106894", "0.62678677", "0.62678677", "0.6230835", "0.6226089", "0.6220385", "0.6199606", "0.6199606", "0.6199606", "0.6171208", "0.6162495",...
0.53856415
93
Delete a given Role
def delete(self, sid): return self.delete_instance(sid)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_role(role):\n fallback = Role.load_cli_user()\n\n def _del(cls, col):\n pq = db.session.query(cls)\n pq = pq.filter(col == role.id)\n\n def _repo(cls, col):\n pq = db.session.query(cls).filter(col == role.id)\n pq.update({col: fallback.id}, synchronize_session=False)...
[ "0.82603055", "0.8098692", "0.80856603", "0.80064636", "0.79942334", "0.79914016", "0.791367", "0.7900632", "0.78198117", "0.779961", "0.778778", "0.7761136", "0.77352875", "0.76953554", "0.76781493", "0.76682544", "0.76425016", "0.7444573", "0.7420755", "0.7399109", "0.73983...
0.0
-1
Updates the Role instance identified by sid
def update(self, sid, permission, **kwargs): kwargs['permission'] = permission return self.update_instance(sid, kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(self, role):\n model = models.load('Role', role)\n model.account_id = self.account_id\n\n return self.client.update_role(model)", "def update_role(self, role_id, role):\n raise exception.NotImplemented() # pragma: no cover", "def updateRole(self, role_id, title, descript...
[ "0.6914042", "0.68597794", "0.6641208", "0.6528812", "0.6419764", "0.6366573", "0.6216717", "0.620263", "0.61314535", "0.60046023", "0.59110934", "0.5883643", "0.58780575", "0.5868561", "0.58423275", "0.57909155", "0.5775993", "0.5757083", "0.5700232", "0.5646141", "0.5625624...
0.6374615
5
Returns the token and dsn from a key Generate a simple SHA1 hash of the key key is a 64bits integer Token is a 32bits integer, dsn is a 64bits integer
def key2tokenAndDSN(self, key): import binascii import struct import hashlib self.keystr = struct.pack("!Q", key) self.h = hashlib.sha1(self.keystr.rjust(8,'\00')) self.shastr=self.h.digest() # binary #shastr = struct.pack("!IIIII", *struct.unpack("@IIIII",shastr)) #to net self.token, self.dsn = self.shastr[0:4], self.shastr[-8:] #print "raw: %s (len=%i)"%(shastr,len(shastr)) #print "hex: %s"% binascii.hexlify(token), "%s"%binascii.hexlify(dsn) self.d1, self.d2 = struct.unpack("!II",self.dsn) self.token, self.dsn = (struct.unpack("!I",self.token)[0], (long(self.d2)<<32)+self.d1) #print "token: %x"% token #print "dsn: %x" % dsn return (self.token, self.dsn)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fnv1(self, key):\n # hash = 0xff\n hash = 0xcbf29ce484222325\n for n in key.encode():\n # print(n)\n hash = hash ^ n\n hash = hash * 0x100000001b3\n\n # print(hash)\n return hash", "def _hash(self, key):\n\n return long(hashlib.md5(ke...
[ "0.59551054", "0.59115833", "0.59115833", "0.58881515", "0.5808499", "0.5731819", "0.5719893", "0.57018846", "0.569349", "0.5679318", "0.5663129", "0.5659303", "0.561188", "0.5586423", "0.5538995", "0.55299336", "0.54740316", "0.5471576", "0.5457066", "0.5430525", "0.54092103...
0.8006403
0
Identify distinct MPTCP Connections that reached Successful handshake Look for Ack packets with MPTCP option Header For each MPTCP connection report Receiver's token value which acts as the connectionID
def mptcp_connections(self, pkts): count = 0 #MPTCP_Capable = 0x0 #MPTCP_CapableACK ---> successful handshake print "======================================================================" print "Successful Handshake --- Look for Ack packets with MPTCP option Header" print """Token = connectionID = SHA1(key)[0-32] of Other party's key. (Capture from either step 2 or 3 in the first handshake)""" print "Total packets: %s" % len(pkts) print "======================================================================" print "Identifying MPTCP Connections...." for i in range(len(pkts)): if(MPTCP_CapableACK in pkts[i] and pkts[i][TCPOption_MP].mptcp.subtype == 0): count +=1 #Count the number of distinct MPTCP connections #Compute the receiver's token self.key_rcv = pkts[i][TCPOption_MP].mptcp.rcv_key self.rcv_token, self.rcv_dsn = self.key2tokenAndDSN(self.key_rcv) #Compute the sender's token self.key_snd = pkts[i][TCPOption_MP].mptcp.snd_key self.snd_token, self.snd_dsn = self.key2tokenAndDSN(self.key_snd) print ("%i. New MPTCP Connection (Successful Handshake) src: %s; dest: %s; Sender's key: %s; Receiver's key: %s; Receivers Token (connectionID): %s; Sender's Token: %s" % (count, pkts[i][IP].src, pkts[i][IP].dst, pkts[i][TCPOption_MP].mptcp.snd_key, pkts[i][TCPOption_MP].mptcp.rcv_key, self.rcv_token, self.snd_token)) print "Total MPTCP Connections: %i" % count
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_mptcp_pkt_from_client(ts_delta, acks, conn_acks, mptcp_connections, tcp, ip, saddr, daddr, sport, dport):\n dss, dack, dss_is_8_bytes = get_dss_and_data_ack(tcp)\n conn_id = acks[saddr, sport, daddr, dport][co.CONN_ID]\n flow_id = acks[saddr, sport, daddr, dport][co.FLOW_ID]\n if conn_acks[...
[ "0.5511609", "0.5456236", "0.5397558", "0.53674424", "0.5341668", "0.5300616", "0.52610534", "0.5249031", "0.5168756", "0.5108904", "0.5044106", "0.5025528", "0.50196034", "0.49723715", "0.4895106", "0.48823294", "0.4870531", "0.48611027", "0.4818591", "0.4818538", "0.4802288...
0.7726427
0
Report the number of payload bytes cumulatively exchanged over each MPTCP connection
def payload_data(self, pkts): #Get all the payload bytes exchanged over MPTCP connections payload_bytes = 0 print "Determining the number of payload bytes excluding headers...." #DSS = 0x2 for i in range(len(pkts)): if(TCPOption_MP in pkts[i] and pkts[i][TCPOption_MP].mptcp.subtype == 2 and Raw in pkts[i]): payload_bytes += len(pkts[i][Raw].load) #print("DSN: %s; subflow_seqnum: %s; Data(bytes): %s" % (pkts[i][TCPOption_MP].mptcp.dsn, pkts[i][TCPOption_MP].mptcp.subflow_seqnum, len(pkts[i][Raw].load))) print "Total Number of payload bytes in the file (entire MPTCP connections) excluding headers): %s" % (payload_bytes) #MPTCP WITH SUBFLOW CONNECTIONS #MPTCP_JOINs = 0x1 print "============================================================" print "SUBFLOW Connections with their respective MPTCP connection (identified by connectionID)" for i in range(len(pkts)): #Initial Join Message #rcv_token Identifies the connection to which the subflow belongs: connectionID if(MPTCP_JoinSYN in pkts[i] and pkts[i][TCPOption_MP].mptcp.subtype == 1): print("New subflow: connectionID: %s; src: %s; dest: %s; snd_nonce: %s" % (pkts[i][TCPOption_MP].mptcp.rcv_token, pkts[i][IP].src, pkts[i][IP].dst, pkts[i][TCPOption_MP].mptcp.snd_nonce)) #TODO: Now Need to track per-connection and per-subflow state
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def BytesTransferred(self) -> int:", "def SendPacketsSendSize(self) -> int:", "def delta_bytes(self):\n return sum(self.fcip_doc['packet_lengths'])", "def getPacketCount(self):\n return 1", "def mptcp_connections(self, pkts):\n\t\tcount = 0\n\t\t#MPTCP_Capable = 0x0\n\t\t#MPTCP_CapableACK ---...
[ "0.66460896", "0.64858186", "0.6429485", "0.63838947", "0.6298005", "0.6263551", "0.60947376", "0.6089673", "0.6043087", "0.6028377", "0.6015029", "0.5988581", "0.5972827", "0.590944", "0.5890168", "0.5861066", "0.5850881", "0.5805821", "0.5804993", "0.5774528", "0.5753648", ...
0.64004153
3
Adds an input value to the averaging filter.
def Input(self, value): print("Input: {}".format(value)) # Add the new value to the filter values. self.Values.append(value) # If the filter has reached its maximum Depth, # pop the last item from the filter values. if len(self.Values) > self.Depth: s = 0 self.Values.pop(s) print("Filter ({}): {}".format(len(self.Values), self.Values))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_value(self, value):\n if (value - self.avg) ** 2 > \\\n self.deviation_scale * (self.std + self.deviation_offset):\n BaseFilter.add_value(self, self.avg)\n else:\n BaseFilter.add_value(self, value)", "def add_value(self, value):\n if len(self.hist...
[ "0.74540794", "0.71780056", "0.6814798", "0.6716383", "0.6649134", "0.63110775", "0.6276187", "0.6150249", "0.6108856", "0.6071165", "0.60466886", "0.60204285", "0.6016385", "0.5957288", "0.5926967", "0.59223187", "0.5921518", "0.591883", "0.5895319", "0.57737994", "0.5753940...
0.6106456
9
Calculates the average of the current filter values.
def Output(self): filter_sum = 0 # Calculate the sum of all filter values. for s in self.Values: filter_sum += s print("Sum: {}".format(filter_sum)) # The average is the sum divided by the current amount of # samples. avg = filter_sum / len(self.Values) print("Average: {}".format(avg)) if self.Round is True: # Round the average to the nearest integer. avg = round(avg) print("Output: {}".format(avg)) return avg
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def average(self):\n return self.summation() / self.count()", "def average(self):\n return (self.current + self.last) / 2.0", "def average(self):\n s = self.sum()\n flat_shape = self.flatten_shape(self.shape)\n num_of_elements = fct.reduce(opr.mul, flat_shape, 1)\n ave...
[ "0.75439095", "0.73029804", "0.7202501", "0.7086935", "0.7005572", "0.6967038", "0.6947147", "0.6872095", "0.68588364", "0.6849239", "0.6839921", "0.6821161", "0.6814509", "0.6814509", "0.6768326", "0.6744722", "0.6728115", "0.67118907", "0.6687017", "0.6667109", "0.66239166"...
0.7149273
3
Resets the filter values.
def Reset(self): n = len(self.Values) for i in range(0, n): self.Values.pop(i)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset_filter(self):\n arlen = len(self.variant_list)\n self.filter = np.zeros((arlen, arlen)) == 0", "def reset_filters():\n logger.info(\"reset filters\")\n global filter_item\n filter_item = -1\n filter_topics_table.view.filters = [IndexFilter()]\n filter_custom_table.view.filt...
[ "0.7977834", "0.7429131", "0.74247444", "0.7300497", "0.7222309", "0.72031015", "0.7117929", "0.7102414", "0.7066342", "0.70600677", "0.6972775", "0.68997204", "0.68905616", "0.68335426", "0.6815627", "0.6741218", "0.66964805", "0.66880286", "0.66741985", "0.66408324", "0.662...
0.64528835
43
return the current schema_org schema version
def get_schema_org_version(): return _get_schemaorg_version()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_schemaorg_version():\n try:\n version = get_latest_schemaorg_version()\n except ValueError:\n version = SCHEMAORG_DEFAULT_VERSION\n return version", "def schema_version(self):\n # return self._parsed[\"schemaVersion\"]\n # does not exist in manifest reference\n ...
[ "0.84921485", "0.82677555", "0.8089187", "0.7736187", "0.74551374", "0.7435045", "0.73047084", "0.6960835", "0.68983686", "0.68766624", "0.68467546", "0.68244046", "0.6751349", "0.67256296", "0.6715355", "0.6658414", "0.6645252", "0.6636151", "0.6614081", "0.66119516", "0.658...
0.89709204
0
Return a list of schema namespaces registered in DDE
def registered_dde_schemas(self): return [s["_id"] for s in schemas.get_all(size=100)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def registered_dde_schemas(self):\n url = DDE_SCHEMA_BASE_URL + \"?field=_id&size=20\"\n if self.verbose:\n print(f'Loading registered DDE schema list from \"{url}\"')\n data = load_json_or_yaml(url)\n return [s[\"namespace\"] for s in data[\"hits\"]]", "def namespaces(self...
[ "0.82582337", "0.8003308", "0.7945194", "0.77889043", "0.74570817", "0.7238755", "0.7172259", "0.71680105", "0.7109484", "0.70704514", "0.70290285", "0.7007423", "0.68698627", "0.6844944", "0.6776967", "0.6739239", "0.6705429", "0.6666555", "0.65324306", "0.6531712", "0.65198...
0.7246093
5
Load a registered schema
def load_dde_schemas(self, schema): if self.verbose: print(f'Loading registered DDE schema "{schema}"') schema_source = schemas.get(schema) schema_source.pop("_id") return schema_source
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_validator_schema():\n logger.info('Loading validator schemas')\n SchemaLoader.load_all_from_path(validator_config_path)", "def load_schema(schema_path):\n with open(schema_path) as schema_file:\n return Utils.parse(schema_file.read())", "def schema_load(filename):\n print(uc...
[ "0.71533245", "0.7038362", "0.70110863", "0.6746675", "0.6709357", "0.6656535", "0.6612241", "0.66008633", "0.65909773", "0.65366113", "0.64810675", "0.6431707", "0.6424626", "0.64035326", "0.63424355", "0.63148296", "0.63040596", "0.62571245", "0.6245016", "0.62211007", "0.6...
0.6969959
3
get only classes defined in this schema
def get_class_defs(self): return list(self._get_class_defs().values())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_all_classes(self):\n classes = list(self.extended_class_only_graph.nodes())\n classes = [SchemaClass(_cls, self) for _cls in classes]\n return classes", "def get_classes(self):\n return", "def return_classes(self):\n\n\t\t \n\t\t \n\t\treturn self.classes", "def get_c...
[ "0.78731024", "0.71519256", "0.7128944", "0.7102175", "0.7069883", "0.6910617", "0.67119366", "0.66961735", "0.6668723", "0.6640557", "0.66171163", "0.6541783", "0.6539696", "0.65327054", "0.65109813", "0.64602256", "0.6432275", "0.64070165", "0.63955426", "0.6380493", "0.637...
0.65426725
11
get only classes referenced outside this schema
def get_class_refs(self): return list(self._get_class_refs().values())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_all_classes(self):\n classes = list(self.extended_class_only_graph.nodes())\n classes = [SchemaClass(_cls, self) for _cls in classes]\n return classes", "def parent_classes(self):\n response = check_defined(self, inspect.stack()[0][3])\n if not response:\n r...
[ "0.6365546", "0.62089795", "0.6205446", "0.6171531", "0.60406595", "0.59149647", "0.5896615", "0.58919096", "0.5831818", "0.57982343", "0.5780626", "0.5756438", "0.57559365", "0.5708741", "0.5698709", "0.5691668", "0.5679516", "0.56510174", "0.56453377", "0.5609935", "0.55802...
0.6016841
5
get all classes and label them if they are referenced if include_ref is False, only "defined" classes are included.
def get_classes(self, include_ref=True): defs = self._get_class_defs() ans = {} ans.update(defs) if include_ref: refs = self._get_class_refs() ans.update(refs) return list(ans.values())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_class_refs(self):\n return list(self._get_class_refs().values())", "def process_class_list(self, module, classes):", "def _load_classes(self):\n classdocs = self._docset.get_classes()\n for classdoc in classdocs:\n files = [self._docmap[filedoc] for filedoc in classdoc.g...
[ "0.6035254", "0.58814853", "0.5880051", "0.5856091", "0.57512456", "0.5681698", "0.5672593", "0.5591788", "0.5589396", "0.55738693", "0.55246097", "0.55189526", "0.5512281", "0.5482113", "0.54583037", "0.5448857", "0.54194885", "0.5409159", "0.53911316", "0.5388025", "0.53866...
0.7381208
0
return True if there is at least one validation error.
def has_validation_error(self): for err in self._schema.validator.validation_errors: if not err.warning: return True return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isValid(self):\n errorList = self.getErrors()\n\n return not errorList", "def is_valid(self):\n return not self.errors", "def is_valid(self):\n self.clean()\n return not bool(self.errors)", "def has_errors(self):\n return len(self.get_errors()) > 0", "def has_e...
[ "0.79174244", "0.7894807", "0.7860656", "0.7844026", "0.78179806", "0.78001493", "0.77460116", "0.76075137", "0.7494825", "0.7405556", "0.7374498", "0.73029155", "0.7184244", "0.714132", "0.7135738", "0.7081772", "0.7046217", "0.7024928", "0.69485885", "0.6942181", "0.6913216...
0.78778917
2
return validation errors as a list of dictionaries
def get_validation_errors(self): return [err.to_dict() for err in self._schema.validator.validation_errors]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error_wrapper(x):\n errors = list()\n for error_key, error_list in list(x.items()):\n for error in error_list:\n if error_key == 'non_field_errors':\n errors.append(error)\n else:\n errors.append(\"%s: %s\" % (error_key, error))\n return error...
[ "0.7443273", "0.7419965", "0.7331665", "0.7269884", "0.7269884", "0.7269884", "0.7269884", "0.7269884", "0.72689366", "0.72655326", "0.7202294", "0.7177334", "0.7172576", "0.7163961", "0.7127716", "0.6941691", "0.6911603", "0.6893204", "0.6834352", "0.6751597", "0.6711623", ...
0.84701467
0
Faster Wavelenght selector If passed lists it will return lists. If passed np arrays it will return arrays Fastest is using np.ndarrays fast_wav_selector ~10002000 quicker than wav_selector
def fast_wav_selector(wav, flux, wav_min, wav_max): if isinstance(wav, list): # if passed lists wav_sel = [value for value in wav if(wav_min < value < wav_max)] flux_sel = [value[1] for value in zip(wav,flux) if(wav_min < value[0] < wav_max)] elif isinstance(wav, np.ndarray): # Super Fast masking with numpy mask = (wav > wav_min) & (wav < wav_max) wav_sel = wav[mask] flux_sel = flux[mask] else: raise TypeError("Unsupported input wav type") return [wav_sel, flux_sel]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wav_selector(wav, flux, wav_min, wav_max, verbose=False):\n if isinstance(wav, list): # if passed lists\n wav_sel = [wav_val for wav_val in wav if (wav_min < wav_val < wav_max)]\n flux_sel = [flux_val for wav_val, flux_val in zip(wav,flux) if (wav_min < wav_val < wav_max)]\n elif isinst...
[ "0.7074208", "0.54926926", "0.543694", "0.5338126", "0.5282017", "0.5270855", "0.51453614", "0.5138771", "0.5112849", "0.5108346", "0.5105792", "0.5059946", "0.5033493", "0.5002626", "0.49823514", "0.49728918", "0.4955809", "0.4946704", "0.49403378", "0.49394882", "0.48933354...
0.70528
1
Gaussian_function of area=1 p[0] = A; p[1] = mean; p[2] = FWHM;
def unitary_Gauss(x, center, FWHM): sigma = np.abs(FWHM) /( 2 * np.sqrt(2 * np.log(2)) ); Amp = 1.0 / (sigma*np.sqrt(2*np.pi)) tau = -((x - center)**2) / (2*(sigma**2)) result = Amp * np.exp( tau ); return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gaussian(amp, fwhm, mean):\n return lambda x: amp * np.exp(-4. * np.log(2) * (x-mean)**2 / fwhm**2)", "def gaussian(amp, fwhm, mean, x):\n return amp * np.exp(-4. * np.log(2) * (x-mean)**2 / fwhm**2)", "def Gaussian(x, mu=0, sigma=26.4, A=1, y0=0):\r\n #width = sigma*(2*np.sqrt(2*np.log(2)))\r\n ...
[ "0.7608028", "0.75165457", "0.6953791", "0.69522965", "0.6922625", "0.6717248", "0.6687247", "0.6661979", "0.66529137", "0.66487074", "0.6572644", "0.65556943", "0.65399146", "0.6530924", "0.6530924", "0.6530924", "0.6530346", "0.6524894", "0.65182394", "0.64871186", "0.64804...
0.59211177
70
IP convolution multiplication step for a single wavelength value
def fast_convolve(wav_val, R, wav_extended, flux_extended, FWHM_lim): FWHM = wav_val/R index_mask = (wav_extended > (wav_val - FWHM_lim*FWHM)) & (wav_extended < (wav_val + FWHM_lim*FWHM)) flux_2convolve = flux_extended[index_mask] IP = unitary_Gauss(wav_extended[index_mask], wav_val, FWHM) sum_val = np.sum(IP*flux_2convolve) unitary_val = np.sum(IP*np.ones_like(flux_2convolve)) # Effect of convolution onUnitary. For changing number of points return sum_val/unitary_val
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self, x):\n if self.signal_length is None:\n self.signal_length = x.shape[-1]\n self.channels = x.shape[-2]\n self._scales = self.compute_optimal_scales()\n self._kernel = self._build_wavelet_bank()\n\n if self._kernel.is_complex():\n ...
[ "0.6042097", "0.5845206", "0.5728535", "0.57079965", "0.5538608", "0.5477923", "0.54210114", "0.5398492", "0.53849536", "0.5383139", "0.5380405", "0.5379147", "0.5379049", "0.5376933", "0.53721017", "0.535263", "0.5341408", "0.53101605", "0.5305929", "0.5297572", "0.5297235",...
0.0
-1
Convolution code adapted from pedros code and speed up with np mask logic
def convolution_nir(wav, flux, chip, R, FWHM_lim=5.0, plot=True): wav_chip, flux_chip = chip_selector(wav, flux, chip) #we need to calculate the FWHM at this value in order to set the starting point for the convolution FWHM_min = wav_chip[0]/R #FWHM at the extremes of vector FWHM_max = wav_chip[-1]/R #wide wavelength bin for the resolution_convolution wav_extended, flux_extended = fast_wav_selector(wav, flux, wav_chip[0]-FWHM_lim*FWHM_min, wav_chip[-1]+FWHM_lim*FWHM_max) print("wav_extended type", type(wav_extended)) wav_extended = np.array(wav_extended, dtype="float64") print("wav_extended type after arrayed", type(wav_extended)) # should be the same. flux_extended = np.array(flux_extended, dtype="float64") print("Starting the Resolution convolution...") # Predefine np array space flux_conv_res = np.empty_like(wav_chip, dtype="float64") counter = 0 base_val = len(wav_chip)//20 # Adjust here to change % between reports for n, wav in enumerate(wav_chip): # put value directly into the array flux_conv_res[n] = fast_convolve(wav, R, wav_extended, flux_extended, FWHM_lim) if(n%base_val== 0): counter = counter+5 print("Resolution Convolution at {}%%...".format(counter)) print("flux conv res type after loop", type(flux_conv_res)) flux_conv_res = np.array(flux_conv_res, dtype="float64") print("flux conv res type after np.array", type(flux_conv_res)) print("Done.\n") if(plot): fig=plt.figure(1) plt.xlabel(r"wavelength [ $\mu$m ])") plt.ylabel(r"flux [counts] ") plt.plot(wav_chip, flux_chip/np.max(flux_chip), color ='k', linestyle="-", label="Original spectra") plt.plot(wav_chip, flux_conv_res/np.max(flux_conv_res), color ='b', linestyle="-", label="Spectrum observed at and R=%d ." % (R)) plt.legend(loc='best') plt.show() return wav_chip, flux_conv_res
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clConvolution(self, size, mask):", "def compute(self, node, input_vals):\r\n #assert len(input_vals) == 2\r\n #start = time.time()\r\n strides = node.const_attr\r\n ish = list(input_vals[0].shape)\r\n fsh = list(input_vals[1].shape)\r\n filter = input_vals[1].astype(...
[ "0.85066", "0.7019092", "0.68060744", "0.6763622", "0.6690711", "0.6668837", "0.6661964", "0.6645885", "0.65938205", "0.6570192", "0.65078753", "0.6501851", "0.6485171", "0.6481288", "0.64750814", "0.64741033", "0.64626104", "0.64103127", "0.63968295", "0.63761157", "0.635630...
0.0
-1
Interpolate Wavelengths of spectra to common WL Most likely convert telluric to observed spectra wl after wl mapping performed
def match_wl(wl, spec, ref_wl, method="scipy", kind="linear"): starttime = time.time() if method == "scipy": #print(kind + " scipy interpolation") linear_interp = interp1d(wl, spec, kind=kind) new_spec = linear_interp(ref_wl) elif method == "numpy": if kind.lower() is not "linear": print("Warning: Cannot do " + kind + " interpolation with numpy, switching to linear" ) #print("Linear numpy interpolation") new_spec = np.interp(ref_wl, wl, spec) # 1-d peicewise linear interpolat else: print("Method was given as " + method) raise("Not correct interpolation method specified") #print("Interpolation Time = " + str(time.time() - starttime) + " seconds") return new_spec # test inperpolations
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_spectral_bandpass_interpol(interpol_wavelen, interpol_rad, center_wvl,\n save_dir):\n\n save_dir = os.path.join(save_dir, r'look_up_table')\n if not os.path.exists(save_dir):\n os.makedirs(save_dir)\n\n\n center_wvl1 = np.arange(min(center_wvl), max(c...
[ "0.6577591", "0.6556523", "0.6496855", "0.64009374", "0.6335774", "0.62576574", "0.6154168", "0.60373366", "0.5991376", "0.5960921", "0.59510165", "0.5908147", "0.5871862", "0.58661956", "0.58438027", "0.5817838", "0.5814624", "0.5808811", "0.58038354", "0.5801125", "0.579968...
0.59196925
11
Takes two files in a list as input eg. data = [path1,path2]
def _input_as_string(self,data): inputFiles = ' '.join(data) self._input_filename = data return inputFiles
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_content(file1, file2):\n with open(file1, 'r') as f1, open(file2, 'r') as f2:\n return [line for line in f1], [line for line in f2]", "def load_files_to_compare(self):\n self.first_source_data = load_path(self.path1)\n self.second_source_data = load_path(self.path2)", "def load_...
[ "0.6610108", "0.6356825", "0.635545", "0.6338313", "0.62560904", "0.61758494", "0.6088548", "0.6032201", "0.598844", "0.5927247", "0.58987236", "0.5861115", "0.5815601", "0.5770374", "0.57467705", "0.57203543", "0.57123935", "0.57011217", "0.56484956", "0.56306076", "0.560777...
0.0
-1
Writes to first sequences(fasta) in a list to two temp files
def _input_as_lines(self,data): inputFiles = '' self._input_filename = [] for i in range(2): filename = self.getTmpFilename(self.WorkingDir) self._input_filename.append(filename) data_file = open(filename,'w') if i == 0: data_to_file = '\n'.join(data[:2]) tmp1 = filename else: data_to_file = '\n'.join(data[2:]) tmp2 = filename data_file.write(data_to_file) data_file.close() inputFiles = ' '.join([tmp1,tmp2]) return inputFiles
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def combine_fasta_files(fastas_paths, out_file):\n with open(out_file, 'w') as out:\n for filename in fastas_paths:\n for seq_record in SeqIO.parse(filename, \"fasta\"):\n out.write('>' + str(seq_record.id) + '\\n' + str(seq_record.seq) + '\\n')", "def test_write_seqs_to_fasta...
[ "0.7115534", "0.6977876", "0.6929793", "0.66960967", "0.66449195", "0.6635358", "0.6599218", "0.6559618", "0.6408864", "0.6341006", "0.6326734", "0.6306929", "0.629032", "0.62058866", "0.6190285", "0.612359", "0.611101", "0.6104494", "0.60735565", "0.6060679", "0.60134447", ...
0.0
-1
Run the application with the specified kwargs on data
def __call__(self,data=None, remove_tmp=True): input_handler = self.InputHandler suppress_stdout = self.SuppressStdout suppress_stderr = self.SuppressStderr if suppress_stdout: outfile = FilePath('/dev/null') else: outfile = self.getTmpFilename(self.TmpDir) if suppress_stderr: errfile = FilePath('/dev/null') else: errfile = FilePath(self.getTmpFilename(self.TmpDir)) if data is None: input_arg = '' else: input_arg = getattr(self,input_handler)(data) # Build up the command, consisting of a BaseCommand followed by # input and output (file) specifications command = self._command_delimiter.join(filter(None,\ [self.BaseCommand,str(input_arg),'>',str(outfile),'2>',\ str(errfile)])) if self.HaltExec: raise AssertionError, "Halted exec with command:\n" + command # The return value of system is a 16-bit number containing the signal # number that killed the process, and then the exit status. # We only want to keep the exit status so do a right bitwise shift to # get rid of the signal number byte tmp_dir = ''.join([self.WorkingDir, 'tmp']) mkdir(tmp_dir) exit_status = system(command) >> 8 rmdir(tmp_dir) # Determine if error should be raised due to exit status of # appliciation if not self._accept_exit_status(exit_status): raise ApplicationError, \ 'Unacceptable application exit status: %s, command: %s'\ % (str(exit_status),command) # open the stdout and stderr if not being suppressed out = None if not suppress_stdout: out = open(outfile,"r") err = None if not suppress_stderr: err = open(errfile,"r") result = CommandLineAppResult(out,err,exit_status,\ result_paths=self._get_result_paths(data)) # Clean up the input file if one was created if remove_tmp: if self._input_filename: for f in self._input_filename: remove(f) self._input_filename = None return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run(self, **kwargs):\n app = self.create_app()\n\n app.run(host=self.host, port=self.port, **kwargs)", "def run(self, **kwargs):", "def run(self, **kwargs):\n pass", "def run(self, *args, **kwargs):\n pass", "def run(self, args, **kwargs):\n raise NotImplementedError(...
[ "0.6900019", "0.6881134", "0.68657255", "0.68310666", "0.6701029", "0.66893464", "0.65126204", "0.64587903", "0.64343816", "0.64343816", "0.6430925", "0.64042205", "0.6383089", "0.6359522", "0.63256764", "0.6306275", "0.62514395", "0.62414396", "0.61845434", "0.6160434", "0.6...
0.0
-1
Retry calling the decorated function using an exponential backoff.
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None): def deco_retry(f): @wraps(f) def f_retry(*args, **kwargs): mtries, mdelay = tries, delay while mtries > 1: try: return f(*args, **kwargs) except ExceptionToCheck, e: msg = "%s, Retrying in %d seconds..." % (str(e), mdelay) if logger: logger.warning(msg) else: print msg time.sleep(mdelay) mtries -= 1 mdelay *= backoff return f(*args, **kwargs) return f_retry # true decorator return deco_retry
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def retry(tries, delay=3, backoff=2, except_on=(Exception, )):\n\n tries = math.floor(tries)\n\n def decorator(f):\n def f_retry(*args, **kwargs):\n return function_retry(\n tries, delay, backoff, except_on, f, *args, **kwargs)\n return f_retry # true decorator -> dec...
[ "0.78631854", "0.7701622", "0.7667836", "0.7597426", "0.7546011", "0.75036615", "0.7499006", "0.74785614", "0.7439682", "0.74343145", "0.7429641", "0.74183977", "0.7401052", "0.73962057", "0.7395584", "0.73916775", "0.7338477", "0.7319612", "0.7313288", "0.7284679", "0.716480...
0.6932827
36
Downloads a FASTA file for the proteome by organism ID
def get_fasta_by_id(proteome_id, output_file): taxid_pattern = re.compile('^\d{1,7}$') # if not taxid_pattern.match(proteome_id): # fetch file from Uniprot # raise ValueError(str(proteome_id) + ' is not a valid proteome identifier') url = UNIPROT_BASE_URL + proteome_id attempts = 0 while attempts < 3: try: response = requests.get(url) if response.status_code > 399 or response.status_code < 200: raise requests.HTTPError(response.status_code + ': ' + response.content) content = response.content if len(content) < 10: raise FastaNotFoundError() with open(output_file, 'w') as f: f.write(content) break except requests.HTTPError as e: attempts += 1 if attempts >= 3: raise FastaNotFoundError('Failed to download fasta: ' + response.status_code + ' response.content') return output_file
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def seq_download(name, organism=\"Homo sapiens\", gaba=False):\n\n subunits = {\n \"Alpha-1\": \"Gabra1\",\n \"Alpha-2\": \"Gabra2\",\n \"Alpha-3\": \"Gabra3\",\n \"Alpha-4\": \"Gabra4\",\n \"Alpha-5\": \"Gabra5\",\n \"Alpha-6\": \"Gabra6\",\n \"Beta-1\": \"Gabrb...
[ "0.72803736", "0.64713925", "0.622999", "0.6218201", "0.6060453", "0.59448034", "0.57914644", "0.5711981", "0.5588932", "0.5571663", "0.556189", "0.5560884", "0.55341786", "0.553164", "0.55225044", "0.5511088", "0.5500932", "0.54671043", "0.54671043", "0.54506993", "0.5367374...
0.71304876
1
A function for generating reaction likelihoods for a given genome according to the Probabilistic Annotation algorithm as
def generate_reaction_probabilities(fasta_file, template_model_file, genome_id=None): if genome_id is None: # Use fasta_file name minus extension. worker uses only for file names and logging genome_id = '.'.join(fasta_file.split('.')[0:-1]) # Create a worker for running the algorithm. worker = ProbAnnotationWorker(genome_id) try: template_model = _load_template_file(template_model_file) # Run blast using the fasta file. blast_result_file = worker.runBlast(fasta_file) # Calculate roleset probabilities. rolestring_tuples = worker.rolesetProbabilitiesMarble(blast_result_file) # Calculate per-gene role probabilities. role_probs = worker.rolesetProbabilitiesToRoleProbabilities(rolestring_tuples) # Calculate whole cell role probabilities. total_role_probs = worker.totalRoleProbabilities(role_probs) # Calculate complex probabilities. complex_probs = worker.complexProbabilities(total_role_probs, complexesToRequiredRoles=_complex_to_roles_dict(template_model)) # Calculate reaction probabilities. rxn_probs = worker.reactionProbabilities(complex_probs, rxnsToComplexes=_reactions_to_complexes_dict(template_model)) # Store in dictionary for better serialization return ReactionProbabilities([{'reaction': r[0], 'probability': r[1], 'type': r[2], 'complexes': _deserialize_cplx(r[3], worker.config['separator']), 'gpr': r[4]} for r in rxn_probs]) finally: worker.cleanup() # worker creates lots of temporary and intermediate files. Allow it to clean up
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calculateLogJointProbabilities(self, datum):\n logJoint = util.Counter()\n \n \"*** YOUR CODE HERE ***\"\n \n # -- OUR CODE HERE\n \n \n import math\n for label in self.legalLabels:\n sumThing = 0.0\n for pixel in self.conditionalProb[label]:\n if datum[pixel] is 1:\...
[ "0.64158255", "0.61762154", "0.6171898", "0.61255485", "0.6108559", "0.60751355", "0.5888883", "0.581115", "0.57994837", "0.5773769", "0.57539636", "0.5743196", "0.57412934", "0.57062215", "0.5637121", "0.5604803", "0.56041193", "0.55022335", "0.5498393", "0.543617", "0.53924...
0.6003137
6
Gapfill a model using probabilistic weights
def probabilistic_gapfill(model, universal_model, reaction_probabilities, clean_exchange_rxns=True, default_penalties=None, dm_rxns=False, ex_rxns=False, **solver_parameters): universal_model = universal_model.copy() model = clean_exchange_reactions(model) if clean_exchange_rxns else model.copy() if default_penalties is None: default_penalties = {'Universal': 1, 'Exchange': 100, 'Demand': 1, 'Reverse': 75} penalties = default_penalties reactions_to_remove = [] for r in universal_model.reactions: if model.reactions.has_id(r.id): reactions_to_remove.append(r) penalties[r.id] = 0 # In the model elif r.id in reaction_probabilities: penalties[r.id] = max(0, 1 - reaction_probabilities[r.id]) * (penalties[r.id] if r.id in penalties else 1) universal_model.remove_reactions(reactions_to_remove) return cobra.flux_analysis.gapfill(model, universal_model, penalties=penalties, demand_reactions=dm_rxns, exchange_reactions=ex_rxns, **solver_parameters)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_weights(model):\n ...", "def gap2d(_w_in):\n return nn.AdaptiveAvgPool2d((1, 1))", "def weightGenerate(self):\n\t\tfor i in range(0, self.numberOfInput):\n\t\t\tself.weight.append(random.random()-0.5)", "def update_weights(self):\n\n self.weights -= self.loss_grads\n self.los...
[ "0.59936357", "0.5899231", "0.5710139", "0.5680814", "0.5665641", "0.565304", "0.56094337", "0.56018", "0.55877113", "0.55586326", "0.5552374", "0.5551015", "0.5542684", "0.5506722", "0.5504694", "0.5479312", "0.54784465", "0.5458892", "0.54003364", "0.5388704", "0.5387438", ...
0.5979327
1
Exports the given reaction probabilities into a JSON formatted file, saved at filename
def export_json(rxn_probs, filename): with open(filename, 'w') as f: f.write(json.dumps(rxn_probs)) return filename
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_json(self, filename):\n with open(filename, 'a+') as f:\n f.write(json.dumps(self.weights))\n f.write(\"\\n\")", "def save(statistic_entries):\n with open('learn.json', 'w') as file:\n json.dump(statistic_entries, file, indent=2)", "def dump(pred_out_path, xyz_p...
[ "0.6705845", "0.641551", "0.6144338", "0.6136017", "0.6124605", "0.612017", "0.61101353", "0.60968125", "0.60849845", "0.5977744", "0.59029186", "0.587066", "0.5857155", "0.5841988", "0.5835876", "0.5835178", "0.5819759", "0.5813063", "0.5810192", "0.57985073", "0.5744357", ...
0.8034322
0
return the probability of a given reaction
def get_probability(self, reaction): return self.__getitem__(reaction)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def probability(self, item):\n count = self.counter.get(item, 0)\n if self.smoothing_dict:\n smooth_count = self.smoothing_dict.get(count, count)\n assert smooth_count > 0\n return smooth_count / self.smooth_total\n else:\n return count / self.total"...
[ "0.6590192", "0.6571907", "0.647744", "0.64491594", "0.6418756", "0.63888365", "0.63714635", "0.6371433", "0.6367964", "0.63313365", "0.6296423", "0.62912256", "0.62699294", "0.62629133", "0.62573755", "0.6252374", "0.62146765", "0.6207749", "0.62025577", "0.6187049", "0.6182...
0.8662731
0
Serializes this object as a JSON stringrxn
def to_json_file(self, path): with open(path, 'w') as f: f.write(self.to_json())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def serialize(self):\n return json.dumps(self.as_dict())", "def _toJSON(self):\n\n return json.encode(self.__toJSON())", "def to_json(self) -> str:\n return json.dumps(asdict(self))", "def to_json(self) -> str:\n return json.dumps(self.to_dict())", "def to_json(self) -> str:\n ...
[ "0.85692286", "0.85322154", "0.84612757", "0.84603655", "0.84603655", "0.84603655", "0.84603655", "0.84603655", "0.84603655", "0.84603655", "0.84603655", "0.84603655", "0.84569603", "0.8437394", "0.84211195", "0.8399535", "0.83540624", "0.83540624", "0.83424884", "0.83178025", ...
0.0
-1
Deserialize a ReactionProbabilities from a JSON file
def from_json_file(path): with open(path, 'r') as f: return ReactionProbabilities.from_json(f.read())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_priors(file_name):\n with open(file_name, \"r\") as fp:\n priors = json.load(fp)\n return priors", "def read_classification_json(fn):\n with open(fn) as f:\n classification_data = json.load(f)\n f.close()\n \n return classification_data", "def load_priors(self, ...
[ "0.6095132", "0.60795057", "0.5984798", "0.59495234", "0.57160014", "0.56639713", "0.5619954", "0.5497838", "0.5492578", "0.53609985", "0.5356933", "0.53483236", "0.5318638", "0.53132015", "0.52867985", "0.52783847", "0.5249616", "0.52431166", "0.5227828", "0.5201468", "0.519...
0.85232556
0
Updates the Reaction Probabilities
def update(self, rxn_probs): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def prob_update(self):\n pass", "def update_probabilities(self):\n self.probabilities = self.pheromones**self.EXP_PH * self.mcv**self.EXP_MCV", "def update(probabilities, one_gene, two_genes, have_trait, p):\n for person in probabilities:\n\n # count the genes for the person\n ge...
[ "0.7125513", "0.7022864", "0.6515388", "0.64963096", "0.64798397", "0.64248484", "0.64184403", "0.63899285", "0.6279894", "0.6097903", "0.6097903", "0.6088608", "0.6073833", "0.6068825", "0.60576266", "0.6045901", "0.6018484", "0.60050225", "0.5990919", "0.5990202", "0.598924...
0.6925041
2
Takes a big limit as an integer and get all the prime numbers in that range, including the limit itself. Returns a numpy array of the primes. Fragmentation is an int that multiplies the sqrt of the limit to increase the fragment size. Bigger fragmentation consumes more memory and less time. Fragmentation limit = sqrt of limit. For 4 GB RAM not enough memory for limit == 109. Fragmentation 1000 ok
def get_primes_in_big_limit(limit, fragmentation=1): print("Getting primes...") print("Fragmentation set to", fragmentation) fragment_limit = int(np.sqrt(limit)) fragment_lowest = 0 fragment_highest = fragment_lowest + fragment_limit primes_in_limit = np.array([], dtype=int) while fragment_highest < limit: if fragment_lowest == 0: fragment_highest += 1 primes_in_first_fragment = get_primes_in(fragment_highest) primes_in_limit = np.concatenate([primes_in_limit, primes_in_first_fragment], axis=None) else: primes_in_fragment = get_primes_in_fragment(fragment_lowest, fragment_highest, primes_in_first_fragment ) primes_in_limit = np.concatenate([primes_in_limit, primes_in_fragment], axis=None) fragment_lowest = fragment_highest fragment_highest += (fragment_limit * fragmentation) primes_in_last_fragment = get_primes_in_fragment(fragment_lowest, limit+1, primes_in_first_fragment ) return np.concatenate([primes_in_limit, primes_in_last_fragment], axis=None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_primes_in(limit):\n range_limit = np.arange(limit)\n prime_mask = np.ones(limit, dtype=bool)\n prime_mask[0:2] = False\n for i in range_limit[:int(np.sqrt(limit))+1]:\n if prime_mask[i]:\n prime_mask[2*i::i] = False\n return range_limit[prime_mask]", "def eratosthenes(lim...
[ "0.7772032", "0.75551635", "0.754886", "0.71958756", "0.7041358", "0.6890954", "0.6870041", "0.67170656", "0.66442776", "0.6626837", "0.65599114", "0.63915014", "0.6374505", "0.63370544", "0.6328696", "0.6316949", "0.6307289", "0.62010443", "0.6167947", "0.61109275", "0.61072...
0.8527422
0
Takes a limit as an integer and get all the prime numbers in that range, NOT including the limit itself. Returns a numpy array of the primes.
def get_primes_in(limit):
    """Sieve of Eratosthenes: return a numpy array of all primes strictly below `limit`."""
    candidates = np.arange(limit)
    is_prime = np.ones(limit, dtype=bool)
    # 0 and 1 are not prime.
    is_prime[0:2] = False
    # Only candidates up to sqrt(limit) need to sieve out their multiples.
    for candidate in candidates[:int(np.sqrt(limit)) + 1]:
        if is_prime[candidate]:
            is_prime[2 * candidate::candidate] = False
    return candidates[is_prime]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eratosthenes(limit):\n if isinstance(limit, (int, float)) and limit == int(limit):\n limit = int(limit)\n else:\n raise ValueError\n primes = []\n mask = [1]*(limit+1)\n for i in range(2, limit+1):\n if mask[i]:\n primes.append(i)\n for j in range(i*i, ...
[ "0.80535877", "0.7970838", "0.7566367", "0.75646406", "0.74854904", "0.7452761", "0.73869497", "0.7217808", "0.72028553", "0.7137117", "0.69901395", "0.69098914", "0.6839922", "0.66989404", "0.6691934", "0.6605368", "0.65635985", "0.64167565", "0.6413339", "0.6396981", "0.637...
0.846045
0
Takes fragment lowest and highest limits as an integers and get all the prime numbers in that range, NOT including the limit itself. Returns a numpy array of the primes. Needs the primes from the first fragment of the program as input.
def get_primes_in_fragment(fragment_lowest, fragment_highest, primes_in_first_fragment):
    """Segmented-sieve step: primes in [fragment_lowest, fragment_highest).

    `primes_in_first_fragment` are the base primes (up to sqrt of the overall
    limit) used to cross off composites inside this fragment.
    """
    segment = np.arange(fragment_lowest, fragment_highest)
    is_prime = np.ones(len(segment), dtype=bool)
    for base_prime in primes_in_first_fragment:
        # Smallest multiple of base_prime at or above fragment_lowest
        # (ceiling division, equivalent to the original divisible/else split).
        first_multiple = -(-fragment_lowest // base_prime)
        start_index = first_multiple * base_prime - fragment_lowest
        is_prime[start_index::base_prime] = False
    return segment[is_prime]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_primes_in_big_limit(limit, fragmentation=1):\n print(\"Getting primes...\")\n print(\"Fragmentation set to\", fragmentation)\n fragment_limit = int(np.sqrt(limit))\n fragment_lowest = 0\n fragment_highest = fragment_lowest + fragment_limit\n primes_in_limit = np.array([], dtype=int)\n ...
[ "0.7555909", "0.745268", "0.72320384", "0.6928359", "0.6911003", "0.6905713", "0.67729235", "0.6719026", "0.6628001", "0.66014606", "0.65840936", "0.6533467", "0.6412444", "0.64070624", "0.64064884", "0.63762397", "0.63470197", "0.6316131", "0.6301665", "0.6293129", "0.629164...
0.80316025
0
Takes a tuple where the first element is the dividend and the second element is the divisor. Both element sould be int. Performs a long division
def long_division(dividend_divisor_tuple, decimal_limit=5):
    """Perform long division on (dividend, divisor).

    :param dividend_divisor_tuple: (dividend, divisor), both ints.
    :param decimal_limit: maximum number of decimal digits to produce.
    :return: a 3-tuple of strings/None:
        (natural part, decimal part or None, recurring cycle or None).
    """
    natural, decimal = [], []
    dividend, divisor = dividend_divisor_tuple[0], dividend_divisor_tuple[1]
    assert isinstance(dividend, int), "Dividend not int"
    assert isinstance(divisor, int), "Divisor not int"
    floor_div = dividend // divisor
    rest = dividend % divisor
    # Natural part of the division
    while floor_div > 0:
        natural.append(str(floor_div))
        dividend = rest
        floor_div = dividend // divisor
        rest = dividend % divisor
    if rest == 0:
        # Divisor is factor of dividend
        print("Divisor is factor of dividend")
        return ("".join(natural), None, None)
    # Decimal part of the division
    # dividend_list records the remainders seen so far; seeing one again means
    # the decimal expansion has entered a recurring cycle.
    dividend_list = []
    recurring_index = None
    while len(decimal) < decimal_limit:
        dividend_list.append(dividend)
        dividend *= 10
        floor_div = dividend // divisor
        decimal.append(str(floor_div))
        rest = dividend % divisor
        if rest == 0:
            # Terminating decimal reached
            return ("".join(natural), "".join(decimal), None)
        elif rest in dividend_list:
            # Recurring cycle found
            recurring_index = dividend_list.index(rest)
            print("Recurring cycle found")
            break
        else:
            dividend = rest
    if recurring_index is not None:
        # Split the digits into the non-repeating prefix and the cycle.
        recurring = decimal[recurring_index:]
        decimal = decimal[:recurring_index]
        return ("".join(natural), "".join(decimal), "".join(recurring))
    else:
        print("Decimal limit reached")
        return ("".join(natural), "".join(decimal), None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def div_numbers(a: int, b: int) -> int:\n return a / b", "def test_div():\n l = [1, 2, 3, 4]\n assert s7.div(*l) == 1 / 2 / 3 / 4\n assert s7.div(100, 20) == 5\n assert s7.div(100.0, 20) == 5.0\n assert s7.div(100, 20.0) == 5.0", "def div(seq):\n for (i, n) in enumerate(seq):\n # tr...
[ "0.65800995", "0.6371114", "0.63462335", "0.6256875", "0.6244074", "0.62214583", "0.6192333", "0.61851525", "0.6185146", "0.61841047", "0.6174636", "0.6085751", "0.6082528", "0.603412", "0.6029013", "0.6028425", "0.6017244", "0.6011632", "0.60035694", "0.59994143", "0.5984466...
0.6508378
1
Get length of number in digits.
def get_number_length(number):
    """Return the number of digits in `number`.

    Fix: for negative numbers the leading '-' was counted as a digit
    (len(str(-123)) == 4). The sign is now stripped for numeric input;
    non-numeric input keeps the original str-length behavior.
    """
    if isinstance(number, (int, float)):
        return len(str(abs(number)))
    return len(str(number))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ndigits(n):\n return len(str(abs(n)))", "def count_digits(n):\n return len(str(n))", "def _number_of_digits(number: int) -> int:\n return int(log10(number)) + 1", "def get_int_width(integer):\n return len(str(integer))", "def get_length(x):\n\n try:\n return int(x)\n ex...
[ "0.7974036", "0.78436166", "0.75876623", "0.72771937", "0.68577904", "0.68538064", "0.677399", "0.6735333", "0.6720063", "0.6719415", "0.6715743", "0.6707653", "0.6707653", "0.6627682", "0.6612727", "0.65931964", "0.6514543", "0.64946806", "0.6464933", "0.6453461", "0.6445275...
0.8823181
0
Returns array of all the posible sums between list elements.
def cross_sum_elements_of_list(list_of_int):
    """Return the sorted unique pairwise sums of the list elements.

    Every element is summed with every element (including itself) via a
    broadcasted column + row addition; np.unique sorts and de-duplicates.
    """
    column = np.array(list_of_int).reshape((len(list_of_int), 1))
    pair_sum_matrix = column + column.T
    return np.unique(pair_sum_matrix)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sum(lists) -> list:\r\n return list(np.sum(lists, 0))", "def sum_elements(arr):\n return sum(arr)", "def sum_list(input_list: List[float]) -> float:\n return sum(input_list)", "def sum_list(input_list: List[float]) -> float:\n return sum(input_list)", "def sum_list(input_list: List[float]) ...
[ "0.7886432", "0.72365195", "0.7159521", "0.7159521", "0.71008116", "0.7051906", "0.70465595", "0.7040757", "0.67752486", "0.6750484", "0.6748924", "0.6736692", "0.6732239", "0.66944194", "0.6687826", "0.667164", "0.66535944", "0.6639427", "0.66387355", "0.6596252", "0.6593165...
0.71858424
2
Return list of all the divisors of an integer num.
def get_divisors(num):
    """Return every divisor of the nonzero integer `num` (unsorted).

    Walks candidates up to sqrt(num), adding each divisor together with its
    cofactor; a perfect-square root would otherwise appear twice.
    """
    assert num != 0, "Num is 0"
    divisors = []
    root = int(num ** 0.5)
    for candidate in range(1, root + 1):
        quotient, remainder = divmod(num, candidate)
        if remainder == 0:
            divisors.append(candidate)
            divisors.append(quotient)
    # Perfect square: the root was paired with itself — drop one copy.
    if root * root == num:
        divisors.remove(root)
    return divisors
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def simple_get_divisors(num: int) -> list:\n all_divisors = []\n for possible_divisor in range(1, math.floor(num / 2) + 1):\n if num % possible_divisor == 0:\n all_divisors.append(possible_divisor)\n return all_divisors", "def get_divisores(num):\n divisores = [] #uso una lista para...
[ "0.87616765", "0.84940773", "0.8239428", "0.8224629", "0.81623006", "0.80934465", "0.8033521", "0.78625435", "0.78059375", "0.7753855", "0.7717071", "0.76835775", "0.767592", "0.7558653", "0.75212115", "0.74451685", "0.74397975", "0.7423215", "0.74036175", "0.7400764", "0.739...
0.8431378
2
Get absolute path to resource, to get logo
def resource_path(relative_path):
    """Resolve a bundled resource path (e.g. the logo image).

    Under a PyInstaller onefile build, data files are unpacked to a temp dir
    recorded in sys._MEIPASS; otherwise fall back to the current directory.
    """
    base_path = getattr(sys, "_MEIPASS", None)
    if base_path is None:
        base_path = os.path.abspath(".")
    return os.path.join(base_path, relative_path)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def logo_url(self):\n return self.get_url(\"logo\", \"images/logo.png\")", "def logo_uri(self) -> str:\n return pulumi.get(self, \"logo_uri\")", "def resource_path(name):\n return os.path.join(\n os.path.dirname(__file__), 'images', 'resource', name)", "def get_client_company_logo_dir...
[ "0.72962266", "0.719628", "0.715207", "0.7071001", "0.6983272", "0.69696295", "0.6932713", "0.6828541", "0.6828541", "0.6821016", "0.68035823", "0.6728892", "0.67225164", "0.67204624", "0.6692771", "0.6692771", "0.6684799", "0.6639405", "0.6596103", "0.65899694", "0.6586349",...
0.0
-1
function looping through other functions to build tree and then save it
def loop_trough_row(var, name_col, list_error, list_of_project_info):
    """Walk the spreadsheet rows (from row 3) and build one state file per row.

    For each row with a file name in `name_col`, builds an XML tree, fills it
    via loop_through_col, and saves it as "<name>.tcs" in the global
    `folder_name`. Collected error records are returned in `list_error`.

    NOTE(review): relies on module globals `exceldokument` and `folder_name`
    being set by the GUI before this runs — confirm call order.
    """
    sheet = get_excel(exceldokument)
    i = 3
    while i < sheet.nrows:
        try:
            file_name = str(sheet.cell_value(i, name_col))
            if file_name != "":
                tree, safecookie, steps, prev = createxmlmall()
                list_error = loop_through_col(steps, safecookie, i, file_name, var,
                                              list_error, list_of_project_info)
                # save_xml(tree, (file_name) + ".xml", folder_name)
                # Error type "4" (bad header rows) aborts the whole run.
                for errors in list_error:
                    if errors.error_type == "4":
                        return list_error
                save_xml(tree, file_name + ".tcs", folder_name)
            else:
                # Row has data but no file name -> record a type "3" error
                # (row number reported 1-based, hence i + 1).
                for l in range(sheet.ncols):
                    if str(sheet.cell_value(i, l)) != "":
                        p = AddFileWithError(i + 1, "3")
                        list_error = p.add_el(list_error, p)
            i += 1
        # NOTE(review): bare except hides the real failure; any exception in
        # the row becomes a generic type "1" error and stops the loop.
        except:
            p = AddFileWithError(i + 1, "1")
            list_error = p.add_el(list_error, p)
            return list_error
    return list_error
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build_tree(self, prefix, depth):\n for count, function in [[self.n_files, self.make_file],\n [self.n_children, self.make_child_recurse],\n [self.n_symlinks, self.make_symlink]]:\n for i in range(count):\n if not self...
[ "0.635694", "0.63468206", "0.6294683", "0.6240964", "0.6053419", "0.59460986", "0.59322184", "0.58918864", "0.5855702", "0.581498", "0.57331216", "0.56759137", "0.56216335", "0.5617605", "0.5604815", "0.56032133", "0.56020284", "0.559628", "0.5595393", "0.5594104", "0.5552079...
0.0
-1
opens the chosen exceldocument and returns it as sheet
def get_excel(exceldocument):
    """Open the given Excel workbook (via xlrd) and return its first worksheet."""
    sheet = xlrd.open_workbook(exceldocument).sheet_by_index(0)
    return sheet
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def openExcelSheet(outputFileName):\n workbook = Workbook()\n worksheet = workbook.add_sheet(\"Sheet 1\")\n return workbook, worksheet", "def _get_spreadsheet(i):\n path = io_mgr.get_parties_spreadsheet(i)\n if not os.path.exists(path):\n raise IOError()\n\n return openpyxl.load_workbook(path,...
[ "0.69280106", "0.6738904", "0.6419168", "0.6310797", "0.605773", "0.6051629", "0.6044591", "0.6008777", "0.5982114", "0.5974647", "0.59715974", "0.5966216", "0.59524924", "0.59430736", "0.5873261", "0.5856575", "0.57233405", "0.5704794", "0.569487", "0.5689535", "0.5683357", ...
0.77836186
0
creates an xml structure with root and motherelements
def createxmlmall():
    """Build the skeleton state-XML document.

    Creates a <state> root with its fixed children (model, dataids,
    application, safecookie, steps/prev, last-proxy).

    :return: (tree, safecookie, steps, prev) — the ElementTree plus the
        elements that later code appends to.
    """
    root = ET.Element("state")
    ET.SubElement(root, "model").text = ""
    ET.SubElement(root, "dataids")
    ET.SubElement(root, "application").text = "SIBS Configurator"
    safecookie = ET.SubElement(root, "safecookie")
    steps = ET.SubElement(root, "steps")
    prev = ET.SubElement(steps, "prev")
    ET.SubElement(root, "last-proxy").text = "tcserver0"
    tree = ET.ElementTree(root)
    return tree, safecookie, steps, prev
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new_xml(self, root_name):\n\n self.tree = ET.ElementTree(ET.fromstring('<?xml version=\"1.0\" encoding=\"UTF-8\"?><%s></%s>'%(\n root_name, root_name)))\n return self.tree.getroot()", "def create_roots(self):\n self.root ...
[ "0.6970181", "0.63764966", "0.6287799", "0.6225016", "0.6220165", "0.6217637", "0.6206258", "0.619495", "0.6160407", "0.6134908", "0.61195517", "0.6091069", "0.6088098", "0.60488194", "0.59819216", "0.59459764", "0.59355354", "0.5920509", "0.5884173", "0.5878814", "0.5870564"...
0.75603426
0
Creates a folder and saves xml tree in a specific path
def save_xml(tree, file_name, folder_name):
    """Write the XML `tree` as `file_name` inside `folder_name`.

    Fix: the original called os.chdir(folder_name), silently mutating the
    process-wide working directory for all later relative-path operations.
    Joining the path gives the same output file without that side effect.

    :param tree: an xml.etree.ElementTree.ElementTree to serialize.
    :param file_name: target file name (e.g. "name.tcs").
    :param folder_name: destination directory (must already exist).
    """
    import os

    tree.write(os.path.join(folder_name, file_name))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mkdir(path):", "def create_folder(path):\n command = ['mkdir', TEST_DIR]\n file_operation(path, command)", "def create_folder(path):\n if not os.path.exists(path):\n os.makedirs(path)", "def create_folder(self):\n Path(self.root_name).mkdir(parents=True, exist_ok=True)\n Pat...
[ "0.7024828", "0.700881", "0.6860256", "0.68011755", "0.6756072", "0.6738238", "0.66949385", "0.66890776", "0.6670779", "0.6649409", "0.6647468", "0.66387165", "0.66373086", "0.66274315", "0.66152924", "0.66047573", "0.6588086", "0.6582058", "0.656134", "0.65589786", "0.651623...
0.7450946
0
takes input and returns a string
def makeinputstring(variabel):
    """Convert a cell value to its string form.

    Floats are truncated to their integer part first (Excel often reports
    whole numbers as floats); everything else is stringified as-is.
    Exact type() checks are deliberate: subclasses (e.g. bool) fall through
    to plain str().
    """
    value_type = type(variabel)
    if value_type is float:
        return str(int(float(variabel)))
    if value_type is int:
        return str(variabel)
    return str(variabel)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_input_string():\n return input(\"Enter input string: \")", "def get_string(opt=\"encode\"):\n text = input(f\"Enter string to {opt}: \")\n return text", "def input_str() -> str:\n\tinput_string = str(input('Enter your string: '))\n\treturn input_string", "def input_str() -> str:\n\t\tinput_s...
[ "0.73874146", "0.7343399", "0.72803926", "0.7171811", "0.7066056", "0.6929175", "0.69019353", "0.6721488", "0.66247624", "0.65604264", "0.6521488", "0.6518847", "0.6444485", "0.6426503", "0.6425762", "0.63756746", "0.6344128", "0.6306453", "0.6288618", "0.6283329", "0.6271066...
0.63531923
16
This function loops through the excel and sorts out elements, names, texts and where in the tree they should be added.
def loop_through_col(steps, safecookie, b, file_name, var, list_of_project_info_err=None, *, _unused=None) if False else None
def loop_through_col(steps, safecookie, b, file_name, var, list_error, list_of_project_info):
    """Walk the sheet columns for data row `b` and append steps/attributes
    to the XML under `steps`/`safecookie`.

    Columns from index 4 onward are grouped by header row 0: a non-empty
    header starts a new step; the following header-less columns belong to
    that step. Row `row_for_commitname` supplies the attribute names.

    Returns the (possibly extended) `list_error`; a type "4" error means the
    header or commit-name row is completely empty.
    """
    col, k, j, g = 4, 1, 0, 0
    sheet = get_excel(exceldokument)
    row_for_commitname = 2  # new layout
    # row_for_commitname = 1  # old layout
    # First pass: count non-empty header cells (j) and commit-name cells (g).
    while col < sheet.ncols:
        if sheet.cell_type(0, col) != 0:
            j += 1
        if sheet.cell_type(row_for_commitname, col) != 0:
            g += 1
        col += 1
    if j == 0 or g == 0:
        # Nothing usable on the header rows -> abort with a type "4" error.
        p = AddFileWithError(file_name, "4")
        list_error = p.add_el(list_error, p)
        return list_error
    col = 4
    # Second pass: build one step per headed column, then pull in the
    # header-less columns that follow it.
    while col < sheet.ncols:
        if sheet.cell_type(0, col) != 0:
            name = (sheet.cell_value(0, col))
            commit = Steps(name).addtoxml(k, j, steps, safecookie)
            commit_name = (sheet.cell_value(row_for_commitname, col))
            list_error = check_cell_error(b, col, sheet, list_error, file_name)
            if commit_name.lower() == "littra":
                commit_name = "littra"
            if k == 1 and sheet.cell_type(b, col) != 0:
                # First step also carries the project-wide info fields.
                c_name = (sheet.cell_value(b, col))
                AddAttrToTree(commit, commit_name, makeinputstring(c_name)).addtoxml()
                for x in range(0, 5):
                    AddAttrToTree(commit, list_of_project_info[x], makeinputstring(var[x])).addtoxml()
            elif k != 1 and sheet.cell_type(b, col) != 0:
                c_name = (sheet.cell_value(b, col))
                AddAttrToTree(commit, commit_name, makeinputstring(c_name)).addtoxml()
            k += 1
            n = col + 1
            if sheet.cell_type(0, n) != 0:
                if sheet.cell_type(b, col) != 0:
                    commit_name = (sheet.cell_value(row_for_commitname, col))
                    c_name = (sheet.cell_value(b, col))
                    AddAttrToTree(commit, commit_name, makeinputstring(c_name)).addtoxml()
            elif sheet.cell_type(0, n) == 0:
                # Consume the header-less columns belonging to this step.
                # NOTE(review): bare except presumably guards against running
                # past the last column — confirm.
                try:
                    while sheet.cell_type(0, n) == 0 and n - 1 < sheet.ncols:
                        if sheet.cell_type(b, n) != 0:
                            list_error = check_cell_error(b, n, sheet, list_error, file_name)
                            commit_name = (sheet.cell_value(row_for_commitname, n))
                            c_name = (sheet.cell_value(b, n))
                            AddAttrToTree(commit, commit_name, makeinputstring(c_name)).addtoxml()
                        n += 1
                except:
                    pass
        col += 1
    return list_error
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_excel(self):\r\n try:\r\n self.check_point_1 = True\r\n self.filename = tkFileDialog.askopenfilename(initialdir=\"/\", title=\"Select file\",\r\n filetypes=((\"excel files\", \"*.xlsx\"), (\"all files\", \"*.*\")))\r\n\r\n ...
[ "0.58093977", "0.55304813", "0.54485965", "0.5410043", "0.53819305", "0.5349107", "0.5269191", "0.5251047", "0.51980996", "0.5178889", "0.51472735", "0.5129128", "0.5119549", "0.50939554", "0.5085807", "0.5049187", "0.5038129", "0.50297076", "0.5020485", "0.49983808", "0.4973...
0.49841878
20
Creates variable of chosen exceldocument
def entry_set_excel(self, entry):
    """Ask the user to pick the Excel workbook, store the chosen path in the
    module-level global `exceldokument`, and echo it into the entry widget."""
    global exceldokument
    exceldokument = filedialog.askopenfilename(filetypes=[("Excel file", "*.xlsx"), ("Excel file", "*.xlsm")])
    entry.delete(0, 'end')
    entry.insert(tk.END, exceldokument)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def documento():\r\n\tpass", "def edit_document():", "def build_document(self):\n pass", "def savedoc():\r\n document.save('QSDoc_{0}_{1}_{2}_{3}.docx'.format(args.server, year, month, day))", "def create(init_document: 'Document') -> 'DocumentArray':", "def _create_document(result_dict):\n ...
[ "0.5883688", "0.5880611", "0.58433646", "0.58253616", "0.57991326", "0.57213825", "0.5719066", "0.5658872", "0.56124973", "0.5600502", "0.5573449", "0.55684054", "0.55032", "0.5484727", "0.54810476", "0.5461333", "0.5415002", "0.54017013", "0.53869313", "0.538439", "0.5352028...
0.0
-1
Creates variable of chosen folder
def entry_set_folder(self, entry):
    """Ask the user to pick the output directory, store it in the module-level
    global `folder_name`, and echo it into the entry widget."""
    global folder_name
    folder_name = filedialog.askdirectory()
    entry.delete(0, 'end')
    entry.insert(tk.END, folder_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_folder():\n return input(\"Folder: \")", "def create_folder(self):\n cur_dir=os.getcwd()\n unique=False\n dirlist= [item for item in os.listdir(cur_dir) if os.path.isdir(os.path.join(cur_dir,item))]\n folder_name='taxonomy_{}_{}'.format(self.place,self.year)\n j=1\n ...
[ "0.6584977", "0.6489762", "0.6430416", "0.6309875", "0.63006985", "0.6285221", "0.6282425", "0.6207818", "0.61075854", "0.60703206", "0.6058079", "0.6047258", "0.6042317", "0.6019528", "0.6018604", "0.6012094", "0.6007274", "0.597246", "0.5971677", "0.59710586", "0.5961468", ...
0.56268847
66
Sends all the input information to the row loop
def start_config(self, entries, name_col, list_error, list_of_project_info):
    """Collect the user-typed field values (entries from index 3 on) and run
    the row loop over the spreadsheet; returns the accumulated error list."""
    field_values = [entry.get() for entry in entries[3:]]
    return loop_trough_row(field_values, name_col, list_error, list_of_project_info)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def loop(self):\n pass", "def loop(self):\n while True:\n self._print_field()\n try:\n cmd = input(PROMPT)\n self._invoke_cmd(cmd)\n except EOFError: # Allows to exit by pressing ⌃D without error\n break", "def inputlo...
[ "0.68043214", "0.6738158", "0.6389131", "0.6361578", "0.63389635", "0.62979394", "0.626825", "0.6264768", "0.62522584", "0.6227473", "0.6211855", "0.61628276", "0.6144344", "0.6113936", "0.6113531", "0.6113409", "0.6088087", "0.6075006", "0.60653067", "0.6044094", "0.6031112"...
0.0
-1
Control of inputs and try/except for mainloop
def check_entry(self, controller, entries, list_of_project_info, error_label):
    """Validate the GUI inputs, run the generation, and report the outcome.

    Rejects empty fields and a non-alphabetic column letter, then delegates to
    controller.start_config. On errors, writes "error_report.txt" (in the
    current working directory) and shows a summary in `error_label`.
    """
    # All entry fields must be filled in.
    for x in range(0, len(entries)):
        if entries[x].get() == "":
            messagebox.showerror("Error", "Expected no empty fields")
            return
    # The name column must be given as a letter (e.g. 'B'), not a number.
    if not entries[2].get().isalpha():
        messagebox.showerror("Error", "Expected column in letter not number, e.g. 'B' ")
        return
    name_col = self.col_to_num(entries[2].get())
    self.write_to_indata(entries)
    list_error, error_present = [], []
    list_error = controller.start_config(entries, name_col, list_error, list_of_project_info)
    if len(list_error) == 0:
        message = "Successfully generated all state files"
        error_present.append(message)
        error_label.config(text="Successfully generated all state files")
    else:
        # Translate each error record into a human-readable line.
        for element in list_error:
            if element.error_type == "1":
                # error in loop_trough_row
                message = "expected error in excel spreadsheet at row" + str(element.file_name) + "\n"
            elif element.error_type == "2":
                # filename missing
                message = "expected error in file " + str(element.file_name) + "\n"
            elif element.error_type == "3":
                # Filename error
                message = "expected error in file name at row " + str(element.file_name) + "\n"
            elif element.error_type == "4":
                # "Seems like error in 1:st or 3:rd line in excel sheet"
                message = "expected error in excel spreadsheet on 1:st or 3:rd row " + "\n"
            error_present.append(message)
        error_report = open("error_report.txt", "w+")
        error_report.write(''.join(error_present))
        error_report.close()
        error_label.config(text="Error occured, check error report in " + entries[1].get())
        # error_label.config(text=(''.join(error_present)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cont():\n\n try:\n input = raw_input()\n except Exception:\n pass", "def interactive_run(self):\r\n while True:\r\n try:\r\n #self.display_mang.print_instructions()\r\n input_string = input()\r\n if input_string == \"exit\":\r...
[ "0.68585896", "0.68095475", "0.6770353", "0.65708673", "0.6512728", "0.6459991", "0.6366776", "0.6324164", "0.62918556", "0.6262099", "0.6262099", "0.62162334", "0.6207645", "0.6178404", "0.6168539", "0.61427873", "0.60846925", "0.60826176", "0.6050576", "0.60469836", "0.6045...
0.0
-1
Create an instance to manager the blob properties extraction.
def Instance(self):
    """Return the cached Properties instance, creating it lazily on first call.

    NOTE(review): `__Instance` is name-mangled to the enclosing class, so this
    cache is per-instance/per-class of wherever this method is defined.
    """
    if self.__Instance is None:
        self.__Instance = Properties()
    return self.__Instance
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, blob=None):\n if blob is None:\n self.versionCode = '0'\n self.data = {}\n else:\n self.versionCode = blob[0]\n encoded = blob[1:]\n compressed = base64.b64decode(encoded)\n self.data = json.loads(zlib.decompress(com...
[ "0.6232803", "0.5880269", "0.58206016", "0.5818862", "0.5719443", "0.57131475", "0.5703017", "0.56779015", "0.5674141", "0.5668743", "0.5663582", "0.5659485", "0.565841", "0.56503475", "0.56440836", "0.5627367", "0.5611333", "0.5609648", "0.5596115", "0.5574799", "0.55605406"...
0.0
-1
This constructor is never used by the system.
def __init__(self):
    """No-op constructor.

    NOTE(review): per the accompanying note this constructor is never used by
    the system; it exists only to make the class explicitly instantiable.
    """
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n raise NotImplementedError", "def __init__(self):\n raise NotImplementedError", "def __init__(self):\n raise NotImplementedError", "def __init__(self):\n raise NotImplementedError", "def __init__(self):\n raise NotImplementedError(\"This class cannot ...
[ "0.82291555", "0.82291555", "0.82291555", "0.82291555", "0.8227341", "0.8077878", "0.8016458", "0.8003538", "0.79657507", "0.79657507", "0.79657507", "0.79657507", "0.79657507", "0.79657507", "0.79657507", "0.79657507", "0.79657507", "0.79657507", "0.79657507", "0.79657507", ...
0.73578566
63
Gets an object representation in a string format.
def __repr__(self):
    """Return a fixed, human-readable description of this object."""
    return "IAMLTools.BlobProperties object."
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_str(self, obj):\n if self.pretty:\n return pprint.pformat(obj)\n else:\n return str(obj)", "def objectToString(obj):\n if (hasattr(obj, \"__iter__\")):\n # matrix or vector\n if len(obj) == 0:\n return \"\"\n else:\n if (ha...
[ "0.79434896", "0.76730764", "0.74159396", "0.7268582", "0.7246455", "0.7231229", "0.7168203", "0.710605", "0.7084541", "0.70546156", "0.70546156", "0.70546156", "0.70546156", "0.70546156", "0.70546156", "0.70546156", "0.70546156", "0.70546156", "0.70546156", "0.70546156", "0....
0.0
-1
Calculate and return a dictionary of the contour properties specified by the properties list.
def getContourProperties(self, contour, properties=[]):
    """Calculate the requested contour properties.

    Fixes:
    * "approximation"/"area" were handled by two separate `if` chains, so a
      valid "approximation" request fell through the second chain and printed
      the PROPERTY ERROR message; all checks are now one `if/elif` chain.
    * The closing separator used the invalid escape "\\--" instead of "\\t--".

    :param contour: an OpenCV contour (point array).
    :param properties: iterable of property names (case-insensitive).
    :return: dict mapping capitalized property name -> computed value.
    """
    failInInput = False  # ensures the unknown-property warning prints once
    props = {}
    for prop in properties:
        prop = str(prop).lower()
        if prop == "approximation":
            props.update({"Approximation": self.__CalculateApproximation(contour)})
        elif prop == "area":
            props.update({"Area": self.__CalculateArea(contour)})
        elif prop == "boundingbox":
            props.update({"BoundingBox": self.__CalculateBoundingBox(contour)})
        elif prop == "centroid":
            props.update({"Centroid": self.__CalculateCentroid(contour)})
        elif prop == "circle":
            props.update({"Circle": self.__CalculateCircle(contour)})
        elif prop == "circularity":
            props.update({"Circularity": self.__CalculateCircularity(contour)})
        elif prop == "convexhull":
            props.update({"ConvexHull": self.__CalculateConvexHull(contour)})
        elif prop == "extend":
            props.update({"Extend": self.__CalculateExtend(contour)})
        elif prop == "ellipse":
            props.update({"Ellipse": self.__CalculateEllipse(contour)})
        elif prop == "isconvex":
            props.update({"IsConvex": self.__IsConvex(contour)})
        elif prop == "length":
            props.update({"Length": self.__CalculateLength(contour)})
        elif prop == "moments":
            props.update({"Moments": self.__CalculateMoments(contour)})
        elif prop == "perimeter":
            props.update({"Perimeter": self.__CalculatePerimeter(contour)})
        elif prop == "rotatedbox":
            props.update({"RotatedBox": self.__CalculateRotatedBox(contour)})
        elif failInInput:
            pass
        else:
            print("\t--" * 20)
            print("\t*** PROPERTY ERROR " + prop + " DOES NOT EXIST ***")
            print("\tTHIS ERROR MESSAGE WILL ONLY BE PRINTED ONCE")
            print("\t--" * 20)
            failInInput = True
    return props
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getProperties(properties =['electrical_props', '__description'], \r\n sproperty ='electrical_props'):\r\n #------------------------------------\r\n from .database import GeoDataBase\r\n #-----------------------------------\r\n def _fs (v): \r\n \"\"\...
[ "0.66682273", "0.6444924", "0.6429075", "0.6418234", "0.62668264", "0.61048454", "0.6090979", "0.60708404", "0.60441667", "0.600167", "0.59521866", "0.5907654", "0.58133453", "0.5764389", "0.5727516", "0.57220525", "0.5717066", "0.56708825", "0.5670351", "0.56337535", "0.5605...
0.5443529
30
Calculate the approximation of a contour shape to another shape with less number of vertices depending upon the precision we specify.
def __CalculateApproximation(self, contour):
    """Approximate the contour with a simpler closed polygon (cv2.approxPolyDP).

    The tolerance is 10% of the closed-contour perimeter, so the result is a
    very coarse approximation.
    """
    epsilon = 0.1 * cv2.arcLength(contour, True)
    return cv2.approxPolyDP(contour, epsilon, True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def contourApprox(cnt, epsilon = 0.005):\n\tepsilon = epsilon*cv2.arcLength(cnt, True)\n\tapprox = cv2.approxPolyDP(cnt, epsilon, True)\n\treturn approx", "def approx_poly(self, mask):\n\n mask_expand = mask.copy()\n contours, _ = cv2.findContours(mask_expand, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NO...
[ "0.6073888", "0.5673097", "0.5591408", "0.5486369", "0.5361581", "0.53499943", "0.5272163", "0.52631676", "0.5168495", "0.51550204", "0.5142952", "0.51347584", "0.51139116", "0.5096012", "0.5069017", "0.50188994", "0.5007644", "0.5004071", "0.50028485", "0.49921945", "0.49831...
0.6092272
0
Calculate the contour area by the function cv2.contourArea() or from moments, M["m00"].
def __CalculateArea(self, contour):
    """Return the contour area via cv2.contourArea."""
    return cv2.contourArea(contour)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def area(cnt):\n\treturn cv2.contourArea(cnt)", "def get_contour_area(contour):\n assert isinstance(contour, np.ndarray), 'contour should be a numpy array'\n return cv2.contourArea(contour)", "def __CalculateMoments(self, contour):\r\n return cv2.moments(contour)", "def get_max_area(contours):\n...
[ "0.7210734", "0.66500163", "0.6628211", "0.6365675", "0.614254", "0.61276436", "0.6062467", "0.5952984", "0.5937475", "0.59279317", "0.58706164", "0.57940084", "0.5752224", "0.5744717", "0.5743859", "0.5743859", "0.5719784", "0.5644189", "0.56355894", "0.5590524", "0.5581619"...
0.7479222
0
Calculate the bouding rectangle. It is a straight rectangle, it doesn't consider the rotation of the object. So area of the bounding rectangle won't be minimum. It is found by the function cv2.boundingRect().
def __CalculateBoundingBox(self, contour):
    """Return the upright (axis-aligned) bounding rectangle as (x, y, w, h)."""
    return cv2.boundingRect(contour)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def boundingRect(self):\n return self.rect().adjusted(-2, -2, 2, 2)", "def boundingRect(cnt):\n\tx, y, w, h = cv2.boundingRect(cnt)\n\treturn {\"x\":x, \"y\": y, \"w\": w, \"h\": h}", "def boundingBox(self):\n minx, miny, maxx, maxy = self.substrates.bounds\n return pcbnew.BOX2I(\n ...
[ "0.7676917", "0.72490233", "0.71938384", "0.7107463", "0.70778424", "0.7075241", "0.6987138", "0.698335", "0.69716114", "0.691494", "0.6906388", "0.6903976", "0.68773127", "0.685709", "0.6815819", "0.68036884", "0.6765353", "0.6762905", "0.6715452", "0.66673505", "0.66570497"...
0.74042195
1
Calculates the centroid of the contour. Moments up to the third order of a polygon or rasterized shape.
def __CalculateCentroid(self, contour):
    """Return the contour centroid (cx, cy) from its image moments.

    Returns (-1, -1) when the zero-order moment is 0 (degenerate contour),
    since the centroid is undefined in that case.
    """
    m = cv2.moments(contour)
    if m["m00"] == 0:
        return (-1, -1)
    cx = int(round(m["m10"] / m["m00"]))
    cy = int(round(m["m01"] / m["m00"]))
    return (cx, cy)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def centroid(self):\n return self.contours_to_matrix().mean(axis=0)", "def calculateCentroid(self,image):\n\t\tim=cv2.imread(image,0) #reads it in greyscale\n\t\tret,thresh = cv2.threshold(img_copy,128,255,cv2.THRESH_OTSU)\n\t\tim2,contours,hierarchy = cv2.findContours(thresh, 1, 2)\n\t\tcnt = contours[0]...
[ "0.7549272", "0.7480494", "0.74477065", "0.72905296", "0.72855806", "0.7277071", "0.72547257", "0.7159602", "0.71536976", "0.70305306", "0.7029446", "0.7029446", "0.7026189", "0.6984927", "0.69735426", "0.6927554", "0.69200563", "0.69048506", "0.69047856", "0.6894379", "0.688...
0.80175775
0
Calculate the circumcircle of an object using the function cv2.minEnclosingCircle(). It is a circle which completely covers the object with minimum area.
def __CalculateCircle(self, contour):
    """Return the minimum enclosing circle as ((cx, cy), radius)."""
    return cv2.minEnclosingCircle(contour)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def boundingCircle(self):\n\n try:\n import cv2\n except:\n logger.warning(\"Unable to import cv2\")\n return None\n\n # contour of the blob in image\n contour = self.contour()\n\n points = []\n # list of contour points converted to suitabl...
[ "0.74524856", "0.72214043", "0.6968209", "0.655094", "0.637335", "0.62411404", "0.61780167", "0.61020416", "0.6038088", "0.6018948", "0.6009186", "0.59972024", "0.5996103", "0.5971567", "0.5965567", "0.5927634", "0.5920173", "0.5918442", "0.59156424", "0.5889828", "0.58796084...
0.80844194
0
Calculate the ellipse circularity.
def __CalculateCircularity(self, contour):
    """Return 4*pi*area / perimeter^2 (1.0 for an ideal circle).

    Contours with fewer than 2 points have no perimeter, so 0 is returned.
    """
    if len(contour) < 2:
        return 0
    arc = cv2.arcLength(contour, False)
    region = self.__CalculateArea(contour)
    return (4 * math.pi * region) / (arc * arc)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_eccentricity(self, ellipse):\r\n a = ellipse.get_width()\r\n b = ellipse.get_height()\r\n if b > a:\r\n a, b = b, a\r\n c = np.sqrt(a**2 - b**2)\r\n return fdiv(c, a)", "def ellipse(self):\n f = self.img\n x = self.x\n y = self.y\n x2 ...
[ "0.7989176", "0.6924032", "0.68412817", "0.68127716", "0.6727333", "0.66740966", "0.6610501", "0.6582428", "0.6562776", "0.65454847", "0.6515111", "0.6455083", "0.6424359", "0.6401664", "0.6375693", "0.6350507", "0.6339286", "0.6323839", "0.62589574", "0.6257008", "0.6223934"...
0.6321363
18
Finds the convex hull of a point set by checking a curve for convexity defects and corrects it.
def __CalculateConvexHull(self, contour):
    """Return the convex hull of the contour points (cv2.convexHull)."""
    return cv2.convexHull(contour)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def convex_hull(points):\n\n # Sort the points lexicographically (tuples are compared lexicographically).\n # Remove duplicates to detect the case we have just one unique point.\n points = sorted(set(points))\n\n # Boring case: no points or a single point, possibly repeated multiple times.\n if len(...
[ "0.7381319", "0.7166372", "0.71521324", "0.69881785", "0.69819653", "0.69719875", "0.69375825", "0.68497926", "0.6776819", "0.66407245", "0.6561083", "0.6464603", "0.6462413", "0.6459737", "0.64563394", "0.64457273", "0.6220267", "0.6213466", "0.62115705", "0.61951804", "0.61...
0.5578606
48
Fit an ellipse to an object. It returns the rotated rectangle in which the ellipse is inscribed.
def __CalculateEllipse(self, contour): if len(contour) > 5: return cv2.fitEllipse(contour) return cv2.minAreaRect(contour)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fit_ellipse(x,y):\r\n \r\n def fit(x,y):\r\n x = x[:,np.newaxis]\r\n y = y[:,np.newaxis]\r\n D = np.hstack((x*x, x*y, y*y, x, y, np.ones_like(x)))\r\n S = np.dot(D.T,D)\r\n C = np.zeros([6,6])\r\n C[0,2] = C[2,0] = 2; C[1,1] = -1\r\n E, V = np.linalg.eig...
[ "0.72764295", "0.65389484", "0.6258115", "0.6085874", "0.6078846", "0.59491014", "0.5820374", "0.5784566", "0.561725", "0.55927783", "0.5559429", "0.5536257", "0.5470644", "0.5456046", "0.54508764", "0.54230434", "0.5402299", "0.53830856", "0.5359062", "0.53571004", "0.535605...
0.5605659
9
Calculate the countour extend.
def __CalculateExtend(self, contour): area = self.__CalculateArea(contour) boundingBox = self.__CalculateBoundingBox(contour) return area / (boundingBox[2] * boundingBox[3])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def life_insurance_to_recive_total(self):\n pass", "def extendability(self):\n self._extendability = 0.50 * self.ANA - 0.50 * self.DCC + 0.50 * self.MFA + 0.50 * self.NOP\n return round(self._extendability, 5)", "def calculate(self):", "def calculate(self):\n pass", "def update_...
[ "0.56703246", "0.5525739", "0.5475888", "0.5367172", "0.5338628", "0.53284657", "0.53263986", "0.53008175", "0.529843", "0.5273235", "0.52313775", "0.5210509", "0.5181706", "0.51630366", "0.5138338", "0.51348615", "0.50815606", "0.50513184", "0.504204", "0.50351846", "0.50212...
0.6228183
0
Check if a curve is convex or not.
def __IsConvex(self, contour): return cv2.isContourConvex(contour)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def convex(self):\n # Convex has positive curvature (2nd derivative)\n # f\"(x) = 2a, so a > 0 corresponds to convex\n return (self.a > 0)", "def _check_curve(layer: ogr.Layer) -> None:\n # Check if the feature geometry is polygonal:\n feature_defn = layer.GetLayerDefn()\n ...
[ "0.7643623", "0.6757536", "0.67146444", "0.65801567", "0.6464188", "0.6454179", "0.635574", "0.62782335", "0.61885536", "0.61603767", "0.6148129", "0.6033672", "0.60245943", "0.590163", "0.580078", "0.5784255", "0.57838607", "0.5783847", "0.5773544", "0.57301515", "0.5673152"...
0.73451805
1
Calculate a contour perimeter or a curve length.
def __CalculateLength(self, curve): return cv2.arcLength(curve, True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __CalculatePerimeter(self, curve):\r\n return cv2.arcLength(curve, True)", "def perimeter(cnt):\n\treturn cv2.arcLength(cnt, True)", "def perimeter(self):\n return self.sidelength1 + self.sidelength2 + self.baselength1 + self.baselength2", "def perimeter(self):\n return sum([s.length for...
[ "0.7951354", "0.72823113", "0.69471735", "0.68977535", "0.6630319", "0.65940684", "0.65453714", "0.65153366", "0.6505985", "0.64768934", "0.6449339", "0.6431239", "0.6404429", "0.63940036", "0.6379212", "0.6343764", "0.6341372", "0.6341372", "0.63238937", "0.6322581", "0.6228...
0.66685665
4
Calculate the contour moments to help you to calculate some features like center of mass of the object, area of the object etc.
def __CalculateMoments(self, contour): return cv2.moments(contour)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def moments(cnt):\n\treturn cv2.moments(cnt)", "def moments(cnt):\n\treturn cv2.moments(cnt)", "def moments(self):", "def moments(data):\n# =============================================================================\n# total = data.sum()\n# X, Y = np.indices(data.shape)\n# x = (X*da...
[ "0.6727947", "0.6727947", "0.66949683", "0.65000355", "0.6481608", "0.63708127", "0.6356746", "0.6351715", "0.6243697", "0.61840034", "0.6168322", "0.6164202", "0.6142851", "0.6103647", "0.60443234", "0.6014186", "0.6013817", "0.5995825", "0.59587353", "0.59543926", "0.595082...
0.77274
0
Calculates a contour perimeter or a curve length.
def __CalculatePerimeter(self, curve): return cv2.arcLength(curve, True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def perimeter(cnt):\n\treturn cv2.arcLength(cnt, True)", "def perimeter(self):\n return self.sidelength1 + self.sidelength2 + self.baselength1 + self.baselength2", "def perimeter(self):\n return sum([s.length for s in self.segments])", "def perimeter(self):", "def __CalculateLength(self, curve):\...
[ "0.721825", "0.69289505", "0.6759035", "0.6589213", "0.6564522", "0.65498984", "0.6483492", "0.6478432", "0.6431225", "0.6417622", "0.6385283", "0.6373984", "0.63636816", "0.636077", "0.6348714", "0.6287157", "0.62746215", "0.6269028", "0.6236131", "0.6204229", "0.6204229", ...
0.7875777
0
Calculate the rotated rectangle as a Box2D structure which contains
def __CalculateRotatedBox(self, contour): rectangle = cv2.minAreaRect(contour) box = cv2.boxPoints(rectangle) return np.int0(box)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _apply_rotation(self, rectangle):\n if self._rotation == 90:\n return Rectangle(\n self._height - rectangle.y2,\n rectangle.x1,\n self._height - rectangle.y1,\n rectangle.x2,\n )\n if self._rotation == 180:\n ...
[ "0.7022365", "0.667482", "0.64325494", "0.641342", "0.6358206", "0.6105708", "0.60524374", "0.604659", "0.604289", "0.602959", "0.60229516", "0.6003215", "0.5952361", "0.58678347", "0.58623916", "0.5842129", "0.5837796", "0.58315367", "0.5814212", "0.5811554", "0.58072996", ...
0.6074911
6
Converts UTM coordinates into latitude/longitude. assumes rows are easting, northing, zone number, either 'N' for northern hemisphere or 'S' for southern hemisphere
def utm_to_latlong(input_data_file=None, output_data_file=None, log_file=None, log_level=DEFAULT_LOG_LEVEL): logger = logger_message(__name__, log_file, log_level) # Check required input and output data file names were given. assert input_data_file is not None, 'An input CSV file with columns of values.' assert output_data_file is not None, 'An output CSV file to write new values.' _in = open(input_data_file, 'r') try: _out = open(output_data_file, 'w') try: data = csv.reader(_in) output = csv.writer(_out) for row_ind, row in enumerate(data): east = float(row[0]) north = float(row[1]) zone = int(row[2]) latlong = utm.to_latlon(east, north, zone, northern=('N' == row[3])) logger.info('Changed row {} from: {} to: {}'.format(row_ind, (row[0], row[1]), latlong)) output.writerow(latlong) finally: _out.close() finally: _in.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_pseudo_epsg4326_coordinates(self):\n self.create_3d_coord_on_sphere(on_sphere=True)\n self.df_attributes['lat'] = 180*(pi/2 - np.arccos(self.df_attributes['coord_z']))/pi\n self.df_attributes['lon'] = 180*np.arctan2(self.df_attributes['coord_y'], self.df_attributes['coord_x'])/pi", ...
[ "0.6428161", "0.63630664", "0.63455874", "0.6290571", "0.62546146", "0.62182456", "0.6173703", "0.616266", "0.61362135", "0.6058876", "0.60567707", "0.6025662", "0.5944653", "0.5914385", "0.5914179", "0.5903728", "0.5840953", "0.5805078", "0.57799006", "0.5759482", "0.5756983...
0.5855182
16
Adds a shortcut between input and residual block and merges them with "sum"
def _shortcut(input, residual): # Expand channels of shortcut to match residual. # Stride appropriately to match residual (width, height) # Should be int if network architecture is correctly configured. input_shape = K.int_shape(input) residual_shape = K.int_shape(residual) stride_width = int(round(input_shape[2] / residual_shape[2])) stride_height = int(round(input_shape[3] / residual_shape[3])) stride_depth = int(round(input_shape[4] / residual_shape[4])) equal_channels = input_shape[1] == residual_shape[1] shortcut = input # 1 X 1 conv if shape is different. Else identity. if stride_width > 1 or stride_height > 1 or stride_depth > 1 or not equal_channels: shortcut = Convolution3D(residual_shape[1], 1, 1, 1, subsample=(stride_width, stride_height, stride_depth), border_mode = "valid", init="he_normal",W_regularizer=l2(0.0001))(input) return shortcut
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def residual_block(layer, filters):\n shortcut = layer\n layer = Conv2D(filters=filters, kernel_size=(3, 3),\n strides=(1, 1), padding=\"same\")(layer)\n layer = LeakyReLU(alpha=LEAKY_RELU_ALPHA)(layer)\n layer = Conv2D(filters=filters, kernel_s...
[ "0.6083529", "0.5894693", "0.5735994", "0.56371546", "0.5619291", "0.5514519", "0.5502387", "0.5502087", "0.54550755", "0.54191214", "0.54108816", "0.5387289", "0.53559756", "0.5333165", "0.53286403", "0.5305475", "0.5285419", "0.5280931", "0.5280322", "0.52757835", "0.526515...
0.515336
25
Patching out the functions in CrimeIncidentsIntent that use requests.get
def setUp(self): super().setUp() self.get_crime_incident_response = \ mock.patch( ('mycity.intents.crime_activity_intent.' 'get_crime_incident_response'), return_value=test_constants.GET_CRIME_INCIDENTS_API_MOCK) self.get_crime_incident_response.start() response = self.controller.on_intent(self.request) for record in MOCK_RESPONSE[RESULT][RECORDS]: self.assertIn(record[STREET], response.output_speech)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _external_request(self, method, url, *args, **kwargs):\n self.last_url = url\n if url in self.responses.keys() and method == 'get':\n return self.responses[url] # return from cache if its there\n\n headers = kwargs.pop('headers', None)\n custom = {'User-Agent': useragent}...
[ "0.6091576", "0.5723631", "0.5565101", "0.5542613", "0.55409104", "0.55404246", "0.5505588", "0.5505458", "0.54588634", "0.5443315", "0.5433013", "0.5402714", "0.5384366", "0.5381944", "0.5375504", "0.5370188", "0.5344704", "0.5320391", "0.5319966", "0.531785", "0.5312466", ...
0.0
-1
Update the ElasticSearch index every hour.
def update_es_index(): for job in scheduler.get_jobs(): if 'task_type' in job.meta and job.meta['task_type'] == "update_index": scheduler.cancel(job) scheduler.schedule( scheduled_time=datetime.now(), func='haystack.management.commands.update_index.Command().handle()', interval=60 * 60, repeat=None, ) for job in scheduler.get_jobs(): index_job = job if index_job.func_name == 'haystack.management.commands.update_index.Command().handle()': break index_job.meta['task_type'] = "update_index" index_job.save()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index_later(self):\n return", "def update_time(cls, key):\n key.put()", "def every_hour(self, time, function, args=None, kwargs=None, name=None):\n if args is None:\n args = list()\n if kwargs is None:\n kwargs = dict()\n if name is None:\n ...
[ "0.59411937", "0.57995015", "0.56473935", "0.5602451", "0.558092", "0.551876", "0.5453459", "0.5422525", "0.54206413", "0.54206413", "0.5394929", "0.5394929", "0.53662395", "0.53635", "0.53479695", "0.53402996", "0.532217", "0.53142494", "0.5309737", "0.5285711", "0.52673167"...
0.6486948
0
JavaProcess.__init__(self, class_loc, args=[]) Initializes an external Java process.
def __init__(self, config, class_loc, args=[]): JavaProcess.config = JavaProcessConfig.configFrom_dict(config) self._cp = self._construct_classpath_str() self.class_loc = class_loc self.args = args self._process = None self._stdout = None self._stderr = None LOG.debug("JavaProcess constructed for %s", self.class_loc) return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, paths):\n Process.__init__(self)\n self.paths = paths", "def __init__(self, host=\"\", port=8432):\n Process.__init__(self)\n self.host, self.port = host, port\n self._Handler.annotator = self", "def __init__(self):\n self._recording = None\n ...
[ "0.6975941", "0.6958235", "0.67394876", "0.6490618", "0.6321262", "0.6161817", "0.612476", "0.6118696", "0.6096931", "0.6078772", "0.60693926", "0.60651165", "0.60423166", "0.60343724", "0.6023563", "0.600156", "0.5997967", "0.5996555", "0.5996109", "0.5992397", "0.59232205",...
0.8258955
0
we will do a bubble sort on the list and then get the 2nd element to the last
def second_largest(number_list): for i in range(len(number_list)): for j in range(len(number_list) - 1 - i): if number_list[j] > number_list[j+1]: number_list[j + 1], number_list[j] = number_list[j], number_list[j+1] return number_list[-2]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bubbleSort(list):", "def bubble_sort(list):\n for i in range(1, len(list) - 1):\n for j in range(len(list) - 1, i-1, -1):\n if list[j - 1] > list[j]:\n x = list[j]\n list[j] = list[j - 1]\n list[j - 1] = x\n return list", "def bubbleSort(...
[ "0.77857566", "0.7745999", "0.75933444", "0.7433549", "0.7385789", "0.73774844", "0.736563", "0.7351545", "0.73503095", "0.73399705", "0.7298297", "0.7254225", "0.7216236", "0.7211703", "0.7194435", "0.7183924", "0.71444356", "0.71343726", "0.7121137", "0.7121137", "0.7100498...
0.6551871
56
slick solution in python ONLY zeros = [0 for i in range(zeros_and_ones.count(0))] ones = [1 for j in range(zeros_and_ones.count(1))] return zeros + ones
def zeros_before_ones(zeros_and_ones): index_i = 0 last_index = len(zeros_and_ones) - 1 while index_i < last_index: if zeros_and_ones[index_i] == 1 and zeros_and_ones[last_index] == 0: zeros_and_ones[index_i], zeros_and_ones[last_index] = zeros_and_ones[last_index], zeros_and_ones[index_i] index_i += 1 last_index -= 1 # print(zeros_and_ones) # TODO: NEEDS IMPROVEMENTS! zeros_and_ones
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solution2(n):\n ones = 0\n while n > 0:\n if n & 1:\n ones += 1\n n = n >> 1\n\n return 0 if ones % 2 == 0 else 1", "def c(ixs):\n return sum(range(1, sum((i > 0 for i in ixs)) + 1))", "def _iter_restrict(self, zeros, ones):\n inputs = list(self.inputs)\n ...
[ "0.6483679", "0.624784", "0.61705774", "0.610907", "0.6090902", "0.6031265", "0.59823155", "0.59540856", "0.59308344", "0.59170455", "0.5842495", "0.5830201", "0.5826083", "0.5814207", "0.578468", "0.57763344", "0.5771295", "0.57675946", "0.5759372", "0.5745411", "0.5724746",...
0.6798596
0
Implements a descending exponential cooling scheme. Temperature becomes zero after all iterations have been passed.
def update_temperature(self): self.iteration += 1 self.T = self.T0 * 0.9935**self.iteration
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def constant_temp(self, numIterations):\n return 1 + self.alpha", "def _etaE_cool(self,x):\n return self._eta_sfr_scaling(x,'E_cool')", "def _etaE(self,x):\n return self._etaE_cool(x) + self._etaE_hot(x)", "def sim_alternating_exp():\n catches = 0\n for _ in range(100000):\n ...
[ "0.65226835", "0.63014704", "0.613545", "0.6096842", "0.60904115", "0.6022001", "0.59910226", "0.5871977", "0.58546734", "0.5735474", "0.5727299", "0.5672892", "0.5629865", "0.560521", "0.55958956", "0.558111", "0.5577392", "0.5571009", "0.5570365", "0.5565791", "0.5564864", ...
0.5585291
15
Calculates new and old K. Checks and accepts better solutions than the current solution. Also sometimes accepts solutions that are worse, depending on the current temperature.
def check_solution(self, potential_solution): old_k = self.K new_k = potential_solution.set_K(self.len_connections) # calculate the probability of accepting this solution delta = new_k - old_k if delta >= 0: probability = 1 probability = np.exp(delta / self.T) # pull a random number between 0 and 1 and see if we accept the solution if random.random() < probability: self.column1.append(self.iteration) self.column2.append(new_k) self.state = potential_solution self.K = new_k # save progress to a csv file with open('annealing.csv', 'w', newline='') as csv_file: fieldnames = ['Iterations', 'K'] writer = csv.DictWriter(csv_file, fieldnames=fieldnames) writer.writeheader() for i, j in zip(self.column1, self.column2): writer.writerow({'Iterations': i, 'K': j}) # update the temperature self.update_temperature()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calc_k(self):\n\t\n\tself.k = -np.array([self.sth*self.cphi, self.sth*self.sphi, self.cth])\n\n\treturn", "def obrien_fleming_cutoff(K, current_k, alpha):\n\n if not isinstance(K, int) or K < 1 or K > 10:\n raise ValueError('K must be an integer between 1 and 10.')\n\n if not isinstance(current_...
[ "0.6411429", "0.61664015", "0.60879153", "0.59830314", "0.58720684", "0.5814098", "0.58111423", "0.58111423", "0.577404", "0.5765074", "0.5746197", "0.57347727", "0.5694414", "0.5672527", "0.5637371", "0.5624423", "0.56225246", "0.5611478", "0.5611203", "0.5557416", "0.555590...
0.0
-1
Warn the user that running with nipy being imported locally is a bad idea.
def _test_local_install(): if os.getcwd() == os.sep.join( os.path.abspath(__file__).split(os.sep)[:-2]): import warnings warnings.warn('Running the tests from the install directory may ' 'trigger some failures')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_nuke():\n try:\n import _nuke\n return True\n except ImportError:\n return False", "def provoke_and_handle_ImportError():\n try:\n from you import me\n except ImportError as impe:\n print(f\"Sorry! {impe}\")", "def unavailable_importer(**kwargs):\n retur...
[ "0.59475875", "0.5733747", "0.5700879", "0.56831914", "0.56009203", "0.5547987", "0.54913825", "0.5431076", "0.5426784", "0.53830874", "0.53597766", "0.53400105", "0.53313553", "0.53298444", "0.52693295", "0.5264683", "0.5247983", "0.5241664", "0.5224841", "0.5179244", "0.517...
0.5077109
29
Tire l'objectif d'une partie de chiffre.
def tire_objectif() -> int: return random.randint(min_objectif, max_objectif)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def object(self):", "def test_partie(joueur1: object,\n joueur2: object,\n tableau_invisible_joueur1: list, tableau_invisible_joueur2: list\n ):\n print(\"plateau du joueur 2 : \\n\")\n\n tour_de_jeu(joueur1, joueur2, tableau_invisible_joueur2)\n\n rafraichir...
[ "0.6027109", "0.5971558", "0.58855444", "0.58450836", "0.5830027", "0.58068246", "0.5640876", "0.55412936", "0.55015695", "0.5419981", "0.53639406", "0.53315765", "0.5311751", "0.5300423", "0.5263753", "0.5260159", "0.52363807", "0.5175996", "0.51674426", "0.5153223", "0.5149...
0.0
-1
Saves table's data source to given path
def save(self, export_path: str):
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save(file, table):\n pq.write_table(pa.Table.from_pandas(table), file)", "def save(self):\r\n debug.write(\"[SourceRPG] Handling SQL Save\", 1)\r\n if self.path != \":memory:\":\r\n debug.write(\"Path is not in memory\", 2, False)\r\n if currentTurboMode is False:\r\n ...
[ "0.6399908", "0.6323182", "0.61869615", "0.6044013", "0.6044013", "0.6044013", "0.59645635", "0.59101415", "0.58439785", "0.5820229", "0.58036727", "0.57980114", "0.5796495", "0.5790788", "0.57840085", "0.57607806", "0.57270277", "0.57178634", "0.570963", "0.570963", "0.56416...
0.582292
9
Registers all the JRPC overloaders in the jrpc server
def register_overloaders(jrpc_server: JRPCServer, receiver) -> None: jrpc_server.register_overloader( 'Application.GetProperties', lambda server: GetPropertiesOverloader(server, receiver)) jrpc_server.register_overloader( 'Application.SetMute', lambda server: SetMuteOverloader(receiver)) jrpc_server.register_overloader( 'Application.SetVolume', lambda server: SetVolumeOverloader(receiver)) jrpc_server.register_overloader( 'Application.Quit', lambda server: ApplicationQuitOverloader(receiver)) jrpc_server.register_overloader( 'System.Hibernate', lambda server: ApplicationQuitOverloader(receiver)) jrpc_server.register_overloader( 'System.Shutdown', lambda server: ApplicationQuitOverloader(receiver)) jrpc_server.register_overloader( 'System.Suspend', lambda server: ApplicationQuitOverloader(receiver)) jrpc_server.register_overloader( 'System.GetProperties', lambda server: SystemPropertiesOverloader())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def register_rpc_proxies(self):\n for rpc_name in self.rpc_proxy_list:\n logger.debug('Registering RPC to Proxy: {}'.format(rpc_name))\n\n class RPCProxy:\n\n def __init__(self, local_session, rpc_name):\n self._local_session = local_session\n ...
[ "0.5887183", "0.53439647", "0.5303127", "0.5229928", "0.5181709", "0.51379925", "0.5042705", "0.5018712", "0.50179935", "0.5007511", "0.50063205", "0.50051254", "0.49503332", "0.4928616", "0.4922325", "0.4869519", "0.4867418", "0.48619875", "0.4855552", "0.4855092", "0.484246...
0.78924644
0