query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Adapted from nicholls_turton.ipynb sst, sea surface temperature (K) ft_qv, mixedlayer top qv (kg kg^1) use_NT, True or False
def calc_equil(sst, ft_qv, use_NT=False): run_main(sst, ft_qv, use_NT) # grab csv file with open('dumpmodel.csv','r') as f: df_result=pd.read_csv(f) # last time step into named tupple out=df_result.iloc[-1] steady_state=make_tuple(out.to_dict()) steady_state # obtain steady-state values dth=steady_state.deltheta dqt=steady_state.delqv thetal_m=steady_state.theta qt_m=steady_state.qv h=steady_state.h press=tf.find_press(steady_state.h) #kPa thetal_ft = steady_state.theta + dth qt_ft = steady_state.qv + dqt zb = steady_state.LCL zi = steady_state.h we = steady_state.went # calculate thetal at z = 3000 m (take qt(z = 3000m) = qt(z = h), so delta_qt = dqt) gamma = 6e-3 thetal_3000 = thetal_ft + gamma*(3000-h) LTS = thetal_3000 - steady_state.theta # calculate delta_Fr delta_Frstar = 82.0 # Wm^-2 Frlambda = 7.9 # Wm^-2, using with CTL from Gesso delta_Fr = delta_Frstar - Frlambda*qt_ft*1000 # convert qt_ft to g kg^-1 # calculate LWP rho = 1. LWP = 0.5*rho*(zi-zb)**2 # put all required variables into output array out_array = np.array([thetal_m, qt_m, zi, zb, we, LWP, delta_Fr, LTS, dqt]) return out_array
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_main(sst, ft_qv, use_NT):\n\n dtout=10. #minutes\n end_time=8*24. #hours\n del_time=dtout*60. #seconds\n end_time=end_time*3600. #seconds\n #sst=297\n D=5.e-6 #s-1\n U=7 #m/s\n psfc=100. #kPa\n qsfc=tf.qs_tp(sst,psfc)\n ft_intercept = 292 #K\n ft_gamma = 6.e-3 #K/m\n ...
[ "0.60827553", "0.5971266", "0.57040036", "0.5694853", "0.56882703", "0.5666581", "0.5637644", "0.5630418", "0.56049395", "0.55450636", "0.54949796", "0.5493195", "0.54648805", "0.54205227", "0.5363305", "0.53159505", "0.5269913", "0.52530104", "0.524438", "0.52441275", "0.521...
0.65542036
0
Send an event to sensu via pysensu_yelp with the given information.
def send_event(instance_config, status, output): # This function assumes the input is a string like "mumble.main" monitoring_overrides = instance_config.get_monitoring() if 'alert_after' not in monitoring_overrides: monitoring_overrides['alert_after'] = '2m' monitoring_overrides['check_every'] = '1m' monitoring_overrides['runbook'] = monitoring_tools.get_runbook( monitoring_overrides, instance_config.service, soa_dir=instance_config.soa_dir, ) check_name = ( 'check_marathon_services_replication.%s' % instance_config.job_id ) monitoring_tools.send_event( service=instance_config.service, check_name=check_name, overrides=monitoring_overrides, status=status, output=output, soa_dir=instance_config.soa_dir, cluster=instance_config.cluster, ) _log( service=instance_config.service, line='Replication: %s' % output, component='monitoring', level='debug', cluster=instance_config.cluster, instance=instance_config.instance, )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def command_where(self, bot, update):\n\n bot.sendChatAction(update.message.chat_id, action='typing')\n\n foursquare = ext.get_foursquare_location(self.config['foursquare'])\n venue = foursquare['venue']\n location = venue['location']\n\n msg = 'Myles Braithwaite checked in to *{...
[ "0.55040216", "0.5488861", "0.54437023", "0.5373623", "0.5324502", "0.5313765", "0.5313765", "0.523546", "0.5173672", "0.5173648", "0.5173168", "0.5152679", "0.51510435", "0.5126581", "0.5120982", "0.5093802", "0.50590044", "0.50430846", "0.50288856", "0.5026668", "0.5018201"...
0.0
-1
Check a set of namespaces to see if their number of available backends is too low, emitting events to Sensu based on the fraction available and the thresholds defined in the corresponding yelpsoa config.
def check_smartstack_replication_for_instance( instance_config, expected_count, smartstack_replication_checker, ): crit_threshold = instance_config.get_replication_crit_percentage() log.info('Checking instance %s in smartstack', instance_config.job_id) smartstack_replication_info = \ smartstack_replication_checker.get_replication_for_instance(instance_config) log.debug('Got smartstack replication info for %s: %s' % (instance_config.job_id, smartstack_replication_info)) if len(smartstack_replication_info) == 0: status = pysensu_yelp.Status.CRITICAL output = ( 'Service %s has no Smartstack replication info. Make sure the discover key in your smartstack.yaml ' 'is valid!\n' ) % instance_config.job_id log.error(output) else: expected_count_per_location = int(expected_count / len(smartstack_replication_info)) output = '' output_critical = '' output_ok = '' under_replication_per_location = [] for location, available_backends in sorted(smartstack_replication_info.items()): num_available_in_location = available_backends.get(instance_config.job_id, 0) under_replicated, ratio = is_under_replicated( num_available_in_location, expected_count_per_location, crit_threshold, ) if under_replicated: output_critical += '- Service %s has %d out of %d expected instances in %s (CRITICAL: %d%%)\n' % ( instance_config.job_id, num_available_in_location, expected_count_per_location, location, ratio, ) else: output_ok += '- Service %s has %d out of %d expected instances in %s (OK: %d%%)\n' % ( instance_config.job_id, num_available_in_location, expected_count_per_location, location, ratio, ) under_replication_per_location.append(under_replicated) output += output_critical if output_critical and output_ok: output += '\n\n' output += 'The following locations are OK:\n' output += output_ok if any(under_replication_per_location): status = pysensu_yelp.Status.CRITICAL output += ( "\n\n" "What this alert means:\n" "\n" " This replication alert means that a SmartStack powered loadbalancer 
(haproxy)\n" " doesn't have enough healthy backends. Not having enough healthy backends\n" " means that clients of that service will get 503s (http) or connection refused\n" " (tcp) when trying to connect to it.\n" "\n" "Reasons this might be happening:\n" "\n" " The service may simply not have enough copies or it could simply be\n" " unhealthy in that location. There also may not be enough resources\n" " in the cluster to support the requested instance count.\n" "\n" "Things you can do:\n" "\n" " * You can view the logs for the job with:\n" " paasta logs -s %(service)s -i %(instance)s -c %(cluster)s\n" "\n" " * Fix the cause of the unhealthy service. Try running:\n" "\n" " paasta status -s %(service)s -i %(instance)s -c %(cluster)s -vv\n" "\n" " * Widen SmartStack discovery settings\n" " * Increase the instance count\n" "\n" ) % { 'service': instance_config.service, 'instance': instance_config.instance, 'cluster': instance_config.cluster, } log.error(output) else: status = pysensu_yelp.Status.OK log.info(output) send_event(instance_config=instance_config, status=status, output=output)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check(self):\n BadNamespaces = list()\n\n for namespace in pm.listNamespaces():\n BadNamespaces.append(namespace)\n\n if not BadNamespaces:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = namespace\n ...
[ "0.51222503", "0.51177275", "0.5094526", "0.50608486", "0.49589232", "0.49122682", "0.4861849", "0.48579317", "0.48054186", "0.47776112", "0.47486487", "0.47325936", "0.46939585", "0.4686834", "0.4660218", "0.45691466", "0.4541956", "0.45377165", "0.4520044", "0.45192608", "0...
0.0
-1
Checks a service's replication levels based on how the service's replication should be monitored. (smartstack or mesos)
def check_service_replication( instance_config, all_tasks, smartstack_replication_checker, ): expected_count = instance_config.get_instances() log.info("Expecting %d total tasks for %s" % (expected_count, instance_config.job_id)) proxy_port = marathon_tools.get_proxy_port_for_instance( name=instance_config.service, instance=instance_config.instance, cluster=instance_config.cluster, soa_dir=instance_config.soa_dir, ) registrations = instance_config.get_registrations() # if the primary registration does not match the service_instance name then # the best we can do is check marathon for replication (for now). if proxy_port is not None and registrations[0] == instance_config.job_id: check_smartstack_replication_for_instance( instance_config=instance_config, expected_count=expected_count, smartstack_replication_checker=smartstack_replication_checker, ) else: check_healthy_marathon_tasks_for_service_instance( instance_config=instance_config, expected_count=expected_count, all_tasks=all_tasks, )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mmo_configsrv_replication_status(self, mmo_connection):\n replication_state = []\n if self.mmo_is_mongos(mmo_connection):\n configsrv = self.mmo_config_servers(mmo_connection)[0]\n auth_dic = self.mmo_get_auth_details_from_connection(mmo_connection)\n c = self.mmo...
[ "0.59834796", "0.5918712", "0.537204", "0.5249215", "0.52178967", "0.5167108", "0.5149234", "0.5141773", "0.5053718", "0.50021493", "0.49990278", "0.49700785", "0.48497504", "0.4836432", "0.48359025", "0.47858948", "0.4753289", "0.47522944", "0.4747612", "0.47456878", "0.4742...
0.59363264
1
Finds shortest path from start node to end node on weighted graph using Dijkstra's algorithm. Returns list of nodes from start to end making up the shortest route
def shortest_path(graph, start, end): nodes_to_visit = {start} visited_nodes = set() # Distance from start to start is 0 distance_from_start = {start: 0} predecessors = {} # Store previous node for shortest route for each node while nodes_to_visit: # Get node with smallest weight current = min( [(distance_from_start[node], node) for node in nodes_to_visit] )[1] # If the end is reached, quit if current == end: break nodes_to_visit.discard(current) visited_nodes.add(current) edges = graph[current] unvisited_neighbours = set(edges).difference(visited_nodes) for neighbour in unvisited_neighbours: neighbour_distance = distance_from_start[current] + \ edges[neighbour] if neighbour_distance < distance_from_start.get(neighbour, float('inf')): distance_from_start[neighbour] = neighbour_distance predecessors[neighbour] = current nodes_to_visit.add(neighbour) return _deconstruct_path(predecessors, end)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shortestpaths(self, start, end, edgeweight=\"t_0\"):\n graph = self.graph\n shortest_nodepaths = list(\n nx.all_shortest_paths(\n graph, start, end, weight=edgeweight, method=\"dijkstra\"\n )\n )\n shortest_paths = []\n for path in shortes...
[ "0.858256", "0.7994935", "0.7979896", "0.7651095", "0.75588065", "0.7505487", "0.7493066", "0.7488095", "0.7451653", "0.74178004", "0.73748696", "0.7356794", "0.726981", "0.7252933", "0.7238483", "0.72201383", "0.7189133", "0.71680886", "0.71166384", "0.7039121", "0.70280594"...
0.7826668
3
Traverses backwards through predecessors from end
def _deconstruct_path(predecessors, end): if end not in predecessors: return None current = end path = [] while current: path.append(current) current = predecessors.get(current) return list(reversed(path))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __backward(self):\n if self.is_empty():\n raise StopIteration\n\n current = self._tail\n yield current._data\n while current._previ:\n current = current._previ\n yield current._data", "def reverse_iterative(self):\n \"\"\"O(n) / O(1) solutio...
[ "0.69886434", "0.69002724", "0.6687282", "0.659495", "0.6564393", "0.655781", "0.6412653", "0.63977957", "0.63977957", "0.63977957", "0.6395437", "0.63730013", "0.6367989", "0.6310967", "0.6306441", "0.6306441", "0.6306441", "0.63061464", "0.62905055", "0.6285904", "0.6272839...
0.7261165
0
Recursively creates Level_Pair nodes from start up to end. Assumes that end's attack and strength are greater than start's. Neighbours for a node are stored in graph[node]. Distances between neighbours are stored in graph[nodeA][nodeB].
def populate_graph( graph, start, end, attack_bonus, strength_bonus): # Check if already created if start in graph: return graph[start] = dict() # Recursively create neighbouring level pairs if start.attack < end.attack: inc_attack = Level_Pair(start.attack + 1, start.strength) # Store level-up time graph[start][inc_attack] = level_time_average( start, Attack_Style.ATTACK, attack_bonus, strength_bonus) # Continue at next node populate_graph(graph, inc_attack, end, attack_bonus, strength_bonus) if start.strength < end.strength: inc_strength = Level_Pair(start.attack, start.strength + 1) # Store level-up time graph[start][inc_strength] = level_time_average( start, Attack_Style.STRENGTH, attack_bonus, strength_bonus) # Continue at next node populate_graph(graph, inc_strength, end, attack_bonus, strength_bonus)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def astar_map(map, start, end):\n\n # Create start and end node\n start_node = Node3(None, start)\n start_node.g = start_node.h = start_node.f = 0\n end_node = Node3(None, end)\n end_node.g = end_node.h = end_node.f = 0\n\n # Initialize both open and closed list\n open_list = []\n closed_li...
[ "0.58267266", "0.5793475", "0.56011593", "0.55371314", "0.55277735", "0.55161214", "0.55161214", "0.55161214", "0.54637635", "0.5456182", "0.5430679", "0.5403021", "0.5386815", "0.53318155", "0.5256505", "0.5214026", "0.5162665", "0.51556855", "0.5142307", "0.5126955", "0.508...
0.7375074
0
Runs simulations to determine time (ticks) to level up attack or strength Enemy is set as sand crab (60hp, 1 def, 0 def bonus) Weapon is best available scimitar
def level_time_simulate(start_levels, attack_style, attack_bonus, strength_bonus): ticks_per_attack = 4 # Scimitar attack speed enemy_health = 60 # Sand crab health max_hit, accuracy = get_max_hit_and_accuracy( start_levels, attack_style, attack_bonus, strength_bonus) if attack_style == Attack_Style.ATTACK: start_exp = osrs.experience[start_levels.attack] end_exp = osrs.experience[start_levels.attack+1] elif attack_style == Attack_Style.STRENGTH: start_exp = osrs.experience[start_levels.strength] end_exp = osrs.experience[start_levels.strength+1] experience = end_exp - start_exp avg_ticks = combat_simulator.ticks_until_exp(max_hit, accuracy, ticks_per_attack, enemy_health, experience, osrs.BASE_EXP_PER_DAMAGE, ITERATIONS) return avg_ticks
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testrandom(self):\n for i in range(100):\n WeaponAbility()", "def scenario1(height, speed):\n time = math.sqrt((2 * height) / 9.81)\n result = speed * time\n return result", "def attack(health_meter):\n hit_list = 4 * ['igrac'] + 6 * ['neprijatelj']\n injured_unit = random....
[ "0.6155685", "0.6128568", "0.61060804", "0.6067592", "0.6018623", "0.5902906", "0.5877961", "0.5872772", "0.5862628", "0.58594483", "0.57941294", "0.5765011", "0.57348096", "0.5728712", "0.57282984", "0.5714876", "0.5714854", "0.5682435", "0.56775427", "0.5648126", "0.5637001...
0.6836062
0
Uses average damage to determine time (ticks) to level up attack or strength Enemy has 1 def and 0 def bonus Weapon is best available scimitar
def level_time_average(start_levels, attack_style, attack_bonus, strength_bonus): ticks_per_attack = 4 # Scimitar attack speed max_hit, accuracy = get_max_hit_and_accuracy( start_levels, attack_style, attack_bonus, strength_bonus) if attack_style == Attack_Style.ATTACK: start_exp = osrs.experience[start_levels.attack] end_exp = osrs.experience[start_levels.attack+1] elif attack_style == Attack_Style.STRENGTH: start_exp = osrs.experience[start_levels.strength] end_exp = osrs.experience[start_levels.strength+1] experience = end_exp - start_exp avg_hit = accuracy * max_hit / 2 exp_per_hit = avg_hit * osrs.BASE_EXP_PER_DAMAGE ticks = experience / exp_per_hit * ticks_per_attack return ticks
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def attack(self):\n # TODO: Use integer division to find half of the max_damage value\n # then return a random integer between\n # half of max_damage and max_damage\n print(\"max damage of \" + self.name + \" is \")\n print(str(self.attack_strength))\n min_damage = self.at...
[ "0.6950379", "0.6850593", "0.68326175", "0.6792282", "0.6762298", "0.6743478", "0.6738781", "0.6719024", "0.6649889", "0.66238135", "0.6622487", "0.6583088", "0.65515465", "0.6544407", "0.65372705", "0.6528842", "0.65219796", "0.6520335", "0.6516249", "0.64586735", "0.6457608...
0.6319719
35
Returns tuple of the form, (max_hit, accuracy), for the given levels after factoring in the weapons available and the selected attack style. Assumes enemy has level 1 defence and 0 defence bonus
def get_max_hit_and_accuracy( levels, attack_style, attack_bonus, strength_bonus): weapon_attack, weapon_strength = get_weapon_stats(levels.attack) attack_bonus += weapon_attack strength_bonus += weapon_strength if attack_style == Attack_Style.ATTACK: effective_attack = osrs.effective_level(levels.attack, 1, 3, 1) effective_strength = osrs.effective_level(levels.strength, 1, 0, 1) elif attack_style == Attack_Style.STRENGTH: effective_attack = osrs.effective_level(levels.attack, 1, 0, 1) effective_strength = osrs.effective_level(levels.strength, 1, 3, 1) enemy_effective_defence = osrs.effective_level(1, 1, 0, 1) max_hit = osrs.max_hit(effective_strength, strength_bonus) accuracy = osrs.accuracy(effective_attack, attack_bonus, enemy_effective_defence, 0) return (max_hit, accuracy)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_weapon_stats(attack_level):\n if attack_level >= 60:\n # Dragon scimitar\n return (67, 66)\n elif attack_level >= 40:\n # Rune scimitar\n return (45, 44)\n elif attack_level >= 30:\n # Adamant scimitar\n return (29, 28)\n elif attack_level >= 20:\n ...
[ "0.6369547", "0.6126176", "0.5764121", "0.5721204", "0.56999665", "0.56620437", "0.5656497", "0.5552577", "0.5546967", "0.554252", "0.5487898", "0.54512966", "0.5447283", "0.5400377", "0.53890437", "0.5385318", "0.5371194", "0.53344166", "0.532357", "0.5270881", "0.5253192", ...
0.8360239
0
Returns tuple of the form (attack_bonus, strength_bonus) for the best scimitar (weapon) at a given attack level. Scimitars are almost always the most efficient weapon
def get_weapon_stats(attack_level): if attack_level >= 60: # Dragon scimitar return (67, 66) elif attack_level >= 40: # Rune scimitar return (45, 44) elif attack_level >= 30: # Adamant scimitar return (29, 28) elif attack_level >= 20: # Mithril scimitar return (21, 20) elif attack_level >= 10: # Black scimitar return (19, 14) elif attack_level >= 5: # Steel scimitar return (15, 14) else: # Iron scimitar return (10, 9)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def most_powerful_weapon(self):\n # sets inital damge to 0\n max_damage = 0\n # sets the best weapon to nothing\n best_weapon = None\n # Loop for each item in inventory\n for item in self.inventory:\n # Code adapted from Make Your own Python Text Based Adventure...
[ "0.6914016", "0.66133755", "0.647129", "0.6280319", "0.6277309", "0.62479544", "0.6210469", "0.6196124", "0.6190553", "0.61680675", "0.60728323", "0.60306805", "0.6020015", "0.59235066", "0.5873241", "0.5860358", "0.5845829", "0.5792037", "0.57798666", "0.5775046", "0.5746795...
0.733018
0
Returns list of tuples of the form (level, max_hit) for levels between start_strength_level and end_strength_level that increase max_hit. Assumes start_strength_level < end_strength_level and no multipliers
def get_max_hit_increases( start_strength_level, end_strength_level, strength_bonus, stance_adder): greatest_max_hit = 0 max_hit_increases = [] cur_strength_level = start_strength_level while cur_strength_level < end_strength_level: effective_strength = osrs.effective_level( cur_strength_level, 1, stance_adder, 1) max_hit = osrs.max_hit(effective_strength, strength_bonus) if max_hit > greatest_max_hit: greatest_max_hit = max_hit max_hit_increases.append((cur_strength_level, max_hit)) cur_strength_level += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_max_hit_and_accuracy(\n levels, attack_style, attack_bonus, strength_bonus):\n weapon_attack, weapon_strength = get_weapon_stats(levels.attack)\n attack_bonus += weapon_attack\n strength_bonus += weapon_strength\n\n if attack_style == Attack_Style.ATTACK:\n effective_attack = osrs...
[ "0.58623534", "0.56103396", "0.5589479", "0.556326", "0.5557862", "0.5534794", "0.5484521", "0.54517794", "0.5345242", "0.53201896", "0.5267493", "0.522973", "0.5207859", "0.5173894", "0.5169486", "0.51680756", "0.5166746", "0.5157776", "0.51527137", "0.51492393", "0.51178026...
0.79078573
0
Generates steric beads required for checking for steric clashes between motifs. Each residues has three beads modeled after the typical three bead models used in coarse grain modeling. The three beads are, Phosphate (P, OP1, OP2) Sugar (O5',C5',C4',O4',C3',O3',C1',C2',O2') and Base (All remaining atoms).
def get_beads(self): phos_atoms,sugar_atoms,base_atoms = [],[],[] for i,a in enumerate(self.atoms): if a is None: continue if i < 3: phos_atoms.append(a) elif i < 12: sugar_atoms.append(a) else: base_atoms.append(a) beads = [] types = [residue.BeadType.PHOS, residue.BeadType.SUGAR, residue.BeadType.BASE] for i,alist in enumerate([phos_atoms,sugar_atoms,base_atoms]): if len(alist) > 0: beads.append(residue.Bead(util.center(alist), types[i])) return beads
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_bespoke_bond_smirks():\n gen = SmirksGenerator()\n mol = Molecule.from_smiles(\"CC\")\n\n bond_smirks = gen._get_bespoke_bond_smirks(molecule=mol)\n # there should be 2 unique bond smirks\n assert len(bond_smirks) == 2\n all_bonds = []\n for smirk in bond_smirks:\n atoms = cond...
[ "0.55873483", "0.54603845", "0.54429716", "0.5408802", "0.5403582", "0.5389649", "0.53191894", "0.5310863", "0.52691495", "0.52479964", "0.52202135", "0.5188183", "0.51712984", "0.5130737", "0.512443", "0.5119801", "0.5115056", "0.511297", "0.51073724", "0.51057136", "0.50982...
0.5812885
0
send the value to set for the piezo to the controller
def func_piezo(self,piezo): self.write('SOURce:VOLTage:PIEZo '+str(piezo)) self.read()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def value(self, value):\n self.set_data(value)", "def update_controller(self):", "def _set_parameter(self):\n # Get parameter keys\n self.input_parameter = self.parameter_combobox.currentText()\n self.result_parameter = self.result_parameters[self.input_parameter]\n # Adjust ...
[ "0.593418", "0.5828751", "0.57180893", "0.5538856", "0.5415524", "0.54018885", "0.5385673", "0.5354321", "0.5354321", "0.5354321", "0.5354321", "0.5354321", "0.5354321", "0.5338695", "0.5303437", "0.5277722", "0.5271039", "0.5270866", "0.5265829", "0.52549756", "0.525472", ...
0.5316031
14
conv_forward performs forward propagation over a convolutional layer of a neural network.
def conv_forward(A_prev, W, b, activation, padding="same", stride=(1, 1)): m, h_prev, w_prev, c_prev = A_prev.shape kh, kw, _, c_new = W.shape ph = pw = 0 sh, sw = stride if padding == 'same': ph = int(((h_prev - 1) * sh + kh - h_prev) / 2) pw = int(((w_prev - 1) * sw + kw - w_prev) / 2) elif type(padding) == tuple: ph, pw = padding pad = np.pad(A_prev, ((0, 0), (ph, ph), (pw, pw), (0, 0)), 'constant') ch = int((h_prev + 2 * ph - kh) / sh) + 1 cw = int((w_prev + 2 * pw - kw) / sw) + 1 conv_W = np.zeros((m, ch, cw, c_new)) for i in range(ch): for j in range(cw): for k in range(c_new): slide_img = pad[:, i * sh:i * sh + kh, j * sw:j * sw + kw] kernel = W[:, :, :, k] element = np.multiply(slide_img, kernel) conv_W[:, i, j, k] = np.sum(np.sum(np.sum(element, axis=1), axis=1), axis=1) Z = conv_W + b return activation(Z)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward_pass_on_convolutions(self, x):\n conv_output = None\n for module_name, module in self.model._modules.items():\n print(module_name)\n if module_name == 'fc':\n return conv_output, x\n x = module(x) # Forward\n # print(module_name,...
[ "0.7160891", "0.7006296", "0.6955772", "0.6950637", "0.6934328", "0.6888566", "0.68857485", "0.6878123", "0.6862401", "0.6859037", "0.6839946", "0.68377876", "0.6797625", "0.67588794", "0.6732545", "0.6729661", "0.6714939", "0.6703585", "0.6667598", "0.66208446", "0.6617689",...
0.6130725
59
messages1 and messages2 represent the encoded headlines from two news sources corr represents the correlation between the two currently returns average correlation
def average_similarity(messages1, messages2): if np.array_equal(messages2, messages1): return 1 corr = np.corrcoef(messages1, messages2) return np.average(corr)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def corr(arr1, arr2):\n\n\n X = []\n Y = []\n for index in range(len(arr1)):\n if arr1[index] == None or arr2[index] == None:\n continue\n X.append(arr1[index])\n Y.append(arr2[index])\n\n\n r = np.corrcoef(X, Y)[0,1]\n f = 0.5*np.log((1+r)/(1-r))\n se = 1/np.sqrt(...
[ "0.6506768", "0.6241487", "0.61946344", "0.6189344", "0.6137197", "0.60439324", "0.589026", "0.5836525", "0.58354324", "0.58140975", "0.5799354", "0.575254", "0.5746967", "0.5743244", "0.57305694", "0.5727684", "0.5720525", "0.56870764", "0.5681944", "0.5680134", "0.56749845"...
0.6749743
0
represents messages as vectors which are used to calculate similarity
def find_similarity(message1, message2): total = 0 for i in range(len(message1)): max = 0 for j in range(len(message2)): message1_encoded = embed([message1[i]]) message2_encoded = embed([message2[j]]) sim = average_similarity(message1_encoded, message2_encoded) if sim > max: max = sim total += max return total/len(message1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wrapMsg(self,vec):\n return vec.todense()", "def plot_similarity(self) -> None:\n if isinstance(self.model, FastTextWrapper):\n self.valid_data[\"vector\"] = self.valid_data[\"text\"].apply(\n lambda x: self.model.inference(word_tokenize(x), sentence_level=True))\n ...
[ "0.6673192", "0.6187192", "0.6166319", "0.6151653", "0.60369694", "0.5990431", "0.59603184", "0.593852", "0.59355754", "0.5926192", "0.5897822", "0.58214325", "0.577277", "0.5761639", "0.57491434", "0.57217455", "0.570953", "0.5705797", "0.56846756", "0.56698257", "0.5666215"...
0.6307364
1
An iterator that will in turn yield all drawable curves in the form of (kind, name, ds, style) tuples (where kind is one of 'algorithm', 'oracle', 'unifpf', 'strategy').
def _pds_plot_iterator(pds, dim, funcId): i = 0 for (algname, ds) in pds.algds_dimfunc((dim, funcId)): yield ('algorithm', algname, ds, _style_algorithm(algname, i)) i += 1 yield ('oracle', 'oracle', pds.oracle((dim, funcId)), _style_oracle()) yield ('unifpf', 'eUNIF', pds.unifpf().dictByDimFunc()[dim][funcId][0], _style_unifpf()) i = 0 for (stratname, ds) in pds.stratds_dimfunc((dim, funcId)): yield ('strategy', stratname, ds, _style_strategy(stratname, i)) i += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def iter_svgs(self):\n for name in self.parent.layers:\n yield name, self.parent.layers[name]\n for elem in self.parent.elements:\n if isinstance(elem, SVG):\n yield None, elem", "def efficiency_curves(self):\n for key in self._efficiency_curves:\n ...
[ "0.625494", "0.62062955", "0.61237484", "0.5620857", "0.5553316", "0.54564095", "0.5426019", "0.5304748", "0.5274277", "0.5261457", "0.525136", "0.5238986", "0.521688", "0.5185847", "0.5158221", "0.5131699", "0.51237047", "0.5082937", "0.5075545", "0.5069475", "0.5063264", ...
0.6866871
0
Show a legend. obj can be an Axes or Figure (in that case, also pass handles and labels arguments).
def legend(obj, ncol=3, **kwargs): # Font size handling here is a bit weird. We specify fontsize=6 # in legend constructor since that affects spacing. However, we # need to manually override with 'small' later, because the original # specification did not take effect on whole-figure legends (and for # actual text, 6 is a wee bit small). We get a specific cramped # appearance and correct behavior for whole-figure legends this way. l = obj.legend(ncol=ncol, fancybox=True, markerscale=0.66, fontsize=6, **kwargs) plt.setp(l.get_texts(), fontsize='small')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def legend (self, **kwargs):\n axes = self.twin_axes or self.axes\n self.mpl_legend = axes.legend (self.mpl_lines, self.labels, **kwargs)", "def legend_extras(\n self, handles=None, labels=None, *, loc=None,\n frame=None, frameon=None, ncol=None, ncols=None,\n center=None, order='C', label...
[ "0.649047", "0.61699635", "0.6117448", "0.6017658", "0.59494585", "0.59489495", "0.59252846", "0.5908187", "0.58153063", "0.58062863", "0.5786078", "0.5747794", "0.5728206", "0.5683173", "0.56398714", "0.56154585", "0.5608667", "0.5580261", "0.5554319", "0.5549609", "0.554207...
0.723461
0
Plot a classic "convergence plot" that shows how the function value approaches optimum as time passes, in terms of raw performance. groupby is the method of aggregating results of multiple instances a callable, stringable object, GroupByMedian by default. By default, raw function values (as difference to optimum) are shown, but relative values to some baseline dataset can be shown instead.
def fval_by_budget(ax, pds, baseline_ds=None, baseline_label="", dim=None, funcId=None, groupby=None): if groupby is None: groupby = GroupByMedian() pfsize = len(pds.algds.keys()) if baseline_ds: baseline_budgets = baseline_ds.funvals[:, 0] baseline_funvals = groupby(baseline_ds.funvals[:, 1:], axis=1) baseline_safefunvals = np.maximum(baseline_funvals, 10**-8) # eschew zeros # fvb is matrix with each row being [budget,funval] baseline_fvb = np.transpose(np.vstack([baseline_budgets, baseline_safefunvals])) for (kind, name, ds, style) in _pds_plot_iterator(pds, dim, funcId): #print name, ds budgets = ds.funvals[:, 0] funvals = groupby(ds.funvals[:, 1:], axis=1) # Throw away funvals after ftarget reached try: limit = np.nonzero(funvals < 10**-8)[0][0] + 1 except IndexError: limit = np.size(budgets)+1 budgets = budgets[:limit] funvals = funvals[:limit] fvb = np.transpose(np.vstack([budgets[:limit], funvals[:limit]])) if baseline_ds: # Relativize by baseline fvba = ra.alignArrayData(ra.VArrayMultiReader([fvb, baseline_fvb])) budgets = fvba[:, 0] funvals = fvba[:, 1] / fvba[:, 2] style['markevery'] = 16 ax.loglog(budgets, funvals, label=name, basex=pfsize, **style) if baseline_ds: ax.set_yticks([1], minor=True) ax.set_xlabel('Budget') ax.set_ylabel(_fval_label(baseline_ds, baseline_label, str(groupby))) ax.grid() if baseline_ds: ax.yaxis.grid(True, which = 'minor')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_bias(clf_list = ['test_small','rt_small','test2_small'],return_df = False,XKCD = False):\n if XKCD = True:\n plt.xkcd()\n print('damn')\n df = load_all_dfs(clf_list)\n df = df.swaplevel(0,1)\n del df['std']\n df.hist()\n plt.figure()\n\n for clf in clf_list:\n df.ix[c...
[ "0.5761602", "0.5579714", "0.55573255", "0.55306447", "0.548709", "0.54616326", "0.5449815", "0.54433835", "0.54180986", "0.5371205", "0.53488153", "0.53472847", "0.53443724", "0.53393245", "0.5338107", "0.533209", "0.5318276", "0.53181833", "0.53000176", "0.52845144", "0.523...
0.53283376
16
Plot each algorithm/method's rank evolving as budget increases. groupby is the method of aggregating results of multiple instances a callable, stringable object, GroupByMedian by default. Note that funcId may be an array of id numbers; in that case, an average rank over listed functions is taken.
def rank_by_budget(ax, pds, dim=None, funcId=None, groupby=None): if groupby is None: groupby = GroupByMedian() pfsize = len(pds.algds.keys()) try: # funcId is array? # _pds_plot_iterator[] uses funcId only for things we don't care for fakeFuncId = funcId[0] manyranking = np.array([pds.ranking((dim, i), groupby) for i in funcId]) rankcount = np.shape(manyranking[0])[1] - 1 amanyranking = ra.alignArrayData(ra.VArrayMultiReader(manyranking)) budget = amanyranking[:,0] rankings = np.hsplit(amanyranking[:,1:], len(funcId)) avgranking = np.average(rankings, axis=0) ranking = np.vstack([budget, avgranking.T]).T except TypeError: # funcId is scalar fakeFuncId = funcId ranking = pds.ranking((dim, funcId), groupby) i = 0 for (kind, name, ds, style) in _pds_plot_iterator(pds, dim, fakeFuncId): if kind != 'algorithm' and kind != 'strategy': continue #print name, ds budgets = ranking[:,0] ranks = ranking[:,1+i] style['markevery'] = 64 ax.plot(budgets, ranks, label=name, **style) i += 1 ax.set_xlabel('Budget') ax.set_ylabel('Rank by '+str(groupby).title()+' Function Value') ax.set_xscale('log', basex=pfsize) ax.grid()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ranking(self, dimfun, groupby, ftarget=10**-8):\n nameds = list(itertools.chain(self.algds_dimfunc(dimfun), self.stratds_dimfunc(dimfun)))\n count = len(nameds)\n\n # Produce \"fv\" items, one per dataset, containing single function value\n # for each budget\n fvset = []\n ...
[ "0.63259864", "0.6046818", "0.53915054", "0.5336046", "0.5121077", "0.5051932", "0.48781553", "0.48333043", "0.48324347", "0.47948903", "0.47788268", "0.4766968", "0.47533748", "0.47356328", "0.47074386", "0.46614596", "0.46601972", "0.465941", "0.46516296", "0.46457088", "0....
0.7678806
0
Plot a rotated convergence plot. It is essentially like fval_by_budget(), but rotated by 90 degrees, showing how big budget is required to reach every target. While this is a little less intuitive at first, it allows better judgement of performance impact of each strategy. With fval_by_budget(), performance change is represented by a curve phase shift, while in evals_by_target(), it simply translates position on the y axis. groupby is the method of aggregating results of multiple instances a callable, stringable object, GroupByMedian by default. By default, absolute evaluations count is shown, but relative values to some baseline dataset can be shown instead.
def evals_by_target(ax, pds, baseline_ds=None, baseline_label="", dim=None, funcId=None, groupby=None): if groupby is None: groupby = GroupByMedian() pfsize = len(pds.algds.keys()) runlengths = 10**np.linspace(0, np.log10(pds.maxevals((dim, funcId))), num=500) target_values = pp.RunlengthBasedTargetValues(runlengths, reference_data=pds.bestalg(None), force_different_targets_factor=10**0.004) targets = target_values((funcId, dim)) if baseline_ds: baseline_fevs = groupby(baseline_ds.detEvals(targets), axis=1) for (kind, name, ds, style) in _pds_plot_iterator(pds, dim, funcId): #print name, ds fevs = groupby(ds.detEvals(targets), axis=1) if baseline_ds: fevs /= baseline_fevs style['markevery'] = 64 ax.loglog(targets, fevs, label=name, basey=pfsize, **style) ax.set_xlim(10**2, 10**(np.log10(targets[-1])-0.2)) if baseline_ds: ax.set_yticks([2, 3.5], minor=True) ax.set_xlabel('Function Value Targets') ax.set_ylabel(_evals_label(baseline_ds, baseline_label, str(groupby))) ax.grid() if baseline_ds: ax.yaxis.grid(True, which = 'minor')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fval_by_budget(ax, pds, baseline_ds=None, baseline_label=\"\", dim=None, funcId=None, groupby=None):\n if groupby is None: groupby = GroupByMedian()\n pfsize = len(pds.algds.keys())\n\n if baseline_ds:\n baseline_budgets = baseline_ds.funvals[:, 0]\n baseline_funvals = groupby(baseline_d...
[ "0.57839125", "0.5279949", "0.50594544", "0.50512856", "0.50476915", "0.4997148", "0.4956228", "0.48934639", "0.4885822", "0.4879313", "0.48789495", "0.48726746", "0.48418292", "0.48268393", "0.48255506", "0.4809495", "0.47626424", "0.4736917", "0.47095123", "0.46958274", "0....
0.5902508
0
Plot the evolution of relative evaluations for a target based on increasing absolute evaluations. In other words, for each absolute number of evaluations, determine the target reached and show how faster did baseline reach it. groupby is the method of aggregating results of multiple instances a callable, stringable object, GroupByMedian by default. It's not clear whether this will eventually be useful at all, but it offers another perspective that might aid some analysis.
def evals_by_evals(ax, pds, baseline1_ds=None, baseline1_label="", baseline2_ds=None, baseline2_label="", dim=None, funcId=None, groupby=None): if groupby is None: groupby = GroupByMedian() pfsize = len(pds.algds.keys()) runlengths = 10**np.linspace(0, np.log10(pds.maxevals((dim, funcId))), num=500) target_values = pp.RunlengthBasedTargetValues(runlengths, reference_data=pds.bestalg(None), force_different_targets_factor=10**0.004) targets = target_values((funcId, dim)) if baseline1_ds: baseline1_fevs = np.array(groupby(baseline1_ds.detEvals(targets), axis=1)) if baseline2_ds: baseline2_fevs = np.array(groupby(baseline2_ds.detEvals(targets), axis=1)) for (kind, name, ds, style) in _pds_plot_iterator(pds, dim, funcId): #print name, ds fevs1 = groupby(ds.detEvals(targets), axis=1) if baseline1_ds: fevs1 /= baseline1_fevs fevs2 = groupby(ds.detEvals(targets), axis=1) if baseline2_ds: fevs2 /= baseline2_fevs infsx = np.nonzero(fevs1 == inf) infs = infsx[0] if np.size(infs) > 0: #print infs fevs1 = fevs1[:infs[0]-1] fevs2 = fevs2[:infs[0]-1] #print name, fevs1, fevs2 style['markevery'] = 64 ax.loglog(fevs2, fevs1, label=name, basex=pfsize, basey=pfsize, **style) ax.grid() ax.set_xlim(0, runlengths[-1] * pfsize) # i.e. log(runlengths) + 1 ax.set_ylabel('Per-target ' + _evals_label(baseline1_ds, baseline1_label, str(groupby))) ax.set_xlabel('Per-target ' + _evals_label(baseline2_ds, baseline2_label, str(groupby)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def evals_by_target(ax, pds, baseline_ds=None, baseline_label=\"\", dim=None, funcId=None, groupby=None):\n if groupby is None: groupby = GroupByMedian()\n pfsize = len(pds.algds.keys())\n\n runlengths = 10**np.linspace(0, np.log10(pds.maxevals((dim, funcId))), num=500)\n target_values = pp.RunlengthBa...
[ "0.6690005", "0.52602696", "0.5226427", "0.5201476", "0.5161905", "0.51192516", "0.5022657", "0.49890342", "0.497622", "0.49757755", "0.49593621", "0.4885093", "0.4878392", "0.48618805", "0.48530275", "0.48435473", "0.48297042", "0.4821337", "0.47865835", "0.47860396", "0.478...
0.60396034
1
Generate keyword arguments suitable for consumption by the ticketswitch API
def as_api_parameters(self): raise NotImplementedError( 'as_api_parameters not implemented on ' + self.__class__)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generateKwargsAsString(self):\n args = \"\"\n axisList = self.tabWidget.currentWidget()\n\n for axisWidget in axisList.getAxisWidgets():\n args += \"%s = %s, \" % (axisWidget.axis.id,\n axisWidget.getCurrentValuesAsStr())\n\n # Generate a...
[ "0.6476009", "0.64399606", "0.639305", "0.6366198", "0.6307585", "0.6280311", "0.6226344", "0.6221674", "0.6219365", "0.6188294", "0.61156327", "0.6104635", "0.60843754", "0.608238", "0.6072212", "0.60718983", "0.6050508", "0.6045505", "0.603796", "0.60124546", "0.6003399", ...
0.0
-1
Generates a dictionary of parameters to be passed back to the API.
def as_api_parameters(self): params = { 'card_number': self.card_number, } missing_expiry_year = not self.expiry_year missing_expiry_month = not self.expiry_month if missing_expiry_year or missing_expiry_month: raise InvalidParametersError( 'both expiry_year and expiry_month must be specified') params.update( expiry_date='{:0>2}{:0>2}'.format( self.expiry_month, # handle 4 digit years str(self.expiry_year)[-2:] ) ) missing_start_year = not self.start_year missing_start_month = not self.start_month specifying_start_date = self.start_year or self.start_month if specifying_start_date and (missing_start_year or missing_start_month): raise InvalidParametersError( 'both start_year and start_month must be specified or neither specified') if specifying_start_date: params.update( start_date='{:0>2}{:0>2}'.format( self.start_month, str(self.start_year)[-2:] ) ) if self.ccv2: params.update(cv_two=self.ccv2) if self.issue_number: params.update(issue_number=self.issue_number) if self.billing_address: params.update( **self.billing_address.as_api_billing_address_parameters() ) if self.return_url: params.update(return_url=self.return_url) if self.return_token: params.update(return_token=self.return_token) if self.user_agent: params.update(client_http_user_agent=self.user_agent) if self.accept: params.update(client_http_accept=self.accept) if self.remote_site: params.update(remote_site=self.remote_site) return params
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _generate_params(self):\n return {\n 'lis_outcome_service_url': self.lis_outcome_service_url,\n 'lis_result_sourcedid': self.lis_result_sourcedid,\n 'oauth_consumer_key': self.key\n }", "def get_params(self):\n return {}", "def parameters_dict(self):\n ...
[ "0.79212326", "0.7483993", "0.72430176", "0.71748704", "0.7172315", "0.7096033", "0.7091137", "0.70732015", "0.7070858", "0.7060539", "0.697383", "0.69705796", "0.6943187", "0.6936735", "0.69144523", "0.6904813", "0.68293667", "0.6813441", "0.6812606", "0.67854816", "0.677385...
0.0
-1
Generate API keyword args for these details.
def as_api_parameters(self): return { 'return_token': self.token, 'return_url': self.url, 'client_http_user_agent': self.user_agent, 'client_http_accept': self.accept, 'remote_site': self.remote_site, }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extra_target_arguments(self):\n return {}", "def init_args(self):\n return {\n \"doc\": self.__doc__.format(name=colored(self.module_name, \"green\", attrs=['bold','underline'])),\n \"Url\": \"set a target url\",\n 'Type': \"set type to check , [php, asp, aspx, cgi,...
[ "0.6347063", "0.6342163", "0.6047952", "0.60061", "0.5908773", "0.590851", "0.58989066", "0.58448356", "0.58410525", "0.58357745", "0.5807142", "0.57746965", "0.57710725", "0.5671101", "0.5665809", "0.56421447", "0.5637025", "0.5618054", "0.55946404", "0.5592014", "0.5560202"...
0.5200405
77
Generate API keyword args for these details.
def as_api_parameters(self): return { '{}_callback/stripeToken'.format(source): token for source, token in self.tokens.items() }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extra_target_arguments(self):\n return {}", "def init_args(self):\n return {\n \"doc\": self.__doc__.format(name=colored(self.module_name, \"green\", attrs=['bold','underline'])),\n \"Url\": \"set a target url\",\n 'Type': \"set type to check , [php, asp, aspx, cgi,...
[ "0.6347063", "0.6342163", "0.6047952", "0.60061", "0.5908773", "0.590851", "0.58989066", "0.58448356", "0.58410525", "0.58357745", "0.5807142", "0.57746965", "0.57710725", "0.5671101", "0.5665809", "0.56421447", "0.5637025", "0.5618054", "0.55946404", "0.5592014", "0.5560202"...
0.0
-1
Generate API keyword args for these details.
def as_api_parameters(self): data = {} for system in self.system_codes: data.update({ "{0}_callback/{1}".format(system, variable): self.data[variable] for variable in self.data.keys() }) return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extra_target_arguments(self):\n return {}", "def init_args(self):\n return {\n \"doc\": self.__doc__.format(name=colored(self.module_name, \"green\", attrs=['bold','underline'])),\n \"Url\": \"set a target url\",\n 'Type': \"set type to check , [php, asp, aspx, cgi,...
[ "0.6347063", "0.6342163", "0.6047952", "0.60061", "0.5908773", "0.590851", "0.58989066", "0.58448356", "0.58410525", "0.58357745", "0.5807142", "0.57746965", "0.57710725", "0.5671101", "0.5665809", "0.56421447", "0.5637025", "0.5618054", "0.55946404", "0.5592014", "0.5560202"...
0.52038634
74
Fits the model for the given targets.
def fit(self, X, Y): # + 1 because we want to also have a free term (bias) that is not influenced by the training values necessarily. self._weights = np.zeros(1 + X.shape[1]) Y = [0 if y == -1 else 1 for y in Y] self.epochs = 0 fitted = False while not fitted: errors = 0 self.epochs += 1 # We now parse the training data set for entry, target in zip(X, Y): classification_error = target - self.predict(entry) if classification_error: self.updates += 1 # we compute now with how much we should adjust the weights weights_update = self.learning_rate * classification_error # Adjust the weights based on the error (+/- 1 or 0) and the training entry self._weights[1:] += weights_update * entry self._weights[0] += weights_update errors += np.where(classification_error == 0, 0, 1) fitted = errors == 0 self._errors.append(errors) return self._weights
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fit(self, features, targets):\n self.model_features = features\n self.model_targets= targets", "def fit(self, input_data, targets):\n self.ensemble_model_.fit(input_data, targets)", "def set_targets(self, targets: List[float]):\n self.targets = targets", "def set_targets(self,...
[ "0.7428789", "0.6653737", "0.61757916", "0.60686624", "0.60502565", "0.5901099", "0.58539677", "0.5839297", "0.5839297", "0.5835604", "0.58182526", "0.5763666", "0.575089", "0.57495534", "0.5730885", "0.5730016", "0.57109356", "0.56859463", "0.5679586", "0.5662925", "0.566072...
0.0
-1
Lakukan sesuatu ketika tombol ditekan
def gambar_plot(self): # memanggil isi dari Line Edit pada kolom X dan # menyimpannya pada variabel self.nilai_x # sekaligus mengkonversinya menjadi angka try: # cetak isi nilai X # Mengcheck apakah plot pertama dijalankan atau tidak if self.pertamaPlot: self.x = float(self.input_x.text()) self.y = float(self.input_y.text()) self.input_x.setEnabled(False) self.input_y.setEnabled(False) self.input_az.setEnabled(True) self.input_jarak.setEnabled(True) self.reset.setEnabled(True) self.check_garis.setEnabled(True) #Mengecheck apakah layer point sudah di buat sebelumnya if self.pertamaPoint: #Membuat layer titik self.layerTitik = self.buat_layer("Plot Titik","Point") self.measureDialog.show() self.pertamaPoint = False self.buat_titik() self.pertamaPlot = False else: #Mengecheck apakah layer garis sudah di buat sebelumnya if self.pertamaLine: #Mengecheck apakah checkbox check baris sudah di centang oleh user atau belum if self.check_garis.isChecked(): #Membuat layer garis self.layerGaris = self.buat_layer("Plot Garis","LineString") self.pertamaLine = False self.hitung_azimuth_jarak() except Exception as e: print(e) iface.messageBar().pushMessage("Error","anda salah memasukkan input", level=Qgis.Warning,duration=3)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mezclar_bolsa(self):", "def agregar_bolsa(self, letra, cantidad):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def main():\r\n\r\n print(\"Berikut adalah daftar naga yang tersedia.\")\r\n for naga i...
[ "0.6971054", "0.64303", "0.6378631", "0.6378631", "0.6378631", "0.6378631", "0.6378631", "0.610913", "0.6093041", "0.6004066", "0.5950295", "0.58474815", "0.58162034", "0.5795707", "0.5697223", "0.5587125", "0.5565432", "0.5492906", "0.5483054", "0.54795176", "0.54650056", ...
0.0
-1
buat titik di koordinat masukan
def buat_titik(self): # memberi geometri pada fitur baru # Memberi fitur titik feature = QgsFeature() feature.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(self.x, self.y))) self.measureDialog.insertParams(self.idTitik,self.x,self.y) # menambahkan fitur pada layer self.layerTitik.dataProvider().addFeatures([feature]) self.layerTitik.updateExtents() self.layerTitik.startEditing() self.layerTitik.changeAttributeValue(self.idTitik,0,self.idTitik) self.layerTitik.changeAttributeValue(self.idTitik,1,self.x ) self.layerTitik.changeAttributeValue(self.idTitik,2,self.y ) self.layerTitik.commitChanges() self.idTitik = self.idTitik+1 self.iface.actionZoomToLayer().trigger()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_title():", "def getTitle(self): #$NON-NLS-1$\r", "def getTitle(self): #$NON-NLS-1$\r", "def get_title(self, obj):\n title = obj.habit.title\n return title", "def Label(self) -> str:", "def title(self, txt):\n num = len(txt)\n ticks = \"=\" * num\n print(ticks)\n...
[ "0.7076962", "0.67579925", "0.67579925", "0.66918576", "0.64708865", "0.64061403", "0.6356622", "0.62820756", "0.62643343", "0.62187165", "0.61947083", "0.61748713", "0.6173121", "0.6151515", "0.6145324", "0.6133842", "0.61263466", "0.60638046", "0.60589373", "0.60490286", "0...
0.0
-1
Creates an error log for a ``logging`` module ``record`` instance.
def create_from_record(self, record, **kwargs): for k in ('url', 'view', 'request', 'data'): if k not in kwargs: kwargs[k] = record.__dict__.get(k) kwargs.update({ 'logger': record.name, 'level': record.levelno, 'message': force_unicode(record.msg), 'server_name': conf.NAME, }) # construct the checksum with the unparsed message kwargs['checksum'] = construct_checksum(**kwargs) # save the message with included formatting kwargs['message'] = record.getMessage() # If there's no exception being processed, exc_info may be a 3-tuple of None # http://docs.python.org/library/sys.html#sys.exc_info if record.exc_info and all(record.exc_info): return self.create_from_exception(record.exc_info, **kwargs) return self.process( traceback=record.exc_text, **kwargs )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _record(self):\n record_attr = {\n 'name': 'test_record',\n 'level': 'ERROR',\n 'pathname': '/test/path',\n 'msg': 'This is a test record.',\n }\n record = logging.makeLogRecord(record_attr)\n return record", "def log_message(self, build...
[ "0.69343174", "0.6103778", "0.599118", "0.5986464", "0.58544385", "0.5821842", "0.58028203", "0.5739559", "0.57346153", "0.5654814", "0.56020546", "0.5592666", "0.5585921", "0.5539647", "0.55095744", "0.54659367", "0.54659367", "0.54548794", "0.5440924", "0.5418258", "0.53907...
0.59985435
2
Creates an error log for from ``message``.
def create_from_text(self, message, **kwargs): return self.process( message=message, **kwargs )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error(self, message):\n return self.log(\"ERROR\", message)", "def error(error_message: str):\n logger.error(error_message)", "def error(self, message: str):\n self.log(Level.ERROR, message)", "def err(message):\n\n timestamp = format_time(get_time())\n message = '{} - [ERROR] - {}...
[ "0.68963873", "0.6863878", "0.6833658", "0.68022585", "0.6657312", "0.66282344", "0.66032934", "0.6493894", "0.6439013", "0.6363229", "0.6361158", "0.6326939", "0.6277279", "0.62659764", "0.6256422", "0.61797696", "0.61486435", "0.6043684", "0.60246456", "0.59690106", "0.5967...
0.0
-1
Creates an error log from an exception.
def create_from_exception(self, exc_info=None, **kwargs): if not exc_info: exc_info = sys.exc_info() exc_type, exc_value, exc_traceback = exc_info def shorten(var): var = transform(var) if isinstance(var, basestring) and len(var) > 200: var = var[:200] + '...' return var reporter = ExceptionReporter(None, exc_type, exc_value, exc_traceback) frames = varmap(shorten, reporter.get_traceback_frames()) if not kwargs.get('view'): # This should be cached modules = get_installed_apps() if conf.INCLUDE_PATHS: modules = set(list(modules) + conf.INCLUDE_PATHS) def iter_tb_frames(tb): while tb: yield tb.tb_frame tb = tb.tb_next def contains(iterator, value): for k in iterator: if value.startswith(k): return True return False # We iterate through each frame looking for an app in INSTALLED_APPS # When one is found, we mark it as last "best guess" (best_guess) and then # check it against SENTRY_EXCLUDE_PATHS. If it isnt listed, then we # use this option. If nothing is found, we use the "best guess". best_guess = None view = None for frame in iter_tb_frames(exc_traceback): try: view = '.'.join([frame.f_globals['__name__'], frame.f_code.co_name]) except: continue if contains(modules, view): if not (contains(conf.EXCLUDE_PATHS, view) and best_guess): best_guess = view elif best_guess: break if best_guess: view = best_guess if view: kwargs['view'] = view data = kwargs.pop('data', {}) or {} if hasattr(exc_type, '__class__'): exc_module = exc_type.__class__.__module__ else: exc_module = None data['__sentry__'] = { 'exc': map(transform, [exc_module, exc_value.args, frames]), } if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, 'source'): origin, (start, end) = exc_value.source data['__sentry__'].update({ 'template': (origin.reload(), start, end, origin.name), }) kwargs['view'] = origin.loadname tb_message = '\n'.join(traceback.format_exception(exc_type, exc_value, exc_traceback)) kwargs.setdefault('message', transform(force_unicode(exc_value))) return self.process( 
class_name=exc_type.__name__, traceback=tb_message, data=data, **kwargs )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_log(self, exc):\n return self.formatter.formatException(exc)", "def log_exception():\n logging.basicConfig(level=logging.DEBUG)\n return logging.getLogger('exceptions_log')", "def log_exception(e):\n logger.exception(e)\n raise", "def _log_exception(self, exception, query, param...
[ "0.6838723", "0.6404485", "0.6333006", "0.6018795", "0.5974132", "0.5974132", "0.5963912", "0.59542024", "0.5951783", "0.5901242", "0.5837355", "0.5835964", "0.5816076", "0.5780152", "0.57722586", "0.5741803", "0.5727721", "0.57214516", "0.5720709", "0.5701371", "0.56902647",...
0.0
-1
Initialize the motor with its control pins and start pulsewidth modulation
def __init__(self, pinForward, pinBackward, pinControlStraight,pinLeft, pinRight, pinControlSteering): self.pinForward = pinForward self.pinBackward = pinBackward self.pinControlStraight = pinControlStraight self.pinLeft = pinLeft self.pinRight = pinRight self.pinControlSteering = pinControlSteering GPIO.setup(self.pinForward, GPIO.OUT) GPIO.setup(self.pinBackward, GPIO.OUT) GPIO.setup(self.pinControlStraight, GPIO.OUT) GPIO.setup(self.pinLeft, GPIO.OUT) GPIO.setup(self.pinRight, GPIO.OUT) GPIO.setup(self.pinControlSteering, GPIO.OUT) self.pwm_forward = GPIO.PWM(self.pinForward, 100) self.pwm_backward = GPIO.PWM(self.pinBackward, 100) self.pwm_forward.start(0) self.pwm_backward.start(0) self.pwm_left = GPIO.PWM(self.pinLeft, 100) self.pwm_right = GPIO.PWM(self.pinRight, 100) self.pwm_left.start(0) self.pwm_right.start(0) GPIO.output(self.pinControlStraight,GPIO.HIGH) GPIO.output(self.pinControlSteering,GPIO.HIGH)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup_motor(self,pin_num):\n pi.set_servo_pulsewidth(pin_num, 2000)\n sleep(2)\n pi.set_servo_pulsewidth(pin_num, 500 )\n sleep(2)", "def setup(self):\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BCM)\n GPIO.setup(self.Motor_A_EN, GPIO.OUT)\n GPIO.setu...
[ "0.78306526", "0.7692866", "0.72555536", "0.71739495", "0.7074802", "0.6707869", "0.66933286", "0.6581241", "0.6572955", "0.6535936", "0.6500315", "0.6482761", "0.64587724", "0.6382999", "0.63571215", "0.63376045", "0.6329462", "0.63277125", "0.630661", "0.6273071", "0.626463...
0.72353333
3
pinForward is the forward Pin, so we change its duty cycle according to speed.
def forward(self, speed): self.pwm_backward.ChangeDutyCycle(0) self.pwm_forward.ChangeDutyCycle(speed)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self):\n global motor_direction\n with self._lock:\n GPIO.output(7, True)\n GPIO.output(11, False)\n GPIO.output(13, True)\n GPIO.output(15, False)\n # time.sleep(sec)\n motor_direction = 'Forward'\n return motor...
[ "0.66786534", "0.66583955", "0.66447824", "0.6522863", "0.6463652", "0.6370594", "0.62973475", "0.6285964", "0.6233026", "0.6130106", "0.6119961", "0.6082686", "0.60227823", "0.6020035", "0.5921472", "0.5857334", "0.58558404", "0.57667553", "0.5758451", "0.5737794", "0.571628...
0.726632
1
pinForward is the forward Pin, so we change its duty cycle according to speed.
def forward_left(self, speed): self.pwm_backward.ChangeDutyCycle(0) self.pwm_forward.ChangeDutyCycle(speed) self.pwm_right.ChangeDutyCycle(0) self.pwm_left.ChangeDutyCycle(100)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self):\n global motor_direction\n wit...
[ "0.7267332", "0.7267332", "0.66788316", "0.66588324", "0.6522946", "0.64650464", "0.6370248", "0.6297182", "0.6286759", "0.6234336", "0.6129071", "0.61207354", "0.60830605", "0.6021252", "0.6019744", "0.5922687", "0.58570683", "0.5856794", "0.57674026", "0.5759413", "0.573795...
0.66452456
4
pinForward is the forward Pin, so we change its duty cycle according to speed.
def forward_right(self, speed): self.pwm_backward.ChangeDutyCycle(0) self.pwm_forward.ChangeDutyCycle(speed) self.pwm_left.ChangeDutyCycle(0) self.pwm_right.ChangeDutyCycle(100)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self):\n global motor_direction\n wit...
[ "0.72670037", "0.72670037", "0.6678285", "0.6659141", "0.6645468", "0.6523574", "0.64648247", "0.6370868", "0.62966657", "0.62861395", "0.62338525", "0.6129374", "0.6120421", "0.60226107", "0.60188246", "0.59220994", "0.5857952", "0.58566344", "0.57671356", "0.5759647", "0.57...
0.6083617
13
pinBackward is the forward Pin, so we change its duty cycle according to speed.
def backward(self, speed): self.pwm_forward.ChangeDutyCycle(0) self.pwm_backward.ChangeDutyCycle(speed)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _reverseduty(self):\n if self.ir_pin.duty() == 0:\n self.ir_pin.duty(512)\n else:\n self.ir_pin.duty(0)", "def motor_B(self, direction, speed):\n if direction == 1:\n GPIO.output(self.Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(self.Motor_B_Pin2, GP...
[ "0.6781866", "0.6440657", "0.63590693", "0.6158023", "0.6145336", "0.6108258", "0.6108258", "0.6104545", "0.6029256", "0.6004843", "0.5955468", "0.59549665", "0.5944324", "0.5896161", "0.58845234", "0.5840875", "0.5830278", "0.5827748", "0.5796934", "0.5789884", "0.5785566", ...
0.67496103
2
pinForward is the forward Pin, so we change its duty cycle according to speed.
def left(self, speed): self.pwm_right.ChangeDutyCycle(0) self.pwm_left.ChangeDutyCycle(speed)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self):\n global motor_direction\n wit...
[ "0.726632", "0.726632", "0.66786534", "0.66583955", "0.66447824", "0.6522863", "0.6463652", "0.6370594", "0.62973475", "0.6285964", "0.6233026", "0.6130106", "0.6119961", "0.6082686", "0.60227823", "0.6020035", "0.5921472", "0.5857334", "0.58558404", "0.57667553", "0.5758451"...
0.545681
36
pinForward is the forward Pin, so we change its duty cycle according to speed.
def right(self, speed): self.pwm_left.ChangeDutyCycle(0) self.pwm_right.ChangeDutyCycle(speed)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self, speed):\n self.pwm_backward.ChangeDutyCycle(0)\n self.pwm_forward.ChangeDutyCycle(speed)", "def forward(self):\n global motor_direction\n wit...
[ "0.7267332", "0.7267332", "0.66788316", "0.66588324", "0.66452456", "0.6522946", "0.64650464", "0.6370248", "0.6297182", "0.6286759", "0.6234336", "0.6129071", "0.61207354", "0.60830605", "0.6021252", "0.6019744", "0.5922687", "0.58570683", "0.5856794", "0.57674026", "0.57594...
0.51197535
83
Set the duty cycle of both control pins to zero to stop the motor.
def stop(self): self.pwm_forward.ChangeDutyCycle(0) self.pwm_backward.ChangeDutyCycle(0) self.pwm_left.ChangeDutyCycle(0) self.pwm_right.ChangeDutyCycle(0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stop(self):\n\n self.pwm_forward.ChangeDutyCycle(0)\n self.pwm_backward.ChangeDutyCycle(0)", "def stop_motor(self):\n self.output(self.steering_pin, 0)\n self.pi.set_servo_pulsewidth(self.steering_pin, 0)", "def stop(self):\n\t\tGPIO.output(self._dir_pin_1, GPIO.HIGH)\n\t\tGPIO....
[ "0.7451313", "0.71744835", "0.70240444", "0.64211655", "0.6414792", "0.6408484", "0.6395715", "0.63024884", "0.6269145", "0.62610817", "0.6226628", "0.6201555", "0.614517", "0.61346424", "0.6122426", "0.6099769", "0.6093762", "0.60788894", "0.6051263", "0.60405594", "0.603532...
0.736179
1
There are 4 dimensions simulation id(600), physical variables(4), runs(3) and timesteps(500). For every scene, we need to pull up data for all timesteps for selected physical variables for a given simulation id.
def onestatfile(): with hp.File('StatsFile.h5', 'w') as onefile: alldata = np.empty((600, 4, 3, 500), dtype=np.float32) for j in range(600): for i in range(3): msd, vol, rms, asp = getstats(i, j+1) alldata[j, 0, i, :] = msd alldata[j, 1, i, :] = vol alldata[j, 2, i, :] = rms alldata[j, 3, i, :] = asp onefile.create_dataset('Stats', data=alldata, chunks=(1, 4, 3, 500), compression='gzip', compression_opts=9)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def general_simulation_data(self):\n iterations = {}\n nstates = {}\n natoms = {}\n for phase in self.phases:\n positions = self.ncfiles[phase].variables['positions']\n iterations[phase], nstates[phase], natoms[phase], spatial = positions.shape\n\n leniter =...
[ "0.61006594", "0.5689221", "0.56312686", "0.5612549", "0.55848944", "0.55802494", "0.55152977", "0.5429191", "0.54044414", "0.5346934", "0.53425074", "0.53219706", "0.5282279", "0.52707255", "0.5253489", "0.5242618", "0.52306026", "0.5217722", "0.5216223", "0.5202785", "0.519...
0.0
-1
There are 4 dimensions simulation id(600), timesteps(500), runs(3) and coordinates(216). For every scene, we need to pull up 216 floats for the 72 (x, y, z) coordinates. The user may want to see the shell for another run of the same simulation parameters. Hence, runs are the second fastest varying dimension.
def onevtkfile(): basedir = '/home/amit/WorkSpace/UCLA/simulations/PhaseDiagram/RawData' with hp.File('VTKFile.h5', 'w') as onefile: allvtk = np.empty((600, 500, 3, 216), dtype=np.float32) for j in range(600): for i in range(3): vtkfilepath = '{}/Run{}/VTKFile-{}.h5'.format(basedir, i, j+1) with hp.File(vtkfilepath, 'r') as vtkfile: for t in range(500): allvtk[j, t, i, :] = vtkfile['T{}/Points'.format(2*t)][:].ravel() onefile.create_dataset('Points', data=allvtk, chunks=(1, 50, 3, 216), compression='gzip', compression_opts=9)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def general_simulation_data(self):\n iterations = {}\n nstates = {}\n natoms = {}\n for phase in self.phases:\n positions = self.ncfiles[phase].variables['positions']\n iterations[phase], nstates[phase], natoms[phase], spatial = positions.shape\n\n leniter =...
[ "0.5814169", "0.568188", "0.56700325", "0.5589331", "0.5585894", "0.5571232", "0.5549986", "0.5539828", "0.5512618", "0.5489092", "0.5436523", "0.5434775", "0.541683", "0.541113", "0.54004127", "0.5391306", "0.5379776", "0.5379776", "0.5369655", "0.5350718", "0.53469867", "...
0.0
-1
Overrides the default implementation, want to check that the quadratic form is equal as well
def __eq__(self, other): if type(other) is type(self): # TODO: check that this does not mix Clifford classes without different symmetric bilinear forms, # as created with class factories. return ( self.items() == other.items() and self.symmetric_bilinear_form.__code__.co_code == other.symmetric_bilinear_form.__code__.co_code ) return NotImplemented
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_equality(self):\n\n s3 = space(curvature=1/5)\n for k in (0, -1, 1, 1.75, 0.325, 1/7, -1.75, -0.325, -1/7):\n s1 = space(fake_curvature=k)\n s2 = space(fake_curvature=k)\n self.assertTrue(s1 == s2)\n self.assertTrue(hash(s1) == hash(s2))\n ...
[ "0.66910017", "0.6484679", "0.64480114", "0.6388875", "0.6279408", "0.6213788", "0.61958015", "0.61131495", "0.60789216", "0.60751086", "0.6066792", "0.6038577", "0.602096", "0.5967689", "0.5960483", "0.5953086", "0.59330404", "0.5928619", "0.5919364", "0.5909345", "0.5903139...
0.0
-1
Overrides the default implementation
def __hash__(self): return hash(tuple(sorted(self.__dict__.items())))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __call__(self):\r\n raise NotImplementedError('override me')", "def __call__(self):\n raise NotImplementedError", "def override(self):\n return None", "def __call__(self):\n raise NotImplementedError()", "def __call__(self):\n pass", "def __call__(self):\n pa...
[ "0.8084532", "0.7757971", "0.7661786", "0.7621949", "0.75538856", "0.75538856", "0.7523752", "0.74512076", "0.7332789", "0.73304206", "0.729408", "0.729408", "0.7254287", "0.7254287", "0.72166955", "0.72166955", "0.72166955", "0.71078914", "0.69840276", "0.6974919", "0.697491...
0.0
-1
r"""Uses the ideal in the clifford algebra,
def _clifford_swap(cls, slot_i, slot_j) -> Tensor: return Tensor( { Tensor._merge_keys((slot_j,), (slot_i,)): -1, Tensor._merge_keys(): 2 * cls.symmetric_bilinear_form(slot_i, slot_j), } )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solvate(self):\n\n pass", "def solve(self):", "def get_sol(self):", "def idealize(self) -> None:\n self.k = np.zeros(6, dtype=float)\n self.p = np.zeros(2, dtype=float)\n self.c = np.zeros(2, dtype=float)", "def clifford_set(u):\n i, x, y, z = u.v\n result = []\n re...
[ "0.65014106", "0.6379623", "0.63238484", "0.6098798", "0.5973083", "0.59521735", "0.5935254", "0.590939", "0.58923155", "0.58820564", "0.58624995", "0.5835705", "0.5805436", "0.58031327", "0.5798486", "0.5798486", "0.5775567", "0.5768856", "0.57159203", "0.57043415", "0.56889...
0.0
-1
r""" Swaps the order of the tensor multiplications
def swap(self, adjacent_transposition): result = Tensor() for key_self in self.keys(): # ensure that the swap can be made with the available slots if max(adjacent_transposition) < len(key_self): prefix = Tensor({Tensor._merge_keys(*key_self[0 : min(adjacent_transposition)]): self[key_self]}) root = type(self)._clifford_swap( *key_self[min(adjacent_transposition) : max(adjacent_transposition) + 1] ) postfix = Tensor({Tensor._merge_keys(*key_self[max(adjacent_transposition) + 1 :]): 1}) result = result + prefix * root * postfix else: result = result + Tensor({key_self: self[key_self]}) self.clear() self.update(result) return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mul_inplace(a, b):", "def transpose(self):\n order = list(self.order)\n order[-2], order[-1] = order[-1], order[-2]\n self.order = order", "def SwapSides(self):\n for c in self.reactants:\n c.coeff = -c.coeff", "def __matmul__(self, tensor):\n return self.mat...
[ "0.6521079", "0.62019324", "0.6163201", "0.6071542", "0.60506517", "0.5975764", "0.59555125", "0.59367365", "0.59306496", "0.5920101", "0.59189886", "0.58538663", "0.5833324", "0.5821733", "0.5819354", "0.580724", "0.5802979", "0.5783564", "0.5771913", "0.5763691", "0.5743860...
0.541512
55
For each entry in the connection matrix, examine the points that are supposed to be connected and if they don't match change the connection matrix to reflect the actual connection order.
def whereConnect(self, refsec, fixsec): ref10 = self.sections[refsec][0] ref11 = self.sections[refsec][-1] fix00 = self.sections[fixsec][0] fix01 = self.sections[fixsec][-1] if ref10 == fix00: return 0 # use the 0th node's rad of ref sec for new rad elif ref10 == fix01: return 0 elif ref11 == fix00: return 1 elif ref11 == fix01: return 1 else: print('No valid connection found between sections %i and %i' % (refsec, fixsec))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_connection_between_nodes(self):\n\n for i, node in enumerate(self.list_empty_nodes):\n line = node.labyrinth_position[0]\n column = node.labyrinth_position[1]\n\n for j in range(i+1, len(self.list_empty_nodes)):\n line_j = self.list_empty_nodes[j].laby...
[ "0.69678575", "0.6915727", "0.6357731", "0.6154334", "0.6130675", "0.6066713", "0.6047329", "0.59654814", "0.59127927", "0.5845888", "0.5836875", "0.57811123", "0.57547414", "0.5699769", "0.5684559", "0.56675184", "0.5660186", "0.56549203", "0.56542313", "0.5649522", "0.56071...
0.0
-1
If a radius of < 0 is found, its neighbors are used to get the correct radius
def matchRadius(self): print('Making sure all points and radii match...') rads = [] for sec in self.secRads.keys(): for r in self.secRads[sec]: rads.append(r) if not self.medrad: self.medrad = rads[int(len(rads)/2)] print('Median radius is: %.5f' % self.medrad) self.uniqueNodes = [] self.uniqueRads = [] for sec in self.sections.keys(): for n in xrange(len(self.sections[sec])): # check to see if that point already exists in uniquesecs if self.sections[sec][n] in self.uniqueNodes: radInd = self.uniqueNodes.index(self.sections[sec][n]) self.secRads[sec][n] = self.uniqueRads[radInd] else: self.uniqueNodes.append(self.sections[sec][n]) if self.secRads[sec][n] <= 0 or self.secRads[sec][n] > 10000: print('Bad radius found, section %i node %i' %(sec, n)) self.uniqueRads.append(self.medrad) self.secRads[sec][n] = self.medrad print('Replaced with: %.5f' % self.uniqueRads[-1]) else: self.uniqueRads.append(self.secRads[sec][n]) print('Radii fixed.') return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_neighborhood_radius_consistent():\r\n grid_spacing = random.uniform(1e-6, 10.0)\r\n center = numpy.random.random(random.randint(1, 3))\r\n\r\n # Find points with radius neighborhood\r\n radius = random.uniform(_distance_to_nearest(grid_spacing, center), grid_spacing*5)\r\n points = ill....
[ "0.7410353", "0.71742636", "0.7014808", "0.7014808", "0.6995029", "0.69532037", "0.69336915", "0.67915857", "0.6648367", "0.65443754", "0.65413064", "0.6540949", "0.64974695", "0.64635015", "0.6388047", "0.6292354", "0.62815535", "0.6259406", "0.6251604", "0.6242718", "0.6240...
0.6215466
22
Determines whether points in certain segments are spaced too far apart (relative to median point spacing).
def findLongSections(self, version=2): median_distances = [self.medianDist(sec) for sec in self.sections.keys()] median_distances.sort() median_dist = median_distances[int(len(median_distances)/2)] long_sections, long_distances = [], [] if version == 1: for s in xrange(len(median_distances)): if median_distances[s] > 2*median_dist: long_sections.append(median_distances[s]) long_distances.append(s) print('Found %i sections with points spaced far apart' \ % len(long_sections)) elif version == 2: for sec in self.sections.keys(): if len(self.sections[sec]) < 3: long_sections.append(sec) d = [a - b for a,b in zip(self.sections[sec][0], \ self.sections[sec][-1])] long_distances.append(np.linalg.norm(d)) print(np.shape(long_distances)) # sections_distances = zip(long_sections, long_distances) return long_sections, long_distances, median_dist
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _filter_out_bad_segments(img1, seg1, img2, seg2):\n minval = tf.reduce_min(tf.reduce_sum(seg1, [0,1])*tf.reduce_sum(seg2, [0,1]))\n if minval < 0.5:\n warnings.warn(\"filtering bad segment\")\n return False\n else:\n return True", "def on_segment(point_p, point_q, point_r):\n ...
[ "0.6294915", "0.62770545", "0.62648475", "0.61977303", "0.6162508", "0.6156369", "0.6076896", "0.605726", "0.6015216", "0.59716797", "0.59055436", "0.5897351", "0.5878322", "0.5816097", "0.57989115", "0.57959044", "0.5737896", "0.57194346", "0.571571", "0.57036346", "0.566979...
0.0
-1
Interpolate the points and radii between sections that have too few points.
def interpPoints(self, interpRad=False): # print(np.shape(long_distances)) long_sections, long_distances, meddist = self.findLongSections() print('Long inter-point distances found: %i' % len(long_sections)) count = 0 for sec in long_sections: print('Supposed long section %i has %i nodes' \ % (sec, len(self.sections[sec]))) # set first and last points for interpolation pt0, pt1 = self.sections[sec][0], self.sections[sec][-1] # find number of points numpts = int(long_distances[long_sections.index(sec)]/meddist) Xs = np.linspace(pt0[0], pt1[0], numpts) Ys = np.linspace(pt0[1], pt1[1], numpts) Zs = np.linspace(pt0[2], pt1[2], numpts) newpts = np.dstack((Xs, Ys, Zs)) newpts = [newpts[0][i] for i in xrange(len(newpts[0]))] self.sections[sec] = newpts count = count + 1 rad0, rad1 = self.secRads[sec][0], self.secRads[sec][-1] # print(rad0, rad1) rads = np.linspace(rad0, rad1, numpts) # print(rads) self.secRads[sec] = rads long_sections, long_distances, meddist = self.findLongSections() print('Long sections still remaining: %i' % len(long_sections)) if len(long_sections) > 0: print(long_distances, meddist) return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __hinterpolate(self):\n \n # Temp. Data holders\n upperint = []\n lowerint = []\n \n # Dont like this, because here we insert points into the rawdata\n # But it creates consisitent results in the interpolation results\n if self.__upper[0][0] != 0: self.__...
[ "0.6352975", "0.62675714", "0.61275345", "0.5952627", "0.5907965", "0.58464473", "0.5796776", "0.578933", "0.57794523", "0.57792443", "0.5757177", "0.5749844", "0.57261205", "0.5726011", "0.56745857", "0.5649165", "0.56255597", "0.5618809", "0.5615824", "0.55846244", "0.55757...
0.6316739
1
Write a hoc file.
def writeHoc(self): print('Writing output file %s ...' % self.outFile) with open(self.outFile, 'w') as fOut: def createSection(secNum): fOut.write('create section_%i\n' %secNum) fOut.write('section_%i {\n' %secNum) fOut.write('pt3dclear()\n') for node in xrange(len(self.sections[secNum])): fOut.write('pt3dadd(%.6f, %.6f, %.6f, %.6f)\n' \ % (self.sections[secNum][node][0], self.sections[secNum][node][1], self.sections[secNum][node][2], self.secRads[secNum][node])) fOut.write('}\n') def createConnection(): for c in xrange(len(self.connections)): fOut.write('connect section_%i(1), section_%i(0)\n' \ % (self.connections[c][0],self.connections[c][1])) for sec in self.sections.keys(): createSection(sec) createConnection() return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_to_file(self, filename: str) -> None:", "def write(self, filename):\n pass", "def write(self, filename):\n pass", "def write(self, filename, data, hdr):\n pass", "def write(self, fname):\n pass", "def write_file(self):\n file = open(self.__file_path, 'w+')\n ...
[ "0.68592376", "0.674406", "0.674406", "0.656854", "0.6566108", "0.6498972", "0.6464658", "0.6464213", "0.6418377", "0.6384356", "0.63767195", "0.637185", "0.63714814", "0.63141376", "0.6300945", "0.63001466", "0.6275614", "0.62744516", "0.6243898", "0.6229664", "0.62277204", ...
0.5741435
90
Loads the data set provided in this repository and returns a list of Decks or FuzzyDecks. The deck list is sorted by archetype so the distance matrix is easier to visualize.
def load_data_set(hero_class: str, fuzzy: bool, filename: str = "data/Decks.json", debug: bool = False) \ -> Union[List[Deck], List[FuzzyDeck]]: if debug: print("### loading dataset...") with open(filename) as f: data = json.load(f) hero_classes = list(data["series"]["metadata"].keys()) if hero_class not in hero_classes and hero_class != "ALL": raise Exception("hero class <" + hero_class + "> not available. " "Consider using one class out of: " + ", ".join(hero_classes)) if debug: for cl in hero_classes: print("" + str(len(data["series"]["data"][cl])) + " played decks for hero class " + cl) played_decks = [] if hero_class == "ALL": for hero_class in hero_classes: for i, deck_data in enumerate(data["series"]["data"][hero_class]): if fuzzy: played_decks.append(FuzzyDeck(deck_data)) else: played_decks.append(Deck(deck_data)) else: for i, deck_data in enumerate(data["series"]["data"][hero_class]): if fuzzy: played_decks.append(FuzzyDeck(deck_data)) else: played_decks.append(Deck(deck_data)) # sort by cluster label for easier visualization of distance matrix played_decks = sorted(played_decks, key=lambda x: x.archetype[0]) return played_decks
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_decks(**options):\n graph = bonobo.Graph()\n\n csv_in = bonobo.noop\n\n graph.add_chain(csv_in, in_use_cards, _input=None)\n\n for deck in listdir('decks'):\n deck_path = join('decks', deck)\n if deck == '.gitignore':\n continue\n\n if isfile(deck_path):\n ...
[ "0.5819562", "0.5731613", "0.544646", "0.53726274", "0.5366942", "0.5298741", "0.5272846", "0.52254647", "0.5140814", "0.50974256", "0.50899714", "0.50865227", "0.5069896", "0.50649506", "0.5054334", "0.50400245", "0.5027452", "0.50256103", "0.50096786", "0.50049436", "0.4990...
0.648076
0
Calculates the distance matrix of a list of Deck or FuzzyDeck objects. Returns the vectorform distance vector.
def calculate_distance_matrix(played_decks: Union[List[FuzzyDeck], List[Deck]], measure: str): deck_data = np.array(played_decks).reshape(len(played_decks), 1) if measure == "jaccard": dist = pdist(deck_data, lambda u, v: u[0].jaccard_distance(v[0])) elif measure == "euclidean": dist = pdist(deck_data, lambda u, v: u[0].euclidean_distance(v[0])) else: raise ValueError("Unknown distance measure {}. ".format(measure) + "Please choose one of the following distance measures ['euclidean','jaccard']") return dist
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getDistanceMatrix(self):\n v = self.getVectors()\n vLis = v.keys()\n N = len(v.keys())\n D = np.zeros([N, N], dtype=np.float32)\n print(N)\n for i in range(N):\n print(\"%d/%d\" %(i, N))\n D[i, i] = 1\n for j in range(i + 1, N):\n ...
[ "0.6267775", "0.60056895", "0.59760046", "0.5957225", "0.5952635", "0.5888199", "0.5881407", "0.5870848", "0.58344764", "0.58344764", "0.5777284", "0.5726229", "0.5702075", "0.5700626", "0.5683218", "0.5678633", "0.56485", "0.55838466", "0.5582898", "0.55652297", "0.5557965",...
0.68410903
0
Calculates vmeasure, homogeneity, and completeness for each clustering algorithm stored in clustering_alg and adds it to each algorithms dictionary.
def eval_v_measure_homogeneity_completeness(clustering_alg: List, sdist_euclidean, sdist_jaccard, labels_true, debug: bool = False): for i, alg_dict in enumerate(clustering_alg): if "alg" in alg_dict: if alg_dict["distance"] == "euclidean": clustering = alg_dict["alg"].fit(sdist_euclidean) elif alg_dict["distance"] == "jaccard": clustering = alg_dict["alg"].fit(sdist_jaccard) else: raise ValueError("Unknown distance measure {}. ".format(alg_dict["distance"]) + "Please choose one of the following distance measures ['euclidean','jaccard']") labels_predicted = clustering.labels_ alg_dict["labels"] = labels_predicted else: labels_predicted = alg_dict["labels"] alg_dict["homogeneity"], alg_dict["completeness"], alg_dict["v-measure"] = \ homogeneity_completeness_v_measure(labels_true, labels_predicted) if debug: print("Alg: " + alg_dict["name"] + "; \t v-measure = " + str(alg_dict["v-measure"]))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def evaluate_clustering_methods(methods):\r\n results = {}\r\n for m in methods:\r\n res = results[m['name']] = {}\r\n prec = 3\r\n res['Adjusted Rand Score'] = round(sklearn.metrics.adjusted_rand_score(m['target'], m['clustering']),prec)\r\n res['Normalized Mutual Information'] =...
[ "0.6315484", "0.61245906", "0.5925279", "0.5822709", "0.57913584", "0.57853943", "0.57158273", "0.56992424", "0.5629585", "0.56262213", "0.55483156", "0.5493547", "0.5488877", "0.54682064", "0.5456372", "0.5410733", "0.5409758", "0.54085684", "0.53946304", "0.53820395", "0.53...
0.69420356
0
Calculates a clustering's contingency matrix for each clustering algorithm stored in the list clustering_alg and adds it to the dict.
def eval_cluster_contingency(clustering_alg: List, labels_true, sdist): for (alg_name, alg_dict) in clustering_alg: if "alg" in alg_dict: clustering = alg_dict["alg"].fit(sdist) labels_pred = clustering.labels_ alg_dict["labels"] = labels_pred else: labels_pred = alg_dict["labels"] pred_label_dict, new_labels = normalize_labels(labels_pred) alg_dict["cm"] = contingency_matrix(labels_true, new_labels)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_clustering_info(self, algorithm_type, clustering_parameters, clusterings = []):\n clustering_info = {}\n for i, running_parameters in enumerate(clustering_parameters):\n\n clustering_id = \"clustering_%04d\"%(self.current_clustering_id)\n self.current_clustering_id ...
[ "0.60130954", "0.5818192", "0.57543993", "0.5732739", "0.56639177", "0.56580245", "0.56447226", "0.560482", "0.552206", "0.551928", "0.54748267", "0.5463208", "0.5428008", "0.54264724", "0.5389764", "0.535151", "0.5327543", "0.53244734", "0.5321029", "0.5317272", "0.5313567",...
0.7477364
0
Calculates the mean distance and the sum of squared errors for each cluster and its related core and centroid. Always uses Jaccard distance.
def eval_mean_distance(played_decks, clustering_data: List, fuzzy: bool, debug: bool = False): for alg_dict in clustering_data: decks = np.array(played_decks) clusters = [] for label in set(alg_dict["labels"]): indices = np.where(alg_dict["labels"] == label) if fuzzy: clusters.append(FuzzyDeckCluster(decks[indices])) else: clusters.append(DeckCluster(decks[indices])) if fuzzy: clustering = FuzzyDeckClustering(clusters) else: clustering = DeckClustering(clusters) sum_of_squared_distances_centroid = 0 sum_of_squared_distances_core = 0 for cluster in clustering.deck_clusters: centroid = cluster.centroid() core = cluster.core() for deck in cluster.decks: sum_of_squared_distances_centroid += (deck.jaccard_distance(centroid))**2 sum_of_squared_distances_core += (deck.jaccard_distance(core))**2 alg_dict["sse_centroid"] = sum_of_squared_distances_centroid alg_dict["sse_core"] = sum_of_squared_distances_core if debug: print("Alg: " + alg_dict["name"] + "; \t sse = " + str(alg_dict["sse_centroid"])) print("Alg: " + alg_dict["name"] + "; \t sse = " + str(alg_dict["sse_core"]))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calcAvgDistances(centroids, clusters, class_header=\"Class\"):\n avg_distances = [0] * len(centroids)\n multiprocess_count = multiprocessing.cpu_count() # Find processor count\n for centroid_row_index, centroid_tuple in enumerate(centroids.iterrows()): # For each cluster\n wor...
[ "0.67511815", "0.667103", "0.65723747", "0.6478668", "0.62807804", "0.6273955", "0.622381", "0.6105332", "0.60754263", "0.6067694", "0.6046383", "0.603666", "0.60061574", "0.59977764", "0.5997736", "0.59847236", "0.5973263", "0.5969204", "0.5959918", "0.59587914", "0.59409744...
0.66500586
2
Change the labels from arbitrary numbers to the range [0, len(set(labels))]. Points that are in the same cluster will stay in the same cluster. Points from different clusters will remain in different clusters.
def normalize_labels(labels): new_labels = np.array([-1] * len(labels)) labels = np.array(labels) label_dict = dict() for i, label in enumerate(set(labels)): new_labels[np.where(labels == label)] = i label_dict[i] = label return label_dict, new_labels
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _relocate_clusters(self, cluster_labels):\n for cluster_label in range(self.k):\n if cluster_labels[cluster_label] is not None:\n # mean of the pixels assigned to cluster\n p_sum, p_count = np.asarray(\n cluster_labels[\n ...
[ "0.68851614", "0.6850147", "0.6721148", "0.6478987", "0.643922", "0.6345853", "0.6343787", "0.6329328", "0.62441224", "0.6120226", "0.6087101", "0.60843414", "0.60319865", "0.5999183", "0.59800136", "0.5965767", "0.5940681", "0.59308654", "0.592457", "0.5924074", "0.5922587",...
0.6535897
3
Modify the column name to make it Pythoncompatible as a field name
def normalize_col_name(col_name, used_column_names, is_relation): field_params = {} field_notes = [] new_name = col_name.lower() if new_name != col_name: field_notes.append('Field name made lowercase.') if is_relation: if new_name.endswith('_id'): new_name = new_name[:-3] else: field_params['db_column'] = col_name new_name, num_repl = re.subn(r'\W', '_', new_name) if num_repl > 0: field_notes.append('Field renamed to remove unsuitable characters.') if new_name.find(LOOKUP_SEP) >= 0: while new_name.find(LOOKUP_SEP) >= 0: new_name = new_name.replace(LOOKUP_SEP, '_') if col_name.lower().find(LOOKUP_SEP) >= 0: # Only add the comment if the double underscore was in the original # name field_notes.append( "Field renamed because it contained more than one '_' in a row." ) if new_name.startswith('_'): new_name = 'field%s' % new_name field_notes.append("Field renamed because it started with '_'.") if new_name.endswith('_'): new_name = '%sfield' % new_name field_notes.append("Field renamed because it ended with '_'.") if keyword.iskeyword(new_name): new_name += '_field' field_notes.append( 'Field renamed because it was a Python reserved word.') if new_name[0].isdigit(): new_name = 'number_%s' % new_name field_notes.append( "Field renamed because it wasn't a valid Python identifier.") if new_name in used_column_names: num = 0 while '%s_%d' % (new_name, num) in used_column_names: num += 1 new_name = '%s_%d' % (new_name, num) field_notes.append('Field renamed because of name conflict.') if col_name != new_name and field_notes: field_params['db_column'] = col_name return new_name, field_params, field_notes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _update_column_name(self, column, idx, old_name, name):\n dtype = self.dtype\n # Updating the names on the dtype should suffice\n dtype.names = dtype.names[:idx] + (name,) + dtype.names[idx + 1 :]", "def py_field_name(self, field):\n name = field.name\n name = as_identifier...
[ "0.7072573", "0.7007338", "0.6994978", "0.69579434", "0.68072045", "0.6802428", "0.6802428", "0.67907166", "0.6732477", "0.6600031", "0.65641886", "0.64931035", "0.64931035", "0.64816284", "0.6459289", "0.64281356", "0.63902634", "0.6334104", "0.6324554", "0.6324554", "0.6286...
0.7277575
0
Given the database connection, the table name, and the cursor row description, this routine will return the given field type name, as well as any additional keyword parameters and notes for the field.
def get_field_type(connection, table_name, row): field_params = OrderedDict() field_notes = [] is_geometry = False try: field_type = connection.introspection.get_field_type(row[1], row) except KeyError: field_type = 'TextField' field_notes.append('This field type is a guess.') # This is a hook for data_types_reverse to return a tuple of # (field_type, field_params_dict). if type(field_type) is tuple: field_type, new_params = field_type field_params.update(new_params) # Add max_length for all CharFields. if field_type == 'CharField' and row[3]: field_params['max_length'] = int(row[3]) if field_type == 'DecimalField': if row[4] is None or row[5] is None: field_notes.append( 'max_digits and decimal_places have been guessed, as this ' 'database handles decimal fields as float') field_params['max_digits'] = row[4] if row[4] is not None else 10 field_params['decimal_places'] = row[ 5] if row[5] is not None else 5 else: field_params['max_digits'] = row[4] field_params['decimal_places'] = row[5] if field_type == 'GeometryField': geo_col = row[0] # Getting a more specific field type and any additional parameters # from the `get_geometry_type` routine for the spatial backend. field_type, geo_params = connection.introspection.get_geometry_type( table_name, geo_col) field_params.update(geo_params) is_geometry = True return field_type, field_params, is_geometry # return getattr(models.fields, field_type), field_params
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_fields(self):\n if not self._cursor.description:\n return {}\n\n results = {}\n column = 0\n\n for des in self._cursor.description:\n fieldname = des[0]\n results[column] = fieldname\n column = column + 1\n\n return results", ...
[ "0.6303345", "0.6107471", "0.5995526", "0.5872125", "0.5803399", "0.5616249", "0.56076336", "0.5588405", "0.55621403", "0.55433136", "0.55343395", "0.5498443", "0.5464399", "0.5463323", "0.5434962", "0.53413516", "0.53411543", "0.53399295", "0.53027624", "0.52892405", "0.5286...
0.7295043
0
Return a sequence comprising the lines of code necessary to construct the inner Meta class for the model corresponding to the given database table name.
def get_meta(table_name, constraints, column_to_field_name): # unique_together = [] # for index, params in constraints.items(): # if params['unique']: # columns = params['columns'] # if len(columns) > 1: # we do not want to include the u"" or u'' prefix # so we build the string rather than interpolate the tuple # tup = '(' + ', '.join("'%s'" % column_to_field_name[c] for c in columns) + ')' # unique_together.append(tup) return type('Meta', (), dict(managed=False, db_table=table_name, app_label='layers')) # if unique_together: # tup = '(' + ', '.join(unique_together) + ',)' # meta += [" unique_together = %s" % tup] # return meta
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_meta_table_name(engine):\n class Model(engine.model):\n id = Column(UUID, hash_key=True)\n\n assert Model.Meta.table_name == \"Model\"\n\n class Other(engine.model):\n class Meta:\n table_name = \"table_name\"\n write_units = 3\n id = Column(UUID, hash_k...
[ "0.64038646", "0.6319936", "0.61308765", "0.58914566", "0.58779913", "0.5795165", "0.5719704", "0.5694968", "0.5619841", "0.56065655", "0.5591526", "0.55195546", "0.5389739", "0.5318234", "0.5304673", "0.5291211", "0.52884954", "0.5279363", "0.52785826", "0.52544487", "0.5232...
0.58967185
3
returns a tuple (Model, geometry_field, geometry_field_type) for a given table in given schema
def get_layer(schema, table_name): fn = '{}.{}'.format(schema, table_name) if fn not in LAYER_MODELS: LAYER_MODELS[fn] = inspect_table(schema, table_name) return LAYER_MODELS.get(fn)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_geometry_type(self, table_name, description):\n with self.connection.cursor() as cursor:\n cursor.execute(\n \"\"\"\n SELECT t.coord_dimension, t.srid, t.type FROM (\n SELECT * FROM geometry_columns\n UNION ALL\n ...
[ "0.6816969", "0.65374726", "0.62112284", "0.6156063", "0.5837291", "0.569644", "0.56083405", "0.55587506", "0.55330145", "0.547764", "0.5454867", "0.5441994", "0.54250515", "0.54212207", "0.5413206", "0.5390883", "0.5313759", "0.5281348", "0.52764404", "0.52503705", "0.521442...
0.54125166
15
Runs a single userprovided line as a REPL input.
def handle_line(line: str, stmt_index: int): fn_name = f'repl_{stmt_index}' module_text = f""" import std fn {fn_name}() -> () {{ {line} }} """ # For error reporting we use a helper that puts this into a fake filesystem # location. def make_fakefs_open(): fs = fake_filesystem.FakeFilesystem() fs.CreateFile(FILENAME, module_text) return fake_filesystem.FakeFileOpen(fs) importer = import_helpers.Importer() while True: try: fake_module = parser.Parser( scanner.Scanner(FILENAME, module_text), fn_name).parse_module() except span.PositionalError as e: parser_helpers.pprint_positional_error(e, fs_open=make_fakefs_open()) return # First attempt at type checking, we expect this may fail the first time # around and we'll substitute the real return type we observe. try: type_info = cpp_typecheck.check_module(fake_module, importer.cache, importer.additional_search_paths) except XlsTypeError as e: # We use nil as a placeholder, and swap it with the type that was expected # and retry once we determine what that should be. if e.rhs_type == concrete_type_mod.ConcreteType.NIL: module_text = module_text.replace(' -> ()', ' -> ' + str(e.lhs_type)) continue # Any other errors are likely real type errors in the code and we should # report them. parser_helpers.pprint_positional_error(e, fs_open=make_fakefs_open()) return # It type checked ok, and we can proceed. break # Interpret the line and print the result. # TODO(leary): 2020-06-20 No let bindings for the moment, just useful for # evaluating expressions -- could put them into the module scope as consts. interpreter = interpreter_mod.Interpreter( fake_module, type_info, importer.typecheck, import_cache=importer.cache, additional_search_paths=(), trace_all=False) result = interpreter.run_function(fn_name, args=(), symbolic_bindings=None) print(result) type_info.clear_type_info_refs_for_gc() return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _handle_stdin(self, line):\r\n return input(line.replace(STDIN_PROMPT, \"\"))", "def read_user_input(self):\n\n self.commandline = raw_input(\"Enter the string you want to parse\\n\")", "def do_prompt(self, line):\n self.prompt = line + ': '", "def on_user_input(self, line: str) -> s...
[ "0.695336", "0.66525394", "0.65766966", "0.6399153", "0.63881487", "0.63797593", "0.63621664", "0.6285082", "0.62843174", "0.6240477", "0.6193314", "0.6086187", "0.60828507", "0.6036487", "0.5988181", "0.59823424", "0.5970285", "0.59633726", "0.59236956", "0.5900732", "0.5900...
0.0
-1
Encodes user verification request using user profile ID as pub key.
def generate_authentication_code(user): salt = 'd9!1l@39#c3' expire_timestamp = time.time() + EXPIRE_TIME_LIMIT # Make a string which depends on restaurant id # Same encoding mechanism will be used in seerpod hardware composite_string = "%s%s%s" % (user.id, user.password, salt) str_hex = hashlib.md5(composite_string).hexdigest() decoded_str = str(user.owner_email_id) + str(user.id) + "_" + str(expire_timestamp) + "_" + str_hex # Encoded string will be a multiple line string, if it is greater # than maximum bin size of 76. Browser strips the newline character # in the url. encoded = base64.encodestring(decoded_str).strip().replace('\n', '') return encoded
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_verification_code(self, user_id, verify_type, secret):\n user = self.get(user_id, raise_error=True)\n code_hash = hmac.new(secret)\n code_hash.update(str(user_id))\n code_hash.update(str(user.user_name))\n code_hash.update(str(verify_type))\n return code_hash.hexdi...
[ "0.6182733", "0.5893101", "0.58795995", "0.5852252", "0.5845634", "0.5823406", "0.575619", "0.564391", "0.56402856", "0.5625334", "0.55854565", "0.55798405", "0.55779624", "0.55727375", "0.5564628", "0.5561092", "0.5531494", "0.55198497", "0.550899", "0.5470735", "0.5465826",...
0.5185567
38
Authenticate user based on code.
def authenticate_user(authentication_code): for suffix in ('', '=', '=='): attempt = authentication_code + suffix decoded = base64.decodestring(attempt) fields = decoded.split('_') email, user_id, time_stamp, str_hex = fields if time_stamp < time.time(): # Authentication Code Expired raise seerpod_exceptions.AuthenticationCodeExpired('Authentication code expired', response_data=authentication_code) user = None #business_contact_api.BusinessContacts().get_user_detail_from_email(email) if not user: continue if attempt == generate_authentication_code( user.id, time_stamp, user.owner_email_id, user.password): return user # Invalid authentication code raise seerpod_exceptions.InvalidAuthenticationCode('Invalid Authentication code', response_data=authentication_code)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def authenticate(self, request, **kwargs):\n\n self.request = request\n if not self.request:\n return None\n\n state = self.request.GET.get('state')\n code = self.request.GET.get('code')\n nonce = kwargs.pop('nonce', None)\n\n if not code or not state:\n ...
[ "0.66076124", "0.6604477", "0.6523267", "0.64544946", "0.6403558", "0.6359539", "0.6357345", "0.6290257", "0.6288636", "0.6280958", "0.6248023", "0.6243201", "0.6242323", "0.6237753", "0.6226705", "0.6226705", "0.62162906", "0.61871463", "0.6182399", "0.61742467", "0.6170313"...
0.70372474
0
Creates a new service client
def __init__(self, config, **kwargs): validate_config(config, signer=kwargs.get('signer')) if 'signer' in kwargs: signer = kwargs['signer'] else: signer = Signer( tenancy=config["tenancy"], user=config["user"], fingerprint=config["fingerprint"], private_key_file_location=config.get("key_file"), pass_phrase=get_config_value_or_default(config, "pass_phrase"), private_key_content=config.get("key_content") ) base_client_init_kwargs = { 'regional_client': True, 'service_endpoint': kwargs.get('service_endpoint'), 'timeout': kwargs.get('timeout'), 'base_path': '/20160918', 'skip_deserialization': kwargs.get('skip_deserialization', False) } self.base_client = BaseClient("identity", config, signer, identity_type_mapping, **base_client_init_kwargs) self.retry_strategy = kwargs.get('retry_strategy')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_client(service_name: str, config_name: str = None, **client_args):\n session = get_session(config_name)\n return session.client(service_name, **client_args)", "def create_client(self) -> None:\n pass", "def create(ctx, name, company, mail, age):\n client = Client(name,company,mail,age)\n...
[ "0.801748", "0.78244793", "0.72532517", "0.7216976", "0.71908116", "0.71908116", "0.70718634", "0.7055989", "0.69973916", "0.6996517", "0.6937248", "0.68635684", "0.68541193", "0.68492603", "0.6825555", "0.67868036", "0.66969323", "0.6665927", "0.6663688", "0.6585112", "0.658...
0.0
-1
Activates the specified MFA TOTP device for the user. Activation requires manual interaction with the Console.
def activate_mfa_totp_device(self, user_id, mfa_totp_device_id, mfa_totp_token, **kwargs): resource_path = "/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}/actions/activate" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "activate_mfa_totp_device got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "mfaTotpDeviceId": mfa_totp_device_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing), "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=mfa_totp_token, response_type="MfaTotpDeviceSummary") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=mfa_totp_token, response_type="MfaTotpDeviceSummary")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def activate_application_token(self, apptoken, temptoken) -> bool:\n await self.raw_request(\n self.URL_ACTIVATE.format(apptoken=apptoken, temptoken=temptoken)\n )\n return True", "def activate(request, uidb64, token):\n try:\n uid = force_text(urlsafe_base64_decod...
[ "0.60715777", "0.6028133", "0.59996897", "0.58769685", "0.5760404", "0.5734255", "0.5616806", "0.5603392", "0.5589512", "0.5500395", "0.5455279", "0.5448774", "0.5440349", "0.54232854", "0.5410598", "0.5386604", "0.5370613", "0.53615427", "0.5324096", "0.53227437", "0.5316876...
0.6753613
0
Adds the specified user to the specified group and returns a `UserGroupMembership` object with its own OCID. After you send your request, the new object's `lifecycleState` will temporarily be CREATING. Before using the object, first make sure its `lifecycleState` has changed to ACTIVE.
def add_user_to_group(self, add_user_to_group_details, **kwargs): resource_path = "/userGroupMemberships" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "add_user_to_group got unknown kwargs: {!r}".format(extra_kwargs)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, header_params=header_params, body=add_user_to_group_details, response_type="UserGroupMembership") else: return self.base_client.call_api( resource_path=resource_path, method=method, header_params=header_params, body=add_user_to_group_details, response_type="UserGroupMembership")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def addUserToGroup(self, user, group):\n return self.pm_getUserManager().addUserToGroup(self._unbox(user), self._unbox(group))", "def add_user(self, user):\n store = self.get('__store')\n members = Members(store.db)\n membership = members.first(group_id=self._id, user_id=user._id)\n ...
[ "0.74806035", "0.7402917", "0.73061603", "0.7215162", "0.720623", "0.7198249", "0.6962066", "0.6901342", "0.68765295", "0.6858431", "0.6735927", "0.6672501", "0.66710955", "0.6659128", "0.6647979", "0.65809935", "0.6571874", "0.646748", "0.6427779", "0.6386597", "0.62940246",...
0.71962476
6
Assembles tag defaults in the specified compartment and any parent compartments to determine the tags to apply. Tag defaults from parent compartments do not override tag defaults referencing the same tag in a compartment lower down the hierarchy. This set of tag defaults includes all tag defaults from the current compartment back to the root compartment.
def assemble_effective_tag_set(self, compartment_id, **kwargs): resource_path = "/tagDefaults/actions/assembleEffectiveTagSet" method = "GET" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "lifecycle_state" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "assemble_effective_tag_set got unknown kwargs: {!r}".format(extra_kwargs)) if 'lifecycle_state' in kwargs: lifecycle_state_allowed_values = ["ACTIVE"] if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values: raise ValueError( "Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values) ) query_params = { "compartmentId": compartment_id, "lifecycleState": kwargs.get("lifecycle_state", missing) } query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None} header_params = { "accept": "application/json", "content-type": "application/json" } retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[TagDefaultSummary]") else: return self.base_client.call_api( resource_path=resource_path, method=method, query_params=query_params, header_params=header_params, response_type="list[TagDefaultSummary]")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initDefaults(self):\n return _libsbml.Compartment_initDefaults(self)", "def initDefaults(self):\n return _libsbml.CompartmentGlyph_initDefaults(self)", "def set_defaults(self, compmgr=None):\n for section, default_options in self.defaults(compmgr).items():\n for name, value ...
[ "0.5848242", "0.5620025", "0.539464", "0.5353507", "0.53439975", "0.5028083", "0.49867433", "0.49008948", "0.48286456", "0.46795782", "0.46378452", "0.4624426", "0.46004072", "0.45709348", "0.45590082", "0.4541855", "0.44945678", "0.44693825", "0.4469151", "0.44664097", "0.44...
0.43321195
31
Moves the specified tag namespace to the specified compartment within the same tenancy. To move the tag namespace, you must have the manage tagnamespaces permission on both compartments. For more information about IAM policies, see `Details for IAM`__. Moving a tag namespace moves all the tag key definitions contained in the tag namespace.
def change_tag_namespace_compartment(self, tag_namespace_id, change_tag_namespace_compartment_detail, **kwargs): resource_path = "/tagNamespaces/{tagNamespaceId}/actions/changeCompartment" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "change_tag_namespace_compartment got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "tagNamespaceId": tag_namespace_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=change_tag_namespace_compartment_detail) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=change_tag_namespace_compartment_detail)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_namespace(self, doc, namespace):\r\n ns = u'{%s}' % namespace\r\n nsl = len(ns)\r\n for elem in doc.getiterator():\r\n if elem.tag.startswith(ns):\r\n elem.tag = elem.tag[nsl:]\r\n else:\r\n pass", "def remove_namespace(doc, name...
[ "0.55153644", "0.5485366", "0.5347061", "0.52107394", "0.52107394", "0.5199132", "0.5165057", "0.5079889", "0.5022554", "0.49942735", "0.49675435", "0.49604434", "0.49384886", "0.48304433", "0.4796919", "0.4729666", "0.46716085", "0.46684968", "0.4638175", "0.46381432", "0.46...
0.61622286
0
Creates a new auth token for the specified user. For information about what auth tokens are for, see `Managing User Credentials`__. You must specify a description for the auth token (although it can be an empty string). It does not have to be unique, and you can change it anytime with
def create_auth_token(self, create_auth_token_details, user_id, **kwargs): resource_path = "/users/{userId}/authTokens" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "create_auth_token got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=create_auth_token_details, response_type="AuthToken") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=create_auth_token_details, response_type="AuthToken")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_auth_token(self, user=None):\n token, created = Token.objects.get_or_create(user=user)\n return token", "def create_token(user, title, expiration=_default_expiration_duration_opt):\n if expiration == _default_expiration_duration_opt:\n duration = _default_expiration_duration()...
[ "0.7601047", "0.7413485", "0.7076036", "0.70279247", "0.70279247", "0.70279247", "0.6908097", "0.68845016", "0.683764", "0.68366253", "0.6822452", "0.6817627", "0.68121517", "0.6806323", "0.67799586", "0.6771342", "0.676491", "0.67617136", "0.6744228", "0.67380023", "0.668147...
0.70019704
6
Creates a new compartment in the specified compartment.
def create_compartment(self, create_compartment_details, **kwargs): resource_path = "/compartments" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "create_compartment got unknown kwargs: {!r}".format(extra_kwargs)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, header_params=header_params, body=create_compartment_details, response_type="Compartment") else: return self.base_client.call_api( resource_path=resource_path, method=method, header_params=header_params, body=create_compartment_details, response_type="Compartment")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createCompartment(self):\n return _libsbml.Model_createCompartment(self)", "def addCompartment(self, vol=1, comp_id=\"\"):\n\n c1 = self.model.createCompartment()\n self.check(c1, \"create compartment\")\n if len(comp_id) == 0:\n comp_id = \"c\" + str(self.model.getNumC...
[ "0.736534", "0.7325312", "0.66521376", "0.6403803", "0.6099133", "0.60763836", "0.6029606", "0.59636647", "0.5960595", "0.59251946", "0.57609355", "0.57137877", "0.5665697", "0.563881", "0.55801606", "0.5452205", "0.54070544", "0.53563625", "0.5342132", "0.5272799", "0.522575...
0.6708804
2
Creates a new secret key for the specified user. Secret keys are used for authentication with the Object Storage Service's Amazon S3 compatible API. For information, see `Managing User Credentials`__. You must specify a description for the secret key (although it can be an empty string). It does not have to be unique, and you can change it anytime with
def create_customer_secret_key(self, create_customer_secret_key_details, user_id, **kwargs): resource_path = "/users/{userId}/customerSecretKeys" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "create_customer_secret_key got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=create_customer_secret_key_details, response_type="CustomerSecretKey") else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params, body=create_customer_secret_key_details, response_type="CustomerSecretKey")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_access_key(self, user_name=None):\r\n params = {'UserName' : user_name}\r\n return self.get_response('CreateAccessKey', params)", "def create_key(iam_username):\n\n try:\n response = iam.create_access_key(UserName=iam_username)\n access_key = response[\"AccessKey\"][\"Ac...
[ "0.6538113", "0.6313658", "0.6207733", "0.61237496", "0.60848606", "0.6013739", "0.6008015", "0.58083314", "0.5791427", "0.56150365", "0.5601213", "0.55981004", "0.5564056", "0.55623674", "0.5552964", "0.5467448", "0.54615223", "0.545093", "0.5446797", "0.5376637", "0.5359139...
0.62441874
2
Creates a new dynamic group in your tenancy. You must specify your tenancy's OCID as the compartment ID in the request object (remember that the tenancy is simply the root compartment). Notice that IAM resources (users, groups, compartments, and some policies) reside within the tenancy itself, unlike cloud resources such as compute instances, which typically reside within compartments inside the tenancy. For information about OCIDs, see `Resource Identifiers`__. You must also specify a name for the dynamic group, which must be unique across all dynamic groups in your tenancy, and cannot be changed. Note that this name has to be also unique across all groups in your tenancy. You can use this name or the OCID when writing policies that apply to the dynamic group. For more information about policies, see `How Policies Work`__. You must also specify a description for the dynamic group (although it can be an empty string). It does not
def create_dynamic_group(self, create_dynamic_group_details, **kwargs): resource_path = "/dynamicGroups" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "create_dynamic_group got unknown kwargs: {!r}".format(extra_kwargs)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, header_params=header_params, body=create_dynamic_group_details, response_type="DynamicGroup") else: return self.base_client.call_api( resource_path=resource_path, method=method, header_params=header_params, body=create_dynamic_group_details, response_type="DynamicGroup")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_group():\n groupname = request.get_json().get(\"name\")\n description = request.get_json().get(\"description\")\n grp = admin.create_group(current_app.scoped_session(), groupname, description)\n if grp:\n response = admin.get_group_info(current_app.scoped_session(), groupname)\n el...
[ "0.7186179", "0.71604204", "0.7018895", "0.679185", "0.6695333", "0.661491", "0.6580392", "0.6452021", "0.64109707", "0.64004433", "0.6369519", "0.63523793", "0.6294868", "0.628068", "0.6277388", "0.62702346", "0.6215326", "0.6186225", "0.61719334", "0.6139862", "0.61309004",...
0.71613544
1
Creates a new group in your tenancy. You must specify your tenancy's OCID as the compartment ID in the request object (remember that the tenancy is simply the root compartment). Notice that IAM resources (users, groups, compartments, and some policies) reside within the tenancy itself, unlike cloud resources such as compute instances, which typically reside within compartments inside the tenancy. For information about OCIDs, see `Resource Identifiers`__. You must also specify a name for the group, which must be unique across all groups in your tenancy and cannot be changed. You can use this name or the OCID when writing policies that apply to the group. For more information about policies, see `How Policies Work`__. You must also specify a description for the group (although it can be an empty string). It does not
def create_group(self, create_group_details, **kwargs): resource_path = "/groups" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "create_group got unknown kwargs: {!r}".format(extra_kwargs)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, header_params=header_params, body=create_group_details, response_type="Group") else: return self.base_client.call_api( resource_path=resource_path, method=method, header_params=header_params, body=create_group_details, response_type="Group")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_group(group_id, group_name):\n\n kwargs = config.DEFAULT_REST_KWARGS\n kwargs[\"data\"] = {\"id\": group_id, \"name\": group_name}\n http_response = call_rest_api(\"/identities/groups/\", \"post\", **kwargs)\n if http_response.status_code != 201: # 201 = 'new group created'\n raise V...
[ "0.7795205", "0.76985514", "0.75594074", "0.7485939", "0.7361224", "0.7275385", "0.7226914", "0.7171012", "0.7022135", "0.7012331", "0.70026654", "0.6958713", "0.6953057", "0.6946662", "0.6930038", "0.68761617", "0.68680024", "0.6829689", "0.6791976", "0.67673075", "0.6745819...
0.6698751
23
Creates a new identity provider in your tenancy. For more information, see `Identity Providers and Federation`__. You must specify your tenancy's OCID as the compartment ID in the request object. Remember that the tenancy is simply the root compartment. For information about OCIDs, see `Resource Identifiers`__. You must also specify a name for the `IdentityProvider`, which must be unique across all `IdentityProvider` objects in your tenancy and cannot be changed. You must also specify a description for the `IdentityProvider` (although it can be an empty string). It does not have to be unique, and you can change it anytime with
def create_identity_provider(self, create_identity_provider_details, **kwargs): resource_path = "/identityProviders" method = "POST" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "opc_retry_token" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "create_identity_provider got unknown kwargs: {!r}".format(extra_kwargs)) header_params = { "accept": "application/json", "content-type": "application/json", "opc-retry-token": kwargs.get("opc_retry_token", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: if not isinstance(retry_strategy, retry.NoneRetryStrategy): self.base_client.add_opc_retry_token_if_needed(header_params) return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, header_params=header_params, body=create_identity_provider_details, response_type="IdentityProvider") else: return self.base_client.call_api( resource_path=resource_path, method=method, header_params=header_params, body=create_identity_provider_details, response_type="IdentityProvider")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_identity_provider(module, sdk, cloud, name):\n\n if module.check_mode:\n return True, None\n\n description = module.params.get('description')\n enabled = module.params.get('enabled')\n domain_id = module.params.get('domain_id')\n remote_ids = module.params.get('remote_ids')\n\n ...
[ "0.6144289", "0.5990092", "0.5924737", "0.5494253", "0.5485202", "0.53507775", "0.53166866", "0.51905113", "0.5181825", "0.51246256", "0.5108798", "0.51040316", "0.5087071", "0.50794953", "0.50547856", "0.50211", "0.50116605", "0.5002519", "0.4999784", "0.49900097", "0.497556...
0.61856276
0
Creates a single mapping between an IdP group and an IAM Service
def create_idp_group_mapping(self, create_idp_group_mapping_details, identity_provider_id, **kwargs):
    """POST a new group mapping under the given identity provider and return the IdpGroupMapping."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_idp_group_mapping got unknown kwargs: {!r}".format(extra_kwargs))

    # Validate the path parameter: it must be a non-blank, non-missing value.
    path_params = {name: value
                   for (name, value) in six.iteritems({"identityProviderId": identity_provider_id})
                   if value is not missing}
    for (name, value) in six.iteritems(path_params):
        if value is None or (isinstance(value, six.string_types) and len(value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(name))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/identityProviders/{identityProviderId}/groupMappings",
        method="POST",
        path_params=path_params,
        header_params=header_params,
        body=create_idp_group_mapping_details,
        response_type="IdpGroupMapping")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_api_mapping(self, ApiId: str, DomainName: str, Stage: str, ApiMappingKey: str = None) -> Dict:\n pass", "def get_service_mapping():\r\n # Get all Service types:\r\n all_service_type = requests.get(base_url + 'services/v2/service_types', headers=headers3).json()\r\n # Make Dict of servi...
[ "0.5638617", "0.535589", "0.52390397", "0.5175165", "0.5099908", "0.50906384", "0.5057816", "0.5040309", "0.50400156", "0.5031047", "0.5025609", "0.49890697", "0.49736708", "0.49677986", "0.49640617", "0.49320868", "0.49188626", "0.4918003", "0.4915699", "0.48859853", "0.4866...
0.5351723
2
Creates a new MFA TOTP device for the user. A user can have one MFA TOTP device.
def create_mfa_totp_device(self, user_id, **kwargs):
    """POST to /users/{userId}/mfaTotpDevices and return the new MfaTotpDevice."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_mfa_totp_device got unknown kwargs: {!r}".format(extra_kwargs))

    # Validate the path parameter: it must be a non-blank, non-missing value.
    path_params = {name: value
                   for (name, value) in six.iteritems({"userId": user_id})
                   if value is not missing}
    for (name, value) in six.iteritems(path_params):
        if value is None or (isinstance(value, six.string_types) and len(value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(name))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/users/{userId}/mfaTotpDevices",
        method="POST",
        path_params=path_params,
        header_params=header_params,
        response_type="MfaTotpDevice")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def activate_mfa_totp_device(self, user_id, mfa_totp_device_id, mfa_totp_token, **kwargs):\n resource_path = \"/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}/actions/activate\"\n method = \"POST\"\n\n # Don't accept unknown kwargs\n expected_kwargs = [\n \"retry_strategy\",...
[ "0.5845079", "0.58070564", "0.57539624", "0.56343126", "0.5609716", "0.55635047", "0.5540228", "0.5524604", "0.552029", "0.552029", "0.552029", "0.5508349", "0.5505848", "0.5504013", "0.54644525", "0.5441662", "0.53983897", "0.53840804", "0.5339161", "0.5339142", "0.5335467",...
0.7277988
0
Creates a new network source in your tenancy. You must specify your tenancy's OCID as the compartment ID in the request object (remember that the tenancy is simply the root compartment). Notice that IAM resources (users, groups, compartments, and some policies) reside within the tenancy itself, unlike cloud resources such as compute instances, which typically reside within compartments inside the tenancy. For information about OCIDs, see `Resource Identifiers`__. You must also specify a name for the network source, which must be unique across all network sources in your tenancy, and cannot be changed. You can use this name or the OCID when writing policies that apply to the network source. For more information about policies, see `How Policies Work`__. You must also specify a description for the network source (although it can be an empty string). It does not
def create_network_source(self, create_network_source_details, **kwargs):
    """POST the given details to /networkSources and return the created NetworkSources resource."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_network_source got unknown kwargs: {!r}".format(extra_kwargs))

    # Drop any header whose value was never supplied.
    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/networkSources",
        method="POST",
        header_params=header_params,
        body=create_network_source_details,
        response_type="NetworkSources")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def network_create(request, **kwargs):\n LOG.debug(\"network_create(): kwargs = %s\", kwargs)\n if 'tenant_id' not in kwargs:\n kwargs['tenant_id'] = request.user.project_id\n body = {'network': kwargs}\n network = neutronclient(request).create_network(body=body).get('network')\n return Netwo...
[ "0.65968263", "0.64328766", "0.63080657", "0.60401773", "0.585405", "0.5773544", "0.5750441", "0.5715569", "0.569809", "0.55888134", "0.5588614", "0.5553594", "0.54480547", "0.5373371", "0.5347243", "0.53328407", "0.5326178", "0.53198993", "0.5311947", "0.52965164", "0.529295...
0.6830833
0
Creates Oauth token for the user
def create_o_auth_client_credential(self, user_id, create_o_auth2_client_credential_details, **kwargs):
    """POST new OAuth2 client credential details for a user and return the OAuth2ClientCredential."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_o_auth_client_credential got unknown kwargs: {!r}".format(extra_kwargs))

    # Validate the path parameter: it must be a non-blank, non-missing value.
    path_params = {name: value
                   for (name, value) in six.iteritems({"userId": user_id})
                   if value is not missing}
    for (name, value) in six.iteritems(path_params):
        if value is None or (isinstance(value, six.string_types) and len(value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(name))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/users/{userId}/oauth2ClientCredentials",
        method="POST",
        path_params=path_params,
        header_params=header_params,
        body=create_o_auth2_client_credential_details,
        response_type="OAuth2ClientCredential")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_auth_token(self, user=None):\n token, created = Token.objects.get_or_create(user=user)\n return token", "def token_auth(self):\n self.client = APIClient()\n self.user = User.objects.create_user(username='testuser', email='test@test.com', password='testpassword')\n s...
[ "0.7509521", "0.7463173", "0.7187646", "0.71259236", "0.70469594", "0.7042824", "0.70313114", "0.6959093", "0.6959093", "0.6959093", "0.6947535", "0.6934357", "0.6917978", "0.6896815", "0.68936986", "0.6844643", "0.68159", "0.6812227", "0.6790298", "0.67814326", "0.675172", ...
0.0
-1
Creates a new Console one-time password for the specified user. For more information about user credentials, see `User Credentials`__. Use this operation after creating a new user, or if a user forgets their password. The new one-time password is returned to you in the response, and you must securely deliver it to the user. They'll be prompted to change this password the next time they sign in to the Console. If they don't change it within 7 days, the password will expire and you'll need to create a new one-time password for the user.
def create_or_reset_ui_password(self, user_id, **kwargs):
    """POST to /users/{userId}/uiPassword and return the resulting UIPassword."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_or_reset_ui_password got unknown kwargs: {!r}".format(extra_kwargs))

    # Validate the path parameter: it must be a non-blank, non-missing value.
    path_params = {name: value
                   for (name, value) in six.iteritems({"userId": user_id})
                   if value is not missing}
    for (name, value) in six.iteritems(path_params):
        if value is None or (isinstance(value, six.string_types) and len(value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(name))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/users/{userId}/uiPassword",
        method="POST",
        path_params=path_params,
        header_params=header_params,
        response_type="UIPassword")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(self, user):\r\n\r\n if not (PasswordHistory.is_student_password_reuse_restricted() or\r\n PasswordHistory.is_staff_password_reuse_restricted() or\r\n PasswordHistory.is_password_reset_frequency_restricted() or\r\n PasswordHistory.is_staff_forced_passw...
[ "0.66686237", "0.66496706", "0.6624422", "0.64673924", "0.6430282", "0.6305761", "0.62900466", "0.6217141", "0.6100756", "0.6072967", "0.60237443", "0.6004952", "0.5996487", "0.5996053", "0.5969086", "0.5958626", "0.5951579", "0.59395665", "0.5936133", "0.59078705", "0.587925...
0.54996353
70
Creates a new policy in the specified compartment (either the tenancy or another of your compartments). If you're new to policies, see `Getting Started with Policies`__. You must specify a name for the policy, which must be unique across all policies in your tenancy and cannot be changed. You must also specify a description for the policy (although it can be an empty string). It does not
def create_policy(self, create_policy_details, **kwargs):
    """POST the given details to /policies and return the created Policy."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_policy got unknown kwargs: {!r}".format(extra_kwargs))

    # Drop any header whose value was never supplied.
    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/policies",
        method="POST",
        header_params=header_params,
        body=create_policy_details,
        response_type="Policy")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add(self, policy_name, data):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n logging.info(\"Adding the policy: %s\", address)\n payload = json.dumps({\"policy\": data})\n response = self.vault.request...
[ "0.69018525", "0.6894368", "0.65139604", "0.6491852", "0.6477794", "0.63088816", "0.6284742", "0.62571865", "0.62571806", "0.6177285", "0.6173109", "0.6170149", "0.6067969", "0.60238564", "0.59549844", "0.5883088", "0.58814156", "0.58473706", "0.58184075", "0.5796063", "0.577...
0.600187
14
Creates a subscription to a region for a tenancy.
def create_region_subscription(self, create_region_subscription_details, tenancy_id, **kwargs):
    """POST a new region subscription for the tenancy and return the RegionSubscription."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_region_subscription got unknown kwargs: {!r}".format(extra_kwargs))

    # Validate the path parameter: it must be a non-blank, non-missing value.
    path_params = {name: value
                   for (name, value) in six.iteritems({"tenancyId": tenancy_id})
                   if value is not missing}
    for (name, value) in six.iteritems(path_params):
        if value is None or (isinstance(value, six.string_types) and len(value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(name))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/tenancies/{tenancyId}/regionSubscriptions",
        method="POST",
        path_params=path_params,
        header_params=header_params,
        body=create_region_subscription_details,
        response_type="RegionSubscription")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_subscription(self,\n body):\n\n return super().new_api_call_builder.request(\n RequestBuilder().server('default')\n .path('/v2/subscriptions')\n .http_method(HttpMethodEnum.POST)\n .header_param(Parameter()\n ...
[ "0.61023337", "0.6050946", "0.60171896", "0.59941614", "0.5989991", "0.5985926", "0.59581876", "0.59402776", "0.5903215", "0.5853864", "0.5773465", "0.57645184", "0.56852347", "0.5659827", "0.56107926", "0.56008047", "0.55303335", "0.55104107", "0.5472032", "0.5425874", "0.54...
0.74824893
0
Creates a new SMTP credential for the specified user. An SMTP credential has an SMTP user name and an SMTP password. You must specify a description for the SMTP credential (although it can be an empty string). It does not have to be unique, and you can change it anytime with
def create_smtp_credential(self, create_smtp_credential_details, user_id, **kwargs):
    """POST new SMTP credential details for a user and return the SmtpCredential."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_smtp_credential got unknown kwargs: {!r}".format(extra_kwargs))

    # Validate the path parameter: it must be a non-blank, non-missing value.
    path_params = {name: value
                   for (name, value) in six.iteritems({"userId": user_id})
                   if value is not missing}
    for (name, value) in six.iteritems(path_params):
        if value is None or (isinstance(value, six.string_types) and len(value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(name))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/users/{userId}/smtpCredentials",
        method="POST",
        path_params=path_params,
        header_params=header_params,
        body=create_smtp_credential_details,
        response_type="SmtpCredential")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_new_credential(account,userName,password):\n new_credential = Credentials(account,userName,password)\n return new_credential", "def CreateNewSmtpUser(s):\n payload = ['adduser %s %s\\n' % (FLAGS.exploit_user, FLAGS.exploit_password),\n 'quit\\n']\n SendPayload(s, payload)\n ...
[ "0.6836732", "0.6782684", "0.66625166", "0.6148272", "0.60939145", "0.59752667", "0.59635276", "0.59110373", "0.5898103", "0.58977437", "0.5887567", "0.58812404", "0.5815839", "0.5802219", "0.57664925", "0.57609296", "0.57314557", "0.57081306", "0.56840855", "0.56605315", "0....
0.72970504
0
Creates a new tag in the specified tag namespace. The tag requires either the OCID or the name of the tag namespace that will contain this tag definition. You must specify a name for the tag, which must be unique across all tags in the tag namespace and cannot be changed. The name can contain any ASCII character except the space (_) or period (.) characters. Names are case insensitive. That means, for example, \"myTag\" and \"mytag\" are not allowed in the same namespace. If you specify a name that's already in use in the tag namespace, a 409 error is returned. The tag must have a description. It does not have to be unique, and you can change it with
def create_tag(self, tag_namespace_id, create_tag_details, **kwargs):
    """POST a new tag definition into the given tag namespace and return the Tag."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_tag got unknown kwargs: {!r}".format(extra_kwargs))

    # Validate the path parameter: it must be a non-blank, non-missing value.
    path_params = {name: value
                   for (name, value) in six.iteritems({"tagNamespaceId": tag_namespace_id})
                   if value is not missing}
    for (name, value) in six.iteritems(path_params):
        if value is None or (isinstance(value, six.string_types) and len(value.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(name))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/tagNamespaces/{tagNamespaceId}/tags",
        method="POST",
        path_params=path_params,
        header_params=header_params,
        body=create_tag_details,
        response_type="Tag")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(self, name, tag):\n\n\t\turl_json = urllib.urlencode({\"name\": name, \"tag\": tag})\n\t\treturn self._create(\"/tag?json_hash=%s\" % url_json, \"tag\")", "def create_tag(self, session, tags):\n self._tag(session.put, tags=tags, session=session)", "def create_tag(self, entry_name, tag):\n ...
[ "0.668044", "0.64789695", "0.6425235", "0.63370335", "0.62885815", "0.6265026", "0.6156292", "0.61304843", "0.5985835", "0.5902972", "0.5860836", "0.5857317", "0.58103025", "0.5736905", "0.57278633", "0.571501", "0.5682197", "0.56273866", "0.5566556", "0.5561892", "0.5553828"...
0.7148086
0
Creates a new tag default in the specified compartment for the specified tag definition. If you specify that a value is required, a value is set during resource creation (either by the user creating the resource or another tag default). If no value is set, resource creation is blocked. If the `isRequired` flag is set to \"true\", the value is set during resource creation. If the `isRequired` flag is set to \"false\", the value you enter is set during resource creation.
def create_tag_default(self, create_tag_default_details, **kwargs):
    """POST the given details to /tagDefaults and return the created TagDefault.

    Unlike the sibling create_* calls, this one also accepts an optional
    ``opc_request_id`` kwarg that is forwarded as the ``opc-request-id`` header.
    """
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token",
        "opc_request_id"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_tag_default got unknown kwargs: {!r}".format(extra_kwargs))

    # Drop any header whose value was never supplied.
    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing),
        "opc-request-id": kwargs.get("opc_request_id", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/tagDefaults",
        method="POST",
        header_params=header_params,
        body=create_tag_default_details,
        response_type="TagDefault")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Option(name: str, value: Union[str, int], default: Optional[bool] = None) -> Dict:\n doc = {'name': name, 'value': value}\n if default is not None:\n doc['isDefault'] = default\n return doc", "def register_option_pair(key, default_value):\n\n _OPTION_TEMPLATE[key] = default_value", "def ...
[ "0.5894807", "0.55696887", "0.548009", "0.52427024", "0.5172015", "0.5163364", "0.5069004", "0.50466603", "0.50242513", "0.5003325", "0.4976948", "0.49649096", "0.49576932", "0.49290386", "0.4914476", "0.49040148", "0.49038035", "0.48990166", "0.48935652", "0.48932627", "0.48...
0.64231205
0
Creates a new tag namespace in the specified compartment. You must specify the compartment ID in the request object (remember that the tenancy is simply the root compartment). You must also specify a name for the namespace, which must be unique across all namespaces in your tenancy and cannot be changed. The name can contain any ASCII character except the space (_) or period (.). Names are case insensitive. That means, for example, \"myNamespace\" and \"mynamespace\" are not allowed in the same tenancy. Once you created a namespace, you cannot change the name. If you specify a name that's already in use in the tenancy, a 409 error is returned. You must also specify a description for the namespace. It does not have to be unique, and you can change it with
def create_tag_namespace(self, create_tag_namespace_details, **kwargs):
    """POST the given details to /tagNamespaces and return the created TagNamespace."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_tag_namespace got unknown kwargs: {!r}".format(extra_kwargs))

    # Drop any header whose value was never supplied.
    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/tagNamespaces",
        method="POST",
        header_params=header_params,
        body=create_tag_namespace_details,
        response_type="TagNamespace")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post_namespace_create(self, resource_dict):\n pass", "def create_namespace(node, namespace, delete_before_create=True):\n if delete_before_create:\n Namespaces.delete_namespace(node, namespace)\n\n cmd = f\"ip netns add {namespace}\"\n exec_cmd_no_error(node, cmd, sudo=...
[ "0.6208053", "0.61728823", "0.61027914", "0.61002004", "0.59118104", "0.5871321", "0.58043087", "0.5771377", "0.5746048", "0.5731766", "0.5726167", "0.57059276", "0.5678216", "0.55117524", "0.54968196", "0.54622465", "0.54276574", "0.5421414", "0.54114", "0.5391365", "0.53783...
0.656771
0
Creates a new user in your tenancy. For conceptual information about users, your tenancy, and other IAM Service components, see `Overview of the IAM Service`__. You must specify your tenancy's OCID as the compartment ID in the request object (remember that the tenancy is simply the root compartment). Notice that IAM resources (users, groups, compartments, and some policies) reside within the tenancy itself, unlike cloud resources such as compute instances, which typically reside within compartments inside the tenancy. For information about OCIDs, see `Resource Identifiers`__. You must also specify a name for the user, which must be unique across all users in your tenancy
def create_user(self, create_user_details, **kwargs):
    """POST the given details to /users and return the created User."""
    expected_kwargs = [
        "retry_strategy",
        "opc_retry_token"
    ]
    extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "create_user got unknown kwargs: {!r}".format(extra_kwargs))

    # Drop any header whose value was never supplied.
    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {name: value for (name, value) in six.iteritems(header_params)
                     if value is not missing and value is not None}

    call_kwargs = dict(
        resource_path="/users",
        method="POST",
        header_params=header_params,
        body=create_user_details,
        response_type="User")

    # A per-call retry strategy takes precedence over the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy
    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create():\n api_request = apireq.APIRequest(request, 'client_schema')\n if api_request.is_invalid():\n return api_request.error_text, 400\n return user_management.create_user(api_json['username'])", "def create_user(self, **kwargs):\n\n user = self.user_model(**self._prepare_create_use...
[ "0.77350646", "0.7719065", "0.7699377", "0.7653624", "0.76405936", "0.7613418", "0.7483702", "0.7418345", "0.7319665", "0.73193854", "0.7315282", "0.7287517", "0.7279548", "0.7276206", "0.72723275", "0.72554487", "0.724378", "0.72345245", "0.718245", "0.71740806", "0.71527773...
0.0
-1
Deletes the specified API signing key for the specified user. Every user has permission to use this operation to delete a key for their own user ID. An administrator in your organization does not need to write a policy to give users this ability. To compare, administrators who have permission to the tenancy can use this operation to delete a key for any user, including themselves.
def delete_api_key(self, user_id, fingerprint, **kwargs): resource_path = "/users/{userId}/apiKeys/{fingerprint}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_api_key got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "fingerprint": fingerprint } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_ssh_key(self, user_id, key_id):\n\n _gu = self.get_user(user_id)\n if _gu is None:\n return None\n\n # build URL and make request\n return self._delete('/users/{0}/keys/{1}'.format(_gu['id'], key_id))", "def Delete(self, user, key):\n return self.Remove(us...
[ "0.70384455", "0.6984856", "0.69313693", "0.66379356", "0.663607", "0.65839124", "0.6564669", "0.656256", "0.6548626", "0.65405446", "0.6510242", "0.64997286", "0.6473384", "0.646185", "0.64603364", "0.6452552", "0.64474607", "0.6435671", "0.6400676", "0.63887805", "0.6383552...
0.66472024
3
Deletes the specified auth token for the specified user.
def delete_auth_token(self, user_id, auth_token_id, **kwargs): resource_path = "/users/{userId}/authTokens/{authTokenId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_auth_token got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "authTokenId": auth_token_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete(self):\n\n user_id = get_jwt_identity()\n user = user_crud.get(user_id)\n if not user:\n abort(404, message=\"User not Found\")\n all_tokens = auth_crud.get_user_tokens(user_id)\n tokens = [token.to_dict() for token in all_tokens]\n for token in token...
[ "0.7637971", "0.7298944", "0.71500826", "0.71234906", "0.71165484", "0.71092767", "0.69556636", "0.6910611", "0.6849392", "0.68058825", "0.6793231", "0.6755561", "0.6754695", "0.6745995", "0.6745789", "0.6745456", "0.67416966", "0.66653407", "0.6662869", "0.6655789", "0.66440...
0.6980072
6
Deletes the specified compartment. The compartment must be empty.
def delete_compartment(self, compartment_id, **kwargs): resource_path = "/compartments/{compartmentId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_compartment got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "compartmentId": compartment_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def removeCompartment(self, *args):\n return _libsbml.Model_removeCompartment(self, *args)", "def removeCompartmentReference(self, *args):\n return _libsbml.MultiCompartmentPlugin_removeCompartmentReference(self, *args)", "def delcomponent(self,\n context=[],\n componentid=N...
[ "0.6996225", "0.5918634", "0.5823131", "0.573744", "0.57213587", "0.571788", "0.5709205", "0.56610256", "0.56202364", "0.55895793", "0.5515105", "0.5480321", "0.5393466", "0.5318188", "0.5316087", "0.53012276", "0.5280208", "0.527711", "0.527472", "0.52587646", "0.5253273", ...
0.6579754
1
Deletes the specified secret key for the specified user.
def delete_customer_secret_key(self, user_id, customer_secret_key_id, **kwargs): resource_path = "/users/{userId}/customerSecretKeys/{customerSecretKeyId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_customer_secret_key got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "customerSecretKeyId": customer_secret_key_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_ssh_key(self, user_id, key_id):\n\n _gu = self.get_user(user_id)\n if _gu is None:\n return None\n\n # build URL and make request\n return self._delete('/users/{0}/keys/{1}'.format(_gu['id'], key_id))", "def Delete(self, user, key):\n return self.Remove(us...
[ "0.7391871", "0.73518765", "0.6827607", "0.6825235", "0.67515635", "0.6722836", "0.6718346", "0.66737527", "0.66446835", "0.6612921", "0.6582571", "0.6504976", "0.6503876", "0.64849615", "0.6463512", "0.64591646", "0.6441352", "0.64239824", "0.6421343", "0.64208347", "0.64044...
0.6096357
45
Deletes the specified dynamic group.
def delete_dynamic_group(self, dynamic_group_id, **kwargs): resource_path = "/dynamicGroups/{dynamicGroupId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_dynamic_group got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "dynamicGroupId": dynamic_group_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_group(self, group):\n raise NotImplementedError('delete_group')", "def deleteGroup(groupName):\r\n Group.deleteGroup(groupName)", "def delete_group(\n group_id: BSONObjectId,\n tkn: Token = Depends(from_authotization_header_nondyn),\n):\n assert_has_clearance(tkn.owner, \"sni....
[ "0.7652342", "0.7170237", "0.71552515", "0.715084", "0.71241444", "0.71064895", "0.7096797", "0.7081772", "0.7047306", "0.7031457", "0.70275164", "0.7024975", "0.6973406", "0.6965726", "0.6895656", "0.6874776", "0.68428373", "0.6803785", "0.6794903", "0.6792931", "0.67883205"...
0.71124274
5
Deletes the specified group. The group must be empty.
def delete_group(self, group_id, **kwargs): resource_path = "/groups/{groupId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_group got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "groupId": group_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_group(self, group):\n raise NotImplementedError('delete_group')", "def delete_group(self, group_name):\r\n params = {'GroupName' : group_name}\r\n return self.get_response('DeleteGroup', params)", "def delete_group(self, group_id: str):\n # If successful, this method ret...
[ "0.8661287", "0.8429421", "0.8341084", "0.83324933", "0.81745154", "0.80304027", "0.8005047", "0.7919353", "0.79183817", "0.7814903", "0.7808236", "0.7775581", "0.77595615", "0.77590305", "0.773771", "0.7719856", "0.75224054", "0.75213486", "0.7515984", "0.74853754", "0.74750...
0.68005127
46
Deletes the specified identity provider. The identity provider must not have
def delete_identity_provider(self, identity_provider_id, **kwargs): resource_path = "/identityProviders/{identityProviderId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_identity_provider got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "identityProviderId": identity_provider_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_identity_provider(module, sdk, cloud, idp):\n\n if idp is None:\n return False\n\n if module.check_mode:\n return True\n\n try:\n cloud.identity.delete_identity_provider(idp)\n except sdk.exceptions.OpenStackCloudException as ex:\n module.fail_json(msg='Failed to ...
[ "0.76473016", "0.6944449", "0.6763664", "0.6758632", "0.67521816", "0.67521816", "0.66426307", "0.6528002", "0.63240004", "0.62183905", "0.6177831", "0.6141892", "0.60588896", "0.59532607", "0.58217615", "0.55912703", "0.55306107", "0.5515687", "0.54976976", "0.53798723", "0....
0.6678267
6
Deletes the specified group mapping.
def delete_idp_group_mapping(self, identity_provider_id, mapping_id, **kwargs): resource_path = "/identityProviders/{identityProviderId}/groupMappings/{mappingId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_idp_group_mapping got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "identityProviderId": identity_provider_id, "mappingId": mapping_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_group(self, group):\n raise NotImplementedError('delete_group')", "def deleteGroup(groupName):\r\n Group.deleteGroup(groupName)", "def delete_group(gid):\n if request.method == 'POST':\n hl.deleteGroup(gid)\n return redirect('/users')", "def do_del_group(dbsync, grou...
[ "0.7407182", "0.6943236", "0.681764", "0.6814603", "0.66882336", "0.665547", "0.66528887", "0.6552498", "0.6507317", "0.65058494", "0.6426595", "0.64135116", "0.64025885", "0.6398474", "0.6387597", "0.63239", "0.62887627", "0.6258294", "0.62549347", "0.6252067", "0.62451065",...
0.5965616
36
Deletes the specified MFA TOTP device for the specified user.
def delete_mfa_totp_device(self, user_id, mfa_totp_device_id, **kwargs): resource_path = "/users/{userId}/mfaTotpDevices/{mfaTotpDeviceId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_mfa_totp_device got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "mfaTotpDeviceId": mfa_totp_device_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_user(self, user):\n self.delete(user)", "def delete_user(self, user):\n # noinspection PyUnresolvedReferences\n self.delete(user)", "def delete_user(self, user):\n try:\n with dbm.open(self.dbm_path, 'c', 0o600) as db:\n del db[user.name]\n ...
[ "0.7167678", "0.69209623", "0.6781475", "0.6747502", "0.67389566", "0.66825885", "0.66802096", "0.66421336", "0.66130745", "0.6571461", "0.64496547", "0.64242226", "0.63735026", "0.63403934", "0.6330194", "0.63215464", "0.63176155", "0.63172036", "0.63106817", "0.62828964", "...
0.69395584
1
Deletes the specified network source
def delete_network_source(self, network_source_id, **kwargs): resource_path = "/networkSources/{networkSourceId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_network_source got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "networkSourceId": network_source_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete(self, source):\n _source = self._source_prefix+source\n assert _source in self.cache.keys()\n del self.cache[_source]", "def delete_source(self, src_name: SourceName) -> None:\n while True:\n try:\n response = self.genes.query(\n ...
[ "0.69649154", "0.67236555", "0.67101824", "0.6658213", "0.66125065", "0.64707583", "0.6187282", "0.6125395", "0.6124346", "0.6093754", "0.6084731", "0.60805976", "0.6080252", "0.6072282", "0.6040917", "0.6035688", "0.6021448", "0.60198027", "0.60127157", "0.6012108", "0.59954...
0.69568324
1
Delete Oauth token for the user
def delete_o_auth_client_credential(self, user_id, oauth2_client_credential_id, **kwargs): resource_path = "/users/{userId}/oauth2ClientCredentials/{oauth2ClientCredentialId}" method = "DELETE" # Don't accept unknown kwargs expected_kwargs = [ "retry_strategy", "if_match" ] extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs] if extra_kwargs: raise ValueError( "delete_o_auth_client_credential got unknown kwargs: {!r}".format(extra_kwargs)) path_params = { "userId": user_id, "oauth2ClientCredentialId": oauth2_client_credential_id } path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing} for (k, v) in six.iteritems(path_params): if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0): raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k)) header_params = { "accept": "application/json", "content-type": "application/json", "if-match": kwargs.get("if_match", missing) } header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None} retry_strategy = self.retry_strategy if kwargs.get('retry_strategy'): retry_strategy = kwargs.get('retry_strategy') if retry_strategy: return retry_strategy.make_retrying_call( self.base_client.call_api, resource_path=resource_path, method=method, path_params=path_params, header_params=header_params) else: return self.base_client.call_api( resource_path=resource_path, method=method, path_params=path_params, header_params=header_params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_auth_token():\n data = get_request_data(request)\n address = data.get(\"address\")\n token = data.get(\"token\")\n\n valid, message = is_token_valid(token, address)\n if not valid:\n return jsonify(error=message), 400\n\n force_expire_token(token)\n\n return jsonify(success=\...
[ "0.80231464", "0.77013326", "0.75746226", "0.75228196", "0.75083303", "0.7371194", "0.7269982", "0.7242659", "0.7240688", "0.7185005", "0.69856465", "0.6977537", "0.6966745", "0.6926778", "0.6911425", "0.690824", "0.6878121", "0.68549204", "0.6834548", "0.6783257", "0.6767800...
0.0
-1