query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Setter for the number at tile position (row, col)
def set_number(self, row, col, value):
    """Setter for the number at tile position (row, col)."""
    self._grid[row][col] = value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_number(self, row, col, value):\r\n self._grid[row][col] = value", "def set_number(self, row, col, value):\r\n self._grid[row][col] = value", "def set_number(self, row, col, value):\r\n self._grid[row][col] = value", "def set_tile(self, row, col, value):\n # replace with yo...
[ "0.7453924", "0.7453924", "0.7453924", "0.69439274", "0.690476", "0.690476", "0.6867873", "0.6846585", "0.6842772", "0.68179584", "0.67959946", "0.67660105", "0.67594844", "0.67594844", "0.66707844", "0.66694415", "0.66609603", "0.66590446", "0.66233623", "0.6552477", "0.6444...
0.74432904
6
Make a copy of the puzzle to update during solving Returns a Puzzle object
def clone(self):
    """Make a copy of the puzzle to update during solving.

    Returns a new Puzzle object built from this puzzle's height,
    width, and current grid contents.
    """
    return Puzzle(self._height, self._width, self._grid)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve(self):\n new_puzzle = self._puzzle.clone()\n self._solution = new_puzzle.solve_puzzle()\n del new_puzzle\n pass", "def clone(self):\r\n new_puzzle = Puzzle(self._height, self._width, self._grid)\r\n return new_puzzle", "def clone(self):\r\n new_puzzle ...
[ "0.8077919", "0.7812692", "0.7812692", "0.7812692", "0.7204438", "0.70470625", "0.68246186", "0.6803633", "0.67532885", "0.6707239", "0.6599824", "0.65672135", "0.65080994", "0.65023094", "0.64462394", "0.6381703", "0.63737535", "0.62523", "0.6221241", "0.62164736", "0.619480...
0.7704075
7
Locate the current position of the tile that will be at position (solved_row, solved_col) when the puzzle is solved Returns a tuple of two integers
def current_position(self, solved_row, solved_col):
    """Locate the current position of the tile that belongs at
    (solved_row, solved_col) in the solved configuration.

    Returns a (row, col) tuple of two integers; trips an assertion if
    the tile value is absent from the grid.
    """
    target = solved_row * self._width + solved_col
    for row in range(self._height):
        for col in range(self._width):
            if self._grid[row][col] == target:
                return (row, col)
    assert False, "Value " + str(target) + " not found"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def current_position(self, solved_row, solved_col):\r\n solved_value = (solved_col + self._width * solved_row)\r\n\r\n for row in range(self._height):\r\n for col in range(self._width):\r\n if self._grid[row][col] == solved_value:\r\n return (row, col)\r\n...
[ "0.84899926", "0.84899926", "0.84899926", "0.7324289", "0.707103", "0.69124496", "0.6876786", "0.68116784", "0.67074156", "0.6705732", "0.6690645", "0.66753584", "0.6621211", "0.66135", "0.66128737", "0.66079515", "0.6600574", "0.65944666", "0.65453476", "0.65348464", "0.6522...
0.8510771
3
Updates the puzzle state based on the provided move string
def update_puzzle(self, move_string):
    """Apply a sequence of moves to the puzzle state.

    Each character of move_string slides the zero tile one step:
    'l' left, 'r' right, 'u' up, 'd' down.  Off-grid or unknown
    moves trip an assertion.
    """
    zero_row, zero_col = self.current_position(0, 0)
    for direction in move_string:
        if direction == "l":
            assert zero_col > 0, "move off grid: " + direction
            self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col - 1]
            zero_col -= 1
        elif direction == "r":
            assert zero_col < self._width - 1, "move off grid: " + direction
            self._grid[zero_row][zero_col] = self._grid[zero_row][zero_col + 1]
            zero_col += 1
        elif direction == "u":
            assert zero_row > 0, "move off grid: " + direction
            self._grid[zero_row][zero_col] = self._grid[zero_row - 1][zero_col]
            zero_row -= 1
        elif direction == "d":
            assert zero_row < self._height - 1, "move off grid: " + direction
            self._grid[zero_row][zero_col] = self._grid[zero_row + 1][zero_col]
            zero_row += 1
        else:
            assert False, "invalid direction: " + direction
        # The zero tile now occupies the square it was swapped toward.
        self._grid[zero_row][zero_col] = 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_puzzle(self, move_string):\r\n zero_row, zero_col = self.current_position(0, 0)\r\n for direction in move_string:\r\n if direction == \"l\":\r\n assert zero_col > 0, \"move off grid: \" + direction\r\n self._grid[zero_row][zero_col] = self._grid[zer...
[ "0.8120185", "0.8120185", "0.8120185", "0.8101825", "0.8077781", "0.7200039", "0.70454454", "0.6661715", "0.6583404", "0.6582128", "0.65785265", "0.65703756", "0.6560121", "0.6289467", "0.6268087", "0.61335135", "0.61134064", "0.60638404", "0.605805", "0.6051816", "0.60436565...
0.81526875
1
Check whether the puzzle satisfies the specified invariant at the given position in the bottom rows of the puzzle (target_row > 1) Returns a boolean
def lower_row_invariant(self, target_row, target_col):
    """Check the solver invariant at (target_row, target_col) in the
    bottom rows of the puzzle (target_row > 1).

    Holds when tile zero sits at the target position, every row below
    the target row is fully solved, and the tiles to the right of the
    target column in the target row are solved.  Returns a boolean.
    """
    assert target_row > 1, 'target_row invalid'
    if self._grid[target_row][target_col] != 0:
        return False
    # All rows strictly below the target row must be fully solved.
    for row in range(target_row + 1, self._height):
        for col in range(self._width):
            if self._grid[row][col] != col + row * self._width:
                return False
    # Tiles right of the target column in the target row must be solved.
    for col in range(target_col + 1, self._width):
        if self._grid[target_row][col] != col + target_row * self._width:
            return False
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def row1_invariant(self, target_col):\r\n # replace with your code\r\n conditions = 0\r\n current = self._grid[1][target_col] == 0\r\n if current:\r\n conditions +=1\r\n else:\r\n # print 'Tile ZERO is not at (0, %s) position' %(target_col)\r\n re...
[ "0.71912694", "0.7152272", "0.70571184", "0.70388037", "0.70372164", "0.6987461", "0.6711312", "0.67097217", "0.6679228", "0.6654584", "0.66055614", "0.6557503", "0.6554002", "0.65482295", "0.64755046", "0.6440227", "0.6397061", "0.6329067", "0.6262952", "0.62180895", "0.6097...
0.6862375
6
helper function for solve_interior_tile and solve_col0_tile
def position_tile(self, target_row, target_col, cur_row, cur_col, need_ld=True):
    """Build (without applying) the move string that drags the tile at
    (cur_row, cur_col) into (target_row, target_col).

    Helper for solve_interior_tile and solve_col0_tile.  When need_ld
    stays True the sequence ends with 'ld' to park the zero tile.
    """
    col_gap = target_col - cur_col
    row_gap = target_row - cur_row
    moves = ''
    if row_gap == 0:
        # Tile is already in the target row; shuffle it sideways.
        if col_gap > 0:
            moves += 'l' * col_gap
            if col_gap > 1:
                moves += 'ur' + 'druldru' * (col_gap - 1)
            else:
                if not need_ld:
                    moves += 'ur'
                need_ld = False
        else:
            moves += 'r' * (-col_gap)
            if -col_gap > 1:
                moves += 'ul' + 'dlurdlu' * (-col_gap - 1)
            else:
                need_ld = False
    else:
        # Tile is above the target row; climb up to it first.
        moves += 'u' * row_gap
        if col_gap > 0:
            moves += 'l' * col_gap + 'dru' + 'druldru' * (col_gap - 1)
        elif col_gap < 0:
            moves += 'r' * (-col_gap) + 'dlu' + 'dlurdlu' * (-col_gap - 1)
        # Walk the tile down into place, one row per cycle.
        moves += 'lddru' * (row_gap - 1)
    if need_ld:
        moves += 'ld'
    return moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_interior_tile(self, target_row, target_col):\r\n whole_move = ''\r\n # replace with your code\r\n if self._grid[target_row][target_col] != 0:\r\n # print \"DEBUG CASE WHEN ZERO IN JOPA\"\r\n \r\n # print self\r\n # print 'Solwing tile', sel...
[ "0.6987945", "0.69417804", "0.65950215", "0.657609", "0.64476514", "0.6431955", "0.6417518", "0.6369899", "0.6368723", "0.6354411", "0.6283423", "0.6279517", "0.625173", "0.61543477", "0.61364645", "0.612347", "0.606173", "0.60402596", "0.6014027", "0.6010626", "0.6002269", ...
0.0
-1
Place correct tile at target position Updates puzzle and returns a move string
def solve_interior_tile(self, target_row, target_col):
    """Place the correct tile at (target_row, target_col).

    Updates the puzzle in place and returns the move string used.
    """
    location = self.current_position(target_row, target_col)
    moves = self.position_tile(target_row, target_col, location[0], location[1])
    self.update_puzzle(moves)
    return moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def move_tile(self, target_row, target_col, val):\n # a little bit twisted here for the use of both solve_interior_tile and solve_col0_tile\n solved_row, solved_col = self.current_position(0, val)\n movements = \"\"\n if solved_row == target_row and solved_col == target_col:\n ...
[ "0.74114114", "0.7296464", "0.7176235", "0.7139627", "0.7137439", "0.7097358", "0.7088615", "0.7081109", "0.7047905", "0.703492", "0.69906497", "0.6971217", "0.6968847", "0.6915314", "0.6911569", "0.68654275", "0.6853786", "0.68512785", "0.68391025", "0.6836436", "0.68200254"...
0.69137377
14
Solve tile in column zero on specified row (> 1) Updates puzzle and returns a move string
def solve_col0_tile(self, target_row):
    """Solve the tile in column zero on the given row (> 1).

    Updates the puzzle and returns the move string applied.
    """
    # Step the zero tile up-right so the target tile can be located.
    moves = 'ur'
    self.update_puzzle(moves)
    cur_row, cur_col = self.current_position(target_row, 0)
    if (cur_row, cur_col) != (target_row, 0):
        # Drag the tile to (target_row - 1, 1), then run the canned
        # 3x2 rotation that drops it into column zero.
        moves += self.position_tile(target_row - 1, 1, cur_row, cur_col)
        moves += 'ruldrdlurdluurddlur'
    # Either way, sweep the zero tile to the right edge of the row above.
    moves += 'r' * (self._width - 2)
    self.update_puzzle(moves[2:])
    return moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_puzzle(self):\n\n move_str = \"\"\n \n # Move zero tile to bottom right corner tile of puzzle.\n zero_pos = self.current_position(0,0) \n vert_dist = (self.get_height() - 1) - zero_pos[0]\n horiz_dist = (self.get_width() - 1) - zero_pos[1]\n...
[ "0.80090946", "0.8002715", "0.79543203", "0.7940283", "0.7870043", "0.7847743", "0.7818097", "0.77486104", "0.774018", "0.7731879", "0.77268267", "0.7707487", "0.77027786", "0.76879483", "0.7634697", "0.76258874", "0.7590558", "0.7560754", "0.7522215", "0.75079787", "0.750233...
0.777891
7
Check whether the puzzle satisfies the row zero invariant at the given column (col > 1) Returns a boolean
def row0_invariant(self, target_col):
    """Check the row-zero invariant at the given column (col > 1).

    Requires tile zero at (0, target_col), the tile below it solved,
    every row from 2 down fully solved, and rows 0-1 solved to the
    right of the target column.  Returns a boolean.
    """
    if self._grid[0][target_col] != 0:
        return False
    if self._grid[1][target_col] != target_col + self._width:
        return False
    # Rows 2 and below must be completely solved.
    for row in range(2, self._height):
        for col in range(self._width):
            if self._grid[row][col] != col + row * self._width:
                return False
    # Rows 0 and 1 must be solved right of the target column.
    for row in (0, 1):
        for col in range(target_col + 1, self._width):
            if self._grid[row][col] != col + row * self._width:
                return False
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def row1_invariant(self, target_col):\r\n # replace with your code\r\n conditions = 0\r\n current = self._grid[1][target_col] == 0\r\n if current:\r\n conditions +=1\r\n else:\r\n # print 'Tile ZERO is not at (0, %s) position' %(target_col)\r\n re...
[ "0.7548881", "0.752049", "0.75068325", "0.74679226", "0.742282", "0.7359223", "0.7341293", "0.73344994", "0.7321091", "0.73030216", "0.7264205", "0.7223349", "0.7205626", "0.7195732", "0.7169341", "0.7164632", "0.71609145", "0.71587044", "0.71222657", "0.7111231", "0.7105816"...
0.76055276
0
Check whether the puzzle satisfies the row one invariant at the given column (col > 1) Returns a boolean
def row1_invariant(self, target_col):
    """Check the row-one invariant at the given column (col > 1).

    Requires tile zero at (1, target_col), every row from 2 down fully
    solved, and rows 0-1 solved to the right of the target column.
    Returns a boolean.
    """
    if self._grid[1][target_col] != 0:
        return False
    # Rows 2 and below must be completely solved.
    for row in range(2, self._height):
        for col in range(self._width):
            if self._grid[row][col] != col + row * self._width:
                return False
    # Rows 0 and 1 must be solved right of the target column.
    for row in (0, 1):
        for col in range(target_col + 1, self._width):
            if self._grid[row][col] != col + row * self._width:
                return False
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def row1_invariant(self, target_col):\r\n # assert that row 1 is solved\r\n if not self.lower_row_invariant(1, target_col):\r\n return False\r\n # asserts that tile proceeded to (1,j), the grid below (1,j) and to the right is solved\r\n for dummy_j in range(0, self.get_width(...
[ "0.7744749", "0.7643967", "0.7540741", "0.7405464", "0.7363063", "0.7331644", "0.73298305", "0.72720087", "0.7253807", "0.71994525", "0.7157826", "0.71566737", "0.71124315", "0.7103953", "0.7086645", "0.70836926", "0.7064152", "0.7060724", "0.7032539", "0.7031373", "0.7020518...
0.77575016
0
Solve the tile in row zero at the specified column Updates puzzle and returns a move string
def solve_row0_tile(self, target_col):
    """Solve the tile in row zero at the given column.

    Updates the puzzle and returns the move string applied.
    """
    moves = 'ld'
    self.update_puzzle(moves)
    cur_row, cur_col = self.current_position(0, target_col)
    if (cur_row, cur_col) == (0, target_col):
        # The 'ld' step alone put the tile home.
        return moves
    # Park the tile at (1, target_col - 1), then apply the canned
    # rotation that lifts it into row zero.
    moves += self.position_tile(1, target_col - 1, cur_row, cur_col)
    moves += 'urdlurrdluldrruld'
    self.update_puzzle(moves[2:])
    return moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_col0_tile(self, target_row):\r\n moves_str = \"\"\r\n # move the zero tile from (i,0) to (i−1,1) \r\n # using the move string \"ur\"\r\n moves_str += \"ur\"\r\n temp_grid = Puzzle(self._height, self._width, self._grid)\r\n temp_grid.update_puzzle(moves_str)\r\n ...
[ "0.7689822", "0.76361793", "0.7602665", "0.759841", "0.7547982", "0.75370145", "0.75116783", "0.7487752", "0.7478812", "0.74497175", "0.7435299", "0.7407118", "0.7397174", "0.73776317", "0.73732615", "0.7324699", "0.72756827", "0.72559106", "0.7206573", "0.71551996", "0.71403...
0.745573
9
Solve the tile in row one at the specified column Updates puzzle and returns a move string
def solve_row1_tile(self, target_col):
    """Solve the tile in row one at the given column.

    Updates the puzzle and returns the move string applied.  No
    trailing 'ld' is needed here, so need_ld is disabled.
    """
    location = self.current_position(1, target_col)
    moves = self.position_tile(1, target_col, location[0], location[1],
                               need_ld=False)
    self.update_puzzle(moves)
    return moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_row1_tile(self, target_col):\r\n moves_str = \"\"\r\n current_row, current_col = self.current_position(1, target_col)\r\n zero_row, zero_col = self.current_position(0, 0)\r\n moves_str += self.position_tile(zero_row, zero_col, current_row, current_col)\r\n moves_str += ...
[ "0.7611342", "0.7530595", "0.7489633", "0.74769557", "0.7346029", "0.7312759", "0.72872084", "0.72763294", "0.72613305", "0.72458375", "0.72417307", "0.7236731", "0.72223794", "0.7210395", "0.71957016", "0.71925163", "0.7142041", "0.71206504", "0.70840454", "0.7064091", "0.70...
0.74047565
4
Solve the upper left 2x2 part of the puzzle Updates the puzzle and returns a move string
def solve_2x2(self):
    """Solve the upper-left 2x2 part of the puzzle.

    Walks the zero tile home to (0, 0), then cycles the corner with
    'rdlu' until solved.  Updates the puzzle and returns the move
    string.
    """
    zero_row, zero_col = self.current_position(0, 0)
    moves = 'u' * zero_row + 'l' * zero_col
    self.update_puzzle(moves)
    if self.check_2x2_solved():
        return moves
    while not self.check_2x2_solved():
        moves += 'rdlu'
        self.update_puzzle('rdlu')
    return moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_puzzle(self):\n\n move_str = \"\"\n \n # Move zero tile to bottom right corner tile of puzzle.\n zero_pos = self.current_position(0,0) \n vert_dist = (self.get_height() - 1) - zero_pos[0]\n horiz_dist = (self.get_width() - 1) - zero_pos[1]\n...
[ "0.86358064", "0.84391785", "0.820359", "0.81158245", "0.8079695", "0.7876614", "0.7855013", "0.77959996", "0.7629816", "0.75706357", "0.7446708", "0.71919894", "0.70876247", "0.6987711", "0.6876523", "0.686447", "0.686447", "0.686447", "0.6857152", "0.6857152", "0.6825898", ...
0.8153259
3
Check if the top-left 2x2 part of the puzzle is solved
def check_2x2_solved(self):
    """Return True when the top-left 2x2 corner holds its solved tiles."""
    expected = (0, 1, self._width, self._width + 1)
    actual = (self._grid[0][0], self._grid[0][1],
              self._grid[1][0], self._grid[1][1])
    return actual == expected
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def checkPuzzle(self):\n print('Got to checkPuzzle')", "def solve_puzzle(self):\r\n \r\n counter = 0\r\n rows = self._height-1\r\n cols = self._width-1\r\n # print rows, cols\r\n # print 'The greed has %s rows and %s coloumn indexes' %(rows, cols) \r\n solu...
[ "0.7565178", "0.73852265", "0.7355203", "0.73358935", "0.7311697", "0.7294157", "0.7242904", "0.7190867", "0.7131681", "0.71189845", "0.70927525", "0.70451087", "0.70317227", "0.70084786", "0.6971756", "0.6908844", "0.69036686", "0.6901154", "0.68942595", "0.68830353", "0.685...
0.6677703
31
Generate a solution string for a puzzle Updates the puzzle and returns a move string
def solve_puzzle(self):
    """Generate a full solution string for the puzzle.

    Updates the puzzle in place and returns the move string.
    """
    zero_row, zero_col = self.current_position(0, 0)
    # Send the zero tile to the bottom-right corner before solving.
    moves = 'd' * (self._height - zero_row - 1) + 'r' * (self._width - zero_col - 1)
    self.update_puzzle(moves)
    # Solve the bottom rows right-to-left, bottom-to-top.
    for row in range(self._height - 1, 1, -1):
        for col in range(self._width - 1, -1, -1):
            assert self.lower_row_invariant(row, col)
            if col == 0:
                moves += self.solve_col0_tile(row)
            else:
                moves += self.solve_interior_tile(row, col)
    # Solve the top two rows right-to-left, one column at a time.
    for col in range(self._width - 1, 1, -1):
        assert self.row1_invariant(col)
        moves += self.solve_row1_tile(col)
        assert self.row0_invariant(col)
        moves += self.solve_row0_tile(col)
    # Finish with the upper-left 2x2 corner.
    moves += self.solve_2x2()
    return moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_puzzle(self):\n\n move_str = \"\"\n \n # Move zero tile to bottom right corner tile of puzzle.\n zero_pos = self.current_position(0,0) \n vert_dist = (self.get_height() - 1) - zero_pos[0]\n horiz_dist = (self.get_width() - 1) - zero_pos[1]\n...
[ "0.7559481", "0.73133874", "0.69816166", "0.66144603", "0.6516676", "0.63399726", "0.6325148", "0.6325148", "0.6299051", "0.6299051", "0.6299051", "0.6297025", "0.62952787", "0.6252764", "0.6244446", "0.6211515", "0.61357105", "0.6083045", "0.6063561", "0.6049944", "0.602346"...
0.7348655
1
Clear properties. Mapping properties show up in CXSMILES and make validation less readable.
def _getProductCXSMILES(product):
    """Return the CXSMILES for *product* with all atom properties cleared.

    Mapping properties would otherwise show up in the CXSMILES and make
    validation less readable.
    """
    for atom in product.GetAtoms():
        # NOTE(review): assumes GetPropsAsDict returns a snapshot, so
        # clearing while iterating its keys is safe — confirm with RDKit docs.
        for prop_name in atom.GetPropsAsDict():
            atom.ClearProp(prop_name)
    return Chem.MolToCXSmiles(product)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clear_properties(self):\n self.properties.clear()", "def clearProperties(*args):", "def clearProperties(*args):", "def clearProperties(*args):", "def clearProperties(*args):", "def reset(self):\n self.valid_passes = set()\n self.property_set.clear()", "def reset_properties(self):\n...
[ "0.8134951", "0.7525838", "0.7525838", "0.7525838", "0.7525838", "0.6747215", "0.6691966", "0.66214865", "0.66214865", "0.66214865", "0.66214865", "0.65654373", "0.6476267", "0.64220273", "0.64089143", "0.6397436", "0.6378762", "0.6357874", "0.62982965", "0.6284592", "0.62551...
0.0
-1
Run a reaction and combine the products into a single string. Makes errors more readable.
def _reactAndSummarize(rxn_smarts, *smiles):
    """Run the reaction described by *rxn_smarts* on *smiles* and combine
    every product into one summary string.

    Product sets are joined with ' OR '; mols within one set with ' + '.
    Makes assertion failures easier to read.
    """
    rxn = rdChemReactions.ReactionFromSmarts(rxn_smarts)
    reactant_mols = [Chem.MolFromSmiles(smi) for smi in smiles]
    summaries = [
        ' + '.join(map(_getProductCXSMILES, product_set))
        for product_set in rxn.RunReactants(reactant_mols)
    ]
    return ' OR '.join(summaries)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reaction_str(self):\n\n def format(number):\n return str(number).rstrip(\".0\") + \" \"\n\n reactant_bits = []\n product_bits = []\n for met in sorted(self._metabolites, key=attrgetter(\"id\")):\n coefficient = self._metabolites[met]\n if coefficient...
[ "0.6317691", "0.59467554", "0.57581663", "0.5728929", "0.5590028", "0.5556531", "0.5528526", "0.55182624", "0.5469046", "0.54264945", "0.5413973", "0.5307061", "0.5297744", "0.5281674", "0.5246843", "0.52160555", "0.51447445", "0.51412046", "0.5139743", "0.511896", "0.5064106...
0.5951013
1
StereoGroup atoms are in the reaction, but the reaction doesn't affect the chirality at the stereo centers > preserve stereo group
def test_reaction_preserves_stereo(self):
    """StereoGroup atoms are in the reaction, but the reaction does not
    affect chirality at the stereo centers -> preserve the stereo group."""
    reaction = '[C@:1]>>[C@:1]'
    reactants = ['F[C@H](Cl)Br |o1:1|', 'F[C@@H](Cl)Br |&1:1|', 'FC(Cl)Br']
    for reactant in reactants:
        self.assertEqual(_reactAndSummarize(reaction, reactant), reactant)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_reaction_defines_stereo(self):\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@H](Cl)Br |o1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@@H](Cl)Br |&1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAn...
[ "0.75458586", "0.7168434", "0.7091707", "0.6843433", "0.6454386", "0.60892975", "0.5650636", "0.55933136", "0.55537635", "0.5514306", "0.54825073", "0.5466172", "0.54264325", "0.5393186", "0.5328103", "0.5303823", "0.5297103", "0.52903897", "0.5283487", "0.5206587", "0.504461...
0.5968604
6
StereoGroup atoms are in the reaction, but the reaction doesn't specify the chirality at the stereo centers > preserve stereo group
def test_reaction_ignores_stereo(self):
    """StereoGroup atoms are in the reaction, but the reaction does not
    specify chirality at the stereo centers -> preserve the stereo group."""
    reaction = '[C:1]>>[C:1]'
    reactants = ['F[C@H](Cl)Br |o1:1|', 'F[C@@H](Cl)Br |&1:1|', 'FC(Cl)Br']
    for reactant in reactants:
        self.assertEqual(_reactAndSummarize(reaction, reactant), reactant)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_reaction_defines_stereo(self):\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@H](Cl)Br |o1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@@H](Cl)Br |&1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAn...
[ "0.7502093", "0.7083151", "0.696663", "0.6748644", "0.6419238", "0.5837475", "0.576132", "0.5678274", "0.5618676", "0.55914426", "0.5525545", "0.5486366", "0.54649514", "0.54351145", "0.54303616", "0.5415109", "0.5333785", "0.5333203", "0.5273259", "0.5203159", "0.51855856", ...
0.6020158
5
StereoGroup atoms are in the reaction, and the reaction inverts the specified chirality at the stereo centers. > preserve stereo group
def test_reaction_inverts_stereo(self):
    """StereoGroup atoms are in the reaction, and the reaction inverts
    the specified chirality -> preserve the stereo group."""
    reaction = '[C@:1]>>[C@@:1]'
    cases = [
        ('F[C@H](Cl)Br |o1:1|', 'F[C@@H](Cl)Br |o1:1|'),
        ('F[C@@H](Cl)Br |&1:1|', 'F[C@H](Cl)Br |&1:1|'),
        ('FC(Cl)Br', 'FC(Cl)Br'),
    ]
    for reactant, expected in cases:
        self.assertEqual(_reactAndSummarize(reaction, reactant), expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_reaction_defines_stereo(self):\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@H](Cl)Br |o1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@@H](Cl)Br |&1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAn...
[ "0.65880185", "0.6446109", "0.593325", "0.5791387", "0.5475662", "0.53331393", "0.52869034", "0.52321607", "0.51858443", "0.5148949", "0.5144954", "0.5143506", "0.51018935", "0.5073538", "0.5006511", "0.49255782", "0.4913669", "0.4903975", "0.48686603", "0.47758043", "0.47285...
0.5284103
7
StereoGroup atoms are in the reaction, and the reaction destroys the specified chirality at the stereo centers > invalidate stereo center, preserve the rest of the stereo group.
def test_reaction_destroys_stereo(self):
    """StereoGroup atoms are in the reaction, and the reaction destroys
    the specified chirality -> invalidate that stereo center but keep
    the rest of the stereo group."""
    reaction = '[C@:1]>>[C:1]'
    for reactant in ('F[C@H](Cl)Br |o1:1|', 'F[C@@H](Cl)Br |&1:1|', 'FC(Cl)Br'):
        self.assertEqual(_reactAndSummarize(reaction, reactant), 'FC(Cl)Br')
    reaction = '[C@:1]F>>[C:1]F'
    # Reaction destroys stereo (but preserves unaffected group)
    products = _reactAndSummarize(reaction, 'F[C@H](Cl)[C@@H](Cl)Br |o1:1,&2:3|')
    self.assertEqual(products, 'FC(Cl)[C@@H](Cl)Br |&1:3|')
    # Reaction destroys stereo (but preserves the rest of the group)
    products = _reactAndSummarize(reaction, 'F[C@H](Cl)[C@@H](Cl)Br |&1:1,3|')
    self.assertEqual(products, 'FC(Cl)[C@@H](Cl)Br |&1:3|')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_reaction_defines_stereo(self):\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@H](Cl)Br |o1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@@H](Cl)Br |&1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAn...
[ "0.6586952", "0.58681905", "0.5560465", "0.5431528", "0.5087718", "0.49389872", "0.49223432", "0.4907192", "0.48812112", "0.4859121", "0.48425356", "0.48171473", "0.48001352", "0.47328418", "0.46946502", "0.46811792", "0.46665424", "0.46157676", "0.4614911", "0.4604623", "0.4...
0.6826048
0
StereoGroup atoms are in the reaction, and the reaction creates the specified chirality at the stereo centers > remove the stereo center from the group > invalidate stereo group
def test_reaction_defines_stereo(self):
    """StereoGroup atoms are in the reaction, and the reaction creates
    the specified chirality -> drop those centers from the stereo group
    (invalidating it when empty)."""
    for reactant in ('F[C@H](Cl)Br |o1:1|', 'F[C@@H](Cl)Br |&1:1|', 'FC(Cl)Br'):
        self.assertEqual(_reactAndSummarize('[C:1]>>[C@@:1]', reactant),
                         'F[C@@H](Cl)Br')
    # Remove group with defined stereo
    products = _reactAndSummarize('[C:1]F>>[C@@:1]F',
                                  'F[C@H](Cl)[C@@H](Cl)Br |o1:1,&2:3|')
    self.assertEqual(products, 'F[C@@H](Cl)[C@@H](Cl)Br |&1:3|')
    # Remove atoms with defined stereo from group
    products = _reactAndSummarize('[C:1]F>>[C@@:1]F',
                                  'F[C@H](Cl)[C@@H](Cl)Br |o1:1,3|')
    self.assertEqual(products, 'F[C@@H](Cl)[C@@H](Cl)Br |o1:3|')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_reaction_destroys_stereo(self):\n reaction = '[C@:1]>>[C:1]'\n products = _reactAndSummarize(reaction, 'F[C@H](Cl)Br |o1:1|')\n self.assertEqual(products, 'FC(Cl)Br')\n products = _reactAndSummarize(reaction, 'F[C@@H](Cl)Br |&1:1|')\n self.assertEqual(products, 'FC(Cl)Br')\n products = _...
[ "0.6699158", "0.599879", "0.5703354", "0.54769295", "0.5319069", "0.51858544", "0.49389228", "0.49315634", "0.4910987", "0.48884475", "0.4821768", "0.4819886", "0.4803131", "0.48022223", "0.48019797", "0.47844344", "0.47762632", "0.47756767", "0.47696617", "0.47598013", "0.46...
0.6745221
0
StereoGroup atoms are not in the reaction > stereo group is unaffected
def test_stereogroup_is_spectator_to_reaction(self):
    """StereoGroup atoms are not in the reaction -> group is unaffected."""
    reactant = 'F[C@H](Cl)[C@@H](Cl)Br |o1:3|'
    cases = [
        # 5a. Reaction preserves unrelated stereo
        ('[C@:1]F>>[C@:1]F', 'F[C@H](Cl)[C@@H](Cl)Br |o1:3|'),
        # 5b. Reaction ignores unrelated stereo
        ('[C:1]F>>[C:1]F', 'F[C@H](Cl)[C@@H](Cl)Br |o1:3|'),
        # 5c. Reaction inverts unrelated stereo
        ('[C@:1]F>>[C@@:1]F', 'F[C@@H](Cl)[C@@H](Cl)Br |o1:3|'),
        # 5d. Reaction destroys unrelated stereo
        ('[C@:1]F>>[C:1]F', 'FC(Cl)[C@@H](Cl)Br |o1:3|'),
        # 5e. Reaction assigns unrelated stereo
        ('[C:1]F>>[C@@:1]F', 'F[C@@H](Cl)[C@@H](Cl)Br |o1:3|'),
    ]
    for rxn_smarts, expected in cases:
        self.assertEqual(_reactAndSummarize(rxn_smarts, reactant), expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_reaction_defines_stereo(self):\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@H](Cl)Br |o1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@@H](Cl)Br |&1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAn...
[ "0.72588325", "0.678643", "0.66508996", "0.6132741", "0.60709405", "0.5719462", "0.5535332", "0.552644", "0.5486801", "0.54643136", "0.53574395", "0.53247046", "0.52777433", "0.52353007", "0.5227962", "0.52224547", "0.5211532", "0.5198085", "0.51577014", "0.51439506", "0.5077...
0.6698314
2
StereoGroup atoms are split into two products by the reaction > Should the group be invalidated or trimmed?
def test_reaction_splits_stereogroup(self):
    """StereoGroup atoms are split into two products by the reaction ->
    should the group be invalidated or trimmed?"""
    products = _reactAndSummarize('[C:1]OO[C:2]>>[C:2]O.O[C:1]',
                                  'F[C@H](Cl)OO[C@@H](Cl)Br |o1:1,5|')
    # Two product sets, each with two mols:
    self.assertEqual(products.count('|o1:1|'), 4)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_reaction_defines_stereo(self):\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@H](Cl)Br |o1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAndSummarize('[C:1]>>[C@@:1]', 'F[C@@H](Cl)Br |&1:1|')\n self.assertEqual(products, 'F[C@@H](Cl)Br')\n products = _reactAn...
[ "0.723399", "0.7145059", "0.7093139", "0.6711873", "0.5570379", "0.5568875", "0.54186857", "0.5407642", "0.53835154", "0.5234586", "0.5234586", "0.523138", "0.52302647", "0.5206368", "0.51776206", "0.5163662", "0.5118267", "0.5115464", "0.51110584", "0.510641", "0.5056884", ...
0.7006468
3
If multiple copies of an atom in StereoGroup show up in the product, they should all be part of the same product StereoGroup.
def test_reaction_copies_stereogroup(self):
    """If multiple copies of a StereoGroup atom appear in the product,
    they should all belong to the same product StereoGroup."""
    # stereogroup manually checked, product SMILES assumed correct.
    expected = ('CC(C)(OCC[C@@H](Br)C[C@@H](Cl)Br)'
                'OCC[C@@H](Br)C[C@@H](Cl)Br |&1:6,9,15,18|')
    # Stereogroup atoms are in the reaction with multiple copies in the
    # product.
    products = _reactAndSummarize('[O:1].[C:2]=O>>[O:1][C:2][O:1]',
                                  'Cl[C@@H](Br)C[C@H](Br)CCO |&1:1,4|',
                                  'CC(=O)C')
    self.assertEqual(products, expected)
    # Stereogroup atoms are not in the reaction, but have multiple copies
    # in the product.
    # NOTE(review): this call is byte-identical to the previous one; the
    # reaction SMARTS presumably should differ for this second scenario —
    # confirm against the upstream test suite.
    products = _reactAndSummarize('[O:1].[C:2]=O>>[O:1][C:2][O:1]',
                                  'Cl[C@@H](Br)C[C@H](Br)CCO |&1:1,4|',
                                  'CC(=O)C')
    self.assertEqual(products, expected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_grouping(self):\n n = self.create(NodeItem, UML.Node)\n a = self.create(ArtifactItem, UML.Artifact)\n\n self.group(n, a)\n\n assert 1 == len(n.subject.deployment)\n assert n.subject.deployment[0].deployedArtifact[0] is a.subject", "def test_reaction_splits_stereogroup(...
[ "0.57665616", "0.5714965", "0.56087047", "0.5555653", "0.53248274", "0.5304287", "0.5222042", "0.520931", "0.5198424", "0.5198424", "0.5195729", "0.5162648", "0.5140849", "0.51147324", "0.5112582", "0.5106029", "0.50989425", "0.50513166", "0.5048671", "0.50138116", "0.5004784...
0.693719
0
initializes a new instance of the class
def __init__(self, infos, logger):
    """Initialize a new instance of the class.

    Delegates to the base class with the fixed step name
    'Delete Green Cloudformation Stack' and the green-stack infos.
    """
    super().__init__(
        infos,
        'Delete Green Cloudformation Stack',
        logger,
        infos.green_infos,
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new(self):\n self._init()", "def initialize(cls):", "def init(self) -> None:", "def __init__ (self):\n pass", "def __init__(self) -> None:\n # TODO: Provide the complete constructor for this object", "def init(self) -> None:\n ...", "def __init__():", "def init(self):\...
[ "0.81939584", "0.81770974", "0.78722274", "0.7813727", "0.7803766", "0.77187806", "0.75613856", "0.752962", "0.752962", "0.752962", "0.752962", "0.752962", "0.752962", "0.752962", "0.752962", "0.7493935", "0.7488739", "0.7488739", "0.7488739", "0.7488739", "0.7477393", "0.7...
0.0
-1
Always print out typedefs for syntactic reasons in case of more passes.
def visit_Typedef(self, node):
    """Always print out typedefs, for syntactic reasons, in case more
    passes run over the output."""
    return str_node(node)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getPrettyType(self):\n s = self.sym\n if self.sym == None:\n s = self.define\n return \"Typedef (alias type: %s)\" % s.getType()", "def typedefs(self):\n raise exceptions.NotImplementedError()", "def typedef(typedefs):\n\n\n for d in typedefs:\n\n\n type = m...
[ "0.6419347", "0.6250635", "0.6137227", "0.6129789", "0.6000314", "0.5863036", "0.58271354", "0.57932997", "0.5779045", "0.5737586", "0.5731451", "0.5704002", "0.5692695", "0.5642191", "0.5641333", "0.56264526", "0.55807245", "0.55783653", "0.5577333", "0.556277", "0.55400676"...
0.5954348
5
Always print out struct declarations for syntactic reasons in case of more passes.
def visit_Struct(self, node): return str_node(node)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_out_struct(self):\n args ={\n \"name\": self.python_madz_types + (\"OUTSTRUCT\" if self.namespace == \"\" else self._namespace_mangle(self.namespace)),\n \"fields\":\"\"\n }\n\n res = \\\n\"\"\"class {name}(Structure):\n _fields_ = [{fields}]\n\"\"\"\n ...
[ "0.59062266", "0.5870518", "0.57124054", "0.56050116", "0.55951077", "0.55576754", "0.55329996", "0.550045", "0.546557", "0.5464527", "0.5442078", "0.5435242", "0.5433921", "0.5400498", "0.53979063", "0.5391494", "0.5334889", "0.5334051", "0.53326344", "0.53083193", "0.530733...
0.5306634
21
Generation from a statement node. This method exists as a wrapper for individual visit_ methods to handle different treatment of some statements in this context.
def _generate_stmt(self, n, add_indent=False): typ = type(n) if add_indent: self.indent_level += 2 indent = self._make_indent() if add_indent: self.indent_level -= 2 if typ in ( c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp, c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef, c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef, c_ast.ExprList): # These can also appear in an expression context so no semicolon # is added to them automatically # # Only print out expression if they are part of slice if n.sliced: return indent + self.visit(n) + ';\n' else: return indent + '{}\n' elif typ in (c_ast.Compound,): # No extra indentation required before the opening brace of a # compound - because it consists of multiple lines it has to # compute its own indentation. # return self.visit(n) else: if n.sliced: return indent + self.visit(n) + '\n' else: return ''
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gen_stmt(self, statement):\n fn_map = {\n statements.If: self.gen_if,\n statements.While: self.gen_while,\n statements.DoWhile: self.gen_do_while,\n statements.For: self.gen_for,\n statements.Switch: self.gen_switch,\n statements.Goto: se...
[ "0.6982189", "0.6952451", "0.6714583", "0.6317924", "0.6317603", "0.6093038", "0.6031703", "0.6027475", "0.5918272", "0.5912852", "0.58984697", "0.58264", "0.58131385", "0.5812323", "0.5779691", "0.5761809", "0.57449067", "0.57259613", "0.567997", "0.5676137", "0.5661649", ...
0.64626193
3
Test the fast upate code against a for loop.
def test_update(): learner = optlearner.VolatilityLearner() for reward in [0, 1]: slow_pIk = slow_update(learner, reward) learner._update(reward) yield npt.assert_array_equal, slow_pIk, learner.pIk learner.reset()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testHasForLoop(self):\n no_foreach = build_code(['x=1'], [], ['x=3'], concise=False)\n foreach = build_code(['x=1'], ['x=2'], ['x=3'], concise=False)\n self.assertNotIn('for', no_foreach)\n self.assertIn('for', foreach)", "def test_run_loop_success(self):\n found = False\n ...
[ "0.627312", "0.62058866", "0.61405146", "0.6024515", "0.5982735", "0.59628016", "0.5885802", "0.58731663", "0.57872266", "0.5761439", "0.5710765", "0.56505996", "0.5533442", "0.5478586", "0.54322", "0.5400814", "0.53748524", "0.5355801", "0.52825946", "0.5275815", "0.5266114"...
0.48848116
81
This is a more or less literal translation of the original code.
def slow_update(learner, reward): pIk = learner.pIk.copy() k_grid = learner.k_grid I_grid = learner.I_grid p_grid = learner.p_grid Ip1gIk = learner._I_trans pp1gpIp1 = learner._p_trans for k in xrange(k_grid.size): # 1) Multiply pIk by Ip1gIk and integrate out I. This will give pIp1k pIp1k = np.zeros((p_grid.size, I_grid.size)) for Ip1 in xrange(I_grid.size): for p in xrange(p_grid.size): pIp1k[p, Ip1] = np.sum(Ip1gIk[Ip1, :, k] * pIk[p, :, k]) # 2) Multiply pIp1k by pp1gpIp1 and integrate out p. pp1Ip1k = np.zeros((p_grid.size, I_grid.size)) for Ip1 in xrange(I_grid.size): for pp1 in xrange(p_grid.size): pp1Ip1k[pp1, Ip1] = np.sum(pIp1k[:, Ip1] * pp1gpIp1[pp1, :, Ip1].T) # 3) Place pp1Ip1k into pIk (belief that is carried to the next trial) pIk[:, :, k] = pp1Ip1k if reward: for k in xrange(k_grid.size): for p in xrange(p_grid.size): pIk[p, :, k] *= p_grid[p] else: for k in xrange(k_grid.size): for p in xrange(p_grid.size): pIk[p, :, k] *= 1 - p_grid[p] # Normalization pIk /= pIk.sum() return pIk
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def code():", "def _fix_up(self, cls, code_name):", "def exercise_b2_106():\r\n pass", "def retranslate(self):\r\n pass", "def retranslate(self):\r\n pass", "def exercise_b2_107():\r\n pass", "def exercise_b2_113():\r\n pass", "def test_fix_code_typical_code():\r\n\r\n pass"...
[ "0.6567136", "0.62829435", "0.5988698", "0.59373957", "0.59373957", "0.5923645", "0.5897334", "0.58716434", "0.58613706", "0.58475685", "0.5832422", "0.5757656", "0.57205373", "0.56826997", "0.5669326", "0.5593282", "0.55611753", "0.556074", "0.5530139", "0.54967827", "0.5487...
0.0
-1
Tests the install workflow using the built in workflows.
def tests_pull_workflow(self): daemon_client = {} client = self.get_client(daemon_client) for container in client.containers(all=True): if 'test-container' in \ ''.join([name for name in container.get('Names')]): client.remove_container('test-container') if ['{0}:latest'.format(TEST_IMAGE)] in \ [i.get('RepoTags') for i in client.images()]: client.remove_image(TEST_IMAGE, force=True) # execute install workflow self.env.execute('install', task_retries=0) container_instance = {} for instance in self.env.storage.get_node_instances(): if 'container_id' in instance.runtime_properties.keys(): container_instance = instance self.assertTrue(container_instance is not None, 'Failed getting container.') container_id = container_instance.runtime_properties.get( 'container_id') containers = client.containers(all=True) self.assertTrue(container_id in [c.get('Id') for c in containers]) self.env.execute('uninstall', task_retries=3) repotags = [] for i in client.images(): repotags.append(i.get('RepoTags')) self.assertFalse(TEST_IMAGE in [tag for tag in repotags]) if ['{0}:latest'.format(TEST_IMAGE)] in \ [i.get('RepoTags') for i in client.images()]: client.remove_image(TEST_IMAGE, force=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_install(self):\n pass", "def test_deploy_workflow_definition(self):\n pass", "def test_installed(self):\n # OSA script should have been installed in setUp function\n self.assertTrue(self.run_function(\"assistive.installed\", [OSA_SCRIPT]))\n # Clean up install\n ...
[ "0.7448684", "0.6557498", "0.65304166", "0.65078044", "0.6446686", "0.63318056", "0.6329271", "0.6329271", "0.63074094", "0.63003325", "0.62722313", "0.62695414", "0.6268539", "0.62517077", "0.62472796", "0.62345684", "0.62330306", "0.62122643", "0.62017137", "0.61228365", "0...
0.0
-1
Rename a file or folder
def rename_file(self, file_id, new_name): func = f"setRenameFile(Token: $Token, FileRenames: $FileRenames)" query = f"mutation SetRenameFile($Token: String!, $FileRenames: [FileRenameInfo]!) {{ {func} }}" request = {"operationName": "SetRenameFile", "variables": { "Token": self.KEYS["Token"], "FileRenames": [{ "ID": file_id, "NewName": new_name }] }, "query": query } header = {"x-api-key": self.KEYS["x-api-key"]} response = requests.post(URL_API, headers=header, data=json.dumps(request)) if response.ok: rd = json.loads(response.text) if "errors" in rd: messages = [] for error in rd["errors"]: messages.append(error["message"]) message = '\n'.join(messages) raise DegooError(f"getUserInfo failed with: {message}") else: return rd["data"]['setRenameFile'] else: raise DegooError(f"renameFile failed with: {response}")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def RenameFile(self, oldname: str, newname: str) -> None:\n ...", "def rename(path, new_path):\n fs.rename(path, new_path)", "def rename(self, src, dst):\n os.rename(src, dst)", "def rename_file(path, old_name, new_name):\n \n old_file = os.path.join(path, old_name)\n new_file = os....
[ "0.7906396", "0.7777854", "0.77087426", "0.763373", "0.755172", "0.752285", "0.74998015", "0.74556607", "0.73914254", "0.7262698", "0.71969265", "0.7161511", "0.70347023", "0.70347023", "0.70059544", "0.699431", "0.69706184", "0.69470656", "0.6944914", "0.69139314", "0.690678...
0.6451277
46
Move a file or folder to new destination
def mv(self, file_id, new_parent_id): func = f"setMoveFile(Token: $Token, Copy: $Copy, NewParentID: $NewParentID, FileIDs: $FileIDs)" query = f"mutation SetMoveFile($Token: String!, $Copy: Boolean, $NewParentID: String!, $FileIDs: [String]!) {{ {func} }}" request = {"operationName": "SetMoveFile", "variables": { "Token": self.KEYS["Token"], "NewParentID": new_parent_id, "FileIDs": [ file_id ] }, "query": query } header = {"x-api-key": self.KEYS["x-api-key"]} response = requests.post(URL_API, headers=header, data=json.dumps(request)) if response.ok: rd = json.loads(response.text) if "errors" in rd: messages = [] for error in rd["errors"]: messages.append(error["message"]) message = '\n'.join(messages) raise DegooError(f"getUserInfo failed with: {message}") else: return rd["data"]['setMoveFile'] else: raise DegooError(f"renameFile failed with: {response}")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def move_file(source, destination):\n shutil.move(source, destination)", "def moveFile(source, dest):\n try:\n shutil.move(source, dest) \n except IOError as e:\n print (\"Unable to move file. %s\" %(e))", "def mv(self, src_path, dst_path):\n try:\n postdata = codecs.en...
[ "0.836858", "0.77318025", "0.7662355", "0.7631933", "0.7586252", "0.7535129", "0.7519395", "0.75076956", "0.74930936", "0.74675715", "0.74577117", "0.74045736", "0.7384608", "0.7289181", "0.7248294", "0.72342104", "0.72236645", "0.71997166", "0.7152438", "0.71472096", "0.7135...
0.62955916
80
Rename a file or folder
def rename(path_file_folder, new_name): old_name = path_file_folder[path_file_folder.rfind('/') + 1:] if '/' in path_file_folder else path_file_folder if old_name == new_name: raise DegooError(f"rename: Old name and new name \"{new_name}\" cannot be the same") if isinstance(path_file_folder, int): file_id = path_file_folder elif isinstance(path_file_folder, str): file_id = path_id(path_file_folder) else: raise DegooError(f"rm: Illegal file: {path_file_folder}") return api.rename_file(file_id, new_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def RenameFile(self, oldname: str, newname: str) -> None:\n ...", "def rename(path, new_path):\n fs.rename(path, new_path)", "def rename(self, src, dst):\n os.rename(src, dst)", "def rename_file(path, old_name, new_name):\n \n old_file = os.path.join(path, old_name)\n new_file = os....
[ "0.7906396", "0.7777854", "0.77087426", "0.763373", "0.752285", "0.74998015", "0.74556607", "0.73914254", "0.7262698", "0.71969265", "0.7161511", "0.70347023", "0.70347023", "0.70059544", "0.699431", "0.69706184", "0.69470656", "0.6944914", "0.69139314", "0.690678", "0.689538...
0.755172
4
Move a file or folder
def mv(path_file_folder, new_path): if not is_folder(new_path): raise DegooError(f"mv: The target path is not a folder") source_path = path_file_folder if is_folder(path_file_folder) else path_file_folder[:path_file_folder.rfind('/')] if source_path == new_path: raise DegooError(f"mv: The target path cannot be the same as the source path") if isinstance(path_file_folder, int): file_id = path_file_folder elif isinstance(path_file_folder, str): file_id = path_id(path_file_folder) else: raise DegooError(f"rm: Illegal file: {path_file_folder}") if isinstance(new_path, int): new_parent_id = new_path elif isinstance(new_path, str): new_parent_id = path_id(new_path) else: raise DegooError(f"rm: Illegal destination folder: {new_path}") return api.mv(file_id, new_parent_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def move_file(source, destination):\n shutil.move(source, destination)", "def moveFile(source, dest):\n try:\n shutil.move(source, dest) \n except IOError as e:\n print (\"Unable to move file. %s\" %(e))", "def move_file(self, path: PathLike, dest: PathLike, force: bool = False):", "de...
[ "0.83455", "0.7957065", "0.7644527", "0.76268214", "0.75139415", "0.74997723", "0.74345535", "0.74257374", "0.73716736", "0.7324468", "0.7222057", "0.7205844", "0.72037476", "0.71912545", "0.7157873", "0.7147585", "0.7131787", "0.7130521", "0.71214455", "0.71214455", "0.71021...
0.69655865
27
Get the versions from GitHub tags
def get_versions(self): # They randomly use and don't use 'r' prefix so we have to sort # versions manually versions = list(self._get_github_tags()) versions.sort( key=operator.attrgetter('base_version'), reverse=True, ) return versions
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_github_chandra_models_version_info():\n with urlopen('https://api.github.com/repos/sot/chandra_models/tags') as url:\n response = url.read()\n tags = json.loads(response.decode('utf-8'))\n\n with urlopen('https://api.github.com/repos/sot/chandra_models/branches') as url:\n respon...
[ "0.6975342", "0.67174757", "0.66325366", "0.6590542", "0.6541449", "0.64341235", "0.64160585", "0.6401173", "0.6249824", "0.61892974", "0.61882895", "0.618634", "0.61683893", "0.61543375", "0.6109142", "0.60987633", "0.608993", "0.6088227", "0.6088083", "0.6067755", "0.604825...
0.7527406
0
Get all attributs values.
def __str__(self): status = "height = {}\n".format(self.height) status += "width = {}\n".format(self.width) status += "channels = {}\n".format(self.channels) status += "classes = {}\n".format(self.classes) status += "batch_size = {}\n".format(self.batch_size) status += "epochs = {}\n".format(self.epochs) status += "save_step = {}\n".format(self.save_step) status += "learning_rate = {}\n".format(self.learning_rate) status += "momentum = {}\n".format(self.momentum) return status
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_attribute(self):\n for attr, value in self.__dict__.items():\n print(attr, value)", "def values(self):\n return self.attrs.values()", "def values(self):\n return [getattr(self, a.name) for a in self.__attrs_attrs__]", "def getAttributes(self):\n pass", "de...
[ "0.7782514", "0.7658524", "0.75126153", "0.7339408", "0.7190724", "0.7190724", "0.71009326", "0.7089321", "0.7067784", "0.7013646", "0.7010554", "0.6959679", "0.6932175", "0.687121", "0.6866942", "0.6840013", "0.6839982", "0.68385345", "0.6838188", "0.67684263", "0.67606723",...
0.0
-1
Get all attributs values.
def __str__(self): status = "height = {}\n".format(self.height) status += "width = {}\n".format(self.width) status += "channels = {}\n".format(self.channels) status += "input_dim = {}\n".format(self.input_dim) status += "architecture = {}\n".format(self.architecture) status += "activations = {}\n".format(self.activations) status += "batch_size = {}\n".format(self.batch_size) status += "epochs = {}\n".format(self.epochs) status += "save_step = {}\n".format(self.save_step) status += "learning_rate = {}\n".format(self.learning_rate) status += "momentum = {}\n".format(self.momentum) return status
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_attribute(self):\n for attr, value in self.__dict__.items():\n print(attr, value)", "def values(self):\n return self.attrs.values()", "def values(self):\n return [getattr(self, a.name) for a in self.__attrs_attrs__]", "def getAttributes(self):\n pass", "de...
[ "0.7782514", "0.7658524", "0.75126153", "0.7339408", "0.7190724", "0.7190724", "0.71009326", "0.7089321", "0.7067784", "0.7013646", "0.7010554", "0.6959679", "0.6932175", "0.687121", "0.6866942", "0.6840013", "0.6839982", "0.68385345", "0.6838188", "0.67684263", "0.67606723",...
0.0
-1
Get all attributs values.
def __str__(self): status = "height = {}\n".format(self.height) status += "width = {}\n".format(self.width) status += "channels = {}\n".format(self.channels) status += "architecture = {}\n".format(self.architecture) status += "activations = {}\n".format(self.activations) status += "conv_activations = {}\n".format(self.conv_activations) status += "conv_architecture = {}\n".format(self.conv_architecture) status += "kernel_sizes = {}\n".format(self.kernel_sizes) status += "pool_kernel = {}\n".format(self.pool_kernel) status += "batch_size = {}\n".format(self.batch_size) status += "epochs = {}\n".format(self.epochs) status += "save_step = {}\n".format(self.save_step) status += "learning_rate = {}\n".format(self.learning_rate) status += "momentum = {}\n".format(self.momentum) return status
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_attribute(self):\n for attr, value in self.__dict__.items():\n print(attr, value)", "def values(self):\n return self.attrs.values()", "def values(self):\n return [getattr(self, a.name) for a in self.__attrs_attrs__]", "def getAttributes(self):\n pass", "de...
[ "0.7782514", "0.7658524", "0.75126153", "0.7339408", "0.7190724", "0.7190724", "0.71009326", "0.7089321", "0.7067784", "0.7013646", "0.7010554", "0.6959679", "0.6932175", "0.687121", "0.6866942", "0.6840013", "0.6839982", "0.68385345", "0.6838188", "0.67684263", "0.67606723",...
0.0
-1
Address for the xbee, where the response originated from.
def address(self): return self._address
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getAddress(self):\r\n return self._endpoint.getAddress()", "def addr(self):\r\n return self._addr", "def getAddress(self):\r\n raise NotImplementedError('Endpoint can not be used directly.')", "def address(self):\n ...", "def address(self) -> str:\n return self._backe...
[ "0.7066411", "0.6962745", "0.6875606", "0.68373436", "0.67819583", "0.67610395", "0.6751729", "0.6742783", "0.6728146", "0.6693614", "0.665993", "0.665993", "0.6601143", "0.6547315", "0.65288603", "0.65288603", "0.65288603", "0.6516686", "0.6515416", "0.6487318", "0.6476641",...
0.6983551
5
Fill the packets data properties.
def fill_data(self, data): self._data = data self._data_length = data[1:3] self._frame_id = data[4] self._address = XbeeAddress(data[5:9], data[9:13], data[13:15]) self._at_command = data[15:17] self._command_status = data[17] try: self._command_data = data[18:21] self._checksum = data[22] except IndexError: self._command_data = None self._checksum = data[18]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initAttributes(self):\n CCSDS.DU.DataUnit.initAttributes(self)\n self.dataFieldHeaderFlag = 0\n self.setPacketLength()", "def _init_data(self) -> None:\n self.dtype = dict()\n self.shape = dict()\n self.size = dict()\n self.attrs = dict()\n self.data_p...
[ "0.6564761", "0.6411371", "0.62188", "0.6214954", "0.61160105", "0.60499305", "0.592144", "0.5917901", "0.58758026", "0.5839473", "0.58239967", "0.58188593", "0.5807109", "0.57820135", "0.57779455", "0.5756668", "0.57558346", "0.57392687", "0.57246864", "0.57196575", "0.57140...
0.6753045
0
Call me before using any of the tables or classes in the model.
def init_model(connection): db = connection for obj in common.__dict__.itervalues(): if type(obj) == type and issubclass(obj, common.Model) and hasattr(obj, '__tablename__'): tablename = getattr(obj, '__tablename__') obj._object_store = Domain(db, tablename) collection_to_class[obj._object_store] = obj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_model(self):\n pass", "def prepare_model(self, **kwargs):\n pass", "def initialize_model(self):\n pass", "def setUp(self):\n create_table(self.DATABASE_PATH)\n self.model = model.CodeReviewDatabase(self.DATABASE_PATH)", "def _before_execute(self, db):\n pa...
[ "0.67817503", "0.6613017", "0.6608557", "0.6572093", "0.6526641", "0.6456574", "0.6391493", "0.63803416", "0.63562936", "0.63505673", "0.63026184", "0.62730545", "0.624931", "0.6238614", "0.62380886", "0.6188223", "0.6165031", "0.6163278", "0.61397433", "0.61384445", "0.61249...
0.0
-1
Sets up the database session
def setup(): global connection connection = MySQLdb.connect(host=config.get('mysql.host'), user=config.get('mysql.user'), passwd=config.get('mysql.password'), db=config.get('mysql.db'), ssl={'ca' : config.get('mysql.cert')}) init_model(connection)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_db_session():\n g.s = database.db_session()", "def setup_session():\n print(\"Setting up session\")\n engine = setup_engine()\n Base.metadata.bin = engine\n\n DBSession = sessionmaker(bind=engine)\n session = DBSession()\n\n return session", "def init_database(self):\n engin...
[ "0.7948405", "0.7780912", "0.74744767", "0.7390211", "0.7389746", "0.7370456", "0.73018765", "0.7287071", "0.7270587", "0.72254795", "0.721942", "0.7173008", "0.71630186", "0.7160225", "0.7144433", "0.7144433", "0.71353585", "0.7134453", "0.7132723", "0.71059585", "0.7102454"...
0.66753596
46
Order the results. type should be ASC or DESC
def order(self, column, type): self._order = (column, type) return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sort_results(self):\n pass", "def orderby():\n pass", "def order_by(self, results, key_, direction=\"ASC\"):\n\n return sorted(results, key=lambda x: x.get(key_), reverse=direction==\"DESC\")", "def sort_results(results_list, sorting_type):\n if sorting_type == \"Oldest\":\n re...
[ "0.7437501", "0.6612537", "0.6586749", "0.65457374", "0.65006554", "0.6475219", "0.6381626", "0.6325047", "0.6201697", "0.6086321", "0.60466444", "0.59852207", "0.59614545", "0.59498024", "0.59177685", "0.5870425", "0.58585554", "0.58449817", "0.5832564", "0.57793987", "0.575...
0.6021243
11
Transform an object retrieved from the database
def transform_outgoing(self, son): if 'type' in son: klass = common.classify(son['type']) return klass.demongofy(son) else: try: return collection_to_class[self.domain].demongofy(son) except KeyError: return son
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transform(self):", "def transform():", "def transform_one(self, obj: Any):\n return obj", "def transform(self, data):", "def Transform(self, record):\n pass", "def _from_db_object(boar, db_boar):\n foreign_key = ['category', 'dormitory', 'source']\n for field in boar.fields:\n...
[ "0.65609854", "0.64150685", "0.6350461", "0.6347081", "0.6238227", "0.5997735", "0.598158", "0.59786004", "0.59712017", "0.58984053", "0.58658886", "0.5845334", "0.58016354", "0.58016354", "0.58016354", "0.58016354", "0.58016354", "0.58016354", "0.58016354", "0.5780222", "0.5...
0.0
-1
test if the stations are sorted correctly by distance
def test_stations_by_distance(): station_list = build_station_list() #test for stations closest to cambridge city coordinates station_list_sort = stations_by_distance(station_list, (52.2053, 0.1218)) output = [(station.name, distance) for (station, distance) in station_list_sort] for n in range(1, len(station_list)): #make sure that the distance of the previous station to the point is less than the next one in the list assert output[n-1][1] <= output[n][1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_nearest_filter(self):\n for airport, reports, count in (\n (True, True, 6),\n (True, False, 16),\n (False, True, 6),\n (False, False, 30),\n ):\n stations = station.nearest(30, -80, 30, airport, reports, 1.5)\n self.assertEqua...
[ "0.66526514", "0.64656", "0.6275362", "0.60979766", "0.6097272", "0.6059003", "0.6043562", "0.59792054", "0.5845956", "0.5807009", "0.5800586", "0.57856506", "0.5729412", "0.5716074", "0.5695116", "0.5680469", "0.5653806", "0.5639628", "0.5623877", "0.5608037", "0.55967736", ...
0.8008606
0
Finds the minimum value of SequenceMatcher.ratio() for two strings such that Differ considers them as 'changed'.
def check_difflib_ratio(): import difflib import random import string def random_modify_string(input_string, change_word=0.5, change_char=0.3): word_list = input_string.split() for i, word in enumerate(word_list): if random.random() < change_word: for j in range(len(word)): if random.random() < change_char: word = word[:j] + random.choice(string.printable) + word[j + 1:] word_list[i] = word return ' '.join(word_list) differ = difflib.Differ() min_ratio = 1.0 for count in range(1000): length = random.randint(5, 100) s1 = ''.join(random.SystemRandom().choice(string.printable) for _ in range(length)) s2 = random_modify_string(s1) sm = difflib.SequenceMatcher(None, s1, s2) ratio = sm.ratio() result = list(differ.compare([s1], [s2])) for line in result: if line.startswith('?'): if ratio < min_ratio: min_ratio = ratio break print('Minimum ratio which difflib considers as "change" is: {}'.format(min_ratio))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_equal_rate(str1, str2):\r\n\treturn difflib.SequenceMatcher(None, str1, str2).quick_ratio()", "def string_match_ratio(str1, str2):\n sm = edit_distance.SequenceMatcher(a=str1, b=str2)\n return sm.ratio()", "def compare_strings(string1: str, string2: str) -> float:\n return SequenceMatcher(None...
[ "0.76466227", "0.75796056", "0.7486614", "0.7058355", "0.7058355", "0.70151675", "0.6979458", "0.6847581", "0.67930263", "0.6693175", "0.66115886", "0.6577811", "0.6511473", "0.6479723", "0.64503026", "0.64290327", "0.63911766", "0.63698953", "0.6221948", "0.60449994", "0.602...
0.7495494
2
Function to instanciate the instrument.
def connect_instrument(self): for instrument in self.rm.list_resources(): try: k2400 = self.init_inst(instrument) k2400.timeout = 5000 if k2400.query('*IDN?')[:8] == 'KEITHLEY': return k2400 except AttributeError as f: logger.warning(f'Unknown error - {f}') except errors.VisaIOError as e: logger.warning(f'Not possible to connect the port - {k2400}.')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new_instrument(self, entry=\"entry\", instrument_name=\"id00\",):\n if not isinstance(entry, h5py.Group):\n entry = self.new_entry(entry)\n return self.new_class(entry, instrument_name, \"NXinstrument\")", "def new_instrument(self, instrument_type):\r\n return self.instrument_...
[ "0.67939395", "0.6719433", "0.6655605", "0.645817", "0.6346724", "0.6167076", "0.6164952", "0.5983404", "0.59604347", "0.5957381", "0.58981216", "0.58949465", "0.5830187", "0.58211017", "0.5807674", "0.580686", "0.57869667", "0.5773858", "0.57293445", "0.5729042", "0.5718009"...
0.0
-1
Function to reset instrument commands.
def reset_instrument(self): return self.inst.write('*RST')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _doReset(self):\n self._cmdReset()", "def reset():\n pass", "def reset():\n pass", "def reset():", "def reset():", "def reset():", "def reset(*args):", "def reset(*args):", "def reset(*args):", "def reset(self):\r\n _debug('simq03b_api.reset')\r\n self.write('*RST')...
[ "0.6838344", "0.6738413", "0.6738413", "0.671417", "0.671417", "0.671417", "0.66459775", "0.66459775", "0.66459775", "0.6641422", "0.6612196", "0.6542922", "0.6525732", "0.65187824", "0.6515516", "0.6514655", "0.649084", "0.646241", "0.646241", "0.646241", "0.646241", "0.64...
0.728747
0
Function to get the instrument ID.
def get_id(self): try: return self.inst.query('*IDN?')[:36] except errors.VisaIOError as e: logger.warning(e) return 'Device not connected.'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_instrument(instrument_id=\"ncnr.refl\"):\n instrument = lookup_instrument(instrument_id)\n return instrument.get_definition()", "def instrID(self):\n return self.query('*IDN?')", "def getIdent (self) :\n return self.id", "def instrumentLookup(self):\n try:\n retu...
[ "0.7232644", "0.7094874", "0.6803308", "0.6676362", "0.66161686", "0.6494307", "0.64633137", "0.64486635", "0.6435335", "0.64340276", "0.6428906", "0.6420143", "0.6411098", "0.6404091", "0.63868725", "0.63842493", "0.63842493", "0.63842493", "0.63842493", "0.63842493", "0.638...
0.6533145
5
Function to turn keithley on.
def power_on(self): return self.inst.write(':OUTP ON')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def turn_eht_on(self):\n raise NotImplementedError", "def friewallOn():\n pass", "def turn_on(self, **kwargs) -> None:\n self.heater.turn_on()", "def turn_on(self):\n self._interrupt_flash()\n if not self.on:\n GPIO.output(self.pin, GPIO.HIGH)\n self.on = ...
[ "0.7135671", "0.663822", "0.6409372", "0.6337713", "0.6281222", "0.6265482", "0.6240381", "0.6208941", "0.61929655", "0.6179498", "0.61754537", "0.6157566", "0.6145475", "0.6067093", "0.60128975", "0.5974138", "0.59354514", "0.58733046", "0.5868544", "0.5858084", "0.58495027"...
0.0
-1
Function to turn keithley off.
def power_off(self): return self.inst.write(':OUTP OFF')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def turn_off(self, **kwargs) -> None:\n self.wink.set_state(False)", "def turn_eht_off(self):\n raise NotImplementedError", "def turn_off(self, **kwargs: Any) -> None:\n with self._wemo_call_wrapper(\"turn off\"):\n self.wemo.off()", "def turn_off(self):\n GPIO.output(s...
[ "0.712389", "0.68947697", "0.67406696", "0.6689545", "0.66849595", "0.6672334", "0.66197157", "0.6614438", "0.6604366", "0.6602539", "0.65976703", "0.6577364", "0.6537614", "0.6469053", "0.6457736", "0.64300936", "0.6429341", "0.6427579", "0.64250124", "0.6411914", "0.6404472...
0.0
-1
Function to select panel.
def select_panel(self): radio_btn = self.sender() if radio_btn.isChecked(): term = radio_btn.text()[:-9] return self.inst.write(f':ROUT:TERM {term.upper()}')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_panel(self, panel_id):\n return self.panels.get(panel_id, None)", "def get_active_panel(cls):\n active_panel = None\n panel_list = pm.getPanel(type='modelPanel')\n for panel in panel_list:\n if pm.modelEditor(panel, q=1, av=1):\n active_panel = panel\...
[ "0.6231665", "0.60948694", "0.58209896", "0.57440066", "0.55831444", "0.55661774", "0.5561857", "0.55333793", "0.5477269", "0.5407169", "0.53396136", "0.5338085", "0.5338085", "0.53237325", "0.5315146", "0.53150046", "0.5298998", "0.52901876", "0.5276756", "0.5271008", "0.525...
0.6054565
2
Preprocess xml files, including extract main info from xml, and load category info.
def load_data_to_json(root_path, extract=True, decode="utf-8"): category_map = {} for file in os.listdir(root_path): # 行业分类文件夹 path = os.path.join(root_path, file) if os.path.isdir(path): for xml_file in os.listdir(path): # 读取每个行业分类的文件 xml_file_path = os.path.join(path, xml_file) try: if xml_file_path.endswith(".xml"): doc = read_main_info(xml_file_path, extract=extract, decode=decode) if file in category_map.keys(): category_map[file].append(doc) else: category_map[file] = list() category_map[file].append(doc) except UnicodeDecodeError: print("UnicodeDecodeError:%s" % xml_file_path) continue with open(JSON_FILE, "w") as f: json.dump(category_map, f)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_xml(self):\n self.process_gpx_file(str(self.filename))", "def extract_titles():\n \"\"\"\n The final data has the mapping post_title -> cat.\n This requires three relations:\n (pid, id) -> feed_url, feed_url -> blog_url, blog_url -> cat.\n Each file contains one raw feed with se...
[ "0.6510495", "0.58586115", "0.5815168", "0.5765223", "0.5643835", "0.5622581", "0.5620029", "0.55302376", "0.5526815", "0.5474749", "0.54636383", "0.54632366", "0.54615885", "0.5434923", "0.53978896", "0.53765374", "0.53668106", "0.53492767", "0.5326623", "0.52779377", "0.527...
0.0
-1
load training set and testing set from json
def load_data_set_from_json(json_path, ratio=0.7): train_doc_list = [] train_category_list = [] test_doc_list = [] test_category_list = [] if os.path.exists(json_path): with open(json_path, "r") as f: category_map = json.load(f) categories = category_map.keys() for category in categories: all_doc_list = category_map.get(category) length = len(all_doc_list) train_set_length = int(length * ratio) for i in range(length): if i < train_set_length: train_doc_list.append(all_doc_list[i]) train_category_list.append(category) else: test_doc_list.append(all_doc_list[i]) test_category_list.append(category) else: print("File doesn't exist, please run load_file_to_json first") return train_doc_list, train_category_list, test_doc_list, test_category_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_training_data(\n self,\n train_data_file=\"datasets/train_data.json\",\n test_data_file=\"datasets/test_data.json\",\n ):\n train_data = pd.read_json(train_data_file)\n test_data = pd.read_json(test_data_file)\n return train_data, test_data", "def load_traini...
[ "0.74476916", "0.7354245", "0.7169759", "0.6705753", "0.6644966", "0.66320914", "0.6515724", "0.64992905", "0.6398464", "0.63905", "0.62948185", "0.6278425", "0.6241316", "0.62365764", "0.62238926", "0.6203974", "0.6202843", "0.6183581", "0.6180504", "0.6172601", "0.6147451",...
0.6676436
4
Sends a message to all listeners of the topic
def _send(self, topic, message): body = {'message': encode(message)} result = requests.post('{0}/topics/{1}'.format(self.apiUrl, topic), json=body) return result.json()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listen(self, topics):\n logging.debug(f'Listen to {list(map(lambda x: x.name, topics))}')\n\n for topic in map(lambda x: x.name, topics):\n try:\n self.subscribe(topic)\n logging.debug(f'Subscribed the {topic} topic')\n except Exception:\n ...
[ "0.6755926", "0.647603", "0.6460375", "0.6401803", "0.6373115", "0.6307153", "0.6262211", "0.6215271", "0.613345", "0.6130038", "0.6114487", "0.60955477", "0.6017219", "0.600299", "0.5941551", "0.588395", "0.58836484", "0.5870817", "0.58618623", "0.5843349", "0.5838989", "0...
0.0
-1
Starts listening for new messages on this topic
def _listen_on(self, topic, transform=None): task = self.topic_task_map.get(topic) if task is None: task = StreamingRequestTask(self.apiUrl, topic, transform) self.topic_task_map[topic] = task task.start() listener = task.create_listener(transform) self.listener_task_map[listener] = task return listener
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def listen(self):\n self.channel.start_consuming()", "def listen(self):\n self._client.listen(self._default_subscribe_to_dest)", "def listen(self):\n\n # It's ideal to start listening before the game starts, but the\n # down-side\n # is that object construction may not be don...
[ "0.7356364", "0.70570236", "0.7030769", "0.7006789", "0.68794745", "0.6866184", "0.685871", "0.6839579", "0.6831507", "0.67023295", "0.6645298", "0.6638821", "0.6618311", "0.6550533", "0.65046924", "0.64672434", "0.6445343", "0.6389064", "0.63380456", "0.63122493", "0.6299043...
0.644124
17
Sends a message to all listeners of the special topic broadcast
def broadcast(self, message): self._send('broadcast', message)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def broadcast(self, msg):\n for client in self.clients.values():\n send_data(client.socket, msg)", "def broadcast(msg):\r\n for user in clients:\r\n msg_client(msg, user)", "def broadcast(msg):\n\n for sock in clients:\n sock.send(bytes(msg, \"utf-8\"))", "def broadcast(msg, prefi...
[ "0.68770134", "0.6842449", "0.6777803", "0.6613753", "0.6587547", "0.6495968", "0.6483176", "0.6476947", "0.64376646", "0.64327085", "0.63729095", "0.6357374", "0.63375884", "0.6321455", "0.63169193", "0.6260771", "0.6246585", "0.62057936", "0.6156581", "0.6138631", "0.613646...
0.54191
70
Identity,Account/Astakos. Test ~okeanos authentication credentials
def check_user_credentials(token, auth_url='https://accounts.okeanos.grnet.gr' '/identity/v2.0'): logging.info(' Test the credentials') try: auth = AstakosClient(auth_url, token) auth.authenticate() logging.info(' Authentication verified') return AUTHENTICATED except ClientError: logging.error('Authentication failed with url %s and token %s' % ( auth_url, token)) return NOT_AUTHENTICATED
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_aio_can_login_to_web_portal(aio):", "def test_oms_credentials(*args, **kwargs):\n\treturn {'status':'success'}", "def test_basic_login(self):\n c = Client()\n c.login(username='a', password='123456')", "def test_01_authenticated(self):\r\n res = self.signin(email=self.email_addr...
[ "0.6723792", "0.671743", "0.6590624", "0.657539", "0.65334004", "0.643523", "0.64110607", "0.6394348", "0.6331037", "0.63070375", "0.6304327", "0.6270155", "0.62404686", "0.62375134", "0.6210401", "0.6204399", "0.61970514", "0.618015", "0.6161113", "0.61508554", "0.61445206",...
0.5886558
66
queries the database for a specific character takes a name returns a json with the lines
def lines_from_char(character): query = f""" SELECT script_l FROM script JOIN characters ON characters.char_id = script.characters_char_id WHERE name = '{character}' """ data = pd.read_sql_query(query,engine) return data.to_json(orient="records")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lines_from_char_ep(character,ep):\n query = f\"\"\"\nSELECT script_l FROM script\nJOIN characters \nON characters.char_id = script.characters_char_id\nINNER JOIN episodes\nON episodes.ep_id = script.episodes_ep_id\nWHERE name = '{character}' and episode = '{ep}'\n\"\"\"\n data = pd.read_sql_query(query,e...
[ "0.6725487", "0.60146636", "0.5816583", "0.5747994", "0.5689336", "0.5673698", "0.56607604", "0.5536652", "0.5442061", "0.5321022", "0.5247645", "0.52152646", "0.5201513", "0.5195355", "0.5165863", "0.51640224", "0.5157612", "0.5139213", "0.5121536", "0.5115688", "0.5093197",...
0.7432127
0
queries the database for a specific character and episode takes a name and episode returns a json with the filtered lines
def lines_from_char_ep(character,ep): query = f""" SELECT script_l FROM script JOIN characters ON characters.char_id = script.characters_char_id INNER JOIN episodes ON episodes.ep_id = script.episodes_ep_id WHERE name = '{character}' and episode = '{ep}' """ data = pd.read_sql_query(query,engine) return data.to_json(orient="records")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lines_():\n query = f\"\"\"\nSELECT script_l, `name`, episode\nFROM script\nINNER JOIN characters\nON characters.char_id = script.characters_char_id\nINNER JOIN episodes\nON episodes.ep_id = script.episodes_ep_id\n\"\"\"\n data = pd.read_sql_query(query, engine)\n return data.to_json(orient=\"records\...
[ "0.6161546", "0.61566335", "0.58697116", "0.531941", "0.51938283", "0.5191531", "0.51225615", "0.50665915", "0.49034274", "0.48820502", "0.48670354", "0.48493454", "0.48400128", "0.48004526", "0.47973293", "0.47908667", "0.47580832", "0.47565988", "0.47521466", "0.47492182", ...
0.7129047
0
queries the database for all lines takes no arguments returns a json with all the lines
def lines_(): query = f""" SELECT script_l, `name`, episode FROM script INNER JOIN characters ON characters.char_id = script.characters_char_id INNER JOIN episodes ON episodes.ep_id = script.episodes_ep_id """ data = pd.read_sql_query(query, engine) return data.to_json(orient="records")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def get_all_record():\n # X_new = item.to_df()\n # item_str = item.to_string()\n # project_code = int(item_str[item_str.find('=')+1:])\n pg = PostgreSQL()\n return_json = pg.fetch_all_records()\n return return_json", "def select_all_lines(conn):\n\n cur = conn.cursor()\n cur.execute...
[ "0.70531267", "0.6787454", "0.6718839", "0.66217417", "0.62935805", "0.62533355", "0.6238542", "0.6206922", "0.611652", "0.608391", "0.6056544", "0.59862447", "0.5970311", "0.59272057", "0.5872251", "0.58481425", "0.58452594", "0.5820792", "0.58157414", "0.5807491", "0.578491...
0.6937443
1
queries the database to insert a line from a character takes a name , character and episode returns a confirmation message
def new_line(script_l, character, episode): if up.check("characters", character): char_id = up.giveId("characters", character) else: up.insertCharacter(character) char_id = up.giveId("characters", character) if up.check("episodes", episode): ep_id = up.giveId("episodes", episode) else: up.insertEpisode(episode) ep_id = up.giveId("episodes", episode) if up.check("script", script_l) and up.check("characters", character) and up.check("episodes", episode): return "line exists" else: engine.execute(f""" INSERT INTO script (script_l, characters_char_id, episodes_ep_id) VALUES ("{script_l}", "{char_id}", "{ep_id}"); """) return f"successfully loaded: {character},{script_l},{episode}"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def insertCharacter(string):\n if check(\"character\", string):\n return \"character exists\"\n else:\n engine.execute(f\"INSERT INTO characters (name) VALUES ('{string}');\")", "def insertLine(row):\n if check(\"script\", row[\"dialogue\"]) and check(\"characters\", row[\"character\"]) an...
[ "0.68971574", "0.6643312", "0.6609034", "0.63286656", "0.62008613", "0.596312", "0.5952589", "0.5870909", "0.58442897", "0.5829052", "0.58106846", "0.58059555", "0.57868826", "0.57820517", "0.5769277", "0.56395286", "0.5630925", "0.56198794", "0.5592354", "0.55757797", "0.551...
0.6655293
1
Creates a new SnakemakeRule instance from a dict representation
def __init__( self, rule_id, parent_id, input, output, local=False, template=None, **kwargs ): self.rule_id = rule_id self.parent_id = parent_id self.input = input self.output = output self.local = local self.template = template self.params = kwargs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_dict(cls, dictionary: Dict[str, Any]):\n return cls(**dictionary)", "def from_dict(cls, data: Dict[str, any]):\n return cls(**data)", "def from_dict(self, d):\r\n options = dict(d)\r\n task_id = options['task_id']\r\n del options['task_id']\r\n return SubtaskS...
[ "0.61375326", "0.60902405", "0.60286725", "0.60125345", "0.597105", "0.5911886", "0.5895749", "0.5895749", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", ...
0.0
-1
Prints a string representation of SnakemakeRule instance
def __repr__(self): template = """ SnakemakeRule ({}) - parent_id : {} - input : {} - output : {} - local : {} - template : {} - params : {} """ return template.format( self.rule_id, self.parent_id, self.input, self.output, self.local, self.template, self.params, )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n return \"[ %s ]\" % str(self.__rule)", "def __str__(self):\n return \"{ %s }\" % str(self.__rule)", "def __str__(self):\n return \"{ %s }1\" % str(self.__rule)", "def print_rules(self):\n for idx, r in enumerate(self.rules):\n print(idx, \"=>\", r._...
[ "0.7497269", "0.74336165", "0.73365", "0.7002294", "0.6986714", "0.6474687", "0.6371976", "0.63269794", "0.62451273", "0.620827", "0.6196005", "0.6190379", "0.6182506", "0.6179493", "0.6122271", "0.60830194", "0.60090804", "0.5991629", "0.59799564", "0.5888777", "0.5873028", ...
0.7577809
0
Creates a new SnakemakeRule instance from a dict representation
def __init__( self, rule_id, parent_id, input, output, inline=True, local=False, template=None, **kwargs ): super().__init__(rule_id, parent_id, input, output, local, template, **kwargs) self.inline = inline self.groupped = False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_dict(cls, dictionary: Dict[str, Any]):\n return cls(**dictionary)", "def from_dict(cls, data: Dict[str, any]):\n return cls(**data)", "def from_dict(self, d):\r\n options = dict(d)\r\n task_id = options['task_id']\r\n del options['task_id']\r\n return SubtaskS...
[ "0.61388683", "0.60924554", "0.603016", "0.60145026", "0.59718156", "0.5911789", "0.5897906", "0.5897906", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", "0.5892087", ...
0.0
-1
Prints a string representation of SnakemakeRule instance
def __repr__(self): template = """ - inline : {} """ return super().__repr__() + template.format(self.inline)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __repr__(self):\n template = \"\"\"\n SnakemakeRule ({})\n \n - parent_id : {}\n - input : {}\n - output : {}\n - local : {}\n - template : {}\n - params : {}\n \"\"\"\n return template.format(\n ...
[ "0.7577809", "0.7497269", "0.74336165", "0.73365", "0.7002294", "0.6986714", "0.6474687", "0.6371976", "0.63269794", "0.62451273", "0.620827", "0.6196005", "0.6190379", "0.6182506", "0.6179493", "0.6122271", "0.60830194", "0.60090804", "0.5991629", "0.59799564", "0.5888777", ...
0.0
-1
Creates a new SnakemakeRule instance from a dict representation
def __init__(self, rule_id, input, output, local=False, template=None, **kwargs): super().__init__(rule_id, None, input, output, local, template, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_dict(cls, dictionary: Dict[str, Any]):\n return cls(**dictionary)", "def from_dict(cls, data: Dict[str, any]):\n return cls(**data)", "def from_dict(self, d):\r\n options = dict(d)\r\n task_id = options['task_id']\r\n del options['task_id']\r\n return SubtaskS...
[ "0.61375326", "0.60902405", "0.60286725", "0.60125345", "0.597105", "0.5911886", "0.5895749", "0.5895749", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", "0.5891623", ...
0.0
-1
Creates a new SnakemakeRule instance from a dict representation
def __init__(self, input, output, options, local=False): super().__init__( "create_training_set", None, input, output, local, "multi_training_set.snakefile", ) self.options = options
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_dict(cls, dictionary: Dict[str, Any]):\n return cls(**dictionary)", "def from_dict(cls, data: Dict[str, any]):\n return cls(**data)", "def from_dict(self, d):\r\n options = dict(d)\r\n task_id = options['task_id']\r\n del options['task_id']\r\n return SubtaskS...
[ "0.61378634", "0.60913914", "0.6031257", "0.6013804", "0.59718573", "0.591209", "0.5898094", "0.5898094", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", "0.5891009", ...
0.0
-1
Creates a new ReportRule instance from a dict representation
def __init__(self, rule_id, input, output, rmd, title, name, metadata, styles, theme, local=False, **kwargs): super().__init__(rule_id, None, input, output, local, **kwargs) self.rmd = rmd self.title = title self.name = name self.metadata = metadata self.styles = styles self.theme = theme # include rmd in params as well (expected by snakemake) self.params["rmd"] = rmd # other parameters used for report generation self.params["title"] = title self.params["name"] = name self.params["metadata"] = metadata self.params["styles"] = styles self.params["theme"] = theme
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_dictionary(cls,\r\n dictionary):\r\n if dictionary is None:\r\n return None\r\n\r\n # Extract variables from the dictionary\r\n id = dictionary.get('id')\r\n consumer_id = dictionary.get('consumerId')\r\n consumer_ssn = dictionary.get('c...
[ "0.61548984", "0.6064839", "0.5945463", "0.59221816", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0.591966", "0...
0.0
-1
Creates a new SnakemakeRuleGroup instance from a dict representation
def __init__( self, rule_id, parent_id, group_actions, input, output, local=False, **kwargs ): self.rule_id = rule_id self.parent_id = parent_id self.input = input self.output = output self.local = local self.params = kwargs self.groupped = True # load sub-actions self.actions = OrderedDict() for action in group_actions: # get action name action_name = action["action_name"] del action["action_name"] # determine template filepath action_type = action_name.split("_")[0] template = "actions/{}/{}.snakefile".format(action_type, action_name) # create new SnakemakeRule instance self.actions[action_name] = ActionRule( rule_id=None, parent_id=None, input=None, output=None, template=template, **action )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_json(value):\r\n for key in ('id', 'name', 'version'):\r\n if key not in value:\r\n raise TypeError(\"Group dict {0} missing value key '{1}'\".format(\r\n value, key))\r\n\r\n if value[\"version\"] != Group.VERSION:\r\n raise TypeError(...
[ "0.6249006", "0.60356647", "0.59878504", "0.58986735", "0.5808315", "0.57978857", "0.56254345", "0.55351543", "0.54451144", "0.5398016", "0.53898287", "0.5336041", "0.5296309", "0.5290205", "0.52836525", "0.5264334", "0.52494353", "0.52434814", "0.52236265", "0.5214074", "0.5...
0.50452054
83
Creates a new DataIntegrationRule instance from a dict representation
def __init__(self, rule_id, inputs, output, local=False, template=None, **kwargs): self.rule_id = rule_id self.inputs = inputs self.output = output self.local = local self.template = template self.params = kwargs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_dict(cls, data):\n return cls(**data)", "def from_dict(cls, data: Dict[str, any]):\n return cls(**data)", "def _from_dict(cls, d):\n confidence = d.get(\"confidence\", None)\n constant = d.get(\"constant\", False)\n tags = d.get(\"tags\", None)\n return cls(\n...
[ "0.6742197", "0.65646493", "0.6552889", "0.648437", "0.63919514", "0.6375292", "0.6365639", "0.62918556", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0.62024546", "0...
0.0
-1
Prints a string representation of DataIntegrationRule instance
def __repr__(self): template = """ DataIntegrationRule ({}) - inputs : {} - output : {} - local : {} - template : {} - params : {} """ return template.format( self.rule_id, self.inputs, self.output, self.local, self.template, self.params )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n return \"[ %s ]\" % str(self.__rule)", "def __str__(self):\n return \"{ %s }\" % str(self.__rule)", "def __str__(self):\n return \"{ %s }1\" % str(self.__rule)", "def __str__ (self) :\n\t\ttext_rule = \"\"\n\t\t\n\t\tfor key, rules in self.production_rules.items() :\...
[ "0.68674856", "0.68624336", "0.6645672", "0.6532725", "0.6322785", "0.628434", "0.62689245", "0.6190423", "0.604574", "0.6039333", "0.59951794", "0.5989062", "0.5976397", "0.59762686", "0.5963851", "0.5931985", "0.59197044", "0.59166557", "0.590007", "0.5897591", "0.5883885",...
0.7635887
0
Syncs an account by the account_name
def test_sync_account(self): runner = CliRunner() LOG.info("Testing 'calm sync account {}".format(ACCOUNT_NAME)) result = runner.invoke( cli, ["sync", "account", ACCOUNT_NAME], ) if result.exit_code: cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} LOG.debug( "Cli Response: {}".format( json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) ) ) LOG.debug( "Traceback: \n{}".format( "".join(traceback.format_tb(result.exc_info[2])) ) ) pytest.fail("Account sync failed") LOG.info("Success")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sync_account(account):\n stripe_account = stripe.Account.retrieve(id=account.stripe_id)\n return sync_account_from_stripe_data(stripe_account)", "def put_account(self, account):\n \n pass", "def change_account(self, account):\r\n check_account = Account(account, steem_instance=se...
[ "0.7384746", "0.7075692", "0.68083966", "0.6631079", "0.6615135", "0.6615135", "0.657953", "0.6560432", "0.6560432", "0.65222824", "0.63499165", "0.6179354", "0.6179354", "0.6179354", "0.6179354", "0.60058033", "0.6002847", "0.5868163", "0.5854182", "0.5799399", "0.57433146",...
0.6851966
2
Send a password reset email to the suer
def deliever_password_reset_mail(user_id, reset_password_url): user = User.query.get(user_id) if user is not None: try: url = f"{celery.conf.get('EMAIL_SERVICE_HOST')}/api/email/" payload = { "sender": celery.conf.get("MAIL_DEFAULT_SENDER"), "receiver": user.email, "subject": "Password reset from snake eyes", "template_id": 2, "request_id": uuid4().hex, "template_params": {"username": user.username, "reset_password_url": reset_password_url} } response = post(url, json=payload, headers={"Accept": "application/json"}) except RequestException as e: print(f"[********] UNABLE TO DELIEVER MAIL {e}")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_password_reset_email():\n aaa.send_password_reset_email(\n username=post_get('username'),\n email_addr=post_get('email_address')\n )\n return 'Please check your mailbox.'", "def send_pw_reset_email(user):\n token = user.get_token()\n message = Message(\n 'Reset Your P...
[ "0.8450898", "0.8287654", "0.8170258", "0.81437016", "0.8022583", "0.79395986", "0.79303086", "0.7930095", "0.78969234", "0.7764277", "0.77437997", "0.7633127", "0.75886834", "0.7538881", "0.7430109", "0.74129903", "0.7399443", "0.73773956", "0.7365607", "0.7308838", "0.73015...
0.76305157
12
Initializes the finger model on which control's to be performed.
def __init__(self): self.urdf_path = '/opt/blmc_ei/src/robot_properties_fingers/urdf/pro/trifingerpro.urdf' self.tip_link_names = [ "finger_tip_link_0", "finger_tip_link_120", "finger_tip_link_240", ] self.robot_model = pinocchio.buildModelFromUrdf(self.urdf_path) self.data = self.robot_model.createData() self.tip_link_ids = [ self.robot_model.getFrameId(link_name) for link_name in self.tip_link_names ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def controls_setup(self):\n pass", "def _initialize(self):\n \n self.view.lineEdit_3.setText(\"C,H,N,O,P,S\")\n self.view.spin_hit.setValue(20)\n self.view.lineEdit_2.setValue(10.)\n self.view.checkBox_8.setChecked(True)", "def initialize(self):\n self.Update()\...
[ "0.63043123", "0.60653013", "0.6031488", "0.59517133", "0.5951652", "0.5938752", "0.5925825", "0.5837696", "0.583645", "0.5820525", "0.5815757", "0.57706904", "0.5763885", "0.5759453", "0.5727885", "0.57115877", "0.57033086", "0.5632936", "0.5617571", "0.55889696", "0.5574349...
0.0
-1
Compute end effector positions for the given joint configuration.
def forward_kinematics(self, joint_positions): pinocchio.framesForwardKinematics( self.robot_model, self.data, joint_positions, ) return [ np.asarray(self.data.oMf[link_id].translation).reshape(-1).tolist() for link_id in self.tip_link_ids ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def end_effectors_pos(self):\n def relative_pos_in_egocentric_frame(physics):\n end_effector = physics.bind(self._entity.end_effectors).xpos\n torso = physics.bind(self._entity.root_body).xpos\n xmat = np.reshape(physics.bind(self._entity.root_body).xmat, (3, 3))\n return np.reshape(np.dot(e...
[ "0.66042507", "0.5623347", "0.5479054", "0.5479054", "0.53170127", "0.5305009", "0.517255", "0.515755", "0.51083076", "0.5084015", "0.5083615", "0.503888", "0.4970291", "0.49539378", "0.4946133", "0.4942832", "0.49085337", "0.49002567", "0.48989028", "0.48899674", "0.48782575...
0.0
-1
Compute the jacobian of a finger at configuration q0.
def compute_jacobian(self, finger_id, q0): frame_id = self.tip_link_ids[finger_id] return pinocchio.computeFrameJacobian( self.robot_model, self.data, q0, frame_id, pinocchio.ReferenceFrame.LOCAL_WORLD_ALIGNED, )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def jacobian(self, dt):\n raise NotImplementedError", "def jacobian(self, x):\n pass", "def jacobian_ur5(q, delta=0.0001):\n # Alocacion de memoria\n J = np.zeros((3,6))\n # Transformacion homogenea inicial (usando q)\n T = fkine_ur5(q)\n # Iteracion para la derivada de cada column...
[ "0.7174676", "0.7125965", "0.71125233", "0.702315", "0.6786565", "0.6759826", "0.6759339", "0.66214025", "0.6568994", "0.653416", "0.6530263", "0.6495432", "0.64650875", "0.64634913", "0.64042807", "0.6402122", "0.6401185", "0.6370891", "0.6329893", "0.63101774", "0.63079286"...
0.7850823
0
Performs a line search, reducing the step size until the end effector moves closer to the desired position.
def _line_search(self, finger_id, xdes, q0, dq, max_iter=10, dt=1.0): xcurrent = self.forward_kinematics(q0)[finger_id] original_error = np.linalg.norm(xdes - xcurrent) error = np.inf q = q0 iter = 0 while error >= original_error: q = pinocchio.integrate(self.robot_model, q0, dt * dq) q = self._project_onto_constraints(q) xcurrent = self.forward_kinematics(q)[finger_id] error = np.linalg.norm(xdes - xcurrent) dt /= 2 iter += 1 if iter == max_iter: # Likely at a local minimum return q0, original_error, 0 return q, error, 2 * dt
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def LineSearch(Pos, Dir, dx, EFracTol, M, L, Cut,\n Accel = 1.5, MaxInc = 10., MaxIter = 10000):\n #start the iteration counter\n Iter = 0\n\n #find the normalized direction\n NormDir = Dir / np.sqrt(np.sum(Dir * Dir))\n\n #take the first two steps and compute energies\n Dists = [0....
[ "0.68932176", "0.6578148", "0.6477977", "0.6415695", "0.63791597", "0.63132477", "0.6291235", "0.62559795", "0.61942357", "0.6165324", "0.59852433", "0.59499186", "0.58552325", "0.58447623", "0.58173835", "0.57838714", "0.5747636", "0.5531475", "0.5473468", "0.546951", "0.546...
0.6214263
8
Computes the direction in which to update joint positions.
def _compute_dq(self, finger_id, xdes, q0): Ji = self.compute_jacobian(finger_id, q0)[:3, :] frame_id = self.tip_link_ids[finger_id] xcurrent = self.data.oMf[frame_id].translation Jinv = np.linalg.pinv(Ji) return Jinv.dot(xdes - xcurrent)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def junction_direction(start_junction: Cell, end_junction: Cell) -> Direction:\n dx = end_junction.column - start_junction.column\n dy = end_junction.row - start_junction.row\n if dy == 0:\n return Direction.E if dx > 0 else Direction.W\n return Direction.S if dy > 0 else Dir...
[ "0.66263056", "0.64241725", "0.6403347", "0.6384829", "0.635541", "0.6334753", "0.62950736", "0.6242469", "0.6156684", "0.61190027", "0.6118653", "0.6086555", "0.60604405", "0.59984165", "0.5993906", "0.5980935", "0.59729487", "0.59629905", "0.59500116", "0.5931701", "0.59066...
0.0
-1
Compute the joint positions which approximately result in a given end effector position.
def inverse_kinematics(self, finger_id, xdes, q0, tol=0.001, max_iter=20): iter = 0 q = self._project_onto_constraints(q0) xcurrent = self.forward_kinematics(q)[finger_id] error = np.linalg.norm(xdes - xcurrent) dt = 1.0 prev_error = np.inf while error > tol and (prev_error - error) > 1e-5 and iter < max_iter: dq = self._compute_dq(finger_id, xdes, q) # start the line search with a step size a bit larger than the # previous step size. dt = min(1.0, 2 * dt) prev_error = error q, error, dt = self._line_search(finger_id, xdes, q, dq, dt=dt) iter += 1 if error > tol: return None return q
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_joint_positions(self, joint_angles ): \n\n\n # current angles\n res_joint_angles = joint_angles.copy() \n\n # detect limits\n maskminus= res_joint_angles > self.joint_lims[:,0]\n maskplus = res_joint_angles < self.joint_lims[:,1]\n \n res_joint_angles = res_joi...
[ "0.6462823", "0.63993484", "0.5633511", "0.5578069", "0.5572044", "0.55286443", "0.549767", "0.54915667", "0.5486143", "0.5462283", "0.5461075", "0.54595315", "0.54163367", "0.5408067", "0.5399364", "0.53867376", "0.53730583", "0.536113", "0.533599", "0.5330091", "0.5328494",...
0.0
-1
Return the first element of a 2tuple. >>> x([1,2]) 1
def x(a): return a[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _unpack_tuple(x):\n if len(x) == 1:\n return x[0]\n else:\n return x", "def _unpack_tuple(x):\n if len(x) == 1:\n return x[0]\n else:\n return x", "def single_element_tuple():\n single = (1,)\n print(type(single)) # <type 'tuple'>", "def _item_or_tuple(self, ...
[ "0.7628563", "0.7628563", "0.70851874", "0.6855407", "0.6824074", "0.6824074", "0.68147033", "0.6742139", "0.6742139", "0.6360675", "0.63022053", "0.63022053", "0.62847096", "0.62811184", "0.62070453", "0.6096625", "0.60256463", "0.6006833", "0.59457403", "0.5892296", "0.5847...
0.6782777
7
Return the second element of a 2tuple. >>> y([1,2]) 2
def y(a): return a[1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def second(xs):\n if not xs:\n return None\n return xs[1]", "def second(xs):\n if not xs:\n return None\n return xs[1]", "def second(pair):\n\treturn pair[1]", "def _unpack_tuple(x):\n if len(x) == 1:\n return x[0]\n else:\n return x", "def _unpack_tuple(x):\n ...
[ "0.68817425", "0.68817425", "0.6846556", "0.67233163", "0.67233163", "0.6483205", "0.62575674", "0.6175765", "0.60955626", "0.60955626", "0.60284346", "0.59068465", "0.57835966", "0.57772356", "0.57753384", "0.5761288", "0.570475", "0.5695715", "0.5689088", "0.568156", "0.567...
0.6603884
5
Euclidean distance (in pixels). >>> distance( (1,1),(2,2) ) == math.sqrt(2) True
def distance(a,b): return np.sqrt( (x(a)-x(b))**2 + (y(a)-y(b))**2 )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def euclidean_distance(x, y):\n x1, y1 = x\n x2, y2 = y\n return sqrt((x1 - x2)**2 + (y1 - y2)**2)", "def euclidean_distance(x1: np.ndarray, x2: np.ndarray) -> float:\n return np.sqrt(np.square(x1 - x2).sum())", "def euclidean_distance(x1, x2):\n return np.sqrt(np.sum(np.square(np.subtract(x1, x...
[ "0.73406076", "0.7278529", "0.7243951", "0.7199394", "0.71825296", "0.7171505", "0.71208745", "0.7119449", "0.7014406", "0.6984854", "0.69682425", "0.69490933", "0.694145", "0.69366956", "0.69258237", "0.6921075", "0.6908294", "0.68853986", "0.68835557", "0.68756497", "0.6842...
0.6488884
58
Creates initial design of n_samples drawn from a latin hypercube.
def sample_latin_hypercube(low, high, n_samples, rng=None): if rng is None: rng = np.random.RandomState(np.random.randint(0, 10000)) n_dims = low.shape[0] samples = [] for i in range(n_dims): if isinstance(low[i], numbers.Integral): sample = random.sample(range(low[i], high[i]), n_samples) elif isinstance(low[i], numbers.Real): lower_bound = low[i] upper_bound = high[i] sample = lower_bound + rng.uniform(0, 1, n_samples) * (upper_bound - lower_bound) else: raise ValueError('Latin hypercube sampling can only draw from types int and real,' ' got {}!'.format(type(low[i]))) samples.append(sample) samples = np.array(samples, dtype=object) for i in range(n_dims): rng.shuffle(samples[i, :]) return samples.T
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gen_hypercube(samples, N):\n\n np.random.seed(4654562)\n hypercube = lhs(N, samples=samples)\n\n return hypercube", "def latin_hypercube(n_pts, dim):\n X = np.zeros((n_pts, dim))\n centers = (1.0 + 2.0 * np.arange(0.0, n_pts)) / float(2 * n_pts)\n for i in range(dim): # Shuffle the center ...
[ "0.7344228", "0.70358026", "0.6742598", "0.66756856", "0.66756856", "0.66473216", "0.6490043", "0.6219821", "0.61849946", "0.5989805", "0.5899154", "0.5826263", "0.57939756", "0.5752896", "0.57472134", "0.5707989", "0.5667523", "0.5647223", "0.5642459", "0.5631831", "0.558605...
0.6458857
7
Creates the initial search space using latin hypercube sampling.
def lhs_start(hyperbounds, n_samples, rng=None): low_bounds = [] high_bounds = [] for bound in hyperbounds: low_bounds.append(bound[0]) high_bounds.append(bound[1]) low_bounds = np.array(low_bounds, dtype=object) high_bounds = np.array(high_bounds, dtype=object) samples = sample_latin_hypercube(low_bounds, high_bounds, n_samples, rng=rng) samples = samples.tolist() return samples
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_latin_hypercube(samples, param_dict, class_root, seed=10):\n # Set random seed\n random.seed(seed)\n\n # Create dictionary to hold sampled parameter values\n sample_points = {}\n for key in param_dict.keys():\n sample_points[key] = np.zeros(samples)\n Ndim = len(param_dict.key...
[ "0.6223337", "0.59628797", "0.58912903", "0.5874907", "0.5854502", "0.5754657", "0.5733271", "0.56222767", "0.5502466", "0.54998386", "0.54974604", "0.5460404", "0.5426823", "0.5370437", "0.53134114", "0.5280649", "0.52736545", "0.5264173", "0.52439547", "0.52239805", "0.5213...
0.6189054
1
merges station letters into single list of station names
def stations(station_let): stat = ['']*np.size(station_let,0) for i in range(len(stat)): for j in range(4): if station_let[i][j] is not np.ma.masked: stat[i]+=station_let[i][j] return stat
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def combine_state_names_and_abbreviations():\n lst=[]\n for k,v in us_state_abbrev.items():\n lst.append(v)\n lst = sorted(lst[:10])\n state = sorted(states)\n print(lst+state[-10:])\n return", "def combine_state_names_and_abbreviations():\n return sorted(us_state_abbrev.values())[:10...
[ "0.59636515", "0.5915971", "0.58542436", "0.5853559", "0.5729847", "0.5717873", "0.57039684", "0.5570993", "0.54327935", "0.54008317", "0.53579676", "0.5210223", "0.51921755", "0.5177079", "0.5170437", "0.51636785", "0.5162297", "0.514331", "0.5133783", "0.51176363", "0.51169...
0.53781
10
converts time to gmt, appends to list
def gmt(time): gmt = [0]*time.size for i in range(time.size): gmt[i]=datetime.utcfromtimestamp(time[i]).strftime('%Y-%m-%d %H:%M:%S') return gmt
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _update_time(self):\r\n\r\n curr_time = datetime.datetime.now()\r\n time = []\r\n time.append([int(x) for x in '{0:06b}'.format(curr_time.second)])\r\n time.append([int(x) for x in '{0:06b}'.format(curr_time.minute)])\r\n time.append([int(x) for x in '{0:06b}'.format(curr_tim...
[ "0.6536344", "0.61749583", "0.5915965", "0.57811874", "0.5765255", "0.5675775", "0.5667193", "0.5663627", "0.5660941", "0.5621869", "0.5613156", "0.55986845", "0.5576497", "0.5538767", "0.55233485", "0.55079854", "0.5443805", "0.5407144", "0.53734505", "0.53541595", "0.534225...
0.72068673
0
finds stations that don't have predictand data and appends them to a list
def miss_station(all_stations,stations): diff = len(all_stations)-len(stations) k=0 i=0 miss_stations = ['']*diff a = all_stations[:] a.sort() s = stations[:] s.sort() while i < len(stations): while a[i] != s[i]: miss_stations[k]=a[i] del a[i] k+=1 i+=1 return miss_stations
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stations():\n\n return station_list", "def prep_stations(url):\n stations = []\n _stations = requests.get(url).json()\n\n for _station in _stations['stationBeanList']:\n if _station['statusKey'] == 1:\n stations.append([_station['stationName'], _station['id'],\n ...
[ "0.6918483", "0.63394576", "0.6295055", "0.6175655", "0.6134076", "0.6036043", "0.59990704", "0.5990439", "0.5939404", "0.59325945", "0.5899833", "0.5873434", "0.58216053", "0.5731162", "0.5703947", "0.5658584", "0.5635071", "0.563275", "0.56268054", "0.5625375", "0.56150836"...
0.66159266
1
builds and saves dataframe to be used for graphs
def dataframe(): #allows function to access station, gmt, and miss_station functions global stations global gmt global miss_station #read predictor file control = cfg.read_yaml('../registry/graphs.yaml') pred_ctrl = cfg.read_yaml(cfg.get_config_path(control.pred_file)) predd_ctrl = cfg.read_yaml(cfg.get_config_path(control.predd_file)) #get file paths and update database predictor_file_path = control.predictor_file_path predictand_file_path = control.predictand_file_path pred_file_id = update(predictor_file_path) predd_file_id = update(predictand_file_path) #store lead time and date range lead_time = control.lead_time date_range = control.date_range #get info for fetch many dates start,end,stride = read_pred.parse_range(date_range) fcst_ref_time = control.date_range[0].split('-')[0][-2:] #initialize list of predictors pred_list = pred_ctrl.predictors predictor = [] #loops through predictors to build camps data objects for entry_dict in pred_list: #formats metadata pred = create.preprocess_entries(entry_dict, fcst_ref_time) #adds info to metadata that's not currently being stored pred.search_metadata['reserved2'] = lead_time*3600 pred.search_metadata['file_id'] = pred_file_id pred.search_metadata['reserved1'] = 'vector' #build camps data objects for each day variable = fetch_many_dates(predictor_file_path,start,end,stride,pred.search_metadata) #appends all data to single camps object if variable[0] is not None: var = variable[0] arrs = [] for i in range(len(variable)): arrs.append(variable[i].data) var.data = np.stack(arrs) predictor.append(var) #initializes list of predictands predd_list = predd_ctrl.predictands predictand = [] #loops through predictands to build camps data objects for entry_dict in predd_list: #formats metadata vertical_coordinate = entry_dict.pop('Vertical_Coordinate') entry_dict['file_id'] = predd_file_id #build camps objects for each day variable = fetch_many_dates(predictand_file_path,start, end, stride, entry_dict) #append all data to single 
camps object var = variable[0] arrs = [] for i in range(len(variable)): arrs.append(variable[i].data) try: var.data = np.stack(arrs) predictand.append(var) except: print("Can't read " + variable.name) #getting predictor station and time data predr = Dataset(predictor_file_path[0]) predr_stat = predr.variables['station'][:] if lead_time == 3: predr_time = predr.variables['OM__phenomenonTimeInstant'][:] elif lead_time == 6: predr_time = predr.variables['OM__phenomenonTimeInstant1'][:] elif lead_time == 12: predr_time = predr.variables['OM__phenomenonTimeInstant2'][:] predr.close() #reformatting predictor station and time data predr_stations = stations(predr_stat) predr_gmt = gmt(predr_time) #getting predictand station and time data predd = Dataset(predictand_file_path[0]) predd_stat = predd.variables['station'][:] predd_time = predd.variables['OM__resultTime'][:] predd.close() #reformatting predictand station and time data predd_stations = stations(predd_stat) predd_gmt = gmt(predd_time) #choosing predictand observations that line up with predictor time hour = (predictor[0].metadata['FcstTime_hour']/3600) + lead_time days = len(predd_gmt)/24 predd_hours = [0]*days k=0 for i in range(len(predd_gmt)): if i%24 == hour: predd_hours[k]=predd_gmt[i] k+=1 #catches when GFS data doesn't cover the last day of the month if len(predr_gmt) < len(predd_hours): predd_hours = predd_hours[:-1] #find missing stations miss_stations = miss_station(predr_stations,predd_stations) stations = predd_stations #station and time array info = [['',''] for k in range(len(predr_gmt)*len(stations))] for i in range(len(predr_gmt)): for j in range(len(stations)): k = i*len(stations)+j info[k][0]=predr_gmt[i] info[k][1]=stations[j] #create column names names = ['']*(len(predictor)+len(predictand)+2) names[0]='Time' names[1]='Station' #creating array arr = np.zeros((len(stations)*len(predr_gmt),len(predictor)+len(predictand))) #adding predictor data for i in range(len(predictor)): #remove lead time 
and forecast reference time from variable name #and add variable name to column list of final dataframe if lead_time == 12: names[i+2]='GFS_'+predictor[i].get_variable_name()[:-11] else: names[i+2]='GFS_'+predictor[i].get_variable_name()[:-10] #create pandas dataframe of data and sort alphabetically by station name predictor[i].data = np.squeeze(predictor[i].data,axis=2) predictor[i].data = pd.DataFrame(predictor[i].data,columns=predr_stations,index=predr_gmt) predictor[i].data = predictor[i].data.reindex(sorted(predictor[i].data.columns),axis=1) #remove stations with no predictand data k=0 a=miss_stations[:] for j in predictor[i].data.columns: if not a: break if j==a[k]: predictor[i].data=predictor[i].data.drop(j,axis=1) del a[k] #add data to final dataframe for b in range(len(predr_gmt)): for c in range(len(stations)): k = b*len(stations)+c arr[k][i] = predictor[i].data.iloc[b][c] #add predictand data for i in range(len(predictand)): #removing extra underscore, adding variable name to column names names[len(predictor)+2+i]='METAR_'+predictand[i].get_variable_name()[:-1] #resize array and create pandas dataframe predictand[i].data = np.squeeze(predictand[i].data,axis=2) predictand[i].data = pd.DataFrame(predictand[i].data,columns=predd_stations,index=predd_hours) predictand[i].data = predictand[i].data.reindex(sorted(predictand[i].data.columns),axis=1) #remove extra days of predictand data predictand[i].data = predictand[i].data.iloc[0:len(predr_time),:] #add predictand data to array for b in range(len(predr_gmt)): for c in range(len(stations)): k = b*len(stations)+c val = predictand[i].data.iloc[b][c] #catch metar fill data if val == 9999: val = np.nan arr[k][len(predictor)+i]=val #add station and time data to array and save as csv data = np.concatenate([info,arr],axis = 1) to_save = pd.DataFrame(data,columns=names) to_save.to_csv(str(start)+'_'+str(end)+'_'+str(lead_time)+'hrs.csv')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_dataframe(self):\n\n df = pd.DataFrame({'date': [],\n 'RUN': [],\n 'CLONE': [],\n 'GEN': pd.Series(0, index=[], dtype='int'),\n 'frame': pd.Series([], index=[], dtype='int'),\n ...
[ "0.72918093", "0.6938951", "0.69203115", "0.67623717", "0.6709177", "0.661987", "0.66142595", "0.6613234", "0.65876186", "0.65471715", "0.65268254", "0.6518715", "0.6476695", "0.6442566", "0.63830763", "0.63486266", "0.6345487", "0.6316515", "0.6284121", "0.6281206", "0.62784...
0.6171512
27
Hexlify raw text, return hexlified text.
def hexlify(text): if six.PY3: text = text.encode('utf-8') hexlified = binascii.hexlify(text) if six.PY3: hexlified = hexlified.decode('utf-8') return hexlified
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unhexlify(text):\n unhexlified = binascii.unhexlify(text)\n\n if six.PY3:\n unhexlified = unhexlified.decode('utf-8')\n\n return unhexlified", "def hexify(text):\r\n return ' '.join([hexify_word(word) for word in text.split()])", "def normalize(self, text):\n\n return binascii.hexli...
[ "0.7632533", "0.7439492", "0.70764095", "0.65433866", "0.6380229", "0.61830765", "0.61594963", "0.61447966", "0.6132453", "0.6124988", "0.61229116", "0.6114684", "0.59884095", "0.5947459", "0.59325373", "0.5788667", "0.5782058", "0.57663274", "0.5741124", "0.57352805", "0.571...
0.78595716
0
Unhexlify raw text, return unhexlified text.
def unhexlify(text): unhexlified = binascii.unhexlify(text) if six.PY3: unhexlified = unhexlified.decode('utf-8') return unhexlified
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hexlify(text):\n if six.PY3:\n text = text.encode('utf-8')\n\n hexlified = binascii.hexlify(text)\n\n if six.PY3:\n hexlified = hexlified.decode('utf-8')\n\n return hexlified", "def test_unhexlify_not_python():\n assert '' == uflash.unhexlify(\n ':020000040003F7\\n:10E0...
[ "0.76805043", "0.724818", "0.71128637", "0.6909091", "0.6862392", "0.6219935", "0.6197366", "0.59515", "0.5922873", "0.5843483", "0.57907474", "0.5650561", "0.56349534", "0.56267136", "0.55028135", "0.550243", "0.5497304", "0.54252976", "0.5407516", "0.53313655", "0.52876633"...
0.8604057
0
Parse a line of text from the plot_data file.
def parse_line(self, line): if line[0] == "#": return False parts = [x.strip() for x in line.strip().split(",")] self.unix_time = int(parts[0]) self.cycles_done = int(parts[1]) self.cur_path = int(parts[2]) self.paths_total = int(parts[3]) self.pending_total = int(parts[4]) self.pending_favs = int(parts[5]) self.map_size = float(parts[6].replace("%","")) self.unique_crashes = int(parts[7]) self.unique_hangs = int(parts[8]) self.max_depth = int(parts[9]) self.execs_per_sec = float(parts[10]) return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_plot_cmd(self, line):\n line, any_vars = self.find_vars_in_str(line)\n words = line.split()\n words = self.fix_words(words)\n\n # Parse line\n has_out_var = False\n if len(words) == 6:\n has_out_var = True\n _, plot_type, _, in_data, _, out_...
[ "0.6670215", "0.6484195", "0.6202049", "0.61103636", "0.5995822", "0.5897944", "0.5888218", "0.58159584", "0.57317466", "0.5645726", "0.56055975", "0.55672276", "0.55463046", "0.55388415", "0.55194986", "0.55111915", "0.5509744", "0.55055726", "0.5483326", "0.54770184", "0.54...
0.49969345
89
Obtains the record in the set with the time closest to the given $unix_time. If this record with not $within the correct number of seconds, an exception is raised.
def get_record(self, unix_time, within): if len(self.records) <= 0: raise Exception("No records in this set") r = self.records[0] closest_record = r closest_delta = abs(r.unix_time - unix_time) for r in self.records[1:]: delta = abs(r.unix_time - unix_time) if delta < closest_delta: closest_record = r closest_delta = delta if closest_delta > within: raise Exception("Closest record to %d was %d (delta=%d) which exceeds limit of %d" % (unix_time, closest_record.unix_time, closest_delta, within)) return closest_record
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_closest_record(self, time):\n dist = 10000000\n record = -1\n # TODO: optimise a bit\n for i, itime in enumerate(self.times):\n if (abs(time-itime)) < dist:\n dist = abs(time-itime)\n record = i\n\n return record", "def find_near...
[ "0.6383025", "0.57725835", "0.5726713", "0.5603155", "0.550198", "0.5465888", "0.51996434", "0.51091826", "0.49394408", "0.49366295", "0.49245772", "0.48886275", "0.48809275", "0.48312008", "0.48274845", "0.4817998", "0.4760067", "0.47593623", "0.47202304", "0.47175002", "0.4...
0.84365386
0
Pulls in the records from other into self with the other, but since the timestamps won't match up perfectly, the output will only have a record per $period number of seconds.
def merge_with(self, other, period=60): new_list = [] last_timestamp = 0 for r in self.records: if abs(r.unix_time - last_timestamp) > period: # Accept this record last_timestamp = r.unix_time other_r = other.get_record(r.unix_time, period/2) r.merge_with(other_r) new_list.append(r) self.records = new_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __add__ ( self, other, resample_opts=None ):\n result = ObservationStorage (datadir=self.datadir, \\\n resample_opts=resample_opts )\n if self.date[0] > other.date[0]:\n start_date = other.date[0]\n else:\n start_date = self.date[0]\n if self.date[-1...
[ "0.60452324", "0.5793913", "0.5528706", "0.5526547", "0.5517215", "0.5493142", "0.54775965", "0.54737484", "0.5462078", "0.54354554", "0.5395026", "0.5321515", "0.529761", "0.5283373", "0.5275335", "0.5271934", "0.52359205", "0.51437724", "0.51378435", "0.513382", "0.51214904...
0.7975035
0
Given a UC480 camera object (instrumental module) and a number indicating the number of trap objects, applies an iterative image analysis to individual trap adjustment in order to achieve a nearly homogeneous intensity profile across traps.
def stabilize_intensity(which_cam, cam, verbose=False): L = 0.5 # Correction Rate mags = np.ones(12) ### ! ntraps = len(mags) iteration = 0 while iteration < 5: iteration += 1 print("Iteration ", iteration) im = cam.latest_frame() try: trap_powers = analyze_image(which_cam, im, ntraps, iteration, verbose) except (AttributeError, ValueError) as e: print("No Bueno, error occurred during image analysis:\n", e) break mean_power = trap_powers.mean() rel_dif = 100 * trap_powers.std() / mean_power print(f'Relative Power Difference: {rel_dif:.2f} %') if rel_dif < 0.8: print("WOW") break deltaP = [mean_power - P for P in trap_powers] dmags = [(dP / abs(dP)) * sqrt(abs(dP)) * L for dP in deltaP] mags = np.add(mags, dmags) print("Magnitudes: ", mags) break # self._update_magnitudes(mags) _ = analyze_image(im, ntraps, verbose=verbose)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def analyze_image(which_cam, image, ntraps, iteration=0, verbose=False):\n threshes = [0.5, 0.6]\n margin = 10\n threshold = np.max(image) * threshes[which_cam]\n im = image.transpose()\n\n x_len = len(im)\n peak_locs = np.zeros(x_len)\n peak_vals = np.zeros(x_len)\n\n ## Trap Peak Detectio...
[ "0.5652331", "0.5335536", "0.52658623", "0.5259198", "0.5201728", "0.51909906", "0.515129", "0.5142113", "0.5076965", "0.5063984", "0.5055916", "0.49459288", "0.4884683", "0.48668435", "0.4824002", "0.482143", "0.47895026", "0.47749937", "0.47587273", "0.4741739", "0.4710989"...
0.5542998
1
Returns intensity profile of 1d gaussian beam
def gaussian1d(x, x0, w0, A, offset): if w0 == 0: return 0 return A * np.exp(-2 * (x - x0) ** 2 / (w0 ** 2)) + offset
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pvalue_gaussian(self):\n \n pv = 2 * stats.norm.sf(abs(self.TS_prime_obs), loc=0, scale=1)\n return(pv)", "def gaussian(amp, fwhm, mean, x):\n return amp * np.exp(-4. * np.log(2) * (x-mean)**2 / fwhm**2)", "def estimate_uni_gaussian(X):\n mu = mean(X, axis=0)\n sigma2 = va...
[ "0.6619526", "0.63317794", "0.62996733", "0.627856", "0.62739635", "0.61623496", "0.6124203", "0.610553", "0.60432166", "0.60368556", "0.60006434", "0.59681493", "0.5952793", "0.59485036", "0.5927189", "0.5926995", "0.5875386", "0.5873252", "0.5861136", "0.58427656", "0.58294...
0.0
-1
Returns intensity profile of trap array
def gaussianarray1d(x, x0_vec, wx_vec, A_vec, offset, ntraps): array = np.zeros(np.shape(x)) for k in range(ntraps): array = array + gaussian1d(x, x0_vec[k], wx_vec[k], A_vec[k], 0) return array + offset
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def intensity(self) -> int:", "def intensity(self):\r\n return np.power(prb.amplitude, 2)", "def getIntensity(self):\n return self.getIntensityS() + self.getIntensityP()", "def estimate_skin_tone(face_roi):\n return [int(face_roi[:, :, i].mean()) for i in range(face_roi.shape[-1])]", "def ...
[ "0.62673634", "0.593113", "0.5869846", "0.58618975", "0.5649231", "0.5581468", "0.5578077", "0.5490693", "0.5435854", "0.5350697", "0.5301182", "0.5300452", "0.52878165", "0.5265786", "0.5254433", "0.52346134", "0.5218291", "0.5211029", "0.5208299", "0.5205285", "0.5173795", ...
0.0
-1
Juggles parameters in order to be able to fit a list of parameters
def wrapper_fit_func(x, ntraps, *args): a, b, c = list(args[0][:ntraps]), list(args[0][ntraps:2 * ntraps]), list(args[0][2 * ntraps:3 * ntraps]) offset = args[0][-1] return gaussianarray1d(x, a, b, c, offset, ntraps)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def doParametersOfInterest(self):\n\n self.modelBuilder.doVar(\"eAfb[0.6,-0.75,0.75]\");\n self.modelBuilder.doVar(\"eA0[0.05, -1.0, 1.0]\");\n self.modelBuilder.doVar(\"rAfb[1.0,-5.0, 5.0]\");\n self.modelBuilder.doVar(\"rA0[1.0, -5.0, 5.0]\");\n self.modelBuilder.doSet(\"POI\",...
[ "0.6610045", "0.65775573", "0.6563652", "0.6521646", "0.646521", "0.6455579", "0.63229144", "0.63210595", "0.63088965", "0.629318", "0.6284352", "0.6191725", "0.61914194", "0.60860085", "0.6085613", "0.60815185", "0.60616034", "0.6052334", "0.6052334", "0.6052334", "0.6023257...
0.0
-1