query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Method to deserialize in csv
def load_from_file_csv(cls):
    """Deserialize a list of instances from ``<ClassName>.csv``.

    Each CSV row holds the integer attributes of one instance in the
    order expected for the class (Rectangle: id,width,height,x,y;
    Square: id,size,x,y).  Every row is converted to a dict of ints
    and passed to ``cls.create`` to build an instance.

    Returns:
        list: the instances read; empty when the file is missing or a
        row cannot be parsed (best-effort, matching the original
        contract of returning whatever was accumulated so far).
    """
    # Column order per supported class name (was two copy-pasted loops).
    headers = {
        "Rectangle": ["id", "width", "height", "x", "y"],
        "Square": ["id", "size", "x", "y"],
    }
    filename = cls.__name__ + ".csv"
    result = []
    keys = headers.get(cls.__name__)
    try:
        with open(filename, encoding="utf-8") as file:
            for row in csv.reader(file):
                if keys is None:
                    break  # unknown class: nothing we can deserialize
                attributes = {key: int(value) for key, value in zip(keys, row)}
                result.append(cls.create(**attributes))
        return result
    except (OSError, ValueError):
        # Narrowed from a bare except: missing/unreadable file or a
        # malformed (non-integer) cell; return what we have so far.
        return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_csv():", "def read_csv_file(self):\n pass", "def from_csv(cls, load_folder: Path) -> \"Parameters\":\n serializer = serializer_factory(fmt=SerializerEnum.CSV)\n return serializer.load(class_obj=cls, folder_path=load_folder)", "def __obtain_data_from_csv__(self, csvfile):\n ...
[ "0.73366404", "0.68047184", "0.67198044", "0.65935373", "0.6587779", "0.6547909", "0.6501733", "0.64538", "0.63990664", "0.6373529", "0.6321235", "0.62773603", "0.6271541", "0.6211887", "0.6153087", "0.61328447", "0.61079156", "0.60422295", "0.60405314", "0.6038385", "0.60356...
0.59177405
32
Method that opens a window and draws all the Rectangles and Squares
def draw(list_rectangles, list_squares):
    """Open a turtle window and draw every Rectangle and Square.

    Rectangles are filled purple (#800080), squares turquoise
    (#40E0D0).  After each shape the pen jumps to the mirrored
    position, preserving the original layout behaviour.
    """
    pen = turtle.Turtle()

    def jump_to(x, y):
        # Reposition without leaving a trace.
        pen.penup()
        pen.goto(x, y)
        pen.pendown()

    def filled_box(width, height):
        # Trace and fill a width x height rectangle from the current spot.
        pen.begin_fill()
        for _ in range(2):
            pen.forward(width)
            pen.right(90)
            pen.forward(height)
            pen.right(90)
        pen.end_fill()

    for shape in list_rectangles:
        pen.color("#800080")
        jump_to(shape.x, shape.y)
        filled_box(shape.width, shape.height)
        jump_to(-shape.x, -shape.y)

    for shape in list_squares:
        pen.color("#40E0D0")
        jump_to(shape.x, shape.y)
        filled_box(shape.size, shape.size)
        jump_to(-shape.x, -shape.y)

    turtle.done()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_windows():\n martin.begin_fill() # lines 88-118 draw out a row consisting of 3 rectangles for windows\n for i in range(2):\n martin.pendown()\n martin.forward(13)\n martin.right(90)\n martin.forward(20)\n martin.right(90)\n martin.penup()\n martin.end_fi...
[ "0.71454996", "0.6748487", "0.67099464", "0.6702593", "0.6619946", "0.661862", "0.66149807", "0.65757835", "0.6491082", "0.641685", "0.63909274", "0.63824314", "0.6365075", "0.6343854", "0.6338814", "0.63130724", "0.63010806", "0.62890565", "0.62855595", "0.6277103", "0.62664...
0.6050221
37
Returns the path for userspecific blender scripts for all major platforms
def getScriptsPath(blenderversion):
    """Return the user-specific Blender scripts path for this platform.

    Args:
        blenderversion: Blender version string interpolated into the path.

    Returns:
        str: normalized, user-expanded path; for unsupported platforms
        an ``ERROR: ...`` string is returned instead, matching the
        original contract.
    """
    # Platform -> per-user scripts directory template.
    templates = {
        'linux': '~/.config/blender/{0}/scripts',
        'darwin': '~/Library/Application Support/Blender/{0}/scripts',
        'win32': '~/AppData/Roaming/Blender Foundation/Blender/{0}/scripts',
    }
    template = templates.get(sys.platform)
    if template is None:
        return 'ERROR: {0} not supported,'.format(sys.platform)
    return os.path.normpath(os.path.expanduser(template.format(blenderversion)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getBlenderConfigPath(blenderversion):\n if sys.platform == 'linux':\n scriptspath = os.path.normpath(\n os.path.expanduser('~/.config/blender/{0}/config'.format(blenderversion))\n )\n elif sys.platform == 'darwin':\n scriptspath = os.path.normpath(\n os.path.exp...
[ "0.6769609", "0.62371445", "0.60271466", "0.590203", "0.5760948", "0.5753", "0.57450694", "0.57419586", "0.57419586", "0.5724369", "0.5693562", "0.5689543", "0.56795627", "0.5673507", "0.5627154", "0.5591305", "0.55765843", "0.55733895", "0.55558956", "0.5554302", "0.55541027...
0.72838265
0
Returns the path for configuration data for all major platforms
def getConfigPath():
    """Return the per-user phobos configuration directory for this platform.

    Returns:
        str: normalized, user-expanded path; an ``ERROR: ...`` string is
        returned for unsupported platforms, matching the original contract.
    """
    bases = {
        'linux': '~/.config/phobos',
        'darwin': '~/Library/Application Support/phobos',
        'win32': '~/AppData/Roaming/phobos',
    }
    try:
        return os.path.normpath(os.path.expanduser(bases[sys.platform]))
    except KeyError:
        return 'ERROR: {0} not supported,'.format(sys.platform)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def platform_config_dir():\n if POSIX: # nocover\n dpath_ = os.environ.get('XDG_CONFIG_HOME', '~/.config')\n elif DARWIN: # nocover\n dpath_ = '~/Library/Application Support'\n elif WIN32: # nocover\n dpath_ = os.environ.get('APPDATA', '~/AppData/Roaming')\n else: # nocover\n ...
[ "0.7201866", "0.67362136", "0.6644315", "0.66377807", "0.6636949", "0.66022515", "0.65694153", "0.65639627", "0.65615714", "0.64916116", "0.64902836", "0.64741963", "0.6470176", "0.6469606", "0.6384029", "0.6373276", "0.63566935", "0.63498926", "0.63385725", "0.6334437", "0.6...
0.72742504
0
Returns the configuration path for userspecific blender data.
def getBlenderConfigPath(blenderversion):
    """Return the user-specific Blender config path for this platform.

    Args:
        blenderversion: Blender version string interpolated into the path.

    Returns:
        str: normalized, user-expanded path; for unsupported platforms
        an ``ERROR: ...`` string is returned instead, matching the
        original contract.
    """
    # Platform -> per-user config directory template.
    templates = {
        'linux': '~/.config/blender/{0}/config',
        'darwin': '~/Library/Application Support/Blender/{0}/config',
        'win32': '~/AppData/Roaming/Blender Foundation/Blender/{0}/config',
    }
    template = templates.get(sys.platform)
    if template is None:
        return 'ERROR: {0} not supported,'.format(sys.platform)
    return os.path.normpath(os.path.expanduser(template.format(blenderversion)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cfg_path(self):\n return self._cfg_path", "def config_file_and_path():\n return str(rmfriend_dir() / 'config.cfg')", "def configPath(self):\n return os.path.dirname(__file__)", "def get_config_file_location():\n\n return './' + CONFIG_FILE_NAME", "def get_data_path(name):\n js = ...
[ "0.72044575", "0.6985214", "0.6869317", "0.68644977", "0.67701125", "0.6757046", "0.67386943", "0.6731083", "0.66863096", "0.66134137", "0.6526606", "0.650571", "0.65033567", "0.65007305", "0.64274645", "0.6427291", "0.64223856", "0.64209414", "0.6402593", "0.6392462", "0.634...
0.7370779
0
Calculate the area of each grid cell for a userprovided grid cell resolution. Area is in square meters, but resolution is given in decimal degrees.
def do_grid(resolution):
    """Compute grid-cell areas (square meters) at *resolution* degrees.

    Latitude band edges span -57..84 degrees; the longitude dimension
    has ``int(360/resolution)`` columns.  Returns an ndarray of shape
    (n_lat_bands, n_lon_cells).
    """
    earth_radius_sq = 6371000 ** 2  # mean Earth radius squared, m^2
    # Band edges in radians, as in the original grid definition.
    lat_edges = np.deg2rad(np.arange(-57, 84, resolution))
    n_lon = int(360. / resolution)
    # Spherical band area: R^2 * dlon * (sin(lat_hi) - sin(lat_lo)).
    band = earth_radius_sq * np.deg2rad(resolution) * np.diff(np.sin(lat_edges))
    # Replicate the band areas across every longitude column.
    cell_areas = np.ones(n_lon)[:, None] * band
    return cell_areas.T
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def grid_area(ulon,ulat):\n R = 6371. * 1000. # radius of Earth in meters\n dlon = N.diff(ulon)\n dlat = N.diff(ulat)\n dx = N.outer(deg2rad * R * N.cos(deg2rad * ulat),dlon) # dx (meters)\n dy = 60. * 1852. * dlat # dy (meters)\n area = (dx[1:] + dx[:-1])...
[ "0.6939898", "0.6827993", "0.6783789", "0.6774796", "0.6750549", "0.67404014", "0.6659824", "0.6657721", "0.66374826", "0.66325015", "0.6627994", "0.6614619", "0.65675646", "0.653298", "0.6518739", "0.64830256", "0.6461933", "0.6324823", "0.63124484", "0.6302911", "0.6300791"...
0.743558
0
Integrate dy/dt = rhs_func from t=0 to t=num_days with y(0) = y0. Returns a list of state vectors, one for each day.
def integrate(rhs_func, y0, num_days, iterations_per_day):
    """Integrate dy/dt = rhs_func(y, t) from t=0 over *num_days* days.

    Uses forward Euler with *iterations_per_day* sub-steps per day and
    y(0) = y0.  Returns a list of num_days + 1 state snapshots
    (clones), one per day boundary, starting with the initial state.
    """
    dt = 1 / iterations_per_day
    state = y0.clone()
    t = 0
    snapshots = [y0.clone()]
    for _ in range(num_days):
        for _ in range(iterations_per_day):
            # Forward-Euler step; updates the running state in place.
            state += dt * rhs_func(state, t)
            t += dt
        snapshots.append(state.clone())
    return snapshots
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def integrateTransients(self, numdays=500):\n tend = numdays * 24.0\n\n r = sp.integrate.solve_ivp(self.derv, (0, tend), [0.7, 0.0, 0.0], t_eval=[tend], method='Radau')\n results_trans = np.transpose(r.y)\n\n return (results_trans[-1, :])", "def integrate_explicit(self, y, derivative,...
[ "0.650054", "0.5741845", "0.5683879", "0.54373115", "0.54229563", "0.53735167", "0.5365727", "0.5364101", "0.53461295", "0.5233571", "0.5200717", "0.518434", "0.5144111", "0.513495", "0.51227826", "0.5083833", "0.5077746", "0.5073881", "0.5071325", "0.504791", "0.5043892", ...
0.78576934
0
Defines and validates params for show
def get_args():
    """Declare and validate the request params for the show endpoint."""
    # The resource id comes from the URL path, not the query string.
    id_field = fields.UUID(required=True, location="view_args")
    return {"id": id_field}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def show_params(self):\n \n return self.params[self.profile]", "def _validate_params(title, start, end, description, show_me_as):\n if start and end:\n start_date = datetime.datetime.strptime(start, '%Y-%m-%d')\n end_date = datetime.datetime.strptime(end, '%Y-%m-%d')\n if st...
[ "0.61713666", "0.611831", "0.5898031", "0.5898031", "0.5898031", "0.58703226", "0.58703226", "0.58703226", "0.58703226", "0.58380497", "0.5812235", "0.5797601", "0.5796676", "0.57913303", "0.57282037", "0.569812", "0.5684126", "0.5652535", "0.55059636", "0.54621", "0.5441083"...
0.0
-1
Defines and validates params for index
def index_args():
    """Declare and validate the request params for the index endpoint.

    The index endpoint takes no parameters, so the schema is empty.
    """
    return dict()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_index(self, *args, **dargs):\n pass", "def index_mapping_from_parameter(self, index, *param):\n pass", "def build_index():\n pass", "def get_indexed_param(self):\n switcher_index = self.input_param(\"switch_index\").value \n indexed_param = self.input_par...
[ "0.6390675", "0.63674235", "0.62412107", "0.6033372", "0.60061246", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", "0.5963241", ...
0.5969254
5
Defines and validates params for create
def create_args():
    """Declare and validate the request params for the create endpoint."""
    schema = {}
    # A non-empty username is mandatory.
    schema["username"] = fields.String(required=True, validate=validate.Length(min=1))
    schema["email"] = fields.Email(required=True)
    schema["password"] = fields.String(required=True)
    return schema
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(self, validated_data):", "def _validate_create_args(self, args):\r\n invalid_args = [k for k in self.required_params if args.get(k) is None]\r\n if invalid_args:\r\n raise ArgumentError('Missing required options: %s'\r\n % ','.join(invalid_args))...
[ "0.70593756", "0.64916545", "0.63662356", "0.63505614", "0.62192565", "0.62109154", "0.6152213", "0.612279", "0.60907304", "0.6072563", "0.60699487", "0.6029734", "0.5980383", "0.5978173", "0.5978173", "0.5950788", "0.5927034", "0.59155107", "0.591124", "0.590597", "0.5874916...
0.62978315
4
Defines and validates params for update
def update_args():
    """Declare and validate the request params for the update endpoint.

    Only the id (taken from the URL path) is required; the remaining
    fields are optional partial-update values.
    """
    schema = {
        "id": fields.UUID(required=True, load_from="id", location="view_args"),
        "username": fields.String(validate=validate.Length(min=1)),
        "email": fields.Email(),
    }
    # Plain optional string fields, appended in declaration order.
    for key in ("password", "old_password"):
        schema[key] = fields.String()
    return schema
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _build_update_params(self, params):", "def _update_params(self):\n pass", "def update_params(self):\n pass", "def update(self, params):", "def test_invalid_update_kwarg(self):\n m0 = TestUpdateModel.create(count=5, text='monkey')\n with self.assertRaises(ValidationError):\n ...
[ "0.75934356", "0.73089683", "0.7228769", "0.696913", "0.6787318", "0.67589974", "0.6730389", "0.66961044", "0.6688387", "0.6641361", "0.6608608", "0.65807754", "0.65085006", "0.6446182", "0.6431962", "0.64096665", "0.6403965", "0.6382234", "0.63350594", "0.6333726", "0.629854...
0.0
-1
Defines and validates params for delete
def delete_args():
    """Declare and validate the request params for the delete endpoint."""
    # The resource id comes from the URL path, not the query string.
    uuid_from_path = fields.UUID(required=True, location="view_args")
    return {"id": uuid_from_path}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete(self, *args, **kwargs):\n pass", "def delete(self, *args, **kwargs):\n pass", "def delete(self, *args, **kwargs) -> Any:\n pass", "def delete_params(self, base_key, *params):\n self.params[base_key + \".no_\" + \"|\".join(params)] = self.delete_params_s(self.params[base...
[ "0.66014385", "0.66014385", "0.6589458", "0.64934564", "0.64702225", "0.6431175", "0.64087933", "0.6322618", "0.62888837", "0.6267921", "0.6250074", "0.61924964", "0.6137085", "0.6132666", "0.6091776", "0.6062422", "0.60473233", "0.6042318", "0.60356736", "0.6009196", "0.5993...
0.6291099
8
Raises a valid HTTPException
def raise_old_password_was_wrong():
    """Abort the request with a 422, flagging an incorrect old password."""
    error = ValidationError("old password is not correct")
    payload = {"old_password": ["Was not correct"]}
    return abort(422, exc=error, messages=payload)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _raise_http_error(self, *args, **kwargs):", "def bad_request():\n return HttpError(400)", "def httperror( status_code=500, message=b'' ):", "def raise_on_error(self):\n if not self._status.success:\n cls = UrlApi.InfraHTTPError if self._infra_step else UrlApi.HTTPError\n raise cls('...
[ "0.85027486", "0.7542584", "0.7349662", "0.73455864", "0.7268067", "0.7267772", "0.7257609", "0.72223854", "0.7208652", "0.7133695", "0.7032097", "0.7018844", "0.6965641", "0.6883675", "0.6882655", "0.685364", "0.6834133", "0.6827386", "0.6821966", "0.6815098", "0.6753286", ...
0.0
-1
This function takes a SOM or SO and goes through the individual spectra adjusting the bin contents by either multiplying or dividing by the bin widths or the bin centers taken from the individual spectra.
def fix_bin_contents(obj, **kwargs): import hlr_utils # set up for working through data (result, res_descr) = hlr_utils.empty_result(obj) o_descr = hlr_utils.get_descr(obj) # Setup keyword arguments try: scale = kwargs["scale"] except KeyError: scale = False try: width = kwargs["width"] except KeyError: width = True try: units = kwargs["units"] except KeyError: units = "microsecond" # Primary axis for transformation. If a SO is passed, the function, will # assume the axis for transformation is at the 0 position if o_descr == "SOM": axis_pos = hlr_utils.one_d_units(obj, units) else: axis_pos = 0 result = hlr_utils.copy_som_attr(result, res_descr, obj, o_descr) # iterate through the values import array_manip import utils for i in xrange(hlr_utils.get_length(obj)): val = hlr_utils.get_value(obj, i, o_descr, "y") err2 = hlr_utils.get_err2(obj, i, o_descr, "y") axis = hlr_utils.get_value(obj, i, o_descr, "x", axis_pos) axis_err2 = hlr_utils.get_err2(obj, i, o_descr, "x", axis_pos) map_so = hlr_utils.get_map_so(obj, None, i) if width: (bin_const, bin_const_err2) = utils.calc_bin_widths(axis, axis_err2) else: (bin_const, bin_const_err2) = utils.calc_bin_centers(axis, axis_err2) if scale: value = array_manip.mult_ncerr(val, err2, bin_const, bin_const_err2) else: value = array_manip.div_ncerr(val, err2, bin_const, bin_const_err2) hlr_utils.result_insert(result, res_descr, value, map_so, "y") return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def binspecdat( wavelength, flux, fluxerr=[], binwidth=10, sigclip=0, sumerrs=False,\n wstart=0, wend=0 ):\n\n w,f = wavelength, flux\n wbinned, fbinned = [], []\n wbin,fbin,dfbin = np.array([]), np.array([]), np.array([])\n dw, df = [], []\n if wstart : istart = np.where( w>wstart )[...
[ "0.5740556", "0.5733101", "0.57284594", "0.5622216", "0.55848444", "0.5580762", "0.5569622", "0.55688787", "0.5552011", "0.5551858", "0.5550779", "0.5461542", "0.5432613", "0.5347965", "0.528338", "0.52451", "0.5242152", "0.5237068", "0.5206165", "0.52003783", "0.5179254", ...
0.63741106
0
Ask the user for input and parse column selection against actual column list. Check that the user entry contains integer values and that these are within the upper boundary of actual column numbers.
def confirm_match_fields(list_of_columns):
    """Prompt the user to select columns and validate the selection.

    Presents every column with its index, reads a comma separated list
    of numbers from stdin, drops duplicate entries (keeping first
    occurrence) and maps the indexes back to column names.  Entries
    that are not integers, or that exceed the column count, are
    reported and skipped.

    Returns:
        list: the selected column names, in entry order.
    """
    header = 'Select name fields for matching. Reply with a comma separated list of numbers. ie 0,1,2\n'
    menu_lines = ["({}) - {} \n".format(idx, col) for idx, col in enumerate(list_of_columns)]
    prompt = header + "".join(menu_lines)
    # Read the selection and de-duplicate while preserving entry order.
    raw_entries = (input(prompt)).split(',')
    unique_entries = list(OrderedDict.fromkeys(raw_entries))
    total_columns = len(list_of_columns)
    selected = []
    for entry in unique_entries:
        try:
            index = int(entry)
        except ValueError:
            print("Expected an integer as a column index. This entry will be ignored. => ", entry)
            continue
        if index >= total_columns:
            print("Field number entered greater than expected. This will be ignored. Number = ", index)
        else:
            selected.append(list_of_columns[index])
    return selected
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def move_col(board: Connect4Board) -> int:\r\n\r\n while True:\r\n\r\n try:\r\n\r\n user_input = (int(input('Please specify the COLUMN number.\\nPlease enter an integer between 1 to {} for number of the column: '.format(board.get_num_columns())))) - 1\r\n\r\n #if gam...
[ "0.6237566", "0.62298584", "0.6013891", "0.58863854", "0.58837", "0.5858342", "0.57155514", "0.57063407", "0.5702495", "0.56861615", "0.56763095", "0.56336224", "0.56208456", "0.5606156", "0.5576528", "0.55718327", "0.5562949", "0.5559637", "0.55525136", "0.5552233", "0.55152...
0.5852762
6
powerset([1,2,3]) --> (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3) not a true powerset as empty is not returned
def powerset(iterable):
    """Yield every non-empty subset of *iterable* as a tuple.

    powerset([1,2,3]) --> (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)
    Not a true powerset: the empty tuple is deliberately omitted.
    """
    items = list(iterable)
    for size in range(1, len(items) + 1):
        for subset in combinations(items, size):
            yield subset
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def powerset(iterable):\n\n \"powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)\"\n s = list(iterable)\n return itertools.chain.from_iterable(\n itertools.combinations(s, r) for r in range(1, len(s) + 1)\n )", "def powerset(a):\n if len(a) == 0:\n return set([frozense...
[ "0.78217447", "0.7709693", "0.76820594", "0.757519", "0.75563943", "0.7551796", "0.75315934", "0.751685", "0.74874705", "0.73834807", "0.7375735", "0.733464", "0.733464", "0.733464", "0.7328478", "0.7326208", "0.73160434", "0.7309738", "0.72110635", "0.71566826", "0.7143099",...
0.7434176
9
Returns a feature vector for features given a certain task, model and similarity strategy
def _get_features(task, features, model, similarity_strategy=None):
    """Build the feature matrix for *task*/*model* under a similarity strategy.

    Each entry in *features* contributes one block of columns; blocks
    are concatenated along axis 1.  Returns a float ndarray of shape
    (n_languages, n_feature_columns), or None when any lang2vec
    feature set is unavailable or nothing was collected.
    """
    X = []
    langs = analysis_utils.get_langs_for_task(task)
    for feature in features:
        if feature != "size":
            # this is a nested array
            X_feature = analysis_utils.load_lang2vec_vectors(task=task, features=feature)
            if X_feature is None:
                # Missing vectors invalidate the whole matrix.
                #continue
                return None
            if similarity_strategy != "-":
                # We start with similarities to english
                X_feature = [[sim] for sim in analysis_utils.compute_similarities_of_lang_vecs(X_feature, strategy=similarity_strategy)]
        elif feature == "size" and model == "xlmr":
            # this is an array, we put it in a list
            X_feature = [[size] for size in analysis_utils.xlmr_input_corpus_sizes(langs)]
        elif feature == "size" and model == "mbert":
            X_feature = [[size] for size in analysis_utils.mbert_input_corpus_sizes(langs)]
        else:
            # "size" with an unrecognized model: no corpus sizes available.
            raise ValueError()
        # we now have a feature vector for a single feature or feature set
        if len(X) == 0:
            X = np.array(X_feature)
        else:
            X = np.concatenate((X,np.array(X_feature)), axis=1)
    if len(X) == 0:
        return None
    return np.array(X, dtype=float)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def features(self, img, tasks):\n ensemble_probs = []\n\n model_iterable = self.tasks2models[tasks]\n ensemble_results = []\n for model in model_iterable():\n individual_feats = model.module.features2(img)\n ensemble_results.append(individual_feats)\n\n retu...
[ "0.6269173", "0.60586506", "0.6025307", "0.60061306", "0.5941046", "0.58661604", "0.58194387", "0.5813026", "0.58017576", "0.5770882", "0.5751222", "0.572277", "0.56830263", "0.56766546", "0.56654996", "0.5664544", "0.5647656", "0.5646675", "0.56205535", "0.55783176", "0.5547...
0.76417094
0
This custom filter take a string as input with comma separated values. Note that the value here is already a list as it has been transformed by the BaseInFilter class.
def filter_names(self, qs, name, value):
    """Restrict the queryset to rows whose name is in *value*.

    *value* arrives as a list: the BaseInFilter machinery has already
    split the comma separated input string.
    """
    matching = qs.filter(name__in=value)
    return matching
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def in_(self, value):\n if isinstance(value, (str, text_type)):\n value = [value]\n return Filter(self, value, 'in')", "def to_python(self, value):\n # Return an empty list if no input was given.\n if not value:\n return []\n return value.split(',')", "d...
[ "0.7434211", "0.64544165", "0.63526255", "0.6277826", "0.61804974", "0.6147383", "0.61087376", "0.5946269", "0.5861053", "0.58073354", "0.5784654", "0.5750738", "0.5747868", "0.57219", "0.5701402", "0.5690323", "0.55717665", "0.55665094", "0.5539709", "0.55298066", "0.5491114...
0.5347189
30
Test in filter on a string field.
def test_string_in_filter(query):
    """name_In on a string field returns only the listed pets, even when a value contains a comma."""
    Pet.objects.create(name="Brutus", age=12)
    Pet.objects.create(name="Mimi", age=3)
    Pet.objects.create(name="Jojo, the rabbit", age=3)
    schema = Schema(query=query)
    query = """ query { pets (name_In: ["Brutus", "Jojo, the rabbit"]) { edges { node { name } } } } """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["pets"]["edges"] == [
        {"node": {"name": "Brutus"}},
        {"node": {"name": "Jojo, the rabbit"}},
    ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_str(self):\n\t\tself.filter.set_operator('.match')\n\t\tself.filter.set_limit(\"test\")\n\t\tself.assertTrue(str(self.filter), \"String conversion failed!\")", "def filter_func(fieldname):\n if fieldname.startswith('_'):\n return False\n value = getattr(class_, fieldname)\n ...
[ "0.6680114", "0.62551516", "0.6214412", "0.59714603", "0.5864514", "0.5862243", "0.5818212", "0.5786967", "0.57790124", "0.57667947", "0.57471454", "0.57026047", "0.56858695", "0.56813645", "0.5636248", "0.56018573", "0.558521", "0.5572709", "0.5549825", "0.5532679", "0.55165...
0.5281426
51
Test in filter on a string field which has also a custom filter doing a similar operation.
def test_string_in_filter_with_otjer_filter(query):
    """name_In still works on a field that also has a custom filter doing a similar operation."""
    # NOTE(review): "otjer" is a typo for "other"; kept because renaming
    # would change the test's public identifier.
    Person.objects.create(name="John")
    Person.objects.create(name="Michael")
    Person.objects.create(name="Angela")
    schema = Schema(query=query)
    query = """ query { people (name_In: ["John", "Michael"]) { edges { node { name } } } } """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["people"]["edges"] == [
        {"node": {"name": "John"}},
        {"node": {"name": "Michael"}},
    ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _match_filter(self, meta, field):\r\n val = meta[field]\r\n if field in self.ignored_values:\r\n for pattern in self.ignored_values[field]:\r\n val = val.replace(pattern, '')\r\n return val", "def _custom_filter(self, query):\r\n return query", "def tes...
[ "0.6647947", "0.6369712", "0.6352776", "0.6120829", "0.6069115", "0.6028925", "0.5965036", "0.59358466", "0.592832", "0.5900892", "0.58838147", "0.586241", "0.5809917", "0.5804265", "0.5789212", "0.57818174", "0.5766749", "0.57654047", "0.5749505", "0.5748385", "0.574097", ...
0.0
-1
Test in filter on a string field with a custom filterset class.
def test_string_in_filter_with_declared_filter(query):
    """A declared custom filterset filter ("names") accepts a comma separated string."""
    Person.objects.create(name="John")
    Person.objects.create(name="Michael")
    Person.objects.create(name="Angela")
    schema = Schema(query=query)
    query = """ query { people (names: "John,Michael") { edges { node { name } } } } """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["people"]["edges"] == [
        {"node": {"name": "John"}},
        {"node": {"name": "Michael"}},
    ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def filter_func(fieldname):\n if fieldname.startswith('_'):\n return False\n value = getattr(class_, fieldname)\n \n return isinstance(value, type)", "def test_str(self):\n\t\tself.filter.set_operator('.match')\n\t\tself.filter.set_limit(\"test\")\n\t\tself.assertTrue(str(s...
[ "0.6347453", "0.62784326", "0.6168746", "0.6023738", "0.5979025", "0.5928806", "0.5859785", "0.5711242", "0.5680016", "0.5651849", "0.5489161", "0.54681385", "0.54575986", "0.54201454", "0.5403745", "0.5398292", "0.537536", "0.5361248", "0.534145", "0.5331288", "0.5331288", ...
0.5272002
23
Test in filter on an integer field.
def test_int_in_filter(query):
    """age_In on an integer field matches one value, then a set of values."""
    Pet.objects.create(name="Brutus", age=12)
    Pet.objects.create(name="Mimi", age=3)
    Pet.objects.create(name="Jojo, the rabbit", age=3)
    schema = Schema(query=query)
    # Single value in the list: both age-3 pets match.
    query = """ query { pets (age_In: [3]) { edges { node { name } } } } """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["pets"]["edges"] == [
        {"node": {"name": "Mimi"}},
        {"node": {"name": "Jojo, the rabbit"}},
    ]
    # Two values: every pet matches.
    query = """ query { pets (age_In: [3, 12]) { edges { node { name } } } } """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["pets"]["edges"] == [
        {"node": {"name": "Brutus"}},
        {"node": {"name": "Mimi"}},
        {"node": {"name": "Jojo, the rabbit"}},
    ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_for_int(check):", "def filterAcceptsRow(self, p_int, source_parent):\n index = self.sourceModel().index(p_int, 0)\n types_entry_str = index.data()\n if not types_entry_str:\n return False\n match = REGEX_DIGITS.search(types_entry_str)\n if match:\n ...
[ "0.6696639", "0.66578007", "0.66270024", "0.6509551", "0.6340903", "0.622832", "0.6143212", "0.60561484", "0.60561484", "0.5922049", "0.5920849", "0.5862091", "0.5855524", "0.5835367", "0.5818671", "0.57944286", "0.5734712", "0.5721278", "0.57029366", "0.5678141", "0.5656519"...
0.55338234
28
Check that using a in filter with an empty list provided as input returns no objects.
def test_in_filter_with_empty_list(query):
    """An in filter with an empty list must return no objects."""
    Pet.objects.create(name="Brutus", age=12)
    Pet.objects.create(name="Mimi", age=8)
    Pet.objects.create(name="Picotin", age=5)
    schema = Schema(query=query)
    query = """ query { pets (name_In: []) { edges { node { name } } } } """
    result = schema.execute(query)
    assert not result.errors
    assert len(result.data["pets"]["edges"]) == 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def empty_filter(item, *args, **kwargs):\n return True", "def is_empty(self):\n return not list(self._filtered_items)", "def is_empty(self):\n\n return self.some().map(lambda b: not b)", "def test_empty_list_error(self):\n with self.assertRaises(ValueError):\n function_incl...
[ "0.6915782", "0.68769324", "0.6442358", "0.64264005", "0.6415718", "0.63710684", "0.62980837", "0.6274831", "0.6274831", "0.62719107", "0.6263292", "0.62174505", "0.61688304", "0.61556524", "0.6136472", "0.6135395", "0.61337715", "0.6123703", "0.61021626", "0.60987234", "0.60...
0.70211446
0
Test in filter on a choice field not using an enum (Film.genre).
def test_choice_in_filter_without_enum(query):
    """genre_In on a choice field not converted to an enum accepts the raw choice strings."""
    john_doe = Reporter.objects.create(
        first_name="John", last_name="Doe", email="john@doe.com"
    )
    jean_bon = Reporter.objects.create(
        first_name="Jean", last_name="Bon", email="jean@bon.com"
    )
    documentary_film = Film.objects.create(genre="do")
    documentary_film.reporters.add(john_doe)
    action_film = Film.objects.create(genre="ac")
    action_film.reporters.add(john_doe)
    # The "other" film has both reporters but is excluded by the filter.
    other_film = Film.objects.create(genre="ot")
    other_film.reporters.add(john_doe)
    other_film.reporters.add(jean_bon)
    schema = Schema(query=query)
    query = """ query { films (genre_In: ["do", "ac"]) { edges { node { genre reporters { edges { node { lastName } } } } } } } """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["films"]["edges"] == [
        {
            "node": {
                "genre": "do",
                "reporters": {"edges": [{"node": {"lastName": "Doe"}}]},
            }
        },
        {
            "node": {
                "genre": "ac",
                "reporters": {"edges": [{"node": {"lastName": "Doe"}}]},
            }
        },
    ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_filterval(filterval):\n if filterval != 'description' and filterval != 'fulldescription' and filterval != 'completed':\n return False\n else:\n return True", "def filter_generation_type(self, what):\n return self.form.set_value('generation type', what)", ...
[ "0.54397243", "0.53808635", "0.5250568", "0.5232108", "0.52033514", "0.51955026", "0.51942164", "0.5174951", "0.5173452", "0.5173218", "0.516942", "0.5157201", "0.5134928", "0.50926733", "0.50899726", "0.5082016", "0.50662625", "0.5031735", "0.5031735", "0.5029781", "0.501617...
0.6587683
0
Test in filter on a foreign key relationship.
def test_fk_id_in_filter(query):
    """reporter_In on a foreign key accepts a list of related-object ids."""
    john_doe = Reporter.objects.create(
        first_name="John", last_name="Doe", email="john@doe.com"
    )
    jean_bon = Reporter.objects.create(
        first_name="Jean", last_name="Bon", email="jean@bon.com"
    )
    sara_croche = Reporter.objects.create(
        first_name="Sara", last_name="Croche", email="sara@croche.com"
    )
    Article.objects.create(
        headline="A",
        pub_date=datetime.now(),
        pub_date_time=datetime.now(),
        reporter=john_doe,
        editor=john_doe,
    )
    Article.objects.create(
        headline="B",
        pub_date=datetime.now(),
        pub_date_time=datetime.now(),
        reporter=jean_bon,
        editor=jean_bon,
    )
    # Sara's article must be filtered out.
    Article.objects.create(
        headline="C",
        pub_date=datetime.now(),
        pub_date_time=datetime.now(),
        reporter=sara_croche,
        editor=sara_croche,
    )
    schema = Schema(query=query)
    # f-string interpolates the actual database ids into the query.
    query = f""" query {{ articles (reporter_In: [{john_doe.id}, {jean_bon.id}]) {{ edges {{ node {{ headline reporter {{ lastName }} }} }} }} }} """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["articles"]["edges"] == [
        {"node": {"headline": "A", "reporter": {"lastName": "Doe"}}},
        {"node": {"headline": "B", "reporter": {"lastName": "Bon"}}},
    ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _filter_related_fk(self, rel):\n field = rel.field\n if isinstance(field, models.ForeignKey):\n if self._join_allowed(rel.parent_model, rel.model, field):\n return rel", "def _filter_fk(self, field):\n if isinstance(field, models.ForeignKey):\n if sel...
[ "0.758005", "0.7506856", "0.65179884", "0.6295803", "0.6255321", "0.5974624", "0.59294915", "0.5826716", "0.5780739", "0.576188", "0.57598424", "0.5719743", "0.5709073", "0.5687628", "0.56663066", "0.5635571", "0.5632098", "0.562849", "0.561815", "0.5595195", "0.559364", "0...
0.57559675
11
Test in filter on a choice field using an enum (Reporter.reporter_type).
def test_enum_in_filter(query): Reporter.objects.create( first_name="John", last_name="Doe", email="john@doe.com", reporter_type=1 ) Reporter.objects.create( first_name="Jean", last_name="Bon", email="jean@bon.com", reporter_type=2 ) Reporter.objects.create( first_name="Jane", last_name="Doe", email="jane@doe.com", reporter_type=2 ) Reporter.objects.create( first_name="Jack", last_name="Black", email="jack@black.com", reporter_type=None ) schema = Schema(query=query) query = """ query { reporters (reporterType_In: [A_1]) { edges { node { email } } } } """ result = schema.execute(query) assert not result.errors assert result.data["reporters"]["edges"] == [ {"node": {"email": "john@doe.com"}}, ] query = """ query { reporters (reporterType_In: [A_2]) { edges { node { email } } } } """ result = schema.execute(query) assert not result.errors assert result.data["reporters"]["edges"] == [ {"node": {"email": "jean@bon.com"}}, {"node": {"email": "jane@doe.com"}}, ] query = """ query { reporters (reporterType_In: [A_2, A_1]) { edges { node { email } } } } """ result = schema.execute(query) assert not result.errors assert result.data["reporters"]["edges"] == [ {"node": {"email": "john@doe.com"}}, {"node": {"email": "jean@bon.com"}}, {"node": {"email": "jane@doe.com"}}, ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_choice_in_filter_without_enum(query):\n\n john_doe = Reporter.objects.create(\n first_name=\"John\", last_name=\"Doe\", email=\"john@doe.com\"\n )\n jean_bon = Reporter.objects.create(\n first_name=\"Jean\", last_name=\"Bon\", email=\"jean@bon.com\"\n )\n documentary_film = Fi...
[ "0.6124881", "0.5575385", "0.54288995", "0.54145145", "0.5324439", "0.53120065", "0.53093195", "0.52942735", "0.52369535", "0.5213601", "0.5205873", "0.5193913", "0.5182427", "0.5172175", "0.51323706", "0.51200104", "0.5100281", "0.50874496", "0.50767154", "0.50623184", "0.50...
0.56262904
1
Handle GET requests for single park area
def retrieve(self, request, pk=None): try: category = ProductCategory.objects.get(pk=pk) serializer = ProductCategorySerializer(category, context={'request': request}) return Response(serializer.data) except Exception as ex: return HttpResponseServerError(ex)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def retrieve(self, request, pk):\n try:\n area = Area.objects.filter(pk=pk).first()\n if area is None:\n return custom_response(Code.AREA_NOT_FOUND)\n\n province_lists, proportion_kpi_lists = [], []\n province_code_lists = area.provinces.split(',')\...
[ "0.5696643", "0.56247276", "0.5615363", "0.5595941", "0.55863386", "0.5570804", "0.5547689", "0.5539774", "0.5535828", "0.54864246", "0.54228663", "0.53815085", "0.5345064", "0.53380686", "0.530849", "0.52752537", "0.52634513", "0.5262047", "0.52559435", "0.52509636", "0.5247...
0.0
-1
Handle GET requests to park ProductCategorys resource
def list(self, request): product_category = ProductCategory.objects.all() # Support filtering ProductCategorys by area id # name = self.request.query_params.get('name', None) # if name is not None: # ProductCategories = ProductCategories.filter(name=name) serializer = ProductCategorySerializer( product_category, many=True, context={'request': request}) return Response(serializer.data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def retrieve(self, request, pk=None):\n try:\n category = ProductCategory.objects.get(pk=pk)\n serializer = ProductCategorySerializer(category, context={'request': request})\n return Response(serializer.data)\n except Exception as ex:\n return HttpResponseS...
[ "0.71682376", "0.7032157", "0.69441664", "0.6676812", "0.66664386", "0.6588189", "0.6565913", "0.64754415", "0.6378403", "0.6333164", "0.6246395", "0.6242302", "0.6219372", "0.6217133", "0.6207262", "0.6182406", "0.6178808", "0.61548734", "0.61504626", "0.6134846", "0.6096371...
0.7259174
0
returns True if event ends a possession, False otherwise
def is_possession_ending_event(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def count_as_possession(self):\n if self.is_possession_ending_event:\n if self.seconds_remaining > 2:\n return True\n # check when previous possession ended\n prev_event = self.previous_event\n while prev_event is not None and not prev_event.is_poss...
[ "0.75881785", "0.702573", "0.7022446", "0.6956652", "0.6942744", "0.68245476", "0.66753006", "0.6652011", "0.6642587", "0.65541005", "0.6489061", "0.64834446", "0.64689803", "0.6468721", "0.64471143", "0.6441375", "0.64016587", "0.63684183", "0.63627636", "0.63627636", "0.634...
0.85768574
0
returns list of dicts with all stats for event
def event_stats(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def event_stats(self):\n return self.base_stats", "def get_metrics(event):\n return tba_session.get(BASE_URL + '/event/%s/oprs' %event).json()", "def event_dicts(self):\n events = []\n # We're assuming that the table has alternating rows that\n # containg (date, event title) poss...
[ "0.7214178", "0.69517624", "0.67323565", "0.6659741", "0.6565855", "0.654559", "0.65454817", "0.6497064", "0.64058197", "0.6327785", "0.6292388", "0.62751824", "0.62552947", "0.62509114", "0.6241405", "0.62282646", "0.6225516", "0.62236094", "0.62227386", "0.62097263", "0.616...
0.7740182
0
returns team id for team on offense for event
def get_offense_team_id(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_team_id(team_name):\n\n team_name = team_name.lower()\n endpoint = \"/teams\"\n response = api.nhl_api(endpoint)\n\n if not response:\n raise ConnectionError(\"An invalid response was returned from the NHL Teams API.\")\n\n teams_json = response.json()\n teams = teams_json[\"teams\...
[ "0.6402092", "0.63786477", "0.6285374", "0.62278175", "0.6203459", "0.617215", "0.5979764", "0.5927196", "0.573476", "0.57201517", "0.56839955", "0.5652449", "0.5618322", "0.5600507", "0.5562163", "0.5533258", "0.55129105", "0.5488184", "0.5462104", "0.54099923", "0.53854465"...
0.82629144
0
returns seconds remaining in period as a ``float``
def seconds_remaining(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_seconds(self):\n return self.seconds_remaining", "def time_remaining(progress, elapsed):\n total = elapsed / progress\n return total - elapsed", "def get_song_seconds_remaining(result):\n remaining_ms = get_song_length_milliseconds(result) - get_song_elapsed_milliseconds(result)...
[ "0.76995057", "0.734497", "0.72716516", "0.7127699", "0.7037525", "0.699375", "0.6983011", "0.6970858", "0.69569576", "0.6924487", "0.69012284", "0.68821955", "0.68762916", "0.6871283", "0.68631834", "0.6851763", "0.68502617", "0.68149406", "0.6782675", "0.67755985", "0.67368...
0.7292954
2
returns list of dicts with all seconds played and possession count stats for event
def base_stats(self): return ( self._get_seconds_played_stats_items() + self._get_possessions_played_stats_items() )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stats():\r\n times_lst = []\r\n time_dict = {}\r\n for album, details in dbase().items():\r\n time_m = 0\r\n time_s = 0\r\n for songs, details_s in details[0].items():\r\n time = details_s[1].split(\":\")\r\n min = int(time[0])\r\n sec = int(time[1...
[ "0.67780775", "0.6743794", "0.6251339", "0.6074646", "0.60666454", "0.60275286", "0.59841365", "0.5961759", "0.59369177", "0.59234816", "0.5922776", "0.5922776", "0.5922776", "0.59215605", "0.5893438", "0.58242834", "0.58116066", "0.5803698", "0.5781211", "0.56819916", "0.567...
0.6516011
2
returns list of all events that take place as the same time as the current event
def get_all_events_at_current_time(self): events = [self] # going backwards event = self while event is not None and self.seconds_remaining == event.seconds_remaining: if event != self: events.append(event) event = event.previous_event # going forwards event = self while event is not None and self.seconds_remaining == event.seconds_remaining: if event != self: events.append(event) event = event.next_event return sorted(events, key=lambda k: k.order)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_curr_events(self):\n today = datetime.date.today()\n return self.s.query(Event).filter(Event.time > today).all()", "def events(time):\n\n event_list = eventlist()\n idx = np.all(time == event_list[:, 0:len(time)], axis=1)\n return event_list[idx,:]", "def get_events(self):\n ...
[ "0.7656709", "0.7239735", "0.670789", "0.66426444", "0.65620106", "0.65207577", "0.64506966", "0.63653976", "0.63593066", "0.6298316", "0.6236205", "0.6210293", "0.6126111", "0.6072726", "0.60209477", "0.59940344", "0.5987256", "0.59750044", "0.5967468", "0.59477067", "0.5942...
0.7872362
0
returns dict with list of player ids for each team with players on the floor for current event For all non subsitution events current players are just the same as previous event
def current_players(self): return self.previous_event.current_players
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_players_on_floor(self):\n for period in self.Periods:\n # set current players to be period starters\n current_players = period.Starters.copy()\n for pbp_event in period.Events:\n if pbp_event.is_substitution():\n coming_in = pbp_even...
[ "0.65528506", "0.59010434", "0.5883524", "0.5855024", "0.58492154", "0.5839282", "0.57566845", "0.5750913", "0.57267255", "0.57229966", "0.56700575", "0.5654687", "0.5652263", "0.5650997", "0.5623131", "0.56151515", "0.56052953", "0.5571534", "0.5563076", "0.55602336", "0.555...
0.6070743
1
returns the score margin from perspective of offense team before the event took place
def score_margin(self): if self.previous_event is None: score = self.score else: score = self.previous_event.score offense_team_id = self.get_offense_team_id() offense_points = score[offense_team_id] defense_points = 0 for team_id, points in score.items(): if team_id != offense_team_id: defense_points = points return offense_points - defense_points
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Margin(self):\n s = self.margin\n assert s in range(1,6), \"Margin score out of bounds.\"\n if s == 1: return 'Poor'\n elif s == 2: return 'Near Poor'\n elif s == 3: return 'Medium'\n elif s == 4: return 'Near Sharp'\n elif s == 5: return 'Sharp'", "def marg...
[ "0.6575193", "0.5990418", "0.59229994", "0.5904188", "0.58731693", "0.58324146", "0.57912004", "0.57143426", "0.56908834", "0.56435466", "0.56371015", "0.56231445", "0.5613722", "0.5583762", "0.5572104", "0.5566963", "0.55475724", "0.5513546", "0.5509342", "0.55083126", "0.54...
0.8493469
0
returns dict with lineup ids for each team for current event. Lineup ids are hyphen separated sorted player id strings.
def lineup_ids(self): lineup_ids = {} for team_id, team_players in self.current_players.items(): players = [str(player_id) for player_id in team_players] sorted_player_ids = sorted(players) lineup_id = "-".join(sorted_player_ids) lineup_ids[team_id] = lineup_id return lineup_ids
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compile_lineups(players, pos, id_list, team):\n lu = []\n subs = []\n names = players[team]\n positions = pos[team]\n ids = id_list[team]\n for n in range(len(names)):\n if names[n][-1] == ' ':\n names[n] = names[n][0:-1]\n for i in range(0, len(names)):\n names[i]...
[ "0.63278043", "0.5798467", "0.57650906", "0.576339", "0.54533684", "0.54256725", "0.5386581", "0.53313106", "0.5272173", "0.5270027", "0.5256748", "0.52523595", "0.52510047", "0.5233039", "0.5219826", "0.52058655", "0.5196101", "0.51753414", "0.5173734", "0.5145153", "0.51366...
0.8228265
0
returns the number of seconds that have elapsed since the previous event
def seconds_since_previous_event(self): if self.previous_event is None: return 0 if self.seconds_remaining == 720: # so that subs between periods for live don't have negative seconds return 0 if self.seconds_remaining == 300 and self.period > 4: # so that subs between periods for live don't have negative seconds return 0 return self.previous_event.seconds_remaining - self.seconds_remaining
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def time_since_last_state_change(self):\n current_time = rospy.get_rostime()\n difference = current_time - self._timestamps['last_state_change']\n return difference.to_sec()", "def elapsed():\n global start_time\n return time.time() - start_time", "def elapsed(self):\n ...
[ "0.73553234", "0.73396635", "0.7332249", "0.7310087", "0.7274196", "0.72670037", "0.7216402", "0.71862936", "0.71495765", "0.71136665", "0.70829964", "0.70781296", "0.7034211", "0.697659", "0.69762397", "0.696461", "0.69368035", "0.6934867", "0.69290966", "0.68871796", "0.688...
0.8197923
0
returns True if the event takes place after an offensive rebound on the current possession, False otherwise
def is_second_chance_event(self): event = self.previous_event if isinstance(event, Rebound) and event.is_real_rebound and event.oreb: return True while not (event is None or event.is_possession_ending_event): if isinstance(event, Rebound) and event.is_real_rebound and event.oreb: return True event = event.previous_event return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_possession_ending_event(self):\n pass", "def is_onhold(self) -> bool:", "def is_on(self) -> bool:\n return self.event.is_tripped", "def shooting(self):\r\n return not self.stopped", "def is_penalty_event(self):\n if hasattr(self, \"fouls_to_give\"):\n team_ids ...
[ "0.70515865", "0.6717488", "0.6612516", "0.6464781", "0.64172095", "0.63652587", "0.6318331", "0.6302998", "0.62632906", "0.62373704", "0.6230576", "0.622687", "0.62076503", "0.6200433", "0.61976546", "0.6172198", "0.6165521", "0.61521184", "0.6148918", "0.6104865", "0.608294...
0.69695365
1
returns True if the team on offense is in the penalty, False otherwise
def is_penalty_event(self): if hasattr(self, "fouls_to_give"): team_ids = list(self.current_players.keys()) offense_team_id = self.get_offense_team_id() defense_team_id = ( team_ids[0] if offense_team_id == team_ids[1] else team_ids[1] ) if self.fouls_to_give[defense_team_id] == 0: if isinstance(self, (Foul, FreeThrow, Rebound)): # if foul or free throw or rebound on a missed ft # check foul event and should return false is foul # was shooting foul and team had a foul to give if isinstance(self, Foul): foul_event = self elif isinstance(self, FreeThrow): foul_event = self.foul_that_led_to_ft else: # if rebound is on missed ft, also need to look at foul that led to FT if not self.oreb and isinstance(self.missed_shot, FreeThrow): foul_event = self.missed_shot.foul_that_led_to_ft else: return True if foul_event is None: return True fouls_to_give_prior_to_foul = ( foul_event.previous_event.fouls_to_give[defense_team_id] ) if fouls_to_give_prior_to_foul > 0: return False return True return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def winningTeamPenalty(r):\n \n #Check if home or away had more goals at the 'event' time\n homecheck = int(r['about.goals.home'] > r['about.goals.away'])\n awaycheck = int(r['about.goals.away'] > r['about.goals.home'])\n \n #If home had more goals and the penalty was on the home team, set to 1\n...
[ "0.6627246", "0.6298236", "0.6159958", "0.6144886", "0.61247426", "0.6035521", "0.6007446", "0.5965502", "0.5947476", "0.59469855", "0.5929833", "0.5910404", "0.5885881", "0.58652747", "0.5852705", "0.5821209", "0.5802464", "0.58004534", "0.5798505", "0.57926786", "0.5742253"...
0.70770013
0
returns True if event is possession changing event that should count as a real possession, False otherwise. In order to not include possessions which a very low probability of scoring in possession counts, possession won't be counted as a possession if it starts with <= 2 seconds left and no points are scored before period ends
def count_as_possession(self): if self.is_possession_ending_event: if self.seconds_remaining > 2: return True # check when previous possession ended prev_event = self.previous_event while prev_event is not None and not prev_event.is_possession_ending_event: prev_event = prev_event.previous_event if prev_event is None or prev_event.seconds_remaining > 2: return True # possession starts in final 2 seconds # return True if there is a FT or FGM between now and end of period next_event = prev_event.next_event while next_event is not None: if isinstance(next_event, FreeThrow) or ( isinstance(next_event, FieldGoal) and next_event.is_made ): return True next_event = next_event.next_event return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_possession_ending_event(self):\n pass", "def significant_position_change(self, timestamp, new_position):\n timediff = (timestamp - self.timestamp).total_seconds()\n posdiff = (new_position - self.position) / 1000\n diffdiff = timediff - posdiff\n\n if abs(diffdiff) > 5:\...
[ "0.7091942", "0.60511667", "0.58913386", "0.5813963", "0.5769117", "0.5687262", "0.5611369", "0.56004244", "0.55632025", "0.5527948", "0.5527946", "0.55076903", "0.54790807", "0.5442499", "0.54219216", "0.5415135", "0.5407619", "0.5396577", "0.5360468", "0.53576845", "0.53544...
0.82088506
0
Print methods and doc strings. Takes module, class, list, dictionary, or string.
def info(object, spacing=10, collapse=1): methodList = [method for method in dir(object) if callable(getattr(object, method))] argList = [method for method in dir(object) if not callable(getattr(object, method))] processFunc = collapse and (lambda s: " ".join(s.split())) or (lambda s: s) print "\n".join(["%s %s" % (method.ljust(spacing), processFunc(str(getattr(object, method).__doc__))) for method in methodList]) print argList
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_methods(obj: object) -> None:\n all_attributes = set(dir(obj))\n names_of_methods = set(\n filter(lambda atrr_name: callable(getattr(obj, atrr_name)), all_attributes)\n )\n methods = (getattr(obj, method_name) for method_name in names_of_methods)\n methods_names_and_docs = [(ful...
[ "0.717699", "0.683481", "0.67748636", "0.66115254", "0.65760064", "0.65314853", "0.65276307", "0.650433", "0.648049", "0.64213365", "0.6410133", "0.6410133", "0.64029634", "0.6261382", "0.62343085", "0.623214", "0.62060016", "0.6181748", "0.61703414", "0.615099", "0.6137972",...
0.6613502
3
Insert the HR2 in the sqlite DB
def insert(self,connector): c= connector.cursor() # print str_cmd snew = buildInsert(self,"HR2") # print snew # print type(snew) #print type(str_cmd) c.execute(snew) c.execute("select MAX(ID) from HR2") for row in c: lastid=row[0] break self.ID=lastid connector.commit() return lastid
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_in_db(self):\n self.sql_database.table_name = self.table_db\n self.sql_database.db_name = self.db\n if self.sql_database.insert_item(text_path=self.path, word_first=self.word_1.get(),\n word_second=self.word_2.get(),\n ...
[ "0.6218955", "0.6192249", "0.61846954", "0.61208504", "0.61166215", "0.60911196", "0.60804343", "0.6061364", "0.6061364", "0.6059145", "0.602654", "0.59976053", "0.5978992", "0.5973187", "0.5965534", "0.59377855", "0.59237146", "0.59213096", "0.5909464", "0.5888386", "0.58613...
0.56449646
45
download the HR2 from the sqlite DB
def download(self,connector,condition): c= connector.cursor() # condition = " WHERE DIF_ID=%d AND NUM=%d" % (difid,num) snew = buildSelect(self,'HR2',condition) # print snew c.execute(snew) lnames=[] for name,val in sorted(self.__dict__.iteritems()): lnames.append(name) vobj=[] for row in c: # print row hr2=HR2Def(0) for i in range(len(lnames)): #print lnames[i],row[i] hr2.__dict__[lnames[i]]=row[i] vobj.append(hr2) return vobj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download_hess_dr1_data():\n download_data_files(FILENAMES_HESS_DR1)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(...
[ "0.6795823", "0.6621874", "0.6621874", "0.6621874", "0.6621874", "0.6621874", "0.6621874", "0.6479302", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.62850004", "0.6...
0.71289873
0
Insert the DCC in the sqlite DB
def insert(self,connector): c= connector.cursor() # print str_cmd snew = buildInsert(self,"DCC") # print snew # print type(snew) #print type(str_cmd) c.execute(snew) c.execute("select MAX(ID) from DCC") for row in c: lastid=row[0] break connector.commit() return lastid
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def insert(self,table,values):\n self.connect.execute(self.insert_disc[table],values)\n self.connect.commit()", "def test_insert(self):\n query = \"insert into cds values(%s,%s,%s,%s)\"\n values = (109876,\"cinderella\",\"big 5\",5)\n self.a.insert(query,values)\n query1...
[ "0.59060663", "0.5903229", "0.58125335", "0.5747999", "0.56894326", "0.5687091", "0.5669917", "0.5652024", "0.5629885", "0.56122637", "0.56104195", "0.56025684", "0.55959547", "0.55853516", "0.552687", "0.5512492", "0.5441902", "0.54274094", "0.54197073", "0.5402316", "0.5402...
0.53086513
30
download the DCC from the sqlite DB
def download(self,connector,condition): c= connector.cursor() snew = buildSelect(self,'DCC',condition) # print snew c.execute(snew) lnames=[] for name,val in sorted(self.__dict__.iteritems()): lnames.append(name) vobj=[] for row in c: # print row obj=DCCDef() for i in range(len(lnames)): obj.__dict__[lnames[i]]=row[i] vobj.append(obj) return vobj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download_dbase(ascii_dbase_url, ascii_dbase_root):\n from fiasco import log\n log.debug(f'Downloading database from {ascii_dbase_url}')\n log.debug(f'Downloading database to {ascii_dbase_root}')\n tar_tmp_dir = FIASCO_HOME / 'tmp'\n tar_tmp_dir.mkdir(exist_ok=True, parents=True)\n with set_te...
[ "0.64645934", "0.63106734", "0.63106734", "0.63106734", "0.63106734", "0.63106734", "0.63106734", "0.61389863", "0.61100876", "0.6083434", "0.59853745", "0.59853745", "0.59853745", "0.59853745", "0.59853745", "0.59853745", "0.59853745", "0.59853745", "0.59853745", "0.59853745",...
0.66012114
0
Insert the LDA in the sqlite DB
def insert(self,connector): c= connector.cursor() # print str_cmd snew = buildInsert(self,"LDA") # print snew # print type(snew) #print type(str_cmd) c.execute(snew) c.execute("select MAX(ID) from LDA") for row in c: lastid=row[0] break self.ID=lastid connector.commit() return lastid
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_in_db(self):\n self.sql_database.table_name = self.table_db\n self.sql_database.db_name = self.db\n if self.sql_database.insert_item(text_path=self.path, word_first=self.word_1.get(),\n word_second=self.word_2.get(),\n ...
[ "0.6702664", "0.6602537", "0.63001543", "0.62909836", "0.6248819", "0.60898685", "0.6076462", "0.5986986", "0.59868914", "0.5966544", "0.5946718", "0.5937437", "0.58905023", "0.5888475", "0.58587915", "0.58564174", "0.5851747", "0.5846728", "0.58438265", "0.5839041", "0.58375...
0.55280584
63
download the LDA from the sqlite DB
def download(self,connector,condition): c= connector.cursor() snew = buildSelect(self,'LDA',condition) # print snew c.execute(snew) lnames=[] for name,val in sorted(self.__dict__.iteritems()): lnames.append(name) vobj=[] for row in c: # print row obj=LDADef() for i in range(len(lnames)): obj.__dict__[lnames[i]]=row[i] vobj.append(obj) return vobj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "...
[ "0.6365306", "0.6365306", "0.6365306", "0.6365306", "0.6365306", "0.6365306", "0.6256054", "0.6251598", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.61086553", "0.6...
0.62504494
8
Insert the SETUP in the sqlite DB
def insert(self,connector): c= connector.cursor() # print str_cmd snew = buildInsert(self,"SETUP") # print snew # print type(snew) #print type(str_cmd) c.execute(snew) c.execute("select MAX(ID) from SETUP") for row in c: lastid=row[0] break self.ID=lastid connector.commit() return lastid
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def insert_db():\n populate_tables()", "def create_db(self):", "def _db_setup(self):\n self.get_connection()\n sql_file = open(db_config.DATABASE_TABLES_SETUP_FILE, 'r')\n with self.conn.cursor() as cur:\n cur.execute(sql_file.read())\n self.conn.commit()\n ...
[ "0.74291366", "0.7166826", "0.7070855", "0.70342904", "0.69243664", "0.68464446", "0.6797319", "0.6748326", "0.67265975", "0.6686652", "0.6666268", "0.6637691", "0.66094667", "0.6602967", "0.65983397", "0.65963346", "0.65867853", "0.65650445", "0.6558582", "0.6543628", "0.650...
0.0
-1
download the SETUP from the sqlite DB
def download(self,connector,condition): c= connector.cursor() snew = buildSelect(self,'SETUP',condition) # print snew c.execute(snew) lnames=[] for name,val in sorted(self.__dict__.iteritems()): lnames.append(name) vobj=[] for row in c: # print row obj=SETUPDef() for i in range(len(lnames)): obj.__dict__[lnames[i]]=row[i] vobj.append(obj) return vobj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download_and_prepare(self):\n self._download_and_prepare()", "def download_and_preprocess(self):\n print('Preparing steering angle database.')\n print('Downloading...')\n self.download()\n print('Preprocessing...')\n self.preprocess()", "def initial_db_setup() -> None:...
[ "0.62866896", "0.62310046", "0.61061794", "0.60753727", "0.60753727", "0.60753727", "0.60753727", "0.60753727", "0.60753727", "0.6056658", "0.5950842", "0.5944346", "0.5913646", "0.58318216", "0.58160055", "0.57921296", "0.57838356", "0.57605076", "0.57173675", "0.5682499", "...
0.6631947
0
Insert the DIF in the sqlite DB
def insert(self,connector): c= connector.cursor() # print str_cmd snew = buildInsert(self,"DIF") # print snew # print type(snew) #print type(str_cmd) c.execute(snew) c.execute("select MAX(ID) from DIF") for row in c: lastid=row[0] break connector.commit() return lastid
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def insertarhab(fila):\n try:\n conexion.cur.execute('insert into habitacion(numero,tipo,prezo,libre) values(?,?,?,?)', fila)\n conexion.conex.commit()\n except sqlite3.OperationalError as e:\n print(e)\n conexion.conex.rollback()", "def insertarhab(fila):\n try:\n con...
[ "0.6635419", "0.6635419", "0.6574056", "0.65341944", "0.6382579", "0.63779986", "0.63482225", "0.62685174", "0.622647", "0.61911124", "0.6159744", "0.6108979", "0.6085483", "0.6027709", "0.5982691", "0.596893", "0.5968268", "0.59646344", "0.59219134", "0.59065986", "0.5899988...
0.0
-1
download the DIF from the sqlite DB
def download(self,connector,condition): c= connector.cursor() snew = buildSelect(self,'DIF',condition) # print snew c.execute(snew) lnames=[] for name,val in sorted(self.__dict__.iteritems()): lnames.append(name) vobj=[] for row in c: # print row obj=DIFDef() for i in range(len(lnames)): obj.__dict__[lnames[i]]=row[i] vobj.append(obj) return vobj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "def download():\n return response.download(request,db)", "...
[ "0.6380636", "0.6380636", "0.6380636", "0.6380636", "0.6380636", "0.6380636", "0.62019056", "0.6178717", "0.6111437", "0.60734355", "0.60734355", "0.6053673", "0.6032839", "0.6032839", "0.6032839", "0.6032839", "0.6032839", "0.6032839", "0.6032839", "0.6032839", "0.6032839", ...
0.6292242
6
Test if class ids get assigned to the same subclasses in multiple eopatches
def test_train_split_per_value(): shape = (1000, 1000, 3) input1 = np.random.randint(10, size=shape, dtype=int) input2 = np.random.randint(10, size=shape, dtype=int) patch1 = EOPatch() patch1[INPUT_MASK_FEATURE] = input1 patch2 = EOPatch() patch2[INPUT_MASK_FEATURE] = input2 bins = [0.2, 0.6] split_task = TrainTestSplitTask((*INPUT_MASK_FEATURE, NEW_FEATURE_NAME), bins, split_type='per_value') # seeds should get ignored when splitting 'per_value' patch1 = split_task(patch1, seed=1) patch2 = split_task(patch2, seed=1) otuput1 = patch1[NEW_MASK_FEATURE] otuput2 = patch2[NEW_MASK_FEATURE] unique = set(np.unique(input1)) | set(np.unique(input2)) for uniq in unique: folds1 = otuput1[input1 == uniq] folds2 = otuput2[input2 == uniq] assert_array_equal(np.unique(folds1), np.unique(folds2))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def identify_class(self, cls):", "def needs_unique_instance(type_):\n return type_ in unique_instance_types", "def test_has_unique_prefix(cls):\n for cls2 in Base.__subclasses__():\n if cls2 != cls and cls.__prefix__ == cls2.__prefix__:\n raise AssertionError(\n '{} and {...
[ "0.6422563", "0.62931657", "0.61861485", "0.6173785", "0.61657685", "0.61504954", "0.59057254", "0.58758515", "0.5815459", "0.5794447", "0.57742494", "0.57558924", "0.5732586", "0.5714551", "0.5697402", "0.567266", "0.5669307", "0.5663694", "0.5633025", "0.56235206", "0.56215...
0.0
-1
Create the descriptor. `base_attr` is the name of an integer attribute that represents binary flags. `bitmask` is the binary value to toggle on `base_attr`.
def __init__(self, base_attr, bitmask): self.base_attr = base_attr self.bitmask = bitmask
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __set__(self, obj, enabled):\n\n value = getattr(obj, self.base_attr)\n if enabled:\n value |= self.bitmask\n else:\n value &= ~self.bitmask\n setattr(obj, self.base_attr, value)", "def _create_inherited_flag_field(property_):\n name_for_methods = join_nam...
[ "0.5400316", "0.5286925", "0.5209522", "0.51593536", "0.51050526", "0.5062308", "0.49769795", "0.49352902", "0.4898312", "0.47941947", "0.4774199", "0.4736493", "0.473473", "0.4696283", "0.46891314", "0.46879336", "0.46860164", "0.4677896", "0.46669382", "0.46669382", "0.4665...
0.76175773
0
Returns a boolean indicating whether `bitmask` is enabled within the base attribute.
def __get__(self, obj, type=None): return bool(getattr(obj, self.base_attr) & self.bitmask)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_bitmask(self, bit):\r\n if (int((format(self.__bitmask__, '016b')[2:])[bit]) == 0):\r\n return True\r\n return False", "def get_bitmask(self):\r\n return self.__bitmask__", "def is_bit_mask(enumeration, potential_mask):\n if not isinstance(potential_mask, six.integer_types):\n ...
[ "0.6370673", "0.60460484", "0.5293593", "0.51872855", "0.51568425", "0.50588435", "0.4949427", "0.49299628", "0.49284515", "0.48571622", "0.48510534", "0.48098966", "0.48055518", "0.48039997", "0.4784679", "0.4772266", "0.47695738", "0.4745472", "0.4741142", "0.47368795", "0....
0.6028671
2
Sets or clears `bitmask` within the base attribute.
def __set__(self, obj, enabled): value = getattr(obj, self.base_attr) if enabled: value |= self.bitmask else: value &= ~self.bitmask setattr(obj, self.base_attr, value)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_bitmask(self, value):\r\n self.__bitmask__ = value | 0xFF00", "def shiftr_bitmask(self):\r\n self.__bitmask__ = self.__bitmask__ >> 1", "def __init__(self, base_attr, bitmask):\n\n self.base_attr = base_attr\n self.bitmask = bitmask", "def set_digit_raw(\n self, index: int,...
[ "0.70722485", "0.64177096", "0.61179703", "0.5430737", "0.54273397", "0.54076076", "0.54037625", "0.53838706", "0.538189", "0.53145444", "0.5254987", "0.5235531", "0.5200751", "0.5176938", "0.51489186", "0.5113641", "0.5109786", "0.5104309", "0.5100866", "0.50879514", "0.5075...
0.61557955
2
Sends an email in the background.
def send_async_email(app, msg): # The function is called on a custom Thread, so we need to get the application context before sending a message. with app.app_context(): # Instantiate the SendGridAPIClient with API key and send message sg = SendGridAPIClient(os.environ.get('SENDGRID_API_KEY')) sg.send(msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_async_email(self, msg):\n with app.app_context():\n result = mail.send(msg)\n print result", "def mail():\n mail_server = 'localhost'\n mail_port = 1025\n CustomSMTPServer((mail_server, mail_port), None)\n asyncore.loop()", "def send_mail(email):\n return em...
[ "0.7273771", "0.69978386", "0.68984574", "0.6791208", "0.6574176", "0.64573586", "0.6453505", "0.64428985", "0.6438768", "0.64248765", "0.6389422", "0.63677955", "0.6342771", "0.63268614", "0.6271502", "0.6241599", "0.62051046", "0.6199537", "0.61853224", "0.61802316", "0.615...
0.6089591
26
Prepares a new SendGrid Mail object from inputs and calls
def send_email(subject, sender, recipients, html_body): try: # Create a new SendGrid Mail object with the arguments given message = Mail( from_email=sender, to_emails=recipients, subject=subject, html_content=html_body) # We prepare a new Thread here to send the email in the background. This takes in the send_async_email # function as its target and runs the function with the parameters passed through args. Thread(target=send_async_email, args=(current_app._get_current_object(), message)).start() except Exception as e: print(e) # FIXME: should do some type of error handling here or allow error to bubble up
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, sender, user):\r\n self.user = user\r\n self.sender = Email(sender)\r\n self.recipient = Email(self.user.getEmail())\r\n self.sg = sendgrid.SendGridAPIClient(apikey = \"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\")", "def build_hello_email...
[ "0.6304504", "0.62640375", "0.60916734", "0.59165967", "0.5906015", "0.58479327", "0.5722822", "0.56613505", "0.56225824", "0.5571272", "0.5547633", "0.55359906", "0.5532558", "0.552747", "0.5448773", "0.54314727", "0.54307705", "0.5412892", "0.5398113", "0.5378171", "0.53776...
0.5421056
17
Tests the backward pass of the hinge loss function
def test_hinge_loss_backward(): from your_code import HingeLoss X = np.array([[-1, 2, 1], [-3, 4, 1]]) w = np.array([1, 2, 3]) y = np.array([1, -1]) loss = HingeLoss(regularization=None) _true = np.array([-1.5, 2, 0.5]) _est = loss.backward(X, w, y) print(_est)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_backward(net, X, Y, grad, loss, index):\n eps = 1e-7\n backup = X[index]\n X[index] += eps\n A1 = net.forward(X)\n loss1 = net.loss(Y, A1[-1])\n ratio = (loss1 - loss) / eps\n assert np.isclose(grad[index], ratio)\n X[index] = backup", "def _backward(loss):\n\n loss.backwa...
[ "0.70681214", "0.6942057", "0.659498", "0.6545506", "0.6523432", "0.64993215", "0.64905953", "0.64883435", "0.64445055", "0.6438305", "0.64352924", "0.642554", "0.6404636", "0.6404417", "0.63782287", "0.63528156", "0.6351562", "0.6339242", "0.6338139", "0.6330921", "0.6330372...
0.8668524
0
Tests the forward pass of the squared loss function
def test_squared_loss_forward(): from your_code import SquaredLoss X = np.array([[-1, 2, 1], [-3, 4, 1]]) w = np.array([1, 2, 3]) y = np.array([1, -1]) loss = SquaredLoss(regularization=None) _true = 26.5 _est = loss.forward(X, w, y) print(_est)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def forward(self,y_out, y_truth): \n result = (np.square(np.subtract(y_out, y_truth)))\n #########################################################################\n # TODO: #\n # Implement the forward pass and return the...
[ "0.6932652", "0.68198997", "0.65537393", "0.65324885", "0.6529143", "0.63533276", "0.6301397", "0.6298944", "0.628218", "0.62585825", "0.6255691", "0.62509155", "0.6244331", "0.6233", "0.6224718", "0.62080723", "0.6198414", "0.6166633", "0.61483914", "0.6138617", "0.6106742",...
0.8524856
0
Fit and predict on the training set using gradient descent and default parameter values. Note that in practice, the testing set should be used for predictions. This code is just to commonsense check that your gradient descent algorithm can classify the data it was trained on.
def make_predictions(features, targets, loss, regularization): from your_code import GradientDescent np.random.seed(0) learner = GradientDescent(loss=loss, regularization=regularization, learning_rate=0.01, reg_param=0.05) learner.fit(features, targets, batch_size=None, max_iter=1000) print("actual targets: ", targets) return learner.predict(features)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fit(self):\n # initialize parameters with zeros (≈ 1 line of code)\n weight, intercept = initialize_with_zeros(self._x_train.shape[0])\n\n # Gradient descent (≈ 1 line of code)\n parameters, grads, costs = optimize(weight,\n intercept,\n ...
[ "0.68939996", "0.68270785", "0.66562504", "0.6589167", "0.65455365", "0.6539147", "0.65299845", "0.6523337", "0.649722", "0.6483377", "0.644804", "0.6433906", "0.6358107", "0.63381016", "0.62936443", "0.62861294", "0.62641317", "0.62229747", "0.6215727", "0.6211669", "0.61867...
0.6327305
14
Tests the ability of the gradient descent algorithm to classify a linearly separable dataset.
def test_gradient_descent_blobs(): features, _, targets, _ = load_data('blobs') hinge = make_predictions(features, targets, 'hinge', None) # assert np.all(hinge == targets) # l1_hinge = make_predictions(features, targets, 'hinge', 'l1') # # assert np.all(l1_hinge == targets) # # l2_hinge = make_predictions(features, targets, 'hinge', 'l2') # # assert np.all(l2_hinge == targets) # # squared = make_predictions(features, targets, 'squared', None) # # assert np.all(squared == targets) # # l1_squared = make_predictions(features, targets, 'squared', 'l1') # # l2_squared = make_predictions(features, targets, 'squared', 'l2')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_train_dataset(self):\n classifiers, estimates =\\\n ada_boost.train_dataset(self.larger_matrix,\n self.larger_class_labels,\n 9)\n expected = [\n {'alpha': 0.6931471805599453,\n 'dim': 0,\...
[ "0.6490156", "0.6451971", "0.64335155", "0.6392328", "0.63797176", "0.62720233", "0.6257553", "0.6255466", "0.6249252", "0.623954", "0.6229559", "0.62094355", "0.6200106", "0.61554265", "0.61163557", "0.6115135", "0.61097056", "0.6069717", "0.60438246", "0.60386896", "0.60309...
0.65788424
0
Tests that the multiclass classifier also works on binary tasks
def test_multiclass_gradient_descent_blobs(): from your_code import MultiClassGradientDescent np.random.seed(0) features, _, targets, _ = load_data('blobs') learner = MultiClassGradientDescent(loss='squared', regularization=None, learning_rate=0.01, reg_param=0.05) learner.fit(features, targets, batch_size=None, max_iter=1000) predictions = learner.predict(features) print("predictions: ", predictions) print("targets: ", targets)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_models_multiclass(model):\n atom = ATOMClassifier(X_class2, y_class2, test_size=0.24, random_state=1)\n atom.run(\n models=model,\n metric=\"f1_micro\",\n n_calls=2,\n n_initial_points=1,\n bo_params={\"base_estimator\": \"rf\", \"cv\": 1},\n )\n assert not a...
[ "0.69859135", "0.6720554", "0.6710832", "0.6705844", "0.6657617", "0.6626682", "0.6590452", "0.6582172", "0.6527616", "0.6526195", "0.65169877", "0.64667857", "0.6466424", "0.6451735", "0.6446678", "0.6446403", "0.643857", "0.6436513", "0.64046556", "0.638151", "0.63791835", ...
0.0
-1
Creates a regression dataset of functions sampled from a GP.
def __init__(self, batch_size, max_num_context, x_size=1, y_size=1, l1_scale=0.6, sigma_scale=1.0, random_kernel_parameters=True, kernel = 'SE', #valid options {SE,PER} testing=False): self._batch_size = batch_size self._max_num_context = max_num_context self._x_size = x_size self._y_size = y_size self._l1_scale = l1_scale self._sigma_scale = sigma_scale self._random_kernel_parameters = random_kernel_parameters self._testing = testing self._kernel = kernel
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_reg_dataset(request):\n set_seed()\n shape = request.param.get('shape', 10)\n size = request.param.get('size', 100)\n X, Y = make_regression(n_samples=2*size, n_features=shape, n_informative=10)\n X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.5)\n Y_train, Y_tes...
[ "0.620292", "0.58756006", "0.586723", "0.5826934", "0.58107764", "0.58019704", "0.57757723", "0.5740523", "0.57274187", "0.57073337", "0.564298", "0.5618902", "0.56113887", "0.5600561", "0.55865526", "0.55512285", "0.5485134", "0.5467787", "0.54674876", "0.54655284", "0.54627...
0.0
-1
Applies the Gaussian kernel to generate curve data.
def _gaussian_kernel(self, xdata, l1, sigma_f, sigma_noise=2e-2): num_total_points = tf.shape(xdata)[1] # Expand and take the difference xdata1 = tf.expand_dims(xdata, axis=1) # [B, 1, num_total_points, x_size] xdata2 = tf.expand_dims(xdata, axis=2) # [B, num_total_points, 1, x_size] diff = xdata1 - xdata2 # [B, num_total_points, num_total_points, x_size] # [B, y_size, num_total_points, num_total_points, x_size] if self._kernel == 'PER': norm = 2*tf.square(tf.math.sin(3.14*diff[:, None, :, :, :])) / l1[:, :, None, None, :] norm = tf.reduce_sum(norm, -1) # [B, data_size, num_total_points, num_total_points] # [B, y_size, num_total_points, num_total_points] kernel = tf.square(sigma_f)[:, :, None, None] * tf.exp(-norm) else: # if kernel is normal gaussian norm = tf.square(diff[:, None, :, :, :] / l1[:, :, None, None, :]) norm = tf.reduce_sum(norm, -1) # [B, data_size, num_total_points, num_total_points] # [B, y_size, num_total_points, num_total_points] kernel = tf.square(sigma_f)[:, :, None, None] * tf.exp(-0.5*norm) # Add some noise to the diagonal to make the cholesky work. kernel += (sigma_noise**2) * tf.eye(num_total_points) return kernel
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def makeGaussianKernel(sigma: float) -> np.ndarray:\n\n # Your code here.\n kernel_size = 8*sigma+1\n kernel = np.zeros([kernel_size,kernel_size], dtype=float)\n center = kernel_size//2\n \n \n s = 2*(sigma**2)\n sum_val = 0\n for i in range(0,kernel_size):\n for j in range(0,kern...
[ "0.7186953", "0.7040933", "0.66974515", "0.6660252", "0.6650756", "0.66272604", "0.66248876", "0.66190356", "0.66012925", "0.65928894", "0.6569453", "0.65568036", "0.6538471", "0.65021783", "0.6494738", "0.6484124", "0.6426373", "0.6423837", "0.64235497", "0.6324433", "0.6317...
0.6274634
23
Builds the op delivering the data. Generated functions are `float32` with x values between 2 and 2.
def generate_curves(self, seed = None): num_context = tf.random_uniform( shape=[], minval=3, maxval=self._max_num_context, dtype=tf.int32, seed=seed) # If we are testing we want to have more targets and have them evenly # distributed in order to plot the function. if self._testing: num_target = 400 num_total_points = num_target x_values = tf.tile( tf.expand_dims(tf.range(-2., 2., 1. / 100, dtype=tf.float32), axis=0), [self._batch_size, 1]) x_values = tf.expand_dims(x_values, axis=-1) # During training the number of target points and their x-positions are # selected at random else: num_target = tf.random_uniform(shape=(), minval=0, maxval=self._max_num_context - num_context, dtype=tf.int32, seed=seed) num_total_points = num_context + num_target x_values = tf.random_uniform( [self._batch_size, num_total_points, self._x_size], -2, 2, seed=seed) # Set kernel parameters # Either choose a set of random parameters for the mini-batch if self._random_kernel_parameters: l1 = tf.random_uniform([self._batch_size, self._y_size, self._x_size], 0.1, self._l1_scale) sigma_f = tf.random_uniform([self._batch_size, self._y_size], 0.1, self._sigma_scale) # Or use the same fixed parameters for all mini-batches else: l1 = tf.ones(shape=[self._batch_size, self._y_size, self._x_size]) * self._l1_scale sigma_f = tf.ones(shape=[self._batch_size, self._y_size]) * self._sigma_scale # Pass the x_values through the Gaussian kernel # [batch_size, y_size, num_total_points, num_total_points] kernel = self._gaussian_kernel(x_values, l1, sigma_f) # Calculate Cholesky, using double precision for better stability: cholesky = tf.cast(tf.cholesky(tf.cast(kernel, tf.float64)), tf.float32) # Sample a curve # [batch_size, y_size, num_total_points, 1] y_values = tf.matmul( cholesky, tf.random_normal([self._batch_size, self._y_size, num_total_points, 1], seed=seed)) # [batch_size, num_total_points, y_size] y_values = tf.transpose(tf.squeeze(y_values, 3), [0, 2, 1]) if self._testing: # Select the targets 
target_x = x_values target_y = y_values # Select the observations idx = tf.random_shuffle(tf.range(num_target), seed=seed) context_x = tf.gather(x_values, idx[:num_context], axis=1) context_y = tf.gather(y_values, idx[:num_context], axis=1) else: # Select the targets which will consist of the context points as well as # some new target points target_x = x_values[:, :num_target + num_context, :] target_y = y_values[:, :num_target + num_context, :] # Select the observations context_x = x_values[:, :num_context, :] context_y = y_values[:, :num_context, :] query = ((context_x, context_y), target_x) return NPRegressionDescription( query=query, target_y=target_y, num_total_points=tf.shape(target_x)[1], num_context_points=num_context)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _gen_def(self):\n attributes = self.attributes()\n self._def = proto_util.make_operator_def_cpp(\n name=attributes.get('name', 'Op'),\n cache_key=self._cache_key,\n op_type=attributes['op_type'],\n device_option=proto_util.get_device_option(\n ...
[ "0.56554407", "0.56489086", "0.5634189", "0.5610428", "0.5561326", "0.55213106", "0.5519608", "0.5518672", "0.5507151", "0.54358256", "0.5426838", "0.5419625", "0.53720355", "0.53680634", "0.53558034", "0.5346631", "0.5309382", "0.5289855", "0.5285485", "0.52694434", "0.52685...
0.0
-1
Creates a regression dataset of functions sampled from a GP.
def __init__(self, batch_size, max_num_context, data, num_inst, testing=False): self._batch_size = batch_size self._max_num_context = max_num_context self._data = data self._x_data = self._data[:,1:-1] self._y_data = self._data[:,-1] self._testing = testing self._num_inst = num_inst self._num_pts_per_inst = tf.cast(self._data.get_shape().as_list()[0]/self._num_inst,tf.int32) self._x_uniq = self._x_data[:self._num_pts_per_inst] #tf.unique(self._x_data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_reg_dataset(request):\n set_seed()\n shape = request.param.get('shape', 10)\n size = request.param.get('size', 100)\n X, Y = make_regression(n_samples=2*size, n_features=shape, n_informative=10)\n X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.5)\n Y_train, Y_tes...
[ "0.6204215", "0.5876883", "0.5866936", "0.5827098", "0.581149", "0.5801915", "0.5776041", "0.5740856", "0.5726138", "0.5709513", "0.564359", "0.5622396", "0.5612163", "0.56014323", "0.55875564", "0.5551323", "0.54843557", "0.54687595", "0.54681313", "0.5466065", "0.54635614",...
0.0
-1
Builds the op delivering the data. Generated functions are `float32` with x values between 2 and 2.
def generate_curves(self, seed=None): num_context = tf.random_uniform( shape=[], minval=3, maxval=self._max_num_context, dtype=tf.int32, seed=seed) # If we are testing we want to have more targets and have them evenly # distributed in order to plot the function. if self._testing: num_target = self._x_data.get_shape().as_list()[0] num_total_points = num_target # During training the number of target points and their x-positions are # selected at random else: num_target = tf.random_uniform(shape=(), minval=0, maxval=self._max_num_context - num_context, dtype=tf.int32, seed=seed) num_total_points = num_context + num_target # idx for x vals in target idxs = [] # which instance to get y data from insts = [] for i in range(self._batch_size): idxs.append( tf.random_shuffle(tf.range(self._num_pts_per_inst), seed=seed) ) insts.append( tf.random_uniform(shape=[], minval=0, maxval=self._num_inst-1, dtype=tf.int32, seed=seed) ) idxs = tf.stack(idxs) insts = tf.stack(insts) # batchsize x numtotalpoints x size (xsize or ysize) x_values = tf.stack([tf.expand_dims(tf.gather(self._x_uniq, idxs[tf.cast(i,tf.int32)][:tf.cast(num_total_points,tf.int32)]), axis=-1) for i in range(self._batch_size)]) y_values = tf.stack([tf.expand_dims(tf.gather(self._y_data[insts[i]*self._num_pts_per_inst:(insts[i]+1)*self._num_pts_per_inst], idxs[i][:num_total_points]), axis=-1) for i in range(self._batch_size)]) if self._testing: # Select the targets target_x = x_values target_y = y_values # Select the observations idx_ctxt = tf.random_shuffle(tf.range(num_target), seed=seed) context_x = tf.gather(x_values, idx_ctxt[:num_context], axis=1) context_y = tf.gather(y_values, idx_ctxt[:num_context], axis=1) else: # Select the targets which will consist of the context points as well as # some new target points target_x = x_values[:, :num_target + num_context, :] target_y = y_values[:, :num_target + num_context, :] # Select the observations context_x = x_values[:, :num_context, :] context_y = y_values[:, 
:num_context, :] context_x = tf.squeeze(context_x,-1) target_x = tf.squeeze(target_x,-1) query = ((context_x, context_y), target_x) return NPRegressionDescription( query=query, target_y=target_y, num_total_points=tf.shape(target_x)[1], num_context_points=num_context)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _gen_def(self):\n attributes = self.attributes()\n self._def = proto_util.make_operator_def_cpp(\n name=attributes.get('name', 'Op'),\n cache_key=self._cache_key,\n op_type=attributes['op_type'],\n device_option=proto_util.get_device_option(\n ...
[ "0.56568396", "0.5650051", "0.563562", "0.5610685", "0.556", "0.5520528", "0.55197394", "0.5518905", "0.550704", "0.54352975", "0.542605", "0.541932", "0.53720164", "0.53683895", "0.53562075", "0.53452617", "0.530932", "0.5289747", "0.5284096", "0.5268779", "0.52684647", "0...
0.0
-1
Creates a regression dataset of functions sampled from a GP.
def __init__(self, batch_size, max_num_context, x_size=1, y_size=1, l1_scale=0.6, sigma_scale=1.0, epsilon=0.01, num_gammas=2, random_kernel_parameters=True, testing=False): self._batch_size = batch_size self._max_num_context = max_num_context self._x_size = x_size self._y_size = y_size self._l1_scale = l1_scale self._sigma_scale = sigma_scale self._random_kernel_parameters = random_kernel_parameters self._testing = testing self._epsilon = epsilon self._num_gammas = num_gammas
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_reg_dataset(request):\n set_seed()\n shape = request.param.get('shape', 10)\n size = request.param.get('size', 100)\n X, Y = make_regression(n_samples=2*size, n_features=shape, n_informative=10)\n X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.5)\n Y_train, Y_tes...
[ "0.620292", "0.58756006", "0.586723", "0.5826934", "0.58107764", "0.58019704", "0.57757723", "0.5740523", "0.57274187", "0.57073337", "0.564298", "0.5618902", "0.56113887", "0.5600561", "0.55865526", "0.55512285", "0.5485134", "0.5467787", "0.54674876", "0.54655284", "0.54627...
0.0
-1
Builds the op delivering the data. Generated functions are `float32` with x values between 2 and 2.
def generate_curves(self, seed=None): num_context = tf.random_uniform( shape=[], minval=3, maxval=self._max_num_context, dtype=tf.int32, seed=seed) # If we are testing we want to have more targets and have them evenly # distributed in order to plot the function. if self._testing: num_target = 400 num_total_points = num_target x_values = tf.tile( tf.expand_dims(tf.range(-2., 2., 1. / 100, dtype=tf.float32), axis=0), [self._batch_size, 1]) x_values = tf.expand_dims(x_values, axis=-1) # During training the number of target points and their x-positions are # selected at random else: num_target = tf.random_uniform(shape=(), minval=0, maxval=self._max_num_context - num_context, dtype=tf.int32, seed=seed) num_total_points = num_context + num_target x_values = tf.random_uniform( [self._batch_size, num_total_points, self._x_size], -2, 2, seed=seed) def w(x, x_min=-2, x_max=2): weight_vals = tf.stack([ [1/(i+1) if j <= i else 0 for j in range(self._num_gammas)] for i in range(self._num_gammas)]) bucketsize = (x_max-x_min)/self._num_gammas buckets = (x-x_min)/bucketsize buckets = tf.reshape(buckets,[-1]) mapped = tf.expand_dims(tf.expand_dims(tf.map_fn(lambda x: weight_vals[tf.cast(x,tf.int32)], buckets),-2),-2) return mapped # Set kernel parameters # Either choose a set of random parameters for the mini-batch if self._random_kernel_parameters: gammas = 3.14*tf.random_uniform([self._num_gammas, self._batch_size], 0.1, 2) gammas = tf.expand_dims(tf.expand_dims(gammas,-1),-1) # Or use the same fixed parameters for all mini-batches else: gammas = 3.14*tf.linspace(0.1,2,self._num_gammas) print(gammas) #gammas = tf.broadcast_to(gammas,[self._num_gammas, self._batch_size]) gammas = tf.reshape(tf.tile(gammas,tf.constant([self._batch_size])),[self._num_gammas, self._batch_size]) gammas = tf.expand_dims(tf.expand_dims(gammas,-1),-1) weights = w(x_values) weights = tf.reshape(weights, [self._batch_size, num_total_points,self._x_size,self._num_gammas]) weights = 
tf.transpose(weights,[3,0,1,2]) gammas = tf.broadcast_to(gammas,[self._num_gammas, self._batch_size, num_total_points, self._x_size]) x_values_bcast = tf.expand_dims(x_values, 0) x_values_bcast = tf.broadcast_to(x_values_bcast,[self._num_gammas, self._batch_size, num_total_points, self._x_size]) out = tf.math.multiply(gammas,x_values_bcast) out = tf.math.multiply(weights,tf.sin(out)) out = tf.reduce_sum(out,axis=0) y_values = out y_values += tf.random.normal((self._batch_size,num_total_points,self._y_size),stddev = self._epsilon, seed=seed) if self._testing: # Select the targets target_x = x_values target_y = y_values # Select the observations idx = tf.random_shuffle(tf.range(num_target), seed=seed) context_x = tf.gather(x_values, idx[:num_context], axis=1) context_y = tf.gather(y_values, idx[:num_context], axis=1) else: # Select the targets which will consist of the context points as well as # some new target points target_x = x_values[:, :num_target + num_context, :] target_y = y_values[:, :num_target + num_context, :] # Select the observations context_x = x_values[:, :num_context, :] context_y = y_values[:, :num_context, :] query = ((context_x, context_y), target_x) return NPRegressionDescription( query=query, target_y=target_y, num_total_points=tf.shape(target_x)[1], num_context_points=num_context)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _gen_def(self):\n attributes = self.attributes()\n self._def = proto_util.make_operator_def_cpp(\n name=attributes.get('name', 'Op'),\n cache_key=self._cache_key,\n op_type=attributes['op_type'],\n device_option=proto_util.get_device_option(\n ...
[ "0.56554407", "0.56489086", "0.5634189", "0.5610428", "0.5561326", "0.55213106", "0.5519608", "0.5518672", "0.5507151", "0.54358256", "0.5426838", "0.5419625", "0.53720355", "0.53680634", "0.53558034", "0.5346631", "0.5309382", "0.5289855", "0.5285485", "0.52694434", "0.52685...
0.0
-1
Creates a regression dataset of functions sampled from a GP.
def __init__(self, batch_size, min_num_context, max_num_context, data, num_inst, testing=False): self._batch_size = batch_size self._min_num_context = min_num_context self._max_num_context = max_num_context self._data = data # Hardcoded for right now self._x_data = self._data[:,1:-1] self._y_data = self._data[:,-1:] self._testing = testing self._num_inst = num_inst self._num_pts_per_inst = tf.cast(self._data.get_shape().as_list()[0]/self._num_inst,tf.int32) self._x_uniq = self._x_data[:self._num_pts_per_inst]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_reg_dataset(request):\n set_seed()\n shape = request.param.get('shape', 10)\n size = request.param.get('size', 100)\n X, Y = make_regression(n_samples=2*size, n_features=shape, n_informative=10)\n X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.5)\n Y_train, Y_tes...
[ "0.6204215", "0.5876883", "0.5866936", "0.5827098", "0.581149", "0.5801915", "0.5776041", "0.5740856", "0.5726138", "0.5709513", "0.564359", "0.5622396", "0.5612163", "0.56014323", "0.55875564", "0.5551323", "0.54843557", "0.54687595", "0.54681313", "0.5466065", "0.54635614",...
0.0
-1
Builds the op delivering the data. Generated functions are `float32` with x values between 2 and 2.
def generate_curves(self, seed=None): num_context = tf.random_uniform( shape=[], minval=self._min_num_context, maxval=self._max_num_context, dtype=tf.int32, seed=seed) # If we are testing we want to have more targets and have them evenly # distributed in order to plot the function. if self._testing: num_target = self._num_pts_per_inst #self._x_data.get_shape().as_list()[0] num_total_points = num_target # During training the number of target points and their x-positions are # selected at random else: num_target = tf.random_uniform(shape=(), minval=0, maxval=self._max_num_context - num_context, dtype=tf.int32, seed=seed) num_total_points = num_context + num_target # idx for x vals in target idxs = [] # which instance to get y data from insts = [] for i in range(self._batch_size): idxs.append( tf.random_shuffle(tf.range(self._num_pts_per_inst), seed=seed) ) insts.append( tf.random_uniform(shape=[], minval=0, maxval=self._num_inst-1, dtype=tf.int32, seed=seed) ) idxs = tf.stack(idxs) insts = tf.stack(insts) # batchsize x numtotalpoints x size (xsize or ysize) x_values = tf.stack([tf.expand_dims(tf.gather(self._x_uniq, idxs[tf.cast(i,tf.int32)][:tf.cast(num_total_points,tf.int32)]), axis=-1) for i in range(self._batch_size)]) y_values = tf.stack([tf.expand_dims(tf.gather(self._y_data[insts[i]*self._num_pts_per_inst:(insts[i]+1)*self._num_pts_per_inst], idxs[i][:num_total_points]), axis=-1) for i in range(self._batch_size)]) if self._testing: # Select the targets target_x = x_values target_y = y_values # Select the observations idx_ctxt = tf.random_shuffle(tf.range(num_target), seed=seed) context_x = tf.gather(x_values, idx_ctxt[:num_context], axis=1) context_y = tf.gather(y_values, idx_ctxt[:num_context], axis=1) else: # Select the targets which will consist of the context points as well as # some new target points target_x = x_values[:, :num_target + num_context, :] target_y = y_values[:, :num_target + num_context, :] # Select the observations context_x = x_values[:, 
:num_context, :] context_y = y_values[:, :num_context, :] context_x = tf.squeeze(context_x,-1) target_x = tf.squeeze(target_x,-1) context_y = tf.squeeze(context_y,-1) target_y= tf.squeeze(target_y,-1) query = ((context_x, context_y), target_x) return NPRegressionDescription( query=query, target_y=target_y, num_total_points=tf.shape(target_x)[1], num_context_points=num_context)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _gen_def(self):\n attributes = self.attributes()\n self._def = proto_util.make_operator_def_cpp(\n name=attributes.get('name', 'Op'),\n cache_key=self._cache_key,\n op_type=attributes['op_type'],\n device_option=proto_util.get_device_option(\n ...
[ "0.56568396", "0.5650051", "0.563562", "0.5610685", "0.556", "0.5520528", "0.55197394", "0.5518905", "0.550704", "0.54352975", "0.542605", "0.541932", "0.53720164", "0.53683895", "0.53562075", "0.53452617", "0.530932", "0.5289747", "0.5284096", "0.5268779", "0.52684647", "0...
0.0
-1
Convenience method to wrap the exceptions.
def _doRequest(self, httpClientMethod, *args): try: resp = httpClientMethod(*args) return resp.json() except RequestException as e: raise checkedError(e)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def WrappedException(self) -> object:", "def _wrap_exceptions(self):\n try:\n yield\n except OSError as err:\n if is_permission_err(err):\n raise AccessDenied(\n pid=None, name=self._name,\n msg=\"service %r is not querable ...
[ "0.79373294", "0.72308654", "0.69572264", "0.6808807", "0.6775871", "0.6631308", "0.65699166", "0.6539062", "0.6535695", "0.6529837", "0.6529736", "0.6491012", "0.63828194", "0.62940586", "0.62925386", "0.62722045", "0.6255905", "0.6253773", "0.62497485", "0.6234423", "0.6210...
0.0
-1
Returns a client configured with the given MetaHttpClient
def __init__(self, metaHttpClient): self.httpClient = metaHttpClient
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_httpx_client() -> httpx.Client:\n return httpx.Client(**CLIENT_PARAMETERS) # type: ignore", "def _obtain_http_client(hostname=METADATA_SERVER_HOSTNAME):\n return http.client.HTTPConnection(hostname,\n timeout=METADATA_SERVER_CONN_TIMEOUT)", "def get(cls, conf...
[ "0.69275284", "0.69132006", "0.6854978", "0.6838644", "0.67625606", "0.6725256", "0.6679172", "0.65378565", "0.6535458", "0.6499919", "0.64733833", "0.6437994", "0.6422018", "0.6411563", "0.6403961", "0.63889176", "0.6383493", "0.6340762", "0.6308783", "0.6306034", "0.6300599...
0.6965378
0
Factory method to reate a new client from url and auth strategy.
def createClientFromUrl(url, authStrategy=None): return MetaClient(MetaHttpClient(url, authStrategy))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def client():\n return Client(**common_data.AUTH_ARGS)", "def make_client(self, context):\n return Client(self.settings['client_routing'], context=context)", "def client_from_config(cls, base_client, conf, logger=None):\n _unused = conf\n if cls == PapiViewClient:\n # we're i...
[ "0.6883496", "0.6652142", "0.65504664", "0.63140374", "0.62927", "0.6198969", "0.61564", "0.6089419", "0.6076771", "0.60702914", "0.60631484", "0.60540426", "0.6042376", "0.6015032", "0.5999247", "0.5936448", "0.5922538", "0.59159464", "0.58842707", "0.5857151", "0.58331627",...
0.74867505
0
List the instances, with lsstLevel in ('DC', 'L1', 'L2', 'L3', 'dev'), which have at least one database for the 'db' repo type.
def getTypes(self): return self._doRequest(self.httpClient.getTypes)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_list(self, line):\n\t\tx = [i for i in self.client.list_databases() if i['name'] not in ['admin','config','line','local','mongoengine_test','pymongo_test']]\n\t\tfor db in x:\n\t\t\tprint(db['name'])", "def list_databases(self):\n r = self.__get_response(settings.LST_DBS)\n if r[\"status\"] ...
[ "0.6280146", "0.57084984", "0.55510706", "0.5403129", "0.53723526", "0.5356619", "0.530622", "0.5245011", "0.5198752", "0.5198608", "0.51941144", "0.5190803", "0.5145623", "0.5101044", "0.5100781", "0.50872535", "0.5075189", "0.5059387", "0.5053584", "0.49898225", "0.49266067...
0.0
-1
Retrieves information about a database
def getDbInfo(self, lsstLevel, dbName): return self._doRequest(self.httpClient.getDbInfo, lsstLevel, dbName)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dbinfo(self):\n data = self._http_get(\"dbInfo\")\n return data.json()", "def get_dbinfo(error=True):\n try:\n return sesh.query(DbInfo).one()\n except NoResultFound:\n if error:\n logger.warning(\"No entry _dbinfo table, database appears to be blank\")\n ...
[ "0.83317816", "0.7298387", "0.722238", "0.7191413", "0.7083686", "0.6978649", "0.6951895", "0.6880173", "0.68146276", "0.6764493", "0.6733084", "0.6724456", "0.6644894", "0.6605659", "0.66054237", "0.6599033", "0.65978765", "0.65917647", "0.6574369", "0.65687776", "0.65403485...
0.6968607
6
List available table names
def getTableNames(self, lsstLevel, dbName): return self._doRequest(self.httpClient.getTableNames, lsstLevel, dbName)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getTableNames(self):\n\n # The specific command depends on whether we are using mysql or sqlite\n if self.connector == 'mysql':\n sqlcmd = (\"SELECT table_name FROM INFORMATION_SCHEMA.TABLES \" +\n \"WHERE table_schema='\" + self.dbname + \"'\")\n else:\n ...
[ "0.7888064", "0.77418286", "0.77407134", "0.7710163", "0.7683815", "0.76791114", "0.7667677", "0.7644376", "0.76118696", "0.7600348", "0.7563779", "0.75542873", "0.7550293", "0.74972355", "0.74848264", "0.7477161", "0.7420689", "0.73450434", "0.73422253", "0.73126185", "0.726...
0.6605125
46
Get a table's information
def getTableInfo(self, lsstLevel, dbName, tableName): return self._doRequest(self.httpClient.getTableInfo, lsstLevel, dbName, tableName)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_table(self):\n\t\treturn self._table", "def table_info(self, table_path: str, verbose:bool = True) -> Table:\n dataset, table = table_path.split('.')\n dataset_ref = self.client.dataset(dataset)\n table_ref = dataset_ref.table(table)\n info = self.client.get_table(table_ref)\...
[ "0.74724716", "0.74560463", "0.744552", "0.71629936", "0.70946264", "0.7066326", "0.7054935", "0.70179635", "0.70179635", "0.69705325", "0.6951181", "0.69114673", "0.6870947", "0.6840309", "0.6770856", "0.67565924", "0.67344403", "0.67344403", "0.6717223", "0.6646127", "0.663...
0.7214678
3
Get a table's schema
def getTableSchema(self, lsstLevel, dbName, tableName): return self._doRequest(self.httpClient.getTableSchema, lsstLevel, dbName, tableName)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def schema(self):\n return self.table_info.schema", "def get_schema(self, repo, table):\n return self.user_con.get_schema(repo=repo, table=table)", "def get_table_schema(dataset_id, table_id):\n logging.info('getting table schema')\n bigquery_client = bigquery.Client()\n dataset_ref = bi...
[ "0.8177393", "0.79869926", "0.7746395", "0.763751", "0.75442374", "0.75305074", "0.75034606", "0.73682463", "0.7363106", "0.7291899", "0.714085", "0.7013943", "0.69358236", "0.69128716", "0.68929845", "0.6827799", "0.6759348", "0.66797787", "0.66750246", "0.6665475", "0.66397...
0.7176996
10
Function for loading the features and labels associated with the training dataset.
def _loadTrain(self, features, labels): self.trainX_, self.trainY_, self.trainLabel_ = self.__load(features, labels)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_features(self, features):\n pass\n # self.features = features", "def read_data(feature_file, label_file):", "def train(self, features, labels):\n pass", "def _read_train_datas(self):\r\n with open(self.train_label_path, 'r') as fb:\r\n lines = fb.readlines()\r\...
[ "0.72724336", "0.71050906", "0.70961773", "0.7020884", "0.6974502", "0.69578195", "0.68213254", "0.68110377", "0.68032", "0.67908686", "0.6778066", "0.6760534", "0.67411566", "0.6715979", "0.66920966", "0.66892487", "0.66885465", "0.66482824", "0.6603724", "0.65990937", "0.65...
0.7857445
0
Function for loading the features and labels associated with the testing dataset.
def _loadTest(self, features, labels): self.testX_, self.testY_, self.testLabel_ = self.__load(features, labels)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_test_data(label_fname, data_fname):\n labels = load_csv(label_fname)\n data = load_csv(data_fname, 'excel-tab')\n\n # Join all data together on the ids given in the files\n joined_data = {}\n for label in labels:\n id = label[0]\n joined_data[id] = {'class': label[1]}\n for...
[ "0.7340451", "0.7329776", "0.729219", "0.70436066", "0.70091707", "0.6979865", "0.6900357", "0.68143857", "0.67991424", "0.67709094", "0.67183954", "0.6622276", "0.6608396", "0.6606673", "0.65982616", "0.657758", "0.65768003", "0.65713066", "0.6517931", "0.65098", "0.65078866...
0.80802953
0
Function for loading the features and labels associated with the validation dataset.
def _loadValid(self, features, labels): self.validX_, self.validY_, self.validLabel_ = self.__load(features, labels)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _loadTrain(self, features, labels):\n\t\tself.trainX_, self.trainY_, self.trainLabel_ = self.__load(features, labels)", "def load_data(self):\n sets = ['train', 'val']\n images = []\n labels = []\n self.labels_dic = {}\n file = open(self.path + 'wnids.txt')\n train_l...
[ "0.7066322", "0.6910587", "0.68762726", "0.68701327", "0.6851733", "0.6828537", "0.6772606", "0.6747656", "0.671662", "0.6708065", "0.6663688", "0.6652927", "0.66424817", "0.66018474", "0.6580891", "0.6563428", "0.6560234", "0.65583473", "0.65253925", "0.6494413", "0.64848816...
0.7184243
0
This function handles user registration
def register(name, username, password, rpt_password): if name and username and password and rpt_password: if len(username) > 3 and len(username) < 11: if len(password) > 2 and len(password) < 11: if password == rpt_password: USERS[username] = User(name, username, password) return "Registration successful" return "Passwords don't match" return "Password should be 2 to 10 characters" return "Username should be 4 to 10 characters" return "None input"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def register_user():\n pass", "def user_register():\n \n data = user_obj.user_register(request.forms) \n return data", "def register(self):\n first_name = self.first_name_entry.get()\n insertion = self.insertion_entry.get()\n last_name = self.last_name_entry.get()\...
[ "0.85944766", "0.7984343", "0.7837519", "0.7786772", "0.77072763", "0.76304734", "0.76186043", "0.7611673", "0.7604288", "0.75816953", "0.75363594", "0.7516793", "0.7514423", "0.7514068", "0.7497228", "0.7494317", "0.74817437", "0.7476521", "0.7458846", "0.7452957", "0.744945...
0.0
-1
Handles the home route
def home(): # if session.get('username'): # return redirect(url_for('categories')) # else: return render_template('home.html')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def go_home(request):\n\n url = request.route_url('home', _app_url=get_app_url(request))\n return HTTPFound(location=url)", "def home(self, *args, **kwargs):\n pass", "def home():\n logging.info('Entering route: HOME')\n\n logging.info('Rendering template: main.html')\n return render_temp...
[ "0.79052025", "0.78745264", "0.7848005", "0.7505829", "0.7453932", "0.7407278", "0.73913604", "0.7381116", "0.7372379", "0.7372379", "0.7320766", "0.72788364", "0.7265526", "0.7234542", "0.7204967", "0.7204967", "0.7204967", "0.7204967", "0.7204967", "0.7204967", "0.7204967",...
0.6969318
67
Handles the sign_in route
def sign_in(): if request.method == 'POST': result = login(request.form['username'], request.form['password']) if result == "Login successful": session['username'] = request.form['username'] return redirect(url_for('categories')) flash(result, 'warning') return render_template('login.html')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def signin():\n auth_service = AuthService()\n form = SignInForm()\n if request.method == 'GET':\n return render_template('auth/signin.html', title='Sign In', form=form)\n\n elif request.method == 'POST':\n if form.validate_on_submit():\n user_dto = UserDto(form.email.data)\n ...
[ "0.7458304", "0.67892754", "0.6762494", "0.67435724", "0.6733293", "0.6732921", "0.6665116", "0.6608298", "0.6507655", "0.6502823", "0.6433052", "0.64084053", "0.64078736", "0.63882333", "0.63157964", "0.6315457", "0.63117033", "0.62719864", "0.6269254", "0.6257567", "0.62241...
0.63443756
14
Handles the sign_up route
def sign_up(): if request.method == 'POST': result = register(request.form['name'], request.form['username'], request.form['password'], request.form['rpt_password']) if result == "Registration successful": flash(result, 'info') return redirect(url_for('sign_in')) flash(result, 'warning') return render_template('register.html')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def signup():", "def sign_up():\n form = RegisterForm()\n if request.method == \"GET\":\n return render_template('adduser.html', title='Add New User', form=form)\n if request.method == 'POST' and form.validate_on_submit():\n username = form.username.data\n password = form.password1....
[ "0.80126196", "0.79250133", "0.7829918", "0.7825756", "0.77395386", "0.7690627", "0.76694167", "0.7645422", "0.76431364", "0.76015717", "0.7543025", "0.7489129", "0.7442244", "0.740847", "0.73981065", "0.73766357", "0.7358332", "0.7352743", "0.73002946", "0.7288514", "0.72815...
0.7683991
6
Decorator function to ensure some routes are only accessed by logged in users
def login_required(func): @wraps(func) def decorated_function(*args, **kwargs): """ Modified descriprition of the decorated function """ if not session.get('username'): flash('Login to continue', 'warning') return redirect(url_for('sign_in', next=request.url)) return func(*args, **kwargs) return decorated_function
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def require_logged_in():\n def handler(f, *args, **kwargs):\n if args[0].current_user is not None:\n return f(*args, **kwargs)\n else:\n raise HTTPFound(args[0].route_url('user.login', _query={'redirect': encode_route(args[0])}))\n return decorator(handler)", "def check_...
[ "0.74305403", "0.7284967", "0.7256965", "0.719715", "0.7149432", "0.71470237", "0.7113935", "0.7104468", "0.7091239", "0.7073398", "0.7050116", "0.70411307", "0.70002747", "0.6996211", "0.69772625", "0.6963908", "0.6963348", "0.69509095", "0.6946448", "0.69141006", "0.6906666...
0.0
-1
Modified descriprition of the decorated function
def decorated_function(*args, **kwargs): if not session.get('username'): flash('Login to continue', 'warning') return redirect(url_for('sign_in', next=request.url)) return func(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hook_description(self) -> str:", "def func_doc():", "def desc(text):\n def _decoration(fcn):\n fcn.desc = text\n return fcn\n return _decoration", "def describe(self, *args, **kwargs):\n def _autodoc(func, *_args, **_kwargs):\n if len(_args) > 0:\n #: ...
[ "0.7339779", "0.72559106", "0.7210304", "0.70913494", "0.7081971", "0.7081971", "0.7081971", "0.7081971", "0.7081971", "0.706399", "0.706313", "0.70578057", "0.7051008", "0.7051008", "0.7051008", "0.7051008", "0.7051008", "0.7051008", "0.7051008", "0.7051008", "0.7049549", ...
0.0
-1
Handles displaying recipe categories
def categories(): return render_template('categories.html', recipe_categories=USERS[session['username']].recipe_categories)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_categories():\n if 'user' not in session:\n flash(\"You need to own this recipe to be able to delete it.\")\n return redirect(url_for(\"login\"))\n\n categories = list(mongo.db.categories.find().sort(\"category_name\", 1))\n return render_template(\"categories.html\", categories=cate...
[ "0.7086564", "0.68516177", "0.67932105", "0.67012036", "0.66799504", "0.6656528", "0.6643379", "0.66044766", "0.6603303", "0.66017115", "0.65058404", "0.6419562", "0.6403973", "0.6363026", "0.63584274", "0.63432485", "0.63299835", "0.6321505", "0.6302908", "0.62928087", "0.62...
0.7799081
0
Handles new recipe_category creation requests
def add_category(): if request.method == 'POST': result = USERS[session['username']].add_recipe_category( request.form['title']) if result == 'recipe_category added': flash(result, 'info') else: flash(result, 'warning') return redirect(url_for('categories')) return render_template('add_category.html')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_category(self, category):\n\n super().new_entry()\n\n return Categories.objects.create(\n name=category['id'].split(':')[1],\n name_fr=category['name'],\n url=category['url']\n )", "def insert_recipe_category():\r\n\r\n # validates request form\...
[ "0.73274326", "0.73215884", "0.72547656", "0.720218", "0.7147975", "0.6993748", "0.6935057", "0.6929752", "0.6889053", "0.6879494", "0.6795871", "0.67828804", "0.6741852", "0.671282", "0.67056173", "0.6700246", "0.6682995", "0.66797304", "0.66602266", "0.6643519", "0.66363686...
0.70658386
5
Handles request to edit recipe categories
def edit_recipe_category(title): session['recipe_category_title'] = title if request.method == 'POST': result = USERS[session['username']].edit_recipe_category(session['recipe_category_title'], request.form['title']) if result == 'recipe_category edited': flash(result, 'info') else: flash(result, 'warning') return redirect(url_for('dashboard')) return render_template('edit_recipe_category.html')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(self, request, *args, **kwargs):\n response = super(CategoryViewSet).update(self, request, *args, *kwargs)\n response.data['message'] = \"Categoria ha sido editada\"", "def update_recipe_category(db_id):\r\n\r\n # validates request form\r\n form = request.form\r\n error_list = v...
[ "0.6980986", "0.67703384", "0.66190696", "0.6612037", "0.6558475", "0.64691764", "0.6451772", "0.63451344", "0.6299408", "0.6298648", "0.6292263", "0.62858766", "0.62654465", "0.62084377", "0.6183476", "0.61449456", "0.6102691", "0.6096874", "0.60889256", "0.60741085", "0.607...
0.66975844
2
Handles request to delete a category_recipe
def delete_recipe_category(title): result = USERS[session['username']].delete_recipe_category(title) if result == "recipe category deleted": flash(result, 'info') else: flash(result, 'warning') return redirect(url_for('categories'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_recipe(request, recipe, **_kwargs):\n pass", "async def delete_recipe_category(category: str, session: Session = Depends(generate_session)):\n\n try:\n db.categories.delete(session, category)\n except Exception:\n raise HTTPException(status.HTTP_400_BAD_REQUEST)", "def delete(...
[ "0.79797304", "0.78852093", "0.7834236", "0.76737183", "0.74752146", "0.73958224", "0.72300977", "0.7216423", "0.7147001", "0.7062482", "0.70403314", "0.70357853", "0.69381744", "0.6923182", "0.6851127", "0.6824262", "0.6816408", "0.6795457", "0.67638993", "0.6761307", "0.675...
0.7298024
6
Handles new recipe creation requests
def add_recipe(): if request.method == 'POST': result = USERS[session['username']].recipe_category[session['current_recipe_category_title']].add_recipe( request.form['description']) if result == 'recipe added': flash(result, 'info') else: flash(result, 'warning') return redirect(url_for('view_recipes', recipe_category_title=session['current_recipe_category_title'])) return render_template('add_recipe.html', recipes=USERS[session['username']].recipe_category[session['current_recipe_catrgory_title']].recipes)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new_recipe(request, **_kwargs):\n return create_view(request, _(\"Recipe\"), RecipeForm)", "def createRecipe():\n categories = session.query(Category).all()\n if request.method == 'POST':\n # check if user entered a name\n name = request.form['name'].strip()\n if not name:\n ...
[ "0.75435215", "0.7138149", "0.7075436", "0.7064944", "0.69318324", "0.6796718", "0.6732778", "0.663411", "0.66334003", "0.6547954", "0.6489557", "0.6473552", "0.64724547", "0.64358306", "0.64238113", "0.6405184", "0.63739914", "0.63559365", "0.6348362", "0.627751", "0.622008"...
0.6105025
25
Handles request to edit a recipe
def edit_recipe(description): session['description']=description if request.method == 'POST': des_result=(USERS[session['username']].recipe_category[session['current_recipe_category_title']]. update_description(session['description'], request.form['description'])) status_result=(USERS[session['username']].recipe_category[session['current_recipe_category_title']]. update_status(session['description'], request.form['status'])) if des_result == 'recipe updated' or status_result == 'recipe updated': flash('recipe updated', 'info') else: flash(des_result, 'warning') return redirect(url_for('edit_recipe', recipe_category_title=session['current_recipe_category_title'])) return render_template('edit_recipe.html', item=USERS[session['username']] .recipe_category[session['current_recipe_category_title']].recipes[description], recipes=USERS[session['username']]. recipe_category[session['current_recipe_category_title']].recipes)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def edit_recipe(request, recipe, **_kwargs):\n return edit_view(request, _(\"Recipe\"), RecipeForm, recipe)", "def edit_recipe(id):\n existing_recipe = mongo.db.recipes.find_one({\"_id\": ObjectId(id)})\n\n if request.method == \"POST\":\n recipe = recipe_parser(dict(request.form), session[\"user...
[ "0.8221591", "0.7541125", "0.75210214", "0.74383444", "0.73951805", "0.73193896", "0.7067067", "0.69540036", "0.6741911", "0.67342657", "0.66984826", "0.6649248", "0.6636701", "0.66151386", "0.6532925", "0.65055865", "0.64919716", "0.64248574", "0.64130706", "0.6391982", "0.6...
0.7465735
3
Creates the book table in the Book database
def __init__(self, db): self.connection = sqlite3.connect(db) self.cursor = self.connection.cursor() self.cursor.execute("CREATE TABLE IF NOT EXISTS Book (Id INTEGER PRIMARY KEY, \ Title TEXT, Author TEXT, Year INTEGER, ISBN INTEGER)") self.connection.commit()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_books_table():\n try:\n print(\"Creating 'books' table..\")\n db.execute(\"CREATE TABLE books (\\\n id SERIAL PRIMARY KEY, \\\n isbn VARCHAR NOT NULL, \\\n title VARCHAR NOT NULL, \\\n author VARCHAR NOT NULL, \\\n year INTEGER NOT ...
[ "0.8391721", "0.8073142", "0.75710934", "0.7502907", "0.74889827", "0.7375526", "0.72876626", "0.72835785", "0.72679055", "0.72471905", "0.7243783", "0.7243783", "0.72309494", "0.7198741", "0.71855557", "0.7175286", "0.71568626", "0.71470326", "0.7044951", "0.69564736", "0.69...
0.6404445
81
Inserts a book into the book table
def insert_book(self, title, author, year, isbn): self.cursor.execute("INSERT INTO Book VALUES(NULL, ?, ?, ?, ?)", (title, author, year, isbn)) self.connection.commit()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def insert_book(title, author, year):\n try:\n cursor = conn.cursor()\n cursor.execute(\"\"\"\n INSERT INTO books(title, author, year)\n VALUES(?,?,?)\n \"\"\", (title, author, year))\n conn.commit()\n except Exception as e:\n logging.error(e)\n ...
[ "0.8030016", "0.75679004", "0.74723715", "0.74601537", "0.7431787", "0.70822483", "0.6939844", "0.688597", "0.66291636", "0.6627789", "0.6603897", "0.65962255", "0.6493063", "0.64219564", "0.6401724", "0.625291", "0.62327266", "0.62045246", "0.618287", "0.618287", "0.618287",...
0.80825835
0
Returns all books from the book table
def view(self): self.cursor.execute("SELECT * FROM Book") rows = self.cursor.fetchall() return rows
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getBooks(self, showAll=False):\n if showAll:\n sql = '''select ID, NAME from books;'''\n else:\n sql = '''\nselect books.id, books.name, books.author\nfrom books where exists (\nselect * from clippings where books.id = clippings.book);'''\n\n cur = self.__execute__(sq...
[ "0.81415516", "0.8135023", "0.8131734", "0.7800268", "0.76401293", "0.7442416", "0.7225693", "0.7212461", "0.7101638", "0.70201796", "0.6977427", "0.69417465", "0.6848331", "0.68029505", "0.68029505", "0.67492485", "0.67429066", "0.6742117", "0.67173076", "0.66324574", "0.663...
0.7171645
8
Searches for a book and returns the results
def search(self, title="", author="", year="", isbn=""): self.cursor.execute("SELECT * FROM Book WHERE Title = ? OR Author = ? \ OR Year = ? OR ISBN = ?", (title, author, year, isbn)) rows = self.cursor.fetchall() return rows
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def search_book():\n\n title = request.form.get(\"search\")\n books = book_search_results(GR_KEY, title)\n acct = get_current_account(session['acct'])\n search = True\n\n return render_template(\"index.html\", books=books, acct=acct, search=search)", "def book_search(library: list) -> None:\n o...
[ "0.8034663", "0.78664994", "0.7567901", "0.7562604", "0.755627", "0.74418813", "0.74398714", "0.7436998", "0.7309737", "0.7302588", "0.72104746", "0.71729445", "0.7163339", "0.7037132", "0.7021564", "0.6922285", "0.6891528", "0.68843335", "0.6879334", "0.6812493", "0.6773731"...
0.72077996
11
Deletes a book from the book database
def delete(self, id): self.cursor.execute("DELETE FROM Book WHERE Id = ?", (id,)) self.connection.commit()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_book(self, book):\n try:\n with self._db as db:\n cur = db.cursor()\n cur.execute('DELETE FROM books WHERE rowid = ?', (book.id, ))\n if not cur.rowcount:\n raise BookError('Tried to delete book tha...
[ "0.86744374", "0.83739376", "0.81451386", "0.81293005", "0.80114824", "0.78326046", "0.7717967", "0.7628112", "0.7619284", "0.7606074", "0.7555229", "0.7365288", "0.73290306", "0.71810013", "0.70325434", "0.6949556", "0.68995345", "0.6835646", "0.68113416", "0.67038083", "0.6...
0.772658
6