query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Defines the observations provided by the environment. May use a subclass of `ArraySpec` that specifies additional properties such as min and max bounds on the values.
def observation_spec(self) -> types.NestedArraySpec:
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def observation_spec(self):\n return ArraySpec(shape=(23,), dtype=np.float32)", "def __init__(self, pathspec, properties={}):\n import numpy\n self.pathspec = pathspec\n super(ArraySpec,self).__init__(numpy.ndarray)\n self.properties = OrderedDict(properties)", "def observati...
[ "0.6746986", "0.60315347", "0.59593797", "0.5834228", "0.58072776", "0.5796935", "0.5789936", "0.5784668", "0.5759724", "0.5755014", "0.5729606", "0.57125753", "0.56525546", "0.5553753", "0.55418384", "0.5525847", "0.55208904", "0.5515985", "0.5496002", "0.5496002", "0.547812...
0.60313916
2
Defines the actions that should be provided to `step()`. May use a subclass of `ArraySpec` that specifies additional properties such as min and max bounds on the values.
def action_spec(self) -> types.NestedArraySpec:
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def action_spec(self):\n min_Kp_Kd = np.zeros(12)\n max_Kp_Kd = np.ones(12)*np.inf\n min_pos = np.array([-0.6, -1.7, -0.45, -0.6, -1.7, -0.45,\n -0.6, -1.7, -0.45, -0.6, -1.7, -0.45])\n max_pos = np.array([0.5, 1.7, 1.6, 0.5, 1.7, 1.6,\n ...
[ "0.6053397", "0.59856373", "0.5846451", "0.5825244", "0.5739144", "0.57092017", "0.5685348", "0.5438964", "0.5409966", "0.5284483", "0.5272299", "0.5256275", "0.524941", "0.524941", "0.52339363", "0.522407", "0.5194787", "0.5182742", "0.51721895", "0.51688147", "0.514305", ...
0.55081457
7
Defines the rewards that are returned by `step()`. Override this method to define an environment that uses nonstandard reward values, for example an environment with arrayvalued rewards.
def reward_spec(self) -> types.NestedArraySpec: return array_spec.ArraySpec(shape=(), dtype=np.float32, name='reward')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_rewards(self, step_reward, goal_reward, bad_state_reward=None, restart_state_reward = None):\n self.r_step = step_reward\n self.r_goal = goal_reward\n self.r_bad = bad_state_reward\n self.r_restart = restart_state_reward", "def __init__(self, env):\n gym.RewardWrapper._...
[ "0.62321734", "0.61911786", "0.6097618", "0.6054359", "0.5976745", "0.59590447", "0.59165645", "0.5758093", "0.56996363", "0.56274784", "0.56224126", "0.55879575", "0.5585855", "0.5566248", "0.55561215", "0.55531347", "0.5542366", "0.55359405", "0.5531425", "0.55208486", "0.5...
0.5081516
74
Defines the discount that are returned by `step()`. Override this method to define an environment that uses nonstandard discount values, for example an environment with arrayvalued discounts.
def discount_spec(self) -> types.NestedArraySpec: return array_spec.BoundedArraySpec( shape=(), dtype=np.float32, minimum=0.0, maximum=1.0, name='discount' )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def discount_spec(self):\n task_discount_spec = self._task.get_discount_spec()\n if task_discount_spec is not None:\n return task_discount_spec\n else:\n return super(Environment, self).discount_spec()", "def add_discount(self, discount):\n self.gamma = discount", "def apply_discount(...
[ "0.59988624", "0.58988583", "0.5898098", "0.5847042", "0.5623778", "0.5441156", "0.5411965", "0.52358055", "0.5201164", "0.5162187", "0.5124095", "0.5123308", "0.5074123", "0.49902314", "0.4981849", "0.49234065", "0.49178696", "0.49120015", "0.4909685", "0.4880568", "0.488056...
0.5466874
5
Describes the `TimeStep` fields returned by `step()`. Override this method to define an environment that uses nonstandard values for any of the items returned by `step()`. For example, an environment with arrayvalued rewards.
def time_step_spec(self) -> ts.TimeStep: return ts.time_step_spec(self.observation_spec(), self.reward_spec())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def step_env(self):\n raise NotImplementedError\n # Not needed for this homework", "def __init__(self, step_time, step=None):\n self.step_vector = step\n self.step_time = step_time\n self.ref_timer = None", "def test_no_timesteps_property(self):\n expected_values = {\n...
[ "0.59744036", "0.5681818", "0.5647982", "0.5532288", "0.5392571", "0.53780484", "0.5341918", "0.53083825", "0.5307224", "0.5280149", "0.52758807", "0.5254811", "0.52457494", "0.5231521", "0.522465", "0.5220665", "0.5217766", "0.5196336", "0.51878935", "0.51844066", "0.5177493...
0.54468334
4
Returns the current timestep.
def current_time_step(self) -> ts.TimeStep: return self._current_time_step
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_time_step(self):\n return self._time_step", "def time_step(self):\n return self._time_step", "def GetTimeStep(self):\n time_step = None\n\n time_step = self._solver_collection.GetTimeStep()\n \n if not time_step is None:\n\n self.time_step = time_ste...
[ "0.87353057", "0.83974636", "0.81454396", "0.7974334", "0.7622381", "0.75591457", "0.7524713", "0.75237733", "0.75237733", "0.7476018", "0.7327006", "0.7309547", "0.73024344", "0.7277173", "0.7277173", "0.7277173", "0.7277173", "0.72636175", "0.7225894", "0.72174436", "0.7172...
0.9203299
0
Starts a new sequence and returns the first `TimeStep` of this sequence.
def reset(self) -> ts.TimeStep: self._current_time_step = self._reset() return self._current_time_step
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_first_step(self):\n return self.get_step_by_index(0)", "def start(self):\n\t\tself._start = time.clock()\n\t\tif self._initial is None:\n\t\t\tself._initial = self._start\n\t\treturn self", "def before(self, time: float) -> 'Trajectory':\n return self.split(time)[0]", "def start(self, s...
[ "0.6460188", "0.6099878", "0.59512615", "0.5906501", "0.5806284", "0.57064736", "0.5619724", "0.5561282", "0.5480415", "0.5464584", "0.5464028", "0.54418164", "0.5433138", "0.5427295", "0.5409704", "0.5409105", "0.5397404", "0.5380647", "0.5376489", "0.5375282", "0.53676325",...
0.5536652
8
Updates the environment according to the action and returns a `TimeStep`. If the environment returned a `TimeStep` with `StepType.LAST` at the previous step the implementation of `_step` in the environment should call `reset` to start a new sequence and ignore `action`. This method will start a new sequence if called after the environment has been constructed and `reset` has not been called. In this case `action` will be ignored. If `should_reset(current_time_step)` is True, then this method will `reset` by itself. In this case `action` will be ignored.
def step(self, action: types.NestedArray) -> ts.TimeStep: if self._current_time_step is None or self.should_reset( self._current_time_step ): return self.reset() self._current_time_step = self._step(action) return self._current_time_step
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def step(self, action):\n if self._reset_next_step:\n return self.reset()\n\n # Apply the game_rules\n for rule in self.game_rules:\n rule.step(self._state, self._meta_state)\n\n # Apply the action\n self.action_space.step(self._state, action)\n\n # S...
[ "0.6702035", "0.66238755", "0.64235044", "0.6258338", "0.62385756", "0.6237361", "0.6150759", "0.6134917", "0.61343175", "0.61312205", "0.60947925", "0.5976863", "0.5962188", "0.5953434", "0.59465617", "0.59439987", "0.58978987", "0.58788586", "0.58774835", "0.58655053", "0.5...
0.776358
0
Frees any resources used by the environment. Implement this method for an environment backed by an external process. This method be used directly ```python env = Env(...) Use env. env.close() ``` or via a context manager ```python
def close(self) -> None: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def close_env(self):\n self.env.close()", "def close(self): \n\t\tself.env.close()", "def terminate(self):\n super(ReacherEnv, self).close()", "def close(self):\n if self.__env:\n self.__env.close()\n self.__env = None\n super(DiskCache, self).close()", "de...
[ "0.7741044", "0.7280734", "0.71917665", "0.71788585", "0.67623675", "0.6581033", "0.6527342", "0.64914477", "0.6491418", "0.64707375", "0.6448349", "0.6402401", "0.6377555", "0.6377555", "0.63491684", "0.6225186", "0.6214336", "0.6168082", "0.61409414", "0.6053957", "0.605207...
0.0
-1
Allows the environment to be used in a withstatement context.
def __enter__(self): return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def visit_with(self: Parser, node: doc.With) -> None:\n with contextlib.ExitStack() as stack:\n stack.enter_context(self.var_table.with_frame())\n for item in node.items:\n frame = self.eval_expr(item.context_expr)\n if not isinstance(frame, Frame):\n self.repo...
[ "0.6312834", "0.61630744", "0.6097616", "0.60587364", "0.58667314", "0.58018786", "0.5615465", "0.5607413", "0.5506247", "0.5476283", "0.54457474", "0.5373202", "0.53038436", "0.5298539", "0.52976507", "0.52976507", "0.52566886", "0.5256446", "0.5254769", "0.52320963", "0.522...
0.49726027
52
Allows the environment to be used in a withstatement context.
def __exit__(self, unused_exception_type, unused_exc_value, unused_traceback): self.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def visit_with(self: Parser, node: doc.With) -> None:\n with contextlib.ExitStack() as stack:\n stack.enter_context(self.var_table.with_frame())\n for item in node.items:\n frame = self.eval_expr(item.context_expr)\n if not isinstance(frame, Frame):\n self.repo...
[ "0.6312834", "0.61630744", "0.6097616", "0.60587364", "0.58667314", "0.58018786", "0.5615465", "0.5607413", "0.5506247", "0.5476283", "0.54457474", "0.5373202", "0.53038436", "0.5298539", "0.52976507", "0.52976507", "0.52566886", "0.5256446", "0.5254769", "0.52320963", "0.522...
0.0
-1
Returns the environment info returned on the last step.
def get_info(self) -> types.NestedArray: raise NotImplementedError('No support of get_info for this environment.')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def environment(self) -> dict:\n return self._environment_info", "def get_environment(self):\r\n return self.mcas[0].get_environment()", "def environment(self) -> pulumi.Output['outputs.EnvironmentResponse']:\n return pulumi.get(self, \"environment\")", "def getEnvironment(self):\n ...
[ "0.72315943", "0.7073374", "0.6950367", "0.68680507", "0.67611235", "0.6643641", "0.66212034", "0.65775234", "0.65664876", "0.65062505", "0.64626306", "0.6376371", "0.6369222", "0.6312315", "0.6308322", "0.6302491", "0.62798655", "0.6228703", "0.62262505", "0.62212014", "0.61...
0.5656913
80
Returns the `state` of the environment. The `state` contains everything required to restore the environment to the current configuration. This can contain e.g. The current time_step. The number of steps taken in the environment (for finite horizon MDPs). Hidden state (for POMDPs). Callers should not assume anything about the contents or format of the returned `state`. It should be treated as a token that can be passed back to `set_state()` later. Note that the returned `state` handle should not be modified by the environment later on, and ensuring this (e.g. using copy.deepcopy) is the responsibility of the environment.
def get_state(self) -> Any: raise NotImplementedError( 'This environment has not implemented `get_state()`.' )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_state(self, state):\n return state", "def get_state(self):\n return self._env.get_state()", "def get_state(self) -> FrameState:\n assert self.__state is not None\n return self.__state", "def get_state(self):\n return self.env.sim.get_state()", "def get_state(self)...
[ "0.6438888", "0.63937896", "0.63231564", "0.6284206", "0.6247405", "0.6247405", "0.6247405", "0.6247405", "0.6247405", "0.61721283", "0.61721283", "0.61721283", "0.61721283", "0.61721283", "0.61721283", "0.61721283", "0.61721283", "0.61721283", "0.61721283", "0.61721283", "0....
0.60081613
85
Restores the environment to a given `state`. See definition of `state` in the documentation for get_state().
def set_state(self, state: Any) -> None: raise NotImplementedError( 'This environment has not implemented `set_state()`.' )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def restore_state(self, state: ale_py.ALEState):\n self.ale.restoreState(state)", "def restore_state(self, state):\n state_ref = self.ale.decodeState(state)\n self.ale.restoreState(state_ref)\n self.ale.deleteState(state_ref)", "def restore_full_state(self, state):\n state_re...
[ "0.77453834", "0.7433805", "0.7283735", "0.72225296", "0.68740267", "0.68357325", "0.6653472", "0.6638538", "0.6638538", "0.65565914", "0.6460083", "0.6392536", "0.62604415", "0.6251954", "0.62414163", "0.6240911", "0.62085205", "0.6192636", "0.6182717", "0.6182548", "0.61741...
0.6518785
10
Updates the environment according to action and returns a `TimeStep`. See `step(self, action)` docstring for more details.
def _step(self, action: types.NestedArray) -> ts.TimeStep:
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def step(self, action: types.NestedArray) -> ts.TimeStep:\n if self._current_time_step is None or self.should_reset(\n self._current_time_step\n ):\n return self.reset()\n\n self._current_time_step = self._step(action)\n return self._current_time_step", "def step(\n self,\n action: ...
[ "0.78822833", "0.7701546", "0.74653417", "0.735205", "0.73112786", "0.7296633", "0.72683334", "0.72642654", "0.72118324", "0.71811354", "0.7162281", "0.71551687", "0.71234053", "0.7118485", "0.70683175", "0.6997027", "0.6996414", "0.699088", "0.6952764", "0.6945095", "0.69395...
0.70150334
15
Starts a new sequence, returns the first `TimeStep` of this sequence. See `reset(self)` docstring for more details
def _reset(self) -> ts.TimeStep:
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset(self) -> ts.TimeStep:\n self._current_time_step = self._reset()\n return self._current_time_step", "def start(self):\n\t\tself._start = time.clock()\n\t\tif self._initial is None:\n\t\t\tself._initial = self._start\n\t\treturn self", "def get_first_step(self):\n return self.get_step_by_i...
[ "0.6659816", "0.64686", "0.61162376", "0.6072993", "0.60596496", "0.58604884", "0.56744826", "0.5647901", "0.5628664", "0.5623361", "0.56153256", "0.55805075", "0.5575825", "0.5544547", "0.54956526", "0.5452446", "0.5385656", "0.53372717", "0.53334737", "0.5326875", "0.532644...
0.5530532
14
This endpoint is used by the CLI to determines if the API is available or not.
def root(): return {}, 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def apicheck():\n\n async def predicate(ctx: commands.Context):\n travitia_keys = await ctx.bot.get_shared_api_tokens(\"travitia\")\n key = travitia_keys.get(\"api_key\") is None\n if ctx.invoked_with == \"help\" and key:\n return False\n if key:\n await ctx.sen...
[ "0.7066539", "0.6811271", "0.6799337", "0.6644127", "0.65873927", "0.6556331", "0.6556331", "0.6400296", "0.6316991", "0.62912995", "0.6280422", "0.6267303", "0.62298983", "0.6227067", "0.61904055", "0.61815757", "0.6173037", "0.6171916", "0.61550844", "0.6096815", "0.6094444...
0.0
-1
Check whether an access token is blacklisted or not.
def check_token_in_blacklist(decrypted_token): from .models import BlacklistToken jti = decrypted_token['jti'] if BlacklistToken.check_blacklist(jti): raise InvalidToken("Token is blacklisted. Please log in again.") return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_blacklisted(token):\n if Revoked.query.filter_by(token=token).first():\n return True\n return False", "def check_if_token_in_blacklist(decrypted_token):\n return (\n decrypted_token[\"jti\"] in BLACKLIST\n ) # if True, go to revoked_token_callback", "de...
[ "0.7900954", "0.74636424", "0.74054307", "0.73200846", "0.7160083", "0.71336967", "0.6999018", "0.697642", "0.67577994", "0.6653733", "0.6649965", "0.66363907", "0.66145855", "0.64979804", "0.6386341", "0.636993", "0.63569194", "0.6339204", "0.63004833", "0.62949896", "0.6245...
0.69158584
8
{get} / Easy check Easy for health check. Healthy 1.0.0
def index(): logging.debug('Healthy check.') pass # healthy check
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def health_check():\n app.logger.info(\"Health Check!\")\n return Response(\"All Good!\", status=200)", "def health_check(request):\n return Response(\"OK\",\n status=status.HTTP_200_OK)", "def health_check():\n # TODO: implement any other checking logic.\n return '', 200", ...
[ "0.80769634", "0.78025705", "0.7788893", "0.7768372", "0.7759578", "0.769682", "0.76605624", "0.75939524", "0.75455374", "0.7503516", "0.7423095", "0.7414183", "0.7406174", "0.73959845", "0.7341213", "0.73356587", "0.73113114", "0.72746754", "0.72720677", "0.7227638", "0.7220...
0.76294434
7
{get} /healthy Another easy check A path for another health check. Healthy 1.0.0
def index(): logging.debug('Healthy check.') pass # healthy check
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def healthcheck():\n return make_response(jsonify(status=200, message='Healthy'), status.HTTP_200_OK)", "def health_check():\n app.logger.info(\"Health Check!\")\n return Response(\"All Good!\", status=200)", "def health():\n global _is_healthy\n template = render_template('health.html', healthy...
[ "0.7397413", "0.73224634", "0.73210156", "0.7185449", "0.70526415", "0.70526195", "0.70261866", "0.68668026", "0.67948645", "0.6786084", "0.6785235", "0.6748028", "0.667565", "0.6667806", "0.6618256", "0.6600627", "0.658808", "0.65732", "0.6528593", "0.652601", "0.65006965", ...
0.672783
13
it makes the flow of a given input through the network, all data are stored in the layers "y" and "v"
def flow(input_): global number_of_neurons_by_layer if len(input_) != number_of_neurons_by_layer[0]: raise IndexError( f"\033[91mInput length is incorrect. It must be {number_of_neurons_by_layer[0]}.\033[m") layers[0]["y"][1:] = np.array(input_).flatten().reshape(len(input_), 1) for i_lay in range(1, len(layers)): layers[i_lay]["v"][:] = logistic( layers[i_lay]["weigths"] @ layers[i_lay-1]["y"] )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def trainNet():", "def forward(self, x):\n # sources保存特征图,loc与conf保存所有PriorBox的位置与类别预测特征\n sources = list()\n loc = list()\n conf = list()\n\n # 对输入图像卷积到conv4_3,将特征添加到sources中\n for k in range(23):\n x = self.vgg[k](x)\n\n s = self.L2Norm(x)\n so...
[ "0.6696856", "0.6663844", "0.6572504", "0.6489043", "0.6439873", "0.6390039", "0.63803667", "0.630682", "0.63015246", "0.6280952", "0.6269826", "0.62485385", "0.62225485", "0.62188786", "0.6218298", "0.6182807", "0.61798155", "0.61786693", "0.6172364", "0.61677724", "0.616357...
0.76247156
0
it computes the error vector between desired and obtained output, stored at the last layer
def error(input_, output): global number_of_neurons_by_layer if len(output) != number_of_neurons_by_layer[-1]: raise IndexError( f"\033[91mDesired output length is incorrect. It must be {number_of_neurons_by_layer[-1]}.\033[m") output = np.array(output).reshape(len(output), 1) flow(input_) layers[-1]["error"] = output - layers[-1]["v"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error2(input_, output):\n error(input_, output)\n layers[-1][\"error2\"] = layers[-1][\"error\"].T @ layers[-1][\"error\"]", "def getError(outputVector, targetVector):\r\n return np.sum((outputVector-targetVector)**2)", "def get_error(self, params):\n return self.endog - self.predict(pa...
[ "0.73773164", "0.7060913", "0.69024837", "0.68995315", "0.6740334", "0.66105175", "0.6591121", "0.6574008", "0.65447205", "0.653256", "0.65291804", "0.64597213", "0.6338442", "0.6333448", "0.6331635", "0.6329993", "0.6325577", "0.63097376", "0.63089925", "0.62953514", "0.6232...
0.73490536
1
it computes the sum of quadratic error of a given input, stored at the last layer
def error2(input_, output): error(input_, output) layers[-1]["error2"] = layers[-1]["error"].T @ layers[-1]["error"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __error(self, R, P, Q, K, beta):\n e = 0\n for i in xrange(len(R)):\n for j in xrange(len(R[i])):\n if R[i][j] > 0:\n\n # loss function error sum( (y-y_hat)^2 )\n e = e + pow(R[i][j]-numpy.dot(P[i,:],Q[:,j]), 2)\n\n ...
[ "0.6533413", "0.62851167", "0.61784077", "0.6051464", "0.6023184", "0.5898153", "0.5836756", "0.58310413", "0.58302575", "0.5818229", "0.57950467", "0.5793478", "0.5786754", "0.57682145", "0.57595587", "0.5749468", "0.5714533", "0.57093656", "0.5703172", "0.5694905", "0.56835...
0.5932937
5
it computes "delta" and "Delta_w"
def backpropagate(eta, momentum): for i_lay in range(len(layers)-1, 0, -1): lay = layers[i_lay] if i_lay == len(layers)-1: lay["delta"] = lay["error"] * dlogistic(lay["v"]) else: lay["delta"] = (layers[i_lay+1]["weigths"][:, 1:].T @ layers[i_lay+1] ["delta"]) * dlogistic(lay["v"]) lay["Delta_w"] = eta * lay["delta"] @ layers[i_lay - 1]["y"].T +\ momentum * lay["Delta_w"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getDelta(self,u,w,v=None):\r\n if v==None :\r\n return self._deltaDot[u,w]\r\n elif self._sigma[u,v]==0 or self._sigma[u,w]==0 or self._sigma[w,v]==0:\r\n return 0.0\r\n elif (self._d[u,v]==self._d[u,w]+self._d[w,v]):\r\n return 1.0 * self._sigma[u,w]*self....
[ "0.7014136", "0.67604864", "0.67501944", "0.6740648", "0.6670336", "0.65870315", "0.6544045", "0.6426848", "0.6362435", "0.6287381", "0.62232274", "0.6205806", "0.6161899", "0.6146763", "0.6138199", "0.6127331", "0.6109769", "0.60939455", "0.60689414", "0.6068591", "0.6068253...
0.0
-1
once you have "Delta_w", it makes $ w < w + Delta_w
def updateweigths(): for i_lay in range(1, len(layers)): layers[i_lay]["weigths"] += layers[i_lay]["Delta_w"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def optimise(w, w_delta):\n return w.assign(w - w_delta)", "def update_before(self, x: int, y: float, w: float) -> None:\n old_value = 0\n if x < len(self.x):\n old_value = self.y[x]\n self.update(x + 1, y, w)\n while len(self.x) < x:\n self.x.append(len(self....
[ "0.665439", "0.6648047", "0.5889416", "0.5889381", "0.588397", "0.5883008", "0.57802653", "0.5774275", "0.57511485", "0.57244766", "0.5699618", "0.56226575", "0.5588297", "0.5588012", "0.55859", "0.5545932", "0.55233616", "0.5523173", "0.55197215", "0.5486571", "0.5483729", ...
0.52951324
36
it gets the list of weigths
def getweigths(): ls = [] for i_lay in range(1, len(layers)): ls.append(layers[i_lay]["weigths"]) return ls
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def weights(self) -> List[float]:", "def get_weights(self):", "def show_rel_wt(list_obj):\r\n total = sum_list(list_obj)\r\n wt_list = []\r\n \r\n for num in list_obj:\r\n weight = int((num / total) * 100)\r\n wt_list.append(f\"{weight}%\")\r\n \r\n return wt_list", "def get_w...
[ "0.6625915", "0.62661403", "0.6248362", "0.6228295", "0.6228295", "0.6228295", "0.6188413", "0.61738515", "0.6153207", "0.6063258", "0.5998551", "0.5988195", "0.59823006", "0.59580696", "0.59580696", "0.59547997", "0.59507", "0.5947943", "0.5947943", "0.5919658", "0.58991927"...
0.78056127
0
it gets the list of "Delta_w"
def get_Delta_weigths(): ls = [] for i_lay in range(1, len(layers)): ls.append(layers[i_lay]["Delta_w"]) return ls
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getdelta(self):\n\t\tmyhmag.initializehelmholtz()\n\t\tabar = 13.714285714285715\n\t\tzbar = abar/2.0\n\t\tself.data[\"delta\"] = np.zeros(len(self.data[\"rho\"]))\n\t\tfor i in range(len(self.data[\"rho\"])):\n\t\t\tadgradred,hydrograd,my_nu,my_alpha,self.data[\"delta\"][i],my_gamma1,my_cp,my_cph,my_c_s,failt...
[ "0.6032407", "0.59571075", "0.57902676", "0.5779536", "0.5758633", "0.57385635", "0.56800616", "0.5667207", "0.5647853", "0.56092405", "0.5599284", "0.5591161", "0.55750877", "0.557497", "0.557401", "0.5565431", "0.55429536", "0.552723", "0.54701483", "0.54661304", "0.5458280...
0.79851145
0
it performs the cyclic mode of training
def train_cyclic(inputs, outputs, eta=0.55, maxit=1000, momentum=0.1, plot=False): global ERROR ERROR.clear() min_error = 100 ins_outs = list(zip(inputs, outputs)) counter = 0 while counter <= maxit: counter += 1 shuffle(ins_outs) for pair in ins_outs: i, o = pair error2(i, o) ERROR.append(layers[-1]["error2"].item()) try: if ERROR[-1] < min_error: min_error = ERROR[-1] optimal_w = getweigths() min_error_counter = counter print( f"Minimum error found = {min_error}, at counter = {min_error_counter}", end="\r") except: pass backpropagate(eta, momentum) updateweigths() setweigths(optimal_w) print(f"\vMinimum error reached at the {min_error_counter}st cycle") if plot: plt.plot(np.arange(len(ERROR)), ERROR, "b*-") plt.xlabel("Number of cycles") plt.ylabel("Sum of quadratic errors") plt.title("CYCLIC MODE\nERROR vs CYCLES") plt.grid() plt.show()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def train():\n pass", "def train(self):\n pass", "def train(self):\n pass", "def train(self):\n pass", "def train(self):\n pass", "def train(self):\n pass", "def train(self)->None:", "def train(self, training_steps=10):", "def train(self):\n return",...
[ "0.700375", "0.6836105", "0.6836105", "0.6836105", "0.6836105", "0.6836105", "0.6526964", "0.64998597", "0.6478274", "0.6437638", "0.64010245", "0.6325889", "0.6291698", "0.6285171", "0.6252986", "0.62359476", "0.6207061", "0.61956275", "0.6193689", "0.61917514", "0.61817634"...
0.0
-1
it performs the batch mode of training
def train_batch(inputs, outputs, eta=0.55, maxit=1000, momentum=0.1, plot=False): global ERROR ERROR.clear() min_error = 100 ins_outs = list(zip(inputs, outputs)) counter = 0 while counter <= maxit: counter += 1 shuffle(ins_outs) Dws = [] errors = [] for pair in ins_outs: i, o = pair error2(i, o) errors.append(layers[-1]["error2"].item()) ws = getweigths() backpropagate(eta, momentum) Dws.append(get_Delta_weigths()) setweigths(ws) ERROR.append(sum(errors)) try: if ERROR[-1] < min_error: min_error = ERROR[-1] optimal_w = getweigths() min_error_counter = counter print( f"Minimum error found = {min_error}, at counter = {min_error_counter}", end="\r") except: pass Delta_w = [] for ws in range(len(Dws[0])): Delta_w.append( sum( [Dws[pattern][ws] for pattern in range(len(ins_outs))] ) ) set_Delta_weigths(Delta_w) updateweigths() setweigths(optimal_w) print(f"\vMinimum error reached at the {min_error_counter}st cycle") if plot: plt.plot(np.arange(len(ERROR)), ERROR, "b*-") plt.xlabel("Number of cycles") plt.ylabel("Sum of quadratic errors") plt.title("BATCH MODE:\nERROR vs CYCLES") plt.grid() plt.show()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def train(self, batch):\n pass", "def train(self, num_batches: int):", "def train_test_model_batch():\n train=learning.Train_kmer_clf()\n train.run()", "def train(self, batch_training=False):\n raise NotImplementedError", "def train_next_batch(self, batch_size=None):", "def epoch(self...
[ "0.846066", "0.8322821", "0.77974415", "0.7654369", "0.76157534", "0.7293694", "0.7097855", "0.7097493", "0.7072412", "0.7032535", "0.70106894", "0.70053935", "0.6989753", "0.69883996", "0.69795567", "0.69641083", "0.69641083", "0.69641083", "0.69641083", "0.69641083", "0.696...
0.0
-1
Sets/clears a software breakpoint address > the address of the software breakpoint instruction > the instruction to be programmed (either the software breakpoint opcode or the original instruction the software breakopint was replacing). flags > One or more of the SWBPFlags listed below returns the original/old opcode at address
def set_sw_bp(address, instruction, flags): log.info("Debug:: set/remove bp at address 0x%0x, instructions 0x%0x, flags = 0x%0x" % ( address, instruction, flags)) # Accept addressing both from FLASH_START and from 0x0 addr = address & (FLASH_START-1) single_page_access = False buffer_size = PAGE_SIZE * 16 # Canopus: single page read-modify-write is possible within the first 16kb of flash. # SAMRH71: single page read-modify-write is possible in whole flash. if addr < 16384 or "RH71" in device: buffer_size = PAGE_SIZE single_page_access = True buffer_mask = long(buffer_size-1) data_buffer = bytearray(buffer_size) # Get the start address to the flash page(es) we need to erase start_addr = addr & ~(buffer_mask) absolute_start_addr = address & ~(buffer_mask) # Get BP address within the buffer bp_addr = addr & buffer_mask prog_read("pgm", absolute_start_addr, buffer_size, data_buffer) org_inst = 0 n = 0 # Replace instruction in data_buffer while(n < 2): org_inst += data_buffer[bp_addr+n] << (n*8) data_buffer[bp_addr+n] = ((instruction >> (n*8)) & 0xff) n = n+1 if single_page_access: if "RH71" in device: # Remove flash offset, if any, and mask away page internal address bits. # FARG bitfield in EFC_FCR page_number = addr & 0x3fff00 # SAMRH71 has page_size 256 # Erase and write page (two separate commands on SAMRH71) dev.Write32(efc_fcr, efc_cmd_ep | page_number) waitForFlashReady() dev.Write(start_addr, data_buffer, 0, PAGE_SIZE) dev.Write32(efc_fcr, efc_cmd_wp | page_number) waitForFlashReady() else: dev.Write(start_addr, data_buffer, 0, PAGE_SIZE) # Remove flash offset, if any, and mask away page internal address bits. # Then shift right once to position page_number in the FARG bitfield in EFC_FCR page_number = (addr & 0x3ffe00)/2 # Canopus has page_size 512 # Erase and write page (one single command on Canopus) dev.Write32(efc_fcr, efc_cmd_ewp | page_number) waitForFlashReady() else: # Erase 16 pages (16pages == buffer_size). 
The "0x200" sets the number of pages to erase. dev.Write32(efc_fcr, efc_cmd_epa | (start_addr >> 1) | 0x200) waitForFlashReady() prog_write("Pgm", absolute_start_addr, buffer_size, data_buffer) return org_inst
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_breakpoint(self, addr: int) -> Optional[Breakpoint]:\n if not self.enabled:\n self.enable()\n\n if not self.can_support_address(addr):\n LOG.error('Breakpoint out of range 0x%X', addr)\n return None\n\n if self.available_breakpoints == 0:\n L...
[ "0.5881102", "0.5785291", "0.5218082", "0.5208455", "0.5201885", "0.51153344", "0.5073578", "0.50049704", "0.49999252", "0.4934474", "0.48571247", "0.4838905", "0.48162797", "0.47711107", "0.47597492", "0.4733747", "0.46354747", "0.46129856", "0.46129563", "0.46084633", "0.45...
0.65059483
0
Wrapper around the Node structure of tree for inserting, querying
def __init__(self, theta, k, num_buckets, fp_size, bucket_size, max_iter):
    """Wrapper around the tree's Node structure for inserting and querying.

    :param theta: fraction of query k-mers that must hit a node's filter
        for the search to descend into it (see query())
    :param k: k-mer length
    :param num_buckets: cuckoo-filter parameter forwarded to every Node
    :param fp_size: cuckoo-filter fingerprint size (forwarded to Nodes)
    :param bucket_size: cuckoo-filter bucket size (forwarded to Nodes)
    :param max_iter: cuckoo-filter max relocation iterations (forwarded)
    """
    self.root: Optional[Node] = None
    self.theta: float = theta
    self.k: int = k
    self.num_buckets = num_buckets
    self.fp_size = fp_size
    self.bucket_size = bucket_size
    self.max_iter = max_iter
    # Running byte total for the whole tree; starts with this wrapper's
    # own configuration overhead (must be computed after the fields above
    # exist) and grows as nodes are inserted.
    self.aggregate_size = self.get_insternal_size()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def binary_tree():\n\n class Node(object):\n def __init__(self, data):\n self.left = None\n self.right = None\n self.data = data\n\n # Create a root\n root = Node(data=1)\n root.left = Node(data=2)\n root.right = Node(data=3)\n root.left.left = Node(data=4)...
[ "0.71559274", "0.6676784", "0.6605543", "0.6523582", "0.650738", "0.6498414", "0.64883316", "0.648779", "0.64652824", "0.6465034", "0.6465034", "0.64216363", "0.64207244", "0.6376458", "0.6372433", "0.63648695", "0.6343655", "0.63167363", "0.6310797", "0.6310797", "0.63025945...
0.0
-1
Creates a new node from this read and adds it into the tree
def insert(self, dataset: List[Read]) -> bool:
    """Create a leaf Node for *dataset* and insert it into the tree.

    Walks down from the root, at each 2-child node descending into the
    child whose filter shares the most k-mers with the dataset (lowest
    score), and adds the dataset's k-mers to every node passed on the way.

    :param dataset: reads whose k-mers describe the new leaf
    :return: True on successful insertion
    :raises Exception: if the walk terminates without inserting
        (should be unreachable)
    """
    node_to_insert = Node(self.k, self.num_buckets, self.fp_size,
                          self.bucket_size, self.max_iter)
    node_to_insert.populate_dataset_info(dataset)
    self.aggregate_size += node_to_insert.get_size()
    # Empty tree: the new leaf becomes the root.
    if self.root is None:
        self.root = node_to_insert
        return True
    parent = None
    current = self.root
    while current:
        if current.num_children() == 0:
            # current is a leaf representing a dataset, so create a new
            # parent that contains node_to_insert and current as children.
            new_parent = Node(self.k, self.num_buckets, self.fp_size,
                              self.bucket_size, self.max_iter)
            self.aggregate_size += new_parent.get_size()
            new_parent.parent = parent
            # Kmers from existing and new leaf
            new_parent.filter = deepcopy(current.filter)
            new_parent.insert_kmers_from_dataset(dataset)
            # Set appropriate parent/child pointers
            current.parent = new_parent
            node_to_insert.parent = new_parent
            new_parent.children.append(current)
            new_parent.children.append(node_to_insert)
            # Special case where root is a leaf
            if parent is None:
                # current is root -> new_parent is now root
                self.root = new_parent
                return True
            # Set new_parent as child of old parent
            idx = parent.children.index(current)
            parent.children[idx] = new_parent
            return True
        elif current.num_children() == 1:
            # insert kmers
            current.insert_kmers_from_dataset(dataset)
            # we found an empty slot to insert into
            current.children.append(node_to_insert)
            return True
        elif current.num_children() == 2:
            # insert kmers
            current.insert_kmers_from_dataset(dataset)
            # select "best" child (lower score == more k-mers in common)
            score_0 = current.children[0].score(dataset)
            score_1 = current.children[1].score(dataset)
            best_child = 0 if score_0 < score_1 else 1
            # recur
            parent = current
            current = current.children[best_child]
    raise Exception("Did not insert successfully!")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_node(self, node):", "def add_node(self, name):\n for node in self.get_children():\n if node.read_name() == name:\n break\n else:\n root = self.get_sobj()\n sobj = self._bld.NewObject(root)\n node = self.__class__(self._std, self._bl...
[ "0.69235027", "0.68231606", "0.6610997", "0.66002446", "0.6579251", "0.6456872", "0.6415634", "0.6410044", "0.63237524", "0.63021314", "0.62788886", "0.6263254", "0.6262798", "0.62442696", "0.6242336", "0.62385815", "0.62191033", "0.6210085", "0.62071997", "0.61693835", "0.61...
0.0
-1
Perform a BFS of the tree and collect the reads that pass the similarity test.
def query(self, query: str) -> List[str]:
    """Breadth-first search for datasets similar to *query*.

    A node is explored further only when at least ``theta`` of the
    query's k-mers are present in its filter; leaves that pass the
    threshold contribute their dataset id to the result.

    :param query: sequence to look up
    :return: dataset ids of all leaves passing the similarity test
    """
    matches: List[str] = []
    pending = deque([self.root])
    while pending:
        node = pending.popleft()
        hits = 0
        total = 0
        for kmer in kmers_in_string(query, self.k):
            total += 1
            if node.filter.contains(kmer):
                hits += 1
        # Threshold passed: descend into children; leaves are results.
        if hits >= self.theta * total:
            pending.extend(node.children)
            if node.num_children() == 0:
                matches.append(node.dataset_id)
    return matches
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_scan_recursive(self):\n self.run_scan(self.tempdir, self.root_fcount + self.nest_fcount + 1)", "def _analyze(self, node, visited = set([])):\n for ch in self._get_children(node):\n if ch not in visited:\n visited.add(ch)\n # In this first sweep I wa...
[ "0.5999934", "0.54931384", "0.5351289", "0.5340773", "0.5317956", "0.5283371", "0.5265533", "0.52155614", "0.51734805", "0.51664525", "0.5139534", "0.5117433", "0.50639457", "0.50228846", "0.5016315", "0.50101423", "0.50033206", "0.49670547", "0.49583322", "0.49560136", "0.49...
0.0
-1
A wrapper for backward comptibility with other data structure implementations
def contains(self, query):
    """Backward-compatibility wrapper used by other data-structure APIs.

    Delegates to query(); the returned list of matching dataset ids is
    truthy exactly when at least one dataset passes the similarity test.
    """
    return self.query(query)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n dict.__init__(self)\n self.datatype = None", "def _get_to_actual_data(raw):\n raise NotImplemented", "def __array__(self):\n return dict2rec(self)", "def __getitem__(self):\n pass", "def makeFMData(from_dict, locked=False):\n\n class FMData(object...
[ "0.5697675", "0.54845077", "0.5461735", "0.54345965", "0.5431139", "0.54021233", "0.5388815", "0.5388815", "0.53862596", "0.53862596", "0.5333713", "0.5331575", "0.53289396", "0.5323391", "0.5321574", "0.53174174", "0.5310203", "0.53006154", "0.52964413", "0.5296287", "0.5275...
0.0
-1
Returns the total number of bytes occupied by the filter object
def get_insternal_size(self):
    """Total bytes occupied by this wrapper's scalar configuration fields.

    Note: sys.getsizeof is shallow — this counts only the six config
    objects themselves.
    """
    config_fields = (
        self.theta,
        self.num_buckets,
        self.k,
        self.fp_size,
        self.max_iter,
        self.bucket_size,
    )
    return sum(sys.getsizeof(field) for field in config_fields)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __len__(self):\n return sum(f.count for f in self.filters)", "def container_size(self):\n import cPickle\n import sys\n t = cPickle.dumps(self.filter_bitarray)\n return sys.getsizeof(t)", "def capacity(self):\n return sum(f.capacity for f in self.filters)", "def ...
[ "0.78136486", "0.73444617", "0.73030394", "0.7215649", "0.6866789", "0.6819548", "0.67372525", "0.6728737", "0.67182195", "0.66980165", "0.6695262", "0.6689676", "0.6685823", "0.6674033", "0.6666413", "0.665279", "0.6636472", "0.6635328", "0.6619961", "0.6601492", "0.6596799"...
0.0
-1
Represents a single node of Cuckoo Tree.
def __init__(self, k, num_buckets, fp_size, bucket_size, max_iter):
    """A single Cuckoo Tree node: a cuckoo filter plus parent/child links.

    :param k: k-mer length used when scoring datasets against this node
    :param num_buckets: CuckooFilterBit bucket count
    :param fp_size: CuckooFilterBit fingerprint size
    :param bucket_size: CuckooFilterBit bucket size
    :param max_iter: CuckooFilterBit max relocation iterations
    """
    self.children: List[Node] = []  # at most two children (see tree insert logic)
    self.parent: Optional[Node] = None
    self.filter = CuckooFilterBit(num_buckets, fp_size, bucket_size, max_iter)
    # Identifier of the dataset this node represents; presumably set only
    # on leaves by populate_dataset_info — TODO confirm.
    self.dataset_id: Optional[str] = None
    self.k = k
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tree(self) -> Node:\n return Node(self.to_string())", "def node(self):\n return Node(self)", "def __repr__(self):\n return 'TreeNode({0})'.format(self.data)", "def __repr__(self):\n return 'Node({!r})'.format(self.data)", "def node(self):\n return self._node", "def ...
[ "0.7352759", "0.7162796", "0.698228", "0.6661938", "0.65606713", "0.65606713", "0.65407693", "0.65407693", "0.64917505", "0.6490089", "0.6484594", "0.64840937", "0.64418876", "0.6436587", "0.6350738", "0.6347878", "0.6335503", "0.6335503", "0.63311744", "0.6306258", "0.629181...
0.0
-1
"Hamming distance" score where lower is better
def score(self, dataset: List[Read]) -> int:
    """"Hamming distance"-style score of *dataset* against this node's filter.

    Lower is better: the count of the dataset's k-mers found in the
    filter is subtracted from the filter's total item count.

    :param dataset: reads to score
    :return: number of filter items NOT shared with the dataset
    """
    shared = sum(
        1
        for read in dataset
        for kmer in read.kmers(self.k)
        if self.filter.contains(kmer)
    )
    return self.filter.num_items_in_filter - shared
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hamming_dist(s1, s2):\n\n if s1 is None or s2 is None:\n return np.NaN\n if pd.isnull(s1) or pd.isnull(s2):\n return np.NaN\n\n # Create the similarity measure object\n measure = sm.HammingDistance()\n\n s1 = gh.convert_to_str_unicode(s1)\n s2 = gh.convert_to_str_unicode(s2)\n\n...
[ "0.7202649", "0.71405065", "0.7085281", "0.7053139", "0.6996998", "0.6948701", "0.69338876", "0.69194096", "0.6818797", "0.6787892", "0.6770964", "0.6747005", "0.6712575", "0.6708986", "0.6693751", "0.6689196", "0.6669471", "0.665242", "0.66389847", "0.6632668", "0.6616107", ...
0.0
-1
Returns the total number of bytes occupied by the filter object
def get_size(self):
    """Total bytes occupied by this node, including its cuckoo filter.

    sys.getsizeof is shallow, so children/parent are counted as
    container objects only; the filter reports its own size.
    """
    own_fields = (self.children, self.parent, self.dataset_id, self.k)
    own_bytes = sum(sys.getsizeof(field) for field in own_fields)
    return own_bytes + self.filter.get_size()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __len__(self):\n return sum(f.count for f in self.filters)", "def container_size(self):\n import cPickle\n import sys\n t = cPickle.dumps(self.filter_bitarray)\n return sys.getsizeof(t)", "def capacity(self):\n return sum(f.capacity for f in self.filters)", "def ...
[ "0.78136486", "0.73444617", "0.73030394", "0.6866789", "0.6819548", "0.67372525", "0.6728737", "0.67182195", "0.66980165", "0.6695262", "0.6689676", "0.6685823", "0.6674033", "0.6666413", "0.665279", "0.6636472", "0.6635328", "0.6619961", "0.6601492", "0.6596799", "0.6581194"...
0.7215649
3
Change the position of the turtle.
def setposition(self, x, y, bearing=None):
    """Move the turtle to (x, y), clamped to the canvas bounds.

    :param x: target x coordinate
    :param y: target y coordinate
    :param bearing: optional new bearing; when given (int) the turtle is
        rotated via setbearing() instead of just recording the point
    :raises ValueError: if bearing is neither None nor an integer
    """
    self.posX = x
    self.posY = y
    # Self-check: keep the position inside the canvas.
    if self.posX < self._min_x:
        self.posX = self._min_x
    if self.posY < self._min_y:
        # BUG FIX: this branch previously assigned self._max_y, teleporting
        # the turtle to the opposite edge when it moved below the bottom
        # bound; clamp to the minimum like the x axis does.
        self.posY = self._min_y
    if self.posX > self._max_x:
        self.posX = self._max_x
    if self.posY > self._max_y:
        self.posY = self._max_y
    if bearing is None:
        self._add_point()
    elif isinstance(bearing, int):
        self.setbearing(bearing)
    else:
        raise ValueError("Bearing must be an integer")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def repositionTurtle(t, x, y):\n t.up()\n t.goto(x, y)\n t.down()", "def goto(x, y):\n turtleTmp.setposition(x, y)", "def setTurtle(t):\r\n t.pu()\r\n t.goto(initialCoordinates())", "def move_turtle(self):\n self.forward(self.move_speed)", "def set_position(self, x, y):\n se...
[ "0.7971479", "0.7698009", "0.73912793", "0.71698403", "0.70017356", "0.69530845", "0.6844458", "0.6767161", "0.6726359", "0.6678807", "0.651918", "0.651298", "0.64901376", "0.64820397", "0.6479972", "0.6466013", "0.6383914", "0.63821214", "0.6373982", "0.63284934", "0.6324661...
0.0
-1
Change the bearing (angle) of the turtle.
def setbearing(self, bearing):
    """Rotate the turtle to an absolute bearing (angle).

    The size of the turn is exposed through ``b_change`` while the new
    point is recorded, then reset to zero.
    """
    self.b_change = self.bearing - bearing  # turn size seen by _add_point()
    self.bearing = bearing
    self._add_point()
    self.b_change = 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bearing(self, value: int):\n self._bearing = value", "def set_angle(self, ang):\n if ang < 0:\n ang = 0\n elif ang > 180:\n ang = 180\n dutyCycle = 5 + (ang*5/180)\n self.servoPort.ChangeDutyCycle(dutyCycle)", "def setAngle(self,angle = 2.5):\n ...
[ "0.6930348", "0.6852178", "0.6837454", "0.67657924", "0.6641104", "0.66320354", "0.66257477", "0.6583451", "0.65234107", "0.64924616", "0.64834297", "0.64331305", "0.63996845", "0.6354722", "0.6261629", "0.6239155", "0.62279516", "0.62081057", "0.62069297", "0.61839217", "0.6...
0.6944382
0
Draw a circle, or part of a circle. From its current position, the turtle will draw a series of short lines, turning slightly between each. If radius is positive, it will turn to its left; a negative radius will make it turn to its right.
def circle(self, radius, extent=360):
    """Draw an arc of *extent* degrees as a series of short 2-degree steps.

    A positive radius turns the turtle left; a negative radius turns it
    right.

    :param radius: arc radius; the sign selects the turn direction
    :param extent: arc angle in degrees (default 360, a full circle)
    """
    temp = self.bearing
    self.b_change = 0;
    tempSpeed = self.speedVar
    # Draw at full speed; the caller's speed is restored at the end.
    self.speedVar = 1

    # Each 2-degree step advances by the corresponding arc length;
    # presumably left()/right() update self.b_change — TODO confirm.
    for i in range(0, (extent//2)):
        n = math.fabs(math.radians(self.b_change) * radius)
        if(radius >= 0):
            self.forward(n)
            self.left(2)
        else:
            self.forward(n)
            self.right(2)
    # Set the final bearing directly from the requested extent.
    if(radius >= 0):
        self.bearing = (temp + extent)
    else:
        self.bearing = (temp - extent)
    self.speedVar = tempSpeed
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_circle(c):\n turtle.circle(c.radius)", "def draw_circle(c):\n turtle.circle(c.radius)", "def drawCircle(t, x, y, radius):\r\n t.up()\r\n t.goto(x + radius, y)\r\n t.setheading(90)\r\n t.down()\r\n for count in range(120):\r\n t.left(3)\r\n t.forward(2.0 * math.pi * r...
[ "0.77814776", "0.77814776", "0.77214444", "0.74263126", "0.74239993", "0.7414104", "0.7346996", "0.72157526", "0.69786805", "0.6978031", "0.69246596", "0.6809172", "0.6785606", "0.6784033", "0.6783516", "0.6775677", "0.67311746", "0.67182976", "0.669799", "0.6684738", "0.6618...
0.6391658
34
this method is called by an admin user to approve the lyrics of a song
def approve_lyrics():
    """Admin action: approve the lyrics of a song.

    Placeholder — not implemented yet.
    """
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_lyrics_approved():", "def approve (self, response) :\n if 'event' in response and 'moderator' in response :\n eventId = response ['event']\n userId = response ['moderator']\n else :\n raise ModerationError (response)\n\n mod_status = 'OK'\n if 'status' in response :\n ...
[ "0.68488747", "0.56438977", "0.55508363", "0.55483466", "0.5504152", "0.5491618", "0.54619044", "0.54378915", "0.54029", "0.5392849", "0.53796095", "0.5319239", "0.52956706", "0.5290236", "0.5269116", "0.52477455", "0.5243103", "0.5234158", "0.52026176", "0.5178342", "0.51669...
0.8136522
0
This method is called to check if a song already has lyrics so as to avoid duplicity of lyrics
def song_has_lyrics():
    """Check whether a song already has lyrics, to avoid duplicates.

    Placeholder — not implemented yet.
    """
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_existing_lyrics(self, song_id):\n\t\tlyrics = self.db.lyrics.find_one({'song_id': song_id})['lyrics']\n\t\treturn lyrics", "def add_lyrics(self):\n\n conn = self.conn\n conn.text_factory = str\n c = conn.cursor()\n\n c.execute(\"SELECT songs.id, artist, title, url FROM songs L...
[ "0.67191", "0.6461495", "0.64484215", "0.62452585", "0.6116291", "0.6073265", "0.603493", "0.60067284", "0.59900224", "0.59794277", "0.5927802", "0.5798049", "0.56955206", "0.56919104", "0.56533647", "0.5607341", "0.5586889", "0.55832136", "0.553297", "0.5515789", "0.54869497...
0.7237534
0
This is called to compare a lyrics note to the original to ensure they are not the same..if they are , such a lyrics note is rejected
def lyrics_note_is_same_as_original():
    """Compare a lyrics note to the original; identical notes are rejected.

    Placeholder — not implemented yet.
    """
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _check_duplicate_notes(self, tokens, curr_note, step) -> bool:\n same_note_cnt = 0\n idx = step - 3\n while idx > 0:\n prev_note = self._get_num(self.tgt_dict.string(tokens[0, idx : idx + 1]))\n if prev_note != curr_note:\n break...
[ "0.6234901", "0.6063626", "0.6018202", "0.59463143", "0.57870716", "0.5715159", "0.57044494", "0.56992126", "0.5657637", "0.5643236", "0.5580238", "0.5577914", "0.5575537", "0.5558365", "0.5553078", "0.5549295", "0.5545337", "0.54852504", "0.54813206", "0.54713225", "0.546813...
0.82412505
0
Checks if the lyrics has been approved or not
def is_lyrics_approved():
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def approve_lyrics():\n pass", "def song_has_lyrics():\n pass", "def is_approved(self) -> bool:\n return self.state == Order.OrderState.APPROVED.choice_value", "def approve_tweet(worker_responses):\n approvals = [len(get_tweet_text(response)) > 0 for response in worker_responses]\n return ...
[ "0.7582891", "0.6132226", "0.5762543", "0.5721694", "0.56735605", "0.5672874", "0.5540766", "0.5498608", "0.54710966", "0.5407162", "0.5385652", "0.53676486", "0.53605825", "0.5347708", "0.53281146", "0.5326186", "0.53110224", "0.5303602", "0.5287871", "0.5286905", "0.5272448...
0.8734696
0
r"""Calculate the cold plasma dispersion surfaces according to equation 2.64 in Plasma Waves by Swanson (2nd ed.)
def disp_surf_calc(kc_x_max, kc_z_max, m_i, wp_e):
    """Calculate cold plasma dispersion surfaces (Swanson, Plasma Waves,
    2nd ed., eq. 2.64).

    :param kc_x_max: upper bound of the perpendicular wave-number grid
    :param kc_z_max: upper bound of the parallel wave-number grid
    :param m_i: ion-to-electron mass ratio
    :param wp_e: electron plasma frequency (presumably normalized so the
        electron gyro frequency is 1, since wc_i = 1/m_i — TODO confirm)
    :return: (kx, kz, w_surfaces, extra_param) — w_surfaces holds the ten
        sorted polynomial roots per grid point and extra_param maps
        diagnostic names to arrays transposed to (surface, kx, kz)
    """
    # Make vectors of the wave numbers
    kc_z = np.linspace(1e-6, kc_z_max, 35)
    kc_x = np.linspace(1e-6, kc_x_max, 35)

    # Turn those vectors into matrices
    kc_x_mat, kc_z_mat = np.meshgrid(kc_x, kc_z)

    # Find some of the numbers that appear later in the calculations
    kc_ = np.sqrt(kc_x_mat ** 2 + kc_z_mat ** 2)  # Absolute value of k
    theta_ = np.arctan2(kc_x_mat, kc_z_mat)  # The angle between k and B
    wc_i = 1 / m_i  # The ion gyro frequency
    wp_i = wp_e / np.sqrt(m_i)  # The ion plasma frequency
    wp_ = np.sqrt(wp_e ** 2 + wp_i ** 2)  # The total plasma frequency

    # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    # For every k_perp and k_par, turn the dispersion relation into a
    # polynomial equation and solve it.
    # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    # The polynomial coefficients are calculated
    pol_koeff_8 = -2 * kc_ ** 2
    pol_koeff_8 -= (1 + wc_i ** 2 + 3 * wp_ ** 2) * np.ones(kc_.shape)
    pol_koeff_6 = (2 * kc_ ** 2 + wp_ ** 2) * (1 + wc_i ** 2 + 2 * wp_ ** 2)
    pol_koeff_6 += kc_ ** 4 + (wp_ ** 2 + wc_i) ** 2
    pol_koeff_4 = -kc_ ** 4 * (1 + wc_i ** 2 + wp_ ** 2)
    pol_koeff_4 -= 2 * kc_ ** 2 * (wp_ ** 2 + wc_i) ** 2
    pol_koeff_4 -= (kc_ * wp_) ** 2 * (1 + wc_i ** 2 - wc_i) * (
            1 + np.cos(theta_) ** 2)
    pol_koeff_4 -= wp_ ** 2 * (wp_ ** 2 + wc_i) ** 2
    pol_koeff_2 = kc_ ** 4 * (wp_ ** 2 * (1 + wc_i ** 2 - wc_i) * np.cos(
        theta_) ** 2 + wc_i * (wp_ ** 2 + wc_i))
    pol_koeff_2 += kc_ ** 2 * wp_ ** 2 * wc_i * (wp_ ** 2 + wc_i) * (
            1 + np.cos(theta_) ** 2)
    pol_koeff_0 = -kc_ ** 4 * wc_i ** 2 * wp_ ** 2 * np.cos(theta_) ** 2

    # Ten roots per grid point (degree-10 polynomial in w).
    w_final = np.zeros((10, len(kc_z), len(kc_x)))

    # For each k, solve the equation
    for k_z, k_x in itertools.product(range(len(kc_z)), range(len(kc_x))):
        disp_polynomial = [1, 0, pol_koeff_8[k_z, k_x], 0,
                           pol_koeff_6[k_z, k_x], 0, pol_koeff_4[k_z, k_x],
                           0, pol_koeff_2[k_z, k_x], 0, pol_koeff_0[k_z, k_x]]
        # theoretically should be real (A. Tjulin)
        w_temp = np.real(np.roots(disp_polynomial))
        # We need to sort the answers to get nice surfaces.
        w_final[:, k_z, k_x] = np.sort(w_temp)

    # Refractive index squared and phase velocity (in units of c, then v_A).
    n2_ = kc_ ** 2 / w_final ** 2
    v_ph_c = np.sqrt(1. / n2_)
    va_c = 1 / (wp_e * np.sqrt(m_i))
    v_ph_va = v_ph_c / va_c

    diel_tensor = _calc_diel(kc_, w_final, theta_, wp_e, wp_i, wc_i)

    e_x, e_y, e_z, e_per, e_tot, e_pol = _calc_e(diel_tensor)
    e_par = (kc_x_mat * e_x + kc_z_mat * e_z) / kc_

    b_x, b_y, b_z, b_par, b_per, b_pol, b_tot = _calc_b(kc_x_mat, kc_z_mat,
                                                        w_final, e_x, e_y, e_z)

    # Group velocity via finite differences of the surfaces.
    # NOTE(review): dk_x/dk_z are full meshgrid rows, not scalar spacings;
    # this relies on broadcasting over a uniform grid — TODO confirm.
    dk_x, dk_z = [kc_x_mat[1], kc_z_mat[1]]
    dw_x, dw_z = [np.zeros(w_final.shape) for _ in range(2)]
    dw_x[:, :, 1:] = np.diff(w_final, axis=2)
    dw_z[:, 1:, :] = np.diff(w_final, axis=1)
    v_x, v_z = [dw_ / dk for dw_, dk in zip([dw_x, dw_z], [dk_x, dk_z])]

    s_par, s_tot = _calc_s(e_x, e_y, e_z, b_x, b_y, b_z)

    # Compute ion and electron velocities
    v_ex, v_ey, v_ez, v_ix, v_iy, v_iz = _calc_vei(m_i, wc_i, w_final,
                                                   e_x, e_y, e_z)

    # Ratio of parallel and perpendicular to B speed
    vepar_perp = v_ez * np.conj(v_ez)
    vepar_perp /= (v_ex * np.conj(v_ex) + v_ey * np.conj(v_ey))
    vipar_perp = v_iz * np.conj(v_iz)
    vipar_perp /= (v_ix * np.conj(v_ix) + v_iy * np.conj(v_iy))

    # Total particle speeds
    v_e2 = v_ex * np.conj(v_ex) + v_ey * np.conj(v_ey) + v_ez * np.conj(v_ez)
    v_i2 = v_ix * np.conj(v_ix) + v_iy * np.conj(v_iy) + v_iz * np.conj(v_iz)

    # Ion and electron energies (m_e = -1 in this normalization)
    m_e = -1
    en_e = 0.5 * m_e * v_e2
    en_i = 0.5 * m_i * v_i2

    # Ratio of particle and field energy densities
    ratio_part_field = _calc_part2fields(wp_e, en_e, en_i, e_tot, b_tot)

    # Continuity equation
    dn_e_n, dn_i_n, dne_dni = _calc_continuity(kc_x_mat, kc_z_mat, w_final,
                                               v_ex, v_ez, v_ix, v_iz)

    dn_e_n_db_b = dn_e_n / b_tot
    dn_i_n_db_b = dn_i_n / b_tot

    dn_e_n_dbpar_b = dn_e_n / b_par
    dn_i_n_dbpar_b = dn_i_n / b_par

    dn_e = dn_e_n * wp_e ** 2
    k_dot_e = e_x * kc_x_mat + e_z * kc_z_mat
    k_dot_e = np.sqrt(k_dot_e * np.conj(k_dot_e))

    # Build output dict
    extra_param = {"Degree of electromagnetism": np.log10(b_tot / e_tot),
                   "Degree of longitudinality": np.abs(e_par) / e_tot,
                   "Degree of parallelity E": e_z / e_tot,
                   "Degree of parallelity B": np.sqrt(
                       b_z * np.conj(b_z)) / b_tot,
                   "Ellipticity E": e_pol, "Ellipticity B": b_pol,
                   "E_part/E_field": np.log10(ratio_part_field),
                   "v_g": np.sqrt(v_x ** 2 + v_z ** 2),
                   "v_ph/v_a": np.log10(v_ph_va),
                   "E_e/E_i": np.log10(en_e / en_i),
                   "v_e/v_i": np.log10(np.sqrt(v_e2 / v_i2)),
                   "v_epara/v_eperp": np.log10(vepar_perp),
                   "v_ipara/v_iperp": np.log10(vipar_perp),
                   "dn_e/dn_i": np.log10(dne_dni),
                   "(dn_e/n)/ (dB/B)": np.log10(dn_e_n_db_b),
                   "(dn_i/n)/(dB/B)": np.log10(dn_i_n_db_b),
                   "(dn_i/n)/(dBpar/B)": np.log10(dn_i_n_dbpar_b),
                   "(dn_e/n)/(dB/B)": np.log10(dn_e / k_dot_e),
                   "(dn_e/n)/(dBpar /B)": np.log10(dn_e_n_dbpar_b),
                   " Spar/Stot": s_par / s_tot}

    # Keep only the real part and reorder axes to (surface, kx, kz).
    for k, v in zip(extra_param.keys(), extra_param.values()):
        extra_param[k] = np.transpose(np.real(v), [0, 2, 1])

    kx_ = np.transpose(kc_x_mat)
    kz_ = np.transpose(kc_z_mat)
    wf_ = np.transpose(w_final, [0, 2, 1])

    return kx_, kz_, wf_, extra_param
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compute_mixing_coefficients_surf(self):\n [Ly,N] = self.b.shape\n z_u_w = self.grid_dict['z_u_w']\n\n # SET UP NEW MIXING COEFFICIENT ARRAYS\n self.Kv_surf = np.zeros([Ly,N+1])\n self.Kt_surf = np.zeros([Ly,N+1])\n \n self.ghat = np.zeros([Ly,N+1])\n \n\n ...
[ "0.6823951", "0.68156433", "0.64645", "0.62532675", "0.5977594", "0.5888927", "0.5858084", "0.5850966", "0.5778458", "0.5767043", "0.5753279", "0.5737354", "0.5723255", "0.5714657", "0.57088953", "0.5705945", "0.56355387", "0.56164163", "0.561608", "0.56118447", "0.5599761", ...
0.7354267
0
Restaurant fixture for future tests
def restaurant_only():
    """Restaurant fixture for future tests: open 8:00-23:00 every day
    except Sunday, initially not working."""
    weekdays = ("Понедельник", "Вторник", "Среда",
                "Четверг", "Пятница", "Суббота")
    work_time = {day: "8:00-23:00" for day in weekdays}
    work_time["Воскресенье"] = "Выходной"
    return Restaurant("Снежинка", work_time, False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setUp(self):\n valid_name = \"Tungalo\"\n valid_location = \"Rivne\"\n valid_status = 0\n valid_tables_count = 10\n valid_description = \"description\"\n\n self.restaurant = Restaurant()\n self.restaurant.name = valid_name\n self.restaurant.location = val...
[ "0.7482953", "0.6990066", "0.67108124", "0.66626084", "0.6656253", "0.6604087", "0.6598095", "0.64222395", "0.6398554", "0.6376477", "0.63677603", "0.6358689", "0.63136774", "0.6313415", "0.629336", "0.6270328", "0.62372196", "0.6197963", "0.61795425", "0.617826", "0.6164018"...
0.0
-1
Kitchen fixture for future tests
def kitchen_only(restaurant_only):
    """Kitchen fixture bound to the restaurant fixture."""
    return Kitchen(restaurant_only)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fixtures():", "def setUpFixture(self):\n pass", "def _fixture_setup(self):\n pass", "def fixture_example_data():\n import_example_data()", "def fixture_runner():\n return CliRunner()", "def setUp(self):\n self.setup_beets()", "def setUp(self):\n \n \n ...
[ "0.8008476", "0.7767613", "0.74596703", "0.74033487", "0.72472155", "0.6927232", "0.69233274", "0.69233274", "0.69233274", "0.69233274", "0.69233274", "0.69233274", "0.69233274", "0.69233274", "0.69233274", "0.68981403", "0.68428254", "0.67888683", "0.6758065", "0.67523795", ...
0.0
-1
Delivery fixture for future tests
def delivery_only(restaurant_only):
    """Delivery fixture bound to the restaurant fixture."""
    return Delivery(restaurant_only)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_create_confirm_delivery_details(self):\n pass", "def fixtures():", "def setUp(self):\n self.client = APIClient()\n\n self.speciality = models.Speciality.objects.create(\n name='Speciality'\n )\n\n self.payload = {\n 'name': \"Knee Replacement\",...
[ "0.71651965", "0.68431896", "0.6447273", "0.6385813", "0.6308756", "0.6252802", "0.62355095", "0.62030375", "0.61696184", "0.6162334", "0.61370283", "0.6062642", "0.6026275", "0.60202163", "0.60082394", "0.6001781", "0.6001781", "0.6001781", "0.5990583", "0.59405696", "0.5933...
0.0
-1
Hall fixture for future tests
def hall_only(restaurant_only):
    """Hall fixture (capacity 50) bound to the restaurant fixture."""
    return Hall(restaurant_only, max=50)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fixtures():", "def setUpFixture(self):\n pass", "def _fixture_setup(self):\n pass", "def fixture_example_data():\n import_example_data()", "def start_fixture(self):\n pass", "def tearDownFixture(self):\n pass", "def data_manager_fixture():\n\n class DataManager:\n ...
[ "0.8068017", "0.78472394", "0.7669949", "0.725615", "0.7145169", "0.70357", "0.70156425", "0.6989071", "0.678195", "0.67077214", "0.6646608", "0.6646608", "0.6646608", "0.6646608", "0.6646608", "0.6646608", "0.6646608", "0.6646608", "0.6646608", "0.6626644", "0.662342", "0....
0.0
-1
Simple restaurant instance creating test
def test_simple_restaurant(restaurant_only):
    """Smoke test: the bare restaurant fixture is created and truthy."""
    assert restaurant_only
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setUp(self):\n valid_name = \"Tungalo\"\n valid_location = \"Rivne\"\n valid_status = 0\n valid_tables_count = 10\n valid_description = \"description\"\n\n self.restaurant = Restaurant()\n self.restaurant.name = valid_name\n self.restaurant.location = val...
[ "0.80320793", "0.7007531", "0.6976299", "0.69665337", "0.68096566", "0.67881805", "0.67846525", "0.67737657", "0.6733063", "0.6733063", "0.6733063", "0.67071205", "0.6684821", "0.6684315", "0.6668093", "0.6642441", "0.6629448", "0.6628427", "0.6583304", "0.6565177", "0.656517...
0.6356295
37
Simple restaurant instance creating test with correct Kitchen, Hall and Delivery
def test_full_restaurant(restaurant_full):
    """Smoke test: the fully wired restaurant fixture (with Kitchen, Hall
    and Delivery) is created and truthy."""
    assert restaurant_full
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setUp(self):\n valid_name = \"Tungalo\"\n valid_location = \"Rivne\"\n valid_status = 0\n valid_tables_count = 10\n valid_description = \"description\"\n\n self.restaurant = Restaurant()\n self.restaurant.name = valid_name\n self.restaurant.location = val...
[ "0.8021308", "0.7417864", "0.7310501", "0.7144182", "0.70938045", "0.6831627", "0.6762305", "0.67300665", "0.669786", "0.66929805", "0.6612574", "0.65892005", "0.65892005", "0.65892005", "0.65802336", "0.65279776", "0.6520043", "0.6517237", "0.648618", "0.6472757", "0.6470774...
0.6852933
5
Test Restaurant.__check_conditions decorator Test must be passed if functions with this decorator raised error cause of Hall, Delivery or Kitchen was not setted.
def test_open_no_setup(restaurant_only, hall_only, kitchen_only, delivery_only):
    """Restaurant.open() must raise CustomWarning until Kitchen, Delivery
    and Hall are all set (exercises the Restaurant.__check_conditions
    decorator), and the restaurant must stay closed."""
    # Not every combination is checked here: restaurant_only is not an
    # isolated object, so the sub-cases below build on each other and
    # would affect one another's results if reordered.
    # no setups
    with pytest.raises(CustomWarning):
        restaurant_only.open()
    assert restaurant_only.is_working is False, "You need to setup Kitchen, Delivery and Hall"
    # only kitchen
    with pytest.raises(CustomWarning):
        restaurant_only.set_kitchen(kitchen_only)
        restaurant_only.open()
    assert restaurant_only.is_working is False, "You need to setup Kitchen, Delivery and Hall"
    # only delivery and kitchen
    with pytest.raises(CustomWarning):
        restaurant_only.set_delivery(delivery_only)
        restaurant_only.set_kitchen(kitchen_only)
        restaurant_only.open()
    assert restaurant_only.is_working is False, "You need to setup Kitchen, Delivery and Hall"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_available(self):\n feature_guard = _make_requires(True, \"Error text\")\n results = []\n\n @feature_guard\n def inner():\n results.append(True)\n return True\n\n assert inner() is True\n assert [True] == results", "def test_simple_restauran...
[ "0.62134707", "0.5922159", "0.5859138", "0.57996106", "0.5779631", "0.57793766", "0.5778328", "0.57430685", "0.5736257", "0.5732914", "0.57291234", "0.5714089", "0.5701453", "0.5699153", "0.5698433", "0.5688214", "0.5600252", "0.55860335", "0.55774295", "0.5557103", "0.555048...
0.66464823
0
Test of cooking the same product twice. Test passed if second cooking of same product raise ValueError
def test_cook_twice(cook_not_busy, product_for_cook):
    """Cooking the same product twice must raise ValueError on the
    second attempt."""
    cook_not_busy.cook_dish(product_for_cook)
    with pytest.raises(ValueError):
        cook_not_busy.cook_dish(product_for_cook)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def checker(self, product):\n for item in self.instock:\n if item == product:\n return True\n return False", "def test_buyTicket_AlreadySold():\n assert not testUser2.buyTicket(testTicket1)\n assert testTicket1 in testUser1.inventory\n assert testTicket1 not in te...
[ "0.6690166", "0.63332933", "0.62514263", "0.61649024", "0.6153124", "0.605767", "0.6029322", "0.60229874", "0.6018796", "0.6007936", "0.5988192", "0.5973974", "0.5963615", "0.5908742", "0.58811826", "0.58582234", "0.585461", "0.5827044", "0.5807381", "0.58039653", "0.579343",...
0.7491278
0
Test of cooking by busy cook Test passed if busy cook raise a CustomWarning
def test_busy_cook(cook_busy, product_for_cook):
    """A busy cook must refuse to cook and raise CustomWarning.

    FIX: the call was previously wrapped in a bare ``assert`` inside the
    ``pytest.raises`` block — dead weight, since the assert can only run
    when no exception is raised, in which case pytest.raises fails anyway.
    """
    with pytest.raises(CustomWarning):
        cook_busy.cook_dish(product_for_cook)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_cook_twice(cook_not_busy, product_for_cook):\n\n cook_not_busy.cook_dish(product_for_cook)\n with pytest.raises(ValueError):\n cook_not_busy.cook_dish(product_for_cook)", "def test_cook_set_free(cook_busy, product_for_cook):\n cook_busy.set_free(True)\n # if product needs to be cooked...
[ "0.65008634", "0.625711", "0.620796", "0.58594835", "0.5785558", "0.5728688", "0.57037574", "0.57026243", "0.5623727", "0.56204015", "0.5618192", "0.56137985", "0.5599557", "0.5592458", "0.5572111", "0.55385923", "0.5527014", "0.551451", "0.551242", "0.55018896", "0.5490242",...
0.7838216
0
Test of changing state of cook. Busy cook set to free and then tries to cook the dish. Cooking should be successful (product.get_need_cook_status should be False)
def test_cook_set_free(cook_busy, product_for_cook): cook_busy.set_free(True) # if product needs to be cooked assert product_for_cook.get_need_cook_status() is True cook_busy.cook_dish(product_for_cook) assert product_for_cook.get_need_cook_status() is False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_cook_twice(cook_not_busy, product_for_cook):\n\n cook_not_busy.cook_dish(product_for_cook)\n with pytest.raises(ValueError):\n cook_not_busy.cook_dish(product_for_cook)", "def test_update_state1(self):\n pass", "def test_update_state(self):\n pass", "def test_update_state2...
[ "0.61100876", "0.5935458", "0.59113294", "0.5875447", "0.58063436", "0.57781756", "0.57589597", "0.5701074", "0.5680897", "0.5638473", "0.5637342", "0.56042886", "0.5569046", "0.5534861", "0.5534524", "0.54650354", "0.545623", "0.54499906", "0.542222", "0.5383806", "0.5364995...
0.75115085
0
Formats the output of a transaction receipt to its proper values
def output_transaction_receipt_formatter(receipt):
    """Formats the output of a transaction receipt to its proper values.

    Quantity fields are converted to decimal; the ``logs`` list (when it
    is an array) is run through outputLogFormatter. Returns None when the
    receipt is None (e.g. transaction not yet mined).
    """
    if receipt is None:
        return None
    # NOTE(review): assumes `compose` applies left-to-right (map each log,
    # then materialize the list) — confirm against the project's compose
    # implementation, as toolz-style compose applies right-to-left.
    logs_formatter = compose(functools.partial(map, outputLogFormatter), list)
    formatters = {
        'blockNumber': to_decimal,
        'transactionIndex': to_decimal,
        'cumulativeGasUsed': to_decimal,
        'gasUsed': to_decimal,
        'logs': lambda l: logs_formatter(l) if is_array(l) else l,
    }
    # Apply the per-key formatter where one exists; identity otherwise.
    return {
        key: formatters.get(key, identity)(value)
        for key, value in receipt.items()
    }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_receipt(self) -> typing.List[str]:\n lines = []\n euro_total=0\n usd_total=0\n gbp_total=0\n\n for item in self._items.items():\n euro_price = self._get_product_price(item[0]) * item[1]\n usd_price = self.get_price_in_currency(euro_price,\"USD\")\n...
[ "0.6561996", "0.6189167", "0.60160506", "0.59889376", "0.5762017", "0.57275635", "0.56979066", "0.5656792", "0.56265295", "0.5626407", "0.55991745", "0.55775195", "0.5537981", "0.55367833", "0.5487953", "0.5431081", "0.5430332", "0.5380763", "0.5369978", "0.53524035", "0.5349...
0.676757
0
Formats the output of a block to its proper values
def outputBlockFormatter(block): # Transform to number block["gasLimit"] = to_decimal(block["gasLimit"]) block["gasUsed"] = to_decimal(block["gasUsed"]) block["size"] = to_decimal(block["size"]) block["timestamp"] = to_decimal(block["timestamp"]) if block.get("number"): block["number"] = to_decimal(block["number"]) block["difficulty"] = to_decimal(block["difficulty"]) block["totalDifficulty"] = to_decimal(block["totalDifficulty"]) if is_array(block.get("transactions")): for item in block["transactions"]: if not is_string(item): item = output_transaction_formatter(item) return block
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reformat_block(specline, values):\n data = reformat_spec_line(specline)\n desc = '\\n'.join(values)\n data.append(desc)\n return data", "def verbose(self, block: Block):\n print('\\n\\n==============================')\n print('Hash:\\t\\t', block.hash.hexdigest())\n print('Pr...
[ "0.64059097", "0.6007487", "0.5995631", "0.5994798", "0.57876045", "0.5775755", "0.5706006", "0.56863886", "0.5680554", "0.5670126", "0.5669903", "0.5630315", "0.5629678", "0.5619763", "0.5568151", "0.55656964", "0.5549295", "0.55125326", "0.5497574", "0.5473819", "0.54342616...
0.7147282
0
Formats the output of a log
def outputLogFormatter(log): if log.get("blockNumber"): log["blockNumber"] = to_decimal(log["blockNumber"]) if log.get("transactionIndex"): log["transactionIndex"] = to_decimal(log["transactionIndex"]) if log.get("logIndex"): log["logIndex"] = to_decimal(log["logIndex"]) return log
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def format(self, record):\n msg = logging.Formatter.format(self, record)\n label, color = self.label(record)\n if self.strip:\n return \"{:10s}{}\".format(label, sub(\"\\033\\\\[[0-9]+m\", \"\", msg, 0))\n else:\n return \"\\033[1;{}m{:10s}\\033[0m{}\".format(color...
[ "0.6809159", "0.6568247", "0.6517458", "0.64604336", "0.63606316", "0.63424927", "0.6341781", "0.63280874", "0.6323291", "0.63123155", "0.6245183", "0.6183246", "0.6177255", "0.6145785", "0.61174196", "0.61150354", "0.6070282", "0.60515445", "0.6051452", "0.6049557", "0.60433...
0.71652734
0
Formats the input of a whisper post and converts all values to HEX
def inputPostFormatter(post): post["ttl"] = from_decimal(post["ttl"]) post["workToProve"] = from_decimal(post.get("workToProve", 0)) post["priority"] = from_decimal(post["priority"]) if not is_array(post.get("topics")): post["topics"] = [post["topics"]] if post.get("topics") else [] post["topics"] = [topic if is_0x_prefixed(topic) else encode_hex(topic) for topic in post["topics"]] return post
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def outputPostFormatter(post):\n\n post[\"expiry\"] = to_decimal(post[\"expiry\"])\n post[\"sent\"] = to_decimal(post[\"sent\"])\n post[\"ttl\"] = to_decimal(post[\"ttl\"])\n post[\"workProved\"] = to_decimal(post[\"workProved\"])\n\n if not post.get(\"topics\"):\n post[\"topics\"] = []\n\n ...
[ "0.6161056", "0.6024747", "0.5889979", "0.5726662", "0.5721067", "0.5715071", "0.5712564", "0.56954545", "0.56508905", "0.5574359", "0.5507912", "0.5491416", "0.5455812", "0.543887", "0.54006004", "0.537959", "0.536963", "0.5341674", "0.5320729", "0.5312651", "0.5311158", "...
0.6506106
0
Formats the output of a received post message
def outputPostFormatter(post): post["expiry"] = to_decimal(post["expiry"]) post["sent"] = to_decimal(post["sent"]) post["ttl"] = to_decimal(post["ttl"]) post["workProved"] = to_decimal(post["workProved"]) if not post.get("topics"): post["topics"] = [] post["topics"] = [decode_hex(topic) for topic in post["topics"]] return post
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def format(self, message):", "def inputPostFormatter(post):\n\n post[\"ttl\"] = from_decimal(post[\"ttl\"])\n post[\"workToProve\"] = from_decimal(post.get(\"workToProve\", 0))\n post[\"priority\"] = from_decimal(post[\"priority\"])\n\n if not is_array(post.get(\"topics\")):\n post[\"topics\"]...
[ "0.6517435", "0.6332223", "0.6274383", "0.6151796", "0.6082813", "0.60550827", "0.5994535", "0.580875", "0.58030015", "0.57783115", "0.5777758", "0.57161885", "0.56622404", "0.5656379", "0.56329155", "0.5609236", "0.5584325", "0.5571311", "0.55095625", "0.5502143", "0.5492511...
0.71348196
0
DO NOT TOUCH THIS FUNCTION. IT IS USED FOR COMPUTER EVALUATION OF YOUR CODE
def main(): test_cases = ast.literal_eval(sys.argv[1]) results = str(my_info()) + '\t\t' for test_case in test_cases: mode = test_case[0] id_1 = int(test_case[1]) id_2 = int(test_case[2]) if mode == 'jc': results += str(Jaccard_Coefficient(id_1, id_2)) + '\t\t' elif mode == 'cc': results += str(Correlation_Coefficient(id_1, id_2)) + '\t\t' else: exit('bad command') print results + '\n'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exercise_b2_106():\r\n pass", "def exercise_b2_113():\r\n pass", "def exercise_b2_107():\r\n pass", "def exercise_b2_53():\r\n pass", "def exo2():", "def exercise_b2_69():\r\n pass", "def substantiate():", "def cx():", "def exercise_b2_70():\r\n pass", "def exercise_b2_98():...
[ "0.6566932", "0.6499155", "0.64852047", "0.64321226", "0.6431028", "0.6361083", "0.6330522", "0.625579", "0.62346584", "0.6231363", "0.6221362", "0.61771435", "0.61722547", "0.6136739", "0.61362576", "0.6126891", "0.611284", "0.6068753", "0.6059188", "0.60250646", "0.59817815...
0.0
-1
Shorthand for assert. Saves 3 whole characters!
def ok_(expr, msg=None): if not expr: raise AssertionError(msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_third_equal(self):\n self.assertEqual(heaviest_word(\"take me to semynak\"), \"semynak\")", "def test_sanity(self):\n self.assertEquals(2 + 2, 4)", "def test_spaces(self):\n self.assertValue({\n 'foo bar': 'something here',\n },\n \"foo_bar: something_here...
[ "0.6651435", "0.6275559", "0.624227", "0.62251574", "0.61541015", "0.6134255", "0.61262155", "0.60943294", "0.60038877", "0.60038704", "0.59775376", "0.59726155", "0.59587276", "0.5954388", "0.593926", "0.5934084", "0.5925226", "0.59227633", "0.5909867", "0.5902318", "0.58977...
0.0
-1
Shorthand for 'assert a == b, "%r != %r" % (a, b)
def eq_(a, b, msg=None): if not a == b: raise AssertionError(msg or "%r != %r" % (a, b))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eq_(a, b, msg=None):\n assert a == b, msg or \"%r != %r\" % (a, b)", "def eq_(a, b, msg=None):\n assert a == b, msg or \"%r != %r\" % (a, b)", "def assert_eq(a, b, msg=None):\n assert a == b, msg or __safe_error(\"!=\", a, b)", "def assert_not_equal(self, first, second, msg=\"\"):\r\n ass...
[ "0.7919735", "0.7919735", "0.79195523", "0.7669848", "0.766435", "0.7557559", "0.7554662", "0.74091434", "0.73751223", "0.7365663", "0.7247073", "0.7191134", "0.71108264", "0.71013314", "0.70558685", "0.70555484", "0.704943", "0.6989262", "0.685035", "0.68350255", "0.6828854"...
0.7456572
7
Checks only authenticated users can see the page
def test_csv_import_auth(self): path = reverse("import-csv") request = RequestFactory().get(path) request.user = mixer.blend(User) response = csv_import(request) assert response.status_code == 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_authenticated(self, request, **kwargs):\r\n return True", "def has_permission(self, request, view):\n if request.user.is_authenticated():\n return True\n return False", "def is_allowed_to_submit(request):\n return not settings.REQUIRE_LOGIN or request.user.is_authentic...
[ "0.75106347", "0.74165606", "0.7196165", "0.7195553", "0.71893704", "0.7175715", "0.71588826", "0.71588826", "0.71588826", "0.71588826", "0.71588826", "0.71588826", "0.71588826", "0.71113145", "0.70598507", "0.7030012", "0.70088905", "0.699589", "0.69223857", "0.69180375", "0...
0.0
-1
Checks unauthenticated users can not see the page
def test_csv_import_unauth(self): path = reverse("import-csv") request = RequestFactory().get(path) request.user = AnonymousUser() response = csv_import(request) assert response.status_code == 302
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_not_logged_user_cannot_access(self):\n\n utils.test_not_logged_cannot_access(self, self.url)", "def test_not_logged_user_cannot_access(self):\n\n utils.test_not_logged_cannot_access(self, self.url)", "def test_not_logged_user_cannot_access(self):\n\n utils.test_not_logged_cannot_a...
[ "0.7784443", "0.7784443", "0.7784443", "0.7784443", "0.7555281", "0.73508805", "0.73336077", "0.7329427", "0.72262084", "0.72135955", "0.72135955", "0.715947", "0.70824975", "0.7073744", "0.7048739", "0.70219946", "0.69940305", "0.6948124", "0.6944818", "0.69262964", "0.69061...
0.0
-1
Checks only authenticated users can see the page
def test_setting_csv_auth(self): path = reverse("setting-csv") request = RequestFactory().get(path) request.user = mixer.blend(User) response = csv_setting(request) assert response.status_code == 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_authenticated(self, request, **kwargs):\r\n return True", "def has_permission(self, request, view):\n if request.user.is_authenticated():\n return True\n return False", "def is_allowed_to_submit(request):\n return not settings.REQUIRE_LOGIN or request.user.is_authentic...
[ "0.75106347", "0.74165606", "0.7196165", "0.7195553", "0.71893704", "0.7175715", "0.71588826", "0.71588826", "0.71588826", "0.71588826", "0.71588826", "0.71588826", "0.71588826", "0.71113145", "0.70598507", "0.7030012", "0.70088905", "0.699589", "0.69223857", "0.69180375", "0...
0.0
-1
Checks unauthenticated users can not see the page
def test_setting_csv_unauth(self): path = reverse("setting-csv") request = RequestFactory().get(path) request.user = AnonymousUser() response = csv_setting(request) assert response.status_code == 302
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_not_logged_user_cannot_access(self):\n\n utils.test_not_logged_cannot_access(self, self.url)", "def test_not_logged_user_cannot_access(self):\n\n utils.test_not_logged_cannot_access(self, self.url)", "def test_not_logged_user_cannot_access(self):\n\n utils.test_not_logged_cannot_a...
[ "0.7784443", "0.7784443", "0.7784443", "0.7784443", "0.7555281", "0.73508805", "0.73336077", "0.7329427", "0.72262084", "0.72135955", "0.72135955", "0.715947", "0.70824975", "0.7073744", "0.7048739", "0.70219946", "0.69940305", "0.6948124", "0.6944818", "0.69262964", "0.69061...
0.0
-1
Tests the import from local file for cities works fine
def test_csv_import_city(self): from django.contrib.messages import get_messages path = reverse("import-csv") user = mixer.blend(User, is_staff=True, is_superuser=True) file = open("city.csv") client = Client() client.force_login(user) r = client.post(path, {"title": "city", "csv_file": file}) messages = list(get_messages(r.wsgi_request)) assert r.status_code == 200 assert len(messages) == 1 assert str(messages[0]) == "Successfully Uploaded!"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetWorldCities():\n return GetDataFromCsvFile('world_cities.csv')", "def _import_insee_city(self, cr, uid, ids, data_dir, context=None):\n if context is None:\n context = {}\n filepath = os.path.abspath(os.path.join(data_dir, 'comsimp2011.csv'))\n city_obj = self.pool.get('in...
[ "0.6581644", "0.64106566", "0.6348623", "0.6177727", "0.59844106", "0.58887535", "0.58847845", "0.5876422", "0.58704174", "0.5821457", "0.5782551", "0.57631767", "0.5752825", "0.573229", "0.5729731", "0.5711235", "0.56939304", "0.5689702", "0.5689702", "0.5689702", "0.5674587...
0.656946
1
Tests the import from local file for hotels works fine
def test_csv_import_hotel_success(self): from django.contrib.messages import get_messages path = reverse("import-csv") user = mixer.blend(User, is_staff=True, is_superuser=True) file = open("city.csv") client = Client() client.force_login(user) client.post(path, {"title": "city", "csv_file": file}) file = open("hotel.csv") r = client.post(path, {"title": "hotel", "csv_file": file}) messages = list(get_messages(r.wsgi_request)) assert r.status_code == 200 assert len(messages) == 1 assert str(messages[0]) == "Successfully Uploaded!"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_imports(self):\n\n # DEVICES\n from surrortg.devices.udp import ( # noqa:F401\n UdpActuator,\n UdpBot,\n UdpCar,\n UdpInput,\n )\n from surrortg.devices.udp.udp_protocol import ( # noqa:F811,F401\n open_remote_endpoint,\n...
[ "0.6580002", "0.62604517", "0.62208253", "0.6155901", "0.60002035", "0.5990556", "0.593152", "0.5869385", "0.5839019", "0.5812476", "0.58026296", "0.5798949", "0.5780236", "0.5765925", "0.5747201", "0.57247925", "0.5714153", "0.5711708", "0.5696462", "0.56960547", "0.5638083"...
0.0
-1
Tests hotels which their cities aren't in database can not get imported form local file
def test_csv_import_hotel_fail(self): from django.contrib.messages import get_messages path = reverse("import-csv") user = mixer.blend(User, is_staff=True, is_superuser=True) client = Client() client.force_login(user) file = open("hotel.csv") r = client.post(path, {"title": "hotel", "csv_file": file}) messages = list(get_messages(r.wsgi_request)) assert r.status_code == 200 assert len(messages) >= 1 for message in messages: assert "can not import" in str(message)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_unknown_countries(self):\n # Currently, there are no Countries or Regions\n self.assertEqual(Country.objects.count(), 0)\n self.assertEqual(Region.objects.count(), 0)\n\n # Call the command with countries that are not recognized by the iso3166 library\n self.call_command...
[ "0.63880724", "0.60217434", "0.6000848", "0.59790474", "0.59473", "0.5909674", "0.5837335", "0.57937455", "0.5778223", "0.5773335", "0.5719565", "0.57075006", "0.56792784", "0.5651297", "0.56426704", "0.5636547", "0.5630291", "0.56268394", "0.5595516", "0.5582883", "0.5576265...
0.58699983
6
Tests the import from remote file for cities works fine
def test_setting_csv_city(self): from django.contrib.messages import get_messages path = reverse("setting-csv") user = mixer.blend(User, is_staff=True, is_superuser=True) client = Client() client.force_login(user) r = client.post(path, {"title": "city", "url": "http://rachel.maykinmedia.nl/djangocase/city.csv", "username": "python-demo", "password": "claw30_bumps", "save": "on"}) messages = list(get_messages(r.wsgi_request)) assert r.status_code == 200 assert len(messages) == 1 assert str(messages[0]) == "Successfully Uploaded!"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_csv_import_city(self):\n from django.contrib.messages import get_messages\n path = reverse(\"import-csv\")\n user = mixer.blend(User, is_staff=True, is_superuser=True)\n file = open(\"city.csv\")\n client = Client()\n client.force_login(user)\n r = client.p...
[ "0.67597806", "0.65911627", "0.6365303", "0.6147274", "0.5999421", "0.59644586", "0.5793917", "0.57783747", "0.57087487", "0.56976557", "0.568284", "0.5604537", "0.55910504", "0.5550587", "0.55388725", "0.5531237", "0.55103743", "0.549801", "0.5494643", "0.5475696", "0.547448...
0.5930846
6
Tests the import from remote file for hotels works fine
def test_setting_csv_hotel_success(self): from django.contrib.messages import get_messages path = reverse("setting-csv") user = mixer.blend(User, is_staff=True, is_superuser=True) client = Client() client.force_login(user) client.post(path, {"title": "city", "url": "http://rachel.maykinmedia.nl/djangocase/city.csv", "username": "python-demo", "password": "claw30_bumps", "save": "on"}) r = client.post(path, {"title": "hotel", "url": "http://rachel.maykinmedia.nl/djangocase/hotel.csv", "username": "python-demo", "password": "claw30_bumps", "save": "on"}) messages = list(get_messages(r.wsgi_request)) assert r.status_code == 200 assert len(messages) == 1 assert str(messages[0]) == "Successfully Uploaded!"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_from_remote(self, url: Optional[str] = None) -> None:\n raise NotImplementedError", "def test_importfile_valid_remotepath_valid_localpath(self):\n\n # create a temporary file\n handle,remotepath = tempfile.mkstemp()\n indata = \"hubcheck\\ntool session shell test\\n%s\" % (re...
[ "0.6129432", "0.5878348", "0.58344084", "0.5827213", "0.5798477", "0.57732666", "0.57479435", "0.56994355", "0.5604567", "0.5604356", "0.56015086", "0.56014234", "0.55989224", "0.5594116", "0.5569717", "0.55627763", "0.5552308", "0.55374223", "0.55238146", "0.55180776", "0.54...
0.0
-1
Tests hotels which their cities aren't in database can not get imported form remote file
def test_setting_csv_hotel_fail(self): from django.contrib.messages import get_messages path = reverse("setting-csv") user = mixer.blend(User, is_staff=True, is_superuser=True) client = Client() client.force_login(user) r = client.post(path, {"title": "hotel", "url": "http://rachel.maykinmedia.nl/djangocase/hotel.csv", "username": "python-demo", "password": "claw30_bumps", "save": "on"}) messages = list(get_messages(r.wsgi_request)) assert r.status_code == 200 assert len(messages) > 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def test_get_bad_location_data(self):\n city_name = 'notarealplace'\n response = await self.http_client.fetch(request=HTTPRequest(\n url=self.get_url(path=\"/location-data/{}\".format(city_name)),\n method='GET'\n ), raise_error=False)\n self.assertEqual(resp...
[ "0.63873357", "0.63659126", "0.6264148", "0.59355015", "0.5915979", "0.5888972", "0.5843924", "0.5843775", "0.58309996", "0.58118176", "0.5742887", "0.57028115", "0.561459", "0.5560846", "0.5550713", "0.55434555", "0.55350614", "0.5522535", "0.5517603", "0.55035067", "0.54660...
0.5511484
19
Checks whether the search functionality works fine
def test_search(self): from importCsv.models import City, Hotel path = reverse("search") user = mixer.blend(User, is_staff=True, is_superuser=True) city = mixer.blend(City, abbrev="tes", name="test") mixer.blend(Hotel, city=city, data="testData", name="test hotel") client = Client() client.force_login(user) r = client.post(path, {"tes": "on"}) assert r.status_code == 200 assert r.content.find(b'test hotel')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_search(self):\n pass", "def test_search(self):\n pass", "def test_search(self):\n pass", "def search():\n pass", "def test_non_existent_term_search(self):\n\n expected_results = []\n results = self.searcher.search(\"asdasdasdas\")\n\n self.assertListEqu...
[ "0.73777276", "0.73777276", "0.73777276", "0.7130437", "0.70597166", "0.7040227", "0.7010459", "0.6978306", "0.6912243", "0.69074774", "0.68966824", "0.68724257", "0.68662024", "0.68647575", "0.678795", "0.67700964", "0.6743339", "0.67405117", "0.6720262", "0.6709872", "0.668...
0.63336676
61
Validates the functionality of logout
def test_logout(self): from django.contrib.messages import get_messages path = reverse("logout") user = mixer.blend(User, is_staff=True, is_superuser=True) client = Client() client.force_login(user) client.post('/admin/') r = client.post(path) messages = list(get_messages(r.wsgi_request)) assert str(messages[0]) == "Successfully logged out"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def logout():", "def logout(self):", "def logout(self):\n pass", "def logout():\n login()", "def logout(self):\r\n # should redirect\r\n check_for_get_code(self, 302, reverse('logout'))", "def logout_user():\n pass", "def logout(request):\n if request.user.is_authenticated...
[ "0.76756245", "0.75859284", "0.73818344", "0.73260444", "0.72436315", "0.72046804", "0.7117295", "0.7113737", "0.7077796", "0.69774604", "0.6975256", "0.69690436", "0.69545597", "0.69395155", "0.6934152", "0.69264627", "0.6844665", "0.68182045", "0.6816941", "0.6815389", "0.6...
0.64820796
69
Checks the main page is Ok
def test_main(self): path = reverse("main") request = RequestFactory().get(path) response = index(request) assert response.status_code == 200
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _check_page(self, html_content):\n if \"Sign in for the best experience\" in html_content:\n valid_page = False\n elif \"The request could not be satisfied.\" in html_content:\n valid_page = False\n else:\n valid_page = True\n return valid_page", "...
[ "0.73718345", "0.7190955", "0.6915523", "0.65855736", "0.65250415", "0.6493413", "0.6427644", "0.64157593", "0.6401849", "0.6310914", "0.6305969", "0.630592", "0.6293213", "0.62864685", "0.6239085", "0.62373424", "0.61935997", "0.61764014", "0.6162644", "0.61491907", "0.61336...
0.0
-1
Checks getting file from remote url works fine
def test_get_file_fail(self): from django.contrib.messages import get_messages path = reverse("setting-csv") user = mixer.blend(User, is_staff=True, is_superuser=True) client = Client() client.force_login(user) r = client.post(path, {"title": "hotel", "url": "http://rachel.wrongurltofetchdata.nl/djangocase/hotel.csv", "username": "py", "password": "30_bumps", "save": "on"}) messages = list(get_messages(r.wsgi_request)) assert r.status_code == 200 assert len(messages) == 1 assert "Received an error" in str(messages[0])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getfile(url):\n try:\n return urlreq.urlopen(url)\n except urlreq.HTTPError as e:\n safeprint(\"Sever returned with response code \" + str(e.getcode()) + \", download failed.\")", "def check_remote_file_exists(url, login=None, password=None):\r\n credentials = None\r\n if lo...
[ "0.7467245", "0.7420734", "0.72722775", "0.71854055", "0.7185126", "0.71281976", "0.7114665", "0.70842046", "0.70659125", "0.706313", "0.6903022", "0.68697876", "0.6751283", "0.671117", "0.6564248", "0.654372", "0.65414447", "0.6529368", "0.6481161", "0.64615464", "0.64566016...
0.0
-1
Kludge in running testsuistes as a sub process. Testsuite objects have already been made,and used to allow test selection. Having done this, the objects are dumped, and new ones are started up in sub shells.
def runTestSuites(self): self.testsuitesToXML() tss = [] jobStatus = {} for t in self.testsuites: d = t.testsuitedir runner = os.path.join(self.basepath, 'testSuiteRunner.py') tdir = os.path.join(d, 'testsuite.out') cmd = 'python %s %s>& %s' % (runner, d,tdir) #print 'about to popen the cmd: %s' % cmd tss.append((t.name, popen2.Popen3(cmd))) jobStatus[t.name] = ('running', nowSecs()) ntests = len(tss) printJobStatus(jobStatus) while tss: toRemove = [p for p in tss if p[1].poll() != -1] if toRemove: [tss.remove(p) for p in toRemove] for p in toRemove: jobStatus[p[0]] = ('completed', nowSecs()) printJobStatus(jobStatus) time.sleep(10) print 'all %d tests have completed' % ntests
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def RunTest(self):\n self.TestLs()\n self.TestTerminate()\n self.TestMultipleProcesses()", "def __main() :\n launchTests()", "def runtest(self):", "def startTestRun(self):", "def runTest(self):\n unittest.main()\n ChoreTest.clean_up()", "def runTest(self):\n self.testsAct...
[ "0.67804116", "0.63147813", "0.62815255", "0.62448347", "0.6162691", "0.611946", "0.61153394", "0.60913515", "0.60768825", "0.6026399", "0.59964746", "0.59423214", "0.59258866", "0.5913008", "0.59060097", "0.59013414", "0.5897372", "0.5894514", "0.5878061", "0.58522886", "0.5...
0.60532445
9
TEST THE USER ADD SUCCESS
def test_create_valid_user_success(self): payload = { "email": "test@gmail.com", "name": "Test", 'password': 'test123' } res = self.client.post(CREATE_USER_URL, payload) self.assertEqual(res.status_code, status.HTTP_201_CREATED) user = get_user_model().objects.get(**res.data) self.assertTrue(user.check_password(payload['password'])) self.assertNotIn('password', res.data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_add_user(self):\n pass", "def testAdd1(self):\n self.assertEquals(models.SUCCESS, self.users.add(\"userA\", \"password\"))", "def test_addUser(self):\n self.new_user.saveUser()\n self.assertEqual(len(User.users_list),1)", "def test_main_add_user(self):\n with self....
[ "0.88546467", "0.8262478", "0.82284814", "0.8155481", "0.81260383", "0.81209815", "0.8071225", "0.80570364", "0.8053225", "0.8029433", "0.802561", "0.7957674", "0.7928939", "0.7879095", "0.78571725", "0.773885", "0.77226776", "0.77226776", "0.77226776", "0.77198654", "0.76304...
0.0
-1
TEST CREATING A USER THAT ALREADY EXISTS
def test_create_user_exists(self): payload = { "email": "test@gmail.com", "name": "Test", 'password': 'test123' } create_user(**payload) res = self.client.post(CREATE_USER_URL, payload) self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_user_creation(self):\n self.assertTrue(User.objects.exists())", "def test_existing_user(self):\n user = User.objects.create(username=self.username)\n actual = get_user_if_exists(None, self.details)\n self.assertDictEqual(actual, {'is_new': False, 'user': user})", "def test_...
[ "0.8342608", "0.8204365", "0.82029146", "0.8191392", "0.8067835", "0.797487", "0.797487", "0.797487", "0.79677063", "0.79604864", "0.7947312", "0.79398227", "0.7937801", "0.79361814", "0.7914857", "0.7861646", "0.78466517", "0.78347856", "0.7821123", "0.781935", "0.7808139", ...
0.78193086
20
TEST A SHOT PASSWORD
def test_password_too_short(self): payload = { "email": "test@gmail.com", "name": "Test", 'password': 'tTTt' } res = self.client.post(CREATE_USER_URL, payload) self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST) user_exitst = get_user_model().objects.filter( email=payload['email'] ).exists() self.assertFalse(user_exitst)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_random_password():\n output = sh.random_password()\n assert isinstance(output, str) is True\n assert len(output) == 16", "def test_valid_password(self):\n pass_field = Field(\"\".join(['x' * (int(self.app.config['MAX_PWD_LEN']))]))\n\n valid_password(None, pass_field)", "def tes...
[ "0.78173095", "0.7571552", "0.75606406", "0.75471747", "0.7364602", "0.7311012", "0.7281988", "0.7226423", "0.7189455", "0.7154671", "0.70966953", "0.70853573", "0.7078164", "0.70628875", "0.7048393", "0.7000666", "0.6985219", "0.69709235", "0.6954122", "0.69389814", "0.69250...
0.65491563
61
TEST IF THE AUTH IS REQUIRED
def test_retrieve_user_unauthorized(self): res = self.client.get(ME_URL) self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_auth():", "def _check_auth(self):\n if self.authToken:\n return True\n else:\n msg = \"you need to login\"\n self.raise_error(msg)", "def requires_auth(self):\n return True", "def check_auth(username, password):\n # return username == app...
[ "0.84601325", "0.7771871", "0.77694154", "0.76264864", "0.7594435", "0.75779384", "0.7522689", "0.75105244", "0.7504356", "0.7393126", "0.7392858", "0.7392858", "0.7383449", "0.7368314", "0.7348574", "0.7347608", "0.7333207", "0.7320931", "0.7320824", "0.73014426", "0.7292921...
0.0
-1
TEST UPDATE FOR AUTHENTICATED USER
def test_update_user_profile(self): payload = {"name": "Lucifer", 'password': "12346987"} res = self.client.patch(ME_URL, payload) self.user.refresh_from_db() self.assertEqual(self.user.name, payload['name']) self.assertTrue(self.user.check_password(payload['password'])) self.assertEqual(res.status_code, status.HTTP_200_OK)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_update_user(self):\n pass", "def test_user_update_request(self):\n pass", "def test_update(self):\n user = self.custodian_1_user\n user_client = self.custodian_1_client\n urls = [reverse('api:user-detail', kwargs={'pk': user.pk})]\n new_first_name = \"New Firs...
[ "0.845656", "0.79875004", "0.7690915", "0.76854897", "0.76540387", "0.7620719", "0.7569771", "0.75459176", "0.7516532", "0.7494765", "0.74924505", "0.74789107", "0.7471998", "0.7468943", "0.7455768", "0.7437972", "0.7426278", "0.7419078", "0.74119693", "0.74057174", "0.73813"...
0.74192697
17
Log control data at each step during evaluation.
def _log_control_data(self, action, global_reward): action_r = ','.join(['%d' % a for a in action]) cur_control = {'episode': self.cur_episode, 'step': self.t, 'action': action_r, 'reward': global_reward} self.control_data.append(cur_control)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _log(self, data):\n if self.log_data is not None:\n self.log_data(data)", "def on_eval_batch_begin(self, step, logs=None):", "def on_eval_begin(self, logs=None):", "def log_eval(self, epoch, dataset_name):\n pass", "def record(self, step):", "def on_log(self):\n monito...
[ "0.6422396", "0.6307403", "0.6306143", "0.6290289", "0.6281441", "0.6219549", "0.61849844", "0.61498964", "0.6084978", "0.6076392", "0.60656595", "0.60503274", "0.5975851", "0.597538", "0.59376127", "0.5935147", "0.5904615", "0.58918566", "0.5876711", "0.5858225", "0.58474666...
0.65032554
0
Returns agents fingerprints (policies).
def get_fingerprint(self): return self.fp
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _gpg_fingerprints(self) -> List[str]:\n return self._gpg_keys.fingerprints", "def get_fingerprints(self, jid: JID) -> List[str]:\n return []", "def get_hostfingerprint_list(self):\n return self.hostfingerprint", "def get_policies():\r\n policy = policies.values()\r\n return pol...
[ "0.59994614", "0.588699", "0.5650054", "0.5606896", "0.55353206", "0.5501572", "0.5438777", "0.5225652", "0.52099985", "0.51455563", "0.5142792", "0.51344025", "0.5094674", "0.50501364", "0.50284547", "0.49927717", "0.49804366", "0.49701825", "0.49695376", "0.49271792", "0.49...
0.0
-1
Get actions of each agents neighbour in the graph.
def get_neighbor_action(self, action): naction = [] for i in range(self.n_agent): naction.append(action[self.neighbor_mask[i] == 1]) return naction
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def actions(self) -> list:\n if self.debug: print(f\"StateNode.actions()\")\n if not self._examined:\n if self.debug: print(f\"\\tExamining...\")\n self._edges = self.state.actions()\n for e in self._edges:\n e: Action\n e.source = self\n...
[ "0.7049442", "0.6467031", "0.643074", "0.6362425", "0.62686896", "0.6207406", "0.6155371", "0.6118249", "0.6094078", "0.6003385", "0.59684646", "0.59684646", "0.59386194", "0.5927922", "0.5917781", "0.58938307", "0.58807933", "0.5880295", "0.5880295", "0.5880295", "0.5880295"...
0.71311367
0
Return the apprpriate observations to the agents depending on the type of algoithm being run. params
def _get_state(self, obs_env): state = [] obs_env = obs_env.reshape(self.n_agent, 2) for i in range(self.n_agent): local_obs = obs_env[i] if self.agent.startswith('ia2c'): imgs = [local_obs] if not self.agent == 'ia2c_fp': # ia2c for j in np.where(self.neighbor_mask[i] == 1)[0]: imgs.append(obs_env[j]) imgs = np.array(imgs, dtype=np.float32) fps = np.array([], dtype=np.float32) else: # ia2c_fp fps = [] for j in np.where(self.neighbor_mask[i] == 1)[0]: imgs.append(obs_env[j]) fps.append(self.fp[j]) imgs = np.array(imgs, dtype=np.float32) fps = np.concatenate(fps).astype(np.float32) agent_obs = [imgs, fps] else: # ma2c agent_obs = local_obs.astype(np.float32) state.append(agent_obs) return state # return [[obs_env, np.array([], dtype=np.float32)] for _ in range(self.n_agent)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def collect_experiences(self):\n for i in range(self.num_frames_per_proc):\n # Do one agent-environment interaction\n\n preprocessed_obs0 = self.preprocess_obss(self.obs0, device=self.device)\n \n preprocessed_obs1 = self.preprocess_obss(self.obs1, device=self.dev...
[ "0.5695615", "0.5606592", "0.56018066", "0.5594157", "0.55773884", "0.55531305", "0.5503704", "0.549341", "0.54833376", "0.54497266", "0.5354468", "0.5281192", "0.52682257", "0.52564496", "0.5245968", "0.52178377", "0.52174217", "0.52096766", "0.52088886", "0.5184523", "0.518...
0.0
-1
Save control data from evaluation to disk.
def output_data(self): if not self.is_record: logging.error('Env: no record to output!') else: control_data = pd.DataFrame(self.control_data) control_data.to_csv(self.output_path + ('%s_%s_control.csv' % (self.name, self.agent)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_data(self):\n pass", "def saveData(self):\n pass", "def save(self) -> None:\n self.saver.save_model_and_weights(self.model)\n self.saver.save_data_shuffle_indices(\n self.data.eval_shuffler.ds_inds\n )\n self.saver.save_input_scaler(self.data.x....
[ "0.6351816", "0.6287839", "0.6247706", "0.6224278", "0.6199671", "0.6142667", "0.61253536", "0.61243397", "0.61077684", "0.6106945", "0.6079188", "0.6070314", "0.6067908", "0.6066707", "0.60651135", "0.6057868", "0.6051476", "0.6047747", "0.6005017", "0.59850013", "0.5973669"...
0.0
-1
Reset environment state, set new random seeds, reset metrics, update episode counter etc.
def reset(self, gui=False, test_ind=-1): # self.gui = gui # if gui: # # save episode to disk # if self._global_frames: # make_video_from_rgb_imgs(self._global_frames, self.output_path, f"episode_global_{self.cur_episode}") # for agent_id, frames in self._agent_frames.items(): # if frames: # make_video_from_rgb_imgs(frames, self.output_path, f"episode_{self.cur_episode}_{agent_id}") # # clear frames of previous episode # self._global_frames = [] # self._agent_frames = {agent_id: [] for agent_id in self.agent_tags} if (self.train_mode): seed = self.seed elif (test_ind < 0): seed = self.seed-1 else: seed = self.test_seeds[test_ind] np.random.seed(seed) self.seed += 1 self.cur_episode += 1 self.t = 0 # step counter for each episode self.rewards = [0] # to keep track of global rewards obs = self.env.reset(done_only=False).cpu().numpy() # if self.gui: # self._global_frames.append(self.env.map_to_colors().astype(np.uint8)) # for agent_id, agent_obs in obs.items(): # self._agent_frames[agent_id].append(agent_obs.astype(np.uint8)) # obs = list(obs.values()) obs = self._get_state(obs) # new return obs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _reset_seeds(self) -> None:\n self._seeds = [None for _ in range(self.num_envs)]", "def _hard_reset(self):\n self._reset_specific_envs(np.ones_like(self.episodes_done))\n self._update_other_info()", "def _soft_reset(self):\n self._reset_specific_envs(self.episodes_done)\n ...
[ "0.76082146", "0.74018556", "0.71205187", "0.70204157", "0.6981054", "0.6923713", "0.6894734", "0.68613243", "0.68281037", "0.6792506", "0.67748046", "0.67734873", "0.6745905", "0.6744908", "0.67360026", "0.6713903", "0.6712834", "0.66821176", "0.6635531", "0.65819466", "0.65...
0.61540705
80
not used in ssd.
def terminate(self): return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def use(self):", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__(self):\n\t\treturn", "def exo2():", "def __call__(self) -> None:", "def support(self):", "def sth():", "def cx():", "def __call__(self):\n pass", "def __cal...
[ "0.6666852", "0.6380146", "0.6380146", "0.6380146", "0.6380146", "0.6380146", "0.6362416", "0.6315402", "0.6302781", "0.62878007", "0.6131016", "0.6083334", "0.6069174", "0.6069174", "0.60661525", "0.60107166", "0.60062444", "0.59841406", "0.59841406", "0.59841406", "0.598414...
0.0
-1
Sets agents fingerprints (policies); distributions over actions given the current state.
def update_fingerprint(self, fp): self.fp = fp
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def step(self):\n\t\tnumpy.random.shuffle(self.agents_list)\n\t\tfor agent in self.agents_list:\n\t\t\tagent.produce()\n\t\tfor agent in self.agents_list:\n\t\t\tagent.charge()\n\t\tfor agent in self.agents_list:\n\t\t\tif agent.strategy == 0: \n\t\t\t\tagent.retribute()\n\t\tfor agent in self.agents_list:\n\t\t\t...
[ "0.57742363", "0.5736501", "0.5717755", "0.5712141", "0.5538278", "0.5537794", "0.5531449", "0.55293477", "0.55289406", "0.5468341", "0.5435091", "0.53988713", "0.5367964", "0.53655124", "0.53448045", "0.5333574", "0.5322495", "0.53154963", "0.5305376", "0.52951205", "0.52815...
0.0
-1
! resources object of Resources class contain resources from config file options object of MergeOptions class contain merge options from config file str_name default value same as the class name "SynsetsSUMOMerger2"
def __init__(self, resources, options, str_name = 'SynsetsSUMOMerger2'): super(SynsetsSUMOMerger2, self).__init__(resources, options, str_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resources(self):", "def register_resources(self, resources):\n from tw.api import merge_resources\n merge_resources(self.request_local.resources, resources)", "def MergeLogic(self) -> str:", "def _merge_resource(self, resource, desired, unmanaged):\n unmanaged_resource = unmanaged[re...
[ "0.5547208", "0.54064465", "0.5403034", "0.53563666", "0.53249174", "0.5320973", "0.53159565", "0.5253915", "0.5152775", "0.51470643", "0.5145946", "0.5095782", "0.50935775", "0.5056793", "0.501863", "0.50170356", "0.49941415", "0.4964399", "0.49395525", "0.4936977", "0.49245...
0.82576424
0
! Create dictionary based on mapping PLWN on SUMO ontology file. Dictionary format and mapping PLWN on SUMO ontology file format are presented below.
def get_plwn2sumo_dict(self): if not os.path.exists(self.resources().mapping_sumo_file()): raise IOError( "%s file not found!" % \ self.resources().mapping_sumo_file() ) plwn2sumo_dict = defaultdict(set) with open(self.resources().mapping_sumo_file()) as sumofile: next(sumofile) for line in sumofile: synset_id = int(line.strip().split(';')[0]) sumo = line.strip().split(';')[-2] plwn2sumo_dict[sumo].add(synset_id) return plwn2sumo_dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mapping_stratum(download_files =True):\r\n # get code description _index \r\n ix_= AGSO_PROPERTIES['props_codes'].index('name')\r\n def mfunc_(d): \r\n \"\"\" Set individual layer in dict of properties \"\"\"\r\n _p= {c: k.lower() if c not in ('code', 'label', 'name') else k \r\n ...
[ "0.61954165", "0.5963542", "0.5888741", "0.5863957", "0.58637106", "0.58131486", "0.5805316", "0.57554185", "0.5731265", "0.57232267", "0.5722358", "0.5715003", "0.5702462", "0.5668924", "0.56644607", "0.5656563", "0.56421024", "0.56056917", "0.55921763", "0.558435", "0.55836...
0.6826442
0
! Merge two given graphs, namely synsets graph and SUMO graph. The final graph contain one type of nodes, namely synsets nodes. Each synset node has an attribute named "synset",
def merge(self, g1, g2): logger = logging.getLogger(__name__) g = BaseGraph() g.copy_graph_from(g1) plwn2sumo_dict = defaultdict(set) plwn2sumo_dict = self.get_plwn2sumo_dict() synset_on_vertex_dict = {} for node in g.all_nodes(): synset_id = node.synset.synset_id if synset_id in synset_on_vertex_dict: logger.warning("ID of some synset is not unique.") continue synset_on_vertex_dict[synset_id] = node num_of_edge = 0 for edge in g2.all_edges(): num_of_edge += 1 logger.info("%d/%d", num_of_edge, g2.num_edges()) parent_sumo_concept = edge.source().sumo child_sumo_concept = edge.target().sumo if parent_sumo_concept not in plwn2sumo_dict: logger.warning("The mapping file doesn't contain sumo concept '%s'.", parent_sumo_concept) continue if child_sumo_concept not in plwn2sumo_dict: logger.warning("The mapping file doesn't contain sumo concept '%s'.", child_sumo_concept) continue for parent_syn_id in plwn2sumo_dict[parent_sumo_concept]: if parent_syn_id not in synset_on_vertex_dict: logger.warning("The mapping file contains synset '%d' that is not in the graph.", parent_syn_id) continue p_node = synset_on_vertex_dict[parent_syn_id] for child_syn_id in plwn2sumo_dict[child_sumo_concept]: if child_syn_id not in synset_on_vertex_dict: logger.warning("The mapping file contains synset '%d' that is not in the graph.", child_syn_id) continue ch_node = synset_on_vertex_dict[child_syn_id] g.add_edge(p_node, ch_node, [("rel", edge.rel)], simply=True) return g
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def merge_graph(self, other):\n self.add_nodes( (nLabel,nInfo) for nLabel,nInfo in other.nodes() )\n \n for nLabel,nInfo in other.nodes():\n for edgeLabel,edgeInfo in other.edgesFrom(nLabel):\n self.add_edge(edgeLabel,edgeInfo)", "def merge(self, other: \"GraphSet\")...
[ "0.6905344", "0.6667315", "0.66233647", "0.654952", "0.6426134", "0.6303256", "0.63004285", "0.6278962", "0.6188941", "0.6159831", "0.61385214", "0.60857415", "0.60827035", "0.59966093", "0.5975047", "0.58856934", "0.5844211", "0.58405745", "0.58178586", "0.57996076", "0.5799...
0.77939695
0
Downloiad the page at given URL
def get_page(self, url): """ @param url: Url we want to crawl""" """ @type url: String """ """@return the page""" try: u = urlopen(url) html = u.read().decode('utf-8') # except Exception as e: # logging.exception(e) finally: print("Closing") u.close() return html
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def redirect(url):", "def goto(self, page: str):\n self.get(urllib.parse.urljoin(settings.config.host, page))", "def __open_page(self, url):\n try:\n # Opens the url\n page = request.urlopen(url)\n except Exception as e:\n print(e, url)\n return ...
[ "0.6163118", "0.5895792", "0.5818599", "0.58166754", "0.5812692", "0.57281524", "0.5682992", "0.5658968", "0.56213826", "0.5620462", "0.5577548", "0.5556134", "0.55078506", "0.5499445", "0.5490812", "0.547787", "0.54733974", "0.5458218", "0.54565185", "0.5416221", "0.54098094...
0.5585596
10
Returns the BeautifulSoup object of the given page
def get_soup(self, html): if html is not None: soup = BeautifulSoup(html, "html.parser") return soup else: return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def page_soup(page):\n return bs4.BeautifulSoup(page, 'html.parser')", "def _get_soup(self, page=''):\n content = requests.get('%s/%s' % (BASE_URL, page)).text\n return BeautifulSoup(content)", "def get_soup(page='1'):\n content = requests.get('%s/?page=%s' % (BASE_URL, page)).text\n ret...
[ "0.8628668", "0.84880644", "0.84137076", "0.8070522", "0.80435354", "0.79836047", "0.78635615", "0.78475887", "0.747077", "0.73754066", "0.72366697", "0.72015417", "0.714936", "0.71203345", "0.7103755", "0.7096957", "0.70346177", "0.7005713", "0.6984478", "0.6955197", "0.6935...
0.7005158
18
Get the links of interest from the given Beuti
def get_links(self, soup): """ @param soup: BeautifulSoup object that cointains the targeted links """ """ @type soup: BeautifulSoup object """ for link in soup.select('a[href^="https://"]'): # All links which have a href element href = link.get('href') # The actually href element of the link if not any(href.endswith(x) for x in ['.csv', '.xls', '.xlsx']): print("No excel") continue if not href in self.url_queue: self.url_queue.append(href) # Add the URL to our queue
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def iter_links(self):", "def get_links() -> list:\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36\",\n \"Accept\": \"text/html\",\n \"Accept-Encoding\": \"gzip, deflate\",\n }\n ...
[ "0.66900754", "0.6568194", "0.6410215", "0.6353673", "0.6319181", "0.63110644", "0.6267211", "0.6240298", "0.62356913", "0.62124115", "0.611109", "0.60906845", "0.60674167", "0.6038508", "0.60245275", "0.60238755", "0.60237354", "0.6021248", "0.60163826", "0.6006266", "0.5980...
0.5816899
41
Create a temp folder to download
def get_files(self): # self.folder= +str(int(time.time())) if not os.path.exists(self.folder): os.mkdir(self.folder) while len(self.url_queue): # If we have URLs to crawl - we crawl href = self.url_queue.popleft() # We grab a URL from the left of the list filename = href.rsplit('/', 1)[-1] print("Downloading %s to %s..." % (href, filename)) fullname = os.path.join(self.folder, filename) urlretrieve(href, fullname) self.xlfnames.append(filename)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_temp_folder():\n path_join = os.path.join(tempfile.gettempdir(), id_generator(5))\n os.makedirs(path_join)\n return path_join", "def get_tmp():\n\n # tmp = os.getcwd()\n # if Settings.get_download_path() != \"\":\n # tmp = os.path.join(Settings.get_download_path(), \"...
[ "0.74962807", "0.73743665", "0.72294396", "0.7167176", "0.71236885", "0.7032736", "0.7019592", "0.6932763", "0.6831103", "0.6811089", "0.68001753", "0.6794936", "0.67714673", "0.6741706", "0.67387897", "0.67336327", "0.67199904", "0.6681555", "0.66798896", "0.66495675", "0.66...
0.0
-1
downloads the htmlpage and looks for the links with excel files
def run_downloader(self): """calls to the file downloader""" try: html = self.get_page(self.url) soup = self.get_soup(html) if soup is not None: # If we have soup - self.get_links(soup) self.get_files() else: self.producer("THESS_ENV_CITYOFTHESS_DAILY_YEARLY_DATA_ERROR", 'data source format is not as expected', e) return False except Exception as e: self.producer("THESS_ENV_CITYOFTHESS_DAILY_YEARLY_DATA_ERROR", 'data source format is not as expected', e) return False return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_pages():\n\n excel_filename = 'Result_' + datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S') + '.xlsx'\n workbook = xlsxwriter.Workbook(excel_filename)\n worksheet_all = workbook.add_worksheet()\n\n create_headers(worksheet_all, workbook)\n\n row = 1\n col = 0\n\n cell_format = wo...
[ "0.6434533", "0.6253483", "0.613989", "0.61118245", "0.6107879", "0.605488", "0.60413843", "0.60022914", "0.5989048", "0.59720886", "0.5906197", "0.5898894", "0.5878793", "0.58744365", "0.5851572", "0.58209777", "0.57681555", "0.5762606", "0.57579845", "0.5752446", "0.5693416...
0.60851127
5