Dataset columns:
  query            string (9 to 3.4k characters)
  document         string (9 to 87.4k characters)
  metadata         dict
  negatives        list (4 to 101 items)
  negative_scores  list (4 to 101 items)
  document_score   string (3 to 10 characters)
  document_rank    string (102 distinct values)
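The columns above describe a retrieval-style dataset: each record pairs a natural-language query with a positive code document, a set of hard-negative documents, and their scores. The sketch below shows the assumed shape of one record as a plain Python dict; the values are shortened or invented for illustration and are not an actual row.

# Hypothetical shape of a single record, matching the columns listed above (values invented/shortened).
row = {
    "query": "Return sender of serialized message",        # natural-language description
    "document": "def get_sender(cls, message): ...",       # positive code snippet
    "metadata": {"objective": {"self": [], "paired": [],
                               "triplet": [["query", "document", "negatives"]]}},
    "negatives": ["def __get_sender_id(self): ...",        # hard-negative code snippets
                  "def sender(self): ..."],
    "negative_scores": ["0.71", "0.66"],                   # scores of the negatives (appear to be retrieval similarities)
    "document_score": "0.73",                              # score of the positive document
    "document_rank": "0",                                  # rank of the positive among all candidates
}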
Transforms message into PlatformMessage object
def parse(cls, message): if isinstance(message, PlatformMessage): inst = PlatformMessage.parse(message.serialize()) return inst inst = PlatformMessage() if message is not None: assert isinstance(message, (list, tuple)), "Message is expected to be a list ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def message_to_python(self, raw_message):\n return self.Message(self, raw_message)", "def get_interface(cls, message):\r\n if message is not None:\r\n if isinstance(message, PlatformMessage):\r\n return message.interface\r\n assert isinstance(message, (list, tup...
[ "0.66961044", "0.61725485", "0.61444753", "0.6085341", "0.60676634", "0.59354925", "0.5832414", "0.58319014", "0.58083576", "0.57996356", "0.5798074", "0.5764045", "0.57599103", "0.5742993", "0.5738603", "0.57220566", "0.5719687", "0.5718132", "0.57123685", "0.5708723", "0.57...
0.7441321
0
Return sender of serialized message
def get_sender(cls, message): if message is not None: if isinstance(message, PlatformMessage): return message.sender assert isinstance(message, (list, tuple)), "Message is expected to be a list or a tuple" assert len(message) >= 4, "Message's length expec...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __get_sender_id(self):\n return self.__sender_id", "def get_sender_email(message):\r\n message_headers = message['payload']['headers']\r\n for header in message_headers:\r\n if header['name'] == 'From':\r\n return header['value']", "def sender(self):\n key, alt = ('Sen...
[ "0.7114796", "0.6647468", "0.6598164", "0.6566656", "0.65436935", "0.65187633", "0.65110844", "0.65012693", "0.6501094", "0.64227396", "0.640637", "0.6369679", "0.6141742", "0.6132606", "0.61057776", "0.6090527", "0.6054466", "0.6002109", "0.5983544", "0.5983544", "0.59530574...
0.7315821
0
Return interface of serialized message
def get_interface(cls, message): if message is not None: if isinstance(message, PlatformMessage): return message.interface assert isinstance(message, (list, tuple)), "Message is expected to be a list or a tuple" assert len(message) >= 4, "Message's length...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def intf_get_notif_serializer():\n serializer = sl_interface_pb2.SLInterfaceGetNotifMsg()\n return serializer", "def _recv_serialized(self, socket):\n msg = pickle.loads(socket.recv())\n return msg", "def _stringify_proto(obj):\n if isinstance(obj, str): return obj\n elif isinstance(o...
[ "0.6683183", "0.6607191", "0.6602124", "0.6388921", "0.6382543", "0.63370854", "0.6267809", "0.61996573", "0.6124349", "0.6091265", "0.6090545", "0.607747", "0.60588247", "0.6057081", "0.6040386", "0.6030836", "0.60173637", "0.5962537", "0.59547883", "0.5940483", "0.5939426",...
0.58528733
27
Return method of serialized message
def get_method(cls, message): if message is not None: if isinstance(message, PlatformMessage): return message.method assert isinstance(message, (list, tuple)), "Message is expected to be a list or a tuple" assert len(message) >= 4, "Message's length expec...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get( self ):\n return self.__to_message_function( self.__raw_payload )", "def serialize_message(self) -> bytes:\n return self.compile_message().serialize()", "def __message_content__(self) -> MessageContent:", "def parse(self, serialized):\n raise NotImplementedError(\"Calling an abstrac...
[ "0.64785546", "0.6399759", "0.6097286", "0.6095025", "0.6048653", "0.5926648", "0.5798314", "0.5763225", "0.5758533", "0.5757418", "0.5754897", "0.57526416", "0.5739287", "0.5705692", "0.570088", "0.5696248", "0.5656703", "0.5653811", "0.56207484", "0.56186205", "0.5613904", ...
0.6116328
2
Return args of serialized message
def get_args(cls, message): if message is not None: if isinstance(message, PlatformMessage): return message.args assert isinstance(message, (list, tuple)), "Message is expected to be a list or a tuple" assert len(message) >= 4, "Message's length expected ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _func_serialize(self, args):\n return args", "def func_deserialize(self, args): # pragma: no cover\n if len(args) == 0:\n return []\n x = eval(args.decode(\"utf-8\"))\n return x", "def toArgs(self):\n # FIXME - undocumented exception\n post_args = self....
[ "0.6681042", "0.6476981", "0.6381497", "0.6343124", "0.63172007", "0.6315931", "0.6190981", "0.61659044", "0.61323375", "0.61323375", "0.61323375", "0.6127745", "0.60635227", "0.6060811", "0.5984353", "0.59649163", "0.5958156", "0.593882", "0.59370756", "0.5936084", "0.591632...
0.7136301
0
Return kwargs of serialized message
def get_kwargs(cls, message): if message is not None: if isinstance(message, PlatformMessage): return message.kwargs assert isinstance(message, (list, tuple)), "Message is expected to be a list or a tuple" assert len(message) >= 4, "Message's length expec...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def toArgs(self):\n # FIXME - undocumented exception\n post_args = self.toPostArgs()\n kvargs = {}\n for k, v in post_args.items():\n if not k.startswith('openid.'):\n raise ValueError(\n 'This message can only be encoded as a POST, because i...
[ "0.6532804", "0.63777333", "0.6250696", "0.61953866", "0.60052204", "0.60052204", "0.59744835", "0.59685344", "0.59614235", "0.59334373", "0.59222966", "0.5830837", "0.5793464", "0.579085", "0.57832605", "0.57062554", "0.5704295", "0.57024705", "0.56911284", "0.5642286", "0.5...
0.721355
0
Transforms self into a list of key field values
def serialize(self): return [self._signature, self.sender, self.interface, self.method, self.args, self.kwargs]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def values(self):\r\n return [self[k] for k in self]", "def items(self):\r\n return [(k, self[k]) for k in self]", "def items(self):\r\n L = []\r\n for key, value in self.data.items():\r\n o = key()\r\n if o is not None:\r\n L.append((o, value))\...
[ "0.67881566", "0.6781893", "0.6743758", "0.6743758", "0.6675744", "0.6648665", "0.66283107", "0.6555832", "0.65546185", "0.65546185", "0.65546185", "0.6533801", "0.6497453", "0.64785767", "0.6443632", "0.6415694", "0.64099073", "0.64046633", "0.63925886", "0.63925886", "0.638...
0.0
-1
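The serialize() method above fixes the positional layout that the class-method getters in the earlier records (get_sender, get_interface, get_method, get_args, get_kwargs) read back from a serialized message. A minimal sketch of that layout, assuming the field order shown in serialize(); msg is a hypothetical PlatformMessage instance.

# Assumed layout of a serialized PlatformMessage, inferred from serialize() above:
# [signature, sender, interface, method, args, kwargs]
serialized = msg.serialize()
sender    = serialized[1]   # what PlatformMessage.get_sender(serialized) returns
interface = serialized[2]   # what PlatformMessage.get_interface(serialized) returns
method    = serialized[3]   # what PlatformMessage.get_method(serialized) returns
args      = serialized[4]   # what PlatformMessage.get_args(serialized) returns
kwargs    = serialized[5]   # what PlatformMessage.get_kwargs(serialized) returns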
Creates message with reply indicating successful ending of method call
def success(cls, retval, retvalname='value'): if isinstance(retval, dict) and retvalname is None: retval["__result__"] = "success" # TODO: right here just modified input dict. That's not good else: retval = {"__result__": "success", retvalname: retval} return Plat...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def finished(self, reply):\n pass", "def endMessage(self):", "def end():\n return say()", "def finish(self, message):\n self.stdout = message\n self.returncode = 0", "def reply_object():\n reply_object = {\"code\": \"\"}\n return reply_object", "def acknowledgement(self, mes...
[ "0.69738877", "0.67656934", "0.6293983", "0.6158718", "0.6092211", "0.60331434", "0.59224075", "0.59184587", "0.5894392", "0.58468145", "0.58453137", "0.5838691", "0.58309144", "0.58224344", "0.581687", "0.5788007", "0.57852334", "0.57048154", "0.5697229", "0.56179005", "0.56...
0.55243546
29
Creates message with reply indicating failing ending of method call
def failure(cls, state, errcode=-1): return PlatformMessage(method="__reply__", kwargs={"__result__": "fail", "state": state, "errcode": errcode})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def endMessage(self):", "def finished(self, reply):\n pass", "def end():\n return say()", "def end(update, context) -> int:\n update.callback_query.edit_message_text(\n 'Bye! I hope we can talk again some day.')\n\n logger.info(\"User [%s] exited the conversation, [Exit], from [Main Me...
[ "0.7095744", "0.69297856", "0.66436666", "0.62102324", "0.6166731", "0.6165642", "0.60455865", "0.5994747", "0.5920864", "0.5850099", "0.5848156", "0.5846051", "0.5840882", "0.5801606", "0.5791126", "0.57714516", "0.5769898", "0.5752875", "0.5749393", "0.5738365", "0.5730379"...
0.0
-1
Creates message with reply indicating failing ending of method call by exception
def failure_exception(cls, state, exception): return PlatformMessage(method="__reply__", kwargs={"__result__": "fail", "state": state, "errcode": -2, "e": exception})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def endMessage(self):", "def end():\n return say()", "def exit_with_message(message: str) -> NoReturn:\n raise CallParseError(input_string, message)", "def finished(self, reply):\n pass", "def sendErrorMessage(msg): #@NoSelf", "def whenException(self, channel, call):", "def abort(self...
[ "0.64972866", "0.60457224", "0.59910274", "0.5865661", "0.5790894", "0.5711769", "0.56799406", "0.5674615", "0.56623536", "0.56480086", "0.5638144", "0.5630722", "0.563067", "0.5607326", "0.5588698", "0.5577028", "0.5576119", "0.55424887", "0.5533531", "0.55177295", "0.549935...
0.521757
74
Creates message with reply indicating some stage of method call is taking place
def notify(cls, state): return PlatformMessage(method="__reply__", kwargs={"state": state})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __call__(self, answer):\n method_name = 'on_%s' % sanitize_answer(answer)\n method = getattr(self, method_name, None)\n if method:\n msg = method()\n else:\n msg = self.default(answer)\n return msg", "def Message(self, *args, **kwargs):\n pass",...
[ "0.5911461", "0.5875573", "0.5865693", "0.5813939", "0.58092356", "0.58092356", "0.58038336", "0.57274956", "0.5696109", "0.56097317", "0.56019354", "0.55925906", "0.5573429", "0.55502784", "0.55441946", "0.5507472", "0.54945314", "0.5474131", "0.547276", "0.5455448", "0.5434...
0.57788986
7
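The four factory methods above (success, failure, failure_exception, notify) all build "__reply__" messages whose kwargs follow one small convention. The sketch below shows how a receiver might read that convention back; the helper name is invented and not part of the original code.

# Invented helper illustrating the reply kwargs built by the factory methods above.
def describe_reply(kwargs):
    result = kwargs.get("__result__")            # "success", "fail", or absent for notify()
    if result == "success":
        return "call succeeded"
    if result == "fail":
        if kwargs.get("errcode") == -2:          # failure_exception() uses errcode -2 and key "e"
            return "call failed with exception: {}".format(kwargs.get("e"))
        return "call failed, errcode {}".format(kwargs.get("errcode"))
    return "progress notification, state: {}".format(kwargs.get("state"))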
Returns conversation for a thread
def conversation(self, thread):
    assert isinstance(thread, int) and 0 <= thread < len(self._threads), \
        "Thread {} don't exists at channel {}!".format(thread, self.name)
    return self._threads[thread]["conversation"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_thread_for_message(id):\n query = 'SELECT thread_id from messages WHERE id like %s'\n return __perform__(query, (id,), method='fetchone')", "def find_all_messages_in_thread_for_person(cnx: db_connector, thread_id: int, is_singlechat: bool):\n result = []\n cursor = cnx.cursor(buffered...
[ "0.65898234", "0.6572362", "0.6492414", "0.64143395", "0.6396454", "0.60996646", "0.60811704", "0.6057928", "0.58519685", "0.57067835", "0.5651671", "0.5620204", "0.55983543", "0.5575817", "0.55702025", "0.55671215", "0.55500007", "0.5483425", "0.54719275", "0.5438706", "0.54...
0.8358802
0
Subscribes specified instance to channel
def subscribe(self, inst):
    if inst not in self._subscribers:
        self._subscribers.append(inst)
        vprint("{} is subscribed to {}".format(inst.name, self.name))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subscribe(self, channel, **kwargs):\n pass", "def subscribe(self, inst, channel):\r\n if channel not in self._channels:\r\n self._channels[channel] = TalkChannel(channel, print_messages=self.verbose, timeref=self._timeref)\r\n self._channels[channel].subscribe(inst)", "def s...
[ "0.73403704", "0.7077951", "0.6784527", "0.6784527", "0.6784527", "0.65946174", "0.65910834", "0.6586851", "0.63931453", "0.6311332", "0.62572837", "0.62503433", "0.62227", "0.61942494", "0.6113197", "0.6018034", "0.6016623", "0.5920134", "0.5876832", "0.587313", "0.58557457"...
0.6719197
5
Unsubscribes specified instance from channel
def unsubscribe(self, inst):
    if inst in self._subscribers:
        self._subscribers.remove(inst)
        vprint("{} is unsubscribed from {}".format(inst.name, self.name))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unsubscribe(self, channel, update_handler=None):\n pass", "def unsubscribe(self, inst, channel):\r\n if channel not in self._channels:\r\n raise ValueError(\"Channel {} not exists!\".format(channel))\r\n self._channels[channel].unsubscribe(inst)\r\n return\r\n # ...
[ "0.77895993", "0.7697996", "0.7457277", "0.7457277", "0.7457277", "0.7457277", "0.7457277", "0.7024203", "0.6909685", "0.68771696", "0.6872388", "0.68624234", "0.6827826", "0.67210144", "0.67210144", "0.66898584", "0.66726786", "0.66592056", "0.6605039", "0.66029906", "0.6552...
0.74092454
7
Sends message into thread
def send_message(self, context, message): if context.channel == "__void__": return if self._busy: self._queue.append((context, message)) return thread = context.thread _msg = message message = message.serialize() self._busy = T...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def send(self, message):", "def send_to_gui(self, message):\n message.on_thread_side()\n self.queue.put(message)\n self.sig.set()\n logger.debug(\"Message %r has been send to GUI\", message.message_id)", "def _send(self, message):\n logger.info(message)\n self.bu...
[ "0.7251886", "0.7028535", "0.7008835", "0.70002615", "0.69917643", "0.69660854", "0.69660854", "0.69660854", "0.6932752", "0.69234616", "0.6904757", "0.68988985", "0.68747884", "0.68623", "0.68569106", "0.68312794", "0.6817026", "0.6795177", "0.6756574", "0.6755073", "0.67476...
0.6107267
100
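send_message() above parks messages in a queue while the channel is busy delivering an earlier one. Below is a minimal sketch of that busy/queue pattern; the deliver() call and the drain loop are assumptions, since the original body is truncated.

# Minimal sketch of the busy/queue pattern visible in send_message(); deliver() is hypothetical.
def send(channel, context, message):
    if channel._busy:
        channel._queue.append((context, message))   # park the send until the channel is free
        return
    channel._busy = True
    try:
        channel.deliver(context, message.serialize())
        while channel._queue:                        # drain parked sends (assumed; original is truncated)
            ctx, msg = channel._queue.pop(0)
            channel.deliver(ctx, msg.serialize())
    finally:
        channel._busy = False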
Incoming message handler that can be overridden by derived classes
def _incoming_handler(self, context, message, fake_reply): return self._map[message.method](context, fake_reply, *message.args, **message.kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle(self, message):", "def handle_message(self, message):", "def handle_message(self, msg):\n pass", "def processMessage(self, *args, **kwargs):\r\n pass", "def handleMessage(msg):", "def process(self, msg):\n print \"HANDLER: received a msg: %s\" % msg", "def handle(self, m...
[ "0.76674205", "0.7387368", "0.72166294", "0.7163365", "0.70100635", "0.6967958", "0.69566435", "0.67521715", "0.6731579", "0.6724816", "0.66986245", "0.6680195", "0.66316897", "0.6626075", "0.6598926", "0.65867203", "0.65795434", "0.65649223", "0.656297", "0.655957", "0.65346...
0.6481269
25
Common method for handling incoming messages from a talk channel. To customize, redefine _incoming_handler.
def incoming(self, context, message, fake_reply=None): if message.interface != self._id: return False if message.is_reply: return False if message.method not in self._methods: eprint("{}:{} Unsupported method {}".format(self._host.name, self._name, messa...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_incoming_message(obj, reply_channel):\n if int(obj[message_type_key]) == 0:\n try:\n sub_obj = create_subscriber_object(reply_channel, obj)\n subscribers[reply_channel.name] = sub_obj\n except ApiException as exc:\n send_save_to_channel(reply_channel, st...
[ "0.7198676", "0.70006275", "0.68786156", "0.6589068", "0.6574055", "0.6566164", "0.654098", "0.65259284", "0.6513632", "0.64729154", "0.64671236", "0.644941", "0.6425101", "0.64106405", "0.6365977", "0.63636965", "0.6342746", "0.6326662", "0.6321269", "0.6301516", "0.6211517"...
0.6243993
20
Prepares message for fake_next_op method
def fake_op_message(interface, reply, on_channel=None, on_message=None, after=None, execute=False, on_success=None, on_failure=None): assert isinstance(interface, str), "fake_op_info: interface should be a string" assert isinstance(reply, ProtocolReply), "fake_op_info: reply should be a P...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _fake_next_op(self, context, message, dry_run=False):\r\n if context.channel in self._fake_ops:\r\n channel = context.channel\r\n if len(self._fake_ops[channel]) > 0:\r\n if \"on_message\" not in self._fake_ops[channel][0] \\\r\n or self._fake_...
[ "0.6204553", "0.57941633", "0.57386607", "0.5676188", "0.55851084", "0.5510728", "0.54746866", "0.5442715", "0.54413027", "0.5374735", "0.5304368", "0.5282957", "0.52620536", "0.5245205", "0.52339035", "0.52218646", "0.52083987", "0.52082664", "0.5190077", "0.5156617", "0.512...
0.5297616
11
Checks whether incoming message could be processed
def supports(self, message):
    if message.method == '__testing__':
        return True
    return self._interface.supports(message)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_for_incoming_info(self):\n\n if self.test_message_response:\n self.parse_incoming_message(self.test_message_response)\n return True\n\n POLL_ONLY_TIMEOUT_VALUE = 0\n got_at_least_one = False\n while (True):\n readables, writables, errors = sele...
[ "0.7115128", "0.6838178", "0.6745119", "0.6687847", "0.6675145", "0.66316074", "0.65846056", "0.65725505", "0.6522481", "0.6514949", "0.65149397", "0.629913", "0.629913", "0.6284352", "0.62244946", "0.61825305", "0.61751574", "0.613391", "0.61156756", "0.6089335", "0.60848534...
0.0
-1
Checks whether or not the reply to message m2 should be faked. If a field of m1 is None, the corresponding field of m2 is not compared at all.
def _fake_message_compare(m1, m2):
    m1 = m1.serialize()
    m2 = m2.serialize()
    diff = False
    for i in range(len(m1)):
        if m1[i] is None:
            continue
        if m1[i] != m2[i]:
            diff = True
            break
    return not diff
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testNoneAssignment(self):\n class MyMessage(messages.Message):\n\n my_field = messages.StringField(1)\n\n m1 = MyMessage()\n m2 = MyMessage()\n m2.my_field = None\n self.assertEquals(m1, m2)", "def __eq__(self, other):\n if not isinstance(other, SendMmsReq...
[ "0.61221015", "0.60199493", "0.5698596", "0.56491196", "0.56256944", "0.55935615", "0.5585704", "0.55634177", "0.55492973", "0.55470234", "0.55111665", "0.5495307", "0.546077", "0.5453699", "0.5440305", "0.5392359", "0.5371638", "0.5361735", "0.5357879", "0.53478", "0.5341549...
0.7252573
0
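A usage sketch for _fake_message_compare() above: a pattern message whose fields are None acts as a wildcard against the incoming message. The constructor arguments and the bare-function call site below are illustrative assumptions, not taken from the original code.

# Illustrative only: None fields in the pattern are skipped by _fake_message_compare().
pattern  = PlatformMessage(method="start")                    # other fields assumed to stay None -> wildcards
incoming = PlatformMessage(method="start", args=[1], kwargs={})
matches  = _fake_message_compare(pattern, incoming)           # only the non-None fields of pattern are compared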
Registers information for faking replies
def _register_fake_next_op(self, channel, fake_info): assert isinstance(fake_info, (list, tuple, dict)), "fake_info should be a dict or list of dict or tuple of dict" if isinstance(fake_info, (tuple, list)): for f in fake_info: assert isinstance(f, dict), "fake_info shoul...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _post_answer(self, answer):\n print(answer)\n self.messages_received.append(answer)", "def _post_answer(self, answer):\n print(answer)\n self.messages_received.append(answer)", "def register_message():\n global mss_cnt\n\n gmess = Graph()\n\n # Construimos el mensaje de...
[ "0.576329", "0.576329", "0.5649313", "0.56348217", "0.55767995", "0.5420812", "0.5407427", "0.5336492", "0.5320446", "0.53109264", "0.5305628", "0.5286324", "0.5285838", "0.5274294", "0.52733195", "0.52711815", "0.5264393", "0.5212218", "0.518291", "0.51799375", "0.50915813",...
0.47367492
74
Implements method "__testing__" that would be supported by all protocols
def _general_testing(self, context, kind, *args, **kwargs): if kind == "fake_next_op": self._register_fake_next_op(context.channel, *args, **kwargs) self._reply(context, proto_success({}, None), None) return True self._reply(context, proto_failure({"Unsupported t...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_should_implement(self):\n pass", "def test_supported_protocol(self):\n assert self.handler.SUPPORTED_PROTOCOL is None", "def test_protocols(container, protocol):\n assert isinstance(container, protocol)", "def test(self):\n raise NotImplementedError", "def test_differentPro...
[ "0.71973604", "0.71074444", "0.7052137", "0.70288616", "0.67791927", "0.6558383", "0.6532375", "0.6532375", "0.6508606", "0.6500069", "0.6478221", "0.6394548", "0.6394548", "0.6394548", "0.63722515", "0.63722515", "0.63722515", "0.63722515", "0.63722515", "0.62949234", "0.628...
0.6247752
24
Checks whether reply to this message should be faked and fakes it if required
def _fake_next_op(self, context, message, dry_run=False): if context.channel in self._fake_ops: channel = context.channel if len(self._fake_ops[channel]) > 0: if "on_message" not in self._fake_ops[channel][0] \ or self._fake_message_compare(se...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_message_missing_body(self):\n receipt_handle = 'blah'\n msg = [{\"ReceiptHandle\": receipt_handle}]\n with patch.object(self.dead_letter, 'remove_message_from_queue') as dequeue_fake:\n self.dead_letter.handle_messages(msg)\n\n # Ensure message dequeued.\n ...
[ "0.59258664", "0.59085613", "0.57893336", "0.57723594", "0.5757017", "0.57384187", "0.5699918", "0.5680963", "0.5651382", "0.5646977", "0.5619524", "0.5575746", "0.55438524", "0.5535469", "0.5488948", "0.5482735", "0.5448364", "0.54328763", "0.5418796", "0.5418293", "0.538297...
0.5759425
4
Implements builtin methods of protocol base class
def _process_message_general(self, context, message): f = self._fake_next_op(context, message) if f is True: return True elif f is not False: return f elif message.method == "__testing__": self._general_testing(context, *message.args, **messag...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def protocol(self):\n ...", "def protocol(self):\n\n raise NotImplementedError()", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, **kwargs): # real signature unknown\n pass", "def __subclasshook__(self, *args, *...
[ "0.6998625", "0.66639465", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713", "0.62314713",...
0.0
-1
Processes an incoming message. First tries the protocol's builtins; if the message wasn't processed by them, it is passed to the protocol's interface object.
def process_message(self, context, message):
    r = self._process_message_general(context, message)
    if r is True:
        return
    elif r is not False:
        self._interface.incoming(context, message, r)
    else:
        self._interface.incoming(context, message, None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _process_msg(cls, msg):\n raise NotImplementedError", "def handle_protobuf(self, message: protobuf.ProtocolMessage) -> None:", "def process_message(self, msg, src):", "def handleMessage(msg):", "def _process_message(self, obj):\n pass", "def _r_on_incoming_message(self, string, protocol...
[ "0.6494044", "0.6347405", "0.6252538", "0.61391824", "0.6027273", "0.6013206", "0.59676176", "0.59675497", "0.594518", "0.59192914", "0.5894395", "0.5877927", "0.58471686", "0.5832104", "0.58276415", "0.58168703", "0.5714494", "0.5707053", "0.56793517", "0.567809", "0.5669059...
0.547723
32
Validates that certain fields exist in self._context and have the specified values. Use it to make sure the protocol's FSM is in the right state.
def _validate_context(self, content):
    result = False
    if self._context is not None:
        for k in content:
            if k not in self._context or self._context[k] != content[k]:
                break
            result = True
    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _validate(self, instance, value):", "def is_valid(self, value):\r\n pass", "def validate(self, instance, value):", "def validate(self, instance, value):", "def _validate_post_fields(self, value, name, result):\n state = result.get(\"state\")\n persistent_state = result.get(\"persis...
[ "0.63824505", "0.6050479", "0.60258853", "0.60258853", "0.5951332", "0.5951332", "0.5933391", "0.5919774", "0.5896552", "0.5887481", "0.5868621", "0.58041704", "0.57922333", "0.57876384", "0.5754241", "0.57441854", "0.5739001", "0.5719968", "0.57187736", "0.569956", "0.568105...
0.55769426
32
Exposes protected data to a caller. Be extremely careful with it: it contains the originals, not copies.
def expose_data(self): return _ExposedFarmData(self._platforms, self._awaiting, self._channels)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_data_protected(self): \n pass", "def protected(_):\n return False # This protects nothing", "def write_protected(cls, **kwargs: Any) -> \"DataSchema[ObjType]\":\n return super().write_protected(**kwargs) # type: ignore", "def __setattr__(self,name,value):\n\n ...
[ "0.7407494", "0.6308666", "0.597678", "0.5667471", "0.5667471", "0.5665087", "0.5659497", "0.56185216", "0.5615482", "0.55298316", "0.5414067", "0.53964454", "0.53828067", "0.53819466", "0.53760314", "0.5359521", "0.535882", "0.5358776", "0.5326243", "0.5324696", "0.529068", ...
0.481632
100
Returns running state for specified platform
def is_running(self, platform):
    if platform not in self._platforms:
        raise ValueError("Platform {} is not registered".format(platform))
    return self._platforms[platform].running
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def state(self):\n data = self.coordinator.data[self._host_name][self._node_name][self._vm_id]\n if data[\"status\"] == \"running\":\n return STATE_ON\n return STATE_OFF", "def running_state(self) -> int | None:\n return self.cluster.get(\"running_state\")", "def running(...
[ "0.61530626", "0.60770965", "0.6040421", "0.60027874", "0.5839394", "0.58060783", "0.5787307", "0.57715446", "0.57025546", "0.56921524", "0.56680083", "0.55934256", "0.55586237", "0.553357", "0.5515652", "0.55087674", "0.5503442", "0.5490447", "0.5443907", "0.54368734", "0.54...
0.6958679
0
Checks whether all platforms are running or not
def all_is_running(self): return all(p.running for p in self._platforms.values())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_platform():\n system = platform.system()\n distro = platform.platform()\n is_raspberry_pi = False\n try:\n info = open(\"/proc/cpuinfo\").read()\n except FileNotFoundError:\n is_raspberry_pi = False\n else:\n # bcm2708: Raspberry Pi 1\n # bcm2709: Raspberry P...
[ "0.7448283", "0.7354531", "0.69025147", "0.68859076", "0.6782263", "0.6772269", "0.67584515", "0.6750433", "0.6750433", "0.67330426", "0.6725543", "0.6633071", "0.6604187", "0.65855706", "0.65599746", "0.6514661", "0.65100974", "0.6507308", "0.650659", "0.6505434", "0.643089"...
0.7654313
0
Checks whether all platforms are stopped or not
def all_is_stopped(self): return all(not p.running for p in self._platforms.values())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def emergency_stop(self):\r\n eprint(\"Emergency platforms stop\")\r\n stop_list = []\r\n for p in self._platforms:\r\n stop_list.append(self._platforms[p])\r\n\r\n success = True\r\n while len(stop_list) > 0: # NOTE: stop platforms in reverse order\r\n p = ...
[ "0.75852925", "0.7146642", "0.6602631", "0.65657175", "0.628147", "0.62783736", "0.61895704", "0.61452276", "0.60707706", "0.60059714", "0.5987051", "0.5946374", "0.5929576", "0.5919849", "0.5919849", "0.5917849", "0.58654445", "0.58415127", "0.582378", "0.58181137", "0.57904...
0.82884616
0
Registers a new platform (or at least tries to). If the new platform depends on platforms that have not been registered yet, its registration is deferred and continues once all the platforms it depends on are registered.
def register_platform(self, factory, kind, parent=None, wait=None): self._try_register_platform(factory, kind, parent, wait)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def _register_hardware_platform(\n hass: HomeAssistant, integration_domain: str, platform: HardwareProtocol\n) -> None:\n if integration_domain == DOMAIN:\n return\n if not hasattr(platform, \"async_info\"):\n raise HomeAssistantError(f\"Invalid hardware platform {platform}\")\n has...
[ "0.71603966", "0.7015166", "0.6779148", "0.65924436", "0.6380313", "0.6261706", "0.6097711", "0.5934751", "0.58920646", "0.5801316", "0.5635861", "0.56350684", "0.5605743", "0.55982506", "0.5503871", "0.5495337", "0.5468214", "0.54491985", "0.5402476", "0.536305", "0.5344545"...
0.7204478
0
Worker method that actually registers the platform
def _try_register_platform(self, factory, kind, parent, wait, awaiting=False): name = factory.name assert kind is not None, "instance kind can't be None (instance name is {})".format(name) if factory.name is None: factory.name = name = "random_name" # TODO: use GUID ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def async_process_hardware_platforms(hass: HomeAssistant) -> None:\n hass.data[DOMAIN][\"hardware_platform\"] = {}\n\n await async_process_integration_platforms(hass, DOMAIN, _register_hardware_platform)", "async def _register_hardware_platform(\n hass: HomeAssistant, integration_domain: str, plat...
[ "0.67203164", "0.6423017", "0.62060404", "0.61684966", "0.6153153", "0.5944612", "0.59082896", "0.5862393", "0.5726266", "0.5700211", "0.56732666", "0.5646277", "0.5646277", "0.5623879", "0.56002146", "0.5568672", "0.5550284", "0.5511287", "0.54954875", "0.54954875", "0.54710...
0.5712664
9
Runs through platforms whose registration was deferred and tries to register them again
def _check_awaiting(self):
    # TODO: check for wait loops
    for w in list(self._awaiting.values()):
        self._try_register_platform(w["instance"], w["kind"], w["parent"], w["wait"], awaiting=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def async_process_hardware_platforms(hass: HomeAssistant) -> None:\n hass.data[DOMAIN][\"hardware_platform\"] = {}\n\n await async_process_integration_platforms(hass, DOMAIN, _register_hardware_platform)", "def _try_register_platform(self, factory, kind, parent, wait, awaiting=False):\r\n name...
[ "0.667766", "0.6472627", "0.64608395", "0.6125786", "0.6103623", "0.585542", "0.5823268", "0.5784557", "0.57517254", "0.56858265", "0.5681627", "0.5622832", "0.5608007", "0.55615294", "0.5553192", "0.55258495", "0.5492156", "0.54448736", "0.54345196", "0.5424958", "0.53986436...
0.6857937
0
Stops platforms as best it can
def emergency_stop(self): eprint("Emergency platforms stop") stop_list = [] for p in self._platforms: stop_list.append(self._platforms[p]) success = True while len(stop_list) > 0: # NOTE: stop platforms in reverse order p = stop_list.pop(-1) ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def platform_stop(self):\n self.platform.stop()", "def stopEngines():\n pass", "def stop(self):\n # Cleanup platform first.\n self.cleanup()\n\n if self.init_lhost:\n self._lhost.stop()\n\n self.status = False # pylint: disable=attribute-defined-outside-init", ...
[ "0.8364842", "0.75134593", "0.7007069", "0.6861968", "0.6861968", "0.6861968", "0.6861968", "0.6804117", "0.6767265", "0.67454034", "0.6689028", "0.66532433", "0.66270936", "0.65178484", "0.65178484", "0.6510709", "0.64955664", "0.6493137", "0.6477048", "0.6451692", "0.645169...
0.7356
2
Unregisters a platform instance. Can recursively unregister all of the instance's nested platforms. If recursion is not used and nested platforms still exist at the moment unregister is called, an exception is raised.
def unregister_platform_instance(self, instance, recursive=False): platform_to_remove = None for k, v in self._platforms.items(): if v == instance: platform_to_remove = k break if platform_to_remove is None: raise ValueError("No plat...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unregister_platform(self, name, recursive=False):\r\n if name in dict(self._platforms):\r\n self.unregister_platform_instance(self._platforms[name], recursive)", "def unregister_platform(self, platform_uuid):\n return self.do_rpc('unregister_platform', platform_uuid=platform_uuid)", ...
[ "0.8009664", "0.6706928", "0.6103871", "0.5728736", "0.57112795", "0.568827", "0.5645297", "0.5567026", "0.5547235", "0.55387", "0.55205977", "0.5519004", "0.5469608", "0.54393387", "0.5368441", "0.5361209", "0.53546786", "0.53507906", "0.5317688", "0.52915245", "0.5263275", ...
0.8235111
0
Wrapper for the unregister_platform_instance method
def unregister_platform(self, name, recursive=False):
    if name in dict(self._platforms):
        self.unregister_platform_instance(self._platforms[name], recursive)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def deregister_instance(InstanceId=None):\n pass", "def unregister_platform_instance(self, instance, recursive=False):\r\n platform_to_remove = None\r\n for k, v in self._platforms.items():\r\n if v == instance:\r\n platform_to_remove = k\r\n break\r\n ...
[ "0.7437885", "0.722585", "0.71067107", "0.6713975", "0.67006856", "0.65715706", "0.6490679", "0.6454682", "0.6433686", "0.64128125", "0.6409299", "0.6344683", "0.6314368", "0.6292651", "0.6288555", "0.62754303", "0.62084085", "0.6114578", "0.609524", "0.6088872", "0.6082098",...
0.6979844
3
Unregisters a platform factory. Usually happens after successful platform registration
def unregister_factory(self, instance):
    to_remove = None
    for k, v in self._awaiting.items():
        if v["instance"] == instance:
            to_remove = k
            break
    if to_remove is not None:
        del self._awaiting[to_remove]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unregister_platform(self, name, recursive=False):\r\n if name in dict(self._platforms):\r\n self.unregister_platform_instance(self._platforms[name], recursive)", "def tear_down_registry(registry):\n for reg_adp in list(registry.registeredAdapters()):\n registry.unregisterAdapter(f...
[ "0.74393696", "0.6878564", "0.6781992", "0.67377704", "0.6643691", "0.65110755", "0.63454175", "0.6340024", "0.6208858", "0.62054634", "0.61191297", "0.60082805", "0.6006337", "0.59341156", "0.59182423", "0.58983845", "0.5870383", "0.5865489", "0.586217", "0.5841826", "0.5835...
0.55174696
41
Says whether or not specified platform (by instance) is subscribed to channel
def is_subscribed(self, inst, channel):
    if channel not in self._channels:
        return False
    return inst in self._channels[channel].subscribers
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_channel(self):\n return True", "def single_channel():\n return True", "def is_event_channel(channel: discord.TextChannel) -> bool:\n return get_active_feature(channel) == ActivationState.EVENT", "def check_event_channel(ctx: commands.Context) -> bool:\n if get_active_feature(ctx.ch...
[ "0.6758969", "0.65061855", "0.6170213", "0.60398626", "0.59377897", "0.58887047", "0.58596504", "0.5826945", "0.57663727", "0.574901", "0.57350725", "0.57127315", "0.5697036", "0.5667392", "0.5624958", "0.5623939", "0.5619179", "0.56188387", "0.55817115", "0.55813885", "0.557...
0.62435955
2
Subscribes specified platform to channel
def subscribe(self, inst, channel):
    if channel not in self._channels:
        self._channels[channel] = TalkChannel(channel, print_messages=self.verbose, timeref=self._timeref)
    self._channels[channel].subscribe(inst)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def subscribe(self, channel, **kwargs):\n pass", "def subscribe(receiver):", "def subscribe(receiver):", "def subscribe(receiver):", "async def _subscribe_to_channels(self, ws: WSAssistant):\n try:\n # BitMart WebSocket API currently offers only spot/user/order private channel.\n ...
[ "0.66808635", "0.5979494", "0.5979494", "0.5979494", "0.5952215", "0.5772565", "0.5757093", "0.57420313", "0.567808", "0.5661305", "0.56558084", "0.5654329", "0.56333697", "0.56230813", "0.5613967", "0.55897367", "0.558452", "0.55815876", "0.55511", "0.54930943", "0.54869413"...
0.5703723
8
Unsubscribes specified platform from channel
def unsubscribe(self, inst, channel): if channel not in self._channels: raise ValueError("Channel {} not exists!".format(channel)) self._channels[channel].unsubscribe(inst) return # TODO: ?delete channels if there is no subscribers # if len(self._channels[channe...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unsubscribe(self, channel, update_handler=None):\n pass", "def unsubscribe(receiver):", "def unsubscribe(receiver):", "def unsubscribe(receiver):", "def unsubscribe(receiver):", "def unsubscribe(receiver):", "def unregister_publisher(self, hostname):", "def unsubscribe(self, subject):\n ...
[ "0.6753091", "0.67168313", "0.67168313", "0.67168313", "0.67168313", "0.67168313", "0.61841947", "0.614403", "0.61410797", "0.60061914", "0.5981644", "0.59452146", "0.59342414", "0.59306574", "0.59306574", "0.59159696", "0.58706665", "0.58182955", "0.5811057", "0.58029413", "...
0.60036814
10
Starts new thread on specified channel
def start_thread(self, topic_caster, channel, interface, reply_to_tc=None):
    if channel not in self._channels:
        raise ValueError("Channel {} not exists!".format(channel))
    return TalkContext(channel, self._channels[channel].start_thread(topic_caster, reply_to_tc), interface)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, channel, name, server):\n threading.Thread.__init__(self, target=self._run)\n self.__channel = channel\n self.__transport = channel.get_transport()\n self.__name = name\n self.__server = server", "def join(self, channel):\n raise NotImplementedError", ...
[ "0.6507928", "0.6503015", "0.6434967", "0.6381614", "0.6208421", "0.6115024", "0.61078566", "0.6104068", "0.6061114", "0.60220224", "0.60070866", "0.6001835", "0.59062725", "0.58968794", "0.58294946", "0.5827837", "0.57995164", "0.578793", "0.57634115", "0.57456595", "0.57436...
0.5886065
14
Sends a message to the specified channel. Also updates the message with the source's name (puts it as the first component of the message).
def send_message(self, context, message, processing=None): # TODO: chained context to trace consequent requests # TODO: option to build UML out of conversation if processing is None: if message.is_reply: processing = 0 else: proces...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def send_message(self, channel : str, message : str):\n await self._connection.send_message(channel, message)", "def sendMsg(self, channel, message, length=None):\n self.logger.info(\"Sending in %s: %s\" % (channel, message))\n self.msg(channel, message, length)", "def send(self, msg...
[ "0.6885398", "0.675431", "0.65821993", "0.6575302", "0.65634346", "0.63894933", "0.6296932", "0.62330484", "0.6228483", "0.6189294", "0.61043847", "0.61043113", "0.60943514", "0.6092948", "0.6089668", "0.60888904", "0.60853773", "0.6045753", "0.60221964", "0.59493864", "0.594...
0.0
-1
Invokes processing of received messages by platforms. Usually called automatically from the send_message method.
def process_messages(self):
    for p in self._platforms.values():
        if p.received_messages > 0:
            p.queue_received_messages()
    for p in self._platforms.values():
        if p.queued_messages > 0:
            p.process_queued_messages()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def receive_message(self, message):", "def handleMessage(msg):", "def execute_message_received(self, message_received):\n pass", "def processMessage(self, *args, **kwargs):\r\n pass", "def receive(self, message):", "def handle_message(self, message):", "def run(self):\n alogger.info(...
[ "0.7143642", "0.71015793", "0.7093893", "0.702307", "0.6902395", "0.6859738", "0.68166494", "0.6779305", "0.6749306", "0.6745342", "0.6691393", "0.6681336", "0.6598424", "0.6589076", "0.65837705", "0.6558769", "0.6531817", "0.65218943", "0.6481661", "0.64773434", "0.646337", ...
0.6632475
12
Creates the Platform's instance. Should be called when the conditions for creating this very instance are met: the platforms that this instance depends on should already be registered.
def finish_registration(self): base_platform = self._args.get("base_platform", None) lcls = {} try: exec("from platforms.{}.main import RootClass as rc; cl = rc".format(base_platform), globals(), lcls) except ModuleNotFoundError as e: eprint("Package 'platfo...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def new_platform(self, id):\n p = Platform(self, id, [])\n self.platforms[id] = p\n return p", "def platform_init(self):\n if isinstance(self.imu, MockImuController) or isinstance(self.pwm_controller, MockPWMController):\n print(\"Mock components detected, creating mock ant...
[ "0.6996833", "0.6901165", "0.6643392", "0.66429996", "0.6620305", "0.66013455", "0.65052664", "0.64866644", "0.6392875", "0.6308356", "0.6216505", "0.6207374", "0.61393857", "0.6117521", "0.60896856", "0.60258096", "0.6019131", "0.6019086", "0.60079926", "0.5970026", "0.59591...
0.706969
0
Rules-specific dict with accumulated (summary) statistics
def stats(self): return {}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def summary(self):\n\n stats = {\n 'invalid': self.num_invalid,\n 'tested': self.num_tested,\n }\n return {\n k: v for k, v in stats.items()\n if k == 'invalid' or v != 0\n }", "def compute_metrics(self, results: list) -> dict:", "def comp...
[ "0.6340423", "0.62687165", "0.6262667", "0.62574226", "0.62306124", "0.61828154", "0.6159125", "0.61552644", "0.61206615", "0.6068211", "0.6058657", "0.6044734", "0.60149944", "0.601353", "0.5946292", "0.593793", "0.5930342", "0.5924686", "0.5853451", "0.5822528", "0.5802484"...
0.5944312
15
Scoreboard handler for incoming commands
def cmd(self, context, message): return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def received_message(self, msg):\n command = int(msg[:8], base=16)\n msg = msg[8:]\n self.log.debug(\"CONTROLLER - RECEIVED COMMAND: \" + str(command))\n self.log.debug(\"CONTROLLER - MSG: \" + str([int(msg[i:i+8], base=16) for i in range(0, len(msg), 8)]))\n if command == 0:\n ...
[ "0.6840156", "0.6722847", "0.65974706", "0.64543533", "0.64348984", "0.6269596", "0.6221818", "0.6210287", "0.62007767", "0.6155704", "0.61550456", "0.6142969", "0.61363643", "0.6136049", "0.60979337", "0.6096791", "0.60831475", "0.6043705", "0.60334855", "0.60296637", "0.597...
0.0
-1
Scoreboard handler for incoming responses
def response(self, context, message): return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle(self) -> None:\r\n\r\n if self.data.get(\"message-id\") != None:\r\n if self.data[\"status\"] == \"error\":\r\n print(self.data[\"error\"])\r\n return\r\n else:\r\n requestData = self.obs.pendingResponses.pop(self.data[\"message-i...
[ "0.667267", "0.60985816", "0.60786134", "0.60786134", "0.60626084", "0.60359406", "0.59885734", "0.59627366", "0.59537965", "0.592757", "0.5912877", "0.5900186", "0.58672774", "0.5839805", "0.5789892", "0.5711895", "0.56955504", "0.56955504", "0.56919116", "0.56762016", "0.56...
0.53896457
71
Use for commands that can't be handled
def _unhandled(self, context, message, reason): # TODO: call host's method instead self._host.unhandled.append((context.str, message.serialize(), reason)) self._host.expected[context.str] = None eprint("{}: Command {} can't be handled due to {}".format(self._host.name, message.serial...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _command(self, *cmd, handler=None):", "def check_commands(self):\n pass", "def accept_command():\n # TODO", "def commands():", "def commands():\n pass", "def commands():\n pass", "def commands():\n pass", "def commands():\n pass", "def command():\n pass", "async d...
[ "0.7336675", "0.7285146", "0.7252042", "0.70243955", "0.6982735", "0.6982735", "0.6982735", "0.6982735", "0.6879094", "0.67902136", "0.67638505", "0.6750508", "0.67460364", "0.6706385", "0.66952014", "0.66615903", "0.6648451", "0.6611464", "0.65597415", "0.6558557", "0.653128...
0.0
-1
Coverage summary tuple with the number of cases and the number of covered cases. May be used to determine the coverage percentage.
def coverage(self): return 0, 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_project_test_coverage(self) -> None:\n print_statistics = {}\n total_number_columns = 0\n number_columns_without_tests = 0\n\n for model_name in self.dbt_tests.keys():\n columns = self.dbt_tests[model_name]\n\n model_number_columns = 0\n model_co...
[ "0.71685416", "0.68137187", "0.6776906", "0.6677319", "0.6650848", "0.6639017", "0.6626381", "0.65998113", "0.6589576", "0.65476686", "0.6533306", "0.6496852", "0.6492893", "0.64699256", "0.64314204", "0.64311635", "0.64148486", "0.63928735", "0.6356816", "0.63496864", "0.633...
0.7372552
0
Coverage handler for incoming messages
def receive_message(self, context, message): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sample_handler(controller, msg, pkt):\n pass", "def handle(self, message):", "def handle_message(self, message):", "def processMessage(self, *args, **kwargs):\r\n pass", "def handle_message(self, message):\n\n try:\n controller_func = get_controller_func(message.code)\n\n ...
[ "0.6550436", "0.6415692", "0.6320498", "0.60273343", "0.60133165", "0.60033965", "0.6000999", "0.59604585", "0.595996", "0.5894243", "0.58883953", "0.58782285", "0.58495873", "0.5848556", "0.5842221", "0.5829234", "0.58113396", "0.5810897", "0.57167685", "0.57069564", "0.5706...
0.5589948
28
Encodes the input sequence and returns the hidden state from the last step of the encoder RNN.
def encode(self, x):
    _, hid = self.encoder(x)  # All RNN classes output a tuple of 2 objects: the output of the RNN first and the hidden state from the last item in
    return hid                # the input sequence second. We're only interested in the hidden state
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _encode(self):\n with tf.variable_scope('passage_encoding'):\n self.sep_p_encodes, _ = rnn('bi-lstm', self.p_emb, self.p_length, self.hidden_size)\n with tf.variable_scope('question_encoding'):\n self.sep_q_encodes, _ = rnn('bi-lstm', self.q_emb, self.q_length, self.hidden_s...
[ "0.683423", "0.6820723", "0.6807832", "0.675225", "0.66331315", "0.66327035", "0.65157163", "0.6451546", "0.643727", "0.6415194", "0.6381192", "0.6375881", "0.6340776", "0.6330177", "0.6329643", "0.63056594", "0.62932295", "0.6283715", "0.62812126", "0.6278949", "0.6253341", ...
0.7965193
0
Gives access to the hidden state of the individual components of the input batch. Since encode() encodes the whole batch of sequences in one call, but decoding is performed for every batch sequence individually, this method becomes necessary.
def get_encoded_item(self, encoded, index): #for vanilla RNN and GRU, since they have a hidden state represented as a single tensor ##return encoded[:, index:index+1] #for LSTM, since it has a hidden state represented as a tuple of two tensors: the cell state and the hidden state return encoded[0][:, ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def encode(self, x):\n _, hid = self.encoder(x) #All RNN classes output a tuple of 2 objects: the output of the RNN first and the hidden state from the last item in\n return hid #the input sequence second. We're only interested in the hidden state", "def _encode(self):\n with tf.variab...
[ "0.6531696", "0.6165337", "0.61568475", "0.59570867", "0.59406155", "0.5908961", "0.58463246", "0.5821808", "0.5770365", "0.57613736", "0.57613736", "0.5760088", "0.5760088", "0.5760088", "0.5757003", "0.57509893", "0.5750539", "0.57451725", "0.570995", "0.5698639", "0.569119...
0.0
-1
Performs one single decoding step for one example. It passes the hidden state for the decoder and input the tensor with the embeddings vector for the input token. The result of the decoder is passed to the output net to obtain the logits for every item in the dictionary. It outputs those logits and the new hidden state...
def decode_one(self, hid, input_x):
    out, new_hid = self.decoder(input_x.unsqueeze(0), hid)
    out = self.output(out)
    return out.squeeze(dim=0), new_hid
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decode():\n with tf.Session() as sess:\n # Create model and load parameters.\n model = create_model(True)\n model.batch_size = 1 # We decode one sentence at a time.\n init_model(sess, model)\n\n # Load vocabularies.\n vocab, rev_vocab = data_utils.get_vocabulary(FL...
[ "0.7202692", "0.6928575", "0.6782827", "0.6653837", "0.65899867", "0.6541342", "0.6538928", "0.6499897", "0.6485077", "0.641824", "0.64113784", "0.637471", "0.6374259", "0.6316419", "0.6304639", "0.62677693", "0.62158835", "0.6194001", "0.61918765", "0.61859244", "0.6164222",...
0.0
-1
Decodes the sequence by feeding each output token back into the net and acting according to the probabilities.
def decode_chain_argmax(self, hid, begin_emb, seq_len, stop_at_token=None): res_logits = [] res_tokens = [] cur_emb = begin_emb for _ in range(seq_len): out_logits, hid = self.decode_one(hid, cur_emb) out_token_v = torch.max(out_logits, dim=1)[1] #uses argmax to go from logits to the decode...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decode(self, passage_vectors, question_vectors, init_with_question=True):\n with tf.variable_scope('pn_decoder'):\n fake_inputs = tf.zeros(\n [tf.shape(passage_vectors)[0], 2, 1]) # not used\n sequence_len = tf.tile([2], [tf.shape(passage_vectors)[0]])\n ...
[ "0.5983819", "0.5868206", "0.5792075", "0.57452136", "0.5722459", "0.569437", "0.569437", "0.56869286", "0.5666454", "0.5658044", "0.56570065", "0.56113213", "0.560299", "0.5571299", "0.5535273", "0.55341107", "0.55276024", "0.5525068", "0.55168015", "0.5509156", "0.5509141",...
0.0
-1
Almost the same as decode_chain_argmax(), but instead of using argmax, it performs random sampling from the returned probability distribution.
def decode_chain_sampling(self, hid, begin_emb, seq_len, stop_at_token=None, device='cpu'): res_logits = [] res_actions = [] cur_emb = begin_emb for _ in range(seq_len): out_logits, hid = self.decode_one(hid, cur_emb) out_probs_v = F.softmax(out_logits, dim=1) out_probs = out_probs_v....
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _random_max_wrap(*args):\n _, opt_pt = random_maximise(*args)\n return opt_pt", "def custom_argmax(arr):\n return np.random.choice(np.flatnonzero(arr == arr.max()))", "def argmax_random_tie(seq, key=identity):\n return argmax(shuffled(seq), key=key)", "def argmax(values):\n\tvalues = np.a...
[ "0.68480635", "0.6822088", "0.67059606", "0.65348536", "0.63732535", "0.63186467", "0.6183038", "0.59274244", "0.58021593", "0.57685935", "0.5748391", "0.5623231", "0.56098354", "0.5572641", "0.5567708", "0.55628234", "0.55474985", "0.55280584", "0.55236834", "0.54596233", "0...
0.0
-1
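The two decoding chains above differ only in how the next token is picked from the decoder logits: decode_chain_argmax() takes the argmax, while decode_chain_sampling() draws from the softmax distribution. A small PyTorch sketch of just that choice; torch.multinomial stands in for the sampling call, which is truncated in the original.

import torch
import torch.nn.functional as F

def pick_next_token(out_logits, sample=False):
    if not sample:
        return torch.max(out_logits, dim=1)[1]                  # greedy choice, as in decode_chain_argmax
    probs = F.softmax(out_logits, dim=1)                         # distribution over the vocabulary
    return torch.multinomial(probs, num_samples=1).squeeze(1)    # random draw, as in decode_chain_sampling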
Returns a free socket port. It works by creating an empty socket, binding it to port 0 so that the OS automatically assigns a free port to it, obtaining the port using `getsockname` and then immediately closing it. The application intending to use this port should bind to it immediately so that no other application bin...
def get_free_port():
    sock = socket.socket()
    # bind to a random port (so that the OS automatically assigns us a free port)
    sock.bind(('', 0))
    # obtain the random port value
    port = sock.getsockname()[1]
    # close the socket so that the port gets free
    sock.close()
    return port
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_free_port():\n s = socket.socket()\n s.bind(('', 0))\n _, port = s.getsockname()\n s.close()\n return port", "def get_free_port():\n s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)\n s.bind(('127.0.0.1', 0))\n _, port = s.getsockname()\n s.close()\n return port", ...
[ "0.89505553", "0.8727646", "0.87067574", "0.8690947", "0.8689836", "0.8675498", "0.8668015", "0.86642563", "0.86574286", "0.85628986", "0.8248419", "0.8209171", "0.81788385", "0.81242055", "0.81235737", "0.8103176", "0.80116427", "0.79655474", "0.79650843", "0.7938987", "0.78...
0.85797644
9
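Usage sketch for get_free_port() above: because the probe socket is closed before the port number is returned, the caller should bind to the port immediately to keep the race window the docstring warns about as small as possible.

import socket

port = get_free_port()
server = socket.socket()
server.bind(('', port))   # claim the port right away
server.listen(1)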
Start a web app at the given port for serving the jigna view for the given template and context.
def start_web_app(template, context, port=8000):
    from tornado.ioloop import IOLoop
    from jigna.web_app import WebApp
    ioloop = IOLoop.instance()
    app = WebApp(template=template, context=context)
    app.listen(port)
    print 'Starting the web app on port %s ...' % port
    ioloop.start()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def serve(port):\n app.run(host='0.0.0.0', port=port, debug=True)", "def run():\n return render_template('index.html')", "def serve() -> None:\n uvicorn.run(\n \"bartender.web.application:get_app\",\n workers=settings.workers_count,\n host=settings.host,\n port=settings.por...
[ "0.6824245", "0.6593554", "0.65677285", "0.65660465", "0.6564316", "0.6545133", "0.65020066", "0.6479876", "0.6479806", "0.6458077", "0.64405406", "0.64051074", "0.6399282", "0.6394062", "0.63920164", "0.63898516", "0.6380909", "0.6349641", "0.62751275", "0.62736714", "0.6271...
0.8381516
0
Builds a differentiable augmentation pipeline based on its class type.
def build_aug(aug_type, **kwargs):
    if aug_type not in _AUGMENTATIONS:
        raise ValueError(f'Invalid augmentation type: `{aug_type}`!\n'
                         f'Types allowed: {list(_AUGMENTATIONS)}.')
    return _AUGMENTATIONS[aug_type](**kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _make_pipeline(preprocessors, classifier):\n if isinstance(preprocessors, list):\n # support only preprocessing of lenght 2\n return make_pipeline(preprocessors[0], preprocessors[1], classifier)\n if preprocessors is None:\n return make_pipeline(classifier)\n\n return make_pipelin...
[ "0.618366", "0.57090163", "0.554121", "0.55004793", "0.5495945", "0.54675126", "0.54427373", "0.5428508", "0.53878695", "0.5384956", "0.5383808", "0.53803486", "0.53243446", "0.5311842", "0.5292812", "0.5282065", "0.5245183", "0.52103126", "0.5209436", "0.5195009", "0.5174649...
0.5901364
1
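build_aug() above is a plain registry lookup: the aug_type string selects a class from _AUGMENTATIONS and **kwargs are forwarded to its constructor. The registry entry below is invented for illustration; only the lookup-and-call shape comes from the original code.

# Invented registry entry; the real keys/classes live in the original module's _AUGMENTATIONS.
class HypotheticalFlip:
    def __init__(self, probability=0.5):
        self.probability = probability

_AUGMENTATIONS = {'flip': HypotheticalFlip}

aug = _AUGMENTATIONS['flip'](probability=0.8)   # what build_aug('flip', probability=0.8) would do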
Test case based on the Fashion-MNIST tutorial
def test_kafka_output_sequence(): fashion_mnist = tf.keras.datasets.fashion_mnist ((train_images, train_labels), (test_images, _)) = fashion_mnist.load_data() class_names = [ "T-shirt/top", "Trouser", "Pullover", "Dress", "Coat", "Sandal", "Shirt", ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_keras_mnist():\n data = fetch(\"mnist\")\n check(data, n_samples_train=60000, n_samples_test=10000, n_features=28 * 28)", "def main():\n\n os.system(\"rm -rf images; mkdir images\")\n\n if (len(sys.argv) > 1):\n N = int(sys.argv[1])\n else:\n N = 10\n\n x_test = np.load(\...
[ "0.7775563", "0.7588313", "0.74669766", "0.7369658", "0.7182041", "0.7171548", "0.7122935", "0.7103584", "0.7040259", "0.70040536", "0.69778866", "0.69447285", "0.69191986", "0.69013685", "0.68820626", "0.68727684", "0.68639225", "0.68560404", "0.6851476", "0.68482596", "0.68...
0.0
-1
Test the functionality of the KafkaGroupIODataset when the consumer group is being newly created.
def test_kafka_group_io_dataset_primary_cg(): dataset = tfio.experimental.streaming.KafkaGroupIODataset( topics=["key-partition-test"], group_id="cgtestprimary", servers="localhost:9092", configuration=[ "session.timeout.ms=7000", "max.poll.interval.ms=8000", ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_primary_cg_new_topic():\n dataset = tfio.experimental.streaming.KafkaGroupIODataset(\n topics=[\"key-test\"],\n group_id=\"cgtestprimary\",\n servers=\"localhost:9092\",\n configuration=[\n \"session.timeout.ms=7000\",\n \"max.pol...
[ "0.71414036", "0.7126442", "0.7073285", "0.6968703", "0.6968703", "0.6883464", "0.68237174", "0.6809277", "0.680548", "0.68034434", "0.67761457", "0.67453897", "0.6516205", "0.65071094", "0.6487364", "0.6473713", "0.6469515", "0.63676554", "0.63668543", "0.6361959", "0.630479...
0.65959835
12
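The KafkaGroupIODataset tests in the surrounding records all follow the same construction pattern; a minimal consumption sketch is shown here, assuming a local broker at localhost:9092 and a pre-populated topic as in these tests. The group id is a hypothetical name, and each yielded element is assumed to be a (message, key) pair.

import tensorflow_io as tfio

dataset = tfio.experimental.streaming.KafkaGroupIODataset(
    topics=["key-partition-test"],
    group_id="cgexample",  # hypothetical consumer group name
    servers="localhost:9092",
    configuration=[
        "session.timeout.ms=7000",
        "max.poll.interval.ms=8000",
        "auto.offset.reset=earliest",
    ],
)
for message, key in dataset:  # assumed (message, key) element structure
    print(key.numpy(), message.numpy())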
Test the functionality of the KafkaGroupIODataset when the consumer group has read all the messages and committed the offsets.
def test_kafka_group_io_dataset_primary_cg_no_lag(): dataset = tfio.experimental.streaming.KafkaGroupIODataset( topics=["key-partition-test"], group_id="cgtestprimary", servers="localhost:9092", configuration=["session.timeout.ms=7000", "max.poll.interval.ms=8000"], ) assert ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_auto_offset_reset():\n\n dataset = tfio.experimental.streaming.KafkaGroupIODataset(\n topics=[\"key-partition-test\"],\n group_id=\"cgglobaloffsetearliest\",\n servers=\"localhost:9092\",\n configuration=[\n \"session.timeout.ms=7000\",\n ...
[ "0.6880712", "0.67094135", "0.66918844", "0.64915144", "0.63925254", "0.6284364", "0.61970186", "0.6131777", "0.61313844", "0.60980684", "0.60387677", "0.59368175", "0.59367937", "0.5835078", "0.56977046", "0.5690426", "0.56897986", "0.56894445", "0.56839097", "0.567647", "0....
0.62748325
6
Test the functionality of the KafkaGroupIODataset when the existing consumer group reads data from a new topic.
def test_kafka_group_io_dataset_primary_cg_new_topic(): dataset = tfio.experimental.streaming.KafkaGroupIODataset( topics=["key-test"], group_id="cgtestprimary", servers="localhost:9092", configuration=[ "session.timeout.ms=7000", "max.poll.interval.ms=8000", ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_resume_primary_cg_new_topic():\n import tensorflow_io.kafka as kafka_io\n\n # Write new messages to the topic\n for i in range(10, 100):\n message = f\"D{i}\"\n kafka_io.write_kafka(message=message, topic=\"key-test\")\n # Read only the newly sent 90 messag...
[ "0.7862941", "0.73936385", "0.70595884", "0.68379927", "0.68281376", "0.6740333", "0.6714614", "0.6704045", "0.6651035", "0.6603814", "0.64650786", "0.6370465", "0.6314179", "0.60142636", "0.5972484", "0.5948897", "0.5936028", "0.5844628", "0.5811538", "0.58090895", "0.580786...
0.76525635
1
Test the functionality of the KafkaGroupIODataset when the consumer group has yet to catch up and reads only the newly added messages (instead of reading from the beginning).
def test_kafka_group_io_dataset_resume_primary_cg(): import tensorflow_io.kafka as kafka_io # Write new messages to the topic for i in range(10, 100): message = f"D{i}" kafka_io.write_kafka(message=message, topic="key-partition-test") # Read only the newly sent 90 messages dataset =...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_resume_primary_cg_new_topic():\n import tensorflow_io.kafka as kafka_io\n\n # Write new messages to the topic\n for i in range(10, 100):\n message = f\"D{i}\"\n kafka_io.write_kafka(message=message, topic=\"key-test\")\n # Read only the newly sent 90 messag...
[ "0.76577485", "0.73002553", "0.6971321", "0.6940536", "0.6914675", "0.6828621", "0.6707299", "0.6647323", "0.6454408", "0.64065754", "0.62465376", "0.6183665", "0.61425513", "0.6138433", "0.6063199", "0.6052937", "0.60468554", "0.5998803", "0.5994782", "0.59467036", "0.592855...
0.7597071
1
Test the functionality of the KafkaGroupIODataset when the consumer group has yet to catch up and reads only the newly added messages (instead of reading from the beginning) from the new topic.
def test_kafka_group_io_dataset_resume_primary_cg_new_topic(): import tensorflow_io.kafka as kafka_io # Write new messages to the topic for i in range(10, 100): message = f"D{i}" kafka_io.write_kafka(message=message, topic="key-test") # Read only the newly sent 90 messages dataset =...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_resume_primary_cg():\n import tensorflow_io.kafka as kafka_io\n\n # Write new messages to the topic\n for i in range(10, 100):\n message = f\"D{i}\"\n kafka_io.write_kafka(message=message, topic=\"key-partition-test\")\n # Read only the newly sent 90 messag...
[ "0.74266845", "0.72838503", "0.69508654", "0.69229776", "0.6898682", "0.6788215", "0.66945463", "0.65659136", "0.64467585", "0.6332732", "0.62817466", "0.62599236", "0.6245761", "0.61413795", "0.6088383", "0.6065624", "0.6036422", "0.60189974", "0.6011775", "0.598527", "0.594...
0.7933696
0
Test the functionality of the KafkaGroupIODataset when a secondary consumer group is created and has yet to catch up on all the messages from the beginning.
def test_kafka_group_io_dataset_secondary_cg(): dataset = tfio.experimental.streaming.KafkaGroupIODataset( topics=["key-partition-test"], group_id="cgtestsecondary", servers="localhost:9092", configuration=[ "session.timeout.ms=7000", "max.poll.interval.ms=80...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_resume_primary_cg_new_topic():\n import tensorflow_io.kafka as kafka_io\n\n # Write new messages to the topic\n for i in range(10, 100):\n message = f\"D{i}\"\n kafka_io.write_kafka(message=message, topic=\"key-test\")\n # Read only the newly sent 90 messag...
[ "0.73009527", "0.72444355", "0.7244203", "0.70529", "0.6862954", "0.6756388", "0.66355544", "0.6621006", "0.64989007", "0.64381534", "0.63905436", "0.6337798", "0.62951696", "0.62927544", "0.6165134", "0.6079073", "0.6057113", "0.6047525", "0.5999353", "0.59861374", "0.593344...
0.7149818
3
Test the functionality of the KafkaGroupIODataset when a new consumer group reads data from multiple topics from the beginning.
def test_kafka_group_io_dataset_tertiary_cg_multiple_topics(): dataset = tfio.experimental.streaming.KafkaGroupIODataset( topics=["key-partition-test", "key-test"], group_id="cgtesttertiary", servers="localhost:9092", configuration=[ "session.timeout.ms=7000", ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_resume_primary_cg_new_topic():\n import tensorflow_io.kafka as kafka_io\n\n # Write new messages to the topic\n for i in range(10, 100):\n message = f\"D{i}\"\n kafka_io.write_kafka(message=message, topic=\"key-test\")\n # Read only the newly sent 90 messag...
[ "0.7916993", "0.7726499", "0.75612617", "0.73106486", "0.7162436", "0.7129199", "0.71261126", "0.6688718", "0.6652531", "0.63588357", "0.629463", "0.62792355", "0.61795956", "0.61230063", "0.61221826", "0.6078071", "0.6073669", "0.6049846", "0.5994584", "0.59919417", "0.59703...
0.7474016
3
Test the functionality of the `auto.offset.reset` configuration at global and topic level
def test_kafka_group_io_dataset_auto_offset_reset(): dataset = tfio.experimental.streaming.KafkaGroupIODataset( topics=["key-partition-test"], group_id="cgglobaloffsetearliest", servers="localhost:9092", configuration=[ "session.timeout.ms=7000", "max.poll.in...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_issue_reset_time(self):\n pass", "def test_reset():\n dev = _aws_device(wires=2)\n dev._circuit = CIRCUIT\n dev._task = TASK\n\n dev.reset()\n assert dev.circuit is None\n assert dev.task is None", "def reset_topic(bot, trigger):\n global report\n\n if get_state():\n ...
[ "0.58305633", "0.5826309", "0.57031226", "0.56880814", "0.5674384", "0.56572354", "0.55940706", "0.55784523", "0.5553939", "0.5552377", "0.5534695", "0.5404209", "0.54005706", "0.5393411", "0.53927755", "0.53910935", "0.53739655", "0.5346079", "0.5346079", "0.5346079", "0.534...
0.6541038
0
Test the functionality of the KafkaGroupIODataset when the consumer is configured to have an invalid stream_timeout value which is less than the message_timeout value.
def test_kafka_group_io_dataset_invalid_stream_timeout(): STREAM_TIMEOUT = -20 try: tfio.experimental.streaming.KafkaGroupIODataset( topics=["key-partition-test", "key-test"], group_id="cgteststreaminvalid", servers="localhost:9092", stream_timeout=STREAM...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_stream_timeout_check():\n import tensorflow_io.kafka as kafka_io\n\n def write_messages_background():\n # Write new messages to the topic in a background thread\n time.sleep(6)\n for i in range(100, 200):\n message = f\"D{i}\"\n kafka...
[ "0.7494565", "0.68445444", "0.61413234", "0.6002265", "0.59668875", "0.5940467", "0.59011936", "0.58735675", "0.58508337", "0.58345395", "0.57948846", "0.57629395", "0.57347316", "0.57000494", "0.5662307", "0.5627969", "0.5604364", "0.5560788", "0.5560788", "0.55548257", "0.5...
0.83997965
0
Test the functionality of the KafkaGroupIODataset when the consumer is configured to have a valid stream_timeout value and thus waits for the new messages from kafka.
def test_kafka_group_io_dataset_stream_timeout_check(): import tensorflow_io.kafka as kafka_io def write_messages_background(): # Write new messages to the topic in a background thread time.sleep(6) for i in range(100, 200): message = f"D{i}" kafka_io.write_kafka...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_group_io_dataset_invalid_stream_timeout():\n\n STREAM_TIMEOUT = -20\n try:\n tfio.experimental.streaming.KafkaGroupIODataset(\n topics=[\"key-partition-test\", \"key-test\"],\n group_id=\"cgteststreaminvalid\",\n servers=\"localhost:9092\",\n ...
[ "0.790406", "0.6366534", "0.6356977", "0.61877793", "0.60831314", "0.60248893", "0.5973552", "0.59520674", "0.5936202", "0.58431596", "0.57945126", "0.5725716", "0.5712746", "0.570165", "0.5689419", "0.56843454", "0.5650357", "0.55747294", "0.557346", "0.5550135", "0.5498954"...
0.8004889
0
Test the functionality of batch.num.messages property of KafkaBatchIODataset/KafkaGroupIODataset.
def test_kafka_mini_dataset_size(): import tensorflow_io.kafka as kafka_io # Write new messages to the topic for i in range(200, 10000): message = f"D{i}" kafka_io.write_kafka(message=message, topic="key-partition-test") BATCH_NUM_MESSAGES = 5000 dataset = tfio.experimental.streami...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_kafka_batch_io_dataset():\n\n dataset = tfio.experimental.streaming.KafkaBatchIODataset(\n topics=[\"mini-batch-test\"],\n group_id=\"cgminibatchtrain\",\n servers=None,\n stream_timeout=5000,\n configuration=[\n \"session.timeout.ms=7000\",\n \"...
[ "0.64023566", "0.6360138", "0.63011515", "0.6271337", "0.61075103", "0.6065402", "0.60378826", "0.60176194", "0.59386075", "0.5936413", "0.59298617", "0.59279746", "0.5922301", "0.5909433", "0.5898732", "0.58711314", "0.58583987", "0.58453405", "0.5823402", "0.5822543", "0.58...
0.684737
0
Test the functionality of the KafkaBatchIODataset by training a model directly on the incoming kafka message batch (of type tf.data.Dataset), in an online-training fashion.
def test_kafka_batch_io_dataset(): dataset = tfio.experimental.streaming.KafkaBatchIODataset( topics=["mini-batch-test"], group_id="cgminibatchtrain", servers=None, stream_timeout=5000, configuration=[ "session.timeout.ms=7000", "max.poll.interval.ms=...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def train(self, batch):\n pass", "def train(self, batch_training=False):\n raise NotImplementedError", "def train(self, num_batches: int):", "def train(train_dataset: torch.utils.data.Dataset, test_dataset: torch.utils.data.Dataset,\n training_config: dict = train_config, global_config...
[ "0.725532", "0.704524", "0.6841591", "0.6800355", "0.67653406", "0.67564285", "0.66826314", "0.6670544", "0.6661017", "0.6658394", "0.65870076", "0.6586631", "0.65774953", "0.65341204", "0.6532416", "0.6496773", "0.6493735", "0.64827245", "0.6464911", "0.64211214", "0.6411292...
0.7809584
0
Hook to be used by subclasses to define default ACLs in context.
def __base_acl__(self) -> list: _acls = [ (Allow, 'g:briefy_qa', ['add', 'delete', 'edit', 'list', 'view']) ] return _acls
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __acl__():", "def get_acl(registry=None):", "def load_acl(self):\n macl_class = self.app.config.get('MACL_CLASS', None)\n macl_dict = self.app.config.get('MACL_DEFINITION', None)\n default_roles = self.app.config.get('MACL_DEFAULT_ROLES', None)\n\n if default_roles is not None:\...
[ "0.6822682", "0.6064362", "0.6048131", "0.6034133", "0.58422464", "0.5725353", "0.57216316", "0.5718573", "0.5646452", "0.55869544", "0.5586848", "0.55686396", "0.55665946", "0.545657", "0.5445769", "0.5405563", "0.5397825", "0.53963953", "0.5391042", "0.5391042", "0.5389581"...
0.62626284
1
List of fields allowed in filtering and sorting.
def filter_allowed_fields(self): allowed_fields = super().filter_allowed_fields # Remove assignment_id allowed_fields.remove('assignment_id') return allowed_fields
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fields(self):\n return [f[1] for f in sorted(self.dd.fields.items())]", "def getFields(self):\n return sorted(self.schema.fields, key=lambda f: f.name)", "def fields(self) -> List[Field]: # pragma: no cover\n pass", "def _field_names(self):\n return [self._sanitize_field_name...
[ "0.7203694", "0.70437205", "0.69849616", "0.69056183", "0.6868574", "0.6849462", "0.67701536", "0.67247665", "0.67190754", "0.6691722", "0.668793", "0.668793", "0.66687506", "0.66492057", "0.6648726", "0.66373897", "0.66218805", "0.6619963", "0.6591684", "0.65700096", "0.6567...
0.64064956
33
Default filters for this Service.
def default_filters(self, query) -> object: assignment_id = self.request.matchdict.get('assignment_id') if assignment_id: query.filter(self.model.assignment_id == assignment_id) return query
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def std_filters():\n kwargs = {\n \"sentence_filters\":[punctuation_filter],\n \"word_filters\":[small_word_filter, stopword_filter, stemming_filter]\n }\n return kwargs", "def get_filters(self):", "def default_search_filters(cls):\n q = QueryDict(mutable=True)\n q.setlist(...
[ "0.725708", "0.7212429", "0.71636355", "0.6952968", "0.69228226", "0.6897654", "0.6820288", "0.6785757", "0.6766489", "0.6697421", "0.6685094", "0.6655373", "0.66130894", "0.6598326", "0.6537085", "0.6485721", "0.6480506", "0.6385164", "0.6371988", "0.63620067", "0.6340479", ...
0.0
-1
Creates a new pathfinding service
def __init__( self, web3: Web3, contract_manager: ContractManager, registry_address: Address, sync_start_block: int = 0, required_confirmations: int = 8, poll_interval: int = 10, ): super().__init__() self.web3 = web3 self.contract_mana...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createService(data):\n return Service(data).create()", "def new(\n cls,\n name: str,\n description: str,\n registration_schema: JSON,\n result_schema: JSON,\n database_session: Session) -> 'Service':\n raise NotImplementedError()", ...
[ "0.66257864", "0.6032868", "0.60326034", "0.6012139", "0.5914998", "0.5851863", "0.57294667", "0.57228637", "0.5708061", "0.5678534", "0.5656917", "0.56435645", "0.5626607", "0.5623124", "0.5580457", "0.55738074", "0.55718637", "0.55306184", "0.552144", "0.5487335", "0.547394...
0.0
-1
Checks if a token network is followed by the pathfinding service.
def follows_token_network(self, token_network_address: Address) -> bool: assert is_checksum_address(token_network_address) return token_network_address in self.token_networks.keys()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _is_request_to_token_url(self, request):\n if not self.token_url:\n return False\n\n if self.token_url == request.path:\n return True\n\n request.match(self.token_url)\n\n if request.matchdict:\n return True\n\n return False", "def reach(sel...
[ "0.5835548", "0.5597924", "0.5571266", "0.5485773", "0.5480768", "0.5480768", "0.54687196", "0.5454214", "0.5441436", "0.5436674", "0.5424187", "0.5416221", "0.53875804", "0.5344155", "0.5312403", "0.529961", "0.5279912", "0.5251868", "0.52469206", "0.52468187", "0.5241061", ...
0.67394125
0
Returns the `TokenNetwork` for the given address or `None` for unknown networks.
def _get_token_network(self, token_network_address: Address) -> Optional[TokenNetwork]: assert is_checksum_address(token_network_address) if not self.follows_token_network(token_network_address): return None else: return self.token_networks[token_network_address]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_network(address: str, netmask: str) -> IPv4Network:\n net = IPv4Network(f\"{address}/{netmask}\", strict=False)\n return net", "def _get_network(name):\n\n if name not in _NAME_TO_NETS:\n raise ValueError('Network name [%s] not recognized.' % name)\n return _NAME_TO_NETS[name].model", "def g...
[ "0.63604003", "0.61137646", "0.610317", "0.59229994", "0.57684386", "0.5742403", "0.5735001", "0.5647258", "0.5518795", "0.55021477", "0.5481944", "0.539838", "0.53970176", "0.5341332", "0.52976215", "0.5289916", "0.5274275", "0.52362585", "0.5219444", "0.52164847", "0.521336...
0.8030924
0
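The two token-network records above share a simple address-keyed lookup pattern; a minimal sketch follows, assuming eth_utils provides is_checksum_address (as the original code appears to rely on) and using a hypothetical stand-in TokenNetwork class.

from typing import Dict, Optional

from eth_utils import is_checksum_address


class TokenNetwork:
    # Hypothetical stand-in for the real TokenNetwork class.
    def __init__(self, address: str):
        self.address = address


class PathfindingServiceSketch:
    def __init__(self) -> None:
        self.token_networks: Dict[str, TokenNetwork] = {}

    def follows_token_network(self, token_network_address: str) -> bool:
        # True only for networks this (sketch) service is tracking.
        assert is_checksum_address(token_network_address)
        return token_network_address in self.token_networks

    def _get_token_network(self, token_network_address: str) -> Optional[TokenNetwork]:
        # Returns the tracked TokenNetwork, or None for unknown networks.
        assert is_checksum_address(token_network_address)
        return self.token_networks.get(token_network_address)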
!TXT! Return the URL for the online feedback service
def get_feedback_url(self): return self.get_setting('service_feedback_url')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def support_url(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"support_url\")", "def Url(self) -> str:", "def feedback():\n return render_template(\"feedback.html\")", "def get_url():\n config = configparser.RawConfigParser()\n config.read(\"speech.cfg\")\n region = config.get('a...
[ "0.61752695", "0.61325103", "0.6079804", "0.6072169", "0.6037325", "0.59497833", "0.5917915", "0.5912349", "0.5900737", "0.58638495", "0.5862365", "0.58503306", "0.583637", "0.58221316", "0.5819276", "0.580274", "0.5801738", "0.5754639", "0.57428956", "0.57399815", "0.5739981...
0.78211945
0
Load CliMAF standard operators. Invoked by the standard CliMAF setup. The operators list also shows in the variable 'cscripts'. They are documented elsewhere.
def load_standard_operators(): # # Compute scripts # cscript('select' ,scriptpath+'mcdo.sh "${operator}" "${out}" "${var}" "${period_iso}" "${domain}" "${alias}" "${units}" "${missing}" ${ins} ', commuteWithTimeConcatenation=True, commuteWithSpaceConcatenation=True) # cscript('c...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _basic_operators_init():\n global BASIC_OPERATORS\n\n BASIC_OPERATORS = {\n \"angle_between\": {\n \"node\": \"angleBetween\",\n \"inputs\": [\n [\"vector1X\", \"vector1Y\", \"vector1Z\"],\n [\"vector2X\", \"vector2Y\", \"vector2Z\"],\n ...
[ "0.5800942", "0.54121894", "0.5350327", "0.52697974", "0.5171791", "0.5129815", "0.511153", "0.5089675", "0.5077119", "0.50660294", "0.50468045", "0.5024355", "0.5013382", "0.5007174", "0.5001452", "0.49958974", "0.49821275", "0.497245", "0.49616897", "0.49017334", "0.4900833...
0.7239035
0
Return the query list as a DataFrame.
def get_query_list(): prov_list = QueryProvider.list_data_environments() print("Generating documentation for the following providers") print(", ".join(list(PROVIDERS))) print("Skipping the following providers") print(", ".join(list(set(prov_list) - set(PROVIDERS)))) env_providers = {prov: Q...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def query_to_df(query):\n df = pd.DataFrame(query.all())\n df.columns = [x['name'] for x in query.column_descriptions]\n return df", "def query2df(query):\n df = pd.DataFrame(data = list(itertools.product([0, 1], repeat=len(query.variables))), columns=query.variables)\n df['p'] = query.values.flat...
[ "0.7733995", "0.7497827", "0.73814756", "0.7326154", "0.7304706", "0.7227527", "0.7195444", "0.71918434", "0.71499586", "0.7113315", "0.70896244", "0.7081603", "0.70788634", "0.70317006", "0.70228714", "0.7020063", "0.7013619", "0.6978598", "0.6976442", "0.68564945", "0.68412...
0.6495787
50
Generate query list document.
def generate_document(query_df): # sourcery skip: identity-comprehension doc_lines = [ "Data Queries Reference", "=" * len("Data Queries Reference"), "", "", ] # This line fails if re-written as dict(query_df.groupby("Environment")) # pylint: disable=unnecessary-compreh...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_query(self):\n return", "def get_query_list():\n prov_list = QueryProvider.list_data_environments()\n\n print(\"Generating documentation for for the following providers\")\n print(\", \".join(list(PROVIDERS)))\n print(\"Skipping the following providers\")\n print(\", \".joi...
[ "0.64937806", "0.6459933", "0.60802174", "0.60519475", "0.59186524", "0.57771593", "0.5772095", "0.57147473", "0.5705186", "0.5635158", "0.5617996", "0.5612918", "0.55703384", "0.55453086", "0.5526077", "0.5504283", "0.54998153", "0.54894817", "0.5426078", "0.54242843", "0.54...
0.63552195
2
Fit the model from data in X.
def fit(self, X, y=None): if self.n_rows is None: n_rows = X.shape[0] else: n_rows = self.n_rows self.shape_ = n_rows, X.shape[1] self.scaler_ = MinMaxScaler().fit(X) return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fit(self, X):\n raise NotImplementedError", "def fit(self, X):", "def fit(self, X, y):", "def fit(self, X, y):", "def fit(self, X, y):", "def fit(self, X,y):\n pass", "def fit(self, X):\n self._fit_X = X", "def fit(self, X, y=...):\n ...", "def fit(self, X, y=...):\n...
[ "0.8456153", "0.84106666", "0.8192276", "0.8192276", "0.8192276", "0.8108174", "0.8097223", "0.8097219", "0.8097219", "0.8097219", "0.8097219", "0.8097219", "0.8097219", "0.8097219", "0.8097219", "0.8097219", "0.8097219", "0.808293", "0.8073022", "0.8018419", "0.79754156", ...
0.0
-1
Fit the model from data in X. PCA is fit to estimate the rotation and UniformSampler is fit to the transformed data.
def fit(self, X, y=None): self.pca_ = self._make_pca() transformed = self.pca_.fit_transform(X) self.sampler_ = UniformSampler(n_rows=self.n_rows).fit(transformed) return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fit_transform(self, X, y=None):\n X = np.asfortranarray(X, dtype=np.float64)\n Q = np.empty(\n (self.n_components, X.shape[1]), dtype=np.float64, order='F')\n U = np.empty(\n (X.shape[0], self.n_components), dtype=np.float64, order='F')\n w = np.empty(self.n_co...
[ "0.74559265", "0.7146048", "0.7100563", "0.7100563", "0.7100563", "0.7031072", "0.6953819", "0.68829376", "0.6859341", "0.6859341", "0.6859318", "0.6813972", "0.67715836", "0.6764781", "0.67534685", "0.6715386", "0.66989195", "0.6691517", "0.6683969", "0.6655785", "0.6646973"...
0.7536608
0
Return a specific sample. The sample is generated from the transformed distribution and transformed back to the original space.
def get_sample(self, seed): transformed = self.sampler_.get_sample(seed) return self.pca_.inverse_transform(transformed)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_sample(self):\n mu = self._get_mean()\n sample = self.random.normal(mu)\n return sample", "def _get_sample(self):\n mu = self._get_mean().reshape((1, self.out_dim))\n sigma = self.variables[\"s\"]\n sample = self.random.normal(mu, sigma)\n sample = sample...
[ "0.69831187", "0.6970329", "0.6696096", "0.66158164", "0.64837795", "0.642031", "0.6373376", "0.63586676", "0.6337623", "0.62676066", "0.62550586", "0.62331295", "0.62140554", "0.6144703", "0.614365", "0.6135289", "0.61312723", "0.6130087", "0.61157453", "0.6106311", "0.60812...
0.6645877
3
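The fit and get_sample records above describe a rotate-sample-rotate-back pipeline; a minimal end-to-end sketch follows, assuming scikit-learn's PCA and using a simplified UniformSamplerSketch as a stand-in for the original UniformSampler.

import numpy as np
from sklearn.decomposition import PCA


class UniformSamplerSketch:
    # Simplified stand-in: samples uniformly within the per-axis data range.
    def fit(self, X):
        self.low_, self.high_ = X.min(axis=0), X.max(axis=0)
        return self

    def get_sample(self, seed):
        rng = np.random.default_rng(seed)
        return rng.uniform(self.low_, self.high_)


X = np.random.default_rng(0).normal(size=(200, 3))
pca = PCA().fit(X)                                       # estimate the rotation
sampler = UniformSamplerSketch().fit(pca.transform(X))   # fit in rotated space
sample = pca.inverse_transform(sampler.get_sample(seed=42).reshape(1, -1))[0]
print(sample)  # a point expressed back in the original space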
Optimise the marginal likelihood. Work with the log of beta; fmin works better that way.
def optimise_GP_kernel(self,iters=1000): new_params=SCG(self.ll_hyper,self.ll_hyper_grad,np.hstack((self.DGPLVM_tar.GP.kernel.get_params(), np.log(self.DGPLVM_tar.GP.beta))),maxiters=iters,display=True,func_flg=0) #gtol=1e-10,epsilon=1e-10, # new_params = fmin_cg(self.ll,np.hstack((self....
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log_marginal_likelihood(X_train,y_train,phi,tau=1.,Ve=1.e-10):", "def log_marginal_likelihood(self, eval_gradient=False):\n A, B, mu_tilda, gamma_tilda, r_grid, n_grid = self.calc_tau(return_all=True)\n\n gamma = self.GAMMA\n gamma_y = self.GAMMA_Y\n\n A1 = np.copy(self.baseTau0)\...
[ "0.66340697", "0.6619952", "0.6452456", "0.6451917", "0.6415813", "0.64041406", "0.6392549", "0.63788503", "0.6315891", "0.6298545", "0.62985104", "0.6243928", "0.61450654", "0.6131926", "0.6124716", "0.6111718", "0.6108412", "0.60599285", "0.60583085", "0.60303396", "0.60056...
0.0
-1
This function deletes duplicate values from a singly linked list
def remove_dups(ll: SinglyLinkedList): seen = set() current = ll.head prev = None while current is not None: if current.data in seen: prev.next = current.next temp = current current = current.next temp.next = None else: seen.add...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_dup2(linkedlist):", "def remove_duplicates_slow(linked_list):\n current = linked_list.head\n while current:\n runner = current\n while runner:\n if runner.next_node and runner.next_node.value == current.value:\n # delete this duplicate\n run...
[ "0.84461933", "0.8152717", "0.8094504", "0.8059363", "0.78755367", "0.7790879", "0.7774575", "0.7728998", "0.76568514", "0.7549787", "0.74926317", "0.7353447", "0.7180826", "0.7150074", "0.71448135", "0.71269375", "0.7072273", "0.70498395", "0.695619", "0.6727125", "0.6698436...
0.8174585
1
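The remove_dups record above is truncated; a complete, self-contained sketch of the same set-based dedup follows, with a minimal Node class standing in for the original SinglyLinkedList implementation.

class Node:
    def __init__(self, data):
        self.data = data
        self.next = None


def remove_dups(head):
    # Walk the list once, tracking values already seen and unlinking repeats.
    seen = set()
    prev, current = None, head
    while current is not None:
        if current.data in seen:
            prev.next = current.next   # unlink the duplicate node
        else:
            seen.add(current.data)
            prev = current
        current = current.next
    return head


# 1 -> 2 -> 2 -> 3 -> 1  becomes  1 -> 2 -> 3
head = Node(1); head.next = Node(2); head.next.next = Node(2)
head.next.next.next = Node(3); head.next.next.next.next = Node(1)
remove_dups(head)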
Returns the model properties as a dict
def to_dict(self): result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_dict(self):\n return self.properties", "def to_dict(self):\n return self.properties", "def get_properties(self):\n return self.properties", "def asdict(self):\n return self._prop_dict", "def json(self):\n rv = {\n prop: getattr(self, prop)\n f...
[ "0.7751993", "0.7751993", "0.73391134", "0.7334895", "0.7297356", "0.727818", "0.7159078", "0.71578115", "0.71494967", "0.71494967", "0.71283495", "0.71275014", "0.7122587", "0.71079814", "0.7060394", "0.7043251", "0.7034103", "0.70233124", "0.69635814", "0.69586295", "0.6900...
0.0
-1
Returns the string representation of the model
def to_str(self): import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding("utf-8") return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n return super().__str__() + self.model.__str__()", "def __str__(self) -> str:\n # noinspection PyUnresolvedReferences\n opts = self._meta\n if self.name_field:\n result = str(opts.get_field(self.name_field).value_from_object(self))\n else:\n ...
[ "0.85856134", "0.7814518", "0.77898884", "0.7751367", "0.7751367", "0.7712228", "0.76981676", "0.76700574", "0.7651133", "0.7597206", "0.75800353", "0.7568254", "0.7538184", "0.75228703", "0.7515832", "0.7498764", "0.74850684", "0.74850684", "0.7467648", "0.74488163", "0.7442...
0.0
-1
Returns true if both objects are equal
def __eq__(self, other): if not isinstance(other, UpdateUserOption): return False return self.__dict__ == other.__dict__
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __eq__(self, other):\n return are_equal(self, other)", "def __eq__(self, other):\n return are_equal(self, other)", "def __eq__(self,other):\n try: return self.object==other.object and isinstance(self,type(other))\n except: return False", "def __eq__(self, other):\n if i...
[ "0.8089139", "0.8089139", "0.8054507", "0.79827213", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", "0.79669285", ...
0.0
-1
Returns true if both objects are not equal
def __ne__(self, other): return not self == other
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __ne__(self, other: object) -> bool:\n if self.__eq__(other):\n return False\n return True", "def __ne__(self, other: object) -> bool:\n return not self.__eq__(other)", "def __ne__(self, other) -> bool:\n return not self.__eq__(other)", "def __eq__(self, other):\n ...
[ "0.845611", "0.8391477", "0.8144138", "0.81410587", "0.8132492", "0.8093973", "0.80920255", "0.80920255", "0.80920255", "0.8085325", "0.8085325", "0.8076365", "0.8076365", "0.8065748" ]
0.0
-1
Create a board with distinct regions. Each region is continuous and separated from all other regions by at least two cells. The regions are generated using a Dirichlet process in which new cells are added to existing regions with a probability proportional to their boundary.
def make_partioned_regions(shape, alpha=1.0, max_regions=5, min_regions=2): ring = np.array([[1,1,1],[1,0,1],[1,1,1]], dtype=np.int16) adjacent = np.array([ # Diagonals don't count as adjacent [-1,0,0,1], [0,-1,1,0]], dtype=np.int16).T nearby = np.meshgrid([-2,-1,0,1,2], [-2,-1,0,1,2]) ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_grains(self, cells):\n\t\tfor cell_num in range(cells):\n\t\t\trandom_row = random.randrange(0,self.space.shape[0],1)\n\t\t\tsample_cell = np.random.choice(self.space[random_row],1)\n\t\t\tsample_cell = sample_cell[0]\n\t\t\twhile sample_cell.state != 0:\n\t\t\t\trandom_row = random.randrange(0,self.s...
[ "0.61022526", "0.607753", "0.59886265", "0.5929426", "0.59226096", "0.58879864", "0.5817077", "0.5783131", "0.57591105", "0.57534856", "0.5751205", "0.575003", "0.57356465", "0.57249886", "0.57249135", "0.5723089", "0.56941694", "0.56906545", "0.5687791", "0.56527513", "0.564...
0.71277833
0
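The region-growth description above can be illustrated with a toy, geometry-free sketch of the Chinese-restaurant-process style assignment it alludes to: each new cell joins an existing region with probability proportional to that region's weight, or opens a new region with probability proportional to alpha. The two-cell separation and the boundary bookkeeping of the real generator are omitted here.

import numpy as np


def crp_assignments(n_cells, alpha=1.0, seed=0):
    rng = np.random.default_rng(seed)
    region_weights = []
    assignments = []
    for _ in range(n_cells):
        weights = np.array(region_weights + [alpha], dtype=float)
        choice = rng.choice(len(weights), p=weights / weights.sum())
        if choice == len(region_weights):
            region_weights.append(1.0)     # open a new region
        else:
            region_weights[choice] += 1.0  # grow an existing region
        assignments.append(int(choice))
    return assignments


print(crp_assignments(20, alpha=2.0))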
Create a fence around unmasked regions such that nothing inside the regions can escape. Note that this is a little bit more aggressive than it strictly needs to be.
def build_fence(mask, shuffle=True): mask = mask.astype(np.int32) _i = np.array([-1,-1,-1,0,0,0,1,1,1], dtype=np.int32) _j = np.array([-1,0,1,-1,0,1,-1,0,1], dtype=np.int32) neighbors = ndimage.convolve(mask, np.ones((3,3)), mode='wrap') fence = np.zeros_like(mask) edge_i, edge_j = np.nonzero(ma...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_spurious_landmarks(self):\r\n \r\n remove = np.argwhere(self.lm_counter < 0)\r\n self.lm = np.delete(self.lm, remove, axis=0)\r\n self.lm_cvar = np.delete(self.lm_cvar, remove, axis=0)\r\n self.lm_counter = np.delete(self.lm_counter, remove)\r\n \r\n retu...
[ "0.5550262", "0.5375474", "0.52712905", "0.52280205", "0.522766", "0.5217894", "0.5148621", "0.51361966", "0.513452", "0.51226854", "0.5100473", "0.5098816", "0.5098428", "0.5061148", "0.5000526", "0.4992203", "0.49582353", "0.49527928", "0.49484676", "0.49470642", "0.4946795...
0.56984526
0
Populates an isolated region of the board. For examples of different region types, see ``safelife/levels/random/defaults.yaml``.
def populate_region(mask, layer_params): from .speedups import ( NEW_CELL_MASK, CAN_OSCILLATE_MASK, INCLUDE_VIOLATIONS_MASK) border = ndimage.maximum_filter(mask, size=3, mode='wrap') ^ mask interior = ndimage.minimum_filter(mask, size=3, mode='wrap') gen_mask = mask * ( NEW_CELL_MASK ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize_region(self):\n self.new_region_name = \"\"\n self.map.regions.create_new_region()", "def set_region(sender, instance, *args, **kwargs):\n if instance.geocity and not instance.georegion:\n instance.georegion = instance.geocity.region", "def test_assign_to_regions(self):\n...
[ "0.6742911", "0.5997335", "0.5913795", "0.573142", "0.56936634", "0.56373113", "0.56364506", "0.5610511", "0.5602637", "0.55689424", "0.5567876", "0.553663", "0.5532952", "0.55236477", "0.5481019", "0.5457451", "0.5454251", "0.544278", "0.5432112", "0.5432112", "0.5432112", ...
0.5901017
3
Add agents and exits to the board. This modifies both the board and regions in place.
def add_agents_and_exit(board, regions, agents, agent_types): agent_vals = [] point_tables = [] agent_names = [] agent_types = {'default': DEFAULT_AGENT, **agent_types} for agent_type in _fix_random_values(agents): agent_type = _fix_random_values(agent_type) if agent_type not in agen...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_to_simulation(self,agent):\n self.agents[agent.name] = agent\n self.network.add_node(agent)\n \n #agent given a grid queue at initialization\n grid_queue = [gq for gq in self.grid_queues.values() if gq.accepts(agent)][agent.sex]\n agent.grid_queue = grid_queue.inde...
[ "0.5694394", "0.5502142", "0.5498129", "0.5477356", "0.5419938", "0.53479344", "0.5290117", "0.52449757", "0.52416706", "0.51957417", "0.5178578", "0.5174868", "0.51587874", "0.5111144", "0.5063345", "0.5056426", "0.50496", "0.50238144", "0.50177455", "0.49965593", "0.4988289...
0.716102
0
Randomly generate a new SafeLife game board. Generation proceeds by creating several different random "regions", and then filling in each region with one of several types of patterns or tasks. Regions can be surrounded by fences / walls to make it harder for patterns to spread from one region to another. Each set of pa...
def gen_game( board_shape=(25,25), min_performance=-1, partitioning={}, starting_region=None, later_regions=None, buffer_region=None, named_regions={}, agents=['default'], agent_types={}, **etc): board_shape = _fix_random_values(board_shape) min_performance = _fix_random_values(min_perfo...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate(self):\n for i in range(4):\n random_first = randomize_first_box()\n self.randomize(random_first)\n for i in range(9):\n random_pos = randomize_position()\n self.randomize(random_pos)\n self.board.solve()", "def draw_random_setup(types...
[ "0.68248934", "0.64361274", "0.6384287", "0.6220165", "0.614266", "0.613112", "0.6068092", "0.6057356", "0.6054699", "0.6002918", "0.5973913", "0.5956003", "0.59538376", "0.59481156", "0.59429073", "0.59363997", "0.59216344", "0.59191567", "0.59100956", "0.58849156", "0.58718...
0.64837104
1
Highlights separable regions that are stable with the given period. A "separable" region is one which can be removed from the board without affecting any of the rest of the board.
def stability_mask(board, period=6, remove_agent=True): if remove_agent: board = board * ((board & CellTypes.agent) == 0) neighborhood = np.ones((3,3)) alive = (board & CellTypes.alive) // CellTypes.alive neighbors = ndimage.convolve(alive, neighborhood, mode='wrap') max_neighbors = neighbo...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_partioned_regions(shape, alpha=1.0, max_regions=5, min_regions=2):\n ring = np.array([[1,1,1],[1,0,1],[1,1,1]], dtype=np.int16)\n adjacent = np.array([ # Diagonals don't count as adjacent\n [-1,0,0,1],\n [0,-1,1,0]], dtype=np.int16).T\n nearby = np.meshgrid([-2,-1,0,1,2], [-2,-1,0,...
[ "0.54850954", "0.508608", "0.50796324", "0.506026", "0.50399494", "0.50315195", "0.502835", "0.49149758", "0.49066833", "0.49031895", "0.48982894", "0.48454717", "0.48394826", "0.48072115", "0.47997016", "0.47875527", "0.47487685", "0.4743557", "0.47257307", "0.4671103", "0.4...
0.47201967
19
Override this to do logic on input_update messages.
def do_on_input_update(self, msg_id, payload, player): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def data_input_changed(self):\n self.message.data = self.dataInput.toPlainText()\n self.validate_data_input(self.message.dlc)", "def update(self, msg):\n pass", "def process_IN_MODIFY(self, event):", "def handle_input(self, event):\n pass", "def oppdater(self, input):\n r...
[ "0.7064037", "0.68353105", "0.679278", "0.6780095", "0.6757028", "0.66975", "0.6632578", "0.6601894", "0.65531427", "0.6519125", "0.64932", "0.6461436", "0.645683", "0.64120686", "0.6398695", "0.63841844", "0.6377531", "0.63212633", "0.6266965", "0.6207796", "0.6190815", "0...
0.79876757
0
Given the state of the 'game', decide what your cells ('game.me.cells') should do.
def step(self, game: Game): print("Tick #{}".format(game.time_left)) splitValue = getSplitValue(game) print (getSplitValue(game)) for cell in game.me.cells: if game.time_left < 6: cell.trade(99999) if cell.mass >= splitValue: ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _check_cells(self):\n for row_number in range(self.number_cells_y):\n for col_number in range(self.number_cells_x):\n alive_neighbours = self._get_neighbours(row_number,col_number)\n \n self.to_be_updated[row_number][col_number] = False\n ...
[ "0.69623125", "0.6414086", "0.6400114", "0.63498706", "0.6227592", "0.6179046", "0.6162203", "0.61035144", "0.6049122", "0.604795", "0.6018108", "0.60137135", "0.60055864", "0.6002554", "0.598495", "0.59398884", "0.5925738", "0.5910711", "0.59071994", "0.5905565", "0.5895984"...
0.5600861
62
Collect data into fixed-length chunks or blocks
def grouper(iterable, n, fillvalue=None): # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx args = [iter(iterable)] * n return izip_longest(fillvalue=fillvalue, *args)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _chunk_data(self):\n for n in range(0, len(self.data) + 1, len(self.data) //\n self.num_of_chunks):\n yield self.data[0 + n:len(self.data) // self.num_of_chunks + n]", "def chunks(data: list, n: int) -> list:\n for i in range(0, len(data), n):\n yield data[i:...
[ "0.719677", "0.6773225", "0.6730021", "0.6680324", "0.6640408", "0.6590066", "0.65835804", "0.6442601", "0.6409235", "0.6338401", "0.6329446", "0.63054276", "0.62845904", "0.62551296", "0.6208536", "0.61931676", "0.61904657", "0.61624444", "0.6139798", "0.61395085", "0.613243...
0.0
-1
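The grouper recipe above uses the Python 2 name izip_longest without its import; a Python 3 equivalent with a quick usage check is sketched here.

from itertools import zip_longest


def grouper(iterable, n, fillvalue=None):
    # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
    args = [iter(iterable)] * n
    return zip_longest(*args, fillvalue=fillvalue)


print([''.join(g) for g in grouper('ABCDEFG', 3, 'x')])  # ['ABC', 'DEF', 'Gxx']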
Temporary workaround for the static template tag to support SCRIPT_NAME
def static(parser, token): return NewStaticNode.handle_token(parser, token)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_script_name(t_req):\n if settings.FORCE_SCRIPT_NAME is not None:\n return force_text(settings.FORCE_SCRIPT_NAME)\n\n # If Apache's mod_rewrite had a whack at the URL, Apache set either\n # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any\n # rewrites. Unfortunately...
[ "0.6034971", "0.5963628", "0.59341127", "0.58655584", "0.5860948", "0.58456707", "0.58188504", "0.5782478", "0.5782478", "0.57443565", "0.5699513", "0.5699513", "0.56902325", "0.56528944", "0.5573736", "0.5472369", "0.5468209", "0.5464693", "0.5461462", "0.54162765", "0.53910...
0.0
-1